Bumping manifests a=b2g-bump
[gecko.git] / js / src / frontend / BytecodeEmitter.cpp
blob8f289fe27408914a012145ccee2ed204f7b1c379
1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=8 sts=4 et sw=4 tw=99:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
7 /*
8 * JS bytecode generation.
9 */
11 #include "frontend/BytecodeEmitter.h"
13 #include "mozilla/ArrayUtils.h"
14 #include "mozilla/DebugOnly.h"
15 #include "mozilla/FloatingPoint.h"
16 #include "mozilla/PodOperations.h"
17 #include "mozilla/UniquePtr.h"
19 #include <string.h>
21 #include "jsapi.h"
22 #include "jsatom.h"
23 #include "jscntxt.h"
24 #include "jsfun.h"
25 #include "jsnum.h"
26 #include "jsopcode.h"
27 #include "jsscript.h"
28 #include "jstypes.h"
29 #include "jsutil.h"
31 #include "asmjs/AsmJSLink.h"
32 #include "frontend/Parser.h"
33 #include "frontend/TokenStream.h"
34 #include "vm/Debugger.h"
35 #include "vm/GeneratorObject.h"
36 #include "vm/Stack.h"
38 #include "jsatominlines.h"
39 #include "jsobjinlines.h"
40 #include "jsscriptinlines.h"
42 #include "frontend/ParseMaps-inl.h"
43 #include "frontend/ParseNode-inl.h"
44 #include "vm/NativeObject-inl.h"
45 #include "vm/ScopeObject-inl.h"
47 using namespace js;
48 using namespace js::gc;
49 using namespace js::frontend;
51 using mozilla::DebugOnly;
52 using mozilla::NumberIsInt32;
53 using mozilla::PodCopy;
54 using mozilla::UniquePtr;
/* Forward declarations for source-note helpers defined later in this file. */
static bool
SetSrcNoteOffset(ExclusiveContext* cx, BytecodeEmitter* bce, unsigned index, unsigned which, ptrdiff_t offset);

static bool
UpdateSourceCoordNotes(ExclusiveContext* cx, BytecodeEmitter* bce, uint32_t offset);
/*
 * Per-statement emitter state: extends the parser-shared StmtInfoBase with
 * the backpatch chains and offsets needed while emitting a statement's
 * bytecode.
 */
struct frontend::StmtInfoBCE : public StmtInfoBase
{
    StmtInfoBCE*    down;            /* info for enclosing statement */
    StmtInfoBCE*    downScope;       /* next enclosing lexical scope */

    ptrdiff_t       update;          /* loop update offset (top if none) */
    ptrdiff_t       breaks;          /* offset of last break in loop */
    ptrdiff_t       continues;       /* offset of last continue in loop */
    uint32_t        blockScopeIndex; /* index of scope in BlockScopeArray */

    explicit StmtInfoBCE(ExclusiveContext* cx) : StmtInfoBase(cx) {}

    /*
     * To reuse space, alias two of the ptrdiff_t fields for use during
     * try/catch/finally code generation and backpatching.
     *
     * Only a loop, switch, or label statement info record can have breaks and
     * continues, and only a for loop has an update backpatch chain, so it's
     * safe to overlay these for the "trying" StmtTypes.
     */

    /* Backpatch chain of JSOP_GOSUBs to a finally block; aliases |breaks|. */
    ptrdiff_t& gosubs() {
        MOZ_ASSERT(type == STMT_FINALLY);
        return breaks;
    }

    /* Backpatch chain of the try/finally guard jump; aliases |continues|. */
    ptrdiff_t& guardJump() {
        MOZ_ASSERT(type == STMT_TRY || type == STMT_FINALLY);
        return continues;
    }
};
namespace {

/*
 * Statement info for loops: adds the stack- and loop-depth bookkeeping used
 * to decide whether Ion can perform on-stack replacement (OSR) at the loop
 * entry.
 */
struct LoopStmtInfo : public StmtInfoBCE
{
    int32_t         stackDepth;      // Stack depth when this loop was pushed.
    uint32_t        loopDepth;       // Loop depth.

    // Can we OSR into Ion from here? True unless there is non-loop state on the stack.
    bool            canIonOsr;

    explicit LoopStmtInfo(ExclusiveContext* cx) : StmtInfoBCE(cx) {}

    // Downcast a StmtInfoBCE that is known (asserted) to be a loop.
    static LoopStmtInfo* fromStmtInfo(StmtInfoBCE* stmt) {
        MOZ_ASSERT(stmt->isLoop());
        return static_cast<LoopStmtInfo*>(stmt);
    }
};

} // anonymous namespace
/*
 * Construct an emitter for one script or function body.
 *
 * |parent| links emitters for nested functions (may be null); |sc| is the
 * shared context whose |context| provides rooting for the members below;
 * |script|/|lazyScript| are the compilation targets; |insideEval| and
 * |evalCaller| describe an enclosing eval, if any; |lineNum| seeds the line
 * number of both the prolog and main bytecode sections; |emitterMode|
 * selects normal, self-hosting, or lazy-function emission.
 */
BytecodeEmitter::BytecodeEmitter(BytecodeEmitter* parent,
                                 Parser<FullParseHandler>* parser, SharedContext* sc,
                                 HandleScript script, Handle<LazyScript*> lazyScript,
                                 bool insideEval, HandleScript evalCaller,
                                 bool hasGlobalScope, uint32_t lineNum, EmitterMode emitterMode)
  : sc(sc),
    parent(parent),
    script(sc->context, script),
    lazyScript(sc->context, lazyScript),
    prolog(sc->context, lineNum),
    main(sc->context, lineNum),
    current(&main),
    parser(parser),
    evalCaller(evalCaller),
    topStmt(nullptr),
    topScopeStmt(nullptr),
    staticScope(sc->context),
    atomIndices(sc->context),
    firstLine(lineNum),
    localsToFrameSlots_(sc->context),
    stackDepth(0), maxStackDepth(0),
    arrayCompDepth(0),
    emitLevel(0),
    constList(sc->context),
    tryNoteList(sc->context),
    blockScopeList(sc->context),
    yieldOffsetList(sc->context),
    typesetCount(0),
    hasSingletons(false),
    emittingForInit(false),
    emittingRunOnceLambda(false),
    insideEval(insideEval),
    hasGlobalScope(hasGlobalScope),
    emitterMode(emitterMode)
{
    // An eval caller implies we are inside an eval; a lazy emitter needs the
    // lazy script to emit into.
    MOZ_ASSERT_IF(evalCaller, insideEval);
    MOZ_ASSERT_IF(emitterMode == LazyFunction, lazyScript);
}
154 bool
155 BytecodeEmitter::init()
157 return atomIndices.ensureMap(sc->context);
/*
 * Rebuild the local-index -> frame-slot mapping from the script's bindings.
 * Returns false only on OOM while reserving the vector.
 */
bool
BytecodeEmitter::updateLocalsToFrameSlots()
{
    // Assign stack slots to unaliased locals (aliased locals are stored in the
    // call object and don't need their own stack slots). We do this by filling
    // a Vector that can be used to map a local to its stack slot.

    if (localsToFrameSlots_.length() == script->bindings.numLocals()) {
        // CompileScript calls updateNumBlockScoped to update the block scope
        // depth. Do nothing if the depth didn't change.
        return true;
    }

    localsToFrameSlots_.clear();

    if (!localsToFrameSlots_.reserve(script->bindings.numLocals()))
        return false;

    uint32_t slot = 0;
    for (BindingIter bi(script); !bi.done(); bi++) {
        // Argument bindings get no entry in the map.
        if (bi->kind() == Binding::ARGUMENT)
            continue;

        if (bi->aliased())
            localsToFrameSlots_.infallibleAppend(UINT32_MAX); // No stack slot.
        else
            localsToFrameSlots_.infallibleAppend(slot++);
    }

    // Block-scoped locals follow the body-level locals in the frame.
    for (size_t i = 0; i < script->bindings.numBlockScoped(); i++)
        localsToFrameSlots_.infallibleAppend(slot++);

    return true;
}
195 static ptrdiff_t
196 EmitCheck(ExclusiveContext* cx, BytecodeEmitter* bce, ptrdiff_t delta)
198 ptrdiff_t offset = bce->code().length();
200 // Start it off moderately large to avoid repeated resizings early on.
201 // ~98% of cases fit within 1024 bytes.
202 if (bce->code().capacity() == 0 && !bce->code().reserve(1024))
203 return -1;
205 jsbytecode dummy = 0;
206 if (!bce->code().appendN(dummy, delta)) {
207 js_ReportOutOfMemory(cx);
208 return -1;
210 return offset;
/*
 * Update the emitter's modeled operand-stack depth after the op written at
 * |target|, tracking the high-water mark in maxStackDepth.
 */
static void
UpdateDepth(ExclusiveContext* cx, BytecodeEmitter* bce, ptrdiff_t target)
{
    jsbytecode* pc = bce->code(target);
    JSOp op = (JSOp) *pc;
    const JSCodeSpec* cs = &js_CodeSpec[op];

    if (cs->format & JOF_TMPSLOT_MASK) {
        /*
         * An opcode may temporarily consume stack space during execution.
         * Account for this in maxStackDepth separately from uses/defs here.
         */
        uint32_t depth = (uint32_t) bce->stackDepth +
                         ((cs->format & JOF_TMPSLOT_MASK) >> JOF_TMPSLOT_SHIFT);
        if (depth > bce->maxStackDepth)
            bce->maxStackDepth = depth;
    }

    int nuses = StackUses(nullptr, pc);
    int ndefs = StackDefs(nullptr, pc);

    // Pop uses first, then push defs; the depth must never go negative.
    bce->stackDepth -= nuses;
    MOZ_ASSERT(bce->stackDepth >= 0);
    bce->stackDepth += ndefs;
    if ((uint32_t)bce->stackDepth > bce->maxStackDepth)
        bce->maxStackDepth = bce->stackDepth;
}
#ifdef DEBUG
/*
 * Debug-only sanity check: an op flagged check-strict may only appear in
 * strict code, and an op flagged check-sloppy only in non-strict code.
 */
static bool
CheckStrictOrSloppy(BytecodeEmitter* bce, JSOp op)
{
    if (IsCheckStrictOp(op) && !bce->sc->strict)
        return false;
    return !(IsCheckSloppyOp(op) && bce->sc->strict);
}
#endif
253 ptrdiff_t
254 frontend::Emit1(ExclusiveContext* cx, BytecodeEmitter* bce, JSOp op)
256 MOZ_ASSERT(CheckStrictOrSloppy(bce, op));
257 ptrdiff_t offset = EmitCheck(cx, bce, 1);
258 if (offset < 0)
259 return -1;
261 jsbytecode* code = bce->code(offset);
262 code[0] = jsbytecode(op);
263 UpdateDepth(cx, bce, offset);
264 return offset;
267 ptrdiff_t
268 frontend::Emit2(ExclusiveContext* cx, BytecodeEmitter* bce, JSOp op, jsbytecode op1)
270 MOZ_ASSERT(CheckStrictOrSloppy(bce, op));
271 ptrdiff_t offset = EmitCheck(cx, bce, 2);
272 if (offset < 0)
273 return -1;
275 jsbytecode* code = bce->code(offset);
276 code[0] = jsbytecode(op);
277 code[1] = op1;
278 UpdateDepth(cx, bce, offset);
279 return offset;
282 ptrdiff_t
283 frontend::Emit3(ExclusiveContext* cx, BytecodeEmitter* bce, JSOp op, jsbytecode op1,
284 jsbytecode op2)
286 MOZ_ASSERT(CheckStrictOrSloppy(bce, op));
288 /* These should filter through EmitVarOp. */
289 MOZ_ASSERT(!IsArgOp(op));
290 MOZ_ASSERT(!IsLocalOp(op));
292 ptrdiff_t offset = EmitCheck(cx, bce, 3);
293 if (offset < 0)
294 return -1;
296 jsbytecode* code = bce->code(offset);
297 code[0] = jsbytecode(op);
298 code[1] = op1;
299 code[2] = op2;
300 UpdateDepth(cx, bce, offset);
301 return offset;
/*
 * Emit an opcode followed by |extra| operand bytes which the caller fills in
 * afterwards. Returns the op's offset or -1 on failure.
 */
ptrdiff_t
frontend::EmitN(ExclusiveContext* cx, BytecodeEmitter* bce, JSOp op, size_t extra)
{
    MOZ_ASSERT(CheckStrictOrSloppy(bce, op));
    ptrdiff_t length = 1 + (ptrdiff_t)extra;
    ptrdiff_t offset = EmitCheck(cx, bce, length);
    if (offset < 0)
        return -1;

    jsbytecode* code = bce->code(offset);
    code[0] = jsbytecode(op);
    /* The remaining |extra| bytes are set by the caller */

    /*
     * Don't UpdateDepth if op's use-count comes from the immediate
     * operand yet to be stored in the extra bytes after op.
     */
    if (js_CodeSpec[op].nuses >= 0)
        UpdateDepth(cx, bce, offset);

    return offset;
}
327 static ptrdiff_t
328 EmitJump(ExclusiveContext* cx, BytecodeEmitter* bce, JSOp op, ptrdiff_t off)
330 ptrdiff_t offset = EmitCheck(cx, bce, 5);
331 if (offset < 0)
332 return -1;
334 jsbytecode* code = bce->code(offset);
335 code[0] = jsbytecode(op);
336 SET_JUMP_OFFSET(code, off);
337 UpdateDepth(cx, bce, offset);
338 return offset;
341 static ptrdiff_t
342 EmitCall(ExclusiveContext* cx, BytecodeEmitter* bce, JSOp op, uint16_t argc, ParseNode* pn=nullptr)
344 if (pn && !UpdateSourceCoordNotes(cx, bce, pn->pn_pos.begin))
345 return -1;
346 return Emit3(cx, bce, op, ARGC_HI(argc), ARGC_LO(argc));
349 // Dup the var in operand stack slot "slot". The first item on the operand
350 // stack is one slot past the last fixed slot. The last (most recent) item is
351 // slot bce->stackDepth - 1.
353 // The instruction that is written (JSOP_DUPAT) switches the depth around so
354 // that it is addressed from the sp instead of from the fp. This is useful when
355 // you don't know the size of the fixed stack segment (nfixed), as is the case
356 // when compiling scripts (because each statement is parsed and compiled
357 // separately, but they all together form one script with one fixed stack
358 // frame).
359 static bool
360 EmitDupAt(ExclusiveContext* cx, BytecodeEmitter* bce, unsigned slot)
362 MOZ_ASSERT(slot < unsigned(bce->stackDepth));
363 // The slot's position on the operand stack, measured from the top.
364 unsigned slotFromTop = bce->stackDepth - 1 - slot;
365 if (slotFromTop >= JS_BIT(24)) {
366 bce->reportError(nullptr, JSMSG_TOO_MANY_LOCALS);
367 return false;
369 ptrdiff_t off = EmitN(cx, bce, JSOP_DUPAT, 3);
370 if (off < 0)
371 return false;
372 jsbytecode* pc = bce->code(off);
373 SET_UINT24(pc, slotFromTop);
374 return true;
/* XXX too many "... statement" L10N gaffes below -- fix via js.msg! */
/* Shared statement-name strings used in diagnostics. */
const char js_with_statement_str[] = "with statement";
const char js_finally_block_str[]  = "finally block";
const char js_script_str[]         = "script";
/* Display name for each StmtType, indexed by the enum value. */
static const char * const statementName[] = {
    "label statement",               /* LABEL */
    "if statement",                  /* IF */
    "else statement",                /* ELSE */
    "destructuring body",            /* BODY */
    "switch statement",              /* SWITCH */
    "block",                         /* BLOCK */
    js_with_statement_str,           /* WITH */
    "catch block",                   /* CATCH */
    "try block",                     /* TRY */
    js_finally_block_str,            /* FINALLY */
    js_finally_block_str,            /* SUBROUTINE */
    "do loop",                       /* DO_LOOP */
    "for loop",                      /* FOR_LOOP */
    "for/in loop",                   /* FOR_IN_LOOP */
    "for/of loop",                   /* FOR_OF_LOOP */
    "while loop",                    /* WHILE_LOOP */
    "spread",                        /* SPREAD */
};

static_assert(MOZ_ARRAY_LENGTH(statementName) == STMT_LIMIT,
              "statementName array and StmtType enum must be consistent");
405 static const char*
406 StatementName(StmtInfoBCE* topStmt)
408 if (!topStmt)
409 return js_script_str;
410 return statementName[topStmt->type];
/* Report a "statement too large" error naming the offending statement kind. */
static void
ReportStatementTooLarge(TokenStream& ts, StmtInfoBCE* topStmt)
{
    ts.reportError(JSMSG_NEED_DIET, StatementName(topStmt));
}
420 * Emit a backpatch op with offset pointing to the previous jump of this type,
421 * so that we can walk back up the chain fixing up the op and jump offset.
423 static ptrdiff_t
424 EmitBackPatchOp(ExclusiveContext* cx, BytecodeEmitter* bce, ptrdiff_t* lastp)
426 ptrdiff_t offset, delta;
428 offset = bce->offset();
429 delta = offset - *lastp;
430 *lastp = offset;
431 MOZ_ASSERT(delta > 0);
432 return EmitJump(cx, bce, JSOP_BACKPATCH, delta);
435 static inline unsigned
436 LengthOfSetLine(unsigned line)
438 return 1 /* SN_SETLINE */ + (line > SN_4BYTE_OFFSET_MASK ? 4 : 1);
/* Updates line number notes, not column notes. */
static inline bool
UpdateLineNumberNotes(ExclusiveContext* cx, BytecodeEmitter* bce, uint32_t offset)
{
    TokenStream* ts = &bce->parser->tokenStream;
    bool onThisLine;
    if (!ts->srcCoords.isOnThisLine(offset, bce->currentLine(), &onThisLine))
        return ts->reportError(JSMSG_OUT_OF_MEMORY);
    if (!onThisLine) {
        unsigned line = ts->srcCoords.lineNum(offset);
        unsigned delta = line - bce->currentLine();

        /*
         * Encode any change in the current source line number by using
         * either several SRC_NEWLINE notes or just one SRC_SETLINE note,
         * whichever consumes less space.
         *
         * NB: We handle backward line number deltas (possible with for
         * loops where the update part is emitted after the body, but its
         * line number is <= any line number in the body) here by letting
         * unsigned delta_ wrap to a very large number, which triggers a
         * SRC_SETLINE.
         */
        bce->current->currentLine = line;
        bce->current->lastColumn = 0;
        if (delta >= LengthOfSetLine(line)) {
            // One SETLINE note is cheaper than |delta| NEWLINE notes.
            if (NewSrcNote2(cx, bce, SRC_SETLINE, (ptrdiff_t)line) < 0)
                return false;
        } else {
            do {
                if (NewSrcNote(cx, bce, SRC_NEWLINE) < 0)
                    return false;
            } while (--delta != 0);
        }
    }
    return true;
}
/* Updates the line number and column number information in the source notes. */
static bool
UpdateSourceCoordNotes(ExclusiveContext* cx, BytecodeEmitter* bce, uint32_t offset)
{
    if (!UpdateLineNumberNotes(cx, bce, offset))
        return false;

    uint32_t columnIndex = bce->parser->tokenStream.srcCoords.columnIndex(offset);
    ptrdiff_t colspan = ptrdiff_t(columnIndex) - ptrdiff_t(bce->current->lastColumn);
    if (colspan != 0) {
        // If the column span is so large that we can't store it, then just
        // discard this information. This can happen with minimized or otherwise
        // machine-generated code. Even gigantic column numbers are still
        // valuable if you have a source map to relate them to something real;
        // but it's better to fail soft here.
        if (!SN_REPRESENTABLE_COLSPAN(colspan))
            return true;
        if (NewSrcNote2(cx, bce, SRC_COLSPAN, SN_COLSPAN_TO_OFFSET(colspan)) < 0)
            return false;
        bce->current->lastColumn = columnIndex;
    }
    return true;
}
/* Emit JSOP_LOOPHEAD, syncing source notes to |nextpn| when provided. */
static ptrdiff_t
EmitLoopHead(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* nextpn)
{
    if (nextpn) {
        /*
         * Try to give the JSOP_LOOPHEAD the same line number as the next
         * instruction. nextpn is often a block, in which case the next
         * instruction typically comes from the first statement inside.
         */
        MOZ_ASSERT_IF(nextpn->isKind(PNK_STATEMENTLIST), nextpn->isArity(PN_LIST));
        if (nextpn->isKind(PNK_STATEMENTLIST) && nextpn->pn_head)
            nextpn = nextpn->pn_head;
        if (!UpdateSourceCoordNotes(cx, bce, nextpn->pn_pos.begin))
            return -1;
    }

    return Emit1(cx, bce, JSOP_LOOPHEAD);
}
/*
 * Emit JSOP_LOOPENTRY with the enclosing loop's depth and OSR-eligibility
 * packed into its operand byte. The top statement must be a loop.
 */
static bool
EmitLoopEntry(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* nextpn)
{
    if (nextpn) {
        /* Update the line number, as for LOOPHEAD. */
        MOZ_ASSERT_IF(nextpn->isKind(PNK_STATEMENTLIST), nextpn->isArity(PN_LIST));
        if (nextpn->isKind(PNK_STATEMENTLIST) && nextpn->pn_head)
            nextpn = nextpn->pn_head;
        if (!UpdateSourceCoordNotes(cx, bce, nextpn->pn_pos.begin))
            return false;
    }

    LoopStmtInfo* loop = LoopStmtInfo::fromStmtInfo(bce->topStmt);
    MOZ_ASSERT(loop->loopDepth > 0);

    uint8_t loopDepthAndFlags = PackLoopEntryDepthHintAndFlags(loop->loopDepth, loop->canIonOsr);
    return Emit2(cx, bce, JSOP_LOOPENTRY, loopDepthAndFlags) >= 0;
}
542 * If op is JOF_TYPESET (see the type barriers comment in jsinfer.h), reserve
543 * a type set to store its result.
545 static inline void
546 CheckTypeSet(ExclusiveContext* cx, BytecodeEmitter* bce, JSOp op)
548 if (js_CodeSpec[op].format & JOF_TYPESET) {
549 if (bce->typesetCount < UINT16_MAX)
550 bce->typesetCount++;
/*
 * Macro to emit a bytecode followed by a uint16_t immediate operand stored in
 * big-endian order.
 *
 * NB: We use cx and bce from our caller's lexical environment, and return
 * false on error.
 */
#define EMIT_UINT16_IMM_OP(op, i)                                             \
    JS_BEGIN_MACRO                                                            \
        if (Emit3(cx, bce, op, UINT16_HI(i), UINT16_LO(i)) < 0)               \
            return false;                                                     \
        CheckTypeSet(cx, bce, op);                                            \
    JS_END_MACRO
/*
 * Emit one JSOP_POPN popping |*npops| values and reset the counter.
 * Precondition: *npops != 0. Returns false via the macro's early return on
 * emission failure.
 */
static bool
FlushPops(ExclusiveContext* cx, BytecodeEmitter* bce, int* npops)
{
    MOZ_ASSERT(*npops != 0);
    EMIT_UINT16_IMM_OP(JSOP_POPN, *npops);
    *npops = 0;
    return true;
}
577 static bool
578 PopIterator(ExclusiveContext* cx, BytecodeEmitter* bce)
580 if (Emit1(cx, bce, JSOP_ENDITER) < 0)
581 return false;
582 return true;
namespace {

/*
 * RAII helper for emitting the bytecode of a non-local jump (break, continue,
 * return). Records the block-scope-note and stack-depth state at construction
 * and restores/ends them at destruction, after the jump's cleanup code has
 * been emitted in between.
 */
class NonLocalExitScope {
    ExclusiveContext* cx;
    BytecodeEmitter* bce;
    const uint32_t savedScopeIndex;  // blockScopeList length at construction.
    const int savedDepth;            // Model stack depth at construction.
    uint32_t openScopeIndex;         // Innermost open scope note, if any.

    NonLocalExitScope(const NonLocalExitScope&) = delete;

  public:
    explicit NonLocalExitScope(ExclusiveContext* cx_, BytecodeEmitter* bce_)
      : cx(cx_),
        bce(bce_),
        savedScopeIndex(bce->blockScopeList.length()),
        savedDepth(bce->stackDepth),
        openScopeIndex(UINT32_MAX) {
        // Find the innermost statement that carries a nested scope, if the
        // emitter currently has a static scope at all.
        if (bce->staticScope) {
            StmtInfoBCE* stmt = bce->topStmt;
            while (1) {
                MOZ_ASSERT(stmt);
                if (stmt->isNestedScope) {
                    openScopeIndex = stmt->blockScopeIndex;
                    break;
                }
                stmt = stmt->down;
            }
        }
    }

    ~NonLocalExitScope() {
        // End every scope note appended during the non-local exit and restore
        // the modeled stack depth.
        for (uint32_t n = savedScopeIndex; n < bce->blockScopeList.length(); n++)
            bce->blockScopeList.recordEnd(n, bce->offset());
        bce->stackDepth = savedDepth;
    }

    /*
     * Append a scope note covering the exit path for the scope being popped,
     * parented on the previously open scope note.
     */
    bool popScopeForNonLocalExit(uint32_t blockScopeIndex) {
        uint32_t scopeObjectIndex = bce->blockScopeList.findEnclosingScope(blockScopeIndex);
        uint32_t parent = openScopeIndex;

        if (!bce->blockScopeList.append(scopeObjectIndex, bce->offset(), parent))
            return false;
        openScopeIndex = bce->blockScopeList.length() - 1;
        return true;
    }

    bool prepareForNonLocalJump(StmtInfoBCE* toStmt);
};
/*
 * Emit additional bytecode(s) for non-local jumps: unwind every statement
 * between the current top statement and |toStmt|, popping loop state,
 * leaving with/block scopes, and calling finally blocks as required.
 */
bool
NonLocalExitScope::prepareForNonLocalJump(StmtInfoBCE* toStmt)
{
    int npops = 0;

#define FLUSH_POPS() if (npops && !FlushPops(cx, bce, &npops)) return false

    for (StmtInfoBCE* stmt = bce->topStmt; stmt != toStmt; stmt = stmt->down) {
        switch (stmt->type) {
          case STMT_FINALLY:
            // Pending pops must be flushed before the GOSUB to the finally.
            FLUSH_POPS();
            if (EmitBackPatchOp(cx, bce, &stmt->gosubs()) < 0)
                return false;
            break;

          case STMT_WITH:
            if (Emit1(cx, bce, JSOP_LEAVEWITH) < 0)
                return false;
            MOZ_ASSERT(stmt->isNestedScope);
            if (!popScopeForNonLocalExit(stmt->blockScopeIndex))
                return false;
            break;

          case STMT_FOR_OF_LOOP:
            npops += 2;
            break;

          case STMT_FOR_IN_LOOP:
            /* The iterator and the current value are on the stack. */
            npops += 1;
            FLUSH_POPS();
            if (!PopIterator(cx, bce))
                return false;
            break;

          case STMT_SPREAD:
            MOZ_ASSERT_UNREACHABLE("can't break/continue/return from inside a spread");
            break;

          case STMT_SUBROUTINE:
            /*
             * There's a [exception or hole, retsub pc-index] pair on the
             * stack that we need to pop.
             */
            npops += 2;
            break;

          default:;
        }

        if (stmt->isBlockScope) {
            MOZ_ASSERT(stmt->isNestedScope);
            StaticBlockObject& blockObj = stmt->staticBlock();
            if (Emit1(cx, bce, JSOP_DEBUGLEAVEBLOCK) < 0)
                return false;
            if (!popScopeForNonLocalExit(stmt->blockScopeIndex))
                return false;
            if (blockObj.needsClone()) {
                if (Emit1(cx, bce, JSOP_POPBLOCKSCOPE) < 0)
                    return false;
            }
        }
    }

    FLUSH_POPS();
    return true;
}

#undef FLUSH_POPS

} // anonymous namespace
/*
 * Emit the cleanup code for a non-local jump out to |toStmt|, optionally
 * preceded by a source note of |noteType|, then append a backpatch jump to
 * the chain rooted at |*lastp|. Returns the jump's offset or -1.
 */
static ptrdiff_t
EmitGoto(ExclusiveContext* cx, BytecodeEmitter* bce, StmtInfoBCE* toStmt, ptrdiff_t* lastp,
         SrcNoteType noteType = SRC_NULL)
{
    NonLocalExitScope nle(cx, bce);

    if (!nle.prepareForNonLocalJump(toStmt))
        return -1;

    if (noteType != SRC_NULL) {
        if (NewSrcNote(cx, bce, noteType) < 0)
            return -1;
    }

    return EmitBackPatchOp(cx, bce, lastp);
}
/*
 * Walk the backpatch chain starting at offset |last| (each entry's jump
 * operand holds the delta back to the previous entry, as written by
 * EmitBackPatchOp), rewriting every entry into a real |op| that jumps to
 * |target|. The chain terminates at the pre-start sentinel bce->code(-1).
 */
static bool
BackPatch(ExclusiveContext* cx, BytecodeEmitter* bce, ptrdiff_t last, jsbytecode* target, jsbytecode op)
{
    jsbytecode* pc, *stop;
    ptrdiff_t delta, span;

    pc = bce->code(last);
    stop = bce->code(-1);
    while (pc != stop) {
        delta = GET_JUMP_OFFSET(pc);
        span = target - pc;
        SET_JUMP_OFFSET(pc, span);
        *pc = op;
        pc -= delta;
    }
    return true;
}
/* Initialize a statement record's top offset and clear its backpatch chains. */
#define SET_STATEMENT_TOP(stmt, top)                                          \
    ((stmt)->update = (top), (stmt)->breaks = (stmt)->continues = (-1))
/* Initialize |stmt|'s offsets and push it onto the emitter's statement stack. */
static void
PushStatementInner(BytecodeEmitter* bce, StmtInfoBCE* stmt, StmtType type, ptrdiff_t top)
{
    SET_STATEMENT_TOP(stmt, top);
    PushStatement(bce, stmt, type);
}
/* Push a non-loop statement record; loops must use PushLoopStatement. */
static void
PushStatementBCE(BytecodeEmitter* bce, StmtInfoBCE* stmt, StmtType type, ptrdiff_t top)
{
    PushStatementInner(bce, stmt, type, top);
    MOZ_ASSERT(!stmt->isLoop());
}
/*
 * Push a loop statement record and compute its loop depth and Ion-OSR
 * eligibility from the nearest enclosing loop, if any.
 */
static void
PushLoopStatement(BytecodeEmitter* bce, LoopStmtInfo* stmt, StmtType type, ptrdiff_t top)
{
    PushStatementInner(bce, stmt, type, top);
    MOZ_ASSERT(stmt->isLoop());

    // Find the nearest enclosing loop record, if there is one.
    LoopStmtInfo* downLoop = nullptr;
    for (StmtInfoBCE* outer = stmt->down; outer; outer = outer->down) {
        if (outer->isLoop()) {
            downLoop = LoopStmtInfo::fromStmtInfo(outer);
            break;
        }
    }

    stmt->stackDepth = bce->stackDepth;
    stmt->loopDepth = downLoop ? downLoop->loopDepth + 1 : 1;

    // Number of stack slots this kind of loop itself keeps live.
    int loopSlots;
    if (type == STMT_SPREAD)
        loopSlots = 3;
    else if (type == STMT_FOR_IN_LOOP || type == STMT_FOR_OF_LOOP)
        loopSlots = 2;
    else
        loopSlots = 0;

    MOZ_ASSERT(loopSlots <= stmt->stackDepth);

    // OSR is allowed only when everything on the stack belongs to loops.
    if (downLoop)
        stmt->canIonOsr = (downLoop->canIonOsr &&
                           stmt->stackDepth == downLoop->stackDepth + loopSlots);
    else
        stmt->canIonOsr = stmt->stackDepth == loopSlots;
}
797 * Return the enclosing lexical scope, which is the innermost enclosing static
798 * block object or compiler created function.
800 static JSObject*
801 EnclosingStaticScope(BytecodeEmitter* bce)
803 if (bce->staticScope)
804 return bce->staticScope;
806 if (!bce->sc->isFunctionBox()) {
807 MOZ_ASSERT(!bce->parent);
808 return nullptr;
811 return bce->sc->asFunctionBox()->function();
#ifdef DEBUG
/* Debug-only: true iff every variable of |obj| is marked aliased. */
static bool
AllLocalsAliased(StaticBlockObject& obj)
{
    unsigned count = obj.numVariables();
    for (unsigned i = 0; i < count; i++) {
        if (!obj.isAliased(i))
            return false;
    }
    return true;
}
#endif
/*
 * For each variable of a static block, fix up its definition's frame-slot
 * cookie and record in the block whether the binding is aliased.
 */
static bool
ComputeAliasedSlots(ExclusiveContext* cx, BytecodeEmitter* bce, Handle<StaticBlockObject*> blockObj)
{
    uint32_t numAliased = bce->script->bindings.numAliasedBodyLevelLocals();

    for (unsigned i = 0; i < blockObj->numVariables(); i++) {
        Definition* dn = blockObj->definitionParseNode(i);

        MOZ_ASSERT(dn->isDefn());

        // blockIndexToLocalIndex returns the frame slot following the unaliased
        // locals. We add numAliased so that the cookie's slot value comes after
        // all (aliased and unaliased) body level locals.
        if (!dn->pn_cookie.set(bce->parser->tokenStream, dn->pn_cookie.level(),
                               numAliased + blockObj->blockIndexToLocalIndex(dn->frameSlot())))
        {
            return false;
        }

#ifdef DEBUG
        // All uses of this definition must still point at it and be unbound.
        for (ParseNode* pnu = dn->dn_uses; pnu; pnu = pnu->pn_link) {
            MOZ_ASSERT(pnu->pn_lexdef == dn);
            MOZ_ASSERT(!(pnu->pn_dflags & PND_BOUND));
            MOZ_ASSERT(pnu->pn_cookie.isFree());
        }
#endif

        blockObj->setAliased(i, bce->isAliasedName(dn));
    }

    MOZ_ASSERT_IF(bce->sc->allLocalsAliased(), AllLocalsAliased(*blockObj));

    return true;
}
/* Forward declaration; defined later in this file. */
static bool
EmitInternedObjectOp(ExclusiveContext* cx, uint32_t index, JSOp op, BytecodeEmitter* bce);
// In a function, block-scoped locals go after the vars, and form part of the
// fixed part of a stack frame. Outside a function, there are no fixed vars,
// but block-scoped locals still form part of the fixed part of a stack frame
// and are thus addressable via GETLOCAL and friends.
static void
ComputeLocalOffset(ExclusiveContext* cx, BytecodeEmitter* bce, Handle<StaticBlockObject*> blockObj)
{
    unsigned nbodyfixed = bce->sc->isFunctionBox()
                          ? bce->script->bindings.numUnaliasedBodyLevelLocals()
                          : 0;
    unsigned localOffset = nbodyfixed;

    // Start this block's locals right after those of the nearest enclosing
    // static block, if any.
    if (bce->staticScope) {
        Rooted<NestedScopeObject*> outer(cx, bce->staticScope);
        for (; outer; outer = outer->enclosingNestedScope()) {
            if (outer->is<StaticBlockObject>()) {
                StaticBlockObject& outerBlock = outer->as<StaticBlockObject>();
                localOffset = outerBlock.localOffset() + outerBlock.numVariables();
                break;
            }
        }
    }

    MOZ_ASSERT(localOffset + blockObj->numVariables()
               <= nbodyfixed + bce->script->bindings.numBlockScoped());

    blockObj->setLocalOffset(localOffset);
}
892 // ~ Nested Scopes ~
894 // A nested scope is a region of a compilation unit (function, script, or eval
895 // code) with an additional node on the scope chain. This node may either be a
896 // "with" object or a "block" object. "With" objects represent "with" scopes.
897 // Block objects represent lexical scopes, and contain named block-scoped
898 // bindings, for example "let" bindings or the exception in a catch block.
899 // Those variables may be local and thus accessible directly from the stack, or
900 // "aliased" (accessed by name from nested functions, or dynamically via nested
901 // "eval" or "with") and only accessible through the scope chain.
903 // All nested scopes are present on the "static scope chain". A nested scope
904 // that is a "with" scope will be present on the scope chain at run-time as
905 // well. A block scope may or may not have a corresponding link on the run-time
906 // scope chain; if no variable declared in the block scope is "aliased", then no
907 // scope chain node is allocated.
909 // To help debuggers, the bytecode emitter arranges to record the PC ranges
910 // comprehended by a nested scope, and ultimately attach them to the JSScript.
911 // An element in the "block scope array" specifies the PC range, and links to a
912 // NestedScopeObject in the object list of the script. That scope object is
913 // linked to the previous link in the static scope chain, if any. The static
914 // scope chain at any pre-retire PC can be retrieved using
915 // JSScript::getStaticScope(jsbytecode* pc).
917 // Block scopes store their locals in the fixed part of a stack frame, after the
918 // "fixed var" bindings. A fixed var binding is a "var" or legacy "const"
919 // binding that occurs in a function (as opposed to a script or in eval code).
920 // Only functions have fixed var bindings.
922 // To assist the debugger, we emit a DEBUGLEAVEBLOCK opcode before leaving a
923 // block scope, even if the block has no aliased locals. This allows
924 // DebugScopes to invalidate any association between a debugger scope object,
925 // which can proxy access to unaliased stack locals, and the actual live frame.
926 // In normal, non-debug mode, this opcode does not cause any baseline code to be
927 // emitted.
929 // Enter a nested scope with EnterNestedScope. It will emit
930 // PUSHBLOCKSCOPE/ENTERWITH if needed, and arrange to record the PC bounds of
931 // the scope. Leave a nested scope with LeaveNestedScope, which, for blocks,
932 // will emit DEBUGLEAVEBLOCK and may emit POPBLOCKSCOPE. (For "with" scopes it
933 // emits LEAVEWITH, of course.) Pass EnterNestedScope a fresh StmtInfoBCE
934 // object, and pass that same object to the corresponding LeaveNestedScope. If
935 // the statement is a block scope, pass STMT_BLOCK as stmtType; otherwise for
936 // with scopes pass STMT_WITH.
/*
 * Enter a nested (block or with) scope: intern the scope object, emit
 * PUSHBLOCKSCOPE/ENTERWITH as needed, open a block-scope note, and push the
 * statement record. See the "~ Nested Scopes ~" comment above for the model.
 */
static bool
EnterNestedScope(ExclusiveContext* cx, BytecodeEmitter* bce, StmtInfoBCE* stmt, ObjectBox* objbox,
                 StmtType stmtType)
{
    Rooted<NestedScopeObject*> scopeObj(cx, &objbox->object->as<NestedScopeObject>());
    uint32_t scopeObjectIndex = bce->objectList.add(objbox);

    switch (stmtType) {
      case STMT_BLOCK: {
        Rooted<StaticBlockObject*> blockObj(cx, &scopeObj->as<StaticBlockObject>());

        ComputeLocalOffset(cx, bce, blockObj);

        if (!ComputeAliasedSlots(cx, bce, blockObj))
            return false;

        // Only blocks with aliased bindings need a run-time scope object.
        if (blockObj->needsClone()) {
            if (!EmitInternedObjectOp(cx, scopeObjectIndex, JSOP_PUSHBLOCKSCOPE, bce))
                return false;
        }
        break;
      }
      case STMT_WITH:
        MOZ_ASSERT(scopeObj->is<StaticWithObject>());
        if (!EmitInternedObjectOp(cx, scopeObjectIndex, JSOP_ENTERWITH, bce))
            return false;
        break;
      default:
        MOZ_CRASH("Unexpected scope statement");
    }

    // Find the scope note of the current static scope to parent the new one.
    uint32_t parent = BlockScopeNote::NoBlockScopeIndex;
    if (StmtInfoBCE* stmt = bce->topScopeStmt) {
        for (; stmt->staticScope != bce->staticScope; stmt = stmt->down) {}
        parent = stmt->blockScopeIndex;
    }

    stmt->blockScopeIndex = bce->blockScopeList.length();
    if (!bce->blockScopeList.append(scopeObjectIndex, bce->offset(), parent))
        return false;

    PushStatementBCE(bce, stmt, stmtType, bce->offset());
    scopeObj->initEnclosingNestedScope(EnclosingStaticScope(bce));
    FinishPushNestedScope(bce, stmt, *scopeObj);
    MOZ_ASSERT(stmt->isNestedScope);
    stmt->isBlockScope = (stmtType == STMT_BLOCK);

    return true;
}
// Patches |breaks| and |continues| unless the top statement info record
// represents a try-catch-finally suite. May fail if a jump offset overflows.
static bool
PopStatementBCE(ExclusiveContext* cx, BytecodeEmitter* bce)
{
    StmtInfoBCE* stmt = bce->topStmt;
    // Breaks jump to the current end of code; continues jump back to the
    // statement's update offset.
    if (!stmt->isTrying() &&
        (!BackPatch(cx, bce, stmt->breaks, bce->code().end(), JSOP_GOTO) ||
         !BackPatch(cx, bce, stmt->continues, bce->code(stmt->update), JSOP_GOTO)))
    {
        return false;
    }
    FinishPopStatement(bce);
    return true;
}
/*
 * Leave a scope entered with EnterNestedScope: pop the statement record, emit
 * DEBUGLEAVEBLOCK or LEAVEWITH, close the block-scope note, and pop the
 * cloned scope object if one was pushed.
 */
static bool
LeaveNestedScope(ExclusiveContext* cx, BytecodeEmitter* bce, StmtInfoBCE* stmt)
{
    MOZ_ASSERT(stmt == bce->topStmt);
    MOZ_ASSERT(stmt->isNestedScope);
    MOZ_ASSERT(stmt->isBlockScope == !(stmt->type == STMT_WITH));
    uint32_t blockScopeIndex = stmt->blockScopeIndex;

#ifdef DEBUG
    // The note must still be open and must describe this statement's scope.
    MOZ_ASSERT(bce->blockScopeList.list[blockScopeIndex].length == 0);
    uint32_t blockObjIndex = bce->blockScopeList.list[blockScopeIndex].index;
    ObjectBox* blockObjBox = bce->objectList.find(blockObjIndex);
    NestedScopeObject* staticScope = &blockObjBox->object->as<NestedScopeObject>();
    MOZ_ASSERT(stmt->staticScope == staticScope);
    MOZ_ASSERT(staticScope == bce->staticScope);
    MOZ_ASSERT_IF(!stmt->isBlockScope, staticScope->is<StaticWithObject>());
#endif

    if (!PopStatementBCE(cx, bce))
        return false;

    if (Emit1(cx, bce, stmt->isBlockScope ? JSOP_DEBUGLEAVEBLOCK : JSOP_LEAVEWITH) < 0)
        return false;

    bce->blockScopeList.recordEnd(blockScopeIndex, bce->offset());

    if (stmt->isBlockScope && stmt->staticScope->as<StaticBlockObject>().needsClone()) {
        if (Emit1(cx, bce, JSOP_POPBLOCKSCOPE) < 0)
            return false;
    }

    return true;
}
1039 static bool
1040 EmitIndex32(ExclusiveContext* cx, JSOp op, uint32_t index, BytecodeEmitter* bce)
1042 MOZ_ASSERT(CheckStrictOrSloppy(bce, op));
1043 const size_t len = 1 + UINT32_INDEX_LEN;
1044 MOZ_ASSERT(len == size_t(js_CodeSpec[op].length));
1045 ptrdiff_t offset = EmitCheck(cx, bce, len);
1046 if (offset < 0)
1047 return false;
1049 jsbytecode* code = bce->code(offset);
1050 code[0] = jsbytecode(op);
1051 SET_UINT32_INDEX(code, index);
1052 UpdateDepth(cx, bce, offset);
1053 CheckTypeSet(cx, bce, op);
1054 return true;
1057 static bool
1058 EmitIndexOp(ExclusiveContext* cx, JSOp op, uint32_t index, BytecodeEmitter* bce)
1060 MOZ_ASSERT(CheckStrictOrSloppy(bce, op));
1061 const size_t len = js_CodeSpec[op].length;
1062 MOZ_ASSERT(len >= 1 + UINT32_INDEX_LEN);
1063 ptrdiff_t offset = EmitCheck(cx, bce, len);
1064 if (offset < 0)
1065 return false;
1067 jsbytecode* code = bce->code(offset);
1068 code[0] = jsbytecode(op);
1069 SET_UINT32_INDEX(code, index);
1070 UpdateDepth(cx, bce, offset);
1071 CheckTypeSet(cx, bce, op);
1072 return true;
1075 static bool
1076 EmitAtomOp(ExclusiveContext* cx, JSAtom* atom, JSOp op, BytecodeEmitter* bce)
1078 MOZ_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
1080 // .generator and .genrval lookups should be emitted as JSOP_GETALIASEDVAR
1081 // instead of JSOP_GETNAME etc, to bypass |with| objects on the scope chain.
1082 MOZ_ASSERT_IF(op == JSOP_GETNAME || op == JSOP_GETGNAME, !bce->sc->isDotVariable(atom));
1084 if (op == JSOP_GETPROP && atom == cx->names().length) {
1085 /* Specialize length accesses for the interpreter. */
1086 op = JSOP_LENGTH;
1089 jsatomid index;
1090 if (!bce->makeAtomIndex(atom, &index))
1091 return false;
1093 return EmitIndexOp(cx, op, index, bce);
1096 static bool
1097 EmitAtomOp(ExclusiveContext* cx, ParseNode* pn, JSOp op, BytecodeEmitter* bce)
1099 MOZ_ASSERT(pn->pn_atom != nullptr);
1100 return EmitAtomOp(cx, pn->pn_atom, op, bce);
1103 static bool
1104 EmitInternedObjectOp(ExclusiveContext* cx, uint32_t index, JSOp op, BytecodeEmitter* bce)
1106 MOZ_ASSERT(JOF_OPTYPE(op) == JOF_OBJECT);
1107 MOZ_ASSERT(index < bce->objectList.length);
1108 return EmitIndex32(cx, op, index, bce);
1111 static bool
1112 EmitObjectOp(ExclusiveContext* cx, ObjectBox* objbox, JSOp op, BytecodeEmitter* bce)
1114 return EmitInternedObjectOp(cx, bce->objectList.add(objbox), op, bce);
1117 static bool
1118 EmitObjectPairOp(ExclusiveContext* cx, ObjectBox* objbox1, ObjectBox* objbox2, JSOp op,
1119 BytecodeEmitter* bce)
1121 uint32_t index = bce->objectList.add(objbox1);
1122 bce->objectList.add(objbox2);
1123 return EmitInternedObjectOp(cx, index, op, bce);
1126 static bool
1127 EmitRegExp(ExclusiveContext* cx, uint32_t index, BytecodeEmitter* bce)
1129 return EmitIndex32(cx, JSOP_REGEXP, index, bce);
1133 * To catch accidental misuse, EMIT_UINT16_IMM_OP/Emit3 assert that they are
1134 * not used to unconditionally emit JSOP_GETLOCAL. Variable access should
1135 * instead be emitted using EmitVarOp. In special cases, when the caller
1136 * definitely knows that a given local slot is unaliased, this function may be
1137 * used as a non-asserting version of EMIT_UINT16_IMM_OP.
1139 static bool
1140 EmitLocalOp(ExclusiveContext* cx, BytecodeEmitter* bce, JSOp op, uint32_t slot)
1142 MOZ_ASSERT(JOF_OPTYPE(op) != JOF_SCOPECOORD);
1143 MOZ_ASSERT(IsLocalOp(op));
1145 ptrdiff_t off = EmitN(cx, bce, op, LOCALNO_LEN);
1146 if (off < 0)
1147 return false;
1149 SET_LOCALNO(bce->code(off), slot);
1150 return true;
1153 static bool
1154 EmitUnaliasedVarOp(ExclusiveContext* cx, JSOp op, uint32_t slot, MaybeCheckLexical checkLexical,
1155 BytecodeEmitter* bce)
1157 MOZ_ASSERT(JOF_OPTYPE(op) != JOF_SCOPECOORD);
1159 if (IsLocalOp(op)) {
1160 // Only unaliased locals have stack slots assigned to them. Convert the
1161 // var index (which includes unaliased and aliased locals) to the stack
1162 // slot index.
1163 MOZ_ASSERT(bce->localsToFrameSlots_[slot] <= slot);
1164 slot = bce->localsToFrameSlots_[slot];
1166 if (checkLexical) {
1167 MOZ_ASSERT(op != JSOP_INITLEXICAL);
1168 if (!EmitLocalOp(cx, bce, JSOP_CHECKLEXICAL, slot))
1169 return false;
1172 return EmitLocalOp(cx, bce, op, slot);
1175 MOZ_ASSERT(IsArgOp(op));
1176 ptrdiff_t off = EmitN(cx, bce, op, ARGNO_LEN);
1177 if (off < 0)
1178 return false;
1180 SET_ARGNO(bce->code(off), slot);
1181 return true;
1184 static bool
1185 EmitScopeCoordOp(ExclusiveContext* cx, BytecodeEmitter* bce, JSOp op, ScopeCoordinate sc)
1187 MOZ_ASSERT(JOF_OPTYPE(op) == JOF_SCOPECOORD);
1189 unsigned n = SCOPECOORD_HOPS_LEN + SCOPECOORD_SLOT_LEN;
1190 MOZ_ASSERT(int(n) + 1 /* op */ == js_CodeSpec[op].length);
1192 ptrdiff_t off = EmitN(cx, bce, op, n);
1193 if (off < 0)
1194 return false;
1196 jsbytecode* pc = bce->code(off);
1197 SET_SCOPECOORD_HOPS(pc, sc.hops());
1198 pc += SCOPECOORD_HOPS_LEN;
1199 SET_SCOPECOORD_SLOT(pc, sc.slot());
1200 pc += SCOPECOORD_SLOT_LEN;
1201 CheckTypeSet(cx, bce, op);
1202 return true;
1205 static bool
1206 EmitAliasedVarOp(ExclusiveContext* cx, JSOp op, ScopeCoordinate sc, MaybeCheckLexical checkLexical,
1207 BytecodeEmitter* bce)
1209 if (checkLexical) {
1210 MOZ_ASSERT(op != JSOP_INITALIASEDLEXICAL);
1211 if (!EmitScopeCoordOp(cx, bce, JSOP_CHECKALIASEDLEXICAL, sc))
1212 return false;
1215 return EmitScopeCoordOp(cx, bce, op, sc);
1218 // Compute the number of nested scope objects that will actually be on the scope
1219 // chain at runtime, given the BCE's current staticScope.
1220 static unsigned
1221 DynamicNestedScopeDepth(BytecodeEmitter* bce)
1223 unsigned depth = 0;
1224 for (NestedScopeObject* b = bce->staticScope; b; b = b->enclosingNestedScope()) {
1225 if (!b->is<StaticBlockObject>() || b->as<StaticBlockObject>().needsClone())
1226 ++depth;
1229 return depth;
// Find the CallObject slot of the aliased binding named |name| in |script|.
// On success stores the slot in *pslot and returns true; returns false if no
// aliased binding with that name exists. When compiling a lazy function and
// |pn| is supplied, also propagates the hoisted-use flag onto |pn| so a dead
// zone check is emitted for lexical bindings.
static bool
LookupAliasedName(BytecodeEmitter* bce, HandleScript script, PropertyName* name, uint32_t* pslot,
                  ParseNode* pn = nullptr)
{
    // Lazy-function data is only consulted in LazyFunction mode.
    LazyScript::FreeVariable* freeVariables = nullptr;
    uint32_t lexicalBegin = 0;
    uint32_t numFreeVariables = 0;
    if (bce->emitterMode == BytecodeEmitter::LazyFunction) {
        freeVariables = bce->lazyScript->freeVariables();
        lexicalBegin = script->bindings.lexicalBegin();
        numFreeVariables = bce->lazyScript->numFreeVariables();
    }

    /*
     * Beware: BindingIter may contain more than one Binding for a given name
     * (in the case of |function f(x,x) {}|) but only one will be aliased.
     */
    uint32_t bindingIndex = 0;
    uint32_t slot = CallObject::RESERVED_SLOTS;  // aliased slots start after the reserved ones
    for (BindingIter bi(script); !bi.done(); bi++) {
        if (bi->aliased()) {
            if (bi->name() == name) {
                // Check if the free variable from a lazy script was marked as
                // a possible hoisted use and is a lexical binding. If so,
                // mark it as such so we emit a dead zone check.
                if (freeVariables) {
                    for (uint32_t i = 0; i < numFreeVariables; i++) {
                        if (freeVariables[i].atom() == name) {
                            if (freeVariables[i].isHoistedUse() && bindingIndex >= lexicalBegin) {
                                MOZ_ASSERT(pn);
                                MOZ_ASSERT(pn->isUsed());
                                pn->pn_dflags |= PND_LEXICAL;
                            }

                            // Only the first matching free variable is relevant.
                            break;
                        }
                    }
                }

                *pslot = slot;
                return true;
            }
            // Only aliased bindings consume CallObject slots.
            slot++;
        }
        bindingIndex++;
    }

    return false;
}
1281 static bool
1282 LookupAliasedNameSlot(BytecodeEmitter* bce, HandleScript script, PropertyName* name,
1283 ScopeCoordinate* sc)
1285 uint32_t slot;
1286 if (!LookupAliasedName(bce, script, name, &slot))
1287 return false;
1289 sc->setSlot(slot);
1290 return true;
1294 * Use this function instead of assigning directly to 'hops' to guard for
1295 * uint8_t overflows.
1297 static bool
1298 AssignHops(BytecodeEmitter* bce, ParseNode* pn, unsigned src, ScopeCoordinate* dst)
1300 if (src > UINT8_MAX) {
1301 bce->reportError(pn, JSMSG_TOO_DEEP, js_function_str);
1302 return false;
1305 dst->setHops(src);
1306 return true;
1309 static inline MaybeCheckLexical
1310 NodeNeedsCheckLexical(ParseNode* pn)
1312 return pn->isHoistedLexicalUse() ? CheckLexical : DontCheckLexical;
// Emit an aliased-variable access for |pn|, computing the runtime hop count
// from the parse node's static (level, slot) cookie.
static bool
EmitAliasedVarOp(ExclusiveContext* cx, JSOp op, ParseNode* pn, BytecodeEmitter* bce)
{
    /*
     * While pn->pn_cookie tells us how many function scopes are between the use and the def this
     * is not the same as how many hops up the dynamic scope chain are needed. In particular:
     *  - a lexical function scope only contributes a hop if it is "heavyweight" (has a dynamic
     *    scope object).
     *  - a heavyweight named function scope contributes an extra scope to the scope chain (a
     *    DeclEnvObject that holds just the name).
     *  - all the intervening let/catch blocks must be counted.
     */
    unsigned skippedScopes = 0;
    BytecodeEmitter* bceOfDef = bce;
    if (pn->isUsed()) {
        /*
         * As explained in BindNameToSlot, the 'level' of a use indicates how
         * many function scopes (i.e., BytecodeEmitters) to skip to find the
         * enclosing function scope of the definition being accessed.
         */
        for (unsigned i = pn->pn_cookie.level(); i; i--) {
            skippedScopes += DynamicNestedScopeDepth(bceOfDef);
            FunctionBox* funbox = bceOfDef->sc->asFunctionBox();
            if (funbox->isHeavyweight()) {
                skippedScopes++;
                // Named lambdas add a DeclEnvObject holding just the name.
                if (funbox->function()->isNamedLambda())
                    skippedScopes++;
            }
            bceOfDef = bceOfDef->parent;
        }
    } else {
        // Definitions are accessed within their own function scope.
        MOZ_ASSERT(pn->isDefn());
        MOZ_ASSERT(pn->pn_cookie.level() == bce->script->staticLevel());
    }

    /*
     * The final part of the skippedScopes computation depends on the type of
     * variable. An arg or local variable is at the outer scope of a function
     * and so includes the full DynamicNestedScopeDepth. A let/catch-binding
     * requires a search of the block chain to see how many (dynamic) block
     * objects to skip.
     */
    ScopeCoordinate sc;
    if (IsArgOp(pn->getOp())) {
        if (!AssignHops(bce, pn, skippedScopes + DynamicNestedScopeDepth(bceOfDef), &sc))
            return false;
        JS_ALWAYS_TRUE(LookupAliasedNameSlot(bceOfDef, bceOfDef->script, pn->name(), &sc));
    } else {
        MOZ_ASSERT(IsLocalOp(pn->getOp()) || pn->isKind(PNK_FUNCTION));
        uint32_t local = pn->pn_cookie.slot();
        if (local < bceOfDef->script->bindings.numBodyLevelLocals()) {
            // Body-level local: same treatment as an argument.
            if (!AssignHops(bce, pn, skippedScopes + DynamicNestedScopeDepth(bceOfDef), &sc))
                return false;
            JS_ALWAYS_TRUE(LookupAliasedNameSlot(bceOfDef, bceOfDef->script, pn->name(), &sc));
        } else {
            // Block-level (let/catch) binding: walk the static block chain to
            // find the block holding the slot, counting cloned blocks as hops.
            MOZ_ASSERT_IF(bce->sc->isFunctionBox(), local <= bceOfDef->script->bindings.numLocals());
            MOZ_ASSERT(bceOfDef->staticScope->is<StaticBlockObject>());
            Rooted<StaticBlockObject*> b(cx, &bceOfDef->staticScope->as<StaticBlockObject>());
            local = bceOfDef->localsToFrameSlots_[local];
            while (local < b->localOffset()) {
                if (b->needsClone())
                    skippedScopes++;
                b = &b->enclosingNestedScope()->as<StaticBlockObject>();
            }
            if (!AssignHops(bce, pn, skippedScopes, &sc))
                return false;
            sc.setSlot(b->localIndexToSlot(local));
        }
    }

    return EmitAliasedVarOp(cx, op, sc, NodeNeedsCheckLexical(pn), bce);
}
1388 static bool
1389 EmitVarOp(ExclusiveContext* cx, ParseNode* pn, JSOp op, BytecodeEmitter* bce)
1391 MOZ_ASSERT(pn->isKind(PNK_FUNCTION) || pn->isKind(PNK_NAME));
1392 MOZ_ASSERT(!pn->pn_cookie.isFree());
1394 if (IsAliasedVarOp(op)) {
1395 ScopeCoordinate sc;
1396 sc.setHops(pn->pn_cookie.level());
1397 sc.setSlot(pn->pn_cookie.slot());
1398 return EmitAliasedVarOp(cx, op, sc, NodeNeedsCheckLexical(pn), bce);
1401 MOZ_ASSERT_IF(pn->isKind(PNK_NAME), IsArgOp(op) || IsLocalOp(op));
1403 if (!bce->isAliasedName(pn)) {
1404 MOZ_ASSERT(pn->isUsed() || pn->isDefn());
1405 MOZ_ASSERT_IF(pn->isUsed(), pn->pn_cookie.level() == 0);
1406 MOZ_ASSERT_IF(pn->isDefn(), pn->pn_cookie.level() == bce->script->staticLevel());
1407 return EmitUnaliasedVarOp(cx, op, pn->pn_cookie.slot(), NodeNeedsCheckLexical(pn), bce);
1410 switch (op) {
1411 case JSOP_GETARG: case JSOP_GETLOCAL: op = JSOP_GETALIASEDVAR; break;
1412 case JSOP_SETARG: case JSOP_SETLOCAL: op = JSOP_SETALIASEDVAR; break;
1413 case JSOP_INITLEXICAL: op = JSOP_INITALIASEDLEXICAL; break;
1414 default: MOZ_CRASH("unexpected var op");
1417 return EmitAliasedVarOp(cx, op, pn, bce);
1420 static JSOp
1421 GetIncDecInfo(ParseNodeKind kind, bool* post)
1423 MOZ_ASSERT(kind == PNK_POSTINCREMENT || kind == PNK_PREINCREMENT ||
1424 kind == PNK_POSTDECREMENT || kind == PNK_PREDECREMENT);
1425 *post = kind == PNK_POSTINCREMENT || kind == PNK_POSTDECREMENT;
1426 return (kind == PNK_POSTINCREMENT || kind == PNK_PREINCREMENT) ? JSOP_ADD : JSOP_SUB;
1429 static bool
1430 EmitVarIncDec(ExclusiveContext* cx, ParseNode* pn, BytecodeEmitter* bce)
1432 JSOp op = pn->pn_kid->getOp();
1433 MOZ_ASSERT(IsArgOp(op) || IsLocalOp(op) || IsAliasedVarOp(op));
1434 MOZ_ASSERT(pn->pn_kid->isKind(PNK_NAME));
1435 MOZ_ASSERT(!pn->pn_kid->pn_cookie.isFree());
1437 bool post;
1438 JSOp binop = GetIncDecInfo(pn->getKind(), &post);
1440 JSOp getOp, setOp;
1441 if (IsLocalOp(op)) {
1442 getOp = JSOP_GETLOCAL;
1443 setOp = JSOP_SETLOCAL;
1444 } else if (IsArgOp(op)) {
1445 getOp = JSOP_GETARG;
1446 setOp = JSOP_SETARG;
1447 } else {
1448 getOp = JSOP_GETALIASEDVAR;
1449 setOp = JSOP_SETALIASEDVAR;
1452 if (!EmitVarOp(cx, pn->pn_kid, getOp, bce)) // V
1453 return false;
1454 if (Emit1(cx, bce, JSOP_POS) < 0) // N
1455 return false;
1456 if (post && Emit1(cx, bce, JSOP_DUP) < 0) // N? N
1457 return false;
1458 if (Emit1(cx, bce, JSOP_ONE) < 0) // N? N 1
1459 return false;
1460 if (Emit1(cx, bce, binop) < 0) // N? N+1
1461 return false;
1462 if (!EmitVarOp(cx, pn->pn_kid, setOp, bce)) // N? N+1
1463 return false;
1464 if (post && Emit1(cx, bce, JSOP_POP) < 0) // RESULT
1465 return false;
1467 return true;
// Return whether the binding behind |pn| must be accessed through the scope
// chain (aliased) rather than via a frame slot or argument slot.
bool
BytecodeEmitter::isAliasedName(ParseNode* pn)
{
    Definition* dn = pn->resolve();
    MOZ_ASSERT(dn->isDefn());
    MOZ_ASSERT(!dn->isPlaceholder());
    MOZ_ASSERT(dn->isBound());

    /* If dn is in an enclosing function, it is definitely aliased. */
    if (dn->pn_cookie.level() != script->staticLevel())
        return true;

    switch (dn->kind()) {
      case Definition::LET:
      case Definition::CONST:
        /*
         * There are two ways to alias a let variable: nested functions and
         * dynamic scope operations. (This is overly conservative since the
         * bindingsAccessedDynamically flag, checked by allLocalsAliased, is
         * function-wide.)
         *
         * In addition all locals in generators are marked as aliased, to ensure
         * that they are allocated on scope chains instead of on the stack. See
         * the definition of SharedContext::allLocalsAliased.
         */
        return dn->isClosed() || sc->allLocalsAliased();
      case Definition::ARG:
        /*
         * Consult the bindings, since they already record aliasing. We might
         * be tempted to use the same definition as VAR/CONST/LET, but there is
         * a problem caused by duplicate arguments: only the last argument with
         * a given name is aliased. This is necessary to avoid generating a
         * shape for the call object with more than one name for a given
         * slot (which violates internal engine invariants). All this means that
         * the '|| sc->allLocalsAliased()' disjunct is incorrect since it will
         * mark both parameters in function(x,x) as aliased.
         */
        return script->formalIsAliased(pn->pn_cookie.slot());
      case Definition::VAR:
      case Definition::GLOBALCONST:
        MOZ_ASSERT_IF(sc->allLocalsAliased(), script->cookieIsAliased(pn->pn_cookie));
        return script->cookieIsAliased(pn->pn_cookie);
      case Definition::PLACEHOLDER:
      case Definition::NAMED_LAMBDA:
      case Definition::MISSING:
        MOZ_CRASH("unexpected dn->kind");
    }
    return false;
}
1520 static JSOp
1521 StrictifySetNameOp(JSOp op, BytecodeEmitter* bce)
1523 switch (op) {
1524 case JSOP_SETNAME:
1525 if (bce->sc->strict)
1526 op = JSOP_STRICTSETNAME;
1527 break;
1528 case JSOP_SETGNAME:
1529 if (bce->sc->strict)
1530 op = JSOP_STRICTSETGNAME;
1531 break;
1532 default:;
1534 return op;
1537 static void
1538 StrictifySetNameNode(ParseNode* pn, BytecodeEmitter* bce)
1540 pn->setOp(StrictifySetNameOp(pn->getOp(), bce));
1544 * Try to convert a *NAME op with a free name to a more specialized GNAME,
1545 * INTRINSIC or ALIASEDVAR op, which optimize accesses on that name.
1546 * Return true if a conversion was made.
static bool
TryConvertFreeName(BytecodeEmitter* bce, ParseNode* pn)
{
    /*
     * In self-hosting mode, JSOP_*NAME is unconditionally converted to
     * JSOP_*INTRINSIC. This causes lookups to be redirected to the special
     * intrinsics holder in the global object, into which any missing values are
     * cloned lazily upon first access.
     */
    if (bce->emitterMode == BytecodeEmitter::SelfHosting) {
        JSOp op;
        switch (pn->getOp()) {
          case JSOP_GETNAME: op = JSOP_GETINTRINSIC; break;
          case JSOP_SETNAME: op = JSOP_SETINTRINSIC; break;
          /* Other *NAME ops aren't (yet) supported in self-hosted code. */
          default: MOZ_CRASH("intrinsic");
        }
        pn->setOp(op);
        return true;
    }

    /*
     * When parsing inner functions lazily, parse nodes for outer functions no
     * longer exist and only the function's scope chain is available for
     * resolving upvar accesses within the inner function.
     */
    if (bce->emitterMode == BytecodeEmitter::LazyFunction) {
        // The only statements within a lazy function which can push lexical
        // scopes are try/catch blocks. Use generic ops in this case.
        for (StmtInfoBCE* stmt = bce->topStmt; stmt; stmt = stmt->down) {
            if (stmt->type == STMT_CATCH)
                return true;
        }

        // Count this function's own contribution to the hop count before
        // walking the enclosing static scope chain.
        size_t hops = 0;
        FunctionBox* funbox = bce->sc->asFunctionBox();
        if (funbox->hasExtensibleScope())
            return false;
        if (funbox->function()->isNamedLambda() && funbox->function()->atom() == pn->pn_atom)
            return false;
        if (funbox->isHeavyweight()) {
            hops++;
            if (funbox->function()->isNamedLambda())
                hops++;
        }
        if (bce->script->directlyInsideEval())
            return false;
        RootedObject outerScope(bce->sc->context, bce->script->enclosingStaticScope());
        for (StaticScopeIter<CanGC> ssi(bce->sc->context, outerScope); !ssi.done(); ssi++) {
            if (ssi.type() != StaticScopeIter<CanGC>::FUNCTION) {
                if (ssi.type() == StaticScopeIter<CanGC>::BLOCK) {
                    // Use generic ops if a catch block is encountered.
                    return false;
                }
                if (ssi.hasDynamicScopeObject())
                    hops++;
                continue;
            }
            // A named enclosing function shadows the free name; fall back.
            RootedScript script(bce->sc->context, ssi.funScript());
            if (script->functionNonDelazifying()->atom() == pn->pn_atom)
                return false;
            if (ssi.hasDynamicScopeObject()) {
                uint32_t slot;
                if (LookupAliasedName(bce, script, pn->pn_atom->asPropertyName(), &slot, pn)) {
                    // Found the aliased binding: convert to an ALIASEDVAR op.
                    JSOp op;
                    switch (pn->getOp()) {
                      case JSOP_GETNAME: op = JSOP_GETALIASEDVAR; break;
                      case JSOP_SETNAME: op = JSOP_SETALIASEDVAR; break;
                      default: return false;
                    }
                    pn->setOp(op);
                    JS_ALWAYS_TRUE(pn->pn_cookie.set(bce->parser->tokenStream, hops, slot));
                    return true;
                }
                hops++;
            }

            if (script->funHasExtensibleScope() || script->directlyInsideEval())
                return false;
        }
    }

    // Unbound names aren't recognizable global-property references if the
    // script isn't running against its global object.
    if (!bce->script->compileAndGo() || !bce->hasGlobalScope)
        return false;

    // Deoptimized names also aren't necessarily globals.
    if (pn->isDeoptimized())
        return false;

    if (bce->sc->isFunctionBox()) {
        // Unbound names in function code may not be globals if new locals can
        // be added to this function (or an enclosing one) to alias a global
        // reference.
        FunctionBox* funbox = bce->sc->asFunctionBox();
        if (funbox->mightAliasLocals())
            return false;
    }

    // If this is eval code, being evaluated inside strict mode eval code,
    // an "unbound" name might be a binding local to that outer eval:
    //
    //   var x = "GLOBAL";
    //   eval('"use strict"; ' +
    //        'var x; ' +
    //        'eval("print(x)");'); // "undefined", not "GLOBAL"
    //
    // Given the enclosing eval code's strictness and its bindings (neither is
    // readily available now), we could exactly check global-ness, but it's not
    // worth the trouble for doubly-nested eval code. So we conservatively
    // approximate. If the outer eval code is strict, then this eval code will
    // be: thus, don't optimize if we're compiling strict code inside an eval.
    if (bce->insideEval && bce->sc->strict)
        return false;

    // All checks passed: this free name is a global-property reference.
    JSOp op;
    switch (pn->getOp()) {
      case JSOP_GETNAME: op = JSOP_GETGNAME; break;
      case JSOP_SETNAME: op = StrictifySetNameOp(JSOP_SETGNAME, bce); break;
      case JSOP_SETCONST:
        // Not supported.
        return false;
      default: MOZ_CRASH("gname");
    }
    pn->setOp(op);
    return true;
}
1679 * BindNameToSlotHelper attempts to optimize name gets and sets to stack slot
1680 * loads and stores, given the compile-time information in bce and a PNK_NAME
1681 * node pn. It returns false on error, true on success.
1683 * The caller can test pn->pn_cookie.isFree() to tell whether optimization
1684 * occurred, in which case BindNameToSlotHelper also updated pn->pn_op. If
1685 * pn->pn_cookie.isFree() is still true on return, pn->pn_op still may have
1686 * been optimized, e.g., from JSOP_GETNAME to JSOP_CALLEE. Whether or not
1687 * pn->pn_op was modified, if this function finds an argument or local variable
1688 * name, PND_CONST will be set in pn_dflags for read-only properties after a
1689 * successful return.
1691 * NB: if you add more opcodes specialized from JSOP_GETNAME, etc., don't forget
1692 * to update the special cases in EmitFor (for-in) and EmitAssignment (= and
1693 * op=, e.g. +=).
static bool
BindNameToSlotHelper(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
{
    MOZ_ASSERT(pn->isKind(PNK_NAME));

    MOZ_ASSERT_IF(pn->isKind(PNK_FUNCTION), pn->isBound());

    /* Don't attempt if 'pn' is already bound or deoptimized or a function. */
    if (pn->isBound() || pn->isDeoptimized())
        return true;

    /* JSOP_CALLEE is pre-bound by definition. */
    JSOp op = pn->getOp();
    MOZ_ASSERT(op != JSOP_CALLEE);
    MOZ_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);

    /*
     * The parser already linked name uses to definitions (where not
     * prevented by non-lexical constructs like 'with' and 'eval').
     */
    Definition* dn;
    if (pn->isUsed()) {
        MOZ_ASSERT(pn->pn_cookie.isFree());
        dn = pn->pn_lexdef;
        MOZ_ASSERT(dn->isDefn());
        // Propagate const-ness from the definition to the use.
        pn->pn_dflags |= (dn->pn_dflags & PND_CONST);
    } else if (pn->isDefn()) {
        dn = (Definition*) pn;
    } else {
        return true;
    }

    // Throw an error on attempts to mutate const-declared bindings.
    switch (op) {
      case JSOP_GETNAME:
      case JSOP_SETCONST:
        break;
      default:
        if (pn->isConst()) {
            JSAutoByteString name;
            if (!AtomToPrintableString(cx, pn->pn_atom, &name))
                return false;
            bce->reportError(pn, JSMSG_BAD_CONST_ASSIGN, name.ptr());
            return false;
        }
    }

    // A free cookie means the definition itself is unbound (e.g. a global or
    // eval-introduced name); try the free-name conversions.
    if (dn->pn_cookie.isFree()) {
        if (HandleScript caller = bce->evalCaller) {
            MOZ_ASSERT(bce->script->compileAndGo());

            /*
             * Don't generate upvars on the left side of a for loop. See
             * bug 470758.
             */
            if (bce->emittingForInit)
                return true;

            /*
             * If this is an eval in the global scope, then unbound variables
             * must be globals, so try to use GNAME ops.
             */
            if (!caller->functionOrCallerFunction() && TryConvertFreeName(bce, pn)) {
                pn->pn_dflags |= PND_BOUND;
                return true;
            }

            /*
             * Out of tricks, so we must rely on PICs to optimize named
             * accesses from direct eval called from function code.
             */
            return true;
        }

        /* Optimize accesses to undeclared globals. */
        if (!TryConvertFreeName(bce, pn))
            return true;

        pn->pn_dflags |= PND_BOUND;
        return true;
    }

    /*
     * At this point, we are only dealing with uses that have already been
     * bound to definitions via pn_lexdef. The rest of this routine converts
     * the parse node of the use from its initial JSOP_*NAME* op to a LOCAL/ARG
     * op. This requires setting the node's pn_cookie with a pair (level, slot)
     * where 'level' is the number of function scopes between the use and the
     * def and 'slot' is the index to emit as the immediate of the ARG/LOCAL
     * op. For example, in this code:
     *
     *   function(a,b,x) { return x }
     *   function(y) { function() { return y } }
     *
     * x will get (level = 0, slot = 2) and y will get (level = 1, slot = 0).
     */
    MOZ_ASSERT(!pn->isDefn());
    MOZ_ASSERT(pn->isUsed());
    MOZ_ASSERT(pn->pn_lexdef);
    MOZ_ASSERT(pn->pn_cookie.isFree());

    /*
     * We are compiling a function body and may be able to optimize name
     * to stack slot. Look for an argument or variable in the function and
     * rewrite pn_op and update pn accordingly.
     */
    switch (dn->kind()) {
      case Definition::ARG:
        switch (op) {
          case JSOP_GETNAME:
            op = JSOP_GETARG; break;
          case JSOP_SETNAME:
          case JSOP_STRICTSETNAME:
            op = JSOP_SETARG; break;
          default: MOZ_CRASH("arg");
        }
        MOZ_ASSERT(!pn->isConst());
        break;

      case Definition::VAR:
      case Definition::GLOBALCONST:
      case Definition::CONST:
      case Definition::LET:
        switch (op) {
          case JSOP_GETNAME:
            op = JSOP_GETLOCAL; break;
          case JSOP_SETNAME:
          case JSOP_STRICTSETNAME:
            op = JSOP_SETLOCAL; break;
          case JSOP_SETCONST:
            op = JSOP_SETLOCAL; break;
          default: MOZ_CRASH("local");
        }
        break;

      case Definition::NAMED_LAMBDA: {
        MOZ_ASSERT(dn->isOp(JSOP_CALLEE));
        MOZ_ASSERT(op != JSOP_CALLEE);

        /*
         * Currently, the ALIASEDVAR ops do not support accessing the
         * callee of a DeclEnvObject, so use NAME.
         */
        if (dn->pn_cookie.level() != bce->script->staticLevel())
            return true;

        DebugOnly<JSFunction*> fun = bce->sc->asFunctionBox()->function();
        MOZ_ASSERT(fun->isLambda());
        MOZ_ASSERT(pn->pn_atom == fun->atom());

        /*
         * Leave pn->isOp(JSOP_GETNAME) if bce->fun is heavyweight to
         * address two cases: a new binding introduced by eval, and
         * assignment to the name in strict mode.
         *
         *   var fun = (function f(s) { eval(s); return f; });
         *   assertEq(fun("var f = 42"), 42);
         *
         * ECMAScript specifies that a function expression's name is bound
         * in a lexical environment distinct from that used to bind its
         * named parameters, the arguments object, and its variables. The
         * new binding for "var f = 42" shadows the binding for the
         * function itself, so the name of the function will not refer to
         * the function.
         *
         *   (function f() { "use strict"; f = 12; })();
         *
         * Outside strict mode, assignment to a function expression's name
         * has no effect. But in strict mode, this attempt to mutate an
         * immutable binding must throw a TypeError. We implement this by
         * not optimizing such assignments and by marking such functions as
         * heavyweight, ensuring that the function name is represented in
         * the scope chain so that assignment will throw a TypeError.
         */
        if (!bce->sc->asFunctionBox()->isHeavyweight()) {
            op = JSOP_CALLEE;
            pn->pn_dflags |= PND_CONST;
        }

        pn->setOp(op);
        pn->pn_dflags |= PND_BOUND;
        return true;
      }

      case Definition::PLACEHOLDER:
        return true;

      case Definition::MISSING:
        MOZ_CRASH("missing");
    }

    /*
     * The difference between the current static level and the static level of
     * the definition is the number of function scopes between the current
     * scope and dn's scope.
     */
    unsigned skip = bce->script->staticLevel() - dn->pn_cookie.level();
    MOZ_ASSERT_IF(skip, dn->isClosed());

    /*
     * Explicitly disallow accessing var/let bindings in global scope from
     * nested functions. The reason for this limitation is that, since the
     * global script is not included in the static scope chain (1. because it
     * has no object to stand in the static scope chain, 2. to minimize memory
     * bloat where a single live function keeps its whole global script
     * alive.), ScopeCoordinateToTypeSet is not able to find the var/let's
     * associated types::TypeSet.
     */
    if (skip) {
        BytecodeEmitter* bceSkipped = bce;
        for (unsigned i = 0; i < skip; i++)
            bceSkipped = bceSkipped->parent;
        if (!bceSkipped->sc->isFunctionBox())
            return true;
    }

    MOZ_ASSERT(!pn->isOp(op));
    pn->setOp(op);
    if (!pn->pn_cookie.set(bce->parser->tokenStream, skip, dn->pn_cookie.slot()))
        return false;

    pn->pn_dflags |= PND_BOUND;
    return true;
}
1921 * Attempts to bind the name, then checks that no dynamic scope lookup ops are
1922 * emitted in self-hosting mode. NAME ops do lookups off current scope chain,
1923 * and we do not want to allow self-hosted code to use the dynamic scope.
1925 static bool
1926 BindNameToSlot(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
1928 if (!BindNameToSlotHelper(cx, bce, pn))
1929 return false;
1931 StrictifySetNameNode(pn, bce);
1933 if (bce->emitterMode == BytecodeEmitter::SelfHosting && !pn->isBound()) {
1934 bce->reportError(pn, JSMSG_SELFHOSTED_UNBOUND_NAME);
1935 return false;
1938 return true;
1942 * If pn contains a useful expression, return true with *answer set to true.
1943 * If pn contains a useless expression, return true with *answer set to false.
1944 * Return false on error.
1946 * The caller should initialize *answer to false and invoke this function on
1947 * an expression statement or similar subtree to decide whether the tree could
1948 * produce code that has any side effects. For an expression statement, we
1949 * define useless code as code with no side effects, because the main effect,
1950 * the value left on the stack after the code executes, will be discarded by a
1951 * pop bytecode.
1953 static bool
1954 CheckSideEffects(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn, bool* answer)
1956 if (!pn || *answer)
1957 return true;
1959 switch (pn->getArity()) {
1960 case PN_CODE:
1962 * A named function, contrary to ES3, is no longer useful, because we
1963 * bind its name lexically (using JSOP_CALLEE) instead of creating an
1964 * Object instance and binding a readonly, permanent property in it
1965 * (the object and binding can be detected and hijacked or captured).
1966 * This is a bug fix to ES3; it is fixed in ES3.1 drafts.
1968 MOZ_ASSERT(*answer == false);
1969 return true;
1971 case PN_LIST:
1972 if (pn->isOp(JSOP_NOP) || pn->isOp(JSOP_OR) || pn->isOp(JSOP_AND) ||
1973 pn->isOp(JSOP_STRICTEQ) || pn->isOp(JSOP_STRICTNE)) {
1975 * Non-operators along with ||, &&, ===, and !== never invoke
1976 * toString or valueOf.
1978 bool ok = true;
1979 for (ParseNode* pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next)
1980 ok &= CheckSideEffects(cx, bce, pn2, answer);
1981 return ok;
1984 if (pn->isKind(PNK_GENEXP)) {
1985 /* Generator-expressions are harmless if the result is ignored. */
1986 MOZ_ASSERT(*answer == false);
1987 return true;
1991 * All invocation operations (construct: PNK_NEW, call: PNK_CALL)
1992 * are presumed to be useful, because they may have side effects
1993 * even if their main effect (their return value) is discarded.
1995 * PNK_ELEM binary trees of 3+ nodes are flattened into lists to
1996 * avoid too much recursion. All such lists must be presumed to be
1997 * useful because each index operation could invoke a getter.
1999 * Likewise, array and object initialisers may call prototype
2000 * setters (the __defineSetter__ built-in, and writable __proto__
2001 * on Array.prototype create this hazard). Initialiser list nodes
2002 * have JSOP_NEWINIT in their pn_op.
2004 *answer = true;
2005 return true;
2007 case PN_TERNARY:
2008 return CheckSideEffects(cx, bce, pn->pn_kid1, answer) &&
2009 CheckSideEffects(cx, bce, pn->pn_kid2, answer) &&
2010 CheckSideEffects(cx, bce, pn->pn_kid3, answer);
2012 case PN_BINARY:
2013 case PN_BINARY_OBJ:
2014 if (pn->isAssignment()) {
2016 * Assignment is presumed to be useful, even if the next operation
2017 * is another assignment overwriting this one's ostensible effect,
2018 * because the left operand may be a property with a setter that
2019 * has side effects.
2021 * The only exception is assignment of a useless value to a const
2022 * declared in the function currently being compiled.
2024 ParseNode* pn2 = pn->pn_left;
2025 if (!pn2->isKind(PNK_NAME)) {
2026 *answer = true;
2027 } else {
2028 if (!BindNameToSlot(cx, bce, pn2))
2029 return false;
2030 if (!CheckSideEffects(cx, bce, pn->pn_right, answer))
2031 return false;
2032 if (!*answer && (!pn->isOp(JSOP_NOP) || !pn2->isConst()))
2033 *answer = true;
2035 return true;
2038 if (pn->isOp(JSOP_OR) || pn->isOp(JSOP_AND) || pn->isOp(JSOP_STRICTEQ) ||
2039 pn->isOp(JSOP_STRICTNE)) {
2041 * ||, &&, ===, and !== do not convert their operands via
2042 * toString or valueOf method calls.
2044 return CheckSideEffects(cx, bce, pn->pn_left, answer) &&
2045 CheckSideEffects(cx, bce, pn->pn_right, answer);
2049 * We can't easily prove that neither operand ever denotes an
2050 * object with a toString or valueOf method.
2052 *answer = true;
2053 return true;
2055 case PN_UNARY:
2056 switch (pn->getKind()) {
2057 case PNK_DELETE:
2059 ParseNode* pn2 = pn->pn_kid;
2060 switch (pn2->getKind()) {
2061 case PNK_NAME:
2062 if (!BindNameToSlot(cx, bce, pn2))
2063 return false;
2064 if (pn2->isConst()) {
2065 MOZ_ASSERT(*answer == false);
2066 return true;
2068 /* FALL THROUGH */
2069 case PNK_DOT:
2070 case PNK_CALL:
2071 case PNK_ELEM:
2072 /* All these delete addressing modes have effects too. */
2073 *answer = true;
2074 return true;
2075 default:
2076 return CheckSideEffects(cx, bce, pn2, answer);
2078 MOZ_CRASH("We have a returning default case");
2081 case PNK_TYPEOF:
2082 case PNK_VOID:
2083 case PNK_NOT:
2084 case PNK_BITNOT:
2085 if (pn->isOp(JSOP_NOT)) {
2086 /* ! does not convert its operand via toString or valueOf. */
2087 return CheckSideEffects(cx, bce, pn->pn_kid, answer);
2089 /* FALL THROUGH */
2091 default:
2093 * All of PNK_INC, PNK_DEC and PNK_THROW have direct effects. Of
2094 * the remaining unary-arity node types, we can't easily prove that
2095 * the operand never denotes an object with a toString or valueOf
2096 * method.
2098 *answer = true;
2099 return true;
2101 MOZ_CRASH("We have a returning default case");
2103 case PN_NAME:
2105 * Take care to avoid trying to bind a label name (labels, both for
2106 * statements and property values in object initialisers, have pn_op
2107 * defaulted to JSOP_NOP).
2109 if (pn->isKind(PNK_NAME) && !pn->isOp(JSOP_NOP)) {
2110 if (!BindNameToSlot(cx, bce, pn))
2111 return false;
2112 if (!pn->isOp(JSOP_CALLEE) && pn->pn_cookie.isFree()) {
2114 * Not a use of an unshadowed named function expression's given
2115 * name, so this expression could invoke a getter that has side
2116 * effects.
2118 *answer = true;
2122 if (pn->isHoistedLexicalUse()) {
2123 // Hoisted uses of lexical bindings throw on access.
2124 *answer = true;
2127 if (pn->isKind(PNK_DOT)) {
2128 /* Dotted property references in general can call getters. */
2129 *answer = true;
2131 return CheckSideEffects(cx, bce, pn->maybeExpr(), answer);
2133 case PN_NULLARY:
2134 if (pn->isKind(PNK_DEBUGGER))
2135 *answer = true;
2136 return true;
2138 return true;
2141 bool
2142 BytecodeEmitter::isInLoop()
2144 for (StmtInfoBCE* stmt = topStmt; stmt; stmt = stmt->down) {
2145 if (stmt->isLoop())
2146 return true;
2148 return false;
2151 bool
2152 BytecodeEmitter::checkSingletonContext()
2154 if (!script->compileAndGo() || sc->isFunctionBox() || isInLoop())
2155 return false;
2156 hasSingletons = true;
2157 return true;
// Decide whether a name reference used as a callee needs JSOP_IMPLICITTHIS
// (i.e. whether a |with|-like dynamic scope could supply the |this| value).
bool
BytecodeEmitter::needsImplicitThis()
{
    // Without compile-and-go we cannot see the scope chain statically, so
    // conservatively require the implicit-this op.
    if (!script->compileAndGo())
        return true;

    if (sc->isFunctionBox()) {
        // Function code: only a syntactic |with| enclosing the function matters.
        if (sc->asFunctionBox()->inWith)
            return true;
    } else {
        // Global code: scan the dynamic scope chain for a with object.
        JSObject* scope = sc->asGlobalSharedContext()->scopeChain();
        while (scope) {
            if (scope->is<DynamicWithObject>())
                return true;
            scope = scope->enclosingScope();
        }
    }

    // Finally, check for a |with| statement currently open in this emitter.
    for (StmtInfoBCE* stmt = topStmt; stmt; stmt = stmt->down) {
        if (stmt->type == STMT_WITH)
            return true;
    }
    return false;
}
// Fire the Debugger's new-script hook for a freshly compiled top-level script.
void
BytecodeEmitter::tellDebuggerAboutCompiledScript(ExclusiveContext* cx)
{
    // Note: when parsing off thread the resulting scripts need to be handed to
    // the debugger after rejoining to the main thread.
    if (!cx->isJSContext())
        return;

    // Lazy scripts are never top level (despite always being invoked with a
    // nullptr parent), and so the hook should never be fired.
    if (emitterMode != LazyFunction && !parent) {
        // Only compile-and-go scripts carry a global for the debugger hook.
        GlobalObject* compileAndGoGlobal = nullptr;
        if (script->compileAndGo())
            compileAndGoGlobal = &script->global();
        Debugger::onNewScript(cx->asJSContext(), script, compileAndGoGlobal);
    }
}
// Accessor for the parser's token stream, used for error reporting positions.
inline TokenStream*
BytecodeEmitter::tokenStream()
{
    return &parser->tokenStream;
}
// Report a hard compile error at |pn|'s position (or the current token if
// |pn| is null). Variadic arguments fill the numbered error message.
bool
BytecodeEmitter::reportError(ParseNode* pn, unsigned errorNumber, ...)
{
    TokenPos pos = pn ? pn->pn_pos : tokenStream()->currentToken().pos;

    va_list args;
    va_start(args, errorNumber);
    bool result = tokenStream()->reportCompileErrorNumberVA(pos.begin, JSREPORT_ERROR,
                                                            errorNumber, args);
    va_end(args);
    return result;
}
// Report a strict-mode warning (non-fatal) at |pn|'s position, or at the
// current token when |pn| is null.
bool
BytecodeEmitter::reportStrictWarning(ParseNode* pn, unsigned errorNumber, ...)
{
    TokenPos pos = pn ? pn->pn_pos : tokenStream()->currentToken().pos;

    va_list args;
    va_start(args, errorNumber);
    bool result = tokenStream()->reportStrictWarningErrorNumberVA(pos.begin, errorNumber, args);
    va_end(args);
    return result;
}
// Report an error that is fatal only in strict mode (sc->strict selects
// error vs. warning), positioned at |pn| or the current token.
bool
BytecodeEmitter::reportStrictModeError(ParseNode* pn, unsigned errorNumber, ...)
{
    TokenPos pos = pn ? pn->pn_pos : tokenStream()->currentToken().pos;

    va_list args;
    va_start(args, errorNumber);
    bool result = tokenStream()->reportStrictModeErrorNumberVA(pos.begin, sc->strict,
                                                               errorNumber, args);
    va_end(args);
    return result;
}
// Emit JSOP_NEWINIT with |key| (object vs. array) as its first immediate
// byte; the remaining three immediate bytes are unused and zeroed.
static bool
EmitNewInit(ExclusiveContext* cx, BytecodeEmitter* bce, JSProtoKey key)
{
    // 1 opcode byte plus a 4-byte (uint32) immediate.
    const size_t len = 1 + UINT32_INDEX_LEN;
    ptrdiff_t offset = EmitCheck(cx, bce, len);
    if (offset < 0)
        return false;

    jsbytecode* code = bce->code(offset);
    code[0] = JSOP_NEWINIT;
    code[1] = jsbytecode(key);
    code[2] = 0;
    code[3] = 0;
    code[4] = 0;
    UpdateDepth(cx, bce, offset);
    CheckTypeSet(cx, bce, JSOP_NEWINIT);
    return true;
}
// Build a template object with the { value, done } shape of an iterator
// result and register it in the emitter's object list, returning its index
// through |shape|. Only valid for compile-and-go scripts.
static bool
IteratorResultShape(ExclusiveContext* cx, BytecodeEmitter* bce, unsigned* shape)
{
    MOZ_ASSERT(bce->script->compileAndGo());

    RootedPlainObject obj(cx);
    // Pre-size for exactly the two properties we are about to define.
    gc::AllocKind kind = GuessObjectGCKind(2);
    obj = NewBuiltinClassInstance<PlainObject>(cx, kind);
    if (!obj)
        return false;

    Rooted<jsid> value_id(cx, AtomToId(cx->names().value));
    Rooted<jsid> done_id(cx, AtomToId(cx->names().done));
    if (!DefineNativeProperty(cx, obj, value_id, UndefinedHandleValue, nullptr, nullptr,
                              JSPROP_ENUMERATE))
    {
        return false;
    }
    if (!DefineNativeProperty(cx, obj, done_id, UndefinedHandleValue, nullptr, nullptr,
                              JSPROP_ENUMERATE))
    {
        return false;
    }

    ObjectBox* objbox = bce->parser->newObjectBox(obj);
    if (!objbox)
        return false;

    *shape = bce->objectList.add(objbox);

    return true;
}
2299 static bool
2300 EmitPrepareIteratorResult(ExclusiveContext* cx, BytecodeEmitter* bce)
2302 if (bce->script->compileAndGo()) {
2303 unsigned shape;
2304 if (!IteratorResultShape(cx, bce, &shape))
2305 return false;
2306 return EmitIndex32(cx, JSOP_NEWOBJECT, shape, bce);
2309 return EmitNewInit(cx, bce, JSProto_Object);
// Finish an iterator result object: initialize its |value| property from the
// value on the stack, then push |done| (true/false) and initialize |done|.
static bool
EmitFinishIteratorResult(ExclusiveContext* cx, BytecodeEmitter* bce, bool done)
{
    jsatomid value_id;
    if (!bce->makeAtomIndex(cx->names().value, &value_id))
        return false;
    jsatomid done_id;
    if (!bce->makeAtomIndex(cx->names().done, &done_id))
        return false;

    if (!EmitIndex32(cx, JSOP_INITPROP, value_id, bce))
        return false;
    if (Emit1(cx, bce, done ? JSOP_TRUE : JSOP_FALSE) < 0)
        return false;
    if (!EmitIndex32(cx, JSOP_INITPROP, done_id, bce))
        return false;
    return true;
}
// Emit the op for a name reference. After BindNameToSlot, a bound cookie
// selects a var-slot op, otherwise an atom-indexed op. When the name is the
// callee of a call (|callContext|), also push a |this| value.
static bool
EmitNameOp(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn, bool callContext)
{
    if (!BindNameToSlot(cx, bce, pn))
        return false;

    JSOp op = pn->getOp();

    if (op == JSOP_CALLEE) {
        if (Emit1(cx, bce, op) < 0)
            return false;
    } else {
        if (!pn->pn_cookie.isFree()) {
            // Bound to a slot: must not be an atom-immediate op.
            MOZ_ASSERT(JOF_OPTYPE(op) != JOF_ATOM);
            if (!EmitVarOp(cx, pn, op, bce))
                return false;
        } else {
            if (!EmitAtomOp(cx, pn, op, bce))
                return false;
        }
    }

    /* Need to provide |this| value for call */
    if (callContext) {
        if (op == JSOP_GETNAME && bce->needsImplicitThis()) {
            // A with-scope may supply |this|; resolve it dynamically.
            if (!EmitAtomOp(cx, pn, JSOP_IMPLICITTHIS, bce))
                return false;
        } else {
            if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)
                return false;
        }
    }

    return true;
}
// Emit the object part of a dotted property reference |pn| (PNK_DOT),
// leaving the base object on the stack so the caller can emit the final
// property op itself.
static bool
EmitPropLHS(ExclusiveContext* cx, ParseNode* pn, JSOp op, BytecodeEmitter* bce)
{
    MOZ_ASSERT(pn->isKind(PNK_DOT));
    ParseNode* pn2 = pn->maybeExpr();

    /*
     * If the object operand is also a dotted property reference, reverse the
     * list linked via pn_expr temporarily so we can iterate over it from the
     * bottom up (reversing again as we go), to avoid excessive recursion.
     */
    if (pn2->isKind(PNK_DOT)) {
        ParseNode* pndot = pn2;
        ParseNode* pnup = nullptr, *pndown;
        ptrdiff_t top = bce->offset();
        for (;;) {
            /* Reverse pndot->pn_expr to point up, not down. */
            pndot->pn_offset = top;
            MOZ_ASSERT(!pndot->isUsed());
            pndown = pndot->pn_expr;
            pndot->pn_expr = pnup;
            if (!pndown->isKind(PNK_DOT))
                break;
            pnup = pndot;
            pndot = pndown;
        }

        /* pndown is a primary expression, not a dotted property reference. */
        if (!EmitTree(cx, bce, pndown))
            return false;

        do {
            /* Walk back up the list, emitting annotated name ops. */
            if (!EmitAtomOp(cx, pndot, JSOP_GETPROP, bce))
                return false;

            /* Reverse the pn_expr link again. */
            pnup = pndot->pn_expr;
            pndot->pn_expr = pndown;
            pndown = pndot;
        } while ((pndot = pnup) != nullptr);
        return true;
    }

    // The non-optimized case: base expression is not itself a dot.
    return EmitTree(cx, bce, pn2);
}
2415 static bool
2416 EmitPropOp(ExclusiveContext* cx, ParseNode* pn, JSOp op, BytecodeEmitter* bce)
2418 MOZ_ASSERT(pn->isArity(PN_NAME));
2420 if (!EmitPropLHS(cx, pn, op, bce))
2421 return false;
2423 if (op == JSOP_CALLPROP && Emit1(cx, bce, JSOP_DUP) < 0)
2424 return false;
2426 if (!EmitAtomOp(cx, pn, op, bce))
2427 return false;
2429 if (op == JSOP_CALLPROP && Emit1(cx, bce, JSOP_SWAP) < 0)
2430 return false;
2432 return true;
// Emit ++/-- applied to a dotted property reference (pn->pn_kid is PNK_DOT).
// Stack comments on the right track the operands after each instruction;
// for a postfix op the original numeric value (N) is kept as the result.
static bool
EmitPropIncDec(ExclusiveContext* cx, ParseNode* pn, BytecodeEmitter* bce)
{
    MOZ_ASSERT(pn->pn_kid->getKind() == PNK_DOT);

    bool post;
    JSOp binop = GetIncDecInfo(pn->getKind(), &post);

    JSOp get = JSOP_GETPROP;
    if (!EmitPropLHS(cx, pn->pn_kid, get, bce))     // OBJ
        return false;
    if (Emit1(cx, bce, JSOP_DUP) < 0)               // OBJ OBJ
        return false;
    if (!EmitAtomOp(cx, pn->pn_kid, JSOP_GETPROP, bce)) // OBJ V
        return false;
    if (Emit1(cx, bce, JSOP_POS) < 0)               // OBJ N
        return false;
    if (post && Emit1(cx, bce, JSOP_DUP) < 0)       // OBJ N? N
        return false;
    if (Emit1(cx, bce, JSOP_ONE) < 0)               // OBJ N? N 1
        return false;
    if (Emit1(cx, bce, binop) < 0)                  // OBJ N? N+1
        return false;

    if (post) {
        // Rotate OBJ above the saved N so SETPROP sees OBJ N+1.
        if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)2) < 0)   // N? N+1 OBJ
            return false;
        if (Emit1(cx, bce, JSOP_SWAP) < 0)                  // N? OBJ N+1
            return false;
    }

    JSOp setOp = bce->sc->strict ? JSOP_STRICTSETPROP : JSOP_SETPROP;
    if (!EmitAtomOp(cx, pn->pn_kid, setOp, bce))    // N? N+1
        return false;
    if (post && Emit1(cx, bce, JSOP_POP) < 0)       // RESULT
        return false;

    return true;
}
// Emit ++/-- applied to a plain name. Chooses global vs. scoped bind/get/set
// ops based on the name op's JOF_GNAME format flag; stack comments track
// operands, with postfix keeping the original value (N) as the result.
static bool
EmitNameIncDec(ExclusiveContext* cx, ParseNode* pn, BytecodeEmitter* bce)
{
    const JSCodeSpec* cs = &js_CodeSpec[pn->pn_kid->getOp()];

    bool global = (cs->format & JOF_GNAME);
    bool post;
    JSOp binop = GetIncDecInfo(pn->getKind(), &post);

    if (!EmitAtomOp(cx, pn->pn_kid, global ? JSOP_BINDGNAME : JSOP_BINDNAME, bce))  // OBJ
        return false;
    if (!EmitAtomOp(cx, pn->pn_kid, global ? JSOP_GETGNAME : JSOP_GETNAME, bce))    // OBJ V
        return false;
    if (Emit1(cx, bce, JSOP_POS) < 0)               // OBJ N
        return false;
    if (post && Emit1(cx, bce, JSOP_DUP) < 0)       // OBJ N? N
        return false;
    if (Emit1(cx, bce, JSOP_ONE) < 0)               // OBJ N? N 1
        return false;
    if (Emit1(cx, bce, binop) < 0)                  // OBJ N? N+1
        return false;

    if (post) {
        // Rotate OBJ above the saved N so SETNAME sees OBJ N+1.
        if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)2) < 0)   // N? N+1 OBJ
            return false;
        if (Emit1(cx, bce, JSOP_SWAP) < 0)                  // N? OBJ N+1
            return false;
    }

    JSOp setOp = StrictifySetNameOp(global ? JSOP_SETGNAME : JSOP_SETNAME, bce);
    if (!EmitAtomOp(cx, pn->pn_kid, setOp, bce))    // N? N+1
        return false;
    if (post && Emit1(cx, bce, JSOP_POP) < 0)       // RESULT
        return false;

    return true;
}
/*
 * Emit bytecode to put operands for a JSOP_GETELEM/CALLELEM/SETELEM/DELELEM
 * opcode onto the stack in the right order. In the case of SETELEM, the
 * value to be assigned must already be pushed.
 */
static bool
EmitElemOperands(ExclusiveContext* cx, ParseNode* pn, JSOp op, BytecodeEmitter* bce)
{
    MOZ_ASSERT(pn->isArity(PN_BINARY));
    if (!EmitTree(cx, bce, pn->pn_left))
        return false;
    // CALLELEM needs the object duplicated to supply |this| for the call.
    if (op == JSOP_CALLELEM && Emit1(cx, bce, JSOP_DUP) < 0)
        return false;
    if (!EmitTree(cx, bce, pn->pn_right))
        return false;
    // For SETELEM, lift the previously pushed value above OBJ and KEY.
    bool isSetElem = op == JSOP_SETELEM || op == JSOP_STRICTSETELEM;
    if (isSetElem && Emit2(cx, bce, JSOP_PICK, (jsbytecode)2) < 0)
        return false;
    return true;
}
2534 static inline bool
2535 EmitElemOpBase(ExclusiveContext* cx, BytecodeEmitter* bce, JSOp op)
2537 if (Emit1(cx, bce, op) < 0)
2538 return false;
2539 CheckTypeSet(cx, bce, op);
2540 return true;
2543 static bool
2544 EmitElemOp(ExclusiveContext* cx, ParseNode* pn, JSOp op, BytecodeEmitter* bce)
2546 return EmitElemOperands(cx, pn, op, bce) && EmitElemOpBase(cx, bce, op);
// Emit ++/-- applied to an element access (pn->pn_kid is PNK_ELEM). Stack
// comments track operands; postfix keeps the original value as the result.
static bool
EmitElemIncDec(ExclusiveContext* cx, ParseNode* pn, BytecodeEmitter* bce)
{
    MOZ_ASSERT(pn->pn_kid->getKind() == PNK_ELEM);

    if (!EmitElemOperands(cx, pn->pn_kid, JSOP_GETELEM, bce))
        return false;

    bool post;
    JSOp binop = GetIncDecInfo(pn->getKind(), &post);

    /*
     * We need to convert the key to an object id first, so that we do not do
     * it inside both the GETELEM and the SETELEM.
     */
                                                    // OBJ KEY*
    if (Emit1(cx, bce, JSOP_TOID) < 0)              // OBJ KEY
        return false;
    if (Emit1(cx, bce, JSOP_DUP2) < 0)              // OBJ KEY OBJ KEY
        return false;
    if (!EmitElemOpBase(cx, bce, JSOP_GETELEM))     // OBJ KEY V
        return false;
    if (Emit1(cx, bce, JSOP_POS) < 0)               // OBJ KEY N
        return false;
    if (post && Emit1(cx, bce, JSOP_DUP) < 0)       // OBJ KEY N? N
        return false;
    if (Emit1(cx, bce, JSOP_ONE) < 0)               // OBJ KEY N? N 1
        return false;
    if (Emit1(cx, bce, binop) < 0)                  // OBJ KEY N? N+1
        return false;

    if (post) {
        // Rotate OBJ and KEY above the saved N so SETELEM sees OBJ KEY N+1.
        if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)3) < 0)   // KEY N N+1 OBJ
            return false;
        if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)3) < 0)   // N N+1 OBJ KEY
            return false;
        if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)2) < 0)   // N OBJ KEY N+1
            return false;
    }

    JSOp setOp = bce->sc->strict ? JSOP_STRICTSETELEM : JSOP_SETELEM;
    if (!EmitElemOpBase(cx, bce, setOp))            // N? N+1
        return false;
    if (post && Emit1(cx, bce, JSOP_POP) < 0)       // RESULT
        return false;

    return true;
}
// Emit the smallest opcode that pushes the numeric constant |dval|:
// ZERO/ONE, INT8, UINT16, UINT24, INT32 for int32-valued doubles, falling
// back to a constant-pool JSOP_DOUBLE otherwise.
static bool
EmitNumberOp(ExclusiveContext* cx, double dval, BytecodeEmitter* bce)
{
    int32_t ival;
    uint32_t u;
    ptrdiff_t off;
    jsbytecode* pc;

    if (NumberIsInt32(dval, &ival)) {
        if (ival == 0)
            return Emit1(cx, bce, JSOP_ZERO) >= 0;
        if (ival == 1)
            return Emit1(cx, bce, JSOP_ONE) >= 0;
        // Fits in a signed byte?
        if ((int)(int8_t)ival == ival)
            return Emit2(cx, bce, JSOP_INT8, (jsbytecode)(int8_t)ival) >= 0;

        u = (uint32_t)ival;
        if (u < JS_BIT(16)) {
            EMIT_UINT16_IMM_OP(JSOP_UINT16, u);
        } else if (u < JS_BIT(24)) {
            off = EmitN(cx, bce, JSOP_UINT24, 3);
            if (off < 0)
                return false;
            pc = bce->code(off);
            SET_UINT24(pc, u);
        } else {
            off = EmitN(cx, bce, JSOP_INT32, 4);
            if (off < 0)
                return false;
            pc = bce->code(off);
            SET_INT32(pc, ival);
        }
        return true;
    }

    // Non-int32 double: store in the constant list and reference by index.
    if (!bce->constList.append(DoubleValue(dval)))
        return false;

    return EmitIndex32(cx, JSOP_DOUBLE, bce->constList.length() - 1, bce);
}
// Back-patch the jump instruction at |off| to target the current end of
// bytecode; jump operands are relative to the jump's own offset.
static inline void
SetJumpOffsetAt(BytecodeEmitter* bce, ptrdiff_t off)
{
    SET_JUMP_OFFSET(bce->code(off), bce->offset() - off);
}
2645 static bool
2646 PushInitialConstants(ExclusiveContext* cx, BytecodeEmitter* bce, JSOp op, unsigned n)
2648 MOZ_ASSERT(op == JSOP_UNDEFINED || op == JSOP_UNINITIALIZED);
2649 for (unsigned i = 0; i < n; ++i) {
2650 if (Emit1(cx, bce, op) < 0)
2651 return false;
2653 return true;
// Pop initial values off the stack (pushed by PushInitialConstants) into the
// block's locals, last variable first, using aliased or unaliased init ops
// depending on whether each local is closed over.
static bool
InitializeBlockScopedLocalsFromStack(ExclusiveContext* cx, BytecodeEmitter* bce,
                                     Handle<StaticBlockObject*> blockObj)
{
    for (unsigned i = blockObj->numVariables(); i > 0; --i) {
        if (blockObj->isAliased(i - 1)) {
            ScopeCoordinate sc;
            sc.setHops(0);
            sc.setSlot(BlockObject::RESERVED_SLOTS + i - 1);
            if (!EmitAliasedVarOp(cx, JSOP_INITALIASEDLEXICAL, sc, DontCheckLexical, bce))
                return false;
        } else {
            // blockIndexToLocalIndex returns the slot index after the unaliased
            // locals stored in the frame. EmitUnaliasedVarOp expects the slot index
            // to include both unaliased and aliased locals, so we have to add the
            // number of aliased locals.
            uint32_t numAliased = bce->script->bindings.numAliasedBodyLevelLocals();
            unsigned local = blockObj->blockIndexToLocalIndex(i - 1) + numAliased;
            if (!EmitUnaliasedVarOp(cx, JSOP_INITLEXICAL, local, DontCheckLexical, bce))
                return false;
        }
        // The init ops leave the value on the stack; discard it.
        if (Emit1(cx, bce, JSOP_POP) < 0)
            return false;
    }
    return true;
}
// Open a lexical block scope: push initial values for its locals (minus any
// the caller already pushed), enter the nested scope, then move the pushed
// values into their proper slots.
static bool
EnterBlockScope(ExclusiveContext* cx, BytecodeEmitter* bce, StmtInfoBCE* stmtInfo,
                ObjectBox* objbox, JSOp initialValueOp, unsigned alreadyPushed = 0)
{
    // Initial values for block-scoped locals. Whether it is undefined or the
    // JS_UNINITIALIZED_LEXICAL magic value depends on the context. The
    // current way we emit for-in and for-of heads means its let bindings will
    // always be initialized, so we can initialize them to undefined.
    Rooted<StaticBlockObject*> blockObj(cx, &objbox->object->as<StaticBlockObject>());
    if (!PushInitialConstants(cx, bce, initialValueOp, blockObj->numVariables() - alreadyPushed))
        return false;

    if (!EnterNestedScope(cx, bce, stmtInfo, objbox, STMT_BLOCK))
        return false;

    if (!InitializeBlockScopedLocalsFromStack(cx, bce, blockObj))
        return false;

    return true;
}
2705 * Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047.
2706 * LLVM is deciding to inline this function which uses a lot of stack space
2707 * into EmitTree which is recursive and uses relatively little stack space.
2709 MOZ_NEVER_INLINE static bool
2710 EmitSwitch(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
2712 JSOp switchOp;
2713 bool hasDefault;
2714 ptrdiff_t top, off, defaultOffset;
2715 ParseNode* pn2, *pn3, *pn4;
2716 int32_t low, high;
2717 int noteIndex;
2718 size_t switchSize;
2719 jsbytecode* pc;
2721 /* Try for most optimal, fall back if not dense ints. */
2722 switchOp = JSOP_TABLESWITCH;
2723 hasDefault = false;
2724 defaultOffset = -1;
2726 pn2 = pn->pn_right;
2727 MOZ_ASSERT(pn2->isKind(PNK_LEXICALSCOPE) || pn2->isKind(PNK_STATEMENTLIST));
2729 /* Push the discriminant. */
2730 if (!EmitTree(cx, bce, pn->pn_left))
2731 return false;
2733 StmtInfoBCE stmtInfo(cx);
2734 if (pn2->isKind(PNK_LEXICALSCOPE)) {
2735 if (!EnterBlockScope(cx, bce, &stmtInfo, pn2->pn_objbox, JSOP_UNINITIALIZED, 0))
2736 return false;
2738 stmtInfo.type = STMT_SWITCH;
2739 stmtInfo.update = top = bce->offset();
2740 /* Advance pn2 to refer to the switch case list. */
2741 pn2 = pn2->expr();
2742 } else {
2743 MOZ_ASSERT(pn2->isKind(PNK_STATEMENTLIST));
2744 top = bce->offset();
2745 PushStatementBCE(bce, &stmtInfo, STMT_SWITCH, top);
2748 /* Switch bytecodes run from here till end of final case. */
2749 uint32_t caseCount = pn2->pn_count;
2750 uint32_t tableLength = 0;
2751 UniquePtr<ParseNode*[], JS::FreePolicy> table(nullptr);
2753 if (caseCount > JS_BIT(16)) {
2754 bce->parser->tokenStream.reportError(JSMSG_TOO_MANY_CASES);
2755 return false;
2758 if (caseCount == 0 ||
2759 (caseCount == 1 &&
2760 (hasDefault = (pn2->pn_head->isKind(PNK_DEFAULT))))) {
2761 caseCount = 0;
2762 low = 0;
2763 high = -1;
2764 } else {
2765 bool ok = true;
2766 #define INTMAP_LENGTH 256
2767 jsbitmap intmap_space[INTMAP_LENGTH];
2768 jsbitmap* intmap = nullptr;
2769 int32_t intmap_bitlen = 0;
2771 low = JSVAL_INT_MAX;
2772 high = JSVAL_INT_MIN;
2774 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
2775 if (pn3->isKind(PNK_DEFAULT)) {
2776 hasDefault = true;
2777 caseCount--; /* one of the "cases" was the default */
2778 continue;
2781 MOZ_ASSERT(pn3->isKind(PNK_CASE));
2782 if (switchOp == JSOP_CONDSWITCH)
2783 continue;
2785 MOZ_ASSERT(switchOp == JSOP_TABLESWITCH);
2787 pn4 = pn3->pn_left;
2789 if (pn4->getKind() != PNK_NUMBER) {
2790 switchOp = JSOP_CONDSWITCH;
2791 continue;
2794 int32_t i;
2795 if (!NumberIsInt32(pn4->pn_dval, &i)) {
2796 switchOp = JSOP_CONDSWITCH;
2797 continue;
2800 if ((unsigned)(i + (int)JS_BIT(15)) >= (unsigned)JS_BIT(16)) {
2801 switchOp = JSOP_CONDSWITCH;
2802 continue;
2804 if (i < low)
2805 low = i;
2806 if (high < i)
2807 high = i;
2810 * Check for duplicates, which require a JSOP_CONDSWITCH.
2811 * We bias i by 65536 if it's negative, and hope that's a rare
2812 * case (because it requires a malloc'd bitmap).
2814 if (i < 0)
2815 i += JS_BIT(16);
2816 if (i >= intmap_bitlen) {
2817 if (!intmap &&
2818 size_t(i) < (INTMAP_LENGTH * JS_BITMAP_NBITS)) {
2819 intmap = intmap_space;
2820 intmap_bitlen = INTMAP_LENGTH * JS_BITMAP_NBITS;
2821 } else {
2822 /* Just grab 8K for the worst-case bitmap. */
2823 intmap_bitlen = JS_BIT(16);
2824 intmap = cx->pod_malloc<jsbitmap>(JS_BIT(16) / JS_BITMAP_NBITS);
2825 if (!intmap) {
2826 js_ReportOutOfMemory(cx);
2827 return false;
2830 memset(intmap, 0, size_t(intmap_bitlen) / CHAR_BIT);
2832 if (JS_TEST_BIT(intmap, i)) {
2833 switchOp = JSOP_CONDSWITCH;
2834 continue;
2836 JS_SET_BIT(intmap, i);
2839 if (intmap && intmap != intmap_space)
2840 js_free(intmap);
2841 if (!ok)
2842 return false;
2845 * Compute table length and select condswitch instead if overlarge or
2846 * more than half-sparse.
2848 if (switchOp == JSOP_TABLESWITCH) {
2849 tableLength = (uint32_t)(high - low + 1);
2850 if (tableLength >= JS_BIT(16) || tableLength > 2 * caseCount)
2851 switchOp = JSOP_CONDSWITCH;
2856 * The note has one or two offsets: first tells total switch code length;
2857 * second (if condswitch) tells offset to first JSOP_CASE.
2859 if (switchOp == JSOP_CONDSWITCH) {
2860 /* 0 bytes of immediate for unoptimized switch. */
2861 switchSize = 0;
2862 noteIndex = NewSrcNote3(cx, bce, SRC_CONDSWITCH, 0, 0);
2863 } else {
2864 MOZ_ASSERT(switchOp == JSOP_TABLESWITCH);
2866 /* 3 offsets (len, low, high) before the table, 1 per entry. */
2867 switchSize = (size_t)(JUMP_OFFSET_LEN * (3 + tableLength));
2868 noteIndex = NewSrcNote2(cx, bce, SRC_TABLESWITCH, 0);
2870 if (noteIndex < 0)
2871 return false;
2873 /* Emit switchOp followed by switchSize bytes of jump or lookup table. */
2874 if (EmitN(cx, bce, switchOp, switchSize) < 0)
2875 return false;
2877 off = -1;
2878 if (switchOp == JSOP_CONDSWITCH) {
2879 int caseNoteIndex = -1;
2880 bool beforeCases = true;
2882 /* Emit code for evaluating cases and jumping to case statements. */
2883 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
2884 pn4 = pn3->pn_left;
2885 if (pn4 && !EmitTree(cx, bce, pn4))
2886 return false;
2887 if (caseNoteIndex >= 0) {
2888 /* off is the previous JSOP_CASE's bytecode offset. */
2889 if (!SetSrcNoteOffset(cx, bce, (unsigned)caseNoteIndex, 0, bce->offset() - off))
2890 return false;
2892 if (!pn4) {
2893 MOZ_ASSERT(pn3->isKind(PNK_DEFAULT));
2894 continue;
2896 caseNoteIndex = NewSrcNote2(cx, bce, SRC_NEXTCASE, 0);
2897 if (caseNoteIndex < 0)
2898 return false;
2899 off = EmitJump(cx, bce, JSOP_CASE, 0);
2900 if (off < 0)
2901 return false;
2902 pn3->pn_offset = off;
2903 if (beforeCases) {
2904 unsigned noteCount, noteCountDelta;
2906 /* Switch note's second offset is to first JSOP_CASE. */
2907 noteCount = bce->notes().length();
2908 if (!SetSrcNoteOffset(cx, bce, (unsigned)noteIndex, 1, off - top))
2909 return false;
2910 noteCountDelta = bce->notes().length() - noteCount;
2911 if (noteCountDelta != 0)
2912 caseNoteIndex += noteCountDelta;
2913 beforeCases = false;
2918 * If we didn't have an explicit default (which could fall in between
2919 * cases, preventing us from fusing this SetSrcNoteOffset with the call
2920 * in the loop above), link the last case to the implicit default for
2921 * the benefit of IonBuilder.
2923 if (!hasDefault &&
2924 caseNoteIndex >= 0 &&
2925 !SetSrcNoteOffset(cx, bce, (unsigned)caseNoteIndex, 0, bce->offset() - off))
2927 return false;
2930 /* Emit default even if no explicit default statement. */
2931 defaultOffset = EmitJump(cx, bce, JSOP_DEFAULT, 0);
2932 if (defaultOffset < 0)
2933 return false;
2934 } else {
2935 MOZ_ASSERT(switchOp == JSOP_TABLESWITCH);
2936 pc = bce->code(top + JUMP_OFFSET_LEN);
2938 /* Fill in switch bounds, which we know fit in 16-bit offsets. */
2939 SET_JUMP_OFFSET(pc, low);
2940 pc += JUMP_OFFSET_LEN;
2941 SET_JUMP_OFFSET(pc, high);
2942 pc += JUMP_OFFSET_LEN;
2945 * Use malloc to avoid arena bloat for programs with many switches.
2946 * UniquePtr takes care of freeing it on exit.
2948 if (tableLength != 0) {
2949 table = cx->make_zeroed_pod_array<ParseNode*>(tableLength);
2950 if (!table)
2951 return false;
2952 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
2953 if (pn3->isKind(PNK_DEFAULT))
2954 continue;
2956 MOZ_ASSERT(pn3->isKind(PNK_CASE));
2958 pn4 = pn3->pn_left;
2959 MOZ_ASSERT(pn4->getKind() == PNK_NUMBER);
2961 int32_t i = int32_t(pn4->pn_dval);
2962 MOZ_ASSERT(double(i) == pn4->pn_dval);
2964 i -= low;
2965 MOZ_ASSERT(uint32_t(i) < tableLength);
2966 table[i] = pn3;
2971 /* Emit code for each case's statements, copying pn_offset up to pn3. */
2972 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
2973 if (switchOp == JSOP_CONDSWITCH && !pn3->isKind(PNK_DEFAULT))
2974 SetJumpOffsetAt(bce, pn3->pn_offset);
2975 pn4 = pn3->pn_right;
2976 if (!EmitTree(cx, bce, pn4))
2977 return false;
2978 pn3->pn_offset = pn4->pn_offset;
2979 if (pn3->isKind(PNK_DEFAULT))
2980 off = pn3->pn_offset - top;
2983 if (!hasDefault) {
2984 /* If no default case, offset for default is to end of switch. */
2985 off = bce->offset() - top;
2988 /* We better have set "off" by now. */
2989 MOZ_ASSERT(off != -1);
2991 /* Set the default offset (to end of switch if no default). */
2992 if (switchOp == JSOP_CONDSWITCH) {
2993 pc = nullptr;
2994 MOZ_ASSERT(defaultOffset != -1);
2995 SET_JUMP_OFFSET(bce->code(defaultOffset), off - (defaultOffset - top));
2996 } else {
2997 pc = bce->code(top);
2998 SET_JUMP_OFFSET(pc, off);
2999 pc += JUMP_OFFSET_LEN;
3002 /* Set the SRC_SWITCH note's offset operand to tell end of switch. */
3003 off = bce->offset() - top;
3004 if (!SetSrcNoteOffset(cx, bce, (unsigned)noteIndex, 0, off))
3005 return false;
3007 if (switchOp == JSOP_TABLESWITCH) {
3008 /* Skip over the already-initialized switch bounds. */
3009 pc += 2 * JUMP_OFFSET_LEN;
3011 /* Fill in the jump table, if there is one. */
3012 for (uint32_t i = 0; i < tableLength; i++) {
3013 pn3 = table[i];
3014 off = pn3 ? pn3->pn_offset - top : 0;
3015 SET_JUMP_OFFSET(pc, off);
3016 pc += JUMP_OFFSET_LEN;
3020 if (pn->pn_right->isKind(PNK_LEXICALSCOPE)) {
3021 if (!LeaveNestedScope(cx, bce, &stmtInfo))
3022 return false;
3023 } else {
3024 if (!PopStatementBCE(cx, bce))
3025 return false;
3028 return true;
3031 bool
3032 BytecodeEmitter::isRunOnceLambda()
3034 // The run once lambda flags set by the parser are approximate, and we look
3035 // at properties of the function itself before deciding to emit a function
3036 // as a run once lambda.
3038 if (!(parent && parent->emittingRunOnceLambda) &&
3039 (emitterMode != LazyFunction || !lazyScript->treatAsRunOnce()))
3041 return false;
3044 FunctionBox* funbox = sc->asFunctionBox();
3045 return !funbox->argumentsHasLocalBinding() &&
3046 !funbox->isGenerator() &&
3047 !funbox->function()->name();
// Emit a yield-family opcode. INITIALYIELD/YIELD carry a 24-bit yield index
// immediate and record the bytecode offset following the yield so the
// generator can be resumed there; FINALYIELDRVAL has no immediate.
static bool
EmitYieldOp(ExclusiveContext* cx, BytecodeEmitter* bce, JSOp op)
{
    if (op == JSOP_FINALYIELDRVAL)
        return Emit1(cx, bce, JSOP_FINALYIELDRVAL) >= 0;

    MOZ_ASSERT(op == JSOP_INITIALYIELD || op == JSOP_YIELD);

    ptrdiff_t off = EmitN(cx, bce, op, 3);
    if (off < 0)
        return false;

    uint32_t yieldIndex = bce->yieldOffsetList.length();
    if (yieldIndex >= JS_BIT(24)) {
        bce->reportError(nullptr, JSMSG_TOO_MANY_YIELDS);
        return false;
    }

    SET_UINT24(bce->code(off), yieldIndex);

    // Record the resume point (the offset just past this yield).
    if (!bce->yieldOffsetList.append(bce->offset()))
        return false;

    return Emit1(cx, bce, JSOP_DEBUGAFTERYIELD) >= 0;
}
// Emit the complete bytecode for a function body: the arguments/run-once
// prologue, the body itself, the generator final-yield epilogue (if any),
// the trailing JSOP_RETRVAL, and finally initialize the JSScript.
bool
frontend::EmitFunctionScript(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* body)
{
    if (!bce->updateLocalsToFrameSlots())
        return false;

    /*
     * IonBuilder has assumptions about what may occur immediately after
     * script->main (e.g., in the case of destructuring params). Thus, put the
     * following ops into the range [script->code, script->main). Note:
     * execution starts from script->code, so this has no semantic effect.
     */

    FunctionBox* funbox = bce->sc->asFunctionBox();
    if (funbox->argumentsHasLocalBinding()) {
        MOZ_ASSERT(bce->offset() == 0);  /* See JSScript::argumentsBytecode. */
        bce->switchToProlog();
        if (Emit1(cx, bce, JSOP_ARGUMENTS) < 0)
            return false;
        InternalBindingsHandle bindings(bce->script, &bce->script->bindings);
        BindingIter bi = Bindings::argumentsBinding(cx, bindings);
        if (bce->script->bindingIsAliased(bi)) {
            // |arguments| lives in a scope object slot.
            ScopeCoordinate sc;
            sc.setHops(0);
            JS_ALWAYS_TRUE(LookupAliasedNameSlot(bce, bce->script, cx->names().arguments, &sc));
            if (!EmitAliasedVarOp(cx, JSOP_SETALIASEDVAR, sc, DontCheckLexical, bce))
                return false;
        } else {
            // |arguments| lives in an ordinary frame local.
            if (!EmitUnaliasedVarOp(cx, JSOP_SETLOCAL, bi.localIndex(), DontCheckLexical, bce))
                return false;
        }
        if (Emit1(cx, bce, JSOP_POP) < 0)
            return false;
        bce->switchToMain();
    }

    /*
     * Emit a prologue for run-once scripts which will deoptimize JIT code if
     * the script ends up running multiple times via foo.caller related
     * shenanigans.
     */
    bool runOnce = bce->isRunOnceLambda();
    if (runOnce) {
        bce->switchToProlog();
        if (Emit1(cx, bce, JSOP_RUNONCE) < 0)
            return false;
        bce->switchToMain();
    }

    if (!EmitTree(cx, bce, body))
        return false;

    // If we fall off the end of a generator, do a final yield.
    if (bce->sc->isFunctionBox() && bce->sc->asFunctionBox()->isGenerator()) {
        // Star generators return a { value: undefined, done: true } object.
        if (bce->sc->asFunctionBox()->isStarGenerator() && !EmitPrepareIteratorResult(cx, bce))
            return false;

        if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)
            return false;

        if (bce->sc->asFunctionBox()->isStarGenerator() && !EmitFinishIteratorResult(cx, bce, true))
            return false;

        if (Emit1(cx, bce, JSOP_SETRVAL) < 0)
            return false;

        ScopeCoordinate sc;
        // We know that .generator is on the top scope chain node, as we are
        // at the function end.
        sc.setHops(0);
        MOZ_ALWAYS_TRUE(LookupAliasedNameSlot(bce, bce->script, cx->names().dotGenerator, &sc));
        if (!EmitAliasedVarOp(cx, JSOP_GETALIASEDVAR, sc, DontCheckLexical, bce))
            return false;

        // No need to check for finally blocks, etc as in EmitReturn.
        if (!EmitYieldOp(cx, bce, JSOP_FINALYIELDRVAL))
            return false;
    }

    /*
     * Always end the script with a JSOP_RETRVAL. Some other parts of the codebase
     * depend on this opcode, e.g. js_InternalInterpret.
     */
    if (Emit1(cx, bce, JSOP_RETRVAL) < 0)
        return false;

    // If all locals are aliased, the frame's block slots won't be used, so we
    // can set numBlockScoped = 0. This is nice for generators as it ensures
    // nfixed == 0, so we don't have to initialize any local slots when resuming
    // a generator.
    if (bce->sc->allLocalsAliased())
        bce->script->bindings.setAllLocalsAliased();

    if (!JSScript::fullyInitFromEmitter(cx, bce->script, bce))
        return false;

    /*
     * If this function is only expected to run once, mark the script so that
     * initializers created within it may be given more precise types.
     */
    if (runOnce) {
        bce->script->setTreatAsRunOnce();
        MOZ_ASSERT(!bce->script->hasRunOnce());
    }

    /* Initialize fun->script() so that the debugger has a valid fun->script(). */
    RootedFunction fun(cx, bce->script->functionNonDelazifying());
    MOZ_ASSERT(fun->isInterpreted());

    if (fun->isInterpretedLazy())
        fun->setUnlazifiedScript(bce->script);
    else
        fun->setScript(bce->script);

    bce->tellDebuggerAboutCompiledScript(cx);

    return true;
}
3195 static bool
3196 MaybeEmitVarDecl(ExclusiveContext* cx, BytecodeEmitter* bce, JSOp prologOp, ParseNode* pn,
3197 jsatomid* result)
3199 jsatomid atomIndex;
3201 if (!pn->pn_cookie.isFree()) {
3202 atomIndex = pn->pn_cookie.slot();
3203 } else {
3204 if (!bce->makeAtomIndex(pn->pn_atom, &atomIndex))
3205 return false;
3208 if (JOF_OPTYPE(pn->getOp()) == JOF_ATOM &&
3209 (!bce->sc->isFunctionBox() || bce->sc->asFunctionBox()->isHeavyweight()))
3211 bce->switchToProlog();
3212 if (!UpdateSourceCoordNotes(cx, bce, pn->pn_pos.begin))
3213 return false;
3214 if (!EmitIndexOp(cx, prologOp, atomIndex, bce))
3215 return false;
3216 bce->switchToMain();
3219 if (result)
3220 *result = atomIndex;
3221 return true;
/*
 * This enum tells EmitVariables and the destructuring functions how to emit
 * the given Parser::variables parse tree. In the base case, DefineVars, the
 * caller only wants variables to be defined in the prologue (if necessary).
 * For PushInitialValues, variable initializer expressions are evaluated and
 * left on the stack. For InitializeVars, the initializer expressions values
 * are assigned (to local variables) and popped.
 */
enum VarEmitOption
{
    DefineVars = 0,
    PushInitialValues = 1,
    InitializeVars = 2
};
3239 typedef bool
3240 (*DestructuringDeclEmitter)(ExclusiveContext* cx, BytecodeEmitter* bce, JSOp prologOp, ParseNode* pn);
3242 template <DestructuringDeclEmitter EmitName>
3243 static bool
3244 EmitDestructuringDeclsWithEmitter(ExclusiveContext* cx, BytecodeEmitter* bce, JSOp prologOp,
3245 ParseNode* pattern)
3247 if (pattern->isKind(PNK_ARRAY)) {
3248 for (ParseNode* element = pattern->pn_head; element; element = element->pn_next) {
3249 if (element->isKind(PNK_ELISION))
3250 continue;
3251 ParseNode* target = element;
3252 if (element->isKind(PNK_SPREAD)) {
3253 MOZ_ASSERT(element->pn_kid->isKind(PNK_NAME));
3254 target = element->pn_kid;
3256 if (target->isKind(PNK_NAME)) {
3257 if (!EmitName(cx, bce, prologOp, target))
3258 return false;
3259 } else {
3260 if (!EmitDestructuringDeclsWithEmitter<EmitName>(cx, bce, prologOp, target))
3261 return false;
3264 return true;
3267 MOZ_ASSERT(pattern->isKind(PNK_OBJECT));
3268 for (ParseNode* member = pattern->pn_head; member; member = member->pn_next) {
3269 MOZ_ASSERT(member->isKind(PNK_MUTATEPROTO) ||
3270 member->isKind(PNK_COLON) ||
3271 member->isKind(PNK_SHORTHAND));
3273 ParseNode* target = member->isKind(PNK_MUTATEPROTO) ? member->pn_kid : member->pn_right;
3275 if (target->isKind(PNK_NAME)) {
3276 if (!EmitName(cx, bce, prologOp, target))
3277 return false;
3278 } else {
3279 if (!EmitDestructuringDeclsWithEmitter<EmitName>(cx, bce, prologOp, target))
3280 return false;
3283 return true;
3286 bool
3287 EmitDestructuringDecl(ExclusiveContext* cx, BytecodeEmitter* bce, JSOp prologOp, ParseNode* pn)
3289 MOZ_ASSERT(pn->isKind(PNK_NAME));
3290 if (!BindNameToSlot(cx, bce, pn))
3291 return false;
3293 MOZ_ASSERT(!pn->isOp(JSOP_CALLEE));
3294 return MaybeEmitVarDecl(cx, bce, prologOp, pn, nullptr);
3297 static inline bool
3298 EmitDestructuringDecls(ExclusiveContext* cx, BytecodeEmitter* bce, JSOp prologOp,
3299 ParseNode* pattern)
3301 return EmitDestructuringDeclsWithEmitter<EmitDestructuringDecl>(cx, bce, prologOp, pattern);
3304 bool
3305 EmitInitializeDestructuringDecl(ExclusiveContext* cx, BytecodeEmitter* bce, JSOp prologOp,
3306 ParseNode* pn)
3308 MOZ_ASSERT(pn->isKind(PNK_NAME));
3309 MOZ_ASSERT(pn->isBound());
3310 return EmitVarOp(cx, pn, pn->getOp(), bce);
3313 // Emit code to initialize all destructured names to the value on the top of
3314 // the stack.
3315 static inline bool
3316 EmitInitializeDestructuringDecls(ExclusiveContext* cx, BytecodeEmitter* bce, JSOp prologOp,
3317 ParseNode* pattern)
3319 return EmitDestructuringDeclsWithEmitter<EmitInitializeDestructuringDecl>(cx, bce,
3320 prologOp, pattern);
3323 static bool
3324 EmitDestructuringOpsHelper(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pattern,
3325 VarEmitOption emitOption);
3328 * EmitDestructuringLHS assumes the to-be-destructured value has been pushed on
3329 * the stack and emits code to destructure a single lhs expression (either a
3330 * name or a compound []/{} expression).
3332 * If emitOption is InitializeVars, the to-be-destructured value is assigned to
3333 * locals and ultimately the initial slot is popped (-1 total depth change).
3335 * If emitOption is PushInitialValues, the to-be-destructured value is replaced
3336 * with the initial values of the N (where 0 <= N) variables assigned in the
3337 * lhs expression. (Same post-condition as EmitDestructuringOpsHelper)
3339 static bool
3340 EmitDestructuringLHS(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn, VarEmitOption emitOption)
3342 MOZ_ASSERT(emitOption != DefineVars);
3344 // Now emit the lvalue opcode sequence. If the lvalue is a nested
3345 // destructuring initialiser-form, call ourselves to handle it, then pop
3346 // the matched value. Otherwise emit an lvalue bytecode sequence followed
3347 // by an assignment op.
3348 if (pn->isKind(PNK_SPREAD))
3349 pn = pn->pn_kid;
3350 if (pn->isKind(PNK_ARRAY) || pn->isKind(PNK_OBJECT)) {
3351 if (!EmitDestructuringOpsHelper(cx, bce, pn, emitOption))
3352 return false;
3353 if (emitOption == InitializeVars) {
3354 // Per its post-condition, EmitDestructuringOpsHelper has left the
3355 // to-be-destructured value on top of the stack.
3356 if (Emit1(cx, bce, JSOP_POP) < 0)
3357 return false;
3359 } else if (emitOption == PushInitialValues) {
3360 // The lhs is a simple name so the to-be-destructured value is
3361 // its initial value and there is nothing to do.
3362 MOZ_ASSERT(pn->getOp() == JSOP_SETLOCAL || pn->getOp() == JSOP_INITLEXICAL);
3363 MOZ_ASSERT(pn->pn_dflags & PND_BOUND);
3364 } else {
3365 switch (pn->getKind()) {
3366 case PNK_NAME:
3367 if (!BindNameToSlot(cx, bce, pn))
3368 return false;
3370 switch (pn->getOp()) {
3371 case JSOP_SETNAME:
3372 case JSOP_STRICTSETNAME:
3373 case JSOP_SETGNAME:
3374 case JSOP_STRICTSETGNAME:
3375 case JSOP_SETCONST: {
3376 // This is like ordinary assignment, but with one difference.
3378 // In `a = b`, we first determine a binding for `a` (using
3379 // JSOP_BINDNAME or JSOP_BINDGNAME), then we evaluate `b`, then
3380 // a JSOP_SETNAME instruction.
3382 // In `[a] = [b]`, per spec, `b` is evaluated first, then we
3383 // determine a binding for `a`. Then we need to do assignment--
3384 // but the operands are on the stack in the wrong order for
3385 // JSOP_SETPROP, so we have to add a JSOP_SWAP.
3386 jsatomid atomIndex;
3387 if (!bce->makeAtomIndex(pn->pn_atom, &atomIndex))
3388 return false;
3390 if (!pn->isOp(JSOP_SETCONST)) {
3391 bool global = pn->isOp(JSOP_SETGNAME) || pn->isOp(JSOP_STRICTSETGNAME);
3392 JSOp bindOp = global ? JSOP_BINDGNAME : JSOP_BINDNAME;
3393 if (!EmitIndex32(cx, bindOp, atomIndex, bce))
3394 return false;
3395 if (Emit1(cx, bce, JSOP_SWAP) < 0)
3396 return false;
3399 if (!EmitIndexOp(cx, pn->getOp(), atomIndex, bce))
3400 return false;
3401 break;
3404 case JSOP_SETLOCAL:
3405 case JSOP_SETARG:
3406 case JSOP_INITLEXICAL:
3407 if (!EmitVarOp(cx, pn, pn->getOp(), bce))
3408 return false;
3409 break;
3411 default:
3412 MOZ_CRASH("EmitDestructuringLHS: bad name op");
3414 break;
3416 case PNK_DOT:
3418 // See the (PNK_NAME, JSOP_SETNAME) case above.
3420 // In `a.x = b`, `a` is evaluated first, then `b`, then a
3421 // JSOP_SETPROP instruction.
3423 // In `[a.x] = [b]`, per spec, `b` is evaluated before `a`. Then we
3424 // need a property set -- but the operands are on the stack in the
3425 // wrong order for JSOP_SETPROP, so we have to add a JSOP_SWAP.
3426 if (!EmitTree(cx, bce, pn->pn_expr))
3427 return false;
3428 if (Emit1(cx, bce, JSOP_SWAP) < 0)
3429 return false;
3430 JSOp setOp = bce->sc->strict ? JSOP_STRICTSETPROP : JSOP_SETPROP;
3431 if (!EmitAtomOp(cx, pn, setOp, bce))
3432 return false;
3433 break;
3436 case PNK_ELEM:
3438 // See the comment at `case PNK_DOT:` above. This case,
3439 // `[a[x]] = [b]`, is handled much the same way. The JSOP_SWAP
3440 // is emitted by EmitElemOperands.
3441 JSOp setOp = bce->sc->strict ? JSOP_STRICTSETELEM : JSOP_SETELEM;
3442 if (!EmitElemOp(cx, pn, setOp, bce))
3443 return false;
3444 break;
3447 case PNK_CALL:
3448 MOZ_ASSERT(pn->pn_xflags & PNX_SETCALL);
3449 if (!EmitTree(cx, bce, pn))
3450 return false;
3452 // Pop the call return value. Below, we pop the RHS too, balancing
3453 // the stack --- presumably for the benefit of bytecode
3454 // analysis. (The interpreter will never reach these instructions
3455 // since we just emitted JSOP_SETCALL, which always throws. It's
3456 // possible no analyses actually depend on this either.)
3457 if (Emit1(cx, bce, JSOP_POP) < 0)
3458 return false;
3459 break;
3461 default:
3462 MOZ_CRASH("EmitDestructuringLHS: bad lhs kind");
3465 // Pop the assigned value.
3466 if (Emit1(cx, bce, JSOP_POP) < 0)
3467 return false;
3470 return true;
3473 static bool EmitSpread(ExclusiveContext* cx, BytecodeEmitter* bce);
3474 static bool EmitIterator(ExclusiveContext* cx, BytecodeEmitter* bce);
3477 * EmitIteratorNext will pop iterator from the top of the stack.
3478 * It will push the result of |.next()| onto the stack.
3480 static bool
3481 EmitIteratorNext(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn=nullptr)
3483 MOZ_ASSERT(bce->emitterMode != BytecodeEmitter::SelfHosting,
3484 ".next() iteration is prohibited in self-hosted code because it "
3485 "can run user-modifiable iteration code");
3487 if (Emit1(cx, bce, JSOP_DUP) < 0) // ... ITER ITER
3488 return false;
3489 if (!EmitAtomOp(cx, cx->names().next, JSOP_CALLPROP, bce)) // ... ITER NEXT
3490 return false;
3491 if (Emit1(cx, bce, JSOP_SWAP) < 0) // ... NEXT ITER
3492 return false;
3493 if (EmitCall(cx, bce, JSOP_CALL, 0, pn) < 0) // ... RESULT
3494 return false;
3495 CheckTypeSet(cx, bce, JSOP_CALL);
3496 return true;
3499 static bool
3500 EmitDestructuringOpsArrayHelper(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pattern,
3501 VarEmitOption emitOption)
3503 MOZ_ASSERT(pattern->isKind(PNK_ARRAY));
3504 MOZ_ASSERT(pattern->isArity(PN_LIST));
3505 MOZ_ASSERT(bce->stackDepth != 0);
3508 * Use an iterator to destructure the RHS, instead of index lookup.
3509 * InitializeVars expects us to leave the *original* value on the stack.
3511 if (emitOption == InitializeVars) {
3512 if (Emit1(cx, bce, JSOP_DUP) < 0) // ... OBJ OBJ
3513 return false;
3515 if (!EmitIterator(cx, bce)) // ... OBJ? ITER
3516 return false;
3517 bool needToPopIterator = true;
3519 for (ParseNode* member = pattern->pn_head; member; member = member->pn_next) {
3521 * Now push the property name currently being matched, which is the
3522 * current property name "label" on the left of a colon in the object
3523 * initializer.
3525 if (member->isKind(PNK_SPREAD)) {
3526 /* Create a new array with the rest of the iterator */
3527 ptrdiff_t off = EmitN(cx, bce, JSOP_NEWARRAY, 3); // ... OBJ? ITER ARRAY
3528 if (off < 0)
3529 return false;
3530 CheckTypeSet(cx, bce, JSOP_NEWARRAY);
3531 jsbytecode* pc = bce->code(off);
3532 SET_UINT24(pc, 0);
3534 if (!EmitNumberOp(cx, 0, bce)) // ... OBJ? ITER ARRAY INDEX
3535 return false;
3536 if (!EmitSpread(cx, bce)) // ... OBJ? ARRAY INDEX
3537 return false;
3538 if (Emit1(cx, bce, JSOP_POP) < 0) // ... OBJ? ARRAY
3539 return false;
3540 needToPopIterator = false;
3541 } else {
3542 if (Emit1(cx, bce, JSOP_DUP) < 0) // ... OBJ? ITER ITER
3543 return false;
3544 if (!EmitIteratorNext(cx, bce, pattern)) // ... OBJ? ITER RESULT
3545 return false;
3546 if (Emit1(cx, bce, JSOP_DUP) < 0) // ... OBJ? ITER RESULT RESULT
3547 return false;
3548 if (!EmitAtomOp(cx, cx->names().done, JSOP_GETPROP, bce)) // ... OBJ? ITER RESULT DONE?
3549 return false;
3551 // Emit (result.done ? undefined : result.value)
3552 // This is mostly copied from EmitConditionalExpression, except that this code
3553 // does not push new values onto the stack.
3554 ptrdiff_t noteIndex = NewSrcNote(cx, bce, SRC_COND);
3555 if (noteIndex < 0)
3556 return false;
3557 ptrdiff_t beq = EmitJump(cx, bce, JSOP_IFEQ, 0);
3558 if (beq < 0)
3559 return false;
3561 if (Emit1(cx, bce, JSOP_POP) < 0) // ... OBJ? ITER
3562 return false;
3563 if (Emit1(cx, bce, JSOP_UNDEFINED) < 0) // ... OBJ? ITER UNDEFINED
3564 return false;
3566 /* Jump around else, fixup the branch, emit else, fixup jump. */
3567 ptrdiff_t jmp = EmitJump(cx, bce, JSOP_GOTO, 0);
3568 if (jmp < 0)
3569 return false;
3570 SetJumpOffsetAt(bce, beq);
3572 if (!EmitAtomOp(cx, cx->names().value, JSOP_GETPROP, bce)) // ... OBJ? ITER VALUE
3573 return false;
3575 SetJumpOffsetAt(bce, jmp);
3576 if (!SetSrcNoteOffset(cx, bce, noteIndex, 0, jmp - beq))
3577 return false;
3580 // Destructure into the pattern the element contains.
3581 ParseNode* subpattern = member;
3582 if (subpattern->isKind(PNK_ELISION)) {
3583 // The value destructuring into an elision just gets ignored.
3584 if (Emit1(cx, bce, JSOP_POP) < 0) // ... OBJ? ITER
3585 return false;
3586 continue;
3589 int32_t depthBefore = bce->stackDepth;
3590 if (!EmitDestructuringLHS(cx, bce, subpattern, emitOption))
3591 return false;
3593 if (emitOption == PushInitialValues && needToPopIterator) {
3595 * After '[x,y]' in 'let ([[x,y], z] = o)', the stack is
3596 * | to-be-destructured-value | x | y |
3597 * The goal is:
3598 * | x | y | z |
3599 * so emit a pick to produce the intermediate state
3600 * | x | y | to-be-destructured-value |
3601 * before destructuring z. This gives the loop invariant that
3602 * the to-be-destructured-value is always on top of the stack.
3604 MOZ_ASSERT((bce->stackDepth - bce->stackDepth) >= -1);
3605 uint32_t pickDistance = (uint32_t)((bce->stackDepth + 1) - depthBefore);
3606 if (pickDistance > 0) {
3607 if (pickDistance > UINT8_MAX) {
3608 bce->reportError(subpattern, JSMSG_TOO_MANY_LOCALS);
3609 return false;
3611 if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)pickDistance) < 0)
3612 return false;
3617 if (needToPopIterator && Emit1(cx, bce, JSOP_POP) < 0)
3618 return false;
3620 return true;
3623 static bool
3624 EmitDestructuringOpsObjectHelper(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pattern,
3625 VarEmitOption emitOption)
3627 MOZ_ASSERT(pattern->isKind(PNK_OBJECT));
3628 MOZ_ASSERT(pattern->isArity(PN_LIST));
3630 MOZ_ASSERT(bce->stackDepth != 0); // ... OBJ
3632 for (ParseNode* member = pattern->pn_head; member; member = member->pn_next) {
3633 // Duplicate the value being destructured to use as a reference base.
3634 if (Emit1(cx, bce, JSOP_DUP) < 0) // ... OBJ OBJ
3635 return false;
3637 // Now push the property name currently being matched, which is the
3638 // current property name "label" on the left of a colon in the object
3639 // initialiser.
3640 bool needsGetElem = true;
3642 ParseNode* subpattern;
3643 if (member->isKind(PNK_MUTATEPROTO)) {
3644 if (!EmitAtomOp(cx, cx->names().proto, JSOP_GETPROP, bce)) // ... OBJ PROP
3645 return false;
3646 needsGetElem = false;
3647 subpattern = member->pn_kid;
3648 } else {
3649 MOZ_ASSERT(member->isKind(PNK_COLON) || member->isKind(PNK_SHORTHAND));
3651 ParseNode* key = member->pn_left;
3652 if (key->isKind(PNK_NUMBER)) {
3653 if (!EmitNumberOp(cx, key->pn_dval, bce)) // ... OBJ OBJ KEY
3654 return false;
3655 } else if (key->isKind(PNK_NAME) || key->isKind(PNK_STRING)) {
3656 PropertyName* name = key->pn_atom->asPropertyName();
3658 // The parser already checked for atoms representing indexes and
3659 // used PNK_NUMBER instead, but also watch for ids which TI treats
3660 // as indexes for simplification of downstream analysis.
3661 jsid id = NameToId(name);
3662 if (id != types::IdToTypeId(id)) {
3663 if (!EmitTree(cx, bce, key)) // ... OBJ OBJ KEY
3664 return false;
3665 } else {
3666 if (!EmitAtomOp(cx, name, JSOP_GETPROP, bce)) // ...OBJ PROP
3667 return false;
3668 needsGetElem = false;
3670 } else {
3671 MOZ_ASSERT(key->isKind(PNK_COMPUTED_NAME));
3672 if (!EmitTree(cx, bce, key->pn_kid)) // ... OBJ OBJ KEY
3673 return false;
3676 subpattern = member->pn_right;
3679 // Get the property value if not done already.
3680 if (needsGetElem && !EmitElemOpBase(cx, bce, JSOP_GETELEM)) // ... OBJ PROP
3681 return false;
3683 // Destructure PROP per this member's subpattern.
3684 int32_t depthBefore = bce->stackDepth;
3685 if (!EmitDestructuringLHS(cx, bce, subpattern, emitOption))
3686 return false;
3688 // If emitOption is InitializeVars, destructuring initialized each
3689 // target in the subpattern's LHS as it went, then popped PROP. We've
3690 // correctly returned to the loop-entry stack, and we continue to the
3691 // next member.
3692 if (emitOption == InitializeVars) // ... OBJ
3693 continue;
3695 MOZ_ASSERT(emitOption == PushInitialValues);
3697 // EmitDestructuringLHS removed PROP, and it pushed a value per target
3698 // name in LHS (for |emitOption == PushInitialValues| only makes sense
3699 // when multiple values need to be pushed onto the stack to initialize
3700 // a single lexical scope). It also preserved OBJ deep in the stack as
3701 // the original object to be destructed into remaining target names in
3702 // the LHS object pattern. (We use PushInitialValues *only* as part of
3703 // SpiderMonkey's proprietary let block statements, which assign their
3704 // targets all in a single go [akin to Scheme's let, and distinct from
3705 // let*/letrec].) Thus for:
3707 // let ({arr: [x, y], z} = obj) { ... }
3709 // we have this stack after the above acts upon the [x, y] subpattern:
3711 // ... OBJ x y
3713 // (where of course x = obj.arr[0] and y = obj.arr[1], and []-indexing
3714 // is really iteration-indexing). We want to have:
3716 // ... x y OBJ
3718 // so that we can continue, ready to destruct z from OBJ. Pick OBJ out
3719 // of the stack, moving it to the top, to accomplish this.
3720 MOZ_ASSERT((bce->stackDepth - bce->stackDepth) >= -1);
3721 uint32_t pickDistance = (uint32_t)((bce->stackDepth + 1) - depthBefore);
3722 if (pickDistance > 0) {
3723 if (pickDistance > UINT8_MAX) {
3724 bce->reportError(subpattern, JSMSG_TOO_MANY_LOCALS);
3725 return false;
3727 if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)pickDistance) < 0)
3728 return false;
3732 if (emitOption == PushInitialValues) {
3733 // Per the above loop invariant, the value being destructured into this
3734 // object pattern is atop the stack. Pop it to achieve the
3735 // post-condition.
3736 if (Emit1(cx, bce, JSOP_POP) < 0) // ... <pattern's target name values, seriatim>
3737 return false;
3740 return true;
3744 * Recursive helper for EmitDestructuringOps.
3745 * EmitDestructuringOpsHelper assumes the to-be-destructured value has been
3746 * pushed on the stack and emits code to destructure each part of a [] or {}
3747 * lhs expression.
3749 * If emitOption is InitializeVars, the initial to-be-destructured value is
3750 * left untouched on the stack and the overall depth is not changed.
3752 * If emitOption is PushInitialValues, the to-be-destructured value is replaced
3753 * with the initial values of the N (where 0 <= N) variables assigned in the
3754 * lhs expression. (Same post-condition as EmitDestructuringLHS)
3756 static bool
3757 EmitDestructuringOpsHelper(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pattern,
3758 VarEmitOption emitOption)
3760 MOZ_ASSERT(emitOption != DefineVars);
3762 if (pattern->isKind(PNK_ARRAY))
3763 return EmitDestructuringOpsArrayHelper(cx, bce, pattern, emitOption);
3764 return EmitDestructuringOpsObjectHelper(cx, bce, pattern, emitOption);
3767 static bool
3768 EmitDestructuringOps(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pattern,
3769 bool isLet = false)
3772 * Call our recursive helper to emit the destructuring assignments and
3773 * related stack manipulations.
3775 VarEmitOption emitOption = isLet ? PushInitialValues : InitializeVars;
3776 return EmitDestructuringOpsHelper(cx, bce, pattern, emitOption);
3779 static bool
3780 EmitTemplateString(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
3782 MOZ_ASSERT(pn->isArity(PN_LIST));
3784 for (ParseNode* pn2 = pn->pn_head; pn2 != NULL; pn2 = pn2->pn_next) {
3785 if (pn2->getKind() != PNK_STRING && pn2->getKind() != PNK_TEMPLATE_STRING) {
3786 // We update source notes before emitting the expression
3787 if (!UpdateSourceCoordNotes(cx, bce, pn2->pn_pos.begin))
3788 return false;
3790 if (!EmitTree(cx, bce, pn2))
3791 return false;
3793 if (pn2->getKind() != PNK_STRING && pn2->getKind() != PNK_TEMPLATE_STRING) {
3794 // We need to convert the expression to a string
3795 if (Emit1(cx, bce, JSOP_TOSTRING) < 0)
3796 return false;
3799 if (pn2 != pn->pn_head) {
3800 // We've pushed two strings onto the stack. Add them together, leaving just one.
3801 if (Emit1(cx, bce, JSOP_ADD) < 0)
3802 return false;
3806 return true;
3809 static bool
3810 EmitVariables(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn, VarEmitOption emitOption,
3811 bool isLetExpr = false)
3813 MOZ_ASSERT(pn->isArity(PN_LIST));
3814 MOZ_ASSERT(isLetExpr == (emitOption == PushInitialValues));
3816 ParseNode* next;
3817 for (ParseNode* pn2 = pn->pn_head; ; pn2 = next) {
3818 if (!UpdateSourceCoordNotes(cx, bce, pn2->pn_pos.begin))
3819 return false;
3820 next = pn2->pn_next;
3822 ParseNode* pn3;
3823 if (!pn2->isKind(PNK_NAME)) {
3824 if (pn2->isKind(PNK_ARRAY) || pn2->isKind(PNK_OBJECT)) {
3825 // If the emit option is DefineVars, emit variable binding
3826 // ops, but not destructuring ops. The parser (see
3827 // Parser::variables) has ensured that our caller will be the
3828 // PNK_FOR/PNK_FORIN/PNK_FOROF case in EmitTree (we don't have
3829 // to worry about this being a variable declaration, as
3830 // destructuring declarations without initializers, e.g., |var
3831 // [x]|, are not legal syntax), and that case will emit the
3832 // destructuring code only after emitting an enumerating
3833 // opcode and a branch that tests whether the enumeration
3834 // ended. Thus, each iteration's assignment is responsible for
3835 // initializing, and nothing needs to be done here.
3837 // Otherwise this is emitting destructuring let binding
3838 // initialization for a legacy comprehension expression. See
3839 // EmitForInOrOfVariables.
3840 MOZ_ASSERT(pn->pn_count == 1);
3841 if (emitOption == DefineVars) {
3842 if (!EmitDestructuringDecls(cx, bce, pn->getOp(), pn2))
3843 return false;
3844 } else {
3845 // Lexical bindings cannot be used before they are
3846 // initialized. Similar to the JSOP_INITLEXICAL case below.
3847 MOZ_ASSERT(emitOption != DefineVars);
3848 MOZ_ASSERT_IF(emitOption == InitializeVars, pn->pn_xflags & PNX_POPVAR);
3849 if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)
3850 return false;
3851 if (!EmitInitializeDestructuringDecls(cx, bce, pn->getOp(), pn2))
3852 return false;
3854 break;
3858 * A destructuring initialiser assignment preceded by var will
3859 * never occur to the left of 'in' in a for-in loop. As with 'for
3860 * (var x = i in o)...', this will cause the entire 'var [a, b] =
3861 * i' to be hoisted out of the loop.
3863 MOZ_ASSERT(pn2->isKind(PNK_ASSIGN));
3864 MOZ_ASSERT(pn2->isOp(JSOP_NOP));
3865 MOZ_ASSERT(emitOption != DefineVars);
3868 * To allow the front end to rewrite var f = x; as f = x; when a
3869 * function f(){} precedes the var, detect simple name assignment
3870 * here and initialize the name.
3872 if (pn2->pn_left->isKind(PNK_NAME)) {
3873 pn3 = pn2->pn_right;
3874 pn2 = pn2->pn_left;
3875 goto do_name;
3878 pn3 = pn2->pn_left;
3879 if (!EmitDestructuringDecls(cx, bce, pn->getOp(), pn3))
3880 return false;
3882 if (!EmitTree(cx, bce, pn2->pn_right))
3883 return false;
3885 if (!EmitDestructuringOps(cx, bce, pn3, isLetExpr))
3886 return false;
3888 /* If we are not initializing, nothing to pop. */
3889 if (emitOption != InitializeVars) {
3890 if (next)
3891 continue;
3892 break;
3894 goto emit_note_pop;
3898 * Load initializer early to share code above that jumps to do_name.
3899 * NB: if this var redeclares an existing binding, then pn2 is linked
3900 * on its definition's use-chain and pn_expr has been overlayed with
3901 * pn_lexdef.
3903 pn3 = pn2->maybeExpr();
3905 do_name:
3906 if (!BindNameToSlot(cx, bce, pn2))
3907 return false;
3910 JSOp op;
3911 op = pn2->getOp();
3912 MOZ_ASSERT(op != JSOP_CALLEE);
3913 MOZ_ASSERT(!pn2->pn_cookie.isFree() || !pn->isOp(JSOP_NOP));
3915 jsatomid atomIndex;
3916 if (!MaybeEmitVarDecl(cx, bce, pn->getOp(), pn2, &atomIndex))
3917 return false;
3919 if (pn3) {
3920 MOZ_ASSERT(emitOption != DefineVars);
3921 if (op == JSOP_SETNAME ||
3922 op == JSOP_STRICTSETNAME ||
3923 op == JSOP_SETGNAME ||
3924 op == JSOP_STRICTSETGNAME ||
3925 op == JSOP_SETINTRINSIC)
3927 MOZ_ASSERT(emitOption != PushInitialValues);
3928 JSOp bindOp;
3929 if (op == JSOP_SETNAME || op == JSOP_STRICTSETNAME)
3930 bindOp = JSOP_BINDNAME;
3931 else if (op == JSOP_SETGNAME || op == JSOP_STRICTSETGNAME)
3932 bindOp = JSOP_BINDGNAME;
3933 else
3934 bindOp = JSOP_BINDINTRINSIC;
3935 if (!EmitIndex32(cx, bindOp, atomIndex, bce))
3936 return false;
3939 bool oldEmittingForInit = bce->emittingForInit;
3940 bce->emittingForInit = false;
3941 if (!EmitTree(cx, bce, pn3))
3942 return false;
3943 bce->emittingForInit = oldEmittingForInit;
3944 } else if (op == JSOP_INITLEXICAL || isLetExpr) {
3945 // 'let' bindings cannot be used before they are
3946 // initialized. JSOP_INITLEXICAL distinguishes the binding site.
3947 MOZ_ASSERT(emitOption != DefineVars);
3948 MOZ_ASSERT_IF(emitOption == InitializeVars, pn->pn_xflags & PNX_POPVAR);
3949 if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)
3950 return false;
3953 // If we are not initializing, nothing to pop. If we are initializing
3954 // lets, we must emit the pops.
3955 if (emitOption != InitializeVars) {
3956 if (next)
3957 continue;
3958 break;
3961 MOZ_ASSERT_IF(pn2->isDefn(), pn3 == pn2->pn_expr);
3962 if (!pn2->pn_cookie.isFree()) {
3963 if (!EmitVarOp(cx, pn2, op, bce))
3964 return false;
3965 } else {
3966 if (!EmitIndexOp(cx, op, atomIndex, bce))
3967 return false;
3970 emit_note_pop:
3971 if (!next)
3972 break;
3973 if (Emit1(cx, bce, JSOP_POP) < 0)
3974 return false;
3977 if (pn->pn_xflags & PNX_POPVAR) {
3978 if (Emit1(cx, bce, JSOP_POP) < 0)
3979 return false;
3982 return true;
3985 static bool
3986 EmitAssignment(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* lhs, JSOp op, ParseNode* rhs)
3989 * Check left operand type and generate specialized code for it.
3990 * Specialize to avoid ECMA "reference type" values on the operand
3991 * stack, which impose pervasive runtime "GetValue" costs.
3993 jsatomid atomIndex = (jsatomid) -1;
3994 jsbytecode offset = 1;
3996 switch (lhs->getKind()) {
3997 case PNK_NAME:
3998 if (!BindNameToSlot(cx, bce, lhs))
3999 return false;
4000 if (lhs->pn_cookie.isFree()) {
4001 if (!bce->makeAtomIndex(lhs->pn_atom, &atomIndex))
4002 return false;
4003 if (!lhs->isConst()) {
4004 JSOp bindOp;
4005 if (lhs->isOp(JSOP_SETNAME) || lhs->isOp(JSOP_STRICTSETNAME))
4006 bindOp = JSOP_BINDNAME;
4007 else if (lhs->isOp(JSOP_SETGNAME) || lhs->isOp(JSOP_STRICTSETGNAME))
4008 bindOp = JSOP_BINDGNAME;
4009 else
4010 bindOp = JSOP_BINDINTRINSIC;
4011 if (!EmitIndex32(cx, bindOp, atomIndex, bce))
4012 return false;
4013 offset++;
4016 break;
4017 case PNK_DOT:
4018 if (!EmitTree(cx, bce, lhs->expr()))
4019 return false;
4020 offset++;
4021 if (!bce->makeAtomIndex(lhs->pn_atom, &atomIndex))
4022 return false;
4023 break;
4024 case PNK_ELEM:
4025 MOZ_ASSERT(lhs->isArity(PN_BINARY));
4026 if (!EmitTree(cx, bce, lhs->pn_left))
4027 return false;
4028 if (!EmitTree(cx, bce, lhs->pn_right))
4029 return false;
4030 offset += 2;
4031 break;
4032 case PNK_ARRAY:
4033 case PNK_OBJECT:
4034 break;
4035 case PNK_CALL:
4036 MOZ_ASSERT(lhs->pn_xflags & PNX_SETCALL);
4037 if (!EmitTree(cx, bce, lhs))
4038 return false;
4039 if (Emit1(cx, bce, JSOP_POP) < 0)
4040 return false;
4041 break;
4042 default:
4043 MOZ_ASSERT(0);
4046 if (op != JSOP_NOP) {
4047 MOZ_ASSERT(rhs);
4048 switch (lhs->getKind()) {
4049 case PNK_NAME:
4050 if (lhs->isConst()) {
4051 if (lhs->isOp(JSOP_CALLEE)) {
4052 if (Emit1(cx, bce, JSOP_CALLEE) < 0)
4053 return false;
4054 } else if (lhs->isOp(JSOP_GETNAME) || lhs->isOp(JSOP_GETGNAME)) {
4055 if (!EmitIndex32(cx, lhs->getOp(), atomIndex, bce))
4056 return false;
4057 } else {
4058 MOZ_ASSERT(JOF_OPTYPE(lhs->getOp()) != JOF_ATOM);
4059 if (!EmitVarOp(cx, lhs, lhs->getOp(), bce))
4060 return false;
4062 } else if (lhs->isOp(JSOP_SETNAME) || lhs->isOp(JSOP_STRICTSETNAME)) {
4063 if (Emit1(cx, bce, JSOP_DUP) < 0)
4064 return false;
4065 if (!EmitIndex32(cx, JSOP_GETXPROP, atomIndex, bce))
4066 return false;
4067 } else if (lhs->isOp(JSOP_SETGNAME) || lhs->isOp(JSOP_STRICTSETGNAME)) {
4068 MOZ_ASSERT(lhs->pn_cookie.isFree());
4069 if (!EmitAtomOp(cx, lhs, JSOP_GETGNAME, bce))
4070 return false;
4071 } else if (lhs->isOp(JSOP_SETINTRINSIC)) {
4072 MOZ_ASSERT(lhs->pn_cookie.isFree());
4073 if (!EmitAtomOp(cx, lhs, JSOP_GETINTRINSIC, bce))
4074 return false;
4075 } else {
4076 JSOp op;
4077 switch (lhs->getOp()) {
4078 case JSOP_SETARG: op = JSOP_GETARG; break;
4079 case JSOP_SETLOCAL: op = JSOP_GETLOCAL; break;
4080 case JSOP_SETALIASEDVAR: op = JSOP_GETALIASEDVAR; break;
4081 default: MOZ_CRASH("Bad op");
4083 if (!EmitVarOp(cx, lhs, op, bce))
4084 return false;
4086 break;
4087 case PNK_DOT: {
4088 if (Emit1(cx, bce, JSOP_DUP) < 0)
4089 return false;
4090 bool isLength = (lhs->pn_atom == cx->names().length);
4091 if (!EmitIndex32(cx, isLength ? JSOP_LENGTH : JSOP_GETPROP, atomIndex, bce))
4092 return false;
4093 break;
4095 case PNK_ELEM:
4096 if (Emit1(cx, bce, JSOP_DUP2) < 0)
4097 return false;
4098 if (!EmitElemOpBase(cx, bce, JSOP_GETELEM))
4099 return false;
4100 break;
4101 case PNK_CALL:
4103 * We just emitted a JSOP_SETCALL (which will always throw) and
4104 * popped the call's return value. Push a random value to make sure
4105 * the stack depth is correct.
4107 MOZ_ASSERT(lhs->pn_xflags & PNX_SETCALL);
4108 if (Emit1(cx, bce, JSOP_NULL) < 0)
4109 return false;
4110 break;
4111 default:;
4115 /* Now emit the right operand (it may affect the namespace). */
4116 if (rhs) {
4117 if (!EmitTree(cx, bce, rhs))
4118 return false;
4119 } else {
4121 * The value to assign is the next enumeration value in a for-in or
4122 * for-of loop. That value has already been emitted: by JSOP_ITERNEXT
4123 * in the for-in case, or via a GETPROP "value" on the result object in
4124 * the for-of case. If offset == 1, that slot is already at the top of
4125 * the stack. Otherwise, rearrange the stack to put that value on top.
4127 if (offset != 1 && Emit2(cx, bce, JSOP_PICK, offset - 1) < 0)
4128 return false;
4131 /* If += etc., emit the binary operator with a source note. */
4132 if (op != JSOP_NOP) {
4134 * Take care to avoid SRC_ASSIGNOP if the left-hand side is a const
4135 * declared in the current compilation unit, as in this case (just
4136 * a bit further below) we will avoid emitting the assignment op.
4138 if (!lhs->isKind(PNK_NAME) || !lhs->isConst()) {
4139 if (NewSrcNote(cx, bce, SRC_ASSIGNOP) < 0)
4140 return false;
4142 if (Emit1(cx, bce, op) < 0)
4143 return false;
4146 /* Finally, emit the specialized assignment bytecode. */
4147 switch (lhs->getKind()) {
4148 case PNK_NAME:
4149 if (lhs->isOp(JSOP_SETARG) || lhs->isOp(JSOP_SETLOCAL) || lhs->isOp(JSOP_SETALIASEDVAR)) {
4150 if (!EmitVarOp(cx, lhs, lhs->getOp(), bce))
4151 return false;
4152 } else {
4153 if (!EmitIndexOp(cx, lhs->getOp(), atomIndex, bce))
4154 return false;
4156 break;
4157 case PNK_DOT:
4159 JSOp setOp = bce->sc->strict ? JSOP_STRICTSETPROP : JSOP_SETPROP;
4160 if (!EmitIndexOp(cx, setOp, atomIndex, bce))
4161 return false;
4162 break;
4164 case PNK_CALL:
4165 /* Do nothing. The JSOP_SETCALL we emitted will always throw. */
4166 MOZ_ASSERT(lhs->pn_xflags & PNX_SETCALL);
4167 break;
4168 case PNK_ELEM:
4170 JSOp setOp = bce->sc->strict ? JSOP_STRICTSETELEM : JSOP_SETELEM;
4171 if (Emit1(cx, bce, setOp) < 0)
4172 return false;
4173 break;
4175 case PNK_ARRAY:
4176 case PNK_OBJECT:
4177 if (!EmitDestructuringOps(cx, bce, lhs))
4178 return false;
4179 break;
4180 default:
4181 MOZ_ASSERT(0);
4183 return true;
/*
 * Try to fold this parse node to a compile-time constant value.
 *
 * On success, *vp receives either the constant value or the magic value
 * JS_GENERIC_MAGIC, which means "not constant under the given object policy"
 * (callers must check for it before using *vp). Returns false only on OOM or
 * other engine failure.
 *
 * allowObjects controls whether object/array literals may be folded:
 *   AllowObjects          - fold them (recursively).
 *   DontAllowNestedObjects- fold this level, but not literals nested inside.
 *   DontAllowObjects      - report JS_GENERIC_MAGIC for any object literal.
 */
bool
ParseNode::getConstantValue(ExclusiveContext* cx, AllowConstantObjects allowObjects, MutableHandleValue vp)
{
    switch (getKind()) {
      case PNK_NUMBER:
        vp.setNumber(pn_dval);
        return true;
      case PNK_TEMPLATE_STRING:
      case PNK_STRING:
        vp.setString(pn_atom);
        return true;
      case PNK_TRUE:
        vp.setBoolean(true);
        return true;
      case PNK_FALSE:
        vp.setBoolean(false);
        return true;
      case PNK_NULL:
        vp.setNull();
        return true;
      case PNK_CALLSITEOBJ:
      case PNK_ARRAY: {
        RootedValue value(cx);
        unsigned count;
        ParseNode* pn;

        // Object policy: refuse, or allow this level but not deeper ones.
        if (allowObjects == DontAllowObjects) {
            vp.setMagic(JS_GENERIC_MAGIC);
            return true;
        }
        if (allowObjects == DontAllowNestedObjects)
            allowObjects = DontAllowObjects;

        // For a call site object the first list element is the raw-strings
        // array, which is not part of the cooked array being built here.
        if (getKind() == PNK_CALLSITEOBJ) {
            count = pn_count - 1;
            pn = pn_head->pn_next;
        } else {
            MOZ_ASSERT(isOp(JSOP_NEWINIT) && !(pn_xflags & PNX_NONCONST));
            count = pn_count;
            pn = pn_head;
        }

        RootedArrayObject obj(cx, NewDenseFullyAllocatedArray(cx, count, nullptr, MaybeSingletonObject));
        if (!obj)
            return false;

        unsigned idx = 0;
        RootedId id(cx);
        for (; pn; idx++, pn = pn->pn_next) {
            if (!pn->getConstantValue(cx, allowObjects, &value))
                return false;
            // A non-constant element makes the whole array non-constant.
            if (value.isMagic(JS_GENERIC_MAGIC)) {
                vp.setMagic(JS_GENERIC_MAGIC);
                return true;
            }
            id = INT_TO_JSID(idx);
            if (!JSObject::defineGeneric(cx, obj, id, value, nullptr, nullptr, JSPROP_ENUMERATE))
                return false;
        }
        MOZ_ASSERT(idx == count);

        types::FixArrayType(cx, obj);
        vp.setObject(*obj);
        return true;
      }
      case PNK_OBJECT: {
        MOZ_ASSERT(isOp(JSOP_NEWINIT));
        MOZ_ASSERT(!(pn_xflags & PNX_NONCONST));

        // Same object policy handling as the array case above.
        if (allowObjects == DontAllowObjects) {
            vp.setMagic(JS_GENERIC_MAGIC);
            return true;
        }
        if (allowObjects == DontAllowNestedObjects)
            allowObjects = DontAllowObjects;

        gc::AllocKind kind = GuessObjectGCKind(pn_count);
        RootedPlainObject obj(cx,
            NewBuiltinClassInstance<PlainObject>(cx, kind, MaybeSingletonObject));
        if (!obj)
            return false;

        RootedValue value(cx), idvalue(cx);
        for (ParseNode* pn = pn_head; pn; pn = pn->pn_next) {
            // Each list element is a key/value pair: pn_left/pn_right.
            if (!pn->pn_right->getConstantValue(cx, allowObjects, &value))
                return false;
            if (value.isMagic(JS_GENERIC_MAGIC)) {
                vp.setMagic(JS_GENERIC_MAGIC);
                return true;
            }

            ParseNode* pnid = pn->pn_left;
            if (pnid->isKind(PNK_NUMBER)) {
                idvalue = NumberValue(pnid->pn_dval);
            } else {
                MOZ_ASSERT(pnid->isKind(PNK_NAME) || pnid->isKind(PNK_STRING));
                MOZ_ASSERT(pnid->pn_atom != cx->names().proto);
                idvalue = StringValue(pnid->pn_atom);
            }

            // Prefer defineElement for index-shaped keys so the object gets
            // dense elements instead of named properties.
            uint32_t index;
            if (IsDefinitelyIndex(idvalue, &index)) {
                if (!JSObject::defineElement(cx, obj, index, value, nullptr, nullptr,
                                             JSPROP_ENUMERATE))
                {
                    return false;
                }
                continue;
            }

            JSAtom* name = ToAtom<CanGC>(cx, idvalue);
            if (!name)
                return false;

            // The atomized key may still turn out to be an index.
            if (name->isIndex(&index)) {
                if (!JSObject::defineElement(cx, obj, index, value,
                                             nullptr, nullptr, JSPROP_ENUMERATE))
                    return false;
            } else {
                if (!JSObject::defineProperty(cx, obj, name->asPropertyName(), value,
                                              nullptr, nullptr, JSPROP_ENUMERATE))
                {
                    return false;
                }
            }
        }

        types::FixObjectType(cx, obj);
        vp.setObject(*obj);
        return true;
      }
      default:
        MOZ_CRASH("Unexpected node");
    }
    return false;
}
4324 static bool
4325 EmitSingletonInitialiser(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
4327 RootedValue value(cx);
4328 if (!pn->getConstantValue(cx, ParseNode::AllowObjects, &value))
4329 return false;
4331 RootedNativeObject obj(cx, &value.toObject().as<NativeObject>());
4332 if (!obj->is<ArrayObject>() && !JSObject::setSingletonType(cx, obj))
4333 return false;
4335 ObjectBox* objbox = bce->parser->newObjectBox(obj);
4336 if (!objbox)
4337 return false;
4339 return EmitObjectOp(cx, objbox, JSOP_OBJECT, bce);
4342 static bool
4343 EmitCallSiteObject(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
4345 RootedValue value(cx);
4346 if (!pn->getConstantValue(cx, ParseNode::AllowObjects, &value))
4347 return false;
4349 MOZ_ASSERT(value.isObject());
4351 ObjectBox* objbox1 = bce->parser->newObjectBox(&value.toObject().as<NativeObject>());
4352 if (!objbox1)
4353 return false;
4355 if (!pn->as<CallSiteNode>().getRawArrayValue(cx, &value))
4356 return false;
4358 MOZ_ASSERT(value.isObject());
4360 ObjectBox* objbox2 = bce->parser->newObjectBox(&value.toObject().as<NativeObject>());
4361 if (!objbox2)
4362 return false;
4364 return EmitObjectPairOp(cx, objbox1, objbox2, JSOP_CALLSITEOBJ, bce);
/* See the SRC_FOR source note offsetBias comments later in this file. */
/* The offsetBias of 1 relies on both of these opcodes being one byte long. */
JS_STATIC_ASSERT(JSOP_NOP_LENGTH == 1);
JS_STATIC_ASSERT(JSOP_POP_LENGTH == 1);
namespace {

/*
 * RAII guard that bumps bce->emitLevel for the dynamic extent of a scope
 * and restores it on exit (including early returns).
 */
class EmitLevelManager
{
    BytecodeEmitter* bce;
  public:
    explicit EmitLevelManager(BytecodeEmitter* bce) : bce(bce) { bce->emitLevel++; }
    ~EmitLevelManager() { bce->emitLevel--; }
};

} /* anonymous namespace */
/*
 * Emit bytecode for one catch clause: bind the pending exception to the
 * catch variable (possibly destructuring), evaluate the optional catch
 * guard, and emit the catch body. pn is the PNK_CATCH node:
 * pn_kid1 = catch binding, pn_kid2 = optional guard, pn_kid3 = body.
 */
static bool
EmitCatch(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
{
    /*
     * Morph STMT_BLOCK to STMT_CATCH, note the block entry code offset,
     * and save the block object atom.
     */
    StmtInfoBCE* stmt = bce->topStmt;
    MOZ_ASSERT(stmt->type == STMT_BLOCK && stmt->isBlockScope);
    stmt->type = STMT_CATCH;

    /* Go up one statement info record to the TRY or FINALLY record. */
    stmt = stmt->down;
    MOZ_ASSERT(stmt->type == STMT_TRY || stmt->type == STMT_FINALLY);

    /* Pick up the pending exception and bind it to the catch variable. */
    if (Emit1(cx, bce, JSOP_EXCEPTION) < 0)
        return false;

    /*
     * Dup the exception object if there is a guard for rethrowing to use
     * it later when rethrowing or in other catches.
     */
    if (pn->pn_kid2 && Emit1(cx, bce, JSOP_DUP) < 0)
        return false;

    ParseNode* pn2 = pn->pn_kid1;
    switch (pn2->getKind()) {
      case PNK_ARRAY:
      case PNK_OBJECT:
        /* Destructuring catch parameter: catch ([a, b]) / catch ({a}). */
        if (!EmitDestructuringOps(cx, bce, pn2))
            return false;
        if (Emit1(cx, bce, JSOP_POP) < 0)
            return false;
        break;

      case PNK_NAME:
        /* Inline and specialize BindNameToSlot for pn2. */
        MOZ_ASSERT(!pn2->pn_cookie.isFree());
        if (!EmitVarOp(cx, pn2, JSOP_INITLEXICAL, bce))
            return false;
        if (Emit1(cx, bce, JSOP_POP) < 0)
            return false;
        break;

      default:
        MOZ_ASSERT(0);
    }

    // If there is a guard expression, emit it and arrange to jump to the next
    // catch block if the guard expression is false.
    if (pn->pn_kid2) {
        if (!EmitTree(cx, bce, pn->pn_kid2))
            return false;

        // If the guard expression is false, fall through, pop the block scope,
        // and jump to the next catch block. Otherwise jump over that code and
        // pop the dupped exception.
        ptrdiff_t guardCheck = EmitJump(cx, bce, JSOP_IFNE, 0);
        if (guardCheck < 0)
            return false;

        {
            NonLocalExitScope nle(cx, bce);

            // Move exception back to cx->exception to prepare for
            // the next catch.
            if (Emit1(cx, bce, JSOP_THROWING) < 0)
                return false;

            // Leave the scope for this catch block.
            if (!nle.prepareForNonLocalJump(stmt))
                return false;

            // Jump to the next handler. The jump target is backpatched by EmitTry.
            ptrdiff_t guardJump = EmitJump(cx, bce, JSOP_GOTO, 0);
            if (guardJump < 0)
                return false;
            stmt->guardJump() = guardJump;
        }

        // Back to normal control flow.
        SetJumpOffsetAt(bce, guardCheck);

        // Pop duplicated exception object as we no longer need it.
        if (Emit1(cx, bce, JSOP_POP) < 0)
            return false;
    }

    /* Emit the catch body. */
    return EmitTree(cx, bce, pn->pn_kid3);
}
// Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See the
// comment on EmitSwitch.
//
// Emit a full try/catch/finally statement. pn is the PNK_TRY node:
// pn_kid1 = try block, pn_kid2 = optional list of catch clauses,
// pn_kid3 = optional finally block.
MOZ_NEVER_INLINE static bool
EmitTry(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
{
    StmtInfoBCE stmtInfo(cx);

    // Push stmtInfo to track jumps-over-catches and gosubs-to-finally
    // for later fixup.
    //
    // When a finally block is active (STMT_FINALLY in our parse context),
    // non-local jumps (including jumps-over-catches) result in a GOSUB
    // being written into the bytecode stream and fixed-up later (c.f.
    // EmitBackPatchOp and BackPatch).
    PushStatementBCE(bce, &stmtInfo, pn->pn_kid3 ? STMT_FINALLY : STMT_TRY, bce->offset());

    // Since an exception can be thrown at any place inside the try block,
    // we need to restore the stack and the scope chain before we transfer
    // the control to the exception handler.
    //
    // For that we store in a try note associated with the catch or
    // finally block the stack depth upon the try entry. The interpreter
    // uses this depth to properly unwind the stack and the scope chain.
    int depth = bce->stackDepth;

    // Record the try location, then emit the try block.
    ptrdiff_t noteIndex = NewSrcNote(cx, bce, SRC_TRY);
    if (noteIndex < 0 || Emit1(cx, bce, JSOP_TRY) < 0)
        return false;
    ptrdiff_t tryStart = bce->offset();
    if (!EmitTree(cx, bce, pn->pn_kid1))
        return false;
    MOZ_ASSERT(depth == bce->stackDepth);

    // GOSUB to finally, if present.
    if (pn->pn_kid3) {
        if (EmitBackPatchOp(cx, bce, &stmtInfo.gosubs()) < 0)
            return false;
    }

    // Source note points to the jump at the end of the try block.
    if (!SetSrcNoteOffset(cx, bce, noteIndex, 0, bce->offset() - tryStart + JSOP_TRY_LENGTH))
        return false;

    // Emit jump over catch and/or finally. catchJump heads a backpatch chain
    // that accumulates one jump per catch block; all are fixed up at the end.
    ptrdiff_t catchJump = -1;
    if (EmitBackPatchOp(cx, bce, &catchJump) < 0)
        return false;

    ptrdiff_t tryEnd = bce->offset();

    // If this try has a catch block, emit it.
    if (ParseNode* pn2 = pn->pn_kid2) {
        // The emitted code for a catch block looks like:
        //
        // [pushblockscope]             only if any local aliased
        // exception
        // if there is a catchguard:
        //   dup
        // setlocal 0; pop              assign or possibly destructure exception
        // if there is a catchguard:
        //   < catchguard code >
        //   ifne POST
        //   debugleaveblock
        //   [popblockscope]            only if any local aliased
        //   throwing                   pop exception to cx->exception
        //   goto <next catch block>
        //   POST: pop
        // < catch block contents >
        // debugleaveblock
        // [popblockscope]              only if any local aliased
        // goto <end of catch blocks>   non-local; finally applies
        //
        // If there's no catch block without a catchguard, the last <next catch
        // block> points to rethrow code. This code will [gosub] to the finally
        // code if appropriate, and is also used for the catch-all trynote for
        // capturing exceptions thrown from catch{} blocks.
        for (ParseNode* pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
            MOZ_ASSERT(bce->stackDepth == depth);

            // Emit the lexical scope and catch body.
            MOZ_ASSERT(pn3->isKind(PNK_LEXICALSCOPE));
            if (!EmitTree(cx, bce, pn3))
                return false;

            // gosub <finally>, if required.
            if (pn->pn_kid3) {
                if (EmitBackPatchOp(cx, bce, &stmtInfo.gosubs()) < 0)
                    return false;
                MOZ_ASSERT(bce->stackDepth == depth);
            }

            // Jump over the remaining catch blocks. This will get fixed
            // up to jump to after catch/finally.
            if (EmitBackPatchOp(cx, bce, &catchJump) < 0)
                return false;

            // If this catch block had a guard clause, patch the guard jump to
            // come here.
            if (stmtInfo.guardJump() != -1) {
                SetJumpOffsetAt(bce, stmtInfo.guardJump());
                stmtInfo.guardJump() = -1;

                // If this catch block is the last one, rethrow, delegating
                // execution of any finally block to the exception handler.
                if (!pn3->pn_next) {
                    if (Emit1(cx, bce, JSOP_EXCEPTION) < 0)
                        return false;
                    if (Emit1(cx, bce, JSOP_THROW) < 0)
                        return false;
                }
            }
        }
    }

    MOZ_ASSERT(bce->stackDepth == depth);

    // Emit the finally handler, if there is one.
    ptrdiff_t finallyStart = 0;
    if (pn->pn_kid3) {
        // Fix up the gosubs that might have been emitted before non-local
        // jumps to the finally code.
        if (!BackPatch(cx, bce, stmtInfo.gosubs(), bce->code().end(), JSOP_GOSUB))
            return false;

        finallyStart = bce->offset();

        // Indicate that we're emitting a subroutine body.
        stmtInfo.type = STMT_SUBROUTINE;
        if (!UpdateSourceCoordNotes(cx, bce, pn->pn_kid3->pn_pos.begin))
            return false;
        if (Emit1(cx, bce, JSOP_FINALLY) < 0 ||
            !EmitTree(cx, bce, pn->pn_kid3) ||
            Emit1(cx, bce, JSOP_RETSUB) < 0)
        {
            return false;
        }
        MOZ_ASSERT(bce->stackDepth == depth);
    }
    if (!PopStatementBCE(cx, bce))
        return false;

    // ReconstructPCStack needs a NOP here to mark the end of the last catch block.
    if (Emit1(cx, bce, JSOP_NOP) < 0)
        return false;

    // Fix up the end-of-try/catch jumps to come here.
    if (!BackPatch(cx, bce, catchJump, bce->code().end(), JSOP_GOTO))
        return false;

    // Add the try note last, to let post-order give us the right ordering
    // (first to last for a given nesting level, inner to outer by level).
    if (pn->pn_kid2 && !bce->tryNoteList.append(JSTRY_CATCH, depth, tryStart, tryEnd))
        return false;

    // If we've got a finally, mark try+catch region with additional
    // trynote to catch exceptions (re)thrown from a catch block or
    // for the try{}finally{} case.
    if (pn->pn_kid3 && !bce->tryNoteList.append(JSTRY_FINALLY, depth, tryStart, finallyStart))
        return false;

    return true;
}
/*
 * Emit bytecode for an if/else statement. Else-if chains are flattened into
 * a loop via the if_again label rather than recursing, reusing the single
 * stmtInfo record. pn is the PNK_IF node: pn_kid1 = condition,
 * pn_kid2 = then part, pn_kid3 = optional else part.
 */
static bool
EmitIf(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
{
    StmtInfoBCE stmtInfo(cx);

    /* Initialize so we can detect else-if chains and avoid recursion. */
    stmtInfo.type = STMT_IF;
    ptrdiff_t beq = -1;
    ptrdiff_t jmp = -1;
    ptrdiff_t noteIndex = -1;

  if_again:
    /* Emit code for the condition before pushing stmtInfo. */
    if (!EmitTree(cx, bce, pn->pn_kid1))
        return false;
    ptrdiff_t top = bce->offset();
    if (stmtInfo.type == STMT_IF) {
        PushStatementBCE(bce, &stmtInfo, STMT_IF, top);
    } else {
        /*
         * We came here from the goto further below that detects else-if
         * chains, so we must mutate stmtInfo back into a STMT_IF record.
         * Also we need a note offset for SRC_IF_ELSE to help IonMonkey.
         */
        MOZ_ASSERT(stmtInfo.type == STMT_ELSE);
        stmtInfo.type = STMT_IF;
        stmtInfo.update = top;
        if (!SetSrcNoteOffset(cx, bce, noteIndex, 0, jmp - beq))
            return false;
    }

    /* Emit an annotated branch-if-false around the then part. */
    ParseNode* pn3 = pn->pn_kid3;
    noteIndex = NewSrcNote(cx, bce, pn3 ? SRC_IF_ELSE : SRC_IF);
    if (noteIndex < 0)
        return false;
    beq = EmitJump(cx, bce, JSOP_IFEQ, 0);
    if (beq < 0)
        return false;

    /* Emit code for the then and optional else parts. */
    if (!EmitTree(cx, bce, pn->pn_kid2))
        return false;
    if (pn3) {
        /* Modify stmtInfo so we know we're in the else part. */
        stmtInfo.type = STMT_ELSE;

        /*
         * Emit a JSOP_BACKPATCH op to jump from the end of our then part
         * around the else part. The PopStatementBCE call at the bottom of
         * this function will fix up the backpatch chain linked from
         * stmtInfo.breaks.
         */
        jmp = EmitGoto(cx, bce, &stmtInfo, &stmtInfo.breaks);
        if (jmp < 0)
            return false;

        /* Ensure the branch-if-false comes here, then emit the else. */
        SetJumpOffsetAt(bce, beq);
        if (pn3->isKind(PNK_IF)) {
            /* Else-if chain: loop instead of recursing. */
            pn = pn3;
            goto if_again;
        }

        if (!EmitTree(cx, bce, pn3))
            return false;

        /*
         * Annotate SRC_IF_ELSE with the offset from branch to jump, for
         * IonMonkey's benefit. We can't just "back up" from the pc
         * of the else clause, because we don't know whether an extended
         * jump was required to leap from the end of the then clause over
         * the else clause.
         */
        if (!SetSrcNoteOffset(cx, bce, noteIndex, 0, jmp - beq))
            return false;
    } else {
        /* No else part, fixup the branch-if-false to come here. */
        SetJumpOffsetAt(bce, beq);
    }
    return PopStatementBCE(cx, bce);
}
4728 * pnLet represents one of:
4730 * let-expression: (let (x = y) EXPR)
4731 * let-statement: let (x = y) { ... }
4733 * For a let-expression 'let (x = a, [y,z] = b) e', EmitLet produces:
4735 * bytecode stackDepth srcnotes
4736 * evaluate a +1
4737 * evaluate b +1
4738 * dup +1
4739 * destructure y
4740 * pick 1
4741 * dup +1
4742 * destructure z
4743 * pick 1
4744 * pop -1
4745 * setlocal 2 -1
4746 * setlocal 1 -1
4747 * setlocal 0 -1
4748 * pushblockscope (if needed)
4749 * evaluate e +1
4750 * debugleaveblock
4751 * popblockscope (if needed)
4753 * Note that, since pushblockscope simply changes fp->scopeChain and does not
4754 * otherwise touch the stack, evaluation of the let-var initializers must leave
4755 * the initial value in the let-var's future slot.
4758 * Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See
4759 * the comment on EmitSwitch.
4761 MOZ_NEVER_INLINE static bool
4762 EmitLet(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pnLet)
4764 MOZ_ASSERT(pnLet->isArity(PN_BINARY));
4765 ParseNode* varList = pnLet->pn_left;
4766 MOZ_ASSERT(varList->isArity(PN_LIST));
4767 ParseNode* letBody = pnLet->pn_right;
4768 MOZ_ASSERT(letBody->isLexical() && letBody->isKind(PNK_LEXICALSCOPE));
4770 int letHeadDepth = bce->stackDepth;
4772 if (!EmitVariables(cx, bce, varList, PushInitialValues, true))
4773 return false;
4775 /* Push storage for hoisted let decls (e.g. 'let (x) { let y }'). */
4776 uint32_t valuesPushed = bce->stackDepth - letHeadDepth;
4777 StmtInfoBCE stmtInfo(cx);
4778 if (!EnterBlockScope(cx, bce, &stmtInfo, letBody->pn_objbox, JSOP_UNINITIALIZED, valuesPushed))
4779 return false;
4781 if (!EmitTree(cx, bce, letBody->pn_expr))
4782 return false;
4784 if (!LeaveNestedScope(cx, bce, &stmtInfo))
4785 return false;
4787 return true;
4791 * Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See
4792 * the comment on EmitSwitch.
4794 MOZ_NEVER_INLINE static bool
4795 EmitLexicalScope(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
4797 MOZ_ASSERT(pn->isKind(PNK_LEXICALSCOPE));
4799 StmtInfoBCE stmtInfo(cx);
4800 if (!EnterBlockScope(cx, bce, &stmtInfo, pn->pn_objbox, JSOP_UNINITIALIZED, 0))
4801 return false;
4803 if (!EmitTree(cx, bce, pn->pn_expr))
4804 return false;
4806 if (!LeaveNestedScope(cx, bce, &stmtInfo))
4807 return false;
4809 return true;
4812 static bool
4813 EmitWith(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
4815 StmtInfoBCE stmtInfo(cx);
4816 if (!EmitTree(cx, bce, pn->pn_left))
4817 return false;
4818 if (!EnterNestedScope(cx, bce, &stmtInfo, pn->pn_binary_obj, STMT_WITH))
4819 return false;
4820 if (!EmitTree(cx, bce, pn->pn_right))
4821 return false;
4822 if (!LeaveNestedScope(cx, bce, &stmtInfo))
4823 return false;
4824 return true;
/*
 * EmitIterator expects the iterable to already be on the stack.
 * It will replace that stack value with the corresponding iterator:
 * it looks up the iteration method (@@iterator with symbols, otherwise the
 * std_iterator name) and calls it on the iterable.
 */
static bool
EmitIterator(ExclusiveContext* cx, BytecodeEmitter* bce)
{
    // Convert iterable to iterator.
    if (Emit1(cx, bce, JSOP_DUP) < 0)                          // OBJ OBJ
        return false;
#ifdef JS_HAS_SYMBOLS
    if (Emit2(cx, bce, JSOP_SYMBOL, jsbytecode(JS::SymbolCode::iterator)) < 0) // OBJ OBJ @@ITERATOR
        return false;
    if (!EmitElemOpBase(cx, bce, JSOP_CALLELEM))               // OBJ ITERFN
        return false;
#else
    if (!EmitAtomOp(cx, cx->names().std_iterator, JSOP_CALLPROP, bce)) // OBJ ITERFN
        return false;
#endif
    // Arrange the stack as fn, this and call with zero args.
    if (Emit1(cx, bce, JSOP_SWAP) < 0)                         // ITERFN OBJ
        return false;
    if (EmitCall(cx, bce, JSOP_CALL, 0) < 0)                   // ITER
        return false;
    CheckTypeSet(cx, bce, JSOP_CALL);
    return true;
}
/*
 * Emit declaration code for the loop variable clause of a for-in/for-of
 * head. Sets *letDecl to true when the clause is a lexical scope (let),
 * in which case the caller is responsible for entering the block scope.
 */
static bool
EmitForInOrOfVariables(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn, bool* letDecl)
{
    *letDecl = pn->isKind(PNK_LEXICALSCOPE);
    MOZ_ASSERT_IF(*letDecl, pn->isLexical());

    // If the left part is 'var x', emit code to define x if necessary using a
    // prolog opcode, but do not emit a pop. If it is 'let x', EnterBlockScope
    // will initialize let bindings in EmitForOf and EmitForIn with
    // undefineds.
    //
    // Due to the horror of legacy comprehensions, there is a third case where
    // we have PNK_LET without a lexical scope, because those expressions are
    // parsed with single lexical scope for the entire comprehension. In this
    // case we must initialize the lets to not trigger dead zone checks via
    // InitializeVars.
    if (!*letDecl) {
        // Flag so nested emission knows it is inside a for head.
        bce->emittingForInit = true;
        if (pn->isKind(PNK_VAR)) {
            if (!EmitVariables(cx, bce, pn, DefineVars))
                return false;
        } else {
            MOZ_ASSERT(pn->isKind(PNK_LET));
            if (!EmitVariables(cx, bce, pn, InitializeVars))
                return false;
        }
        bce->emittingForInit = false;
    }

    return true;
}
/*
 * If type is STMT_FOR_OF_LOOP, it emits bytecode for for-of loop.
 * pn should be PNK_FOR, and pn->pn_left should be PNK_FOROF.
 *
 * If type is STMT_SPREAD, it emits bytecode for spread operator.
 * pn should be nullptr.
 * Please refer the comment above EmitSpread for additional information about
 * stack convention.
 */
static bool
EmitForOf(ExclusiveContext* cx, BytecodeEmitter* bce, StmtType type, ParseNode* pn, ptrdiff_t top)
{
    MOZ_ASSERT(type == STMT_FOR_OF_LOOP || type == STMT_SPREAD);
    MOZ_ASSERT_IF(type == STMT_FOR_OF_LOOP, pn && pn->pn_left->isKind(PNK_FOROF));
    MOZ_ASSERT_IF(type == STMT_SPREAD, !pn);

    // In the spread case there is no parse node; all of these stay null.
    ParseNode* forHead = pn ? pn->pn_left : nullptr;
    ParseNode* forHeadExpr = forHead ? forHead->pn_kid3 : nullptr;
    ParseNode* forBody = pn ? pn->pn_right : nullptr;

    ParseNode* pn1 = forHead ? forHead->pn_kid1 : nullptr;
    bool letDecl = false;
    if (pn1 && !EmitForInOrOfVariables(cx, bce, pn1, &letDecl))
        return false;

    if (type == STMT_FOR_OF_LOOP) {
        // For-of loops run with two values on the stack: the iterator and the
        // current result object.

        // Compile the object expression to the right of 'of'.
        if (!EmitTree(cx, bce, forHeadExpr))
            return false;
        if (!EmitIterator(cx, bce))
            return false;

        // Push a dummy result so that we properly enter iteration midstream.
        if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)                // ITER RESULT
            return false;
    }

    // Enter the block before the loop body, after evaluating the obj.
    // Initialize let bindings with undefined when entering, as the name
    // assigned to is a plain assignment.
    StmtInfoBCE letStmt(cx);
    if (letDecl) {
        if (!EnterBlockScope(cx, bce, &letStmt, pn1->pn_objbox, JSOP_UNDEFINED, 0))
            return false;
    }

    LoopStmtInfo stmtInfo(cx);
    PushLoopStatement(bce, &stmtInfo, type, top);

    // Jump down to the loop condition to minimize overhead assuming at least
    // one iteration, as the other loop forms do. Annotate so IonMonkey can
    // find the loop-closing jump.
    int noteIndex = NewSrcNote(cx, bce, SRC_FOR_OF);
    if (noteIndex < 0)
        return false;
    ptrdiff_t jmp = EmitJump(cx, bce, JSOP_GOTO, 0);
    if (jmp < 0)
        return false;

    top = bce->offset();
    SET_STATEMENT_TOP(&stmtInfo, top);
    if (EmitLoopHead(cx, bce, nullptr) < 0)
        return false;

    // Spread's caller pushed the RESULT slot; account for it here.
    if (type == STMT_SPREAD)
        bce->stackDepth++;

#ifdef DEBUG
    int loopDepth = bce->stackDepth;
#endif

    // Emit code to assign result.value to the iteration variable.
    if (type == STMT_FOR_OF_LOOP) {
        if (Emit1(cx, bce, JSOP_DUP) < 0)                      // ITER RESULT RESULT
            return false;
    }
    if (!EmitAtomOp(cx, cx->names().value, JSOP_GETPROP, bce)) // ... RESULT VALUE
        return false;
    if (type == STMT_FOR_OF_LOOP) {
        if (!EmitAssignment(cx, bce, forHead->pn_kid2, JSOP_NOP, nullptr)) // ITER RESULT VALUE
            return false;
        if (Emit1(cx, bce, JSOP_POP) < 0)                      // ITER RESULT
            return false;

        // The stack should be balanced around the assignment opcode sequence.
        MOZ_ASSERT(bce->stackDepth == loopDepth);

        // Emit code for the loop body.
        if (!EmitTree(cx, bce, forBody))
            return false;

        // Set loop and enclosing "update" offsets, for continue.
        StmtInfoBCE* stmt = &stmtInfo;
        do {
            stmt->update = bce->offset();
        } while ((stmt = stmt->down) != nullptr && stmt->type == STMT_LABEL);
    } else {
        if (Emit1(cx, bce, JSOP_INITELEM_INC) < 0)             // ITER ARR (I+1)
            return false;

        MOZ_ASSERT(bce->stackDepth == loopDepth - 1);

        // STMT_SPREAD never contain continue, so do not set "update" offset.
    }

    // COME FROM the beginning of the loop to here.
    SetJumpOffsetAt(bce, jmp);
    if (!EmitLoopEntry(cx, bce, forHeadExpr))
        return false;

    if (type == STMT_FOR_OF_LOOP) {
        if (Emit1(cx, bce, JSOP_POP) < 0)                      // ITER
            return false;
        if (Emit1(cx, bce, JSOP_DUP) < 0)                      // ITER ITER
            return false;
    } else {
        if (!EmitDupAt(cx, bce, bce->stackDepth - 1 - 2))      // ITER ARR I ITER
            return false;
    }
    if (!EmitIteratorNext(cx, bce, forHead))                   // ... RESULT
        return false;
    if (Emit1(cx, bce, JSOP_DUP) < 0)                          // ... RESULT RESULT
        return false;
    if (!EmitAtomOp(cx, cx->names().done, JSOP_GETPROP, bce))  // ... RESULT DONE?
        return false;

    // Loop back to the top while result.done is falsy.
    ptrdiff_t beq = EmitJump(cx, bce, JSOP_IFEQ, top - bce->offset()); // ... RESULT
    if (beq < 0)
        return false;

    MOZ_ASSERT(bce->stackDepth == loopDepth);

    // Let Ion know where the closing jump of this loop is.
    if (!SetSrcNoteOffset(cx, bce, (unsigned)noteIndex, 0, beq - jmp))
        return false;

    // Fixup breaks and continues.
    // For STMT_SPREAD, just pop pc->topStmt.
    if (!PopStatementBCE(cx, bce))
        return false;

    if (!bce->tryNoteList.append(JSTRY_FOR_OF, bce->stackDepth, top, bce->offset()))
        return false;

    if (letDecl) {
        if (!LeaveNestedScope(cx, bce, &letStmt))
            return false;
    }

    if (type == STMT_SPREAD) {
        if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)3) < 0)      // ARR I RESULT ITER
            return false;
    }

    // Pop the result and the iter.
    EMIT_UINT16_IMM_OP(JSOP_POPN, 2);

    return true;
}
/*
 * Emit bytecode for a for-in loop. pn is the PNK_FOR node whose pn_left is
 * the for-in head (kid1 = declaration, kid2 = assignment target, kid3 =
 * enumerated object expression) and pn_right is the loop body. The loop runs
 * with the iterator and the current enumeration value on the stack.
 */
static bool
EmitForIn(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn, ptrdiff_t top)
{
    ParseNode* forHead = pn->pn_left;
    ParseNode* forBody = pn->pn_right;

    ParseNode* pn1 = forHead->pn_kid1;
    bool letDecl = false;
    if (pn1 && !EmitForInOrOfVariables(cx, bce, pn1, &letDecl))
        return false;

    /* Compile the object expression to the right of 'in'. */
    if (!EmitTree(cx, bce, forHead->pn_kid3))
        return false;

    /*
     * Emit a bytecode to convert top of stack value to the iterator
     * object depending on the loop variant (for-in, for-each-in, or
     * destructuring for-in).
     */
    MOZ_ASSERT(pn->isOp(JSOP_ITER));
    if (Emit2(cx, bce, JSOP_ITER, (uint8_t) pn->pn_iflags) < 0)
        return false;

    // For-in loops have both the iterator and the value on the stack. Push
    // undefined to balance the stack.
    if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)
        return false;

    // Enter the block before the loop body, after evaluating the obj.
    // Initialize let bindings with undefined when entering, as the name
    // assigned to is a plain assignment.
    StmtInfoBCE letStmt(cx);
    if (letDecl) {
        if (!EnterBlockScope(cx, bce, &letStmt, pn1->pn_objbox, JSOP_UNDEFINED, 0))
            return false;
    }

    LoopStmtInfo stmtInfo(cx);
    PushLoopStatement(bce, &stmtInfo, STMT_FOR_IN_LOOP, top);

    /* Annotate so IonMonkey can find the loop-closing jump. */
    int noteIndex = NewSrcNote(cx, bce, SRC_FOR_IN);
    if (noteIndex < 0)
        return false;

    /*
     * Jump down to the loop condition to minimize overhead assuming at
     * least one iteration, as the other loop forms do.
     */
    ptrdiff_t jmp = EmitJump(cx, bce, JSOP_GOTO, 0);
    if (jmp < 0)
        return false;

    top = bce->offset();
    SET_STATEMENT_TOP(&stmtInfo, top);
    if (EmitLoopHead(cx, bce, nullptr) < 0)
        return false;

#ifdef DEBUG
    int loopDepth = bce->stackDepth;
#endif

    // Emit code to assign the enumeration value to the left hand side, but
    // also leave it on the stack.
    if (!EmitAssignment(cx, bce, forHead->pn_kid2, JSOP_NOP, nullptr))
        return false;

    /* The stack should be balanced around the assignment opcode sequence. */
    MOZ_ASSERT(bce->stackDepth == loopDepth);

    /* Emit code for the loop body. */
    if (!EmitTree(cx, bce, forBody))
        return false;

    /* Set loop and enclosing "update" offsets, for continue. */
    StmtInfoBCE* stmt = &stmtInfo;
    do {
        stmt->update = bce->offset();
    } while ((stmt = stmt->down) != nullptr && stmt->type == STMT_LABEL);

    /*
     * Fixup the goto that starts the loop to jump down to JSOP_MOREITER.
     */
    SetJumpOffsetAt(bce, jmp);
    if (!EmitLoopEntry(cx, bce, nullptr))
        return false;
    if (Emit1(cx, bce, JSOP_POP) < 0)
        return false;
    if (Emit1(cx, bce, JSOP_MOREITER) < 0)
        return false;
    if (Emit1(cx, bce, JSOP_ISNOITER) < 0)
        return false;
    ptrdiff_t beq = EmitJump(cx, bce, JSOP_IFEQ, top - bce->offset());
    if (beq < 0)
        return false;

    /* Set the srcnote offset so we can find the closing jump. */
    if (!SetSrcNoteOffset(cx, bce, (unsigned)noteIndex, 0, beq - jmp))
        return false;

    // Fix up breaks and continues.
    if (!PopStatementBCE(cx, bce))
        return false;

    // Pop the enumeration value.
    if (Emit1(cx, bce, JSOP_POP) < 0)
        return false;

    if (!bce->tryNoteList.append(JSTRY_FOR_IN, bce->stackDepth, top, bce->offset()))
        return false;
    if (Emit1(cx, bce, JSOP_ENDITER) < 0)
        return false;

    if (letDecl) {
        if (!LeaveNestedScope(cx, bce, &letStmt))
            return false;
    }

    return true;
}
5172 static bool
5173 EmitNormalFor(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn, ptrdiff_t top)
5175 LoopStmtInfo stmtInfo(cx);
5176 PushLoopStatement(bce, &stmtInfo, STMT_FOR_LOOP, top);
5178 ParseNode* forHead = pn->pn_left;
5179 ParseNode* forBody = pn->pn_right;
5181 /* C-style for (init; cond; update) ... loop. */
5182 JSOp op = JSOP_POP;
5183 ParseNode* pn3 = forHead->pn_kid1;
5184 if (!pn3) {
5185 // No initializer, but emit a nop so that there's somewhere to put the
5186 // SRC_FOR annotation that IonBuilder will look for.
5187 op = JSOP_NOP;
5188 } else {
5189 bce->emittingForInit = true;
5190 if (!UpdateSourceCoordNotes(cx, bce, pn3->pn_pos.begin))
5191 return false;
5192 if (!EmitTree(cx, bce, pn3))
5193 return false;
5194 bce->emittingForInit = false;
5198 * NB: the SRC_FOR note has offsetBias 1 (JSOP_{NOP,POP}_LENGTH).
5199 * Use tmp to hold the biased srcnote "top" offset, which differs
5200 * from the top local variable by the length of the JSOP_GOTO
5201 * emitted in between tmp and top if this loop has a condition.
5203 int noteIndex = NewSrcNote(cx, bce, SRC_FOR);
5204 if (noteIndex < 0 || Emit1(cx, bce, op) < 0)
5205 return false;
5206 ptrdiff_t tmp = bce->offset();
5208 ptrdiff_t jmp = -1;
5209 if (forHead->pn_kid2) {
5210 /* Goto the loop condition, which branches back to iterate. */
5211 jmp = EmitJump(cx, bce, JSOP_GOTO, 0);
5212 if (jmp < 0)
5213 return false;
5214 } else {
5215 if (op != JSOP_NOP && Emit1(cx, bce, JSOP_NOP) < 0)
5216 return false;
5219 top = bce->offset();
5220 SET_STATEMENT_TOP(&stmtInfo, top);
5222 /* Emit code for the loop body. */
5223 if (EmitLoopHead(cx, bce, forBody) < 0)
5224 return false;
5225 if (jmp == -1 && !EmitLoopEntry(cx, bce, forBody))
5226 return false;
5227 if (!EmitTree(cx, bce, forBody))
5228 return false;
5230 /* Set the second note offset so we can find the update part. */
5231 MOZ_ASSERT(noteIndex != -1);
5232 ptrdiff_t tmp2 = bce->offset();
5234 /* Set loop and enclosing "update" offsets, for continue. */
5235 StmtInfoBCE* stmt = &stmtInfo;
5236 do {
5237 stmt->update = bce->offset();
5238 } while ((stmt = stmt->down) != nullptr && stmt->type == STMT_LABEL);
5240 /* Check for update code to do before the condition (if any). */
5241 pn3 = forHead->pn_kid3;
5242 if (pn3) {
5243 if (!UpdateSourceCoordNotes(cx, bce, pn3->pn_pos.begin))
5244 return false;
5245 op = JSOP_POP;
5246 if (!EmitTree(cx, bce, pn3))
5247 return false;
5249 /* Always emit the POP or NOP to help IonBuilder. */
5250 if (Emit1(cx, bce, op) < 0)
5251 return false;
5253 /* Restore the absolute line number for source note readers. */
5254 uint32_t lineNum = bce->parser->tokenStream.srcCoords.lineNum(pn->pn_pos.end);
5255 if (bce->currentLine() != lineNum) {
5256 if (NewSrcNote2(cx, bce, SRC_SETLINE, ptrdiff_t(lineNum)) < 0)
5257 return false;
5258 bce->current->currentLine = lineNum;
5259 bce->current->lastColumn = 0;
5263 ptrdiff_t tmp3 = bce->offset();
5265 if (forHead->pn_kid2) {
5266 /* Fix up the goto from top to target the loop condition. */
5267 MOZ_ASSERT(jmp >= 0);
5268 SetJumpOffsetAt(bce, jmp);
5269 if (!EmitLoopEntry(cx, bce, forHead->pn_kid2))
5270 return false;
5272 if (!EmitTree(cx, bce, forHead->pn_kid2))
5273 return false;
5276 /* Set the first note offset so we can find the loop condition. */
5277 if (!SetSrcNoteOffset(cx, bce, (unsigned)noteIndex, 0, tmp3 - tmp))
5278 return false;
5279 if (!SetSrcNoteOffset(cx, bce, (unsigned)noteIndex, 1, tmp2 - tmp))
5280 return false;
5281 /* The third note offset helps us find the loop-closing jump. */
5282 if (!SetSrcNoteOffset(cx, bce, (unsigned)noteIndex, 2, bce->offset() - tmp))
5283 return false;
5285 /* If no loop condition, just emit a loop-closing jump. */
5286 op = forHead->pn_kid2 ? JSOP_IFNE : JSOP_GOTO;
5287 if (EmitJump(cx, bce, op, top - bce->offset()) < 0)
5288 return false;
5290 if (!bce->tryNoteList.append(JSTRY_LOOP, bce->stackDepth, top, bce->offset()))
5291 return false;
5293 /* Now fixup all breaks and continues. */
5294 return PopStatementBCE(cx, bce);
5297 static inline bool
5298 EmitFor(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn, ptrdiff_t top)
5300 if (pn->pn_left->isKind(PNK_FORIN))
5301 return EmitForIn(cx, bce, pn, top);
5303 if (pn->pn_left->isKind(PNK_FOROF))
5304 return EmitForOf(cx, bce, STMT_FOR_OF_LOOP, pn, top);
5306 MOZ_ASSERT(pn->pn_left->isKind(PNK_FORHEAD));
5307 return EmitNormalFor(cx, bce, pn, top);
5310 static MOZ_NEVER_INLINE bool
5311 EmitFunc(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
5313 FunctionBox* funbox = pn->pn_funbox;
5314 RootedFunction fun(cx, funbox->function());
5315 MOZ_ASSERT_IF(fun->isInterpretedLazy(), fun->lazyScript());
5318 * Set the EMITTEDFUNCTION flag in function definitions once they have been
5319 * emitted. Function definitions that need hoisting to the top of the
5320 * function will be seen by EmitFunc in two places.
5322 if (pn->pn_dflags & PND_EMITTEDFUNCTION) {
5323 MOZ_ASSERT_IF(fun->hasScript(), fun->nonLazyScript());
5324 MOZ_ASSERT(pn->functionIsHoisted());
5325 MOZ_ASSERT(bce->sc->isFunctionBox());
5326 return true;
5329 pn->pn_dflags |= PND_EMITTEDFUNCTION;
5332 * Mark as singletons any function which will only be executed once, or
5333 * which is inner to a lambda we only expect to run once. In the latter
5334 * case, if the lambda runs multiple times then CloneFunctionObject will
5335 * make a deep clone of its contents.
5337 if (fun->isInterpreted()) {
5338 bool singleton =
5339 bce->script->compileAndGo() &&
5340 fun->isInterpreted() &&
5341 (bce->checkSingletonContext() ||
5342 (!bce->isInLoop() && bce->isRunOnceLambda()));
5343 if (!JSFunction::setTypeForScriptedFunction(cx, fun, singleton))
5344 return false;
5346 if (fun->isInterpretedLazy()) {
5347 if (!fun->lazyScript()->sourceObject()) {
5348 JSObject* scope = bce->staticScope;
5349 if (!scope && bce->sc->isFunctionBox())
5350 scope = bce->sc->asFunctionBox()->function();
5351 JSObject* source = bce->script->sourceObject();
5352 fun->lazyScript()->setParent(scope, &source->as<ScriptSourceObject>());
5354 if (bce->emittingRunOnceLambda)
5355 fun->lazyScript()->setTreatAsRunOnce();
5356 } else {
5357 SharedContext* outersc = bce->sc;
5359 if (outersc->isFunctionBox() && outersc->asFunctionBox()->mightAliasLocals())
5360 funbox->setMightAliasLocals(); // inherit mightAliasLocals from parent
5361 MOZ_ASSERT_IF(outersc->strict, funbox->strict);
5363 // Inherit most things (principals, version, etc) from the parent.
5364 Rooted<JSScript*> parent(cx, bce->script);
5365 CompileOptions options(cx, bce->parser->options());
5366 options.setMutedErrors(parent->mutedErrors())
5367 .setCompileAndGo(parent->compileAndGo())
5368 .setSelfHostingMode(parent->selfHosted())
5369 .setNoScriptRval(false)
5370 .setForEval(false)
5371 .setVersion(parent->getVersion());
5373 Rooted<JSObject*> enclosingScope(cx, EnclosingStaticScope(bce));
5374 Rooted<JSObject*> sourceObject(cx, bce->script->sourceObject());
5375 Rooted<JSScript*> script(cx, JSScript::Create(cx, enclosingScope, false, options,
5376 parent->staticLevel() + 1,
5377 sourceObject,
5378 funbox->bufStart, funbox->bufEnd));
5379 if (!script)
5380 return false;
5382 script->bindings = funbox->bindings;
5384 uint32_t lineNum = bce->parser->tokenStream.srcCoords.lineNum(pn->pn_pos.begin);
5385 BytecodeEmitter bce2(bce, bce->parser, funbox, script, /* lazyScript = */ js::NullPtr(),
5386 bce->insideEval, bce->evalCaller, bce->hasGlobalScope, lineNum,
5387 bce->emitterMode);
5388 if (!bce2.init())
5389 return false;
5391 /* We measured the max scope depth when we parsed the function. */
5392 if (!EmitFunctionScript(cx, &bce2, pn->pn_body))
5393 return false;
5395 if (funbox->usesArguments && funbox->usesApply && funbox->usesThis)
5396 script->setUsesArgumentsApplyAndThis();
5398 } else {
5399 MOZ_ASSERT(IsAsmJSModuleNative(fun->native()));
5402 /* Make the function object a literal in the outer script's pool. */
5403 unsigned index = bce->objectList.add(pn->pn_funbox);
5405 /* Non-hoisted functions simply emit their respective op. */
5406 if (!pn->functionIsHoisted()) {
5407 /* JSOP_LAMBDA_ARROW is always preceded by JSOP_THIS. */
5408 MOZ_ASSERT(fun->isArrow() == (pn->getOp() == JSOP_LAMBDA_ARROW));
5409 if (fun->isArrow() && Emit1(cx, bce, JSOP_THIS) < 0)
5410 return false;
5411 return EmitIndex32(cx, pn->getOp(), index, bce);
5415 * For a script we emit the code as we parse. Thus the bytecode for
5416 * top-level functions should go in the prolog to predefine their
5417 * names in the variable object before the already-generated main code
5418 * is executed. This extra work for top-level scripts is not necessary
5419 * when we emit the code for a function. It is fully parsed prior to
5420 * invocation of the emitter and calls to EmitTree for function
5421 * definitions can be scheduled before generating the rest of code.
5423 if (!bce->sc->isFunctionBox()) {
5424 MOZ_ASSERT(pn->pn_cookie.isFree());
5425 MOZ_ASSERT(pn->getOp() == JSOP_NOP);
5426 MOZ_ASSERT(!bce->topStmt);
5427 bce->switchToProlog();
5428 if (!EmitIndex32(cx, JSOP_DEFFUN, index, bce))
5429 return false;
5430 if (!UpdateSourceCoordNotes(cx, bce, pn->pn_pos.begin))
5431 return false;
5432 bce->switchToMain();
5433 } else {
5434 #ifdef DEBUG
5435 BindingIter bi(bce->script);
5436 while (bi->name() != fun->atom())
5437 bi++;
5438 MOZ_ASSERT(bi->kind() == Binding::VARIABLE || bi->kind() == Binding::CONSTANT ||
5439 bi->kind() == Binding::ARGUMENT);
5440 MOZ_ASSERT(bi.argOrLocalIndex() < JS_BIT(20));
5441 #endif
5442 pn->pn_index = index;
5443 if (!EmitIndexOp(cx, JSOP_LAMBDA, index, bce))
5444 return false;
5445 MOZ_ASSERT(pn->getOp() == JSOP_GETLOCAL || pn->getOp() == JSOP_GETARG);
5446 JSOp setOp = pn->getOp() == JSOP_GETLOCAL ? JSOP_SETLOCAL : JSOP_SETARG;
5447 if (!EmitVarOp(cx, pn, setOp, bce))
5448 return false;
5449 if (Emit1(cx, bce, JSOP_POP) < 0)
5450 return false;
5453 return true;
/*
 * Emit bytecode for a do-while loop: the body first, then the condition,
 * closed by a backward JSOP_IFNE branch. Two SRC_WHILE notes annotate the
 * loop (back-edge and update positions) for IonBuilder and the decompiler.
 */
static bool
EmitDo(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
{
    /* Emit an annotated nop so IonBuilder can recognize the 'do' loop. */
    ptrdiff_t noteIndex = NewSrcNote(cx, bce, SRC_WHILE);
    if (noteIndex < 0 || Emit1(cx, bce, JSOP_NOP) < 0)
        return false;

    ptrdiff_t noteIndex2 = NewSrcNote(cx, bce, SRC_WHILE);
    if (noteIndex2 < 0)
        return false;

    /* Compile the loop body. */
    ptrdiff_t top = EmitLoopHead(cx, bce, pn->pn_left);
    if (top < 0)
        return false;

    LoopStmtInfo stmtInfo(cx);
    PushLoopStatement(bce, &stmtInfo, STMT_DO_LOOP, top);

    if (!EmitLoopEntry(cx, bce, nullptr))
        return false;

    if (!EmitTree(cx, bce, pn->pn_left))
        return false;

    /* Set loop and enclosing label update offsets, for continue. */
    ptrdiff_t off = bce->offset();
    StmtInfoBCE* stmt = &stmtInfo;
    do {
        stmt->update = off;
    } while ((stmt = stmt->down) != nullptr && stmt->type == STMT_LABEL);

    /* Compile the loop condition, now that continues know where to go. */
    if (!EmitTree(cx, bce, pn->pn_right))
        return false;

    ptrdiff_t beq = EmitJump(cx, bce, JSOP_IFNE, top - bce->offset());
    if (beq < 0)
        return false;

    if (!bce->tryNoteList.append(JSTRY_LOOP, bce->stackDepth, top, bce->offset()))
        return false;

    /*
     * Update the annotations with the update and back edge positions, for
     * IonBuilder.
     *
     * Be careful: We must set noteIndex2 before noteIndex in case the noteIndex
     * note gets bigger.
     */
    if (!SetSrcNoteOffset(cx, bce, noteIndex2, 0, beq - top))
        return false;
    if (!SetSrcNoteOffset(cx, bce, noteIndex, 0, 1 + (off - top)))
        return false;

    return PopStatementBCE(cx, bce);
}
5515 static bool
5516 EmitWhile(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn, ptrdiff_t top)
5519 * Minimize bytecodes issued for one or more iterations by jumping to
5520 * the condition below the body and closing the loop if the condition
5521 * is true with a backward branch. For iteration count i:
5523 * i test at the top test at the bottom
5524 * = =============== ==================
5525 * 0 ifeq-pass goto; ifne-fail
5526 * 1 ifeq-fail; goto; ifne-pass goto; ifne-pass; ifne-fail
5527 * 2 2*(ifeq-fail; goto); ifeq-pass goto; 2*ifne-pass; ifne-fail
5528 * . . .
5529 * N N*(ifeq-fail; goto); ifeq-pass goto; N*ifne-pass; ifne-fail
5531 LoopStmtInfo stmtInfo(cx);
5532 PushLoopStatement(bce, &stmtInfo, STMT_WHILE_LOOP, top);
5534 ptrdiff_t noteIndex = NewSrcNote(cx, bce, SRC_WHILE);
5535 if (noteIndex < 0)
5536 return false;
5538 ptrdiff_t jmp = EmitJump(cx, bce, JSOP_GOTO, 0);
5539 if (jmp < 0)
5540 return false;
5542 top = EmitLoopHead(cx, bce, pn->pn_right);
5543 if (top < 0)
5544 return false;
5546 if (!EmitTree(cx, bce, pn->pn_right))
5547 return false;
5549 SetJumpOffsetAt(bce, jmp);
5550 if (!EmitLoopEntry(cx, bce, pn->pn_left))
5551 return false;
5552 if (!EmitTree(cx, bce, pn->pn_left))
5553 return false;
5555 ptrdiff_t beq = EmitJump(cx, bce, JSOP_IFNE, top - bce->offset());
5556 if (beq < 0)
5557 return false;
5559 if (!bce->tryNoteList.append(JSTRY_LOOP, bce->stackDepth, top, bce->offset()))
5560 return false;
5562 if (!SetSrcNoteOffset(cx, bce, noteIndex, 0, beq - jmp))
5563 return false;
5565 return PopStatementBCE(cx, bce);
5568 static bool
5569 EmitBreak(ExclusiveContext* cx, BytecodeEmitter* bce, PropertyName* label)
5571 StmtInfoBCE* stmt = bce->topStmt;
5572 SrcNoteType noteType;
5573 if (label) {
5574 while (stmt->type != STMT_LABEL || stmt->label != label)
5575 stmt = stmt->down;
5576 noteType = SRC_BREAK2LABEL;
5577 } else {
5578 while (!stmt->isLoop() && stmt->type != STMT_SWITCH)
5579 stmt = stmt->down;
5580 noteType = (stmt->type == STMT_SWITCH) ? SRC_SWITCHBREAK : SRC_BREAK;
5583 return EmitGoto(cx, bce, stmt, &stmt->breaks, noteType) >= 0;
5586 static bool
5587 EmitContinue(ExclusiveContext* cx, BytecodeEmitter* bce, PropertyName* label)
5589 StmtInfoBCE* stmt = bce->topStmt;
5590 if (label) {
5591 /* Find the loop statement enclosed by the matching label. */
5592 StmtInfoBCE* loop = nullptr;
5593 while (stmt->type != STMT_LABEL || stmt->label != label) {
5594 if (stmt->isLoop())
5595 loop = stmt;
5596 stmt = stmt->down;
5598 stmt = loop;
5599 } else {
5600 while (!stmt->isLoop())
5601 stmt = stmt->down;
5604 return EmitGoto(cx, bce, stmt, &stmt->continues, SRC_CONTINUE) >= 0;
5607 static bool
5608 InTryBlockWithFinally(BytecodeEmitter* bce)
5610 for (StmtInfoBCE* stmt = bce->topStmt; stmt; stmt = stmt->down) {
5611 if (stmt->type == STMT_FINALLY)
5612 return true;
5614 return false;
/*
 * Emit bytecode for a return statement, including the iterator-result
 * boxing star generators require and the fixups needed to exit enclosing
 * try/finally and let blocks.
 */
static bool
EmitReturn(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
{
    if (!UpdateSourceCoordNotes(cx, bce, pn->pn_pos.begin))
        return false;

    if (bce->sc->isFunctionBox() && bce->sc->asFunctionBox()->isStarGenerator()) {
        if (!EmitPrepareIteratorResult(cx, bce))
            return false;
    }

    /* Push a return value */
    if (ParseNode* pn2 = pn->pn_left) {
        if (!EmitTree(cx, bce, pn2))
            return false;
    } else {
        /* No explicit return value provided */
        if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)
            return false;
    }

    if (bce->sc->isFunctionBox() && bce->sc->asFunctionBox()->isStarGenerator()) {
        if (!EmitFinishIteratorResult(cx, bce, true))
            return false;
    }

    /*
     * EmitNonLocalJumpFixup may add fixup bytecode to close open try
     * blocks having finally clauses and to exit intermingled let blocks.
     * We can't simply transfer control flow to our caller in that case,
     * because we must gosub to those finally clauses from inner to outer,
     * with the correct stack pointer (i.e., after popping any with,
     * for/in, etc., slots nested inside the finally's try).
     *
     * In this case we mutate JSOP_RETURN into JSOP_SETRVAL and add an
     * extra JSOP_RETRVAL after the fixups.
     */
    ptrdiff_t top = bce->offset();

    bool isGenerator = bce->sc->isFunctionBox() && bce->sc->asFunctionBox()->isGenerator();
    bool useGenRVal = false;
    if (isGenerator) {
        if (bce->sc->asFunctionBox()->isStarGenerator() && InTryBlockWithFinally(bce)) {
            // Emit JSOP_SETALIASEDVAR .genrval to store the return value on the
            // scope chain, so it's not lost when we yield in a finally block.
            useGenRVal = true;
            MOZ_ASSERT(pn->pn_right);
            if (!EmitTree(cx, bce, pn->pn_right))
                return false;
            if (Emit1(cx, bce, JSOP_POP) < 0)
                return false;
        } else {
            if (Emit1(cx, bce, JSOP_SETRVAL) < 0)
                return false;
        }
    } else {
        if (Emit1(cx, bce, JSOP_RETURN) < 0)
            return false;
    }

    NonLocalExitScope nle(cx, bce);

    if (!nle.prepareForNonLocalJump(nullptr))
        return false;

    if (isGenerator) {
        ScopeCoordinate sc;
        // We know that .generator and .genrval are on the top scope chain node,
        // as we just exited nested scopes.
        sc.setHops(0);
        if (useGenRVal) {
            // Reload the stashed return value and set it as the frame rval.
            MOZ_ALWAYS_TRUE(LookupAliasedNameSlot(bce, bce->script, cx->names().dotGenRVal, &sc));
            if (!EmitAliasedVarOp(cx, JSOP_GETALIASEDVAR, sc, DontCheckLexical, bce))
                return false;
            if (Emit1(cx, bce, JSOP_SETRVAL) < 0)
                return false;
        }
        MOZ_ALWAYS_TRUE(LookupAliasedNameSlot(bce, bce->script, cx->names().dotGenerator, &sc));
        if (!EmitAliasedVarOp(cx, JSOP_GETALIASEDVAR, sc, DontCheckLexical, bce))
            return false;
        if (!EmitYieldOp(cx, bce, JSOP_FINALYIELDRVAL))
            return false;
    } else if (top + static_cast<ptrdiff_t>(JSOP_RETURN_LENGTH) != bce->offset()) {
        // Fixup code was emitted after the JSOP_RETURN above: rewrite it to
        // JSOP_SETRVAL and terminate with JSOP_RETRVAL instead.
        bce->code()[top] = JSOP_SETRVAL;
        if (Emit1(cx, bce, JSOP_RETRVAL) < 0)
            return false;
    }

    return true;
}
/*
 * Emit bytecode for a yield expression (JSOP_YIELD) or the implicit initial
 * yield of a generator (JSOP_INITIALYIELD). pn_left is the operand (may be
 * null, meaning undefined); pn_right is the generator object expression.
 */
static bool
EmitYield(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
{
    MOZ_ASSERT(bce->sc->isFunctionBox());

    if (pn->getOp() == JSOP_YIELD) {
        if (bce->sc->asFunctionBox()->isStarGenerator()) {
            if (!EmitPrepareIteratorResult(cx, bce))
                return false;
        }
        if (pn->pn_left) {
            if (!EmitTree(cx, bce, pn->pn_left))
                return false;
        } else {
            // yield with no operand yields undefined.
            if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)
                return false;
        }
        if (bce->sc->asFunctionBox()->isStarGenerator()) {
            // Box the value into a {value, done:false} iterator result.
            if (!EmitFinishIteratorResult(cx, bce, false))
                return false;
        }
    } else {
        MOZ_ASSERT(pn->getOp() == JSOP_INITIALYIELD);
    }

    // Push the generator object, then perform the yield op itself.
    if (!EmitTree(cx, bce, pn->pn_right))
        return false;

    if (!EmitYieldOp(cx, bce, pn->getOp()))
        return false;

    // The initial yield leaves a value on the stack that no one consumes.
    if (pn->getOp() == JSOP_INITIALYIELD && Emit1(cx, bce, JSOP_POP) < 0)
        return false;

    return true;
}
/*
 * Emit bytecode for yield* (delegating yield): evaluate |iter|, get its
 * iterator, then loop forwarding sent values and thrown exceptions through
 * the delegate via next()/throw(), yielding each result object as-is, until
 * the delegate reports done; finally produce result.value. The end-of-line
 * comments track the operand stack at each step.
 */
static bool
EmitYieldStar(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* iter, ParseNode* gen)
{
    MOZ_ASSERT(bce->sc->isFunctionBox());
    MOZ_ASSERT(bce->sc->asFunctionBox()->isStarGenerator());

    if (!EmitTree(cx, bce, iter))                                // ITERABLE
        return false;
    if (!EmitIterator(cx, bce))                                  // ITER
        return false;

    // Initial send value is undefined.
    if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)                      // ITER RECEIVED
        return false;

    int depth = bce->stackDepth;
    MOZ_ASSERT(depth >= 2);

    ptrdiff_t initialSend = -1;
    if (EmitBackPatchOp(cx, bce, &initialSend) < 0)              // goto initialSend
        return false;

    // Try prologue.                                             // ITER RESULT
    StmtInfoBCE stmtInfo(cx);
    PushStatementBCE(bce, &stmtInfo, STMT_TRY, bce->offset());
    ptrdiff_t noteIndex = NewSrcNote(cx, bce, SRC_TRY);
    ptrdiff_t tryStart = bce->offset();                          // tryStart:
    if (noteIndex < 0 || Emit1(cx, bce, JSOP_TRY) < 0)
        return false;
    MOZ_ASSERT(bce->stackDepth == depth);

    // Load the generator object.
    if (!EmitTree(cx, bce, gen))                                 // ITER RESULT GENOBJ
        return false;

    // Yield RESULT as-is, without re-boxing.
    if (!EmitYieldOp(cx, bce, JSOP_YIELD))                       // ITER RECEIVED
        return false;

    // Try epilogue.
    if (!SetSrcNoteOffset(cx, bce, noteIndex, 0, bce->offset() - tryStart))
        return false;
    ptrdiff_t subsequentSend = -1;
    if (EmitBackPatchOp(cx, bce, &subsequentSend) < 0)           // goto subsequentSend
        return false;
    ptrdiff_t tryEnd = bce->offset();                            // tryEnd:

    // Catch location.
    bce->stackDepth = uint32_t(depth);                           // ITER RESULT
    if (Emit1(cx, bce, JSOP_POP) < 0)                            // ITER
        return false;
    // THROW? = 'throw' in ITER
    if (Emit1(cx, bce, JSOP_EXCEPTION) < 0)                      // ITER EXCEPTION
        return false;
    if (Emit1(cx, bce, JSOP_SWAP) < 0)                           // EXCEPTION ITER
        return false;
    if (Emit1(cx, bce, JSOP_DUP) < 0)                            // EXCEPTION ITER ITER
        return false;
    if (!EmitAtomOp(cx, cx->names().throw_, JSOP_STRING, bce))   // EXCEPTION ITER ITER "throw"
        return false;
    if (Emit1(cx, bce, JSOP_SWAP) < 0)                           // EXCEPTION ITER "throw" ITER
        return false;
    if (Emit1(cx, bce, JSOP_IN) < 0)                             // EXCEPTION ITER THROW?
        return false;
    // if (THROW?) goto delegate
    ptrdiff_t checkThrow = EmitJump(cx, bce, JSOP_IFNE, 0);      // EXCEPTION ITER
    if (checkThrow < 0)
        return false;
    // The delegate has no throw method: rethrow the exception here.
    if (Emit1(cx, bce, JSOP_POP) < 0)                            // EXCEPTION
        return false;
    if (Emit1(cx, bce, JSOP_THROW) < 0)                          // throw EXCEPTION
        return false;

    SetJumpOffsetAt(bce, checkThrow);                            // delegate:
    // RESULT = ITER.throw(EXCEPTION)                            // EXCEPTION ITER
    bce->stackDepth = uint32_t(depth);
    if (Emit1(cx, bce, JSOP_DUP) < 0)                            // EXCEPTION ITER ITER
        return false;
    if (Emit1(cx, bce, JSOP_DUP) < 0)                            // EXCEPTION ITER ITER ITER
        return false;
    if (!EmitAtomOp(cx, cx->names().throw_, JSOP_CALLPROP, bce)) // EXCEPTION ITER ITER THROW
        return false;
    if (Emit1(cx, bce, JSOP_SWAP) < 0)                           // EXCEPTION ITER THROW ITER
        return false;
    if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)3) < 0)            // ITER THROW ITER EXCEPTION
        return false;
    if (EmitCall(cx, bce, JSOP_CALL, 1, iter) < 0)               // ITER RESULT
        return false;
    CheckTypeSet(cx, bce, JSOP_CALL);
    MOZ_ASSERT(bce->stackDepth == depth);
    ptrdiff_t checkResult = -1;
    if (EmitBackPatchOp(cx, bce, &checkResult) < 0)              // goto checkResult
        return false;

    // Catch epilogue.
    if (!PopStatementBCE(cx, bce))
        return false;
    // This is a peace offering to ReconstructPCStack. See the note in EmitTry.
    if (Emit1(cx, bce, JSOP_NOP) < 0)
        return false;
    if (!bce->tryNoteList.append(JSTRY_CATCH, depth, tryStart + JSOP_TRY_LENGTH, tryEnd))
        return false;

    // After the try/catch block: send the received value to the iterator.
    if (!BackPatch(cx, bce, initialSend, bce->code().end(), JSOP_GOTO))    // initialSend:
        return false;
    if (!BackPatch(cx, bce, subsequentSend, bce->code().end(), JSOP_GOTO)) // subsequentSend:
        return false;

    // Send location.
    // result = iter.next(received)                              // ITER RECEIVED
    if (Emit1(cx, bce, JSOP_SWAP) < 0)                           // RECEIVED ITER
        return false;
    if (Emit1(cx, bce, JSOP_DUP) < 0)                            // RECEIVED ITER ITER
        return false;
    if (Emit1(cx, bce, JSOP_DUP) < 0)                            // RECEIVED ITER ITER ITER
        return false;
    if (!EmitAtomOp(cx, cx->names().next, JSOP_CALLPROP, bce))   // RECEIVED ITER ITER NEXT
        return false;
    if (Emit1(cx, bce, JSOP_SWAP) < 0)                           // RECEIVED ITER NEXT ITER
        return false;
    if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)3) < 0)            // ITER NEXT ITER RECEIVED
        return false;
    if (EmitCall(cx, bce, JSOP_CALL, 1, iter) < 0)               // ITER RESULT
        return false;
    CheckTypeSet(cx, bce, JSOP_CALL);
    MOZ_ASSERT(bce->stackDepth == depth);

    if (!BackPatch(cx, bce, checkResult, bce->code().end(), JSOP_GOTO))    // checkResult:
        return false;
    // if (!result.done) goto tryStart;                          // ITER RESULT
    if (Emit1(cx, bce, JSOP_DUP) < 0)                            // ITER RESULT RESULT
        return false;
    if (!EmitAtomOp(cx, cx->names().done, JSOP_GETPROP, bce))    // ITER RESULT DONE
        return false;
    // if (!DONE) goto tryStart;
    if (EmitJump(cx, bce, JSOP_IFEQ, tryStart - bce->offset()) < 0)        // ITER RESULT
        return false;

    // result.value
    if (Emit1(cx, bce, JSOP_SWAP) < 0)                           // RESULT ITER
        return false;
    if (Emit1(cx, bce, JSOP_POP) < 0)                            // RESULT
        return false;
    if (!EmitAtomOp(cx, cx->names().value, JSOP_GETPROP, bce))   // VALUE
        return false;

    MOZ_ASSERT(bce->stackDepth == depth - 1);

    return true;
}
5898 static bool
5899 EmitStatementList(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn, ptrdiff_t top)
5901 MOZ_ASSERT(pn->isArity(PN_LIST));
5903 StmtInfoBCE stmtInfo(cx);
5904 PushStatementBCE(bce, &stmtInfo, STMT_BLOCK, top);
5906 ParseNode* pnchild = pn->pn_head;
5908 if (pn->pn_xflags & PNX_DESTRUCT)
5909 pnchild = pnchild->pn_next;
5911 for (ParseNode* pn2 = pnchild; pn2; pn2 = pn2->pn_next) {
5912 if (!EmitTree(cx, bce, pn2))
5913 return false;
5916 return PopStatementBCE(cx, bce);
/*
 * Emit bytecode for an expression statement (PNK_SEMI). Expressions that
 * are provably useless (no side effects, value not wanted) are elided,
 * except that misleading string statements such as a late "use strict"
 * draw a strict-mode warning.
 */
static bool
EmitStatement(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
{
    MOZ_ASSERT(pn->isKind(PNK_SEMI));

    ParseNode* pn2 = pn->pn_kid;
    if (!pn2)
        return true;   // empty statement: nothing to emit

    if (!UpdateSourceCoordNotes(cx, bce, pn->pn_pos.begin))
        return false;

    /*
     * Top-level or called-from-a-native JS_Execute/EvaluateScript,
     * debugger, and eval frames may need the value of the ultimate
     * expression statement as the script's result, despite the fact
     * that it appears useless to the compiler.
     *
     * API users may also set the JSOPTION_NO_SCRIPT_RVAL option when
     * calling JS_Compile* to suppress JSOP_SETRVAL.
     */
    bool wantval = false;
    bool useful = false;
    if (bce->sc->isFunctionBox()) {
        MOZ_ASSERT(!bce->script->noScriptRval());
    } else {
        useful = wantval = !bce->script->noScriptRval();
    }

    /* Don't eliminate expressions with side effects. */
    if (!useful) {
        if (!CheckSideEffects(cx, bce, pn2, &useful))
            return false;

        /*
         * Don't eliminate apparently useless expressions if they are
         * labeled expression statements. The pc->topStmt->update test
         * catches the case where we are nesting in EmitTree for a labeled
         * compound statement.
         */
        if (bce->topStmt &&
            bce->topStmt->type == STMT_LABEL &&
            bce->topStmt->update >= bce->offset())
        {
            useful = true;
        }
    }

    if (useful) {
        // Evaluate the expression; keep the value as the script rval or pop it.
        JSOp op = wantval ? JSOP_SETRVAL : JSOP_POP;
        MOZ_ASSERT_IF(pn2->isKind(PNK_ASSIGN), pn2->isOp(JSOP_NOP));
        if (!EmitTree(cx, bce, pn2))
            return false;
        if (Emit1(cx, bce, op) < 0)
            return false;
    } else if (pn->isDirectivePrologueMember()) {
        // Don't complain about directive prologue members; just don't emit
        // their code.
    } else {
        if (JSAtom* atom = pn->isStringExprStatement()) {
            // Warn if encountering a non-directive prologue member string
            // expression statement, that is inconsistent with the current
            // directive prologue. That is, a script *not* starting with
            // "use strict" should warn for any "use strict" statements seen
            // later in the script, because such statements are misleading.
            const char* directive = nullptr;
            if (atom == cx->names().useStrict) {
                if (!bce->sc->strict)
                    directive = js_useStrict_str;
            } else if (atom == cx->names().useAsm) {
                if (bce->sc->isFunctionBox()) {
                    JSFunction* fun = bce->sc->asFunctionBox()->function();
                    if (fun->isNative() && IsAsmJSModuleNative(fun->native()))
                        directive = js_useAsm_str;
                }
            }

            if (directive) {
                if (!bce->reportStrictWarning(pn2, JSMSG_CONTRARY_NONDIRECTIVE, directive))
                    return false;
            }
        } else {
            // Useless non-string expression: point the warning at its line.
            bce->current->currentLine = bce->parser->tokenStream.srcCoords.lineNum(pn2->pn_pos.begin);
            bce->current->lastColumn = 0;
            if (!bce->reportStrictWarning(pn2, JSMSG_USELESS_EXPR))
                return false;
        }
    }

    return true;
}
6011 static bool
6012 EmitDelete(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
6015 * Under ECMA 3, deleting a non-reference returns true -- but alas we
6016 * must evaluate the operand if it appears it might have side effects.
6018 ParseNode* pn2 = pn->pn_kid;
6019 switch (pn2->getKind()) {
6020 case PNK_NAME:
6021 if (!BindNameToSlot(cx, bce, pn2))
6022 return false;
6023 if (!EmitAtomOp(cx, pn2, pn2->getOp(), bce))
6024 return false;
6025 break;
6026 case PNK_DOT:
6028 JSOp delOp = bce->sc->strict ? JSOP_STRICTDELPROP : JSOP_DELPROP;
6029 if (!EmitPropOp(cx, pn2, delOp, bce))
6030 return false;
6031 break;
6033 case PNK_ELEM:
6035 JSOp delOp = bce->sc->strict ? JSOP_STRICTDELELEM : JSOP_DELELEM;
6036 if (!EmitElemOp(cx, pn2, delOp, bce))
6037 return false;
6038 break;
6040 default:
6043 * If useless, just emit JSOP_TRUE; otherwise convert delete foo()
6044 * to foo(), true (a comma expression).
6046 bool useful = false;
6047 if (!CheckSideEffects(cx, bce, pn2, &useful))
6048 return false;
6050 if (useful) {
6051 MOZ_ASSERT_IF(pn2->isKind(PNK_CALL), !(pn2->pn_xflags & PNX_SETCALL));
6052 if (!EmitTree(cx, bce, pn2))
6053 return false;
6054 if (Emit1(cx, bce, JSOP_POP) < 0)
6055 return false;
6058 if (Emit1(cx, bce, JSOP_TRUE) < 0)
6059 return false;
6063 return true;
6066 static bool
6067 EmitArray(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn, uint32_t count);
6069 static bool
6070 EmitSelfHostedCallFunction(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
6072 // Special-casing of callFunction to emit bytecode that directly
6073 // invokes the callee with the correct |this| object and arguments.
6074 // callFunction(fun, thisArg, arg0, arg1) thus becomes:
6075 // - emit lookup for fun
6076 // - emit lookup for thisArg
6077 // - emit lookups for arg0, arg1
6079 // argc is set to the amount of actually emitted args and the
6080 // emitting of args below is disabled by setting emitArgs to false.
6081 if (pn->pn_count < 3) {
6082 bce->reportError(pn, JSMSG_MORE_ARGS_NEEDED, "callFunction", "1", "s");
6083 return false;
6086 ParseNode* pn2 = pn->pn_head;
6087 ParseNode* funNode = pn2->pn_next;
6088 if (!EmitTree(cx, bce, funNode))
6089 return false;
6091 ParseNode* thisArg = funNode->pn_next;
6092 if (!EmitTree(cx, bce, thisArg))
6093 return false;
6095 bool oldEmittingForInit = bce->emittingForInit;
6096 bce->emittingForInit = false;
6098 for (ParseNode* argpn = thisArg->pn_next; argpn; argpn = argpn->pn_next) {
6099 if (!EmitTree(cx, bce, argpn))
6100 return false;
6103 bce->emittingForInit = oldEmittingForInit;
6105 uint32_t argc = pn->pn_count - 3;
6106 if (EmitCall(cx, bce, pn->getOp(), argc) < 0)
6107 return false;
6109 CheckTypeSet(cx, bce, pn->getOp());
6110 return true;
6113 static bool
6114 EmitSelfHostedResumeGenerator(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
6116 // Syntax: resumeGenerator(gen, value, 'next'|'throw'|'close')
6117 if (pn->pn_count != 4) {
6118 bce->reportError(pn, JSMSG_MORE_ARGS_NEEDED, "resumeGenerator", "1", "s");
6119 return false;
6122 ParseNode* funNode = pn->pn_head; // The resumeGenerator node.
6124 ParseNode* genNode = funNode->pn_next;
6125 if (!EmitTree(cx, bce, genNode))
6126 return false;
6128 ParseNode* valNode = genNode->pn_next;
6129 if (!EmitTree(cx, bce, valNode))
6130 return false;
6132 ParseNode* kindNode = valNode->pn_next;
6133 MOZ_ASSERT(kindNode->isKind(PNK_STRING));
6134 uint16_t operand = GeneratorObject::getResumeKind(cx, kindNode->pn_atom);
6135 MOZ_ASSERT(!kindNode->pn_next);
6137 if (EmitCall(cx, bce, JSOP_RESUME, operand) < 0)
6138 return false;
6140 return true;
6143 static bool
6144 EmitSelfHostedForceInterpreter(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
6146 if (Emit1(cx, bce, JSOP_FORCEINTERPRETER) < 0)
6147 return false;
6148 if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)
6149 return false;
6150 return true;
// Emit bytecode for a call, |new|, tagged template, or generator expression
// node (PNK_CALL / PNK_NEW / PNK_TAGGED_TEMPLATE / PNK_GENEXP).  The first
// child of |pn| is the callee expression; the remaining children are the
// arguments.
6153 static bool
6154 EmitCallOrNew(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
6156 bool callop = pn->isKind(PNK_CALL) || pn->isKind(PNK_TAGGED_TEMPLATE);
6158 * Emit callable invocation or operator new (constructor call) code.
6159 * First, emit code for the left operand to evaluate the callable or
6160 * constructable object expression.
6162 * For operator new, we emit JSOP_GETPROP instead of JSOP_CALLPROP, etc.
6163 * This is necessary to interpose the lambda-initialized method read
6164 * barrier -- see the code in jsinterp.cpp for JSOP_LAMBDA followed by
6165 * JSOP_{SET,INIT}PROP.
6167 * Then (or in a call case that has no explicit reference-base
6168 * object) we emit JSOP_UNDEFINED to produce the undefined |this|
6169 * value required for calls (which non-strict mode functions
6170 * will box into the global object).
// Argument count excludes the callee node itself.
6172 uint32_t argc = pn->pn_count - 1;
// The bytecode's argc immediate is bounded; report before emitting anything.
6174 if (argc >= ARGC_LIMIT) {
6175 bce->parser->tokenStream.reportError(callop
6176 ? JSMSG_TOO_MANY_FUN_ARGS
6177 : JSMSG_TOO_MANY_CON_ARGS);
6178 return false;
6181 ParseNode* pn2 = pn->pn_head;
// Spread call/new opcodes take no immediate operand (JOF_BYTE format);
// that distinguishes them from the argc-carrying call opcodes.
6182 bool spread = JOF_OPTYPE(pn->getOp()) == JOF_BYTE;
6183 switch (pn2->getKind()) {
6184 case PNK_NAME:
6185 if (bce->emitterMode == BytecodeEmitter::SelfHosting && !spread) {
6186 // We shouldn't see foo(bar) = x in self-hosted code.
6187 MOZ_ASSERT(!(pn->pn_xflags & PNX_SETCALL));
6189 // Calls to "forceInterpreter", "callFunction" or "resumeGenerator"
6190 // in self-hosted code generate inline bytecode.
6191 if (pn2->name() == cx->names().callFunction)
6192 return EmitSelfHostedCallFunction(cx, bce, pn);
6193 if (pn2->name() == cx->names().resumeGenerator)
6194 return EmitSelfHostedResumeGenerator(cx, bce, pn);
6195 if (pn2->name() == cx->names().forceInterpreter)
6196 return EmitSelfHostedForceInterpreter(cx, bce, pn);
6197 // Fall through.
6199 if (!EmitNameOp(cx, bce, pn2, callop))
6200 return false;
6201 break;
6202 case PNK_DOT:
6203 if (!EmitPropOp(cx, pn2, callop ? JSOP_CALLPROP : JSOP_GETPROP, bce))
6204 return false;
6205 break;
6206 case PNK_ELEM:
6207 if (!EmitElemOp(cx, pn2, callop ? JSOP_CALLELEM : JSOP_GETELEM, bce))
6208 return false;
// For a call, swap the two values CALLELEM left on the stack into the
// order the call opcode expects.
6209 if (callop) {
6210 if (Emit1(cx, bce, JSOP_SWAP) < 0)
6211 return false;
6213 break;
6214 case PNK_FUNCTION:
6216 * Top level lambdas which are immediately invoked should be
6217 * treated as only running once. Every time they execute we will
6218 * create new types and scripts for their contents, to increase
6219 * the quality of type information within them and enable more
6220 * backend optimizations. Note that this does not depend on the
6221 * lambda being invoked at most once (it may be named or be
6222 * accessed via foo.caller indirection), as multiple executions
6223 * will just cause the inner scripts to be repeatedly cloned.
6225 MOZ_ASSERT(!bce->emittingRunOnceLambda);
6226 if (bce->checkSingletonContext() || (!bce->isInLoop() && bce->isRunOnceLambda())) {
6227 bce->emittingRunOnceLambda = true;
6228 if (!EmitTree(cx, bce, pn2))
6229 return false;
6230 bce->emittingRunOnceLambda = false;
6231 } else {
6232 if (!EmitTree(cx, bce, pn2))
6233 return false;
6235 callop = false;
6236 break;
6237 default:
6238 if (!EmitTree(cx, bce, pn2))
6239 return false;
6240 callop = false; /* trigger JSOP_UNDEFINED after */
6241 break;
// No reference base: push the implicit |this| value now (JSOP_THIS for
// generator expressions, otherwise undefined).
6243 if (!callop) {
6244 JSOp thisop = pn->isKind(PNK_GENEXP) ? JSOP_THIS : JSOP_UNDEFINED;
6245 if (Emit1(cx, bce, thisop) < 0)
6246 return false;
6250 * Emit code for each argument in order, then emit the JSOP_*CALL or
6251 * JSOP_NEW bytecode with a two-byte immediate telling how many args
6252 * were pushed on the operand stack.
// Arguments are never a for-in init clause; clear the flag around them.
6254 bool oldEmittingForInit = bce->emittingForInit;
6255 bce->emittingForInit = false;
6256 if (!spread) {
6257 for (ParseNode* pn3 = pn2->pn_next; pn3; pn3 = pn3->pn_next) {
6258 if (!EmitTree(cx, bce, pn3))
6259 return false;
6261 } else {
// Spread calls collect all arguments into one array first.
6262 if (!EmitArray(cx, bce, pn2->pn_next, argc))
6263 return false;
6265 bce->emittingForInit = oldEmittingForInit;
6267 if (!spread) {
6268 if (EmitCall(cx, bce, pn->getOp(), argc, pn) < 0)
6269 return false;
6270 } else {
6271 if (Emit1(cx, bce, pn->getOp()) < 0)
6272 return false;
6274 CheckTypeSet(cx, bce, pn->getOp());
// For direct eval, record the current source line so the callee can be
// compiled with correct line numbers.
6275 if (pn->isOp(JSOP_EVAL) ||
6276 pn->isOp(JSOP_STRICTEVAL) ||
6277 pn->isOp(JSOP_SPREADEVAL) ||
6278 pn->isOp(JSOP_STRICTSPREADEVAL))
6280 uint32_t lineNum = bce->parser->tokenStream.srcCoords.lineNum(pn->pn_pos.begin);
6281 EMIT_UINT16_IMM_OP(JSOP_LINENO, lineNum);
// foo(...) = x: JSOP_SETCALL raises the appropriate runtime error.
6283 if (pn->pn_xflags & PNX_SETCALL) {
6284 if (Emit1(cx, bce, JSOP_SETCALL) < 0)
6285 return false;
6287 return true;
// Emit bytecode for short-circuiting || and && (PNK_OR / PNK_AND).  Jumps
// are first emitted as JSOP_BACKPATCH placeholders and later rewritten in
// place into the node's own opcode (JSOP_OR/JSOP_AND) with real offsets.
6290 static bool
6291 EmitLogical(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
6294 * JSOP_OR converts the operand on the stack to boolean, leaves the original
6295 * value on the stack and jumps if true; otherwise it falls into the next
6296 * bytecode, which pops the left operand and then evaluates the right operand.
6297 * The jump goes around the right operand evaluation.
6299 * JSOP_AND converts the operand on the stack to boolean and jumps if false;
6300 * otherwise it falls into the right operand's bytecode.
// Simple two-operand case: one placeholder jump, patched after the right
// operand is emitted.
6303 if (pn->isArity(PN_BINARY)) {
6304 if (!EmitTree(cx, bce, pn->pn_left))
6305 return false;
6306 ptrdiff_t top = EmitJump(cx, bce, JSOP_BACKPATCH, 0);
6307 if (top < 0)
6308 return false;
6309 if (Emit1(cx, bce, JSOP_POP) < 0)
6310 return false;
6311 if (!EmitTree(cx, bce, pn->pn_right))
6312 return false;
6313 ptrdiff_t off = bce->offset();
6314 jsbytecode* pc = bce->code(top);
6315 SET_JUMP_OFFSET(pc, off - top);
// Replace the placeholder opcode with the real JSOP_OR/JSOP_AND.
6316 *pc = pn->getOp();
6317 return true;
6320 MOZ_ASSERT(pn->isArity(PN_LIST));
6321 MOZ_ASSERT(pn->pn_head->pn_next->pn_next);
6323 /* Left-associative operator chain: avoid too much recursion. */
6324 ParseNode* pn2 = pn->pn_head;
6325 if (!EmitTree(cx, bce, pn2))
6326 return false;
6327 ptrdiff_t top = EmitJump(cx, bce, JSOP_BACKPATCH, 0);
6328 if (top < 0)
6329 return false;
6330 if (Emit1(cx, bce, JSOP_POP) < 0)
6331 return false;
6333 /* Emit nodes between the head and the tail. */
// The placeholder jumps form a chain: each one's offset field temporarily
// stores the delta to the next placeholder, threaded via |jmp|.
6334 ptrdiff_t jmp = top;
6335 while ((pn2 = pn2->pn_next)->pn_next) {
6336 if (!EmitTree(cx, bce, pn2))
6337 return false;
6338 ptrdiff_t off = EmitJump(cx, bce, JSOP_BACKPATCH, 0);
6339 if (off < 0)
6340 return false;
6341 if (Emit1(cx, bce, JSOP_POP) < 0)
6342 return false;
6343 SET_JUMP_OFFSET(bce->code(jmp), off - jmp);
6344 jmp = off;
6346 if (!EmitTree(cx, bce, pn2))
6347 return false;
// Walk the chain again, rewriting every placeholder into the real opcode
// with a jump to the common join point at |off|.
6349 pn2 = pn->pn_head;
6350 ptrdiff_t off = bce->offset();
6351 do {
6352 jsbytecode* pc = bce->code(top);
6353 ptrdiff_t tmp = GET_JUMP_OFFSET(pc);
6354 SET_JUMP_OFFSET(pc, off - top);
6355 *pc = pn->getOp();
6356 top += tmp;
6357 } while ((pn2 = pn2->pn_next)->pn_next);
6359 return true;
6363 * Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See
6364 * the comment on EmitSwitch.
6366 MOZ_NEVER_INLINE static bool
6367 EmitIncOrDec(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
6369 /* Emit lvalue-specialized code for ++/-- operators. */
// Dispatch on the kind of lvalue being incremented/decremented.
6370 ParseNode* pn2 = pn->pn_kid;
6371 switch (pn2->getKind()) {
6372 case PNK_DOT:
6373 if (!EmitPropIncDec(cx, pn, bce))
6374 return false;
6375 break;
6376 case PNK_ELEM:
6377 if (!EmitElemIncDec(cx, pn, bce))
6378 return false;
6379 break;
6380 case PNK_CALL:
// ++f() is an error case; the call is marked PNX_SETCALL and its
// emission produces the runtime error.
6381 MOZ_ASSERT(pn2->pn_xflags & PNX_SETCALL);
6382 if (!EmitTree(cx, bce, pn2))
6383 return false;
6384 break;
6385 default:
6386 MOZ_ASSERT(pn2->isKind(PNK_NAME));
6387 pn2->setOp(JSOP_SETNAME);
6388 if (!BindNameToSlot(cx, bce, pn2))
6389 return false;
6390 JSOp op = pn2->getOp();
// |maySet| is true when BindNameToSlot resolved the name to a
// writable binding; read-only bindings take the load-only paths.
6391 bool maySet;
6392 switch (op) {
6393 case JSOP_SETLOCAL:
6394 case JSOP_SETARG:
6395 case JSOP_SETALIASEDVAR:
6396 case JSOP_SETNAME:
6397 case JSOP_STRICTSETNAME:
6398 case JSOP_SETGNAME:
6399 case JSOP_STRICTSETGNAME:
6400 maySet = true;
6401 break;
6402 default:
6403 maySet = false;
6405 if (op == JSOP_CALLEE) {
6406 if (Emit1(cx, bce, op) < 0)
6407 return false;
6408 } else if (!pn2->pn_cookie.isFree()) {
// Name bound to a known local/arg slot.
6409 if (maySet) {
6410 if (!EmitVarIncDec(cx, pn, bce))
6411 return false;
6412 } else {
6413 if (!EmitVarOp(cx, pn2, op, bce))
6414 return false;
6416 } else {
// Name accessed by atom (scope lookup at runtime).
6417 MOZ_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
6418 if (maySet) {
6419 if (!EmitNameIncDec(cx, pn, bce))
6420 return false;
6421 } else {
6422 if (!EmitAtomOp(cx, pn2, op, bce))
6423 return false;
6425 break;
// A const operand cannot be stored to; still produce the numeric result:
// JSOP_POS converts to number, and for prefix forms the +/- 1 is applied
// so the expression value is correct.
6427 if (pn2->isConst()) {
6428 if (Emit1(cx, bce, JSOP_POS) < 0)
6429 return false;
6430 bool post;
6431 JSOp binop = GetIncDecInfo(pn->getKind(), &post);
6432 if (!post) {
6433 if (Emit1(cx, bce, JSOP_ONE) < 0)
6434 return false;
6435 if (Emit1(cx, bce, binop) < 0)
6436 return false;
6440 return true;
6444 * Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See
6445 * the comment on EmitSwitch.
6447 MOZ_NEVER_INLINE static bool
6448 EmitLabeledStatement(ExclusiveContext* cx, BytecodeEmitter* bce, const LabeledStatement* pn)
6451 * Emit a JSOP_LABEL instruction. The argument is the offset to the statement
6452 * following the labeled statement.
// Intern the label atom so the emitted code can reference it.
6454 jsatomid index;
6455 if (!bce->makeAtomIndex(pn->label(), &index))
6456 return false;
// Placeholder offset; patched below once the statement's extent is known.
6458 ptrdiff_t top = EmitJump(cx, bce, JSOP_LABEL, 0);
6459 if (top < 0)
6460 return false;
6462 /* Emit code for the labeled statement. */
// Push an STMT_LABEL entry so break/continue targeting this label resolve.
6463 StmtInfoBCE stmtInfo(cx);
6464 PushStatementBCE(bce, &stmtInfo, STMT_LABEL, bce->offset());
6465 stmtInfo.label = pn->label();
6466 if (!EmitTree(cx, bce, pn->statement()))
6467 return false;
6468 if (!PopStatementBCE(cx, bce))
6469 return false;
6471 /* Patch the JSOP_LABEL offset. */
6472 SetJumpOffsetAt(bce, top);
6473 return true;
6476 static bool
6477 EmitSyntheticStatements(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn, ptrdiff_t top)
6479 MOZ_ASSERT(pn->isArity(PN_LIST));
6480 StmtInfoBCE stmtInfo(cx);
6481 PushStatementBCE(bce, &stmtInfo, STMT_SEQ, top);
6482 ParseNode* pn2 = pn->pn_head;
6483 if (pn->pn_xflags & PNX_DESTRUCT)
6484 pn2 = pn2->pn_next;
6485 for (; pn2; pn2 = pn2->pn_next) {
6486 if (!EmitTree(cx, bce, pn2))
6487 return false;
6489 return PopStatementBCE(cx, bce);
// Emit bytecode for the ternary conditional operator |c ? t : e|.
6492 static bool
6493 EmitConditionalExpression(ExclusiveContext* cx, BytecodeEmitter* bce, ConditionalExpression& conditional)
6495 /* Emit the condition, then branch if false to the else part. */
6496 if (!EmitTree(cx, bce, &conditional.condition()))
6497 return false;
// SRC_COND note lets the decompiler/debugger recognize the ternary shape.
6498 ptrdiff_t noteIndex = NewSrcNote(cx, bce, SRC_COND);
6499 if (noteIndex < 0)
6500 return false;
6501 ptrdiff_t beq = EmitJump(cx, bce, JSOP_IFEQ, 0);
6502 if (beq < 0 || !EmitTree(cx, bce, &conditional.thenExpression()))
6503 return false;
6505 /* Jump around else, fixup the branch, emit else, fixup jump. */
6506 ptrdiff_t jmp = EmitJump(cx, bce, JSOP_GOTO, 0);
6507 if (jmp < 0)
6508 return false;
6509 SetJumpOffsetAt(bce, beq);
6512 * Because each branch pushes a single value, but our stack budgeting
6513 * analysis ignores branches, we now have to adjust bce->stackDepth to
6514 * ignore the value pushed by the first branch. Execution will follow
6515 * only one path, so we must decrement bce->stackDepth.
6517 * Failing to do this will foil code, such as let expression and block
6518 * code generation, which must use the stack depth to compute local
6519 * stack indexes correctly.
6521 MOZ_ASSERT(bce->stackDepth > 0);
6522 bce->stackDepth--;
6523 if (!EmitTree(cx, bce, &conditional.elseExpression()))
6524 return false;
6525 SetJumpOffsetAt(bce, jmp);
// Record the then-branch length in the SRC_COND note.
6526 return SetSrcNoteOffset(cx, bce, noteIndex, 0, jmp - beq);
6530 * Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See
6531 * the comment on EmitSwitch.
6533 MOZ_NEVER_INLINE static bool
6534 EmitObject(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
6536 if (!(pn->pn_xflags & PNX_NONCONST) && pn->pn_head && bce->checkSingletonContext())
6537 return EmitSingletonInitialiser(cx, bce, pn);
6540 * Emit code for {p:a, '%q':b, 2:c} that is equivalent to constructing
6541 * a new object and defining (in source order) each property on the object
6542 * (or mutating the object's [[Prototype]], in the case of __proto__).
6544 ptrdiff_t offset = bce->offset();
6545 if (!EmitNewInit(cx, bce, JSProto_Object))
6546 return false;
6549 * Try to construct the shape of the object as we go, so we can emit a
6550 * JSOP_NEWOBJECT with the final shape instead.
6552 RootedPlainObject obj(cx);
6553 if (bce->script->compileAndGo()) {
6554 gc::AllocKind kind = GuessObjectGCKind(pn->pn_count);
6555 obj = NewBuiltinClassInstance<PlainObject>(cx, kind, TenuredObject);
6556 if (!obj)
6557 return false;
6560 for (ParseNode* propdef = pn->pn_head; propdef; propdef = propdef->pn_next) {
6561 if (!UpdateSourceCoordNotes(cx, bce, propdef->pn_pos.begin))
6562 return false;
6564 // Handle __proto__: v specially because *only* this form, and no other
6565 // involving "__proto__", performs [[Prototype]] mutation.
6566 if (propdef->isKind(PNK_MUTATEPROTO)) {
6567 if (!EmitTree(cx, bce, propdef->pn_kid))
6568 return false;
6569 obj = nullptr;
6570 if (!Emit1(cx, bce, JSOP_MUTATEPROTO))
6571 return false;
6572 continue;
6575 /* Emit an index for t[2] for later consumption by JSOP_INITELEM. */
6576 ParseNode* key = propdef->pn_left;
6577 bool isIndex = false;
6578 if (key->isKind(PNK_NUMBER)) {
6579 if (!EmitNumberOp(cx, key->pn_dval, bce))
6580 return false;
6581 isIndex = true;
6582 } else if (key->isKind(PNK_NAME) || key->isKind(PNK_STRING)) {
6583 // The parser already checked for atoms representing indexes and
6584 // used PNK_NUMBER instead, but also watch for ids which TI treats
6585 // as indexes for simpliciation of downstream analysis.
6586 jsid id = NameToId(key->pn_atom->asPropertyName());
6587 if (id != types::IdToTypeId(id)) {
6588 if (!EmitTree(cx, bce, key))
6589 return false;
6590 isIndex = true;
6592 } else {
6593 MOZ_ASSERT(key->isKind(PNK_COMPUTED_NAME));
6594 if (!EmitTree(cx, bce, key->pn_kid))
6595 return false;
6596 isIndex = true;
6599 /* Emit code for the property initializer. */
6600 if (!EmitTree(cx, bce, propdef->pn_right))
6601 return false;
6603 JSOp op = propdef->getOp();
6604 MOZ_ASSERT(op == JSOP_INITPROP ||
6605 op == JSOP_INITPROP_GETTER ||
6606 op == JSOP_INITPROP_SETTER);
6608 if (op == JSOP_INITPROP_GETTER || op == JSOP_INITPROP_SETTER)
6609 obj = nullptr;
6611 if (isIndex) {
6612 obj = nullptr;
6613 switch (op) {
6614 case JSOP_INITPROP: op = JSOP_INITELEM; break;
6615 case JSOP_INITPROP_GETTER: op = JSOP_INITELEM_GETTER; break;
6616 case JSOP_INITPROP_SETTER: op = JSOP_INITELEM_SETTER; break;
6617 default: MOZ_CRASH("Invalid op");
6619 if (Emit1(cx, bce, op) < 0)
6620 return false;
6621 } else {
6622 MOZ_ASSERT(key->isKind(PNK_NAME) || key->isKind(PNK_STRING));
6624 jsatomid index;
6625 if (!bce->makeAtomIndex(key->pn_atom, &index))
6626 return false;
6628 if (obj) {
6629 MOZ_ASSERT(!obj->inDictionaryMode());
6630 Rooted<jsid> id(cx, AtomToId(key->pn_atom));
6631 RootedValue undefinedValue(cx, UndefinedValue());
6632 if (!DefineNativeProperty(cx, obj, id, undefinedValue, nullptr, nullptr,
6633 JSPROP_ENUMERATE))
6635 return false;
6637 if (obj->inDictionaryMode())
6638 obj = nullptr;
6641 if (!EmitIndex32(cx, op, index, bce))
6642 return false;
6646 if (obj) {
6648 * The object survived and has a predictable shape: update the original
6649 * bytecode.
6651 ObjectBox* objbox = bce->parser->newObjectBox(obj);
6652 if (!objbox)
6653 return false;
6655 static_assert(JSOP_NEWINIT_LENGTH == JSOP_NEWOBJECT_LENGTH,
6656 "newinit and newobject must have equal length to edit in-place");
6658 uint32_t index = bce->objectList.add(objbox);
6659 jsbytecode* code = bce->code(offset);
6660 code[0] = JSOP_NEWOBJECT;
6661 code[1] = jsbytecode(index >> 24);
6662 code[2] = jsbytecode(index >> 16);
6663 code[3] = jsbytecode(index >> 8);
6664 code[4] = jsbytecode(index);
6667 return true;
6670 static bool
6671 EmitArrayComp(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
6673 if (!EmitNewInit(cx, bce, JSProto_Array))
6674 return false;
6677 * Pass the new array's stack index to the PNK_ARRAYPUSH case via
6678 * bce->arrayCompDepth, then simply traverse the PNK_FOR node and
6679 * its kids under pn2 to generate this comprehension.
6681 MOZ_ASSERT(bce->stackDepth > 0);
6682 uint32_t saveDepth = bce->arrayCompDepth;
6683 bce->arrayCompDepth = (uint32_t) (bce->stackDepth - 1);
6684 if (!EmitTree(cx, bce, pn->pn_head))
6685 return false;
6686 bce->arrayCompDepth = saveDepth;
6688 return true;
6692 * EmitSpread expects the current index (I) of the array, the array itself and the iterator to be
6693 * on the stack in that order (iterator on the bottom).
6694 * It will pop the iterator and I, then iterate over the iterator by calling |.next()|
6695 * and put the results into the I-th element of array with incrementing I, then
6696 * push the result I (it will be original I + iteration count).
6697 * The stack after iteration will look like |ARRAY INDEX|.
// Emit the spread-iteration loop; see the comment above for the expected
// stack layout (ITER ARRAY INDEX on entry, ARRAY INDEX on exit).  The loop
// itself is shared with for-of emission, parameterized by STMT_SPREAD.
6699 static bool
6700 EmitSpread(ExclusiveContext* cx, BytecodeEmitter* bce)
6702 return EmitForOf(cx, bce, STMT_SPREAD, nullptr, -1);
6705 static bool
6706 EmitArray(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn, uint32_t count)
6709 * Emit code for [a, b, c] that is equivalent to constructing a new
6710 * array and in source order evaluating each element value and adding
6711 * it to the array, without invoking latent setters. We use the
6712 * JSOP_NEWINIT and JSOP_INITELEM_ARRAY bytecodes to ignore setters and
6713 * to avoid dup'ing and popping the array as each element is added, as
6714 * JSOP_SETELEM/JSOP_SETPROP would do.
// Count spread elements so the preallocated length can exclude them.
6717 int32_t nspread = 0;
6718 for (ParseNode* elt = pn; elt; elt = elt->pn_next) {
6719 if (elt->isKind(PNK_SPREAD))
6720 nspread++;
6723 ptrdiff_t off = EmitN(cx, bce, JSOP_NEWARRAY, 3); // ARRAY
6724 if (off < 0)
6725 return false;
6726 CheckTypeSet(cx, bce, JSOP_NEWARRAY);
6727 jsbytecode* pc = bce->code(off);
6729 // For arrays with spread, this is a very pessimistic allocation, the
6730 // minimum possible final size.
6731 SET_UINT24(pc, count - nspread);
// Before the first spread, elements go to fixed indexes via
// JSOP_INITELEM_ARRAY; after it, a live INDEX value is kept on the stack
// and elements are appended with JSOP_INITELEM_INC.
6733 ParseNode* pn2 = pn;
6734 jsatomid atomIndex;
6735 bool afterSpread = false;
6736 for (atomIndex = 0; pn2; atomIndex++, pn2 = pn2->pn_next) {
6737 if (!afterSpread && pn2->isKind(PNK_SPREAD)) {
6738 afterSpread = true;
6739 if (!EmitNumberOp(cx, atomIndex, bce)) // ARRAY INDEX
6740 return false;
6742 if (!UpdateSourceCoordNotes(cx, bce, pn2->pn_pos.begin))
6743 return false;
6744 if (pn2->isKind(PNK_ELISION)) {
6745 if (Emit1(cx, bce, JSOP_HOLE) < 0)
6746 return false;
6747 } else {
6748 ParseNode* expr = pn2->isKind(PNK_SPREAD) ? pn2->pn_kid : pn2;
6749 if (!EmitTree(cx, bce, expr)) // ARRAY INDEX? VALUE
6750 return false;
6752 if (pn2->isKind(PNK_SPREAD)) {
// Rearrange the stack into the ITER ARRAY INDEX order EmitSpread expects.
6753 if (!EmitIterator(cx, bce)) // ARRAY INDEX ITER
6754 return false;
6755 if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)2) < 0) // INDEX ITER ARRAY
6756 return false;
6757 if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)2) < 0) // ITER ARRAY INDEX
6758 return false;
6759 if (!EmitSpread(cx, bce)) // ARRAY INDEX
6760 return false;
6761 } else if (afterSpread) {
6762 if (Emit1(cx, bce, JSOP_INITELEM_INC) < 0)
6763 return false;
6764 } else {
6765 off = EmitN(cx, bce, JSOP_INITELEM_ARRAY, 3);
6766 if (off < 0)
6767 return false;
6768 SET_UINT24(bce->code(off), atomIndex);
6771 MOZ_ASSERT(atomIndex == count);
// Drop the running INDEX left behind by the spread path.
6772 if (afterSpread) {
6773 if (Emit1(cx, bce, JSOP_POP) < 0) // ARRAY
6774 return false;
6776 return true;
6779 static bool
6780 EmitUnary(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
6782 if (!UpdateSourceCoordNotes(cx, bce, pn->pn_pos.begin))
6783 return false;
6784 /* Unary op, including unary +/-. */
6785 JSOp op = pn->getOp();
6786 ParseNode* pn2 = pn->pn_kid;
6788 if (op == JSOP_TYPEOF && !pn2->isKind(PNK_NAME))
6789 op = JSOP_TYPEOFEXPR;
6791 bool oldEmittingForInit = bce->emittingForInit;
6792 bce->emittingForInit = false;
6793 if (!EmitTree(cx, bce, pn2))
6794 return false;
6796 bce->emittingForInit = oldEmittingForInit;
6797 return Emit1(cx, bce, op) >= 0;
// Emit default-parameter initialization for a function's PNK_ARGSBODY list:
// for each defaulted argument, assign its default expression only when the
// incoming value is strictly undefined, i.e.
//   if (arg === undefined) arg = <default-expr>;
6800 static bool
6801 EmitDefaults(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
6803 MOZ_ASSERT(pn->isKind(PNK_ARGSBODY));
6805 ParseNode* arg, *pnlast = pn->last();
6806 for (arg = pn->pn_head; arg != pnlast; arg = arg->pn_next) {
6807 if (!(arg->pn_dflags & PND_DEFAULT))
6808 continue;
6809 if (!BindNameToSlot(cx, bce, arg))
6810 return false;
// Test: arg === undefined.
6811 if (!EmitVarOp(cx, arg, JSOP_GETARG, bce))
6812 return false;
6813 if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)
6814 return false;
6815 if (Emit1(cx, bce, JSOP_STRICTEQ) < 0)
6816 return false;
6817 // Emit source note to enable ion compilation.
6818 if (NewSrcNote(cx, bce, SRC_IF) < 0)
6819 return false;
// Skip the assignment when the argument was supplied.
6820 ptrdiff_t jump = EmitJump(cx, bce, JSOP_IFEQ, 0);
6821 if (jump < 0)
6822 return false;
6823 if (!EmitTree(cx, bce, arg->expr()))
6824 return false;
6825 if (!EmitVarOp(cx, arg, JSOP_SETARG, bce))
6826 return false;
6827 if (Emit1(cx, bce, JSOP_POP) < 0)
6828 return false;
// Patch the IFEQ to land here, after the assignment.
6829 SET_JUMP_OFFSET(bce->code(jump), bce->offset() - jump)
6832 return true;
// Main bytecode-emission dispatcher: emit code for the parse (sub)tree
// rooted at |pn|.  Most node kinds delegate to a specialized Emit* helper;
// a few simple kinds are handled inline.  Returns false on error.
6835 bool
6836 frontend::EmitTree(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
6838 JS_CHECK_RECURSION(cx, return false);
6840 EmitLevelManager elm(bce);
6842 bool ok = true;
6843 ptrdiff_t top = bce->offset();
6844 pn->pn_offset = top;
6846 /* Emit notes to tell the current bytecode's source line number. */
6847 if (!UpdateLineNumberNotes(cx, bce, pn->pn_pos.begin))
6848 return false;
6850 switch (pn->getKind()) {
6851 case PNK_FUNCTION:
6852 ok = EmitFunc(cx, bce, pn);
6853 break;
6855 case PNK_ARGSBODY:
6857 RootedFunction fun(cx, bce->sc->asFunctionBox()->function());
6858 ParseNode* pnlast = pn->last();
6860 // Carefully emit everything in the right order:
6861 // 1. Destructuring
6862 // 2. Defaults
6863 // 3. Functions
6864 ParseNode* pnchild = pnlast->pn_head;
6865 if (pnlast->pn_xflags & PNX_DESTRUCT) {
6866 // Assign the destructuring arguments before defining any functions,
6867 // see bug 419662.
6868 MOZ_ASSERT(pnchild->isKind(PNK_SEMI));
6869 MOZ_ASSERT(pnchild->pn_kid->isKind(PNK_VAR) || pnchild->pn_kid->isKind(PNK_GLOBALCONST));
6870 if (!EmitTree(cx, bce, pnchild))
6871 return false;
6872 pnchild = pnchild->pn_next;
6874 bool hasDefaults = bce->sc->asFunctionBox()->hasDefaults();
6875 if (hasDefaults) {
6876 ParseNode* rest = nullptr;
6877 bool restIsDefn = false;
6878 if (fun->hasRest()) {
6879 MOZ_ASSERT(!bce->sc->asFunctionBox()->argumentsHasLocalBinding());
6881 // Defaults with a rest parameter need special handling. The
6882 // rest parameter needs to be undefined while defaults are being
6883 // processed. To do this, we create the rest argument and let it
6884 // sit on the stack while processing defaults. The rest
6885 // parameter's slot is set to undefined for the course of
6886 // default processing.
// The rest parameter node is the last argument, just before the body.
6887 rest = pn->pn_head;
6888 while (rest->pn_next != pnlast)
6889 rest = rest->pn_next;
6890 restIsDefn = rest->isDefn();
6891 if (Emit1(cx, bce, JSOP_REST) < 0)
6892 return false;
6893 CheckTypeSet(cx, bce, JSOP_REST);
6895 // Only set the rest parameter if it's not aliased by a nested
6896 // function in the body.
6897 if (restIsDefn) {
6898 if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)
6899 return false;
6900 if (!BindNameToSlot(cx, bce, rest))
6901 return false;
6902 if (!EmitVarOp(cx, rest, JSOP_SETARG, bce))
6903 return false;
6904 if (Emit1(cx, bce, JSOP_POP) < 0)
6905 return false;
6908 if (!EmitDefaults(cx, bce, pn))
6909 return false;
// Now store the rest array (still on the stack) into its slot.
6910 if (fun->hasRest()) {
6911 if (restIsDefn && !EmitVarOp(cx, rest, JSOP_SETARG, bce))
6912 return false;
6913 if (Emit1(cx, bce, JSOP_POP) < 0)
6914 return false;
6917 for (ParseNode* pn2 = pn->pn_head; pn2 != pnlast; pn2 = pn2->pn_next) {
6918 // Only bind the parameter if it's not aliased by a nested function
6919 // in the body.
6920 if (!pn2->isDefn())
6921 continue;
6922 if (!BindNameToSlot(cx, bce, pn2))
6923 return false;
6924 if (pn2->pn_next == pnlast && fun->hasRest() && !hasDefaults) {
6925 // Fill rest parameter. We handled the case with defaults above.
6926 MOZ_ASSERT(!bce->sc->asFunctionBox()->argumentsHasLocalBinding());
6927 bce->switchToProlog();
6928 if (Emit1(cx, bce, JSOP_REST) < 0)
6929 return false;
6930 CheckTypeSet(cx, bce, JSOP_REST);
6931 if (!EmitVarOp(cx, pn2, JSOP_SETARG, bce))
6932 return false;
6933 if (Emit1(cx, bce, JSOP_POP) < 0)
6934 return false;
6935 bce->switchToMain();
6938 if (pnlast->pn_xflags & PNX_FUNCDEFS) {
6939 // This block contains top-level function definitions. To ensure
6940 // that we emit the bytecode defining them before the rest of code
6941 // in the block we use a separate pass over functions. During the
6942 // main pass later the emitter will add JSOP_NOP with source notes
6943 // for the function to preserve the original functions position
6944 // when decompiling.
6946 // Currently this is used only for functions, as compile-as-we go
6947 // mode for scripts does not allow separate emitter passes.
6948 for (ParseNode* pn2 = pnchild; pn2; pn2 = pn2->pn_next) {
6949 if (pn2->isKind(PNK_FUNCTION) && pn2->functionIsHoisted()) {
6950 if (!EmitTree(cx, bce, pn2))
6951 return false;
6955 ok = EmitTree(cx, bce, pnlast);
6956 break;
6959 case PNK_IF:
6960 ok = EmitIf(cx, bce, pn);
6961 break;
6963 case PNK_SWITCH:
6964 ok = EmitSwitch(cx, bce, pn);
6965 break;
6967 case PNK_WHILE:
6968 ok = EmitWhile(cx, bce, pn, top);
6969 break;
6971 case PNK_DOWHILE:
6972 ok = EmitDo(cx, bce, pn);
6973 break;
6975 case PNK_FOR:
6976 ok = EmitFor(cx, bce, pn, top);
6977 break;
6979 case PNK_BREAK:
6980 ok = EmitBreak(cx, bce, pn->as<BreakStatement>().label());
6981 break;
6983 case PNK_CONTINUE:
6984 ok = EmitContinue(cx, bce, pn->as<ContinueStatement>().label());
6985 break;
6987 case PNK_WITH:
6988 ok = EmitWith(cx, bce, pn);
6989 break;
6991 case PNK_TRY:
6992 if (!EmitTry(cx, bce, pn))
6993 return false;
6994 break;
6996 case PNK_CATCH:
6997 if (!EmitCatch(cx, bce, pn))
6998 return false;
6999 break;
7001 case PNK_VAR:
7002 case PNK_GLOBALCONST:
7003 if (!EmitVariables(cx, bce, pn, InitializeVars))
7004 return false;
7005 break;
7007 case PNK_RETURN:
7008 ok = EmitReturn(cx, bce, pn);
7009 break;
7011 case PNK_YIELD_STAR:
7012 ok = EmitYieldStar(cx, bce, pn->pn_left, pn->pn_right);
7013 break;
7015 case PNK_GENERATOR:
7016 if (Emit1(cx, bce, JSOP_GENERATOR) < 0)
7017 return false;
7018 break;
7020 case PNK_YIELD:
7021 ok = EmitYield(cx, bce, pn);
7022 break;
7024 case PNK_STATEMENTLIST:
7025 ok = EmitStatementList(cx, bce, pn, top);
7026 break;
7028 case PNK_SEQ:
7029 ok = EmitSyntheticStatements(cx, bce, pn, top);
7030 break;
7032 case PNK_SEMI:
7033 ok = EmitStatement(cx, bce, pn);
7034 break;
7036 case PNK_LABEL:
7037 ok = EmitLabeledStatement(cx, bce, &pn->as<LabeledStatement>());
7038 break;
7040 case PNK_COMMA:
// Comma expression: evaluate each operand, popping every result but the
// last, which is left on the stack as the expression's value.
7042 for (ParseNode* pn2 = pn->pn_head; ; pn2 = pn2->pn_next) {
7043 if (!UpdateSourceCoordNotes(cx, bce, pn2->pn_pos.begin))
7044 return false;
7045 if (!EmitTree(cx, bce, pn2))
7046 return false;
7047 if (!pn2->pn_next)
7048 break;
7049 if (Emit1(cx, bce, JSOP_POP) < 0)
7050 return false;
7052 break;
7055 case PNK_ASSIGN:
7056 case PNK_ADDASSIGN:
7057 case PNK_SUBASSIGN:
7058 case PNK_BITORASSIGN:
7059 case PNK_BITXORASSIGN:
7060 case PNK_BITANDASSIGN:
7061 case PNK_LSHASSIGN:
7062 case PNK_RSHASSIGN:
7063 case PNK_URSHASSIGN:
7064 case PNK_MULASSIGN:
7065 case PNK_DIVASSIGN:
7066 case PNK_MODASSIGN:
7067 if (!EmitAssignment(cx, bce, pn->pn_left, pn->getOp(), pn->pn_right))
7068 return false;
7069 break;
7071 case PNK_CONDITIONAL:
7072 ok = EmitConditionalExpression(cx, bce, pn->as<ConditionalExpression>());
7073 break;
7075 case PNK_OR:
7076 case PNK_AND:
7077 ok = EmitLogical(cx, bce, pn);
7078 break;
7080 case PNK_ADD:
7081 case PNK_SUB:
7082 case PNK_BITOR:
7083 case PNK_BITXOR:
7084 case PNK_BITAND:
7085 case PNK_STRICTEQ:
7086 case PNK_EQ:
7087 case PNK_STRICTNE:
7088 case PNK_NE:
7089 case PNK_LT:
7090 case PNK_LE:
7091 case PNK_GT:
7092 case PNK_GE:
7093 case PNK_IN:
7094 case PNK_INSTANCEOF:
7095 case PNK_LSH:
7096 case PNK_RSH:
7097 case PNK_URSH:
7098 case PNK_STAR:
7099 case PNK_DIV:
7100 case PNK_MOD:
7101 if (pn->isArity(PN_LIST)) {
7102 /* Left-associative operator chain: avoid too much recursion. */
7103 ParseNode* pn2 = pn->pn_head;
7104 if (!EmitTree(cx, bce, pn2))
7105 return false;
7106 JSOp op = pn->getOp();
7107 while ((pn2 = pn2->pn_next) != nullptr) {
7108 if (!EmitTree(cx, bce, pn2))
7109 return false;
7110 if (Emit1(cx, bce, op) < 0)
7111 return false;
7113 } else {
7114 /* Binary operators that evaluate both operands unconditionally. */
7115 if (!EmitTree(cx, bce, pn->pn_left))
7116 return false;
7117 if (!EmitTree(cx, bce, pn->pn_right))
7118 return false;
7119 if (Emit1(cx, bce, pn->getOp()) < 0)
7120 return false;
7122 break;
7124 case PNK_THROW:
7125 case PNK_TYPEOF:
7126 case PNK_VOID:
7127 case PNK_NOT:
7128 case PNK_BITNOT:
7129 case PNK_POS:
7130 case PNK_NEG:
7131 ok = EmitUnary(cx, bce, pn);
7132 break;
7134 case PNK_PREINCREMENT:
7135 case PNK_PREDECREMENT:
7136 case PNK_POSTINCREMENT:
7137 case PNK_POSTDECREMENT:
7138 ok = EmitIncOrDec(cx, bce, pn);
7139 break;
7141 case PNK_DELETE:
7142 ok = EmitDelete(cx, bce, pn);
7143 break;
7145 case PNK_DOT:
7146 ok = EmitPropOp(cx, pn, JSOP_GETPROP, bce);
7147 break;
7149 case PNK_ELEM:
7150 ok = EmitElemOp(cx, pn, JSOP_GETELEM, bce);
7151 break;
7153 case PNK_NEW:
7154 case PNK_TAGGED_TEMPLATE:
7155 case PNK_CALL:
7156 case PNK_GENEXP:
7157 ok = EmitCallOrNew(cx, bce, pn);
7158 break;
7160 case PNK_LEXICALSCOPE:
7161 ok = EmitLexicalScope(cx, bce, pn);
7162 break;
7164 case PNK_LET:
7165 case PNK_CONST:
7166 MOZ_ASSERT_IF(pn->isKind(PNK_CONST), !pn->isArity(PN_BINARY));
7167 ok = pn->isArity(PN_BINARY)
7168 ? EmitLet(cx, bce, pn)
7169 : EmitVariables(cx, bce, pn, InitializeVars);
7170 break;
7172 case PNK_IMPORT:
7173 case PNK_EXPORT:
7174 case PNK_EXPORT_FROM:
7175 // TODO: Implement emitter support for modules
7176 bce->reportError(nullptr, JSMSG_MODULES_NOT_IMPLEMENTED);
7177 return false;
7179 case PNK_ARRAYPUSH: {
7181 * The array object's stack index is in bce->arrayCompDepth. See below
7182 * under the array initialiser code generator for array comprehension
7183 * special casing. Note that the array object is a pure stack value,
7184 * unaliased by blocks, so we can EmitUnaliasedVarOp.
7186 if (!EmitTree(cx, bce, pn->pn_kid))
7187 return false;
7188 if (!EmitDupAt(cx, bce, bce->arrayCompDepth))
7189 return false;
7190 if (Emit1(cx, bce, JSOP_ARRAYPUSH) < 0)
7191 return false;
7192 break;
7195 case PNK_CALLSITEOBJ:
7196 ok = EmitCallSiteObject(cx, bce, pn);
7197 break;
7199 case PNK_ARRAY:
7200 if (!(pn->pn_xflags & PNX_NONCONST) && pn->pn_head) {
7201 if (bce->checkSingletonContext()) {
7202 // Bake in the object entirely if it will only be created once.
7203 ok = EmitSingletonInitialiser(cx, bce, pn);
7204 break;
7207 // If the array consists entirely of primitive values, make a
7208 // template object with copy on write elements that can be reused
7209 // every time the initializer executes.
7210 if (bce->emitterMode != BytecodeEmitter::SelfHosting && pn->pn_count != 0) {
7211 RootedValue value(cx);
7212 if (!pn->getConstantValue(cx, ParseNode::DontAllowNestedObjects, &value))
7213 return false;
7214 if (!value.isMagic(JS_GENERIC_MAGIC)) {
7215 // Note: the type of the template object might not yet reflect
7216 // that the object has copy on write elements. When the
7217 // interpreter or JIT compiler fetches the template, it should
7218 // use types::GetOrFixupCopyOnWriteObject to make sure the type
7219 // for the template is accurate. We don't do this here as we
7220 // want to use types::InitObject, which requires a finished
7221 // script.
7222 NativeObject* obj = &value.toObject().as<NativeObject>();
7223 if (!ObjectElements::MakeElementsCopyOnWrite(cx, obj))
7224 return false;
7226 ObjectBox* objbox = bce->parser->newObjectBox(obj);
7227 if (!objbox)
7228 return false;
7230 ok = EmitObjectOp(cx, objbox, JSOP_NEWARRAY_COPYONWRITE, bce);
7231 break;
// Generic path: build the array element by element.
7236 ok = EmitArray(cx, bce, pn->pn_head, pn->pn_count);
7237 break;
7239 case PNK_ARRAYCOMP:
7240 ok = EmitArrayComp(cx, bce, pn);
7241 break;
7243 case PNK_OBJECT:
7244 ok = EmitObject(cx, bce, pn);
7245 break;
7247 case PNK_NAME:
7248 if (!EmitNameOp(cx, bce, pn, false))
7249 return false;
7250 break;
7252 case PNK_TEMPLATE_STRING_LIST:
7253 ok = EmitTemplateString(cx, bce, pn);
7254 break;
7256 case PNK_TEMPLATE_STRING:
7257 case PNK_STRING:
7258 ok = EmitAtomOp(cx, pn, JSOP_STRING, bce);
7259 break;
7261 case PNK_NUMBER:
7262 ok = EmitNumberOp(cx, pn->pn_dval, bce);
7263 break;
7265 case PNK_REGEXP:
7266 ok = EmitRegExp(cx, bce->regexpList.add(pn->as<RegExpLiteral>().objbox()), bce);
7267 break;
7269 case PNK_TRUE:
7270 case PNK_FALSE:
7271 case PNK_THIS:
7272 case PNK_NULL:
7273 if (Emit1(cx, bce, pn->getOp()) < 0)
7274 return false;
7275 break;
7277 case PNK_DEBUGGER:
7278 if (!UpdateSourceCoordNotes(cx, bce, pn->pn_pos.begin))
7279 return false;
7280 if (Emit1(cx, bce, JSOP_DEBUGGER) < 0)
7281 return false;
7282 break;
7284 case PNK_NOP:
7285 MOZ_ASSERT(pn->getArity() == PN_NULLARY);
7286 break;
7288 default:
7289 MOZ_ASSERT(0);
7292 /* bce->emitLevel == 1 means we're last on the stack, so finish up. */
7293 if (ok && bce->emitLevel == 1) {
7294 if (!UpdateSourceCoordNotes(cx, bce, pn->pn_pos.end))
7295 return false;
7298 return ok;
7301 static int
7302 AllocSrcNote(ExclusiveContext* cx, SrcNotesVector& notes)
7304 // Start it off moderately large to avoid repeated resizings early on.
7305 // ~99% of cases fit within 256 bytes.
7306 if (notes.capacity() == 0 && !notes.reserve(256))
7307 return -1;
7309 jssrcnote dummy = 0;
7310 if (!notes.append(dummy)) {
7311 js_ReportOutOfMemory(cx);
7312 return -1;
7314 return notes.length() - 1;
7318 frontend::NewSrcNote(ExclusiveContext* cx, BytecodeEmitter* bce, SrcNoteType type)
7320 SrcNotesVector& notes = bce->notes();
7321 int index;
7323 index = AllocSrcNote(cx, notes);
7324 if (index < 0)
7325 return -1;
7328 * Compute delta from the last annotated bytecode's offset. If it's too
7329 * big to fit in sn, allocate one or more xdelta notes and reset sn.
7331 ptrdiff_t offset = bce->offset();
7332 ptrdiff_t delta = offset - bce->lastNoteOffset();
7333 bce->current->lastNoteOffset = offset;
7334 if (delta >= SN_DELTA_LIMIT) {
7335 do {
7336 ptrdiff_t xdelta = Min(delta, SN_XDELTA_MASK);
7337 SN_MAKE_XDELTA(&notes[index], xdelta);
7338 delta -= xdelta;
7339 index = AllocSrcNote(cx, notes);
7340 if (index < 0)
7341 return -1;
7342 } while (delta >= SN_DELTA_LIMIT);
7346 * Initialize type and delta, then allocate the minimum number of notes
7347 * needed for type's arity. Usually, we won't need more, but if an offset
7348 * does take two bytes, SetSrcNoteOffset will grow notes.
7350 SN_MAKE_NOTE(&notes[index], type, delta);
7351 for (int n = (int)js_SrcNoteSpec[type].arity; n > 0; n--) {
7352 if (NewSrcNote(cx, bce, SRC_NULL) < 0)
7353 return -1;
7355 return index;
7359 frontend::NewSrcNote2(ExclusiveContext* cx, BytecodeEmitter* bce, SrcNoteType type, ptrdiff_t offset)
7361 int index;
7363 index = NewSrcNote(cx, bce, type);
7364 if (index >= 0) {
7365 if (!SetSrcNoteOffset(cx, bce, index, 0, offset))
7366 return -1;
7368 return index;
7372 frontend::NewSrcNote3(ExclusiveContext* cx, BytecodeEmitter* bce, SrcNoteType type, ptrdiff_t offset1,
7373 ptrdiff_t offset2)
7375 int index;
7377 index = NewSrcNote(cx, bce, type);
7378 if (index >= 0) {
7379 if (!SetSrcNoteOffset(cx, bce, index, 0, offset1))
7380 return -1;
7381 if (!SetSrcNoteOffset(cx, bce, index, 1, offset2))
7382 return -1;
7384 return index;
7387 bool
7388 frontend::AddToSrcNoteDelta(ExclusiveContext* cx, BytecodeEmitter* bce, jssrcnote* sn, ptrdiff_t delta)
7391 * Called only from FinishTakingSrcNotes to add to main script note
7392 * deltas, and only by a small positive amount.
7394 MOZ_ASSERT(bce->current == &bce->main);
7395 MOZ_ASSERT((unsigned) delta < (unsigned) SN_XDELTA_LIMIT);
7397 ptrdiff_t base = SN_DELTA(sn);
7398 ptrdiff_t limit = SN_IS_XDELTA(sn) ? SN_XDELTA_LIMIT : SN_DELTA_LIMIT;
7399 ptrdiff_t newdelta = base + delta;
7400 if (newdelta < limit) {
7401 SN_SET_DELTA(sn, newdelta);
7402 } else {
7403 jssrcnote xdelta;
7404 SN_MAKE_XDELTA(&xdelta, delta);
7405 if (!(sn = bce->main.notes.insert(sn, xdelta)))
7406 return false;
7408 return true;
7411 static bool
7412 SetSrcNoteOffset(ExclusiveContext* cx, BytecodeEmitter* bce, unsigned index, unsigned which,
7413 ptrdiff_t offset)
7415 if (!SN_REPRESENTABLE_OFFSET(offset)) {
7416 ReportStatementTooLarge(bce->parser->tokenStream, bce->topStmt);
7417 return false;
7420 SrcNotesVector& notes = bce->notes();
7422 /* Find the offset numbered which (i.e., skip exactly which offsets). */
7423 jssrcnote* sn = notes.begin() + index;
7424 MOZ_ASSERT(SN_TYPE(sn) != SRC_XDELTA);
7425 MOZ_ASSERT((int) which < js_SrcNoteSpec[SN_TYPE(sn)].arity);
7426 for (sn++; which; sn++, which--) {
7427 if (*sn & SN_4BYTE_OFFSET_FLAG)
7428 sn += 3;
7432 * See if the new offset requires four bytes either by being too big or if
7433 * the offset has already been inflated (in which case, we need to stay big
7434 * to not break the srcnote encoding if this isn't the last srcnote).
7436 if (offset > (ptrdiff_t)SN_4BYTE_OFFSET_MASK || (*sn & SN_4BYTE_OFFSET_FLAG)) {
7437 /* Maybe this offset was already set to a four-byte value. */
7438 if (!(*sn & SN_4BYTE_OFFSET_FLAG)) {
7439 /* Insert three dummy bytes that will be overwritten shortly. */
7440 jssrcnote dummy = 0;
7441 if (!(sn = notes.insert(sn, dummy)) ||
7442 !(sn = notes.insert(sn, dummy)) ||
7443 !(sn = notes.insert(sn, dummy)))
7445 js_ReportOutOfMemory(cx);
7446 return false;
7449 *sn++ = (jssrcnote)(SN_4BYTE_OFFSET_FLAG | (offset >> 24));
7450 *sn++ = (jssrcnote)(offset >> 16);
7451 *sn++ = (jssrcnote)(offset >> 8);
7453 *sn = (jssrcnote)offset;
7454 return true;
7458 * Finish taking source notes in cx's notePool.
7459 * If successful, the final source note count is stored in the out outparam.
7461 bool
7462 frontend::FinishTakingSrcNotes(ExclusiveContext* cx, BytecodeEmitter* bce, uint32_t* out)
7464 MOZ_ASSERT(bce->current == &bce->main);
7466 unsigned prologCount = bce->prolog.notes.length();
7467 if (prologCount && bce->prolog.currentLine != bce->firstLine) {
7468 bce->switchToProlog();
7469 if (NewSrcNote2(cx, bce, SRC_SETLINE, (ptrdiff_t)bce->firstLine) < 0)
7470 return false;
7471 bce->switchToMain();
7472 } else {
7474 * Either no prolog srcnotes, or no line number change over prolog.
7475 * We don't need a SRC_SETLINE, but we may need to adjust the offset
7476 * of the first main note, by adding to its delta and possibly even
7477 * prepending SRC_XDELTA notes to it to account for prolog bytecodes
7478 * that came at and after the last annotated bytecode.
7480 ptrdiff_t offset = bce->prologOffset() - bce->prolog.lastNoteOffset;
7481 MOZ_ASSERT(offset >= 0);
7482 if (offset > 0 && bce->main.notes.length() != 0) {
7483 /* NB: Use as much of the first main note's delta as we can. */
7484 jssrcnote* sn = bce->main.notes.begin();
7485 ptrdiff_t delta = SN_IS_XDELTA(sn)
7486 ? SN_XDELTA_MASK - (*sn & SN_XDELTA_MASK)
7487 : SN_DELTA_MASK - (*sn & SN_DELTA_MASK);
7488 if (offset < delta)
7489 delta = offset;
7490 for (;;) {
7491 if (!AddToSrcNoteDelta(cx, bce, sn, delta))
7492 return false;
7493 offset -= delta;
7494 if (offset == 0)
7495 break;
7496 delta = Min(offset, SN_XDELTA_MASK);
7497 sn = bce->main.notes.begin();
7502 // The prolog count might have changed, so we can't reuse prologCount.
7503 // The + 1 is to account for the final SN_MAKE_TERMINATOR that is appended
7504 // when the notes are copied to their final destination by CopySrcNotes.
7505 *out = bce->prolog.notes.length() + bce->main.notes.length() + 1;
7506 return true;
7509 void
7510 frontend::CopySrcNotes(BytecodeEmitter* bce, jssrcnote* destination, uint32_t nsrcnotes)
7512 unsigned prologCount = bce->prolog.notes.length();
7513 unsigned mainCount = bce->main.notes.length();
7514 unsigned totalCount = prologCount + mainCount;
7515 MOZ_ASSERT(totalCount == nsrcnotes - 1);
7516 if (prologCount)
7517 PodCopy(destination, bce->prolog.notes.begin(), prologCount);
7518 PodCopy(destination + prologCount, bce->main.notes.begin(), mainCount);
7519 SN_MAKE_TERMINATOR(&destination[totalCount]);
7522 void
7523 CGConstList::finish(ConstArray* array)
7525 MOZ_ASSERT(length() == array->length);
7527 for (unsigned i = 0; i < length(); i++)
7528 array->vector[i] = list[i];
7532 * Find the index of the given object for code generator.
7534 * Since the emitter refers to each parsed object only once, for the index we
 * use the number of already indexed objects. We also add the object to a list
7536 * to convert the list to a fixed-size array when we complete code generation,
7537 * see js::CGObjectList::finish below.
7539 * Most of the objects go to BytecodeEmitter::objectList but for regexp we use
 * a separate BytecodeEmitter::regexpList. In this way the emitted index can
7541 * be directly used to store and fetch a reference to a cloned RegExp object
7542 * that shares the same JSRegExp private data created for the object literal in
7543 * objbox. We need a cloned object to hold lastIndex and other direct
7544 * properties that should not be shared among threads sharing a precompiled
7545 * function or script.
7547 * If the code being compiled is function code, allocate a reserved slot in
7548 * the cloned function object that shares its precompiled script with other
7549 * cloned function objects and with the compiler-created clone-parent. There
7550 * are nregexps = script->regexps()->length such reserved slots in each
7551 * function object cloned from fun->object. NB: during compilation, a funobj
7552 * slots element must never be allocated, because JSObject::allocSlot could
7553 * hand out one of the slots that should be given to a regexp clone.
 * If the code being compiled is global code, the cloned regexps are stored in
7556 * fp->vars slot and to protect regexp slots from GC we set fp->nvars to
7557 * nregexps.
7559 * The slots initially contain undefined or null. We populate them lazily when
7560 * JSOP_REGEXP is executed for the first time.
7562 * Why clone regexp objects? ECMA specifies that when a regular expression
7563 * literal is scanned, a RegExp object is created. In the spec, compilation
7564 * and execution happen indivisibly, but in this implementation and many of
7565 * its embeddings, code is precompiled early and re-executed in multiple
7566 * threads, or using multiple global objects, or both, for efficiency.
7568 * In such cases, naively following ECMA leads to wrongful sharing of RegExp
7569 * objects, which makes for collisions on the lastIndex property (especially
7570 * for global regexps) and on any ad-hoc properties. Also, __proto__ refers to
7571 * the pre-compilation prototype, a pigeon-hole problem for instanceof tests.
7573 unsigned
7574 CGObjectList::add(ObjectBox* objbox)
7576 MOZ_ASSERT(!objbox->emitLink);
7577 objbox->emitLink = lastbox;
7578 lastbox = objbox;
7579 return length++;
7582 unsigned
7583 CGObjectList::indexOf(JSObject* obj)
7585 MOZ_ASSERT(length > 0);
7586 unsigned index = length - 1;
7587 for (ObjectBox* box = lastbox; box->object != obj; box = box->emitLink)
7588 index--;
7589 return index;
7592 void
7593 CGObjectList::finish(ObjectArray* array)
7595 MOZ_ASSERT(length <= INDEX_LIMIT);
7596 MOZ_ASSERT(length == array->length);
7598 js::HeapPtrNativeObject* cursor = array->vector + array->length;
7599 ObjectBox* objbox = lastbox;
7600 do {
7601 --cursor;
7602 MOZ_ASSERT(!*cursor);
7603 *cursor = objbox->object;
7604 } while ((objbox = objbox->emitLink) != nullptr);
7605 MOZ_ASSERT(cursor == array->vector);
7608 ObjectBox*
7609 CGObjectList::find(uint32_t index)
7611 MOZ_ASSERT(index < length);
7612 ObjectBox* box = lastbox;
7613 for (unsigned n = length - 1; n > index; n--)
7614 box = box->emitLink;
7615 return box;
7618 bool
7619 CGTryNoteList::append(JSTryNoteKind kind, uint32_t stackDepth, size_t start, size_t end)
7621 MOZ_ASSERT(start <= end);
7622 MOZ_ASSERT(size_t(uint32_t(start)) == start);
7623 MOZ_ASSERT(size_t(uint32_t(end)) == end);
7625 JSTryNote note;
7626 note.kind = kind;
7627 note.stackDepth = stackDepth;
7628 note.start = uint32_t(start);
7629 note.length = uint32_t(end - start);
7631 return list.append(note);
7634 void
7635 CGTryNoteList::finish(TryNoteArray* array)
7637 MOZ_ASSERT(length() == array->length);
7639 for (unsigned i = 0; i < length(); i++)
7640 array->vector[i] = list[i];
7643 bool
7644 CGBlockScopeList::append(uint32_t scopeObject, uint32_t offset, uint32_t parent)
7646 BlockScopeNote note;
7647 mozilla::PodZero(&note);
7649 note.index = scopeObject;
7650 note.start = offset;
7651 note.parent = parent;
7653 return list.append(note);
7656 uint32_t
7657 CGBlockScopeList::findEnclosingScope(uint32_t index)
7659 MOZ_ASSERT(index < length());
7660 MOZ_ASSERT(list[index].index != BlockScopeNote::NoBlockScopeIndex);
7662 DebugOnly<uint32_t> pos = list[index].start;
7663 while (index--) {
7664 MOZ_ASSERT(list[index].start <= pos);
7665 if (list[index].length == 0) {
7666 // We are looking for the nearest enclosing live scope. If the
7667 // scope contains POS, it should still be open, so its length should
7668 // be zero.
7669 return list[index].index;
7670 } else {
7671 // Conversely, if the length is not zero, it should not contain
7672 // POS.
7673 MOZ_ASSERT(list[index].start + list[index].length <= pos);
7677 return BlockScopeNote::NoBlockScopeIndex;
7680 void
7681 CGBlockScopeList::recordEnd(uint32_t index, uint32_t offset)
7683 MOZ_ASSERT(index < length());
7684 MOZ_ASSERT(offset >= list[index].start);
7685 MOZ_ASSERT(list[index].length == 0);
7687 list[index].length = offset - list[index].start;
7690 void
7691 CGBlockScopeList::finish(BlockScopeArray* array)
7693 MOZ_ASSERT(length() == array->length);
7695 for (unsigned i = 0; i < length(); i++)
7696 array->vector[i] = list[i];
7699 void
7700 CGYieldOffsetList::finish(YieldOffsetArray& array, uint32_t prologLength)
7702 MOZ_ASSERT(length() == array.length());
7704 for (unsigned i = 0; i < length(); i++)
7705 array[i] = prologLength + list[i];
7709 * We should try to get rid of offsetBias (always 0 or 1, where 1 is
7710 * JSOP_{NOP,POP}_LENGTH), which is used only by SRC_FOR.
7712 const JSSrcNoteSpec js_SrcNoteSpec[] = {
7713 #define DEFINE_SRC_NOTE_SPEC(sym, name, arity) { name, arity },
7714 FOR_EACH_SRC_NOTE_TYPE(DEFINE_SRC_NOTE_SPEC)
7715 #undef DEFINE_SRC_NOTE_SPEC
7718 static int
7719 SrcNoteArity(jssrcnote* sn)
7721 MOZ_ASSERT(SN_TYPE(sn) < SRC_LAST);
7722 return js_SrcNoteSpec[SN_TYPE(sn)].arity;
7725 JS_FRIEND_API(unsigned)
7726 js_SrcNoteLength(jssrcnote* sn)
7728 unsigned arity;
7729 jssrcnote* base;
7731 arity = SrcNoteArity(sn);
7732 for (base = sn++; arity; sn++, arity--) {
7733 if (*sn & SN_4BYTE_OFFSET_FLAG)
7734 sn += 3;
7736 return sn - base;
7739 JS_FRIEND_API(ptrdiff_t)
7740 js_GetSrcNoteOffset(jssrcnote* sn, unsigned which)
7742 /* Find the offset numbered which (i.e., skip exactly which offsets). */
7743 MOZ_ASSERT(SN_TYPE(sn) != SRC_XDELTA);
7744 MOZ_ASSERT((int) which < SrcNoteArity(sn));
7745 for (sn++; which; sn++, which--) {
7746 if (*sn & SN_4BYTE_OFFSET_FLAG)
7747 sn += 3;
7749 if (*sn & SN_4BYTE_OFFSET_FLAG) {
7750 return (ptrdiff_t)(((uint32_t)(sn[0] & SN_4BYTE_OFFSET_MASK) << 24)
7751 | (sn[1] << 16)
7752 | (sn[2] << 8)
7753 | sn[3]);
7755 return (ptrdiff_t)*sn;