Bug 1159973 - Abort parsing when TokenStream::SourceCoords hits OOM. r=jorendorff...
[gecko.git] / js / src / frontend / BytecodeEmitter.cpp
blob00f9f5267ca25c578eca95756497aa02329efc8e
1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=8 sts=4 et sw=4 tw=99:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
7 /*
8 * JS bytecode generation.
9 */
11 #include "frontend/BytecodeEmitter.h"
13 #include "mozilla/DebugOnly.h"
14 #include "mozilla/FloatingPoint.h"
15 #include "mozilla/PodOperations.h"
16 #include "mozilla/UniquePtr.h"
18 #include <string.h>
20 #include "jsapi.h"
21 #include "jsatom.h"
22 #include "jscntxt.h"
23 #include "jsfun.h"
24 #include "jsnum.h"
25 #include "jsopcode.h"
26 #include "jsscript.h"
27 #include "jstypes.h"
28 #include "jsutil.h"
30 #include "asmjs/AsmJSLink.h"
31 #include "frontend/Parser.h"
32 #include "frontend/TokenStream.h"
33 #include "vm/Debugger.h"
35 #include "jsatominlines.h"
36 #include "jsobjinlines.h"
37 #include "jsscriptinlines.h"
39 #include "frontend/ParseMaps-inl.h"
40 #include "frontend/ParseNode-inl.h"
41 #include "vm/ScopeObject-inl.h"
43 using namespace js;
44 using namespace js::gc;
45 using namespace js::frontend;
47 using mozilla::DebugOnly;
48 using mozilla::NumberIsInt32;
49 using mozilla::PodCopy;
50 using mozilla::UniquePtr;
52 static bool
53 SetSrcNoteOffset(ExclusiveContext* cx, BytecodeEmitter* bce, unsigned index, unsigned which, ptrdiff_t offset);
55 static bool
56 UpdateSourceCoordNotes(ExclusiveContext* cx, BytecodeEmitter* bce, uint32_t offset);
58 struct frontend::StmtInfoBCE : public StmtInfoBase
60 StmtInfoBCE* down; /* info for enclosing statement */
61 StmtInfoBCE* downScope; /* next enclosing lexical scope */
63 ptrdiff_t update; /* loop update offset (top if none) */
64 ptrdiff_t breaks; /* offset of last break in loop */
65 ptrdiff_t continues; /* offset of last continue in loop */
66 uint32_t blockScopeIndex; /* index of scope in BlockScopeArray */
68 explicit StmtInfoBCE(ExclusiveContext* cx) : StmtInfoBase(cx) {}
71 * To reuse space, alias two of the ptrdiff_t fields for use during
72 * try/catch/finally code generation and backpatching.
74 * Only a loop, switch, or label statement info record can have breaks and
75 * continues, and only a for loop has an update backpatch chain, so it's
76 * safe to overlay these for the "trying" StmtTypes.
79 ptrdiff_t& gosubs() {
80 JS_ASSERT(type == STMT_FINALLY);
81 return breaks;
84 ptrdiff_t& guardJump() {
85 JS_ASSERT(type == STMT_TRY || type == STMT_FINALLY);
86 return continues;
91 namespace {
93 struct LoopStmtInfo : public StmtInfoBCE
95 int32_t stackDepth; // Stack depth when this loop was pushed.
96 uint32_t loopDepth; // Loop depth.
98 // Can we OSR into Ion from here? True unless there is non-loop state on the stack.
99 bool canIonOsr;
101 explicit LoopStmtInfo(ExclusiveContext* cx) : StmtInfoBCE(cx) {}
103 static LoopStmtInfo* fromStmtInfo(StmtInfoBCE* stmt) {
104 JS_ASSERT(stmt->isLoop());
105 return static_cast<LoopStmtInfo*>(stmt);
109 } // anonymous namespace
111 BytecodeEmitter::BytecodeEmitter(BytecodeEmitter* parent,
112 Parser<FullParseHandler>* parser, SharedContext* sc,
113 HandleScript script, bool insideEval, HandleScript evalCaller,
114 bool hasGlobalScope, uint32_t lineNum, EmitterMode emitterMode)
115 : sc(sc),
116 parent(parent),
117 script(sc->context, script),
118 prolog(sc->context, lineNum),
119 main(sc->context, lineNum),
120 current(&main),
121 parser(parser),
122 evalCaller(evalCaller),
123 topStmt(nullptr),
124 topScopeStmt(nullptr),
125 staticScope(sc->context),
126 atomIndices(sc->context),
127 firstLine(lineNum),
128 stackDepth(0), maxStackDepth(0),
129 arrayCompDepth(0),
130 emitLevel(0),
131 constList(sc->context),
132 tryNoteList(sc->context),
133 blockScopeList(sc->context),
134 typesetCount(0),
135 hasSingletons(false),
136 emittingForInit(false),
137 emittingRunOnceLambda(false),
138 lazyRunOnceLambda(false),
139 insideEval(insideEval),
140 hasGlobalScope(hasGlobalScope),
141 emitterMode(emitterMode)
143 JS_ASSERT_IF(evalCaller, insideEval);
146 bool
147 BytecodeEmitter::init()
149 return atomIndices.ensureMap(sc->context);
152 static ptrdiff_t
153 EmitCheck(ExclusiveContext* cx, BytecodeEmitter* bce, ptrdiff_t delta)
155 ptrdiff_t offset = bce->code().length();
157 // Start it off moderately large to avoid repeated resizings early on.
158 if (bce->code().capacity() == 0 && !bce->code().reserve(1024))
159 return -1;
161 jsbytecode dummy = 0;
162 if (!bce->code().appendN(dummy, delta)) {
163 js_ReportOutOfMemory(cx);
164 return -1;
166 return offset;
169 static void
170 UpdateDepth(ExclusiveContext* cx, BytecodeEmitter* bce, ptrdiff_t target)
172 jsbytecode* pc = bce->code(target);
173 JSOp op = (JSOp) *pc;
174 const JSCodeSpec* cs = &js_CodeSpec[op];
176 if (cs->format & JOF_TMPSLOT_MASK) {
178 * An opcode may temporarily consume stack space during execution.
179 * Account for this in maxStackDepth separately from uses/defs here.
181 uint32_t depth = (uint32_t) bce->stackDepth +
182 ((cs->format & JOF_TMPSLOT_MASK) >> JOF_TMPSLOT_SHIFT);
183 if (depth > bce->maxStackDepth)
184 bce->maxStackDepth = depth;
187 int nuses = StackUses(nullptr, pc);
188 int ndefs = StackDefs(nullptr, pc);
190 bce->stackDepth -= nuses;
191 JS_ASSERT(bce->stackDepth >= 0);
192 bce->stackDepth += ndefs;
193 if ((uint32_t)bce->stackDepth > bce->maxStackDepth)
194 bce->maxStackDepth = bce->stackDepth;
197 ptrdiff_t
198 frontend::Emit1(ExclusiveContext* cx, BytecodeEmitter* bce, JSOp op)
200 ptrdiff_t offset = EmitCheck(cx, bce, 1);
201 if (offset < 0)
202 return -1;
204 jsbytecode* code = bce->code(offset);
205 code[0] = jsbytecode(op);
206 UpdateDepth(cx, bce, offset);
207 return offset;
210 ptrdiff_t
211 frontend::Emit2(ExclusiveContext* cx, BytecodeEmitter* bce, JSOp op, jsbytecode op1)
213 ptrdiff_t offset = EmitCheck(cx, bce, 2);
214 if (offset < 0)
215 return -1;
217 jsbytecode* code = bce->code(offset);
218 code[0] = jsbytecode(op);
219 code[1] = op1;
220 UpdateDepth(cx, bce, offset);
221 return offset;
224 ptrdiff_t
225 frontend::Emit3(ExclusiveContext* cx, BytecodeEmitter* bce, JSOp op, jsbytecode op1,
226 jsbytecode op2)
228 /* These should filter through EmitVarOp. */
229 JS_ASSERT(!IsArgOp(op));
230 JS_ASSERT(!IsLocalOp(op));
232 ptrdiff_t offset = EmitCheck(cx, bce, 3);
233 if (offset < 0)
234 return -1;
236 jsbytecode* code = bce->code(offset);
237 code[0] = jsbytecode(op);
238 code[1] = op1;
239 code[2] = op2;
240 UpdateDepth(cx, bce, offset);
241 return offset;
244 ptrdiff_t
245 frontend::EmitN(ExclusiveContext* cx, BytecodeEmitter* bce, JSOp op, size_t extra)
247 ptrdiff_t length = 1 + (ptrdiff_t)extra;
248 ptrdiff_t offset = EmitCheck(cx, bce, length);
249 if (offset < 0)
250 return -1;
252 jsbytecode* code = bce->code(offset);
253 code[0] = jsbytecode(op);
254 /* The remaining |extra| bytes are set by the caller */
257 * Don't UpdateDepth if op's use-count comes from the immediate
258 * operand yet to be stored in the extra bytes after op.
260 if (js_CodeSpec[op].nuses >= 0)
261 UpdateDepth(cx, bce, offset);
263 return offset;
266 static ptrdiff_t
267 EmitJump(ExclusiveContext* cx, BytecodeEmitter* bce, JSOp op, ptrdiff_t off)
269 ptrdiff_t offset = EmitCheck(cx, bce, 5);
270 if (offset < 0)
271 return -1;
273 jsbytecode* code = bce->code(offset);
274 code[0] = jsbytecode(op);
275 SET_JUMP_OFFSET(code, off);
276 UpdateDepth(cx, bce, offset);
277 return offset;
280 static ptrdiff_t
281 EmitCall(ExclusiveContext* cx, BytecodeEmitter* bce, JSOp op, uint16_t argc, ParseNode* pn=nullptr)
283 if (pn && !UpdateSourceCoordNotes(cx, bce, pn->pn_pos.begin))
284 return -1;
285 return Emit3(cx, bce, op, ARGC_HI(argc), ARGC_LO(argc));
288 // Dup the var in operand stack slot "slot". The first item on the operand
289 // stack is one slot past the last fixed slot. The last (most recent) item is
290 // slot bce->stackDepth - 1.
292 // The instruction that is written (JSOP_DUPAT) switches the depth around so
293 // that it is addressed from the sp instead of from the fp. This is useful when
294 // you don't know the size of the fixed stack segment (nfixed), as is the case
295 // when compiling scripts (because each statement is parsed and compiled
296 // separately, but they all together form one script with one fixed stack
297 // frame).
298 static bool
299 EmitDupAt(ExclusiveContext* cx, BytecodeEmitter* bce, unsigned slot)
301 JS_ASSERT(slot < unsigned(bce->stackDepth));
302 // The slot's position on the operand stack, measured from the top.
303 unsigned slotFromTop = bce->stackDepth - 1 - slot;
304 if (slotFromTop >= JS_BIT(24)) {
305 bce->reportError(nullptr, JSMSG_TOO_MANY_LOCALS);
306 return false;
308 ptrdiff_t off = EmitN(cx, bce, JSOP_DUPAT, 3);
309 if (off < 0)
310 return false;
311 jsbytecode* pc = bce->code(off);
312 SET_UINT24(pc, slotFromTop);
313 return true;
316 /* XXX too many "... statement" L10N gaffes below -- fix via js.msg! */
317 const char js_with_statement_str[] = "with statement";
318 const char js_finally_block_str[] = "finally block";
319 const char js_script_str[] = "script";
321 static const char * const statementName[] = {
322 "label statement", /* LABEL */
323 "if statement", /* IF */
324 "else statement", /* ELSE */
325 "destructuring body", /* BODY */
326 "switch statement", /* SWITCH */
327 "block", /* BLOCK */
328 js_with_statement_str, /* WITH */
329 "catch block", /* CATCH */
330 "try block", /* TRY */
331 js_finally_block_str, /* FINALLY */
332 js_finally_block_str, /* SUBROUTINE */
333 "do loop", /* DO_LOOP */
334 "for loop", /* FOR_LOOP */
335 "for/in loop", /* FOR_IN_LOOP */
336 "for/of loop", /* FOR_OF_LOOP */
337 "while loop", /* WHILE_LOOP */
338 "spread", /* SPREAD */
341 JS_STATIC_ASSERT(JS_ARRAY_LENGTH(statementName) == STMT_LIMIT);
343 static const char*
344 StatementName(StmtInfoBCE* topStmt)
346 if (!topStmt)
347 return js_script_str;
348 return statementName[topStmt->type];
351 static void
352 ReportStatementTooLarge(TokenStream& ts, StmtInfoBCE* topStmt)
354 ts.reportError(JSMSG_NEED_DIET, StatementName(topStmt));
358 * Emit a backpatch op with offset pointing to the previous jump of this type,
359 * so that we can walk back up the chain fixing up the op and jump offset.
361 static ptrdiff_t
362 EmitBackPatchOp(ExclusiveContext* cx, BytecodeEmitter* bce, ptrdiff_t* lastp)
364 ptrdiff_t offset, delta;
366 offset = bce->offset();
367 delta = offset - *lastp;
368 *lastp = offset;
369 JS_ASSERT(delta > 0);
370 return EmitJump(cx, bce, JSOP_BACKPATCH, delta);
373 static inline unsigned
374 LengthOfSetLine(unsigned line)
376 return 1 /* SN_SETLINE */ + (line > SN_4BYTE_OFFSET_MASK ? 4 : 1);
379 /* Updates line number notes, not column notes. */
380 static inline bool
381 UpdateLineNumberNotes(ExclusiveContext* cx, BytecodeEmitter* bce, uint32_t offset)
383 TokenStream* ts = &bce->parser->tokenStream;
384 bool onThisLine;
385 if (!ts->srcCoords.isOnThisLine(offset, bce->currentLine(), &onThisLine))
386 return ts->reportError(JSMSG_OUT_OF_MEMORY);
387 if (!onThisLine) {
388 unsigned line = ts->srcCoords.lineNum(offset);
389 unsigned delta = line - bce->currentLine();
392 * Encode any change in the current source line number by using
393 * either several SRC_NEWLINE notes or just one SRC_SETLINE note,
394 * whichever consumes less space.
396 * NB: We handle backward line number deltas (possible with for
397 * loops where the update part is emitted after the body, but its
398 * line number is <= any line number in the body) here by letting
399 * unsigned delta_ wrap to a very large number, which triggers a
400 * SRC_SETLINE.
402 bce->current->currentLine = line;
403 bce->current->lastColumn = 0;
404 if (delta >= LengthOfSetLine(line)) {
405 if (NewSrcNote2(cx, bce, SRC_SETLINE, (ptrdiff_t)line) < 0)
406 return false;
407 } else {
408 do {
409 if (NewSrcNote(cx, bce, SRC_NEWLINE) < 0)
410 return false;
411 } while (--delta != 0);
414 return true;
417 /* Updates the line number and column number information in the source notes. */
418 static bool
419 UpdateSourceCoordNotes(ExclusiveContext* cx, BytecodeEmitter* bce, uint32_t offset)
421 if (!UpdateLineNumberNotes(cx, bce, offset))
422 return false;
424 uint32_t columnIndex = bce->parser->tokenStream.srcCoords.columnIndex(offset);
425 ptrdiff_t colspan = ptrdiff_t(columnIndex) - ptrdiff_t(bce->current->lastColumn);
426 if (colspan != 0) {
427 if (colspan < 0) {
428 colspan += SN_COLSPAN_DOMAIN;
429 } else if (colspan >= SN_COLSPAN_DOMAIN / 2) {
430 // If the column span is so large that we can't store it, then just
431 // discard this information because column information would most
432 // likely be useless anyway once the column numbers are ~4000000.
433 // This has been known to happen with scripts that have been
434 // minimized and put into all one line.
435 return true;
437 if (NewSrcNote2(cx, bce, SRC_COLSPAN, colspan) < 0)
438 return false;
439 bce->current->lastColumn = columnIndex;
441 return true;
444 static ptrdiff_t
445 EmitLoopHead(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* nextpn)
447 if (nextpn) {
449 * Try to give the JSOP_LOOPHEAD the same line number as the next
450 * instruction. nextpn is often a block, in which case the next
451 * instruction typically comes from the first statement inside.
453 JS_ASSERT_IF(nextpn->isKind(PNK_STATEMENTLIST), nextpn->isArity(PN_LIST));
454 if (nextpn->isKind(PNK_STATEMENTLIST) && nextpn->pn_head)
455 nextpn = nextpn->pn_head;
456 if (!UpdateSourceCoordNotes(cx, bce, nextpn->pn_pos.begin))
457 return -1;
460 return Emit1(cx, bce, JSOP_LOOPHEAD);
463 static bool
464 EmitLoopEntry(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* nextpn)
466 if (nextpn) {
467 /* Update the line number, as for LOOPHEAD. */
468 JS_ASSERT_IF(nextpn->isKind(PNK_STATEMENTLIST), nextpn->isArity(PN_LIST));
469 if (nextpn->isKind(PNK_STATEMENTLIST) && nextpn->pn_head)
470 nextpn = nextpn->pn_head;
471 if (!UpdateSourceCoordNotes(cx, bce, nextpn->pn_pos.begin))
472 return false;
475 LoopStmtInfo* loop = LoopStmtInfo::fromStmtInfo(bce->topStmt);
476 JS_ASSERT(loop->loopDepth > 0);
478 uint8_t loopDepthAndFlags = PackLoopEntryDepthHintAndFlags(loop->loopDepth, loop->canIonOsr);
479 return Emit2(cx, bce, JSOP_LOOPENTRY, loopDepthAndFlags) >= 0;
483 * If op is JOF_TYPESET (see the type barriers comment in jsinfer.h), reserve
484 * a type set to store its result.
486 static inline void
487 CheckTypeSet(ExclusiveContext* cx, BytecodeEmitter* bce, JSOp op)
489 if (js_CodeSpec[op].format & JOF_TYPESET) {
490 if (bce->typesetCount < UINT16_MAX)
491 bce->typesetCount++;
496 * Macro to emit a bytecode followed by a uint16_t immediate operand stored in
497 * big-endian order.
499 * NB: We use cx and bce from our caller's lexical environment, and return
500 * false on error.
502 #define EMIT_UINT16_IMM_OP(op, i) \
503 JS_BEGIN_MACRO \
504 if (Emit3(cx, bce, op, UINT16_HI(i), UINT16_LO(i)) < 0) \
505 return false; \
506 CheckTypeSet(cx, bce, op); \
507 JS_END_MACRO
509 static bool
510 FlushPops(ExclusiveContext* cx, BytecodeEmitter* bce, int* npops)
512 JS_ASSERT(*npops != 0);
513 EMIT_UINT16_IMM_OP(JSOP_POPN, *npops);
514 *npops = 0;
515 return true;
518 static bool
519 PopIterator(ExclusiveContext* cx, BytecodeEmitter* bce)
521 if (Emit1(cx, bce, JSOP_ENDITER) < 0)
522 return false;
523 return true;
526 namespace {
528 class NonLocalExitScope {
529 ExclusiveContext* cx;
530 BytecodeEmitter* bce;
531 const uint32_t savedScopeIndex;
532 const int savedDepth;
533 uint32_t openScopeIndex;
535 NonLocalExitScope(const NonLocalExitScope&) MOZ_DELETE;
537 public:
538 explicit NonLocalExitScope(ExclusiveContext* cx_, BytecodeEmitter* bce_)
539 : cx(cx_),
540 bce(bce_),
541 savedScopeIndex(bce->blockScopeList.length()),
542 savedDepth(bce->stackDepth),
543 openScopeIndex(UINT32_MAX) {
544 if (bce->staticScope) {
545 StmtInfoBCE* stmt = bce->topStmt;
546 while (1) {
547 JS_ASSERT(stmt);
548 if (stmt->isNestedScope) {
549 openScopeIndex = stmt->blockScopeIndex;
550 break;
552 stmt = stmt->down;
557 ~NonLocalExitScope() {
558 for (uint32_t n = savedScopeIndex; n < bce->blockScopeList.length(); n++)
559 bce->blockScopeList.recordEnd(n, bce->offset());
560 bce->stackDepth = savedDepth;
563 bool popScopeForNonLocalExit(uint32_t blockScopeIndex) {
564 uint32_t scopeObjectIndex = bce->blockScopeList.findEnclosingScope(blockScopeIndex);
565 uint32_t parent = openScopeIndex;
567 if (!bce->blockScopeList.append(scopeObjectIndex, bce->offset(), parent))
568 return false;
569 openScopeIndex = bce->blockScopeList.length() - 1;
570 return true;
573 bool prepareForNonLocalJump(StmtInfoBCE* toStmt);
577 * Emit additional bytecode(s) for non-local jumps.
579 bool
580 NonLocalExitScope::prepareForNonLocalJump(StmtInfoBCE* toStmt)
582 int npops = 0;
584 #define FLUSH_POPS() if (npops && !FlushPops(cx, bce, &npops)) return false
586 for (StmtInfoBCE* stmt = bce->topStmt; stmt != toStmt; stmt = stmt->down) {
587 switch (stmt->type) {
588 case STMT_FINALLY:
589 FLUSH_POPS();
590 if (EmitBackPatchOp(cx, bce, &stmt->gosubs()) < 0)
591 return false;
592 break;
594 case STMT_WITH:
595 if (Emit1(cx, bce, JSOP_LEAVEWITH) < 0)
596 return false;
597 JS_ASSERT(stmt->isNestedScope);
598 if (!popScopeForNonLocalExit(stmt->blockScopeIndex))
599 return false;
600 break;
602 case STMT_FOR_OF_LOOP:
603 npops += 2;
604 break;
606 case STMT_FOR_IN_LOOP:
607 FLUSH_POPS();
608 if (!PopIterator(cx, bce))
609 return false;
610 break;
612 case STMT_SPREAD:
613 MOZ_ASSERT_UNREACHABLE("can't break/continue/return from inside a spread");
614 break;
616 case STMT_SUBROUTINE:
618 * There's a [exception or hole, retsub pc-index] pair on the
619 * stack that we need to pop.
621 npops += 2;
622 break;
624 default:;
627 if (stmt->isBlockScope) {
628 JS_ASSERT(stmt->isNestedScope);
629 StaticBlockObject& blockObj = stmt->staticBlock();
630 if (Emit1(cx, bce, JSOP_DEBUGLEAVEBLOCK) < 0)
631 return false;
632 if (!popScopeForNonLocalExit(stmt->blockScopeIndex))
633 return false;
634 if (blockObj.needsClone()) {
635 if (Emit1(cx, bce, JSOP_POPBLOCKSCOPE) < 0)
636 return false;
641 FLUSH_POPS();
642 return true;
644 #undef FLUSH_POPS
647 } // anonymous namespace
649 static ptrdiff_t
650 EmitGoto(ExclusiveContext* cx, BytecodeEmitter* bce, StmtInfoBCE* toStmt, ptrdiff_t* lastp,
651 SrcNoteType noteType = SRC_NULL)
653 NonLocalExitScope nle(cx, bce);
655 if (!nle.prepareForNonLocalJump(toStmt))
656 return -1;
658 if (noteType != SRC_NULL) {
659 if (NewSrcNote(cx, bce, noteType) < 0)
660 return -1;
663 return EmitBackPatchOp(cx, bce, lastp);
666 static bool
667 BackPatch(ExclusiveContext* cx, BytecodeEmitter* bce, ptrdiff_t last, jsbytecode* target, jsbytecode op)
669 jsbytecode* pc, *stop;
670 ptrdiff_t delta, span;
672 pc = bce->code(last);
673 stop = bce->code(-1);
674 while (pc != stop) {
675 delta = GET_JUMP_OFFSET(pc);
676 span = target - pc;
677 SET_JUMP_OFFSET(pc, span);
678 *pc = op;
679 pc -= delta;
681 return true;
// Initialize a statement record's loop-top offset and empty break/continue chains.
#define SET_STATEMENT_TOP(stmt, top)                                          \
    ((stmt)->update = (top), (stmt)->breaks = (stmt)->continues = (-1))
687 static void
688 PushStatementInner(BytecodeEmitter* bce, StmtInfoBCE* stmt, StmtType type, ptrdiff_t top)
690 SET_STATEMENT_TOP(stmt, top);
691 PushStatement(bce, stmt, type);
694 static void
695 PushStatementBCE(BytecodeEmitter* bce, StmtInfoBCE* stmt, StmtType type, ptrdiff_t top)
697 PushStatementInner(bce, stmt, type, top);
698 JS_ASSERT(!stmt->isLoop());
701 static void
702 PushLoopStatement(BytecodeEmitter* bce, LoopStmtInfo* stmt, StmtType type, ptrdiff_t top)
704 PushStatementInner(bce, stmt, type, top);
705 JS_ASSERT(stmt->isLoop());
707 LoopStmtInfo* downLoop = nullptr;
708 for (StmtInfoBCE* outer = stmt->down; outer; outer = outer->down) {
709 if (outer->isLoop()) {
710 downLoop = LoopStmtInfo::fromStmtInfo(outer);
711 break;
715 stmt->stackDepth = bce->stackDepth;
716 stmt->loopDepth = downLoop ? downLoop->loopDepth + 1 : 1;
718 int loopSlots;
719 if (type == STMT_SPREAD)
720 loopSlots = 3;
721 else if (type == STMT_FOR_OF_LOOP)
722 loopSlots = 2;
723 else if (type == STMT_FOR_IN_LOOP)
724 loopSlots = 1;
725 else
726 loopSlots = 0;
728 MOZ_ASSERT(loopSlots <= stmt->stackDepth);
730 if (downLoop)
731 stmt->canIonOsr = (downLoop->canIonOsr &&
732 stmt->stackDepth == downLoop->stackDepth + loopSlots);
733 else
734 stmt->canIonOsr = stmt->stackDepth == loopSlots;
738 * Return the enclosing lexical scope, which is the innermost enclosing static
739 * block object or compiler created function.
741 static JSObject*
742 EnclosingStaticScope(BytecodeEmitter* bce)
744 if (bce->staticScope)
745 return bce->staticScope;
747 if (!bce->sc->isFunctionBox()) {
748 JS_ASSERT(!bce->parent);
749 return nullptr;
752 return bce->sc->asFunctionBox()->function();
#ifdef DEBUG
// Debug-only: true iff every variable in |obj| is marked aliased.
static bool
AllLocalsAliased(StaticBlockObject& obj)
{
    for (unsigned i = 0; i < obj.numVariables(); i++) {
        if (!obj.isAliased(i))
            return false;
    }
    return true;
}
#endif
766 static bool
767 ComputeAliasedSlots(ExclusiveContext* cx, BytecodeEmitter* bce, Handle<StaticBlockObject*> blockObj)
769 for (unsigned i = 0; i < blockObj->numVariables(); i++) {
770 Definition* dn = blockObj->definitionParseNode(i);
772 JS_ASSERT(dn->isDefn());
773 if (!dn->pn_cookie.set(bce->parser->tokenStream, dn->pn_cookie.level(),
774 blockObj->blockIndexToLocalIndex(dn->frameSlot())))
776 return false;
779 #ifdef DEBUG
780 for (ParseNode* pnu = dn->dn_uses; pnu; pnu = pnu->pn_link) {
781 JS_ASSERT(pnu->pn_lexdef == dn);
782 JS_ASSERT(!(pnu->pn_dflags & PND_BOUND));
783 JS_ASSERT(pnu->pn_cookie.isFree());
785 #endif
787 blockObj->setAliased(i, bce->isAliasedName(dn));
790 JS_ASSERT_IF(bce->sc->allLocalsAliased(), AllLocalsAliased(*blockObj));
792 return true;
795 static bool
796 EmitInternedObjectOp(ExclusiveContext* cx, uint32_t index, JSOp op, BytecodeEmitter* bce);
798 // In a function, block-scoped locals go after the vars, and form part of the
799 // fixed part of a stack frame. Outside a function, there are no fixed vars,
800 // but block-scoped locals still form part of the fixed part of a stack frame
801 // and are thus addressable via GETLOCAL and friends.
802 static void
803 ComputeLocalOffset(ExclusiveContext* cx, BytecodeEmitter* bce, Handle<StaticBlockObject*> blockObj)
805 unsigned nfixedvars = bce->sc->isFunctionBox() ? bce->script->bindings.numVars() : 0;
806 unsigned localOffset = nfixedvars;
808 if (bce->staticScope) {
809 Rooted<NestedScopeObject*> outer(cx, bce->staticScope);
810 for (; outer; outer = outer->enclosingNestedScope()) {
811 if (outer->is<StaticBlockObject>()) {
812 StaticBlockObject& outerBlock = outer->as<StaticBlockObject>();
813 localOffset = outerBlock.localOffset() + outerBlock.numVariables();
814 break;
819 JS_ASSERT(localOffset + blockObj->numVariables()
820 <= nfixedvars + bce->script->bindings.numBlockScoped());
822 blockObj->setLocalOffset(localOffset);
825 // ~ Nested Scopes ~
827 // A nested scope is a region of a compilation unit (function, script, or eval
828 // code) with an additional node on the scope chain. This node may either be a
829 // "with" object or a "block" object. "With" objects represent "with" scopes.
830 // Block objects represent lexical scopes, and contain named block-scoped
831 // bindings, for example "let" bindings or the exception in a catch block.
832 // Those variables may be local and thus accessible directly from the stack, or
833 // "aliased" (accessed by name from nested functions, or dynamically via nested
834 // "eval" or "with") and only accessible through the scope chain.
836 // All nested scopes are present on the "static scope chain". A nested scope
837 // that is a "with" scope will be present on the scope chain at run-time as
838 // well. A block scope may or may not have a corresponding link on the run-time
839 // scope chain; if no variable declared in the block scope is "aliased", then no
840 // scope chain node is allocated.
842 // To help debuggers, the bytecode emitter arranges to record the PC ranges
843 // comprehended by a nested scope, and ultimately attach them to the JSScript.
844 // An element in the "block scope array" specifies the PC range, and links to a
845 // NestedScopeObject in the object list of the script. That scope object is
846 // linked to the previous link in the static scope chain, if any. The static
847 // scope chain at any pre-retire PC can be retrieved using
848 // JSScript::getStaticScope(jsbytecode* pc).
850 // Block scopes store their locals in the fixed part of a stack frame, after the
851 // "fixed var" bindings. A fixed var binding is a "var" or legacy "const"
852 // binding that occurs in a function (as opposed to a script or in eval code).
853 // Only functions have fixed var bindings.
855 // To assist the debugger, we emit a DEBUGLEAVEBLOCK opcode before leaving a
856 // block scope, even if the block has no aliased locals. This allows
857 // DebugScopes to invalidate any association between a debugger scope object,
858 // which can proxy access to unaliased stack locals, and the actual live frame.
859 // In normal, non-debug mode, this opcode does not cause any baseline code to be
860 // emitted.
862 // Enter a nested scope with EnterNestedScope. It will emit
863 // PUSHBLOCKSCOPE/ENTERWITH if needed, and arrange to record the PC bounds of
864 // the scope. Leave a nested scope with LeaveNestedScope, which, for blocks,
865 // will emit DEBUGLEAVEBLOCK and may emit POPBLOCKSCOPE. (For "with" scopes it
866 // emits LEAVEWITH, of course.) Pass EnterNestedScope a fresh StmtInfoBCE
867 // object, and pass that same object to the corresponding LeaveNestedScope. If
868 // the statement is a block scope, pass STMT_BLOCK as stmtType; otherwise for
869 // with scopes pass STMT_WITH.
871 static bool
872 EnterNestedScope(ExclusiveContext* cx, BytecodeEmitter* bce, StmtInfoBCE* stmt, ObjectBox* objbox,
873 StmtType stmtType)
875 Rooted<NestedScopeObject*> scopeObj(cx, &objbox->object->as<NestedScopeObject>());
876 uint32_t scopeObjectIndex = bce->objectList.add(objbox);
878 switch (stmtType) {
879 case STMT_BLOCK: {
880 Rooted<StaticBlockObject*> blockObj(cx, &scopeObj->as<StaticBlockObject>());
882 ComputeLocalOffset(cx, bce, blockObj);
884 if (!ComputeAliasedSlots(cx, bce, blockObj))
885 return false;
887 if (blockObj->needsClone()) {
888 if (!EmitInternedObjectOp(cx, scopeObjectIndex, JSOP_PUSHBLOCKSCOPE, bce))
889 return false;
891 break;
893 case STMT_WITH:
894 JS_ASSERT(scopeObj->is<StaticWithObject>());
895 if (!EmitInternedObjectOp(cx, scopeObjectIndex, JSOP_ENTERWITH, bce))
896 return false;
897 break;
898 default:
899 MOZ_CRASH("Unexpected scope statement");
902 uint32_t parent = BlockScopeNote::NoBlockScopeIndex;
903 if (StmtInfoBCE* stmt = bce->topScopeStmt) {
904 for (; stmt->staticScope != bce->staticScope; stmt = stmt->down) {}
905 parent = stmt->blockScopeIndex;
908 stmt->blockScopeIndex = bce->blockScopeList.length();
909 if (!bce->blockScopeList.append(scopeObjectIndex, bce->offset(), parent))
910 return false;
912 PushStatementBCE(bce, stmt, stmtType, bce->offset());
913 scopeObj->initEnclosingNestedScope(EnclosingStaticScope(bce));
914 FinishPushNestedScope(bce, stmt, *scopeObj);
915 JS_ASSERT(stmt->isNestedScope);
916 stmt->isBlockScope = (stmtType == STMT_BLOCK);
918 return true;
921 // Patches |breaks| and |continues| unless the top statement info record
922 // represents a try-catch-finally suite. May fail if a jump offset overflows.
923 static bool
924 PopStatementBCE(ExclusiveContext* cx, BytecodeEmitter* bce)
926 StmtInfoBCE* stmt = bce->topStmt;
927 if (!stmt->isTrying() &&
928 (!BackPatch(cx, bce, stmt->breaks, bce->code().end(), JSOP_GOTO) ||
929 !BackPatch(cx, bce, stmt->continues, bce->code(stmt->update), JSOP_GOTO)))
931 return false;
934 FinishPopStatement(bce);
935 return true;
938 static bool
939 LeaveNestedScope(ExclusiveContext* cx, BytecodeEmitter* bce, StmtInfoBCE* stmt)
941 JS_ASSERT(stmt == bce->topStmt);
942 JS_ASSERT(stmt->isNestedScope);
943 JS_ASSERT(stmt->isBlockScope == !(stmt->type == STMT_WITH));
944 uint32_t blockScopeIndex = stmt->blockScopeIndex;
946 #ifdef DEBUG
947 JS_ASSERT(bce->blockScopeList.list[blockScopeIndex].length == 0);
948 uint32_t blockObjIndex = bce->blockScopeList.list[blockScopeIndex].index;
949 ObjectBox* blockObjBox = bce->objectList.find(blockObjIndex);
950 NestedScopeObject* staticScope = &blockObjBox->object->as<NestedScopeObject>();
951 JS_ASSERT(stmt->staticScope == staticScope);
952 JS_ASSERT(staticScope == bce->staticScope);
953 JS_ASSERT_IF(!stmt->isBlockScope, staticScope->is<StaticWithObject>());
954 #endif
956 if (!PopStatementBCE(cx, bce))
957 return false;
959 if (Emit1(cx, bce, stmt->isBlockScope ? JSOP_DEBUGLEAVEBLOCK : JSOP_LEAVEWITH) < 0)
960 return false;
962 bce->blockScopeList.recordEnd(blockScopeIndex, bce->offset());
964 if (stmt->isBlockScope && stmt->staticScope->as<StaticBlockObject>().needsClone()) {
965 if (Emit1(cx, bce, JSOP_POPBLOCKSCOPE) < 0)
966 return false;
969 return true;
972 static bool
973 EmitIndex32(ExclusiveContext* cx, JSOp op, uint32_t index, BytecodeEmitter* bce)
975 const size_t len = 1 + UINT32_INDEX_LEN;
976 JS_ASSERT(len == size_t(js_CodeSpec[op].length));
977 ptrdiff_t offset = EmitCheck(cx, bce, len);
978 if (offset < 0)
979 return false;
981 jsbytecode* code = bce->code(offset);
982 code[0] = jsbytecode(op);
983 SET_UINT32_INDEX(code, index);
984 UpdateDepth(cx, bce, offset);
985 CheckTypeSet(cx, bce, op);
986 return true;
989 static bool
990 EmitIndexOp(ExclusiveContext* cx, JSOp op, uint32_t index, BytecodeEmitter* bce)
992 const size_t len = js_CodeSpec[op].length;
993 JS_ASSERT(len >= 1 + UINT32_INDEX_LEN);
994 ptrdiff_t offset = EmitCheck(cx, bce, len);
995 if (offset < 0)
996 return false;
998 jsbytecode* code = bce->code(offset);
999 code[0] = jsbytecode(op);
1000 SET_UINT32_INDEX(code, index);
1001 UpdateDepth(cx, bce, offset);
1002 CheckTypeSet(cx, bce, op);
1003 return true;
1006 static bool
1007 EmitAtomOp(ExclusiveContext* cx, JSAtom* atom, JSOp op, BytecodeEmitter* bce)
1009 JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
1011 if (op == JSOP_GETPROP && atom == cx->names().length) {
1012 /* Specialize length accesses for the interpreter. */
1013 op = JSOP_LENGTH;
1016 jsatomid index;
1017 if (!bce->makeAtomIndex(atom, &index))
1018 return false;
1020 return EmitIndexOp(cx, op, index, bce);
1023 static bool
1024 EmitAtomOp(ExclusiveContext* cx, ParseNode* pn, JSOp op, BytecodeEmitter* bce)
1026 JS_ASSERT(pn->pn_atom != nullptr);
1027 return EmitAtomOp(cx, pn->pn_atom, op, bce);
1030 static bool
1031 EmitInternedObjectOp(ExclusiveContext* cx, uint32_t index, JSOp op, BytecodeEmitter* bce)
1033 JS_ASSERT(JOF_OPTYPE(op) == JOF_OBJECT);
1034 JS_ASSERT(index < bce->objectList.length);
1035 return EmitIndex32(cx, op, index, bce);
1038 static bool
1039 EmitObjectOp(ExclusiveContext* cx, ObjectBox* objbox, JSOp op, BytecodeEmitter* bce)
1041 return EmitInternedObjectOp(cx, bce->objectList.add(objbox), op, bce);
1044 static bool
1045 EmitObjectPairOp(ExclusiveContext* cx, ObjectBox* objbox1, ObjectBox* objbox2, JSOp op,
1046 BytecodeEmitter* bce)
1048 uint32_t index = bce->objectList.add(objbox1);
1049 bce->objectList.add(objbox2);
1050 return EmitInternedObjectOp(cx, index, op, bce);
1053 static bool
1054 EmitRegExp(ExclusiveContext* cx, uint32_t index, BytecodeEmitter* bce)
1056 return EmitIndex32(cx, JSOP_REGEXP, index, bce);
/*
 * To catch accidental misuse, EMIT_UINT16_IMM_OP/Emit3 assert that they are
 * not used to unconditionally emit JSOP_GETLOCAL. Variable access should
 * instead be emitted using EmitVarOp. In special cases, when the caller
 * definitely knows that a given local slot is unaliased, this function may be
 * used as a non-asserting version of EMIT_UINT16_IMM_OP.
 */
static bool
EmitUnaliasedVarOp(ExclusiveContext* cx, JSOp op, uint32_t slot, BytecodeEmitter* bce)
{
    JS_ASSERT(JOF_OPTYPE(op) != JOF_SCOPECOORD);

    if (IsLocalOp(op)) {
        /* Local ops carry a LOCALNO-sized immediate naming the stack slot. */
        ptrdiff_t off = EmitN(cx, bce, op, LOCALNO_LEN);
        if (off < 0)
            return false;

        SET_LOCALNO(bce->code(off), slot);
        return true;
    }

    /* Otherwise this must be an argument access with an ARGNO immediate. */
    JS_ASSERT(IsArgOp(op));
    ptrdiff_t off = EmitN(cx, bce, op, ARGNO_LEN);
    if (off < 0)
        return false;

    SET_ARGNO(bce->code(off), slot);
    return true;
}
/*
 * Emit an aliased-variable op whose immediate is the (hops, slot) pair of the
 * scope coordinate |sc|: hops up the dynamic scope chain, then the slot in
 * the target scope object.
 */
static bool
EmitAliasedVarOp(ExclusiveContext* cx, JSOp op, ScopeCoordinate sc, BytecodeEmitter* bce)
{
    JS_ASSERT(JOF_OPTYPE(op) == JOF_SCOPECOORD);

    unsigned n = SCOPECOORD_HOPS_LEN + SCOPECOORD_SLOT_LEN;
    JS_ASSERT(int(n) + 1 /* op */ == js_CodeSpec[op].length);

    ptrdiff_t off = EmitN(cx, bce, op, n);
    if (off < 0)
        return false;

    /* Write the two immediates in opcode layout order: hops, then slot. */
    jsbytecode* pc = bce->code(off);
    SET_SCOPECOORD_HOPS(pc, sc.hops());
    pc += SCOPECOORD_HOPS_LEN;
    SET_SCOPECOORD_SLOT(pc, sc.slot());
    pc += SCOPECOORD_SLOT_LEN;
    CheckTypeSet(cx, bce, op);
    return true;
}
// Compute the number of nested scope objects that will actually be on the scope
// chain at runtime, given the BCE's current staticScope. Only non-block scopes
// and blocks that need cloning contribute a dynamic scope object.
static unsigned
DynamicNestedScopeDepth(BytecodeEmitter* bce)
{
    unsigned depth = 0;
    for (NestedScopeObject* b = bce->staticScope; b; b = b->enclosingNestedScope()) {
        if (!b->is<StaticBlockObject>() || b->as<StaticBlockObject>().needsClone())
            ++depth;
    }

    return depth;
}
/*
 * Find the CallObject slot of the aliased binding |name| in |script|.
 * Returns true and stores the slot in |*pslot| on success; false if |name|
 * has no aliased binding. Slots are assigned in binding-iteration order
 * starting after the CallObject's reserved slots.
 */
static bool
LookupAliasedName(HandleScript script, PropertyName* name, uint32_t* pslot)
{
    /*
     * Beware: BindingIter may contain more than one Binding for a given name
     * (in the case of |function f(x,x) {}|) but only one will be aliased.
     */
    uint32_t slot = CallObject::RESERVED_SLOTS;
    for (BindingIter bi(script); !bi.done(); bi++) {
        if (bi->aliased()) {
            if (bi->name() == name) {
                *pslot = slot;
                return true;
            }
            /* Only aliased bindings consume CallObject slots. */
            slot++;
        }
    }
    return false;
}
1144 static bool
1145 LookupAliasedNameSlot(HandleScript script, PropertyName* name, ScopeCoordinate* sc)
1147 uint32_t slot;
1148 if (!LookupAliasedName(script, name, &slot))
1149 return false;
1151 sc->setSlot(slot);
1152 return true;
/*
 * Use this function instead of assigning directly to 'hops' to guard for
 * uint8_t overflows. Reports JSMSG_TOO_DEEP on |pn| and returns false when
 * |src| does not fit in the coordinate's hops field.
 */
static bool
AssignHops(BytecodeEmitter* bce, ParseNode* pn, unsigned src, ScopeCoordinate* dst)
{
    if (src > UINT8_MAX) {
        bce->reportError(pn, JSMSG_TOO_DEEP, js_function_str);
        return false;
    }

    dst->setHops(src);
    return true;
}
/*
 * Emit an aliased-variable op for the name/function node |pn|, translating
 * its static (level, slot) cookie into a dynamic (hops, slot) scope
 * coordinate before delegating to the ScopeCoordinate overload above.
 */
static bool
EmitAliasedVarOp(ExclusiveContext* cx, JSOp op, ParseNode* pn, BytecodeEmitter* bce)
{
    /*
     * While pn->pn_cookie tells us how many function scopes are between the use and the def this
     * is not the same as how many hops up the dynamic scope chain are needed. In particular:
     *  - a lexical function scope only contributes a hop if it is "heavyweight" (has a dynamic
     *    scope object).
     *  - a heavyweight named function scope contributes an extra scope to the scope chain (a
     *    DeclEnvObject that holds just the name).
     *  - all the intervening let/catch blocks must be counted.
     */
    unsigned skippedScopes = 0;
    BytecodeEmitter* bceOfDef = bce;
    if (pn->isUsed()) {
        /*
         * As explained in BindNameToSlot, the 'level' of a use indicates how
         * many function scopes (i.e., BytecodeEmitters) to skip to find the
         * enclosing function scope of the definition being accessed.
         */
        for (unsigned i = pn->pn_cookie.level(); i; i--) {
            skippedScopes += DynamicNestedScopeDepth(bceOfDef);
            FunctionBox* funbox = bceOfDef->sc->asFunctionBox();
            if (funbox->isHeavyweight()) {
                skippedScopes++;
                /* A named lambda adds a DeclEnvObject holding just its name. */
                if (funbox->function()->isNamedLambda())
                    skippedScopes++;
            }
            bceOfDef = bceOfDef->parent;
        }
    } else {
        JS_ASSERT(pn->isDefn());
        JS_ASSERT(pn->pn_cookie.level() == bce->script->staticLevel());
    }

    /*
     * The final part of the skippedScopes computation depends on the type of
     * variable. An arg or local variable is at the outer scope of a function
     * and so includes the full DynamicNestedScopeDepth. A let/catch-binding
     * requires a search of the block chain to see how many (dynamic) block
     * objects to skip.
     */
    ScopeCoordinate sc;
    if (IsArgOp(pn->getOp())) {
        if (!AssignHops(bce, pn, skippedScopes + DynamicNestedScopeDepth(bceOfDef), &sc))
            return false;
        JS_ALWAYS_TRUE(LookupAliasedNameSlot(bceOfDef->script, pn->name(), &sc));
    } else {
        JS_ASSERT(IsLocalOp(pn->getOp()) || pn->isKind(PNK_FUNCTION));
        uint32_t local = pn->pn_cookie.slot();
        if (local < bceOfDef->script->bindings.numVars()) {
            /* A body-level var/const: slot comes from the bindings. */
            if (!AssignHops(bce, pn, skippedScopes + DynamicNestedScopeDepth(bceOfDef), &sc))
                return false;
            JS_ALWAYS_TRUE(LookupAliasedNameSlot(bceOfDef->script, pn->name(), &sc));
        } else {
            /* A let/catch binding: walk out to the block that declares it. */
            JS_ASSERT_IF(bce->sc->isFunctionBox(), local <= bceOfDef->script->bindings.numLocals());
            JS_ASSERT(bceOfDef->staticScope->is<StaticBlockObject>());
            Rooted<StaticBlockObject*> b(cx, &bceOfDef->staticScope->as<StaticBlockObject>());
            while (local < b->localOffset()) {
                if (b->needsClone())
                    skippedScopes++;
                b = &b->enclosingNestedScope()->as<StaticBlockObject>();
            }
            if (!AssignHops(bce, pn, skippedScopes, &sc))
                return false;
            sc.setSlot(b->localIndexToSlot(local));
        }
    }

    return EmitAliasedVarOp(cx, op, sc, bce);
}
/*
 * Emit a variable access for |pn|. Dispatches between the aliased
 * (scope-coordinate) and unaliased (arg/local immediate) encodings, and
 * upgrades a GET/SET ARG/LOCAL op to its *ALIASEDVAR form when the binding
 * turns out to be aliased.
 */
static bool
EmitVarOp(ExclusiveContext* cx, ParseNode* pn, JSOp op, BytecodeEmitter* bce)
{
    JS_ASSERT(pn->isKind(PNK_FUNCTION) || pn->isKind(PNK_NAME));
    JS_ASSERT(!pn->pn_cookie.isFree());

    if (IsAliasedVarOp(op)) {
        /* The cookie already holds the (hops, slot) coordinate. */
        ScopeCoordinate sc;
        sc.setHops(pn->pn_cookie.level());
        sc.setSlot(pn->pn_cookie.slot());
        return EmitAliasedVarOp(cx, op, sc, bce);
    }

    JS_ASSERT_IF(pn->isKind(PNK_NAME), IsArgOp(op) || IsLocalOp(op));

    if (!bce->isAliasedName(pn)) {
        JS_ASSERT(pn->isUsed() || pn->isDefn());
        JS_ASSERT_IF(pn->isUsed(), pn->pn_cookie.level() == 0);
        JS_ASSERT_IF(pn->isDefn(), pn->pn_cookie.level() == bce->script->staticLevel());
        return EmitUnaliasedVarOp(cx, op, pn->pn_cookie.slot(), bce);
    }

    /* Aliased after all: rewrite the op before emitting. */
    switch (op) {
      case JSOP_GETARG: case JSOP_GETLOCAL: op = JSOP_GETALIASEDVAR; break;
      case JSOP_SETARG: case JSOP_SETLOCAL: op = JSOP_SETALIASEDVAR; break;
      default: MOZ_CRASH("unexpected var op");
    }

    return EmitAliasedVarOp(cx, op, pn, bce);
}
1274 static JSOp
1275 GetIncDecInfo(ParseNodeKind kind, bool* post)
1277 JS_ASSERT(kind == PNK_POSTINCREMENT || kind == PNK_PREINCREMENT ||
1278 kind == PNK_POSTDECREMENT || kind == PNK_PREDECREMENT);
1279 *post = kind == PNK_POSTINCREMENT || kind == PNK_POSTDECREMENT;
1280 return (kind == PNK_POSTINCREMENT || kind == PNK_PREINCREMENT) ? JSOP_ADD : JSOP_SUB;
/*
 * Emit ++/-- applied to an arg, local, or aliased variable. The stack
 * comments on the right track the operand stack; for postfix forms the
 * pre-converted numeric value is duplicated so it remains as the result
 * after the store.
 */
static bool
EmitVarIncDec(ExclusiveContext* cx, ParseNode* pn, BytecodeEmitter* bce)
{
    JSOp op = pn->pn_kid->getOp();
    JS_ASSERT(IsArgOp(op) || IsLocalOp(op) || IsAliasedVarOp(op));
    JS_ASSERT(pn->pn_kid->isKind(PNK_NAME));
    JS_ASSERT(!pn->pn_kid->pn_cookie.isFree());

    bool post;
    JSOp binop = GetIncDecInfo(pn->getKind(), &post);

    /* Pick the matching load/store pair for the variable's storage kind. */
    JSOp getOp, setOp;
    if (IsLocalOp(op)) {
        getOp = JSOP_GETLOCAL;
        setOp = JSOP_SETLOCAL;
    } else if (IsArgOp(op)) {
        getOp = JSOP_GETARG;
        setOp = JSOP_SETARG;
    } else {
        getOp = JSOP_GETALIASEDVAR;
        setOp = JSOP_SETALIASEDVAR;
    }

    if (!EmitVarOp(cx, pn->pn_kid, getOp, bce))              // V
        return false;
    if (Emit1(cx, bce, JSOP_POS) < 0)                        // N
        return false;
    if (post && Emit1(cx, bce, JSOP_DUP) < 0)                // N? N
        return false;
    if (Emit1(cx, bce, JSOP_ONE) < 0)                        // N? N 1
        return false;
    if (Emit1(cx, bce, binop) < 0)                           // N? N+1
        return false;
    if (!EmitVarOp(cx, pn->pn_kid, setOp, bce))              // N? N+1
        return false;
    if (post && Emit1(cx, bce, JSOP_POP) < 0)                // RESULT
        return false;

    return true;
}
/*
 * Return whether the binding that |pn| resolves to must be accessed through
 * a scope object (aliased) rather than a stack slot.
 */
bool
BytecodeEmitter::isAliasedName(ParseNode* pn)
{
    Definition* dn = pn->resolve();
    JS_ASSERT(dn->isDefn());
    JS_ASSERT(!dn->isPlaceholder());
    JS_ASSERT(dn->isBound());

    /* If dn is in an enclosing function, it is definitely aliased. */
    if (dn->pn_cookie.level() != script->staticLevel())
        return true;

    switch (dn->kind()) {
      case Definition::LET:
        /*
         * There are two ways to alias a let variable: nested functions and
         * dynamic scope operations. (This is overly conservative since the
         * bindingsAccessedDynamically flag, checked by allLocalsAliased, is
         * function-wide.)
         *
         * In addition all locals in generators are marked as aliased, to ensure
         * that they are allocated on scope chains instead of on the stack. See
         * the definition of SharedContext::allLocalsAliased.
         */
        return dn->isClosed() || sc->allLocalsAliased();
      case Definition::ARG:
        /*
         * Consult the bindings, since they already record aliasing. We might
         * be tempted to use the same definition as VAR/CONST/LET, but there is
         * a problem caused by duplicate arguments: only the last argument with
         * a given name is aliased. This is necessary to avoid generating a
         * shape for the call object with with more than one name for a given
         * slot (which violates internal engine invariants). All this means that
         * the '|| sc->allLocalsAliased()' disjunct is incorrect since it will
         * mark both parameters in function(x,x) as aliased.
         */
        return script->formalIsAliased(pn->pn_cookie.slot());
      case Definition::VAR:
      case Definition::CONST:
        JS_ASSERT_IF(sc->allLocalsAliased(), script->varIsAliased(pn->pn_cookie.slot()));
        return script->varIsAliased(pn->pn_cookie.slot());
      case Definition::PLACEHOLDER:
      case Definition::NAMED_LAMBDA:
      case Definition::MISSING:
        MOZ_CRASH("unexpected dn->kind");
    }
    return false;
}
/*
 * Try to convert a *NAME op with a free name to a more specialized GNAME,
 * INTRINSIC or ALIASEDVAR op, which optimize accesses on that name.
 * Return true if a conversion was made.
 */
static bool
TryConvertFreeName(BytecodeEmitter* bce, ParseNode* pn)
{
    /*
     * In self-hosting mode, JSOP_*NAME is unconditionally converted to
     * JSOP_*INTRINSIC. This causes lookups to be redirected to the special
     * intrinsics holder in the global object, into which any missing values are
     * cloned lazily upon first access.
     */
    if (bce->emitterMode == BytecodeEmitter::SelfHosting) {
        JSOp op;
        switch (pn->getOp()) {
          case JSOP_NAME:     op = JSOP_GETINTRINSIC; break;
          case JSOP_SETNAME:  op = JSOP_SETINTRINSIC; break;
          /* Other *NAME ops aren't (yet) supported in self-hosted code. */
          default: MOZ_CRASH("intrinsic");
        }
        pn->setOp(op);
        return true;
    }

    /*
     * When parsing inner functions lazily, parse nodes for outer functions no
     * longer exist and only the function's scope chain is available for
     * resolving upvar accesses within the inner function.
     */
    if (bce->emitterMode == BytecodeEmitter::LazyFunction) {
        // The only statements within a lazy function which can push lexical
        // scopes are try/catch blocks. Use generic ops in this case.
        for (StmtInfoBCE* stmt = bce->topStmt; stmt; stmt = stmt->down) {
            if (stmt->type == STMT_CATCH)
                return true;
        }

        size_t hops = 0;
        FunctionBox* funbox = bce->sc->asFunctionBox();
        if (funbox->hasExtensibleScope())
            return false;
        if (funbox->function()->isNamedLambda() && funbox->function()->atom() == pn->pn_atom)
            return false;
        if (funbox->isHeavyweight()) {
            hops++;
            if (funbox->function()->isNamedLambda())
                hops++;
        }
        if (bce->script->directlyInsideEval())
            return false;

        /* Walk the enclosing static scope chain looking for an aliased slot. */
        RootedObject outerScope(bce->sc->context, bce->script->enclosingStaticScope());
        for (StaticScopeIter<CanGC> ssi(bce->sc->context, outerScope); !ssi.done(); ssi++) {
            if (ssi.type() != StaticScopeIter<CanGC>::FUNCTION) {
                if (ssi.type() == StaticScopeIter<CanGC>::BLOCK) {
                    // Use generic ops if a catch block is encountered.
                    return false;
                }
                if (ssi.hasDynamicScopeObject())
                    hops++;
                continue;
            }
            RootedScript script(bce->sc->context, ssi.funScript());
            if (script->functionNonDelazifying()->atom() == pn->pn_atom)
                return false;
            if (ssi.hasDynamicScopeObject()) {
                uint32_t slot;
                if (LookupAliasedName(script, pn->pn_atom->asPropertyName(), &slot)) {
                    JSOp op;
                    switch (pn->getOp()) {
                      case JSOP_NAME:    op = JSOP_GETALIASEDVAR; break;
                      case JSOP_SETNAME: op = JSOP_SETALIASEDVAR; break;
                      default: return false;
                    }
                    pn->setOp(op);
                    JS_ALWAYS_TRUE(pn->pn_cookie.set(bce->parser->tokenStream, hops, slot));
                    return true;
                }
                hops++;
            }

            if (script->funHasExtensibleScope() || script->directlyInsideEval())
                return false;
        }
    }

    // Unbound names aren't recognizable global-property references if the
    // script isn't running against its global object.
    if (!bce->script->compileAndGo() || !bce->hasGlobalScope)
        return false;

    // Deoptimized names also aren't necessarily globals.
    if (pn->isDeoptimized())
        return false;

    if (bce->sc->isFunctionBox()) {
        // Unbound names in function code may not be globals if new locals can
        // be added to this function (or an enclosing one) to alias a global
        // reference.
        FunctionBox* funbox = bce->sc->asFunctionBox();
        if (funbox->mightAliasLocals())
            return false;
    }

    // If this is eval code, being evaluated inside strict mode eval code,
    // an "unbound" name might be a binding local to that outer eval:
    //
    //   var x = "GLOBAL";
    //   eval('"use strict"; ' +
    //        'var x; ' +
    //        'eval("print(x)");'); // "undefined", not "GLOBAL"
    //
    // Given the enclosing eval code's strictness and its bindings (neither is
    // readily available now), we could exactly check global-ness, but it's not
    // worth the trouble for doubly-nested eval code. So we conservatively
    // approximate. If the outer eval code is strict, then this eval code will
    // be: thus, don't optimize if we're compiling strict code inside an eval.
    if (bce->insideEval && bce->sc->strict)
        return false;

    JSOp op;
    switch (pn->getOp()) {
      case JSOP_NAME:     op = JSOP_GETGNAME; break;
      case JSOP_SETNAME:  op = JSOP_SETGNAME; break;
      case JSOP_SETCONST:
        // Not supported.
        return false;
      default: MOZ_CRASH("gname");
    }
    pn->setOp(op);
    return true;
}
/*
 * BindNameToSlotHelper attempts to optimize name gets and sets to stack slot
 * loads and stores, given the compile-time information in bce and a PNK_NAME
 * node pn. It returns false on error, true on success.
 *
 * The caller can test pn->pn_cookie.isFree() to tell whether optimization
 * occurred, in which case BindNameToSlotHelper also updated pn->pn_op. If
 * pn->pn_cookie.isFree() is still true on return, pn->pn_op still may have
 * been optimized, e.g., from JSOP_NAME to JSOP_CALLEE. Whether or not
 * pn->pn_op was modified, if this function finds an argument or local variable
 * name, PND_CONST will be set in pn_dflags for read-only properties after a
 * successful return.
 *
 * NB: if you add more opcodes specialized from JSOP_NAME, etc., don't forget
 * to update the special cases in EmitFor (for-in) and EmitAssignment (= and
 * op=, e.g. +=).
 */
static bool
BindNameToSlotHelper(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
{
    JS_ASSERT(pn->isKind(PNK_NAME));

    JS_ASSERT_IF(pn->isKind(PNK_FUNCTION), pn->isBound());

    /* Don't attempt if 'pn' is already bound or deoptimized or a function. */
    if (pn->isBound() || pn->isDeoptimized())
        return true;

    /* JSOP_CALLEE is pre-bound by definition. */
    JSOp op = pn->getOp();
    JS_ASSERT(op != JSOP_CALLEE);
    JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);

    /*
     * The parser already linked name uses to definitions when (where not
     * prevented by non-lexical constructs like 'with' and 'eval').
     */
    Definition* dn;
    if (pn->isUsed()) {
        JS_ASSERT(pn->pn_cookie.isFree());
        dn = pn->pn_lexdef;
        JS_ASSERT(dn->isDefn());
        pn->pn_dflags |= (dn->pn_dflags & PND_CONST);
    } else if (pn->isDefn()) {
        dn = (Definition*) pn;
    } else {
        return true;
    }

    /*
     * Turn attempts to mutate const-declared bindings into get ops (for
     * pre-increment and pre-decrement ops, our caller will have to emit
     * JSOP_POS, JSOP_ONE, and JSOP_ADD as well).
     *
     * Turn JSOP_DELNAME into JSOP_FALSE if dn is known, as all declared
     * bindings visible to the compiler are permanent in JS unless the
     * declaration originates at top level in eval code.
     */
    switch (op) {
      case JSOP_NAME:
      case JSOP_SETCONST:
        break;
      default:
        if (pn->isConst()) {
            if (bce->sc->needStrictChecks()) {
                JSAutoByteString name;
                if (!AtomToPrintableString(cx, pn->pn_atom, &name) ||
                    !bce->reportStrictModeError(pn, JSMSG_READ_ONLY, name.ptr()))
                {
                    return false;
                }
            }
            pn->setOp(op = JSOP_NAME);
        }
    }

    if (dn->pn_cookie.isFree()) {
        if (HandleScript caller = bce->evalCaller) {
            JS_ASSERT(bce->script->compileAndGo());

            /*
             * Don't generate upvars on the left side of a for loop. See
             * bug 470758.
             */
            if (bce->emittingForInit)
                return true;

            /*
             * If this is an eval in the global scope, then unbound variables
             * must be globals, so try to use GNAME ops.
             */
            if (!caller->functionOrCallerFunction() && TryConvertFreeName(bce, pn)) {
                pn->pn_dflags |= PND_BOUND;
                return true;
            }

            /*
             * Out of tricks, so we must rely on PICs to optimize named
             * accesses from direct eval called from function code.
             */
            return true;
        }

        /* Optimize accesses to undeclared globals. */
        if (!TryConvertFreeName(bce, pn))
            return true;

        pn->pn_dflags |= PND_BOUND;
        return true;
    }

    /*
     * At this point, we are only dealing with uses that have already been
     * bound to definitions via pn_lexdef. The rest of this routine converts
     * the parse node of the use from its initial JSOP_*NAME* op to a LOCAL/ARG
     * op. This requires setting the node's pn_cookie with a pair (level, slot)
     * where 'level' is the number of function scopes between the use and the
     * def and 'slot' is the index to emit as the immediate of the ARG/LOCAL
     * op. For example, in this code:
     *
     *   function(a,b,x) { return x }
     *   function(y) { function() { return y } }
     *
     * x will get (level = 0, slot = 2) and y will get (level = 1, slot = 0).
     */
    JS_ASSERT(!pn->isDefn());
    JS_ASSERT(pn->isUsed());
    JS_ASSERT(pn->pn_lexdef);
    JS_ASSERT(pn->pn_cookie.isFree());

    /*
     * We are compiling a function body and may be able to optimize name
     * to stack slot. Look for an argument or variable in the function and
     * rewrite pn_op and update pn accordingly.
     */
    switch (dn->kind()) {
      case Definition::ARG:
        switch (op) {
          case JSOP_NAME: op = JSOP_GETARG; break;
          case JSOP_SETNAME: op = JSOP_SETARG; break;
          default: MOZ_CRASH("arg");
        }
        JS_ASSERT(!pn->isConst());
        break;

      case Definition::VAR:
      case Definition::CONST:
      case Definition::LET:
        switch (op) {
          case JSOP_NAME: op = JSOP_GETLOCAL; break;
          case JSOP_SETNAME: op = JSOP_SETLOCAL; break;
          case JSOP_SETCONST: op = JSOP_SETLOCAL; break;
          default: MOZ_CRASH("local");
        }
        break;

      case Definition::NAMED_LAMBDA: {
        JS_ASSERT(dn->isOp(JSOP_CALLEE));
        JS_ASSERT(op != JSOP_CALLEE);

        /*
         * Currently, the ALIASEDVAR ops do not support accessing the
         * callee of a DeclEnvObject, so use NAME.
         */
        if (dn->pn_cookie.level() != bce->script->staticLevel())
            return true;

        DebugOnly<JSFunction*> fun = bce->sc->asFunctionBox()->function();
        JS_ASSERT(fun->isLambda());
        JS_ASSERT(pn->pn_atom == fun->atom());

        /*
         * Leave pn->isOp(JSOP_NAME) if bce->fun is heavyweight to
         * address two cases: a new binding introduced by eval, and
         * assignment to the name in strict mode.
         *
         *   var fun = (function f(s) { eval(s); return f; });
         *   assertEq(fun("var f = 42"), 42);
         *
         * ECMAScript specifies that a function expression's name is bound
         * in a lexical environment distinct from that used to bind its
         * named parameters, the arguments object, and its variables. The
         * new binding for "var f = 42" shadows the binding for the
         * function itself, so the name of the function will not refer to
         * the function.
         *
         *   (function f() { "use strict"; f = 12; })();
         *
         * Outside strict mode, assignment to a function expression's name
         * has no effect. But in strict mode, this attempt to mutate an
         * immutable binding must throw a TypeError. We implement this by
         * not optimizing such assignments and by marking such functions as
         * heavyweight, ensuring that the function name is represented in
         * the scope chain so that assignment will throw a TypeError.
         */
        if (!bce->sc->asFunctionBox()->isHeavyweight()) {
            op = JSOP_CALLEE;
            pn->pn_dflags |= PND_CONST;
        }

        pn->setOp(op);
        pn->pn_dflags |= PND_BOUND;
        return true;
      }

      case Definition::PLACEHOLDER:
        return true;

      case Definition::MISSING:
        MOZ_CRASH("missing");
    }

    /*
     * The difference between the current static level and the static level of
     * the definition is the number of function scopes between the current
     * scope and dn's scope.
     */
    unsigned skip = bce->script->staticLevel() - dn->pn_cookie.level();
    JS_ASSERT_IF(skip, dn->isClosed());

    /*
     * Explicitly disallow accessing var/let bindings in global scope from
     * nested functions. The reason for this limitation is that, since the
     * global script is not included in the static scope chain (1. because it
     * has no object to stand in the static scope chain, 2. to minimize memory
     * bloat where a single live function keeps its whole global script
     * alive.), ScopeCoordinateToTypeSet is not able to find the var/let's
     * associated types::TypeSet.
     */
    if (skip) {
        BytecodeEmitter* bceSkipped = bce;
        for (unsigned i = 0; i < skip; i++)
            bceSkipped = bceSkipped->parent;
        if (!bceSkipped->sc->isFunctionBox())
            return true;
    }

    JS_ASSERT(!pn->isOp(op));
    pn->setOp(op);
    if (!pn->pn_cookie.set(bce->parser->tokenStream, skip, dn->pn_cookie.slot()))
        return false;

    pn->pn_dflags |= PND_BOUND;
    return true;
}
1754 * Attempts to bind the name, then checks that no dynamic scope lookup ops are
1755 * emitted in self-hosting mode. NAME ops do lookups off current scope chain,
1756 * and we do not want to allow self-hosted code to use the dynamic scope.
1758 static bool
1759 BindNameToSlot(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
1761 if (!BindNameToSlotHelper(cx, bce, pn))
1762 return false;
1764 if (bce->emitterMode == BytecodeEmitter::SelfHosting && !pn->isBound()) {
1765 bce->reportError(pn, JSMSG_SELFHOSTED_UNBOUND_NAME);
1766 return false;
1769 return true;
/*
 * If pn contains a useful expression, return true with *answer set to true.
 * If pn contains a useless expression, return true with *answer set to false.
 * Return false on error.
 *
 * The caller should initialize *answer to false and invoke this function on
 * an expression statement or similar subtree to decide whether the tree could
 * produce code that has any side effects. For an expression statement, we
 * define useless code as code with no side effects, because the main effect,
 * the value left on the stack after the code executes, will be discarded by a
 * pop bytecode.
 */
static bool
CheckSideEffects(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn, bool* answer)
{
    /* Once an effect has been found, further traversal is unnecessary. */
    if (!pn || *answer)
        return true;

    switch (pn->getArity()) {
      case PN_CODE:
        /*
         * A named function, contrary to ES3, is no longer useful, because we
         * bind its name lexically (using JSOP_CALLEE) instead of creating an
         * Object instance and binding a readonly, permanent property in it
         * (the object and binding can be detected and hijacked or captured).
         * This is a bug fix to ES3; it is fixed in ES3.1 drafts.
         */
        MOZ_ASSERT(*answer == false);
        return true;

      case PN_LIST:
        if (pn->isOp(JSOP_NOP) || pn->isOp(JSOP_OR) || pn->isOp(JSOP_AND) ||
            pn->isOp(JSOP_STRICTEQ) || pn->isOp(JSOP_STRICTNE)) {
            /*
             * Non-operators along with ||, &&, ===, and !== never invoke
             * toString or valueOf.
             */
            bool ok = true;
            for (ParseNode* pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next)
                ok &= CheckSideEffects(cx, bce, pn2, answer);
            return ok;
        }

        if (pn->isKind(PNK_GENEXP)) {
            /* Generator-expressions are harmless if the result is ignored. */
            MOZ_ASSERT(*answer == false);
            return true;
        }

        /*
         * All invocation operations (construct: PNK_NEW, call: PNK_CALL)
         * are presumed to be useful, because they may have side effects
         * even if their main effect (their return value) is discarded.
         *
         * PNK_ELEM binary trees of 3+ nodes are flattened into lists to
         * avoid too much recursion. All such lists must be presumed to be
         * useful because each index operation could invoke a getter.
         *
         * Likewise, array and object initialisers may call prototype
         * setters (the __defineSetter__ built-in, and writable __proto__
         * on Array.prototype create this hazard). Initialiser list nodes
         * have JSOP_NEWINIT in their pn_op.
         */
        *answer = true;
        return true;

      case PN_TERNARY:
        return CheckSideEffects(cx, bce, pn->pn_kid1, answer) &&
               CheckSideEffects(cx, bce, pn->pn_kid2, answer) &&
               CheckSideEffects(cx, bce, pn->pn_kid3, answer);

      case PN_BINARY:
      case PN_BINARY_OBJ:
        if (pn->isAssignment()) {
            /*
             * Assignment is presumed to be useful, even if the next operation
             * is another assignment overwriting this one's ostensible effect,
             * because the left operand may be a property with a setter that
             * has side effects.
             *
             * The only exception is assignment of a useless value to a const
             * declared in the function currently being compiled.
             */
            ParseNode* pn2 = pn->pn_left;
            if (!pn2->isKind(PNK_NAME)) {
                *answer = true;
            } else {
                if (!BindNameToSlot(cx, bce, pn2))
                    return false;
                if (!CheckSideEffects(cx, bce, pn->pn_right, answer))
                    return false;
                if (!*answer && (!pn->isOp(JSOP_NOP) || !pn2->isConst()))
                    *answer = true;
            }
            return true;
        }

        if (pn->isOp(JSOP_OR) || pn->isOp(JSOP_AND) || pn->isOp(JSOP_STRICTEQ) ||
            pn->isOp(JSOP_STRICTNE)) {
            /*
             * ||, &&, ===, and !== do not convert their operands via
             * toString or valueOf method calls.
             */
            return CheckSideEffects(cx, bce, pn->pn_left, answer) &&
                   CheckSideEffects(cx, bce, pn->pn_right, answer);
        }

        /*
         * We can't easily prove that neither operand ever denotes an
         * object with a toString or valueOf method.
         */
        *answer = true;
        return true;

      case PN_UNARY:
        switch (pn->getKind()) {
          case PNK_DELETE:
          {
            ParseNode* pn2 = pn->pn_kid;
            switch (pn2->getKind()) {
              case PNK_NAME:
                if (!BindNameToSlot(cx, bce, pn2))
                    return false;
                if (pn2->isConst()) {
                    MOZ_ASSERT(*answer == false);
                    return true;
                }
                /* FALL THROUGH */
              case PNK_DOT:
              case PNK_CALL:
              case PNK_ELEM:
                /* All these delete addressing modes have effects too. */
                *answer = true;
                return true;
              default:
                return CheckSideEffects(cx, bce, pn2, answer);
            }
            MOZ_CRASH("We have a returning default case");
          }

          case PNK_TYPEOF:
          case PNK_VOID:
          case PNK_NOT:
          case PNK_BITNOT:
            if (pn->isOp(JSOP_NOT)) {
                /* ! does not convert its operand via toString or valueOf. */
                return CheckSideEffects(cx, bce, pn->pn_kid, answer);
            }
            /* FALL THROUGH */

          default:
            /*
             * All of PNK_INC, PNK_DEC, PNK_THROW, PNK_YIELD, and PNK_YIELD_STAR
             * have direct effects. Of the remaining unary-arity node types, we
             * can't easily prove that the operand never denotes an object with
             * a toString or valueOf method.
             */
            *answer = true;
            return true;
        }
        MOZ_CRASH("We have a returning default case");

      case PN_NAME:
        /*
         * Take care to avoid trying to bind a label name (labels, both for
         * statements and property values in object initialisers, have pn_op
         * defaulted to JSOP_NOP).
         */
        if (pn->isKind(PNK_NAME) && !pn->isOp(JSOP_NOP)) {
            if (!BindNameToSlot(cx, bce, pn))
                return false;
            if (!pn->isOp(JSOP_CALLEE) && pn->pn_cookie.isFree()) {
                /*
                 * Not a use of an unshadowed named function expression's given
                 * name, so this expression could invoke a getter that has side
                 * effects.
                 */
                *answer = true;
            }
        }

        if (pn->isKind(PNK_DOT)) {
            /* Dotted property references in general can call getters. */
            *answer = true;
        }
        return CheckSideEffects(cx, bce, pn->maybeExpr(), answer);

      case PN_NULLARY:
        if (pn->isKind(PNK_DEBUGGER))
            *answer = true;
        return true;
    }
    return true;
}
1966 bool
1967 BytecodeEmitter::isInLoop()
1969 for (StmtInfoBCE* stmt = topStmt; stmt; stmt = stmt->down) {
1970 if (stmt->isLoop())
1971 return true;
1973 return false;
/*
 * Return true (and mark the script as having singletons) only for
 * compile-and-go, non-function code outside any loop; otherwise return
 * false.
 */
bool
BytecodeEmitter::checkSingletonContext()
{
    if (!script->compileAndGo() || sc->isFunctionBox() || isInLoop())
        return false;
    hasSingletons = true;
    return true;
}
/*
 * Return true when name lookups must compute an implicit |this|: when the
 * script is not compile-and-go, or when any 'with' scope encloses the code
 * being emitted (either statically on the scope chain or as an open
 * STMT_WITH statement).
 */
bool
BytecodeEmitter::needsImplicitThis()
{
    if (!script->compileAndGo())
        return true;

    if (sc->isFunctionBox()) {
        if (sc->asFunctionBox()->inWith)
            return true;
    } else {
        /* Global code: scan the runtime scope chain for a with object. */
        JSObject* scope = sc->asGlobalSharedContext()->scopeChain();
        while (scope) {
            if (scope->is<DynamicWithObject>())
                return true;
            scope = scope->enclosingScope();
        }
    }

    /* Also check statements currently open in this emitter. */
    for (StmtInfoBCE* stmt = topStmt; stmt; stmt = stmt->down) {
        if (stmt->type == STMT_WITH)
            return true;
    }
    return false;
}
/*
 * Fire the Debugger's new-script hook for a finished top-level script.
 * No-op off the main thread and for nested (non-top-level) emitters.
 */
void
BytecodeEmitter::tellDebuggerAboutCompiledScript(ExclusiveContext* cx)
{
    // Note: when parsing off thread the resulting scripts need to be handed to
    // the debugger after rejoining to the main thread.
    if (!cx->isJSContext())
        return;

    // Lazy scripts are never top level (despite always being invoked with a
    // nullptr parent), and so the hook should never be fired.
    if (emitterMode != LazyFunction && !parent) {
        GlobalObject* compileAndGoGlobal = nullptr;
        if (script->compileAndGo())
            compileAndGoGlobal = &script->global();
        Debugger::onNewScript(cx->asJSContext(), script, compileAndGoGlobal);
    }
}
/* Convenience accessor for the parser's token stream. */
inline TokenStream*
BytecodeEmitter::tokenStream()
{
    return &parser->tokenStream;
}
/*
 * Report a compile error at |pn|'s source position (or the current token's
 * position when |pn| is null). Returns the token stream's result, which is
 * false for a hard error.
 */
bool
BytecodeEmitter::reportError(ParseNode* pn, unsigned errorNumber, ...)
{
    TokenPos pos = pn ? pn->pn_pos : tokenStream()->currentToken().pos;

    va_list args;
    va_start(args, errorNumber);
    bool result = tokenStream()->reportCompileErrorNumberVA(pos.begin, JSREPORT_ERROR,
                                                            errorNumber, args);
    va_end(args);
    return result;
}
/*
 * Report a strict-mode warning at |pn|'s position (or the current token's
 * position when |pn| is null); see reportError for the position convention.
 */
bool
BytecodeEmitter::reportStrictWarning(ParseNode* pn, unsigned errorNumber, ...)
{
    TokenPos pos = pn ? pn->pn_pos : tokenStream()->currentToken().pos;

    va_list args;
    va_start(args, errorNumber);
    bool result = tokenStream()->reportStrictWarningErrorNumberVA(pos.begin, errorNumber, args);
    va_end(args);
    return result;
}
/*
 * Report a strict-mode error at |pn|'s position (or the current token's
 * position when |pn| is null). Whether this is a hard error or a warning
 * depends on the shared context's strictness, which is forwarded to the
 * token stream.
 */
bool
BytecodeEmitter::reportStrictModeError(ParseNode* pn, unsigned errorNumber, ...)
{
    TokenPos pos = pn ? pn->pn_pos : tokenStream()->currentToken().pos;

    va_list args;
    va_start(args, errorNumber);
    bool result = tokenStream()->reportStrictModeErrorNumberVA(pos.begin, sc->strict,
                                                               errorNumber, args);
    va_end(args);
    return result;
}
/*
 * Emit JSOP_NEWINIT with its 4-byte immediate: the first byte holds the
 * prototype key (object vs. array), and the remaining three bytes are
 * zeroed padding of the uint32 index field.
 */
static bool
EmitNewInit(ExclusiveContext* cx, BytecodeEmitter* bce, JSProtoKey key)
{
    const size_t len = 1 + UINT32_INDEX_LEN;
    ptrdiff_t offset = EmitCheck(cx, bce, len);
    if (offset < 0)
        return false;

    jsbytecode* code = bce->code(offset);
    code[0] = JSOP_NEWINIT;
    code[1] = jsbytecode(key);
    code[2] = 0;
    code[3] = 0;
    code[4] = 0;
    UpdateDepth(cx, bce, offset);
    CheckTypeSet(cx, bce, JSOP_NEWINIT);
    return true;
}
// Build a template object with the shape of an iterator result
// ({ value, done }) and register it with the emitter's object list, storing
// its index in *shape. Only valid for compile-and-go scripts.
static bool
IteratorResultShape(ExclusiveContext* cx, BytecodeEmitter* bce, unsigned* shape)
{
    JS_ASSERT(bce->script->compileAndGo());

    RootedObject obj(cx);
    // Guess a GC kind suitable for two properties.
    gc::AllocKind kind = GuessObjectGCKind(2);
    obj = NewBuiltinClassInstance(cx, &JSObject::class_, kind);
    if (!obj)
        return false;

    Rooted<jsid> value_id(cx, AtomToId(cx->names().value));
    Rooted<jsid> done_id(cx, AtomToId(cx->names().done));
    if (!DefineNativeProperty(cx, obj, value_id, UndefinedHandleValue, nullptr, nullptr,
                              JSPROP_ENUMERATE))
        return false;
    if (!DefineNativeProperty(cx, obj, done_id, UndefinedHandleValue, nullptr, nullptr,
                              JSPROP_ENUMERATE))
        return false;

    ObjectBox* objbox = bce->parser->newObjectBox(obj);
    if (!objbox)
        return false;

    *shape = bce->objectList.add(objbox);

    return true;
}
2120 static bool
2121 EmitPrepareIteratorResult(ExclusiveContext* cx, BytecodeEmitter* bce)
2123 if (bce->script->compileAndGo()) {
2124 unsigned shape;
2125 if (!IteratorResultShape(cx, bce, &shape))
2126 return false;
2127 return EmitIndex32(cx, JSOP_NEWOBJECT, shape, bce);
2130 return EmitNewInit(cx, bce, JSProto_Object);
2133 static bool
2134 EmitFinishIteratorResult(ExclusiveContext* cx, BytecodeEmitter* bce, bool done)
2136 jsatomid value_id;
2137 if (!bce->makeAtomIndex(cx->names().value, &value_id))
2138 return UINT_MAX;
2139 jsatomid done_id;
2140 if (!bce->makeAtomIndex(cx->names().done, &done_id))
2141 return UINT_MAX;
2143 if (!EmitIndex32(cx, JSOP_INITPROP, value_id, bce))
2144 return false;
2145 if (Emit1(cx, bce, done ? JSOP_TRUE : JSOP_FALSE) < 0)
2146 return false;
2147 if (!EmitIndex32(cx, JSOP_INITPROP, done_id, bce))
2148 return false;
2149 if (Emit1(cx, bce, JSOP_ENDINIT) < 0)
2150 return false;
2151 return true;
// Emit the op for a name reference, choosing between callee, slot-based and
// atom-based forms; in call context also push the implicit |this| value.
static bool
EmitNameOp(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn, bool callContext)
{
    if (!BindNameToSlot(cx, bce, pn))
        return false;

    JSOp op = pn->getOp();

    if (op == JSOP_CALLEE) {
        if (Emit1(cx, bce, op) < 0)
            return false;
    } else {
        if (!pn->pn_cookie.isFree()) {
            // The name was bound to a local/arg slot; use the var-op form.
            JS_ASSERT(JOF_OPTYPE(op) != JOF_ATOM);
            if (!EmitVarOp(cx, pn, op, bce))
                return false;
        } else {
            if (!EmitAtomOp(cx, pn, op, bce))
                return false;
        }
    }

    /* Need to provide |this| value for call */
    if (callContext) {
        if (op == JSOP_NAME && bce->needsImplicitThis()) {
            if (!EmitAtomOp(cx, pn, JSOP_IMPLICITTHIS, bce))
                return false;
        } else {
            if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)
                return false;
        }
    }

    return true;
}
// Emit the object expression underlying a dotted property access (everything
// left of the final '.'), flattening chains of PNK_DOT nodes iteratively.
static bool
EmitPropLHS(ExclusiveContext* cx, ParseNode* pn, JSOp op, BytecodeEmitter* bce)
{
    JS_ASSERT(pn->isKind(PNK_DOT));
    ParseNode* pn2 = pn->maybeExpr();

    /*
     * If the object operand is also a dotted property reference, reverse the
     * list linked via pn_expr temporarily so we can iterate over it from the
     * bottom up (reversing again as we go), to avoid excessive recursion.
     */
    if (pn2->isKind(PNK_DOT)) {
        ParseNode* pndot = pn2;
        ParseNode* pnup = nullptr, *pndown;
        ptrdiff_t top = bce->offset();
        for (;;) {
            /* Reverse pndot->pn_expr to point up, not down. */
            pndot->pn_offset = top;
            JS_ASSERT(!pndot->isUsed());
            pndown = pndot->pn_expr;
            pndot->pn_expr = pnup;
            if (!pndown->isKind(PNK_DOT))
                break;
            pnup = pndot;
            pndot = pndown;
        }

        /* pndown is a primary expression, not a dotted property reference. */
        if (!EmitTree(cx, bce, pndown))
            return false;

        do {
            /* Walk back up the list, emitting annotated name ops. */
            if (!EmitAtomOp(cx, pndot, JSOP_GETPROP, bce))
                return false;

            /* Reverse the pn_expr link again. */
            pnup = pndot->pn_expr;
            pndot->pn_expr = pndown;
            pndown = pndot;
        } while ((pndot = pnup) != nullptr);
        return true;
    }

    // The non-optimized case: emit the base expression directly.
    return EmitTree(cx, bce, pn2);
}
2238 static bool
2239 EmitPropOp(ExclusiveContext* cx, ParseNode* pn, JSOp op, BytecodeEmitter* bce)
2241 JS_ASSERT(pn->isArity(PN_NAME));
2243 if (!EmitPropLHS(cx, pn, op, bce))
2244 return false;
2246 if (op == JSOP_CALLPROP && Emit1(cx, bce, JSOP_DUP) < 0)
2247 return false;
2249 if (!EmitAtomOp(cx, pn, op, bce))
2250 return false;
2252 if (op == JSOP_CALLPROP && Emit1(cx, bce, JSOP_SWAP) < 0)
2253 return false;
2255 return true;
// Emit an increment/decrement of a dotted property (x.y++, --x.y, ...).
// Stack effects are annotated on each line; the final stack holds the
// expression result (old value for postfix, new value for prefix).
static bool
EmitPropIncDec(ExclusiveContext* cx, ParseNode* pn, BytecodeEmitter* bce)
{
    JS_ASSERT(pn->pn_kid->getKind() == PNK_DOT);

    bool post;
    JSOp binop = GetIncDecInfo(pn->getKind(), &post);

    JSOp get = JSOP_GETPROP;
    if (!EmitPropLHS(cx, pn->pn_kid, get, bce))     // OBJ
        return false;
    if (Emit1(cx, bce, JSOP_DUP) < 0)               // OBJ OBJ
        return false;
    if (!EmitAtomOp(cx, pn->pn_kid, JSOP_GETPROP, bce)) // OBJ V
        return false;
    if (Emit1(cx, bce, JSOP_POS) < 0)               // OBJ N
        return false;
    if (post && Emit1(cx, bce, JSOP_DUP) < 0)       // OBJ N? N
        return false;
    if (Emit1(cx, bce, JSOP_ONE) < 0)               // OBJ N? N 1
        return false;
    if (Emit1(cx, bce, binop) < 0)                  // OBJ N? N+1
        return false;

    if (post) {
        // Postfix: rotate the saved old value under the object so SETPROP
        // consumes OBJ and N+1, leaving the old value as the result.
        if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)2) < 0)   // N? N+1 OBJ
            return false;
        if (Emit1(cx, bce, JSOP_SWAP) < 0)                  // N? OBJ N+1
            return false;
    }

    if (!EmitAtomOp(cx, pn->pn_kid, JSOP_SETPROP, bce))     // N? N+1
        return false;
    if (post && Emit1(cx, bce, JSOP_POP) < 0)       // RESULT
        return false;

    return true;
}
// Emit an increment/decrement of a plain name (x++, --x, ...). Chooses the
// global (GNAME) or scoped (NAME) opcode family based on how the kid was
// previously bound. Stack effects are annotated per line.
static bool
EmitNameIncDec(ExclusiveContext* cx, ParseNode* pn, BytecodeEmitter* bce)
{
    const JSCodeSpec* cs = &js_CodeSpec[pn->pn_kid->getOp()];

    bool global = (cs->format & JOF_GNAME);
    bool post;
    JSOp binop = GetIncDecInfo(pn->getKind(), &post);

    if (!EmitAtomOp(cx, pn->pn_kid, global ? JSOP_BINDGNAME : JSOP_BINDNAME, bce))  // OBJ
        return false;
    if (!EmitAtomOp(cx, pn->pn_kid, global ? JSOP_GETGNAME : JSOP_NAME, bce))       // OBJ V
        return false;
    if (Emit1(cx, bce, JSOP_POS) < 0)               // OBJ N
        return false;
    if (post && Emit1(cx, bce, JSOP_DUP) < 0)       // OBJ N? N
        return false;
    if (Emit1(cx, bce, JSOP_ONE) < 0)               // OBJ N? N 1
        return false;
    if (Emit1(cx, bce, binop) < 0)                  // OBJ N? N+1
        return false;

    if (post) {
        // Postfix: rotate the saved old value below the scope object so the
        // SET op consumes OBJ and N+1, leaving the old value as the result.
        if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)2) < 0)   // N? N+1 OBJ
            return false;
        if (Emit1(cx, bce, JSOP_SWAP) < 0)                  // N? OBJ N+1
            return false;
    }

    if (!EmitAtomOp(cx, pn->pn_kid, global ? JSOP_SETGNAME : JSOP_SETNAME, bce))    // N? N+1
        return false;
    if (post && Emit1(cx, bce, JSOP_POP) < 0)       // RESULT
        return false;

    return true;
}
/*
 * Emit bytecode to put operands for a JSOP_GETELEM/CALLELEM/SETELEM/DELELEM
 * opcode onto the stack in the right order. In the case of SETELEM, the
 * value to be assigned must already be pushed.
 */
static bool
EmitElemOperands(ExclusiveContext* cx, ParseNode* pn, JSOp op, BytecodeEmitter* bce)
{
    JS_ASSERT(pn->isArity(PN_BINARY));
    if (!EmitTree(cx, bce, pn->pn_left))
        return false;
    // CALLELEM needs a second copy of the object to serve as |this|.
    if (op == JSOP_CALLELEM && Emit1(cx, bce, JSOP_DUP) < 0)
        return false;
    if (!EmitTree(cx, bce, pn->pn_right))
        return false;
    // SETELEM: lift the previously pushed value above OBJ and KEY.
    if (op == JSOP_SETELEM && Emit2(cx, bce, JSOP_PICK, (jsbytecode)2) < 0)
        return false;
    return true;
}
2354 static inline bool
2355 EmitElemOpBase(ExclusiveContext* cx, BytecodeEmitter* bce, JSOp op)
2357 if (Emit1(cx, bce, op) < 0)
2358 return false;
2359 CheckTypeSet(cx, bce, op);
2361 if (op == JSOP_CALLELEM) {
2362 if (Emit1(cx, bce, JSOP_SWAP) < 0)
2363 return false;
2365 return true;
2368 static bool
2369 EmitElemOp(ExclusiveContext* cx, ParseNode* pn, JSOp op, BytecodeEmitter* bce)
2371 return EmitElemOperands(cx, pn, op, bce) && EmitElemOpBase(cx, bce, op);
// Emit an increment/decrement of an element access (x[y]++, --x[y], ...).
// Stack effects annotated per line; the final stack holds the expression
// result (old value for postfix, new value for prefix).
static bool
EmitElemIncDec(ExclusiveContext* cx, ParseNode* pn, BytecodeEmitter* bce)
{
    JS_ASSERT(pn->pn_kid->getKind() == PNK_ELEM);

    if (!EmitElemOperands(cx, pn->pn_kid, JSOP_GETELEM, bce))
        return false;

    bool post;
    JSOp binop = GetIncDecInfo(pn->getKind(), &post);

    /*
     * We need to convert the key to an object id first, so that we do not do
     * it inside both the GETELEM and the SETELEM.
     */
                                                    // OBJ KEY*
    if (Emit1(cx, bce, JSOP_TOID) < 0)              // OBJ KEY
        return false;
    if (Emit1(cx, bce, JSOP_DUP2) < 0)              // OBJ KEY OBJ KEY
        return false;
    if (!EmitElemOpBase(cx, bce, JSOP_GETELEM))     // OBJ KEY V
        return false;
    if (Emit1(cx, bce, JSOP_POS) < 0)               // OBJ KEY N
        return false;
    if (post && Emit1(cx, bce, JSOP_DUP) < 0)       // OBJ KEY N? N
        return false;
    if (Emit1(cx, bce, JSOP_ONE) < 0)               // OBJ KEY N? N 1
        return false;
    if (Emit1(cx, bce, binop) < 0)                  // OBJ KEY N? N+1
        return false;

    if (post) {
        // Postfix: rotate OBJ and KEY above the saved old value so SETELEM
        // consumes them, leaving the old value as the result.
        if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)3) < 0)   // KEY N N+1 OBJ
            return false;
        if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)3) < 0)   // N N+1 OBJ KEY
            return false;
        if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)2) < 0)   // N OBJ KEY N+1
            return false;
    }

    if (!EmitElemOpBase(cx, bce, JSOP_SETELEM))     // N? N+1
        return false;
    if (post && Emit1(cx, bce, JSOP_POP) < 0)       // RESULT
        return false;

    return true;
}
2422 static bool
2423 EmitNumberOp(ExclusiveContext* cx, double dval, BytecodeEmitter* bce)
2425 int32_t ival;
2426 uint32_t u;
2427 ptrdiff_t off;
2428 jsbytecode* pc;
2430 if (NumberIsInt32(dval, &ival)) {
2431 if (ival == 0)
2432 return Emit1(cx, bce, JSOP_ZERO) >= 0;
2433 if (ival == 1)
2434 return Emit1(cx, bce, JSOP_ONE) >= 0;
2435 if ((int)(int8_t)ival == ival)
2436 return Emit2(cx, bce, JSOP_INT8, (jsbytecode)(int8_t)ival) >= 0;
2438 u = (uint32_t)ival;
2439 if (u < JS_BIT(16)) {
2440 EMIT_UINT16_IMM_OP(JSOP_UINT16, u);
2441 } else if (u < JS_BIT(24)) {
2442 off = EmitN(cx, bce, JSOP_UINT24, 3);
2443 if (off < 0)
2444 return false;
2445 pc = bce->code(off);
2446 SET_UINT24(pc, u);
2447 } else {
2448 off = EmitN(cx, bce, JSOP_INT32, 4);
2449 if (off < 0)
2450 return false;
2451 pc = bce->code(off);
2452 SET_INT32(pc, ival);
2454 return true;
2457 if (!bce->constList.append(DoubleValue(dval)))
2458 return false;
2460 return EmitIndex32(cx, JSOP_DOUBLE, bce->constList.length() - 1, bce);
// Patch the jump instruction at |off| so it targets the current bytecode
// offset (jump operands are relative to the jump's own position).
static inline void
SetJumpOffsetAt(BytecodeEmitter* bce, ptrdiff_t off)
{
    SET_JUMP_OFFSET(bce->code(off), bce->offset() - off);
}
2469 static bool
2470 PushUndefinedValues(ExclusiveContext* cx, BytecodeEmitter* bce, unsigned n)
2472 for (unsigned i = 0; i < n; ++i) {
2473 if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)
2474 return false;
2476 return true;
// Pop initial values off the stack into the block's scoped locals, highest
// index first (values were pushed in ascending order). Aliased locals go
// through the scope object; the rest use frame slots.
static bool
InitializeBlockScopedLocalsFromStack(ExclusiveContext* cx, BytecodeEmitter* bce,
                                     Handle<StaticBlockObject*> blockObj)
{
    for (unsigned i = blockObj->numVariables(); i > 0; --i) {
        if (blockObj->isAliased(i - 1)) {
            ScopeCoordinate sc;
            sc.setHops(0);
            sc.setSlot(BlockObject::RESERVED_SLOTS + i - 1);
            if (!EmitAliasedVarOp(cx, JSOP_SETALIASEDVAR, sc, bce))
                return false;
        } else {
            unsigned local = blockObj->blockIndexToLocalIndex(i - 1);
            if (!EmitUnaliasedVarOp(cx, JSOP_SETLOCAL, local, bce))
                return false;
        }
        // The SET op leaves the value on the stack; discard it.
        if (Emit1(cx, bce, JSOP_POP) < 0)
            return false;
    }
    return true;
}
// Open a lexical block scope: reserve stack slots for its locals (those not
// already pushed by the caller), enter the nested scope, then initialize the
// locals from the reserved slots.
static bool
EnterBlockScope(ExclusiveContext* cx, BytecodeEmitter* bce, StmtInfoBCE* stmtInfo,
                ObjectBox* objbox, unsigned alreadyPushed = 0)
{
    // Initial values for block-scoped locals.
    Rooted<StaticBlockObject*> blockObj(cx, &objbox->object->as<StaticBlockObject>());
    if (!PushUndefinedValues(cx, bce, blockObj->numVariables() - alreadyPushed))
        return false;

    if (!EnterNestedScope(cx, bce, stmtInfo, objbox, STMT_BLOCK))
        return false;

    if (!InitializeBlockScopedLocalsFromStack(cx, bce, blockObj))
        return false;

    return true;
}
/*
 * Emit bytecode for a switch statement. Tries JSOP_TABLESWITCH for dense,
 * duplicate-free int32 cases and falls back to JSOP_CONDSWITCH otherwise.
 *
 * Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047.
 * LLVM is deciding to inline this function which uses a lot of stack space
 * into EmitTree which is recursive and uses relatively little stack space.
 */
MOZ_NEVER_INLINE static bool
EmitSwitch(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
{
    JSOp switchOp;
    bool hasDefault;
    ptrdiff_t top, off, defaultOffset;
    ParseNode* pn2, *pn3, *pn4;
    int32_t low, high;
    int noteIndex;
    size_t switchSize;
    jsbytecode* pc;

    /* Try for most optimal, fall back if not dense ints. */
    switchOp = JSOP_TABLESWITCH;
    hasDefault = false;
    defaultOffset = -1;

    pn2 = pn->pn_right;
    JS_ASSERT(pn2->isKind(PNK_LEXICALSCOPE) || pn2->isKind(PNK_STATEMENTLIST));

    /* Push the discriminant. */
    if (!EmitTree(cx, bce, pn->pn_left))
        return false;

    StmtInfoBCE stmtInfo(cx);
    if (pn2->isKind(PNK_LEXICALSCOPE)) {
        if (!EnterBlockScope(cx, bce, &stmtInfo, pn2->pn_objbox, 0))
            return false;

        stmtInfo.type = STMT_SWITCH;
        stmtInfo.update = top = bce->offset();
        /* Advance pn2 to refer to the switch case list. */
        pn2 = pn2->expr();
    } else {
        JS_ASSERT(pn2->isKind(PNK_STATEMENTLIST));
        top = bce->offset();
        PushStatementBCE(bce, &stmtInfo, STMT_SWITCH, top);
    }

    /* Switch bytecodes run from here till end of final case. */
    uint32_t caseCount = pn2->pn_count;
    uint32_t tableLength = 0;
    UniquePtr<ParseNode*[], JS::FreePolicy> table(nullptr);

    if (caseCount > JS_BIT(16)) {
        bce->parser->tokenStream.reportError(JSMSG_TOO_MANY_CASES);
        return false;
    }

    if (caseCount == 0 ||
        (caseCount == 1 &&
         (hasDefault = (pn2->pn_head->isKind(PNK_DEFAULT))))) {
        caseCount = 0;
        low = 0;
        high = -1;
    } else {
        // NOTE(review): |ok| is never set to false in this scope, so the
        // |if (!ok)| check below looks vestigial — confirm against history
        // before removing.
        bool ok = true;
#define INTMAP_LENGTH   256
        jsbitmap intmap_space[INTMAP_LENGTH];
        jsbitmap* intmap = nullptr;
        int32_t intmap_bitlen = 0;

        low  = JSVAL_INT_MAX;
        high = JSVAL_INT_MIN;

        for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
            if (pn3->isKind(PNK_DEFAULT)) {
                hasDefault = true;
                caseCount--;    /* one of the "cases" was the default */
                continue;
            }

            JS_ASSERT(pn3->isKind(PNK_CASE));
            if (switchOp == JSOP_CONDSWITCH)
                continue;

            JS_ASSERT(switchOp == JSOP_TABLESWITCH);

            pn4 = pn3->pn_left;

            if (pn4->getKind() != PNK_NUMBER) {
                switchOp = JSOP_CONDSWITCH;
                continue;
            }

            int32_t i;
            if (!NumberIsInt32(pn4->pn_dval, &i)) {
                switchOp = JSOP_CONDSWITCH;
                continue;
            }

            if ((unsigned)(i + (int)JS_BIT(15)) >= (unsigned)JS_BIT(16)) {
                switchOp = JSOP_CONDSWITCH;
                continue;
            }
            if (i < low)
                low = i;
            if (high < i)
                high = i;

            /*
             * Check for duplicates, which require a JSOP_CONDSWITCH.
             * We bias i by 65536 if it's negative, and hope that's a rare
             * case (because it requires a malloc'd bitmap).
             */
            if (i < 0)
                i += JS_BIT(16);
            if (i >= intmap_bitlen) {
                if (!intmap &&
                    size_t(i) < (INTMAP_LENGTH * JS_BITMAP_NBITS)) {
                    intmap = intmap_space;
                    intmap_bitlen = INTMAP_LENGTH * JS_BITMAP_NBITS;
                } else {
                    /* Just grab 8K for the worst-case bitmap. */
                    intmap_bitlen = JS_BIT(16);
                    intmap = cx->pod_malloc<jsbitmap>(JS_BIT(16) / JS_BITMAP_NBITS);
                    if (!intmap) {
                        js_ReportOutOfMemory(cx);
                        return false;
                    }
                }
                memset(intmap, 0, size_t(intmap_bitlen) / CHAR_BIT);
            }
            if (JS_TEST_BIT(intmap, i)) {
                switchOp = JSOP_CONDSWITCH;
                continue;
            }
            JS_SET_BIT(intmap, i);
        }

        if (intmap && intmap != intmap_space)
            js_free(intmap);
        if (!ok)
            return false;

        /*
         * Compute table length and select condswitch instead if overlarge or
         * more than half-sparse.
         */
        if (switchOp == JSOP_TABLESWITCH) {
            tableLength = (uint32_t)(high - low + 1);
            if (tableLength >= JS_BIT(16) || tableLength > 2 * caseCount)
                switchOp = JSOP_CONDSWITCH;
        }
    }

    /*
     * The note has one or two offsets: first tells total switch code length;
     * second (if condswitch) tells offset to first JSOP_CASE.
     */
    if (switchOp == JSOP_CONDSWITCH) {
        /* 0 bytes of immediate for unoptimized switch. */
        switchSize = 0;
        noteIndex = NewSrcNote3(cx, bce, SRC_CONDSWITCH, 0, 0);
    } else {
        JS_ASSERT(switchOp == JSOP_TABLESWITCH);

        /* 3 offsets (len, low, high) before the table, 1 per entry. */
        switchSize = (size_t)(JUMP_OFFSET_LEN * (3 + tableLength));
        noteIndex = NewSrcNote2(cx, bce, SRC_TABLESWITCH, 0);
    }
    if (noteIndex < 0)
        return false;

    /* Emit switchOp followed by switchSize bytes of jump or lookup table. */
    if (EmitN(cx, bce, switchOp, switchSize) < 0)
        return false;

    off = -1;
    if (switchOp == JSOP_CONDSWITCH) {
        int caseNoteIndex = -1;
        bool beforeCases = true;

        /* Emit code for evaluating cases and jumping to case statements. */
        for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
            pn4 = pn3->pn_left;
            if (pn4 && !EmitTree(cx, bce, pn4))
                return false;
            if (caseNoteIndex >= 0) {
                /* off is the previous JSOP_CASE's bytecode offset. */
                if (!SetSrcNoteOffset(cx, bce, (unsigned)caseNoteIndex, 0, bce->offset() - off))
                    return false;
            }
            if (!pn4) {
                JS_ASSERT(pn3->isKind(PNK_DEFAULT));
                continue;
            }
            caseNoteIndex = NewSrcNote2(cx, bce, SRC_NEXTCASE, 0);
            if (caseNoteIndex < 0)
                return false;
            off = EmitJump(cx, bce, JSOP_CASE, 0);
            if (off < 0)
                return false;
            pn3->pn_offset = off;
            if (beforeCases) {
                unsigned noteCount, noteCountDelta;

                /* Switch note's second offset is to first JSOP_CASE. */
                noteCount = bce->notes().length();
                if (!SetSrcNoteOffset(cx, bce, (unsigned)noteIndex, 1, off - top))
                    return false;
                noteCountDelta = bce->notes().length() - noteCount;
                if (noteCountDelta != 0)
                    caseNoteIndex += noteCountDelta;
                beforeCases = false;
            }
        }

        /*
         * If we didn't have an explicit default (which could fall in between
         * cases, preventing us from fusing this SetSrcNoteOffset with the call
         * in the loop above), link the last case to the implicit default for
         * the benefit of IonBuilder.
         */
        if (!hasDefault &&
            caseNoteIndex >= 0 &&
            !SetSrcNoteOffset(cx, bce, (unsigned)caseNoteIndex, 0, bce->offset() - off))
        {
            return false;
        }

        /* Emit default even if no explicit default statement. */
        defaultOffset = EmitJump(cx, bce, JSOP_DEFAULT, 0);
        if (defaultOffset < 0)
            return false;
    } else {
        JS_ASSERT(switchOp == JSOP_TABLESWITCH);
        pc = bce->code(top + JUMP_OFFSET_LEN);

        /* Fill in switch bounds, which we know fit in 16-bit offsets. */
        SET_JUMP_OFFSET(pc, low);
        pc += JUMP_OFFSET_LEN;
        SET_JUMP_OFFSET(pc, high);
        pc += JUMP_OFFSET_LEN;

        /*
         * Use malloc to avoid arena bloat for programs with many switches.
         * UniquePtr takes care of freeing it on exit.
         */
        if (tableLength != 0) {
            table = cx->make_zeroed_pod_array<ParseNode*>(tableLength);
            if (!table)
                return false;
            for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
                if (pn3->isKind(PNK_DEFAULT))
                    continue;

                JS_ASSERT(pn3->isKind(PNK_CASE));

                pn4 = pn3->pn_left;
                JS_ASSERT(pn4->getKind() == PNK_NUMBER);

                int32_t i = int32_t(pn4->pn_dval);
                JS_ASSERT(double(i) == pn4->pn_dval);

                i -= low;
                JS_ASSERT(uint32_t(i) < tableLength);
                table[i] = pn3;
            }
        }
    }

    /* Emit code for each case's statements, copying pn_offset up to pn3. */
    for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
        if (switchOp == JSOP_CONDSWITCH && !pn3->isKind(PNK_DEFAULT))
            SetJumpOffsetAt(bce, pn3->pn_offset);
        pn4 = pn3->pn_right;
        if (!EmitTree(cx, bce, pn4))
            return false;
        pn3->pn_offset = pn4->pn_offset;
        if (pn3->isKind(PNK_DEFAULT))
            off = pn3->pn_offset - top;
    }

    if (!hasDefault) {
        /* If no default case, offset for default is to end of switch. */
        off = bce->offset() - top;
    }

    /* We better have set "off" by now. */
    JS_ASSERT(off != -1);

    /* Set the default offset (to end of switch if no default). */
    if (switchOp == JSOP_CONDSWITCH) {
        pc = nullptr;
        JS_ASSERT(defaultOffset != -1);
        SET_JUMP_OFFSET(bce->code(defaultOffset), off - (defaultOffset - top));
    } else {
        pc = bce->code(top);
        SET_JUMP_OFFSET(pc, off);
        pc += JUMP_OFFSET_LEN;
    }

    /* Set the SRC_SWITCH note's offset operand to tell end of switch. */
    off = bce->offset() - top;
    if (!SetSrcNoteOffset(cx, bce, (unsigned)noteIndex, 0, off))
        return false;

    if (switchOp == JSOP_TABLESWITCH) {
        /* Skip over the already-initialized switch bounds. */
        pc += 2 * JUMP_OFFSET_LEN;

        /* Fill in the jump table, if there is one. */
        for (uint32_t i = 0; i < tableLength; i++) {
            pn3 = table[i];
            off = pn3 ? pn3->pn_offset - top : 0;
            SET_JUMP_OFFSET(pc, off);
            pc += JUMP_OFFSET_LEN;
        }
    }

    if (pn->pn_right->isKind(PNK_LEXICALSCOPE)) {
        if (!LeaveNestedScope(cx, bce, &stmtInfo))
            return false;
    } else {
        if (!PopStatementBCE(cx, bce))
            return false;
    }

    return true;
}
2846 bool
2847 BytecodeEmitter::isRunOnceLambda()
2849 // The run once lambda flags set by the parser are approximate, and we look
2850 // at properties of the function itself before deciding to emit a function
2851 // as a run once lambda.
2853 if (!(parent && parent->emittingRunOnceLambda) && !lazyRunOnceLambda)
2854 return false;
2856 FunctionBox* funbox = sc->asFunctionBox();
2857 return !funbox->argumentsHasLocalBinding() &&
2858 !funbox->isGenerator() &&
2859 !funbox->function()->name();
// Emit the full bytecode for a function body: prologue ops (arguments
// binding, generator/run-once markers), the body tree, the implicit return,
// then finalize the JSScript and notify the debugger.
bool
frontend::EmitFunctionScript(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* body)
{
    /*
     * IonBuilder has assumptions about what may occur immediately after
     * script->main (e.g., in the case of destructuring params). Thus, put the
     * following ops into the range [script->code, script->main). Note:
     * execution starts from script->code, so this has no semantic effect.
     */
    FunctionBox* funbox = bce->sc->asFunctionBox();
    if (funbox->argumentsHasLocalBinding()) {
        JS_ASSERT(bce->offset() == 0);  /* See JSScript::argumentsBytecode. */
        bce->switchToProlog();
        if (Emit1(cx, bce, JSOP_ARGUMENTS) < 0)
            return false;
        InternalBindingsHandle bindings(bce->script, &bce->script->bindings);
        uint32_t varIndex = Bindings::argumentsVarIndex(cx, bindings);
        if (bce->script->varIsAliased(varIndex)) {
            ScopeCoordinate sc;
            sc.setHops(0);
            JS_ALWAYS_TRUE(LookupAliasedNameSlot(bce->script, cx->names().arguments, &sc));
            if (!EmitAliasedVarOp(cx, JSOP_SETALIASEDVAR, sc, bce))
                return false;
        } else {
            if (!EmitUnaliasedVarOp(cx, JSOP_SETLOCAL, varIndex, bce))
                return false;
        }
        if (Emit1(cx, bce, JSOP_POP) < 0)
            return false;
        bce->switchToMain();
    }

    if (funbox->isGenerator()) {
        bce->switchToProlog();
        if (Emit1(cx, bce, JSOP_GENERATOR) < 0)
            return false;
        bce->switchToMain();
    }

    /*
     * Emit a prologue for run-once scripts which will deoptimize JIT code if
     * the script ends up running multiple times via foo.caller related
     * shenanigans.
     */
    bool runOnce = bce->isRunOnceLambda();
    if (runOnce) {
        bce->switchToProlog();
        if (Emit1(cx, bce, JSOP_RUNONCE) < 0)
            return false;
        bce->switchToMain();
    }

    if (!EmitTree(cx, bce, body))
        return false;

    // If we fall off the end of an ES6 generator, return a boxed iterator
    // result object of the form { value: undefined, done: true }.
    if (bce->sc->isFunctionBox() && bce->sc->asFunctionBox()->isStarGenerator()) {
        if (!EmitPrepareIteratorResult(cx, bce))
            return false;
        if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)
            return false;
        if (!EmitFinishIteratorResult(cx, bce, true))
            return false;

        // No need to check for finally blocks, etc as in EmitReturn.
        if (Emit1(cx, bce, JSOP_RETURN) < 0)
            return false;
    }

    /*
     * Always end the script with a JSOP_RETRVAL. Some other parts of the codebase
     * depend on this opcode, e.g. js_InternalInterpret.
     */
    if (Emit1(cx, bce, JSOP_RETRVAL) < 0)
        return false;

    if (!JSScript::fullyInitFromEmitter(cx, bce->script, bce))
        return false;

    /*
     * If this function is only expected to run once, mark the script so that
     * initializers created within it may be given more precise types.
     */
    if (runOnce) {
        bce->script->setTreatAsRunOnce();
        JS_ASSERT(!bce->script->hasRunOnce());
    }

    /* Initialize fun->script() so that the debugger has a valid fun->script(). */
    RootedFunction fun(cx, bce->script->functionNonDelazifying());
    JS_ASSERT(fun->isInterpreted());

    if (fun->isInterpretedLazy())
        fun->setUnlazifiedScript(bce->script);
    else
        fun->setScript(bce->script);

    bce->tellDebuggerAboutCompiledScript(cx);

    return true;
}
// If |pn| names a variable that needs a prologue declaration op (atom-typed
// op in a heavyweight function or at top level), emit |prologOp| for it in
// the prologue. Optionally returns the resolved atom/slot index via *result.
static bool
MaybeEmitVarDecl(ExclusiveContext* cx, BytecodeEmitter* bce, JSOp prologOp, ParseNode* pn,
                 jsatomid* result)
{
    jsatomid atomIndex;

    if (!pn->pn_cookie.isFree()) {
        // Already bound to a slot; reuse the slot number as the index.
        atomIndex = pn->pn_cookie.slot();
    } else {
        if (!bce->makeAtomIndex(pn->pn_atom, &atomIndex))
            return false;
    }

    if (JOF_OPTYPE(pn->getOp()) == JOF_ATOM &&
        (!bce->sc->isFunctionBox() || bce->sc->asFunctionBox()->isHeavyweight()))
    {
        bce->switchToProlog();
        if (!UpdateSourceCoordNotes(cx, bce, pn->pn_pos.begin))
            return false;
        if (!EmitIndexOp(cx, prologOp, atomIndex, bce))
            return false;
        bce->switchToMain();
    }

    if (result)
        *result = atomIndex;
    return true;
}
/*
 * This enum tells EmitVariables and the destructuring functions how to emit
 * the given Parser::variables parse tree. In the base case, DefineVars, the
 * caller only wants variables to be defined in the prologue (if necessary).
 * For PushInitialValues, variable initializer expressions are evaluated and
 * left on the stack. For InitializeVars, the initializer expressions values
 * are assigned (to local variables) and popped.
 */
enum VarEmitOption
{
    DefineVars = 0,
    PushInitialValues = 1,
    InitializeVars = 2
};
// Signature shared by the destructuring declaration emitters below.
typedef bool
(*DestructuringDeclEmitter)(ExclusiveContext* cx, BytecodeEmitter* bce, JSOp prologOp, ParseNode* pn);

// Emit the declaration (not initialization) for a single name appearing in a
// destructuring pattern.
static bool
EmitDestructuringDecl(ExclusiveContext* cx, BytecodeEmitter* bce, JSOp prologOp, ParseNode* pn)
{
    JS_ASSERT(pn->isKind(PNK_NAME));
    if (!BindNameToSlot(cx, bce, pn))
        return false;

    JS_ASSERT(!pn->isOp(JSOP_CALLEE));
    return MaybeEmitVarDecl(cx, bce, prologOp, pn, nullptr);
}
3024 static bool
3025 EmitDestructuringDecls(ExclusiveContext* cx, BytecodeEmitter* bce, JSOp prologOp,
3026 ParseNode* pattern)
3028 if (pattern->isKind(PNK_ARRAY)) {
3029 for (ParseNode* element = pattern->pn_head; element; element = element->pn_next) {
3030 if (element->isKind(PNK_ELISION))
3031 continue;
3032 ParseNode* target = element;
3033 if (element->isKind(PNK_SPREAD)) {
3034 JS_ASSERT(element->pn_kid->isKind(PNK_NAME));
3035 target = element->pn_kid;
3037 DestructuringDeclEmitter emitter =
3038 target->isKind(PNK_NAME) ? EmitDestructuringDecl : EmitDestructuringDecls;
3039 if (!emitter(cx, bce, prologOp, target))
3040 return false;
3042 return true;
3045 MOZ_ASSERT(pattern->isKind(PNK_OBJECT));
3046 for (ParseNode* member = pattern->pn_head; member; member = member->pn_next) {
3047 ParseNode* target = member->pn_right;
3048 DestructuringDeclEmitter emitter =
3049 target->isKind(PNK_NAME) ? EmitDestructuringDecl : EmitDestructuringDecls;
3050 if (!emitter(cx, bce, prologOp, target))
3051 return false;
3053 return true;
3056 static bool
3057 EmitDestructuringOpsHelper(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn,
3058 VarEmitOption emitOption);
/*
 * EmitDestructuringLHS assumes the to-be-destructured value has been pushed on
 * the stack and emits code to destructure a single lhs expression (either a
 * name or a compound []/{} expression).
 *
 * If emitOption is InitializeVars, the to-be-destructured value is assigned to
 * locals and ultimately the initial slot is popped (-1 total depth change).
 *
 * If emitOption is PushInitialValues, the to-be-destructured value is replaced
 * with the initial values of the N (where 0 <= N) variables assigned in the
 * lhs expression. (Same post-condition as EmitDestructuringOpsHelper)
 */
static bool
EmitDestructuringLHS(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn, VarEmitOption emitOption)
{
    JS_ASSERT(emitOption != DefineVars);

    // Now emit the lvalue opcode sequence. If the lvalue is a nested
    // destructuring initialiser-form, call ourselves to handle it, then pop
    // the matched value. Otherwise emit an lvalue bytecode sequence followed
    // by an assignment op.
    if (pn->isKind(PNK_SPREAD))
        pn = pn->pn_kid;
    if (pn->isKind(PNK_ARRAY) || pn->isKind(PNK_OBJECT)) {
        if (!EmitDestructuringOpsHelper(cx, bce, pn, emitOption))
            return false;
        if (emitOption == InitializeVars) {
            // Per its post-condition, EmitDestructuringOpsHelper has left the
            // to-be-destructured value on top of the stack.
            if (Emit1(cx, bce, JSOP_POP) < 0)
                return false;
        }
    } else if (emitOption == PushInitialValues) {
        // The lhs is a simple name so the to-be-destructured value is
        // its initial value and there is nothing to do.
        JS_ASSERT(pn->getOp() == JSOP_SETLOCAL);
        JS_ASSERT(pn->pn_dflags & PND_BOUND);
    } else {
        switch (pn->getKind()) {
          case PNK_NAME:
            if (!BindNameToSlot(cx, bce, pn))
                return false;

            // Allow 'const [x,y] = o', make 'const x,y; [x,y] = o' a nop.
            if (pn->isConst() && !pn->isDefn())
                return Emit1(cx, bce, JSOP_POP) >= 0;

            switch (pn->getOp()) {
              case JSOP_SETNAME:
              case JSOP_SETGNAME:
              case JSOP_SETCONST: {
                // This is like ordinary assignment, but with one difference.
                //
                // In `a = b`, we first determine a binding for `a` (using
                // JSOP_BINDNAME or JSOP_BINDGNAME), then we evaluate `b`, then
                // a JSOP_SETNAME instruction.
                //
                // In `[a] = [b]`, per spec, `b` is evaluated first, then we
                // determine a binding for `a`. Then we need to do assignment--
                // but the operands are on the stack in the wrong order for
                // JSOP_SETPROP, so we have to add a JSOP_SWAP.
                jsatomid atomIndex;
                if (!bce->makeAtomIndex(pn->pn_atom, &atomIndex))
                    return false;

                if (!pn->isOp(JSOP_SETCONST)) {
                    JSOp bindOp = pn->isOp(JSOP_SETNAME) ? JSOP_BINDNAME : JSOP_BINDGNAME;
                    if (!EmitIndex32(cx, bindOp, atomIndex, bce))
                        return false;
                    if (Emit1(cx, bce, JSOP_SWAP) < 0)
                        return false;
                }

                if (!EmitIndexOp(cx, pn->getOp(), atomIndex, bce))
                    return false;
                break;
              }

              case JSOP_SETLOCAL:
              case JSOP_SETARG:
                if (!EmitVarOp(cx, pn, pn->getOp(), bce))
                    return false;
                break;

              default:
                MOZ_CRASH("EmitDestructuringLHS: bad name op");
            }
            break;

          case PNK_DOT:
            // See the (PNK_NAME, JSOP_SETNAME) case above.
            //
            // In `a.x = b`, `a` is evaluated first, then `b`, then a
            // JSOP_SETPROP instruction.
            //
            // In `[a.x] = [b]`, per spec, `b` is evaluated before `a`. Then we
            // need a property set -- but the operands are on the stack in the
            // wrong order for JSOP_SETPROP, so we have to add a JSOP_SWAP.
            if (!EmitTree(cx, bce, pn->pn_expr))
                return false;
            if (Emit1(cx, bce, JSOP_SWAP) < 0)
                return false;
            if (!EmitAtomOp(cx, pn, JSOP_SETPROP, bce))
                return false;
            break;

          case PNK_ELEM:
            // See the comment at `case PNK_DOT:` above. This case,
            // `[a[x]] = [b]`, is handled much the same way. The JSOP_SWAP
            // is emitted by EmitElemOperands.
            if (!EmitElemOp(cx, pn, JSOP_SETELEM, bce))
                return false;
            break;

          case PNK_CALL:
            JS_ASSERT(pn->pn_xflags & PNX_SETCALL);
            if (!EmitTree(cx, bce, pn))
                return false;

            // Pop the call return value. Below, we pop the RHS too, balancing
            // the stack --- presumably for the benefit of bytecode
            // analysis. (The interpreter will never reach these instructions
            // since we just emitted JSOP_SETCALL, which always throws. It's
            // possible no analyses actually depend on this either.)
            if (Emit1(cx, bce, JSOP_POP) < 0)
                return false;
            break;

          default:
            MOZ_CRASH("EmitDestructuringLHS: bad lhs kind");
        }

        // Pop the assigned value.
        if (Emit1(cx, bce, JSOP_POP) < 0)
            return false;
    }

    return true;
}
// Forward declarations; both helpers are defined later in this file.
3200 static bool EmitSpread(ExclusiveContext* cx, BytecodeEmitter* bce);
3201 static bool EmitIterator(ExclusiveContext* cx, BytecodeEmitter* bce);
3204 * EmitIteratorNext will pop iterator from the top of the stack.
3205 * It will push the result of |.next()| onto the stack.
3207 static bool
3208 EmitIteratorNext(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn=nullptr)
// |pn| is optional; it is only forwarded to EmitCall for position/source-note
// bookkeeping on the emitted call and may be null.
3210 if (Emit1(cx, bce, JSOP_DUP) < 0) // ... ITER ITER
3211 return false;
// Fetch the iterator's |next| method, then SWAP so the zero-argument call
// below sees ITER as its this-value.
3212 if (!EmitAtomOp(cx, cx->names().next, JSOP_CALLPROP, bce)) // ... ITER NEXT
3213 return false;
3214 if (Emit1(cx, bce, JSOP_SWAP) < 0) // ... NEXT ITER
3215 return false;
// Call next() with no arguments; the result object replaces NEXT/ITER.
3216 if (EmitCall(cx, bce, JSOP_CALL, 0, pn) < 0) // ... RESULT
3217 return false;
3218 CheckTypeSet(cx, bce, JSOP_CALL);
3219 return true;
3223 * Recursive helper for EmitDestructuringOps.
3224 * EmitDestructuringOpsHelper assumes the to-be-destructured value has been
3225 * pushed on the stack and emits code to destructure each part of a [] or {}
3226 * lhs expression.
3228 * If emitOption is InitializeVars, the initial to-be-destructured value is
3229 * left untouched on the stack and the overall depth is not changed.
3231 * If emitOption is PushInitialValues, the to-be-destructured value is replaced
3232 * with the initial values of the N (where 0 <= N) variables assigned in the
3233 * lhs expression. (Same post-condition as EmitDestructuringLHS)
3235 static bool
3236 EmitDestructuringOpsHelper(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn,
3237 VarEmitOption emitOption)
3239 JS_ASSERT(emitOption != DefineVars);
3241 ParseNode* pn2, *pn3;
3242 bool doElemOp;
3243 bool needToPopIterator = false;
3245 #ifdef DEBUG
3246 int stackDepth = bce->stackDepth;
3247 JS_ASSERT(stackDepth != 0);
3248 JS_ASSERT(pn->isArity(PN_LIST));
3249 JS_ASSERT(pn->isKind(PNK_ARRAY) || pn->isKind(PNK_OBJECT));
3250 #endif
3253 * When destructuring an array, use an iterator to walk it, instead of index lookup.
3254 * InitializeVars expects us to leave the *original* value on the stack.
3256 if (pn->isKind(PNK_ARRAY)) {
3257 if (emitOption == InitializeVars) {
3258 if (Emit1(cx, bce, JSOP_DUP) < 0) // OBJ OBJ
3259 return false;
3261 if (!EmitIterator(cx, bce)) // OBJ? ITER
3262 return false;
3263 needToPopIterator = true;
3266 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
3268 * Now push the property name currently being matched, which is the
3269 * current property name "label" on the left of a colon in the object initialiser.
3270 * Set pn3 to the lvalue node, which is in the value-initializing position.
3272 if (pn->isKind(PNK_OBJECT)) {
3273 doElemOp = true;
3274 JS_ASSERT(pn2->isKind(PNK_COLON) || pn2->isKind(PNK_SHORTHAND));
3276 /* Duplicate the value being destructured to use as a reference base. */
3277 if (Emit1(cx, bce, JSOP_DUP) < 0)
3278 return false;
3280 ParseNode* key = pn2->pn_left;
3281 if (key->isKind(PNK_NUMBER)) {
3282 if (!EmitNumberOp(cx, key->pn_dval, bce))
3283 return false;
3284 } else if (key->isKind(PNK_NAME) || key->isKind(PNK_STRING)) {
3285 PropertyName* name = key->pn_atom->asPropertyName();
3287 // The parser already checked for atoms representing indexes and
3288 // used PNK_NUMBER instead, but also watch for ids which TI treats
3289 // as indexes for simplification of downstream analysis.
3290 jsid id = NameToId(name);
3291 if (id != types::IdToTypeId(id)) {
3292 if (!EmitTree(cx, bce, key))
3293 return false;
3294 } else {
3295 if (!EmitAtomOp(cx, name, JSOP_GETPROP, bce))
3296 return false;
3297 doElemOp = false;
3299 } else {
3300 JS_ASSERT(key->isKind(PNK_COMPUTED_NAME));
3301 if (!EmitTree(cx, bce, key->pn_kid))
3302 return false;
3305 if (doElemOp) {
3307 * Ok, get the value of the matching property name. This leaves
3308 * that value on top of the value being destructured, so the stack
3309 * is one deeper than when we started.
3311 if (!EmitElemOpBase(cx, bce, JSOP_GETELEM))
3312 return false;
3313 JS_ASSERT(bce->stackDepth >= stackDepth + 1);
3316 pn3 = pn2->pn_right;
3317 } else {
3318 JS_ASSERT(pn->isKind(PNK_ARRAY));
3320 if (pn2->isKind(PNK_SPREAD)) {
3321 /* Create a new array with the rest of the iterator */
3322 ptrdiff_t off = EmitN(cx, bce, JSOP_NEWARRAY, 3); // ITER ARRAY
3323 if (off < 0)
3324 return false;
3325 CheckTypeSet(cx, bce, JSOP_NEWARRAY);
3326 jsbytecode* pc = bce->code(off);
3327 SET_UINT24(pc, 0);
3329 if (!EmitNumberOp(cx, 0, bce)) // ITER ARRAY INDEX
3330 return false;
3331 if (!EmitSpread(cx, bce)) // ARRAY INDEX
3332 return false;
3333 if (Emit1(cx, bce, JSOP_POP) < 0) // ARRAY
3334 return false;
3335 if (Emit1(cx, bce, JSOP_ENDINIT) < 0)
3336 return false;
3337 needToPopIterator = false;
3338 } else {
3339 if (Emit1(cx, bce, JSOP_DUP) < 0) // ITER ITER
3340 return false;
3341 if (!EmitIteratorNext(cx, bce, pn)) // ITER RESULT
3342 return false;
3343 if (Emit1(cx, bce, JSOP_DUP) < 0) // ITER RESULT RESULT
3344 return false;
3345 if (!EmitAtomOp(cx, cx->names().done, JSOP_GETPROP, bce)) // ITER RESULT DONE?
3346 return false;
3348 // Emit (result.done ? undefined : result.value)
3349 // This is mostly copied from EmitConditionalExpression, except that this code
3350 // does not push new values onto the stack.
3351 ptrdiff_t noteIndex = NewSrcNote(cx, bce, SRC_COND);
3352 if (noteIndex < 0)
3353 return false;
3354 ptrdiff_t beq = EmitJump(cx, bce, JSOP_IFEQ, 0);
3355 if (beq < 0)
3356 return false;
3358 if (Emit1(cx, bce, JSOP_POP) < 0) // ITER
3359 return false;
3360 if (Emit1(cx, bce, JSOP_UNDEFINED) < 0) // ITER UNDEFINED
3361 return false;
3363 /* Jump around else, fixup the branch, emit else, fixup jump. */
3364 ptrdiff_t jmp = EmitJump(cx, bce, JSOP_GOTO, 0);
3365 if (jmp < 0)
3366 return false;
3367 SetJumpOffsetAt(bce, beq);
3369 if (!EmitAtomOp(cx, cx->names().value, JSOP_GETPROP, bce)) // ITER VALUE
3370 return false;
3372 SetJumpOffsetAt(bce, jmp);
3373 if (!SetSrcNoteOffset(cx, bce, noteIndex, 0, jmp - beq))
3374 return false;
3377 pn3 = pn2;
3380 /* Elision node makes a hole in the array destructurer. */
3381 if (pn3->isKind(PNK_ELISION)) {
3382 JS_ASSERT(pn->isKind(PNK_ARRAY));
3383 JS_ASSERT(pn2 == pn3);
3384 if (Emit1(cx, bce, JSOP_POP) < 0)
3385 return false;
3386 } else {
3387 int32_t depthBefore = bce->stackDepth;
3388 if (!EmitDestructuringLHS(cx, bce, pn3, emitOption))
3389 return false;
3391 if (emitOption == PushInitialValues &&
3392 (pn->isKind(PNK_OBJECT) || needToPopIterator)) {
3394 * After '[x,y]' in 'let ([[x,y], z] = o)', the stack is
3395 * | to-be-destructured-value | x | y |
3396 * The goal is:
3397 * | x | y | z |
3398 * so emit a pick to produce the intermediate state
3399 * | x | y | to-be-destructured-value |
3400 * before destructuring z. This gives the loop invariant that
3401 * the to-be-destructured-value is always on top of the stack.
3403 JS_ASSERT((bce->stackDepth - bce->stackDepth) >= -1);
3404 uint32_t pickDistance = (uint32_t)((bce->stackDepth + 1) - depthBefore);
3405 if (pickDistance > 0) {
3406 if (pickDistance > UINT8_MAX) {
3407 bce->reportError(pn3, JSMSG_TOO_MANY_LOCALS);
3408 return false;
3410 if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)pickDistance) < 0)
3411 return false;
3417 if (needToPopIterator && Emit1(cx, bce, JSOP_POP) < 0)
3418 return false;
3420 if (emitOption == PushInitialValues && pn->isKind(PNK_OBJECT)) {
3422 * Per the above loop invariant, to-be-destructured-value is at the top
3423 * of the stack. To achieve the post-condition, pop it.
3424 * In case of array destructuring, the above POP already took care of the iterator.
3426 if (Emit1(cx, bce, JSOP_POP) < 0)
3427 return false;
3430 return true;
3433 static bool
3434 EmitDestructuringOps(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn, bool isLet = false)
3437 * Call our recursive helper to emit the destructuring assignments and
3438 * related stack manipulations.
3440 VarEmitOption emitOption = isLet ? PushInitialValues : InitializeVars;
3441 return EmitDestructuringOpsHelper(cx, bce, pn, emitOption);
3444 static bool
3445 EmitTemplateString(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
3447 JS_ASSERT(pn->isArity(PN_LIST));
3449 for (ParseNode* pn2 = pn->pn_head; pn2 != NULL; pn2 = pn2->pn_next) {
3450 if (pn2->getKind() != PNK_STRING && pn2->getKind() != PNK_TEMPLATE_STRING) {
3451 // We update source notes before emitting the expression
3452 if (!UpdateSourceCoordNotes(cx, bce, pn2->pn_pos.begin))
3453 return false;
3455 if (!EmitTree(cx, bce, pn2))
3456 return false;
3458 if (pn2->getKind() != PNK_STRING && pn2->getKind() != PNK_TEMPLATE_STRING) {
3459 // We need to convert the expression to a string
3460 if (Emit1(cx, bce, JSOP_TOSTRING) < 0)
3461 return false;
3464 if (pn2 != pn->pn_head) {
3465 // We've pushed two strings onto the stack. Add them together, leaving just one.
3466 if (Emit1(cx, bce, JSOP_ADD) < 0)
3467 return false;
3471 return true;
3474 static bool
3475 EmitVariables(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn, VarEmitOption emitOption,
3476 bool isLet = false)
// Emits bytecode for the declaration list |pn|. emitOption selects the
// behaviour: DefineVars emits binding ops only, InitializeVars also stores
// initializers and pops intermediate values, and PushInitialValues (let
// only, per the assertion below) leaves each initial value on the stack.
3478 JS_ASSERT(pn->isArity(PN_LIST));
3479 JS_ASSERT(isLet == (emitOption == PushInitialValues));
3481 ParseNode* next;
3482 for (ParseNode* pn2 = pn->pn_head; ; pn2 = next) {
3483 if (!UpdateSourceCoordNotes(cx, bce, pn2->pn_pos.begin))
3484 return false;
3485 next = pn2->pn_next;
3487 ParseNode* pn3;
3488 if (!pn2->isKind(PNK_NAME)) {
3489 if (pn2->isKind(PNK_ARRAY) || pn2->isKind(PNK_OBJECT)) {
3491 * Emit variable binding ops, but not destructuring ops. The
3492 * parser (see Parser::variables) has ensured that our caller
3493 * will be the PNK_FOR/PNK_FORIN/PNK_FOROF case in EmitTree, and
3494 * that case will emit the destructuring code only after
3495 * emitting an enumerating opcode and a branch that tests
3496 * whether the enumeration ended.
3498 JS_ASSERT(emitOption == DefineVars);
3499 JS_ASSERT(pn->pn_count == 1);
3500 if (!EmitDestructuringDecls(cx, bce, pn->getOp(), pn2))
3501 return false;
3502 break;
3506 * A destructuring initialiser assignment preceded by var will
3507 * never occur to the left of 'in' in a for-in loop. As with 'for
3508 * (var x = i in o)...', this will cause the entire 'var [a, b] =
3509 * i' to be hoisted out of the loop.
3511 JS_ASSERT(pn2->isKind(PNK_ASSIGN));
3512 JS_ASSERT(pn2->isOp(JSOP_NOP));
3513 JS_ASSERT(emitOption != DefineVars);
3516 * To allow the front end to rewrite var f = x; as f = x; when a
3517 * function f(){} precedes the var, detect simple name assignment
3518 * here and initialize the name.
3520 if (pn2->pn_left->isKind(PNK_NAME)) {
3521 pn3 = pn2->pn_right;
3522 pn2 = pn2->pn_left;
3523 goto do_name;
3526 pn3 = pn2->pn_left;
3527 if (!EmitDestructuringDecls(cx, bce, pn->getOp(), pn3))
3528 return false;
3530 if (!EmitTree(cx, bce, pn2->pn_right))
3531 return false;
3533 if (!EmitDestructuringOps(cx, bce, pn3, isLet))
3534 return false;
3536 /* If we are not initializing, nothing to pop. */
3537 if (emitOption != InitializeVars) {
3538 if (next)
3539 continue;
3540 break;
3542 goto emit_note_pop;
3546 * Load initializer early to share code above that jumps to do_name.
3547 * NB: if this var redeclares an existing binding, then pn2 is linked
3548 * on its definition's use-chain and pn_expr has been overlayed with
3549 * pn_lexdef.
3551 pn3 = pn2->maybeExpr();
// Common tail for simple names: bind pn2 to its slot, then emit the
// optional initializer pn3 followed by the store.
3553 do_name:
3554 if (!BindNameToSlot(cx, bce, pn2))
3555 return false;
3558 JSOp op;
3559 op = pn2->getOp();
3560 JS_ASSERT(op != JSOP_CALLEE);
3561 JS_ASSERT(!pn2->pn_cookie.isFree() || !pn->isOp(JSOP_NOP));
3563 jsatomid atomIndex;
3564 if (!MaybeEmitVarDecl(cx, bce, pn->getOp(), pn2, &atomIndex))
3565 return false;
3567 if (pn3) {
3568 JS_ASSERT(emitOption != DefineVars);
3569 if (op == JSOP_SETNAME || op == JSOP_SETGNAME || op == JSOP_SETINTRINSIC) {
3570 JS_ASSERT(emitOption != PushInitialValues);
3571 JSOp bindOp;
3572 if (op == JSOP_SETNAME)
3573 bindOp = JSOP_BINDNAME;
3574 else if (op == JSOP_SETGNAME)
3575 bindOp = JSOP_BINDGNAME;
3576 else
3577 bindOp = JSOP_BINDINTRINSIC;
3578 if (!EmitIndex32(cx, bindOp, atomIndex, bce))
3579 return false;
// The initializer is not part of a for-head's init clause for the purposes
// of the "in" restriction, so emittingForInit is cleared around it.
3582 bool oldEmittingForInit = bce->emittingForInit;
3583 bce->emittingForInit = false;
3584 if (!EmitTree(cx, bce, pn3))
3585 return false;
3586 bce->emittingForInit = oldEmittingForInit;
3587 } else if (isLet) {
3588 /* JSOP_ENTERLETx expects at least 1 slot to have been pushed. */
3589 if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)
3590 return false;
3593 /* If we are not initializing, nothing to pop. */
3594 if (emitOption != InitializeVars) {
3595 if (next)
3596 continue;
3597 break;
3600 JS_ASSERT_IF(pn2->isDefn(), pn3 == pn2->pn_expr);
3601 if (!pn2->pn_cookie.isFree()) {
3602 if (!EmitVarOp(cx, pn2, op, bce))
3603 return false;
3604 } else {
3605 if (!EmitIndexOp(cx, op, atomIndex, bce))
3606 return false;
// Pop the value between declarations; the final value is popped below
// only when the caller set PNX_POPVAR.
3609 emit_note_pop:
3610 if (!next)
3611 break;
3612 if (Emit1(cx, bce, JSOP_POP) < 0)
3613 return false;
3616 if (pn->pn_xflags & PNX_POPVAR) {
3617 if (Emit1(cx, bce, JSOP_POP) < 0)
3618 return false;
3621 return true;
3624 static bool
3625 EmitAssignment(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* lhs, JSOp op, ParseNode* rhs)
// Emits |lhs op= rhs|. op == JSOP_NOP means plain assignment. rhs may be
// null, in which case the value to assign is already on the stack (for-in/
// for-of heads -- see the comment further below). |offset| counts the stack
// slots pushed for the lvalue reference so a JSOP_PICK can lift the
// already-pushed value to the top when rhs is null.
3628 * Check left operand type and generate specialized code for it.
3629 * Specialize to avoid ECMA "reference type" values on the operand
3630 * stack, which impose pervasive runtime "GetValue" costs.
3632 jsatomid atomIndex = (jsatomid) -1;
3633 jsbytecode offset = 1;
3635 switch (lhs->getKind()) {
3636 case PNK_NAME:
3637 if (!BindNameToSlot(cx, bce, lhs))
3638 return false;
3639 if (lhs->pn_cookie.isFree()) {
3640 if (!bce->makeAtomIndex(lhs->pn_atom, &atomIndex))
3641 return false;
3642 if (!lhs->isConst()) {
3643 JSOp bindOp;
3644 if (lhs->isOp(JSOP_SETNAME))
3645 bindOp = JSOP_BINDNAME;
3646 else if (lhs->isOp(JSOP_SETGNAME))
3647 bindOp = JSOP_BINDGNAME;
3648 else
3649 bindOp = JSOP_BINDINTRINSIC;
3650 if (!EmitIndex32(cx, bindOp, atomIndex, bce))
3651 return false;
3652 offset++;
3655 break;
3656 case PNK_DOT:
3657 if (!EmitTree(cx, bce, lhs->expr()))
3658 return false;
3659 offset++;
3660 if (!bce->makeAtomIndex(lhs->pn_atom, &atomIndex))
3661 return false;
3662 break;
3663 case PNK_ELEM:
3664 JS_ASSERT(lhs->isArity(PN_BINARY));
3665 if (!EmitTree(cx, bce, lhs->pn_left))
3666 return false;
3667 if (!EmitTree(cx, bce, lhs->pn_right))
3668 return false;
3669 offset += 2;
3670 break;
3671 case PNK_ARRAY:
3672 case PNK_OBJECT:
3673 break;
3674 case PNK_CALL:
3675 JS_ASSERT(lhs->pn_xflags & PNX_SETCALL);
3676 if (!EmitTree(cx, bce, lhs))
3677 return false;
3678 if (Emit1(cx, bce, JSOP_POP) < 0)
3679 return false;
3680 break;
3681 default:
3682 JS_ASSERT(0);
// For compound assignments (op != JSOP_NOP) push the lvalue's current
// value so the binary operator emitted below can combine it with the RHS.
3685 if (op != JSOP_NOP) {
3686 JS_ASSERT(rhs);
3687 switch (lhs->getKind()) {
3688 case PNK_NAME:
3689 if (lhs->isConst()) {
3690 if (lhs->isOp(JSOP_CALLEE)) {
3691 if (Emit1(cx, bce, JSOP_CALLEE) < 0)
3692 return false;
3693 } else if (lhs->isOp(JSOP_NAME) || lhs->isOp(JSOP_GETGNAME)) {
3694 if (!EmitIndex32(cx, lhs->getOp(), atomIndex, bce))
3695 return false;
3696 } else {
3697 JS_ASSERT(JOF_OPTYPE(lhs->getOp()) != JOF_ATOM);
3698 if (!EmitVarOp(cx, lhs, lhs->getOp(), bce))
3699 return false;
3701 } else if (lhs->isOp(JSOP_SETNAME)) {
3702 if (Emit1(cx, bce, JSOP_DUP) < 0)
3703 return false;
3704 if (!EmitIndex32(cx, JSOP_GETXPROP, atomIndex, bce))
3705 return false;
3706 } else if (lhs->isOp(JSOP_SETGNAME)) {
3707 JS_ASSERT(lhs->pn_cookie.isFree());
3708 if (!EmitAtomOp(cx, lhs, JSOP_GETGNAME, bce))
3709 return false;
3710 } else if (lhs->isOp(JSOP_SETINTRINSIC)) {
3711 JS_ASSERT(lhs->pn_cookie.isFree());
3712 if (!EmitAtomOp(cx, lhs, JSOP_GETINTRINSIC, bce))
3713 return false;
3714 } else {
3715 JSOp op;
3716 switch (lhs->getOp()) {
3717 case JSOP_SETARG: op = JSOP_GETARG; break;
3718 case JSOP_SETLOCAL: op = JSOP_GETLOCAL; break;
3719 case JSOP_SETALIASEDVAR: op = JSOP_GETALIASEDVAR; break;
3720 default: MOZ_CRASH("Bad op");
3722 if (!EmitVarOp(cx, lhs, op, bce))
3723 return false;
3725 break;
3726 case PNK_DOT: {
3727 if (Emit1(cx, bce, JSOP_DUP) < 0)
3728 return false;
3729 bool isLength = (lhs->pn_atom == cx->names().length);
3730 if (!EmitIndex32(cx, isLength ? JSOP_LENGTH : JSOP_GETPROP, atomIndex, bce))
3731 return false;
3732 break;
3734 case PNK_ELEM:
3735 if (Emit1(cx, bce, JSOP_DUP2) < 0)
3736 return false;
3737 if (!EmitElemOpBase(cx, bce, JSOP_GETELEM))
3738 return false;
3739 break;
3740 case PNK_CALL:
3742 * We just emitted a JSOP_SETCALL (which will always throw) and
3743 * popped the call's return value. Push a random value to make sure
3744 * the stack depth is correct.
3746 JS_ASSERT(lhs->pn_xflags & PNX_SETCALL);
3747 if (Emit1(cx, bce, JSOP_NULL) < 0)
3748 return false;
3749 break;
3750 default:;
3754 /* Now emit the right operand (it may affect the namespace). */
3755 if (rhs) {
3756 if (!EmitTree(cx, bce, rhs))
3757 return false;
3758 } else {
3760 * The value to assign is the next enumeration value in a for-in or
3761 * for-of loop. That value has already been emitted: by JSOP_ITERNEXT
3762 * in the for-in case, or via a GETPROP "value" on the result object in
3763 * the for-of case. If offset == 1, that slot is already at the top of
3764 * the stack. Otherwise, rearrange the stack to put that value on top.
3766 if (offset != 1 && Emit2(cx, bce, JSOP_PICK, offset - 1) < 0)
3767 return false;
3770 /* If += etc., emit the binary operator with a source note. */
3771 if (op != JSOP_NOP) {
3773 * Take care to avoid SRC_ASSIGNOP if the left-hand side is a const
3774 * declared in the current compilation unit, as in this case (just
3775 * a bit further below) we will avoid emitting the assignment op.
3777 if (!lhs->isKind(PNK_NAME) || !lhs->isConst()) {
3778 if (NewSrcNote(cx, bce, SRC_ASSIGNOP) < 0)
3779 return false;
3781 if (Emit1(cx, bce, op) < 0)
3782 return false;
3785 /* Finally, emit the specialized assignment bytecode. */
3786 switch (lhs->getKind()) {
3787 case PNK_NAME:
3788 if (lhs->isConst()) {
3789 if (!rhs) {
3790 bce->reportError(lhs, JSMSG_BAD_FOR_LEFTSIDE);
3791 return false;
3793 break;
3795 if (lhs->isOp(JSOP_SETARG) || lhs->isOp(JSOP_SETLOCAL) || lhs->isOp(JSOP_SETALIASEDVAR)) {
3796 if (!EmitVarOp(cx, lhs, lhs->getOp(), bce))
3797 return false;
3798 } else {
3799 if (!EmitIndexOp(cx, lhs->getOp(), atomIndex, bce))
3800 return false;
3802 break;
3803 case PNK_DOT:
3804 if (!EmitIndexOp(cx, JSOP_SETPROP, atomIndex, bce))
3805 return false;
3806 break;
3807 case PNK_CALL:
3808 /* Do nothing. The JSOP_SETCALL we emitted will always throw. */
3809 JS_ASSERT(lhs->pn_xflags & PNX_SETCALL);
3810 break;
3811 case PNK_ELEM:
3812 if (Emit1(cx, bce, JSOP_SETELEM) < 0)
3813 return false;
3814 break;
3815 case PNK_ARRAY:
3816 case PNK_OBJECT:
3817 if (!EmitDestructuringOps(cx, bce, lhs))
3818 return false;
3819 break;
3820 default:
3821 JS_ASSERT(0);
3823 return true;
3826 bool
3827 ParseNode::getConstantValue(ExclusiveContext* cx, AllowConstantObjects allowObjects, MutableHandleValue vp)
// Attempts to fold this parse node into a constant Value: primitives are
// produced directly, array/object literals are built recursively when
// |allowObjects| permits. NOTE(review): a false return can mean either
// "not a constant" (e.g. PNK_SPREAD) or a genuine failure such as OOM --
// confirm how each caller distinguishes the two.
3829 switch (getKind()) {
3830 case PNK_NUMBER:
3831 vp.setNumber(pn_dval);
3832 return true;
3833 case PNK_TEMPLATE_STRING:
3834 case PNK_STRING:
3835 vp.setString(pn_atom);
3836 return true;
3837 case PNK_TRUE:
3838 vp.setBoolean(true);
3839 return true;
3840 case PNK_FALSE:
3841 vp.setBoolean(false);
3842 return true;
3843 case PNK_NULL:
3844 vp.setNull();
3845 return true;
3846 case PNK_SPREAD:
3847 return false;
3848 case PNK_CALLSITEOBJ:
3849 case PNK_ARRAY: {
3850 RootedValue value(cx);
3851 unsigned count;
3852 ParseNode* pn;
3854 if (allowObjects == DontAllowObjects)
3855 return false;
// DontAllowNestedObjects permits this level but forbids deeper literals.
3856 if (allowObjects == DontAllowNestedObjects)
3857 allowObjects = DontAllowObjects;
// A call-site object skips its first element (the raw array is handled
// separately by CallSiteNode::getRawArrayValue).
3859 if (getKind() == PNK_CALLSITEOBJ) {
3860 count = pn_count - 1;
3861 pn = pn_head->pn_next;
3862 } else {
3863 JS_ASSERT(isOp(JSOP_NEWINIT) && !(pn_xflags & PNX_NONCONST));
3864 count = pn_count;
3865 pn = pn_head;
3868 RootedObject obj(cx, NewDenseAllocatedArray(cx, count, nullptr, MaybeSingletonObject));
3869 if (!obj)
3870 return false;
3872 unsigned idx = 0;
3873 RootedId id(cx);
3874 for (; pn; idx++, pn = pn->pn_next) {
3875 if (!pn->getConstantValue(cx, allowObjects, &value))
3876 return false;
3877 id = INT_TO_JSID(idx);
3878 if (!JSObject::defineGeneric(cx, obj, id, value, nullptr, nullptr, JSPROP_ENUMERATE))
3879 return false;
3881 JS_ASSERT(idx == count);
3883 types::FixArrayType(cx, obj);
3884 vp.setObject(*obj);
3885 return true;
3887 case PNK_OBJECT: {
3888 JS_ASSERT(isOp(JSOP_NEWINIT));
3889 JS_ASSERT(!(pn_xflags & PNX_NONCONST));
3891 if (allowObjects == DontAllowObjects)
3892 return false;
3893 if (allowObjects == DontAllowNestedObjects)
3894 allowObjects = DontAllowObjects;
3896 gc::AllocKind kind = GuessObjectGCKind(pn_count);
3897 RootedObject obj(cx, NewBuiltinClassInstance(cx, &JSObject::class_, kind, MaybeSingletonObject));
3898 if (!obj)
3899 return false;
3901 RootedValue value(cx), idvalue(cx);
3902 for (ParseNode* pn = pn_head; pn; pn = pn->pn_next) {
3903 if (!pn->pn_right->getConstantValue(cx, allowObjects, &value))
3904 return false;
3906 ParseNode* pnid = pn->pn_left;
3907 if (pnid->isKind(PNK_NUMBER)) {
3908 idvalue = NumberValue(pnid->pn_dval);
3909 } else {
3910 JS_ASSERT(pnid->isKind(PNK_NAME) || pnid->isKind(PNK_STRING));
3911 JS_ASSERT(pnid->pn_atom != cx->names().proto);
3912 idvalue = StringValue(pnid->pn_atom);
// Numeric-looking keys are defined as elements, others as named properties.
3915 uint32_t index;
3916 if (IsDefinitelyIndex(idvalue, &index)) {
3917 if (!JSObject::defineElement(cx, obj, index, value, nullptr, nullptr,
3918 JSPROP_ENUMERATE))
3920 return false;
3923 continue;
3926 JSAtom* name = ToAtom<CanGC>(cx, idvalue);
3927 if (!name)
3928 return false;
3930 if (name->isIndex(&index)) {
3931 if (!JSObject::defineElement(cx, obj, index, value,
3932 nullptr, nullptr, JSPROP_ENUMERATE))
3933 return false;
3934 } else {
3935 if (!JSObject::defineProperty(cx, obj, name->asPropertyName(), value,
3936 nullptr, nullptr, JSPROP_ENUMERATE))
3938 return false;
3943 types::FixObjectType(cx, obj);
3944 vp.setObject(*obj);
3945 return true;
3947 default:
3948 MOZ_CRASH("Unexpected node");
3950 return false;
3953 static bool
3954 EmitSingletonInitialiser(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
3956 RootedValue value(cx);
3957 if (!pn->getConstantValue(cx, ParseNode::AllowObjects, &value))
3958 return false;
3960 RootedObject obj(cx, &value.toObject());
3961 if (!obj->is<ArrayObject>() && !JSObject::setSingletonType(cx, obj))
3962 return false;
3964 ObjectBox* objbox = bce->parser->newObjectBox(obj);
3965 if (!objbox)
3966 return false;
3968 return EmitObjectOp(cx, objbox, JSOP_OBJECT, bce);
3971 static bool
3972 EmitCallSiteObject(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
3974 RootedValue value(cx);
3975 if (!pn->getConstantValue(cx, ParseNode::AllowObjects, &value))
3976 return false;
3978 JS_ASSERT(value.isObject());
3980 ObjectBox* objbox1 = bce->parser->newObjectBox(&value.toObject());
3981 if (!objbox1)
3982 return false;
3984 if (!pn->as<CallSiteNode>().getRawArrayValue(cx, &value))
3985 return false;
3987 JS_ASSERT(value.isObject());
3989 ObjectBox* objbox2 = bce->parser->newObjectBox(&value.toObject());
3990 if (!objbox2)
3991 return false;
3993 return EmitObjectPairOp(cx, objbox1, objbox2, JSOP_CALLSITEOBJ, bce);
3996 /* See the SRC_FOR source note offsetBias comments later in this file. */
// Compile-time guarantees that a JSOP_NOP and a JSOP_POP each occupy exactly
// one byte of bytecode.
3997 JS_STATIC_ASSERT(JSOP_NOP_LENGTH == 1);
3998 JS_STATIC_ASSERT(JSOP_POP_LENGTH == 1);
4000 namespace {
4002 class EmitLevelManager
4004 BytecodeEmitter* bce;
4005 public:
4006 explicit EmitLevelManager(BytecodeEmitter* bce) : bce(bce) { bce->emitLevel++; }
4007 ~EmitLevelManager() { bce->emitLevel--; }
4010 } /* anonymous namespace */
4012 static bool
4013 EmitCatch(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
// Emits one catch block: binds the pending exception to the catch variable
// (pn_kid1, possibly a destructuring pattern), evaluates the optional catch
// guard (pn_kid2), and emits the catch body (pn_kid3). Guard jumps recorded
// here are backpatched by EmitTry.
4016 * Morph STMT_BLOCK to STMT_CATCH, note the block entry code offset,
4017 * and save the block object atom.
4019 StmtInfoBCE* stmt = bce->topStmt;
4020 JS_ASSERT(stmt->type == STMT_BLOCK && stmt->isBlockScope);
4021 stmt->type = STMT_CATCH;
4023 /* Go up one statement info record to the TRY or FINALLY record. */
4024 stmt = stmt->down;
4025 JS_ASSERT(stmt->type == STMT_TRY || stmt->type == STMT_FINALLY);
4027 /* Pick up the pending exception and bind it to the catch variable. */
4028 if (Emit1(cx, bce, JSOP_EXCEPTION) < 0)
4029 return false;
4032 * Dup the exception object if there is a guard for rethrowing to use
4033 * it later when rethrowing or in other catches.
4035 if (pn->pn_kid2 && Emit1(cx, bce, JSOP_DUP) < 0)
4036 return false;
4038 ParseNode* pn2 = pn->pn_kid1;
4039 switch (pn2->getKind()) {
4040 case PNK_ARRAY:
4041 case PNK_OBJECT:
4042 if (!EmitDestructuringOps(cx, bce, pn2))
4043 return false;
4044 if (Emit1(cx, bce, JSOP_POP) < 0)
4045 return false;
4046 break;
4048 case PNK_NAME:
4049 /* Inline and specialize BindNameToSlot for pn2. */
4050 JS_ASSERT(!pn2->pn_cookie.isFree());
4051 if (!EmitVarOp(cx, pn2, JSOP_SETLOCAL, bce))
4052 return false;
4053 if (Emit1(cx, bce, JSOP_POP) < 0)
4054 return false;
4055 break;
4057 default:
4058 JS_ASSERT(0);
4061 // If there is a guard expression, emit it and arrange to jump to the next
4062 // catch block if the guard expression is false.
4063 if (pn->pn_kid2) {
4064 if (!EmitTree(cx, bce, pn->pn_kid2))
4065 return false;
4067 // If the guard expression is false, fall through, pop the block scope,
4068 // and jump to the next catch block. Otherwise jump over that code and
4069 // pop the dupped exception.
4070 ptrdiff_t guardCheck = EmitJump(cx, bce, JSOP_IFNE, 0);
4071 if (guardCheck < 0)
4072 return false;
4075 NonLocalExitScope nle(cx, bce);
4077 // Move exception back to cx->exception to prepare for
4078 // the next catch.
4079 if (Emit1(cx, bce, JSOP_THROWING) < 0)
4080 return false;
4082 // Leave the scope for this catch block.
4083 if (!nle.prepareForNonLocalJump(stmt))
4084 return false;
4086 // Jump to the next handler. The jump target is backpatched by EmitTry.
4087 ptrdiff_t guardJump = EmitJump(cx, bce, JSOP_GOTO, 0);
4088 if (guardJump < 0)
4089 return false;
4090 stmt->guardJump() = guardJump;
4093 // Back to normal control flow.
4094 SetJumpOffsetAt(bce, guardCheck);
4096 // Pop duplicated exception object as we no longer need it.
4097 if (Emit1(cx, bce, JSOP_POP) < 0)
4098 return false;
4101 /* Emit the catch body. */
4102 return EmitTree(cx, bce, pn->pn_kid3);
4105 // Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See the
4106 // comment on EmitSwitch.
4108 MOZ_NEVER_INLINE static bool
4109 EmitTry(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
// Emits a complete try/catch/finally statement: pn_kid1 is the try block,
// pn_kid2 the (optional) list of catch blocks, pn_kid3 the (optional)
// finally block. Jumps over catches and gosubs to the finally block are
// recorded and backpatched below.
4111 StmtInfoBCE stmtInfo(cx);
4113 // Push stmtInfo to track jumps-over-catches and gosubs-to-finally
4114 // for later fixup.
4116 // When a finally block is active (STMT_FINALLY in our parse context),
4117 // non-local jumps (including jumps-over-catches) result in a GOSUB
4118 // being written into the bytecode stream and fixed-up later (c.f.
4119 // EmitBackPatchOp and BackPatch).
4121 PushStatementBCE(bce, &stmtInfo, pn->pn_kid3 ? STMT_FINALLY : STMT_TRY, bce->offset());
4123 // Since an exception can be thrown at any place inside the try block,
4124 // we need to restore the stack and the scope chain before we transfer
4125 // the control to the exception handler.
4127 // For that we store in a try note associated with the catch or
4128 // finally block the stack depth upon the try entry. The interpreter
4129 // uses this depth to properly unwind the stack and the scope chain.
4131 int depth = bce->stackDepth;
4133 // Record the try location, then emit the try block.
4134 ptrdiff_t noteIndex = NewSrcNote(cx, bce, SRC_TRY);
4135 if (noteIndex < 0 || Emit1(cx, bce, JSOP_TRY) < 0)
4136 return false;
4137 ptrdiff_t tryStart = bce->offset();
4138 if (!EmitTree(cx, bce, pn->pn_kid1))
4139 return false;
4140 JS_ASSERT(depth == bce->stackDepth);
4142 // GOSUB to finally, if present.
4143 if (pn->pn_kid3) {
4144 if (EmitBackPatchOp(cx, bce, &stmtInfo.gosubs()) < 0)
4145 return false;
4148 // Source note points to the jump at the end of the try block.
4149 if (!SetSrcNoteOffset(cx, bce, noteIndex, 0, bce->offset() - tryStart + JSOP_TRY_LENGTH))
4150 return false;
4152 // Emit jump over catch and/or finally.
4153 ptrdiff_t catchJump = -1;
4154 if (EmitBackPatchOp(cx, bce, &catchJump) < 0)
4155 return false;
4157 ptrdiff_t tryEnd = bce->offset();
4159 // If this try has a catch block, emit it.
4160 if (ParseNode* pn2 = pn->pn_kid2) {
4161 // The emitted code for a catch block looks like:
4163 // [pushblockscope] only if any local aliased
4164 // exception
4165 // if there is a catchguard:
4166 // dup
4167 // setlocal 0; pop assign or possibly destructure exception
4168 // if there is a catchguard:
4169 // < catchguard code >
4170 // ifne POST
4171 // debugleaveblock
4172 // [popblockscope] only if any local aliased
4173 // throwing pop exception to cx->exception
4174 // goto <next catch block>
4175 // POST: pop
4176 // < catch block contents >
4177 // debugleaveblock
4178 // [popblockscope] only if any local aliased
4179 // goto <end of catch blocks> non-local; finally applies
4181 // If there's no catch block without a catchguard, the last <next catch
4182 // block> points to rethrow code. This code will [gosub] to the finally
4183 // code if appropriate, and is also used for the catch-all trynote for
4184 // capturing exceptions thrown from catch{} blocks.
4186 for (ParseNode* pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
4187 JS_ASSERT(bce->stackDepth == depth);
4189 // Emit the lexical scope and catch body.
4190 JS_ASSERT(pn3->isKind(PNK_LEXICALSCOPE));
4191 if (!EmitTree(cx, bce, pn3))
4192 return false;
4194 // gosub <finally>, if required.
4195 if (pn->pn_kid3) {
4196 if (EmitBackPatchOp(cx, bce, &stmtInfo.gosubs()) < 0)
4197 return false;
4198 JS_ASSERT(bce->stackDepth == depth);
4201 // Jump over the remaining catch blocks. This will get fixed
4202 // up to jump to after catch/finally.
4203 if (EmitBackPatchOp(cx, bce, &catchJump) < 0)
4204 return false;
4206 // If this catch block had a guard clause, patch the guard jump to
4207 // come here.
4208 if (stmtInfo.guardJump() != -1) {
4209 SetJumpOffsetAt(bce, stmtInfo.guardJump());
4210 stmtInfo.guardJump() = -1;
4212 // If this catch block is the last one, rethrow, delegating
4213 // execution of any finally block to the exception handler.
4214 if (!pn3->pn_next) {
4215 if (Emit1(cx, bce, JSOP_EXCEPTION) < 0)
4216 return false;
4217 if (Emit1(cx, bce, JSOP_THROW) < 0)
4218 return false;
4224 JS_ASSERT(bce->stackDepth == depth);
4226 // Emit the finally handler, if there is one.
4227 ptrdiff_t finallyStart = 0;
4228 if (pn->pn_kid3) {
4229 // Fix up the gosubs that might have been emitted before non-local
4230 // jumps to the finally code.
4231 if (!BackPatch(cx, bce, stmtInfo.gosubs(), bce->code().end(), JSOP_GOSUB))
4232 return false;
4234 finallyStart = bce->offset();
4236 // Indicate that we're emitting a subroutine body.
4237 stmtInfo.type = STMT_SUBROUTINE;
4238 if (!UpdateSourceCoordNotes(cx, bce, pn->pn_kid3->pn_pos.begin))
4239 return false;
4240 if (Emit1(cx, bce, JSOP_FINALLY) < 0 ||
4241 !EmitTree(cx, bce, pn->pn_kid3) ||
4242 Emit1(cx, bce, JSOP_RETSUB) < 0)
4244 return false;
4246 JS_ASSERT(bce->stackDepth == depth);
4248 if (!PopStatementBCE(cx, bce))
4249 return false;
4251 // ReconstructPCStack needs a NOP here to mark the end of the last catch block.
4252 if (Emit1(cx, bce, JSOP_NOP) < 0)
4253 return false;
4255 // Fix up the end-of-try/catch jumps to come here.
4256 if (!BackPatch(cx, bce, catchJump, bce->code().end(), JSOP_GOTO))
4257 return false;
4259 // Add the try note last, to let post-order give us the right ordering
4260 // (first to last for a given nesting level, inner to outer by level).
4261 if (pn->pn_kid2 && !bce->tryNoteList.append(JSTRY_CATCH, depth, tryStart, tryEnd))
4262 return false;
4264 // If we've got a finally, mark try+catch region with additional
4265 // trynote to catch exceptions (re)thrown from a catch block or
4266 // for the try{}finally{} case.
4267 if (pn->pn_kid3 && !bce->tryNoteList.append(JSTRY_FINALLY, depth, tryStart, finallyStart))
4268 return false;
4270 return true;
// Emit bytecode for an if/else-if/else chain. Else-if chains are handled
// iteratively via the if_again label (rather than recursively) to bound
// native stack usage; stmtInfo is mutated between STMT_IF and STMT_ELSE as
// the chain is walked.
static bool
EmitIf(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
{
    StmtInfoBCE stmtInfo(cx);

    /* Initialize so we can detect else-if chains and avoid recursion. */
    stmtInfo.type = STMT_IF;
    ptrdiff_t beq = -1;   // offset of the branch-if-false of the current arm
    ptrdiff_t jmp = -1;   // offset of the jump over the current else part
    ptrdiff_t noteIndex = -1;

  if_again:
    /* Emit code for the condition before pushing stmtInfo. */
    if (!EmitTree(cx, bce, pn->pn_kid1))
        return false;
    ptrdiff_t top = bce->offset();
    if (stmtInfo.type == STMT_IF) {
        PushStatementBCE(bce, &stmtInfo, STMT_IF, top);
    } else {
        /*
         * We came here from the goto further below that detects else-if
         * chains, so we must mutate stmtInfo back into a STMT_IF record.
         * Also we need a note offset for SRC_IF_ELSE to help IonMonkey.
         */
        JS_ASSERT(stmtInfo.type == STMT_ELSE);
        stmtInfo.type = STMT_IF;
        stmtInfo.update = top;
        if (!SetSrcNoteOffset(cx, bce, noteIndex, 0, jmp - beq))
            return false;
    }

    /* Emit an annotated branch-if-false around the then part. */
    ParseNode* pn3 = pn->pn_kid3;
    noteIndex = NewSrcNote(cx, bce, pn3 ? SRC_IF_ELSE : SRC_IF);
    if (noteIndex < 0)
        return false;
    beq = EmitJump(cx, bce, JSOP_IFEQ, 0);
    if (beq < 0)
        return false;

    /* Emit code for the then and optional else parts. */
    if (!EmitTree(cx, bce, pn->pn_kid2))
        return false;
    if (pn3) {
        /* Modify stmtInfo so we know we're in the else part. */
        stmtInfo.type = STMT_ELSE;

        /*
         * Emit a JSOP_BACKPATCH op to jump from the end of our then part
         * around the else part. The PopStatementBCE call at the bottom of
         * this function will fix up the backpatch chain linked from
         * stmtInfo.breaks.
         */
        jmp = EmitGoto(cx, bce, &stmtInfo, &stmtInfo.breaks);
        if (jmp < 0)
            return false;

        /* Ensure the branch-if-false comes here, then emit the else. */
        SetJumpOffsetAt(bce, beq);
        if (pn3->isKind(PNK_IF)) {
            // else-if: loop instead of recursing into EmitIf again.
            pn = pn3;
            goto if_again;
        }

        if (!EmitTree(cx, bce, pn3))
            return false;

        /*
         * Annotate SRC_IF_ELSE with the offset from branch to jump, for
         * IonMonkey's benefit. We can't just "back up" from the pc
         * of the else clause, because we don't know whether an extended
         * jump was required to leap from the end of the then clause over
         * the else clause.
         */
        if (!SetSrcNoteOffset(cx, bce, noteIndex, 0, jmp - beq))
            return false;
    } else {
        /* No else part, fixup the branch-if-false to come here. */
        SetJumpOffsetAt(bce, beq);
    }
    return PopStatementBCE(cx, bce);
}
4357 * pnLet represents one of:
4359 * let-expression: (let (x = y) EXPR)
4360 * let-statement: let (x = y) { ... }
4362 * For a let-expression 'let (x = a, [y,z] = b) e', EmitLet produces:
4364 * bytecode stackDepth srcnotes
4365 * evaluate a +1
4366 * evaluate b +1
4367 * dup +1
4368 * destructure y
4369 * pick 1
4370 * dup +1
4371 * destructure z
4372 * pick 1
4373 * pop -1
4374 * setlocal 2 -1
4375 * setlocal 1 -1
4376 * setlocal 0 -1
4377 * pushblockscope (if needed)
4378 * evaluate e +1
4379 * debugleaveblock
4380 * popblockscope (if needed)
4382 * Note that, since pushblockscope simply changes fp->scopeChain and does not
4383 * otherwise touch the stack, evaluation of the let-var initializers must leave
4384 * the initial value in the let-var's future slot.
4387 * Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See
4388 * the comment on EmitSwitch.
4390 MOZ_NEVER_INLINE static bool
4391 EmitLet(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pnLet)
4393 JS_ASSERT(pnLet->isArity(PN_BINARY));
4394 ParseNode* varList = pnLet->pn_left;
4395 JS_ASSERT(varList->isArity(PN_LIST));
4396 ParseNode* letBody = pnLet->pn_right;
4397 JS_ASSERT(letBody->isLet() && letBody->isKind(PNK_LEXICALSCOPE));
4399 int letHeadDepth = bce->stackDepth;
4401 if (!EmitVariables(cx, bce, varList, PushInitialValues, true))
4402 return false;
4404 /* Push storage for hoisted let decls (e.g. 'let (x) { let y }'). */
4405 uint32_t alreadyPushed = bce->stackDepth - letHeadDepth;
4406 StmtInfoBCE stmtInfo(cx);
4407 if (!EnterBlockScope(cx, bce, &stmtInfo, letBody->pn_objbox, alreadyPushed))
4408 return false;
4410 if (!EmitTree(cx, bce, letBody->pn_expr))
4411 return false;
4413 if (!LeaveNestedScope(cx, bce, &stmtInfo))
4414 return false;
4416 return true;
4420 * Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See
4421 * the comment on EmitSwitch.
4423 MOZ_NEVER_INLINE static bool
4424 EmitLexicalScope(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
4426 JS_ASSERT(pn->isKind(PNK_LEXICALSCOPE));
4428 StmtInfoBCE stmtInfo(cx);
4429 if (!EnterBlockScope(cx, bce, &stmtInfo, pn->pn_objbox, 0))
4430 return false;
4432 if (!EmitTree(cx, bce, pn->pn_expr))
4433 return false;
4435 if (!LeaveNestedScope(cx, bce, &stmtInfo))
4436 return false;
4438 return true;
4441 static bool
4442 EmitWith(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
4444 StmtInfoBCE stmtInfo(cx);
4445 if (!EmitTree(cx, bce, pn->pn_left))
4446 return false;
4447 if (!EnterNestedScope(cx, bce, &stmtInfo, pn->pn_binary_obj, STMT_WITH))
4448 return false;
4449 if (!EmitTree(cx, bce, pn->pn_right))
4450 return false;
4451 if (!LeaveNestedScope(cx, bce, &stmtInfo))
4452 return false;
4453 return true;
4457 * EmitIterator expects the iterable to already be on the stack.
4458 * It will replace that stack value with the corresponding iterator
4460 static bool
4461 EmitIterator(ExclusiveContext* cx, BytecodeEmitter* bce)
4463 // Convert iterable to iterator.
4464 if (Emit1(cx, bce, JSOP_DUP) < 0) // OBJ OBJ
4465 return false;
4466 if (!EmitAtomOp(cx, cx->names().std_iterator, JSOP_CALLPROP, bce)) // OBJ @@ITERATOR
4467 return false;
4468 if (Emit1(cx, bce, JSOP_SWAP) < 0) // @@ITERATOR OBJ
4469 return false;
4470 if (EmitCall(cx, bce, JSOP_CALL, 0) < 0) // ITER
4471 return false;
4472 CheckTypeSet(cx, bce, JSOP_CALL);
4473 return true;
/*
 * If type is STMT_FOR_OF_LOOP, it emits bytecode for for-of loop.
 * pn should be PNK_FOR, and pn->pn_left should be PNK_FOROF.
 *
 * If type is STMT_SPREAD, it emits bytecode for spread operator.
 * pn should be nullptr.
 * Please refer the comment above EmitSpread for additional information about
 * stack convention.
 */
static bool
EmitForOf(ExclusiveContext* cx, BytecodeEmitter* bce, StmtType type, ParseNode* pn, ptrdiff_t top)
{
    JS_ASSERT(type == STMT_FOR_OF_LOOP || type == STMT_SPREAD);
    JS_ASSERT_IF(type == STMT_FOR_OF_LOOP, pn && pn->pn_left->isKind(PNK_FOROF));
    JS_ASSERT_IF(type == STMT_SPREAD, !pn);

    ParseNode* forHead = pn ? pn->pn_left : nullptr;
    ParseNode* forBody = pn ? pn->pn_right : nullptr;

    ParseNode* pn1 = forHead ? forHead->pn_kid1 : nullptr;
    bool letDecl = pn1 && pn1->isKind(PNK_LEXICALSCOPE);
    JS_ASSERT_IF(letDecl, pn1->isLet());

    // If the left part is 'var x', emit code to define x if necessary using a
    // prolog opcode, but do not emit a pop.
    if (pn1) {
        ParseNode* decl = letDecl ? pn1->pn_expr : pn1;
        JS_ASSERT(decl->isKind(PNK_VAR) || decl->isKind(PNK_LET));
        bce->emittingForInit = true;
        if (!EmitVariables(cx, bce, decl, DefineVars))
            return false;
        bce->emittingForInit = false;
    }

    if (type == STMT_FOR_OF_LOOP) {
        // For-of loops run with two values on the stack: the iterator and the
        // current result object.

        // Compile the object expression to the right of 'of'.
        if (!EmitTree(cx, bce, forHead->pn_kid3))
            return false;
        if (!EmitIterator(cx, bce))
            return false;

        // Push a dummy result so that we properly enter iteration midstream.
        if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)                // ITER RESULT
            return false;
    }

    // Enter the block before the loop body, after evaluating the obj.
    StmtInfoBCE letStmt(cx);
    if (letDecl) {
        if (!EnterBlockScope(cx, bce, &letStmt, pn1->pn_objbox, 0))
            return false;
    }

    LoopStmtInfo stmtInfo(cx);
    PushLoopStatement(bce, &stmtInfo, type, top);

    // Jump down to the loop condition to minimize overhead assuming at least
    // one iteration, as the other loop forms do. Annotate so IonMonkey can
    // find the loop-closing jump.
    int noteIndex = NewSrcNote(cx, bce, SRC_FOR_OF);
    if (noteIndex < 0)
        return false;
    ptrdiff_t jmp = EmitJump(cx, bce, JSOP_GOTO, 0);
    if (jmp < 0)
        return false;

    top = bce->offset();
    SET_STATEMENT_TOP(&stmtInfo, top);
    if (EmitLoopHead(cx, bce, nullptr) < 0)
        return false;

    // In the spread case the RESULT value is pushed by the loop entry code
    // below; account for it here so the depth bookkeeping balances.
    if (type == STMT_SPREAD)
        bce->stackDepth++;

#ifdef DEBUG
    int loopDepth = bce->stackDepth;
#endif

    // Emit code to assign result.value to the iteration variable.
    if (type == STMT_FOR_OF_LOOP) {
        if (Emit1(cx, bce, JSOP_DUP) < 0)                      // ITER RESULT RESULT
            return false;
    }
    if (!EmitAtomOp(cx, cx->names().value, JSOP_GETPROP, bce)) // ... RESULT VALUE
        return false;
    if (type == STMT_FOR_OF_LOOP) {
        if (!EmitAssignment(cx, bce, forHead->pn_kid2, JSOP_NOP, nullptr)) // ITER RESULT VALUE
            return false;
        if (Emit1(cx, bce, JSOP_POP) < 0)                      // ITER RESULT
            return false;

        // The stack should be balanced around the assignment opcode sequence.
        JS_ASSERT(bce->stackDepth == loopDepth);

        // Emit code for the loop body.
        if (!EmitTree(cx, bce, forBody))
            return false;

        // Set loop and enclosing "update" offsets, for continue.
        StmtInfoBCE* stmt = &stmtInfo;
        do {
            stmt->update = bce->offset();
        } while ((stmt = stmt->down) != nullptr && stmt->type == STMT_LABEL);
    } else {
        if (Emit1(cx, bce, JSOP_INITELEM_INC) < 0)             // ITER ARR (I+1)
            return false;

        JS_ASSERT(bce->stackDepth == loopDepth - 1);

        // STMT_SPREAD never contain continue, so do not set "update" offset.
    }

    // COME FROM the beginning of the loop to here.
    SetJumpOffsetAt(bce, jmp);
    if (!EmitLoopEntry(cx, bce, nullptr))
        return false;

    if (type == STMT_FOR_OF_LOOP) {
        if (Emit1(cx, bce, JSOP_POP) < 0)                      // ITER
            return false;
        if (Emit1(cx, bce, JSOP_DUP) < 0)                      // ITER ITER
            return false;
    } else {
        if (!EmitDupAt(cx, bce, bce->stackDepth - 1 - 2))      // ITER ARR I ITER
            return false;
    }
    if (!EmitIteratorNext(cx, bce, forHead))                   // ... RESULT
        return false;
    if (Emit1(cx, bce, JSOP_DUP) < 0)                          // ... RESULT RESULT
        return false;
    if (!EmitAtomOp(cx, cx->names().done, JSOP_GETPROP, bce))  // ... RESULT DONE?
        return false;

    // Loop back to the head while result.done is false.
    ptrdiff_t beq = EmitJump(cx, bce, JSOP_IFEQ, top - bce->offset()); // ... RESULT
    if (beq < 0)
        return false;

    JS_ASSERT(bce->stackDepth == loopDepth);

    // Let Ion know where the closing jump of this loop is.
    if (!SetSrcNoteOffset(cx, bce, (unsigned)noteIndex, 0, beq - jmp))
        return false;

    // Fixup breaks and continues.
    // For STMT_SPREAD, just pop pc->topStmt.
    if (!PopStatementBCE(cx, bce))
        return false;

    if (letDecl) {
        if (!LeaveNestedScope(cx, bce, &letStmt))
            return false;
    }

    if (type == STMT_SPREAD) {
        if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)3) < 0)      // ARR I RESULT ITER
            return false;
    }

    // Pop the result and the iter.
    EMIT_UINT16_IMM_OP(JSOP_POPN, 2);

    return true;
}
// Emit bytecode for a for-in loop: evaluate the object on the right of 'in',
// convert it to an iteration object with JSOP_ITER, and drive the loop with
// JSOP_ITERNEXT/JSOP_MOREITER, jumping to the condition first so the body is
// skipped when there is nothing to enumerate.
static bool
EmitForIn(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn, ptrdiff_t top)
{
    ParseNode* forHead = pn->pn_left;
    ParseNode* forBody = pn->pn_right;

    ParseNode* pn1 = forHead->pn_kid1;
    bool letDecl = pn1 && pn1->isKind(PNK_LEXICALSCOPE);
    JS_ASSERT_IF(letDecl, pn1->isLet());

    /*
     * If the left part is 'var x', emit code to define x if necessary
     * using a prolog opcode, but do not emit a pop. If the left part was
     * originally 'var x = i', the parser will have rewritten it; see
     * Parser::forStatement. 'for (let x = i in o)' is mercifully banned.
     */
    if (pn1) {
        ParseNode* decl = letDecl ? pn1->pn_expr : pn1;
        JS_ASSERT(decl->isKind(PNK_VAR) || decl->isKind(PNK_LET));
        bce->emittingForInit = true;
        if (!EmitVariables(cx, bce, decl, DefineVars))
            return false;
        bce->emittingForInit = false;
    }

    /* Compile the object expression to the right of 'in'. */
    if (!EmitTree(cx, bce, forHead->pn_kid3))
        return false;

    /*
     * Emit a bytecode to convert top of stack value to the iterator
     * object depending on the loop variant (for-in, for-each-in, or
     * destructuring for-in).
     */
    JS_ASSERT(pn->isOp(JSOP_ITER));
    if (Emit2(cx, bce, JSOP_ITER, (uint8_t) pn->pn_iflags) < 0)
        return false;

    /* Enter the block before the loop body, after evaluating the obj. */
    StmtInfoBCE letStmt(cx);
    if (letDecl) {
        if (!EnterBlockScope(cx, bce, &letStmt, pn1->pn_objbox, 0))
            return false;
    }

    LoopStmtInfo stmtInfo(cx);
    PushLoopStatement(bce, &stmtInfo, STMT_FOR_IN_LOOP, top);

    /* Annotate so IonMonkey can find the loop-closing jump. */
    int noteIndex = NewSrcNote(cx, bce, SRC_FOR_IN);
    if (noteIndex < 0)
        return false;

    /*
     * Jump down to the loop condition to minimize overhead assuming at
     * least one iteration, as the other loop forms do.
     */
    ptrdiff_t jmp = EmitJump(cx, bce, JSOP_GOTO, 0);
    if (jmp < 0)
        return false;

    top = bce->offset();
    SET_STATEMENT_TOP(&stmtInfo, top);
    if (EmitLoopHead(cx, bce, nullptr) < 0)
        return false;

#ifdef DEBUG
    int loopDepth = bce->stackDepth;
#endif

    /*
     * Emit code to get the next enumeration value and assign it to the
     * left hand side.
     */
    if (Emit1(cx, bce, JSOP_ITERNEXT) < 0)
        return false;
    if (!EmitAssignment(cx, bce, forHead->pn_kid2, JSOP_NOP, nullptr))
        return false;

    if (Emit1(cx, bce, JSOP_POP) < 0)
        return false;

    /* The stack should be balanced around the assignment opcode sequence. */
    JS_ASSERT(bce->stackDepth == loopDepth);

    /* Emit code for the loop body. */
    if (!EmitTree(cx, bce, forBody))
        return false;

    /* Set loop and enclosing "update" offsets, for continue. */
    StmtInfoBCE* stmt = &stmtInfo;
    do {
        stmt->update = bce->offset();
    } while ((stmt = stmt->down) != nullptr && stmt->type == STMT_LABEL);

    /*
     * Fixup the goto that starts the loop to jump down to JSOP_MOREITER.
     */
    SetJumpOffsetAt(bce, jmp);
    if (!EmitLoopEntry(cx, bce, nullptr))
        return false;
    if (Emit1(cx, bce, JSOP_MOREITER) < 0)
        return false;
    ptrdiff_t beq = EmitJump(cx, bce, JSOP_IFNE, top - bce->offset());
    if (beq < 0)
        return false;

    /* Set the srcnote offset so we can find the closing jump. */
    if (!SetSrcNoteOffset(cx, bce, (unsigned)noteIndex, 0, beq - jmp))
        return false;

    // Fix up breaks and continues.
    if (!PopStatementBCE(cx, bce))
        return false;

    // Record a JSTRY_ITER note covering the loop, then close the iterator.
    if (!bce->tryNoteList.append(JSTRY_ITER, bce->stackDepth, top, bce->offset()))
        return false;
    if (Emit1(cx, bce, JSOP_ENDITER) < 0)
        return false;

    if (letDecl) {
        if (!LeaveNestedScope(cx, bce, &letStmt))
            return false;
    }

    return true;
}
// Emit bytecode for a C-style for(init; cond; update) loop. The SRC_FOR
// srcnote carries three offsets (condition, update, closing jump) that
// IonBuilder and the decompiler use to navigate the loop.
static bool
EmitNormalFor(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn, ptrdiff_t top)
{
    LoopStmtInfo stmtInfo(cx);
    PushLoopStatement(bce, &stmtInfo, STMT_FOR_LOOP, top);

    ParseNode* forHead = pn->pn_left;
    ParseNode* forBody = pn->pn_right;

    /* C-style for (init; cond; update) ... loop. */
    JSOp op = JSOP_POP;
    ParseNode* pn3 = forHead->pn_kid1;
    if (!pn3) {
        // No initializer, but emit a nop so that there's somewhere to put the
        // SRC_FOR annotation that IonBuilder will look for.
        op = JSOP_NOP;
    } else {
        bce->emittingForInit = true;
        if (!UpdateSourceCoordNotes(cx, bce, pn3->pn_pos.begin))
            return false;
        if (!EmitTree(cx, bce, pn3))
            return false;
        bce->emittingForInit = false;
    }

    /*
     * NB: the SRC_FOR note has offsetBias 1 (JSOP_{NOP,POP}_LENGTH).
     * Use tmp to hold the biased srcnote "top" offset, which differs
     * from the top local variable by the length of the JSOP_GOTO
     * emitted in between tmp and top if this loop has a condition.
     */
    int noteIndex = NewSrcNote(cx, bce, SRC_FOR);
    if (noteIndex < 0 || Emit1(cx, bce, op) < 0)
        return false;
    ptrdiff_t tmp = bce->offset();

    ptrdiff_t jmp = -1;
    if (forHead->pn_kid2) {
        /* Goto the loop condition, which branches back to iterate. */
        jmp = EmitJump(cx, bce, JSOP_GOTO, 0);
        if (jmp < 0)
            return false;
    } else {
        if (op != JSOP_NOP && Emit1(cx, bce, JSOP_NOP) < 0)
            return false;
    }

    top = bce->offset();
    SET_STATEMENT_TOP(&stmtInfo, top);

    /* Emit code for the loop body. */
    if (EmitLoopHead(cx, bce, forBody) < 0)
        return false;
    if (jmp == -1 && !EmitLoopEntry(cx, bce, forBody))
        return false;
    if (!EmitTree(cx, bce, forBody))
        return false;

    /* Set the second note offset so we can find the update part. */
    JS_ASSERT(noteIndex != -1);
    ptrdiff_t tmp2 = bce->offset();

    /* Set loop and enclosing "update" offsets, for continue. */
    StmtInfoBCE* stmt = &stmtInfo;
    do {
        stmt->update = bce->offset();
    } while ((stmt = stmt->down) != nullptr && stmt->type == STMT_LABEL);

    /* Check for update code to do before the condition (if any). */
    pn3 = forHead->pn_kid3;
    if (pn3) {
        if (!UpdateSourceCoordNotes(cx, bce, pn3->pn_pos.begin))
            return false;
        op = JSOP_POP;
        if (!EmitTree(cx, bce, pn3))
            return false;

        /* Always emit the POP or NOP to help IonBuilder. */
        if (Emit1(cx, bce, op) < 0)
            return false;

        /* Restore the absolute line number for source note readers. */
        uint32_t lineNum = bce->parser->tokenStream.srcCoords.lineNum(pn->pn_pos.end);
        if (bce->currentLine() != lineNum) {
            if (NewSrcNote2(cx, bce, SRC_SETLINE, ptrdiff_t(lineNum)) < 0)
                return false;
            bce->current->currentLine = lineNum;
            bce->current->lastColumn = 0;
        }
    }

    ptrdiff_t tmp3 = bce->offset();

    if (forHead->pn_kid2) {
        /* Fix up the goto from top to target the loop condition. */
        JS_ASSERT(jmp >= 0);
        SetJumpOffsetAt(bce, jmp);
        if (!EmitLoopEntry(cx, bce, forHead->pn_kid2))
            return false;

        if (!EmitTree(cx, bce, forHead->pn_kid2))
            return false;
    }

    /* Set the first note offset so we can find the loop condition. */
    if (!SetSrcNoteOffset(cx, bce, (unsigned)noteIndex, 0, tmp3 - tmp))
        return false;
    if (!SetSrcNoteOffset(cx, bce, (unsigned)noteIndex, 1, tmp2 - tmp))
        return false;
    /* The third note offset helps us find the loop-closing jump. */
    if (!SetSrcNoteOffset(cx, bce, (unsigned)noteIndex, 2, bce->offset() - tmp))
        return false;

    /* If no loop condition, just emit a loop-closing jump. */
    op = forHead->pn_kid2 ? JSOP_IFNE : JSOP_GOTO;
    if (EmitJump(cx, bce, op, top - bce->offset()) < 0)
        return false;

    if (!bce->tryNoteList.append(JSTRY_LOOP, bce->stackDepth, top, bce->offset()))
        return false;

    /* Now fixup all breaks and continues. */
    return PopStatementBCE(cx, bce);
}
4896 static inline bool
4897 EmitFor(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn, ptrdiff_t top)
4899 if (pn->pn_left->isKind(PNK_FORIN))
4900 return EmitForIn(cx, bce, pn, top);
4902 if (pn->pn_left->isKind(PNK_FOROF))
4903 return EmitForOf(cx, bce, STMT_FOR_OF_LOOP, pn, top);
4905 JS_ASSERT(pn->pn_left->isKind(PNK_FORHEAD));
4906 return EmitNormalFor(cx, bce, pn, top);
// Emit a function definition or expression. For an interpreted non-lazy
// function this compiles the inner function with a nested BytecodeEmitter;
// for a lazy function it only links the lazy script to its enclosing scope
// and source. The function object itself becomes a literal in the outer
// script's object pool, referenced by JSOP_LAMBDA/JSOP_DEFFUN/etc.
static MOZ_NEVER_INLINE bool
EmitFunc(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
{
    FunctionBox* funbox = pn->pn_funbox;
    RootedFunction fun(cx, funbox->function());
    JS_ASSERT_IF(fun->isInterpretedLazy(), fun->lazyScript());

    /*
     * Set the EMITTEDFUNCTION flag in function definitions once they have been
     * emitted. Function definitions that need hoisting to the top of the
     * function will be seen by EmitFunc in two places.
     */
    if (pn->pn_dflags & PND_EMITTEDFUNCTION) {
        JS_ASSERT_IF(fun->hasScript(), fun->nonLazyScript());
        JS_ASSERT(pn->functionIsHoisted());
        JS_ASSERT(bce->sc->isFunctionBox());
        return true;
    }

    pn->pn_dflags |= PND_EMITTEDFUNCTION;

    /*
     * Mark as singletons any function which will only be executed once, or
     * which is inner to a lambda we only expect to run once. In the latter
     * case, if the lambda runs multiple times then CloneFunctionObject will
     * make a deep clone of its contents.
     */
    if (fun->isInterpreted()) {
        bool singleton =
            bce->script->compileAndGo() &&
            fun->isInterpreted() &&
            (bce->checkSingletonContext() ||
             (!bce->isInLoop() && bce->isRunOnceLambda()));
        if (!JSFunction::setTypeForScriptedFunction(cx, fun, singleton))
            return false;

        if (fun->isInterpretedLazy()) {
            // Lazy function: don't compile now; just ensure the lazy script
            // knows its enclosing static scope and source object.
            if (!fun->lazyScript()->sourceObject()) {
                JSObject* scope = bce->staticScope;
                if (!scope && bce->sc->isFunctionBox())
                    scope = bce->sc->asFunctionBox()->function();
                JSObject* source = bce->script->sourceObject();
                fun->lazyScript()->setParent(scope, &source->as<ScriptSourceObject>());
            }
            if (bce->emittingRunOnceLambda)
                fun->lazyScript()->setTreatAsRunOnce();
        } else {
            SharedContext* outersc = bce->sc;

            if (outersc->isFunctionBox() && outersc->asFunctionBox()->mightAliasLocals())
                funbox->setMightAliasLocals();      // inherit mightAliasLocals from parent
            JS_ASSERT_IF(outersc->strict, funbox->strict);

            // Inherit most things (principals, version, etc) from the parent.
            Rooted<JSScript*> parent(cx, bce->script);
            CompileOptions options(cx, bce->parser->options());
            options.setOriginPrincipals(parent->originPrincipals())
                   .setCompileAndGo(parent->compileAndGo())
                   .setSelfHostingMode(parent->selfHosted())
                   .setNoScriptRval(false)
                   .setForEval(false)
                   .setVersion(parent->getVersion());

            Rooted<JSObject*> enclosingScope(cx, EnclosingStaticScope(bce));
            Rooted<JSObject*> sourceObject(cx, bce->script->sourceObject());
            Rooted<JSScript*> script(cx, JSScript::Create(cx, enclosingScope, false, options,
                                                          parent->staticLevel() + 1,
                                                          sourceObject,
                                                          funbox->bufStart, funbox->bufEnd));
            if (!script)
                return false;

            script->bindings = funbox->bindings;

            // Compile the inner function with its own emitter, nested in bce.
            uint32_t lineNum = bce->parser->tokenStream.srcCoords.lineNum(pn->pn_pos.begin);
            BytecodeEmitter bce2(bce, bce->parser, funbox, script, bce->insideEval,
                                 bce->evalCaller, bce->hasGlobalScope, lineNum,
                                 bce->emitterMode);
            if (!bce2.init())
                return false;

            /* We measured the max scope depth when we parsed the function. */
            if (!EmitFunctionScript(cx, &bce2, pn->pn_body))
                return false;

            if (funbox->usesArguments && funbox->usesApply)
                script->setUsesArgumentsAndApply();
        }
    } else {
        JS_ASSERT(IsAsmJSModuleNative(fun->native()));
    }

    /* Make the function object a literal in the outer script's pool. */
    unsigned index = bce->objectList.add(pn->pn_funbox);

    /* Non-hoisted functions simply emit their respective op. */
    if (!pn->functionIsHoisted()) {
        /* JSOP_LAMBDA_ARROW is always preceded by JSOP_THIS. */
        MOZ_ASSERT(fun->isArrow() == (pn->getOp() == JSOP_LAMBDA_ARROW));
        if (fun->isArrow() && Emit1(cx, bce, JSOP_THIS) < 0)
            return false;
        return EmitIndex32(cx, pn->getOp(), index, bce);
    }

    /*
     * For a script we emit the code as we parse. Thus the bytecode for
     * top-level functions should go in the prolog to predefine their
     * names in the variable object before the already-generated main code
     * is executed. This extra work for top-level scripts is not necessary
     * when we emit the code for a function. It is fully parsed prior to
     * invocation of the emitter and calls to EmitTree for function
     * definitions can be scheduled before generating the rest of code.
     */
    if (!bce->sc->isFunctionBox()) {
        JS_ASSERT(pn->pn_cookie.isFree());
        JS_ASSERT(pn->getOp() == JSOP_NOP);
        JS_ASSERT(!bce->topStmt);
        bce->switchToProlog();
        if (!EmitIndex32(cx, JSOP_DEFFUN, index, bce))
            return false;
        if (!UpdateSourceCoordNotes(cx, bce, pn->pn_pos.begin))
            return false;
        bce->switchToMain();
    } else {
#ifdef DEBUG
        // The hoisted function must have a local/argument binding.
        BindingIter bi(bce->script);
        while (bi->name() != fun->atom())
            bi++;
        JS_ASSERT(bi->kind() == Binding::VARIABLE || bi->kind() == Binding::CONSTANT ||
                  bi->kind() == Binding::ARGUMENT);
        JS_ASSERT(bi.frameIndex() < JS_BIT(20));
#endif
        // Hoisted function inside a function: create the closure and store
        // it in its frame slot, then pop.
        pn->pn_index = index;
        if (!EmitIndexOp(cx, JSOP_LAMBDA, index, bce))
            return false;
        JS_ASSERT(pn->getOp() == JSOP_GETLOCAL || pn->getOp() == JSOP_GETARG);
        JSOp setOp = pn->getOp() == JSOP_GETLOCAL ? JSOP_SETLOCAL : JSOP_SETARG;
        if (!EmitVarOp(cx, pn, setOp, bce))
            return false;
        if (Emit1(cx, bce, JSOP_POP) < 0)
            return false;
    }

    return true;
}
// Emit bytecode for a do-while loop: body first, then the condition, with a
// JSOP_IFNE branching back to the loop head.
static bool
EmitDo(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
{
    /* Emit an annotated nop so IonBuilder can recognize the 'do' loop. */
    ptrdiff_t noteIndex = NewSrcNote(cx, bce, SRC_WHILE);
    if (noteIndex < 0 || Emit1(cx, bce, JSOP_NOP) < 0)
        return false;

    ptrdiff_t noteIndex2 = NewSrcNote(cx, bce, SRC_WHILE);
    if (noteIndex2 < 0)
        return false;

    /* Compile the loop body. */
    ptrdiff_t top = EmitLoopHead(cx, bce, pn->pn_left);
    if (top < 0)
        return false;

    LoopStmtInfo stmtInfo(cx);
    PushLoopStatement(bce, &stmtInfo, STMT_DO_LOOP, top);

    if (!EmitLoopEntry(cx, bce, nullptr))
        return false;

    if (!EmitTree(cx, bce, pn->pn_left))
        return false;

    /* Set loop and enclosing label update offsets, for continue. */
    ptrdiff_t off = bce->offset();
    StmtInfoBCE* stmt = &stmtInfo;
    do {
        stmt->update = off;
    } while ((stmt = stmt->down) != nullptr && stmt->type == STMT_LABEL);

    /* Compile the loop condition, now that continues know where to go. */
    if (!EmitTree(cx, bce, pn->pn_right))
        return false;

    ptrdiff_t beq = EmitJump(cx, bce, JSOP_IFNE, top - bce->offset());
    if (beq < 0)
        return false;

    if (!bce->tryNoteList.append(JSTRY_LOOP, bce->stackDepth, top, bce->offset()))
        return false;

    /*
     * Update the annotations with the update and back edge positions, for
     * IonBuilder.
     *
     * Be careful: We must set noteIndex2 before noteIndex in case the noteIndex
     * note gets bigger.
     */
    if (!SetSrcNoteOffset(cx, bce, noteIndex2, 0, beq - top))
        return false;
    if (!SetSrcNoteOffset(cx, bce, noteIndex, 0, 1 + (off - top)))
        return false;

    return PopStatementBCE(cx, bce);
}
static bool
EmitWhile(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn, ptrdiff_t top)
{
    /*
     * Minimize bytecodes issued for one or more iterations by jumping to
     * the condition below the body and closing the loop if the condition
     * is true with a backward branch. For iteration count i:
     *
     *  i       test at the top                 test at the bottom
     *  =       ===============                 ==================
     *  0       ifeq-pass                       goto; ifne-fail
     *  1       ifeq-fail; goto; ifne-pass      goto; ifne-pass; ifne-fail
     *  2       2*(ifeq-fail; goto); ifeq-pass  goto; 2*ifne-pass; ifne-fail
     *  . . .
     *  N       N*(ifeq-fail; goto); ifeq-pass  goto; N*ifne-pass; ifne-fail
     */
    LoopStmtInfo stmtInfo(cx);
    PushLoopStatement(bce, &stmtInfo, STMT_WHILE_LOOP, top);

    // Annotate so the loop-closing jump can be located from the note.
    ptrdiff_t noteIndex = NewSrcNote(cx, bce, SRC_WHILE);
    if (noteIndex < 0)
        return false;

    // Jump straight to the condition, which is emitted after the body.
    ptrdiff_t jmp = EmitJump(cx, bce, JSOP_GOTO, 0);
    if (jmp < 0)
        return false;

    top = EmitLoopHead(cx, bce, pn->pn_right);
    if (top < 0)
        return false;

    // Emit the loop body (pn_right).
    if (!EmitTree(cx, bce, pn->pn_right))
        return false;

    // The initial goto lands here; emit the condition (pn_left) and branch
    // back to the head while it is true.
    SetJumpOffsetAt(bce, jmp);
    if (!EmitLoopEntry(cx, bce, pn->pn_left))
        return false;
    if (!EmitTree(cx, bce, pn->pn_left))
        return false;

    ptrdiff_t beq = EmitJump(cx, bce, JSOP_IFNE, top - bce->offset());
    if (beq < 0)
        return false;

    if (!bce->tryNoteList.append(JSTRY_LOOP, bce->stackDepth, top, bce->offset()))
        return false;

    // Record the goto-to-closing-branch distance in the SRC_WHILE note.
    if (!SetSrcNoteOffset(cx, bce, noteIndex, 0, beq - jmp))
        return false;

    return PopStatementBCE(cx, bce);
}
5167 static bool
5168 EmitBreak(ExclusiveContext* cx, BytecodeEmitter* bce, PropertyName* label)
5170 StmtInfoBCE* stmt = bce->topStmt;
5171 SrcNoteType noteType;
5172 if (label) {
5173 while (stmt->type != STMT_LABEL || stmt->label != label)
5174 stmt = stmt->down;
5175 noteType = SRC_BREAK2LABEL;
5176 } else {
5177 while (!stmt->isLoop() && stmt->type != STMT_SWITCH)
5178 stmt = stmt->down;
5179 noteType = (stmt->type == STMT_SWITCH) ? SRC_SWITCHBREAK : SRC_BREAK;
5182 return EmitGoto(cx, bce, stmt, &stmt->breaks, noteType) >= 0;
5185 static bool
5186 EmitContinue(ExclusiveContext* cx, BytecodeEmitter* bce, PropertyName* label)
5188 StmtInfoBCE* stmt = bce->topStmt;
5189 if (label) {
5190 /* Find the loop statement enclosed by the matching label. */
5191 StmtInfoBCE* loop = nullptr;
5192 while (stmt->type != STMT_LABEL || stmt->label != label) {
5193 if (stmt->isLoop())
5194 loop = stmt;
5195 stmt = stmt->down;
5197 stmt = loop;
5198 } else {
5199 while (!stmt->isLoop())
5200 stmt = stmt->down;
5203 return EmitGoto(cx, bce, stmt, &stmt->continues, SRC_CONTINUE) >= 0;
// Emit bytecode for a return statement, including the iterator-result boxing
// required in star generators and the non-local-jump fixups required when
// returning out of try/finally or let blocks.
static bool
EmitReturn(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
{
    if (!UpdateSourceCoordNotes(cx, bce, pn->pn_pos.begin))
        return false;

    // In a star generator, wrap the return value in an iterator result
    // object (see EmitPrepareIteratorResult/EmitFinishIteratorResult).
    if (bce->sc->isFunctionBox() && bce->sc->asFunctionBox()->isStarGenerator()) {
        if (!EmitPrepareIteratorResult(cx, bce))
            return false;
    }

    /* Push a return value */
    if (ParseNode* pn2 = pn->pn_kid) {
        if (!EmitTree(cx, bce, pn2))
            return false;
    } else {
        /* No explicit return value provided */
        if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)
            return false;
    }

    if (bce->sc->isFunctionBox() && bce->sc->asFunctionBox()->isStarGenerator()) {
        if (!EmitFinishIteratorResult(cx, bce, true))
            return false;
    }

    /*
     * EmitNonLocalJumpFixup may add fixup bytecode to close open try
     * blocks having finally clauses and to exit intermingled let blocks.
     * We can't simply transfer control flow to our caller in that case,
     * because we must gosub to those finally clauses from inner to outer,
     * with the correct stack pointer (i.e., after popping any with,
     * for/in, etc., slots nested inside the finally's try).
     *
     * In this case we mutate JSOP_RETURN into JSOP_SETRVAL and add an
     * extra JSOP_RETRVAL after the fixups.
     */
    ptrdiff_t top = bce->offset();

    if (Emit1(cx, bce, JSOP_RETURN) < 0)
        return false;

    NonLocalExitScope nle(cx, bce);

    if (!nle.prepareForNonLocalJump(nullptr))
        return false;

    // If the fixups emitted anything after JSOP_RETURN, rewrite it into
    // JSOP_SETRVAL and terminate with JSOP_RETRVAL instead.
    if (top + static_cast<ptrdiff_t>(JSOP_RETURN_LENGTH) != bce->offset()) {
        bce->code()[top] = JSOP_SETRVAL;
        if (Emit1(cx, bce, JSOP_RETRVAL) < 0)
            return false;
    }

    return true;
}
/*
 * Emit bytecode for |yield* iter| inside a star generator.
 *
 * Implements delegated yield: obtains an iterator from the iterable, then
 * loops — yielding each not-done result object as-is (no re-boxing) and
 * forwarding values sent into the outer generator to the inner iterator's
 * .next().  Exceptions thrown into the generator while suspended inside the
 * loop are forwarded to the inner iterator's .throw() when it exists
 * (detected via |'throw' in ITER|), otherwise re-thrown.  When the inner
 * result is done, leaves RESULT.value on the stack.
 *
 * The trailing // comments on each emit describe the operand stack after
 * that instruction executes.
 */
5262 static bool
5263 EmitYieldStar(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* iter)
5265 JS_ASSERT(bce->sc->isFunctionBox());
5266 JS_ASSERT(bce->sc->asFunctionBox()->isStarGenerator());
5268 if (!EmitTree(cx, bce, iter)) // ITERABLE
5269 return false;
5271 // Convert iterable to iterator.
5272 if (Emit1(cx, bce, JSOP_DUP) < 0) // ITERABLE ITERABLE
5273 return false;
5274 if (!EmitAtomOp(cx, cx->names().std_iterator, JSOP_CALLPROP, bce)) // ITERABLE @@ITERATOR
5275 return false;
5276 if (Emit1(cx, bce, JSOP_SWAP) < 0) // @@ITERATOR ITERABLE
5277 return false;
5278 if (EmitCall(cx, bce, JSOP_CALL, 0, iter) < 0) // ITER
5279 return false;
5280 CheckTypeSet(cx, bce, JSOP_CALL);
5282 int depth = bce->stackDepth;
5283 JS_ASSERT(depth >= 1);
5285 // Initial send value is undefined.
5286 if (Emit1(cx, bce, JSOP_UNDEFINED) < 0) // ITER RECEIVED
5287 return false;
// Backpatched goto: skip the try/catch and jump straight to the first send.
5288 ptrdiff_t initialSend = -1;
5289 if (EmitBackPatchOp(cx, bce, &initialSend) < 0) // goto initialSend
5290 return false;
5292 // Try prologue. // ITER RESULT
5293 StmtInfoBCE stmtInfo(cx);
5294 PushStatementBCE(bce, &stmtInfo, STMT_TRY, bce->offset());
5295 ptrdiff_t noteIndex = NewSrcNote(cx, bce, SRC_TRY);
5296 if (noteIndex < 0 || Emit1(cx, bce, JSOP_TRY) < 0)
5297 return false;
5298 ptrdiff_t tryStart = bce->offset(); // tryStart:
5299 JS_ASSERT(bce->stackDepth == depth + 1);
5301 // Yield RESULT as-is, without re-boxing.
5302 if (Emit1(cx, bce, JSOP_YIELD) < 0) // ITER RECEIVED
5303 return false;
5305 // Try epilogue.
5306 if (!SetSrcNoteOffset(cx, bce, noteIndex, 0, bce->offset() - tryStart + JSOP_TRY_LENGTH))
5307 return false;
5308 ptrdiff_t subsequentSend = -1;
5309 if (EmitBackPatchOp(cx, bce, &subsequentSend) < 0) // goto subsequentSend
5310 return false;
5311 ptrdiff_t tryEnd = bce->offset(); // tryEnd:
5313 // Catch location.
5314 // THROW? = 'throw' in ITER // ITER
// The exception handler resumes with the pre-try depth; the thrown value is
// pushed by JSOP_EXCEPTION.
5315 bce->stackDepth = (uint32_t) depth;
5316 if (Emit1(cx, bce, JSOP_EXCEPTION) < 0) // ITER EXCEPTION
5317 return false;
5318 if (Emit1(cx, bce, JSOP_SWAP) < 0) // EXCEPTION ITER
5319 return false;
5320 if (Emit1(cx, bce, JSOP_DUP) < 0) // EXCEPTION ITER ITER
5321 return false;
5322 if (!EmitAtomOp(cx, cx->names().throw_, JSOP_STRING, bce)) // EXCEPTION ITER ITER "throw"
5323 return false;
5324 if (Emit1(cx, bce, JSOP_SWAP) < 0) // EXCEPTION ITER "throw" ITER
5325 return false;
5326 if (Emit1(cx, bce, JSOP_IN) < 0) // EXCEPTION ITER THROW?
5327 return false;
5328 // if (THROW?) goto delegate
5329 ptrdiff_t checkThrow = EmitJump(cx, bce, JSOP_IFNE, 0); // EXCEPTION ITER
5330 if (checkThrow < 0)
5331 return false;
// Inner iterator has no .throw: re-throw the exception here.
5332 if (Emit1(cx, bce, JSOP_POP) < 0) // EXCEPTION
5333 return false;
5334 if (Emit1(cx, bce, JSOP_THROW) < 0) // throw EXCEPTION
5335 return false;
5337 SetJumpOffsetAt(bce, checkThrow); // delegate:
5338 // RESULT = ITER.throw(EXCEPTION) // EXCEPTION ITER
5339 bce->stackDepth = (uint32_t) depth + 1;
5340 if (Emit1(cx, bce, JSOP_DUP) < 0) // EXCEPTION ITER ITER
5341 return false;
5342 if (Emit1(cx, bce, JSOP_DUP) < 0) // EXCEPTION ITER ITER ITER
5343 return false;
5344 if (!EmitAtomOp(cx, cx->names().throw_, JSOP_CALLPROP, bce)) // EXCEPTION ITER ITER THROW
5345 return false;
5346 if (Emit1(cx, bce, JSOP_SWAP) < 0) // EXCEPTION ITER THROW ITER
5347 return false;
5348 if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)3) < 0) // ITER THROW ITER EXCEPTION
5349 return false;
5350 if (EmitCall(cx, bce, JSOP_CALL, 1, iter) < 0) // ITER RESULT
5351 return false;
5352 CheckTypeSet(cx, bce, JSOP_CALL);
5353 JS_ASSERT(bce->stackDepth == depth + 1);
5354 ptrdiff_t checkResult = -1;
5355 if (EmitBackPatchOp(cx, bce, &checkResult) < 0) // goto checkResult
5356 return false;
5358 // Catch epilogue.
5359 if (!PopStatementBCE(cx, bce))
5360 return false;
5361 // This is a peace offering to ReconstructPCStack. See the note in EmitTry.
5362 if (Emit1(cx, bce, JSOP_NOP) < 0)
5363 return false;
5364 if (!bce->tryNoteList.append(JSTRY_CATCH, depth, tryStart, tryEnd))
5365 return false;
5367 // After the try/catch block: send the received value to the iterator.
5368 if (!BackPatch(cx, bce, initialSend, bce->code().end(), JSOP_GOTO)) // initialSend:
5369 return false;
5370 if (!BackPatch(cx, bce, subsequentSend, bce->code().end(), JSOP_GOTO)) // subsequentSend:
5371 return false;
5373 // Send location.
5374 // result = iter.next(received) // ITER RECEIVED
5375 if (Emit1(cx, bce, JSOP_SWAP) < 0) // RECEIVED ITER
5376 return false;
5377 if (Emit1(cx, bce, JSOP_DUP) < 0) // RECEIVED ITER ITER
5378 return false;
5379 if (Emit1(cx, bce, JSOP_DUP) < 0) // RECEIVED ITER ITER ITER
5380 return false;
5381 if (!EmitAtomOp(cx, cx->names().next, JSOP_CALLPROP, bce)) // RECEIVED ITER ITER NEXT
5382 return false;
5383 if (Emit1(cx, bce, JSOP_SWAP) < 0) // RECEIVED ITER NEXT ITER
5384 return false;
5385 if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)3) < 0) // ITER NEXT ITER RECEIVED
5386 return false;
5387 if (EmitCall(cx, bce, JSOP_CALL, 1, iter) < 0) // ITER RESULT
5388 return false;
5389 CheckTypeSet(cx, bce, JSOP_CALL);
5390 JS_ASSERT(bce->stackDepth == depth + 1);
5392 if (!BackPatch(cx, bce, checkResult, bce->code().end(), JSOP_GOTO)) // checkResult:
5393 return false;
5394 // if (!result.done) goto tryStart; // ITER RESULT
5395 if (Emit1(cx, bce, JSOP_DUP) < 0) // ITER RESULT RESULT
5396 return false;
5397 if (!EmitAtomOp(cx, cx->names().done, JSOP_GETPROP, bce)) // ITER RESULT DONE
5398 return false;
5399 // if (!DONE) goto tryStart;
5400 if (EmitJump(cx, bce, JSOP_IFEQ, tryStart - bce->offset()) < 0) // ITER RESULT
5401 return false;
5403 // result.value
// Done: discard ITER and unwrap the final result's .value.
5404 if (Emit1(cx, bce, JSOP_SWAP) < 0) // RESULT ITER
5405 return false;
5406 if (Emit1(cx, bce, JSOP_POP) < 0) // RESULT
5407 return false;
5408 if (!EmitAtomOp(cx, cx->names().value, JSOP_GETPROP, bce)) // VALUE
5409 return false;
5411 JS_ASSERT(bce->stackDepth == depth);
5413 return true;
/*
 * Emit a block statement: push an STMT_BLOCK scope note, emit each child
 * statement in order, then pop the statement info.  A leading PNX_DESTRUCT
 * child is skipped here — it is handled separately by the caller.
 */
5416 static bool
5417 EmitStatementList(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn, ptrdiff_t top)
5419 JS_ASSERT(pn->isArity(PN_LIST));
5421 StmtInfoBCE stmtInfo(cx);
5422 PushStatementBCE(bce, &stmtInfo, STMT_BLOCK, top);
5424 ParseNode* pnchild = pn->pn_head;
// Skip the synthesized destructuring prologue node, if present.
5426 if (pn->pn_xflags & PNX_DESTRUCT)
5427 pnchild = pnchild->pn_next;
5429 for (ParseNode* pn2 = pnchild; pn2; pn2 = pn2->pn_next) {
5430 if (!EmitTree(cx, bce, pn2))
5431 return false;
5434 return PopStatementBCE(cx, bce);
/*
 * Emit an expression statement (PNK_SEMI).
 *
 * Useless (side-effect-free) expression statements are elided entirely,
 * except: labeled statements, directive-prologue members, and top-level
 * scripts that want the completion value (JSOP_SETRVAL).  Non-directive
 * string statements that look like misplaced directives ("use strict" after
 * the prologue) get a strict warning; other useless expressions warn with
 * JSMSG_USELESS_EXPR.
 */
5437 static bool
5438 EmitStatement(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
5440 JS_ASSERT(pn->isKind(PNK_SEMI));
// An empty statement (bare ';') emits nothing.
5442 ParseNode* pn2 = pn->pn_kid;
5443 if (!pn2)
5444 return true;
5446 if (!UpdateSourceCoordNotes(cx, bce, pn->pn_pos.begin))
5447 return false;
5450 * Top-level or called-from-a-native JS_Execute/EvaluateScript,
5451 * debugger, and eval frames may need the value of the ultimate
5452 * expression statement as the script's result, despite the fact
5453 * that it appears useless to the compiler.
5455 * API users may also set the JSOPTION_NO_SCRIPT_RVAL option when
5456 * calling JS_Compile* to suppress JSOP_SETRVAL.
5458 bool wantval = false;
5459 bool useful = false;
5460 if (bce->sc->isFunctionBox()) {
5461 JS_ASSERT(!bce->script->noScriptRval());
5462 } else {
5463 useful = wantval = !bce->script->noScriptRval();
5466 /* Don't eliminate expressions with side effects. */
5467 if (!useful) {
5468 if (!CheckSideEffects(cx, bce, pn2, &useful))
5469 return false;
5472 * Don't eliminate apparently useless expressions if they are
5473 * labeled expression statements. The pc->topStmt->update test
5474 * catches the case where we are nesting in EmitTree for a labeled
5475 * compound statement.
5477 if (bce->topStmt &&
5478 bce->topStmt->type == STMT_LABEL &&
5479 bce->topStmt->update >= bce->offset())
5481 useful = true;
5485 if (useful) {
// Keep the value as the script rval when wanted, otherwise pop it.
5486 JSOp op = wantval ? JSOP_SETRVAL : JSOP_POP;
5487 JS_ASSERT_IF(pn2->isKind(PNK_ASSIGN), pn2->isOp(JSOP_NOP));
5488 if (!EmitTree(cx, bce, pn2))
5489 return false;
5490 if (Emit1(cx, bce, op) < 0)
5491 return false;
5492 } else if (pn->isDirectivePrologueMember()) {
5493 // Don't complain about directive prologue members; just don't emit
5494 // their code.
5495 } else {
5496 if (JSAtom* atom = pn->isStringExprStatement()) {
5497 // Warn if encountering a non-directive prologue member string
5498 // expression statement, that is inconsistent with the current
5499 // directive prologue. That is, a script *not* starting with
5500 // "use strict" should warn for any "use strict" statements seen
5501 // later in the script, because such statements are misleading.
5502 const char* directive = nullptr;
5503 if (atom == cx->names().useStrict) {
5504 if (!bce->sc->strict)
5505 directive = js_useStrict_str;
5506 } else if (atom == cx->names().useAsm) {
5507 if (bce->sc->isFunctionBox()) {
5508 JSFunction* fun = bce->sc->asFunctionBox()->function();
5509 if (fun->isNative() && IsAsmJSModuleNative(fun->native()))
5510 directive = js_useAsm_str;
5514 if (directive) {
5515 if (!bce->reportStrictWarning(pn2, JSMSG_CONTRARY_NONDIRECTIVE, directive))
5516 return false;
5518 } else {
// Point the warning at the expression's own line before reporting.
5519 bce->current->currentLine = bce->parser->tokenStream.srcCoords.lineNum(pn2->pn_pos.begin);
5520 bce->current->lastColumn = 0;
5521 if (!bce->reportStrictWarning(pn2, JSMSG_USELESS_EXPR))
5522 return false;
5526 return true;
/*
 * Emit bytecode for a |delete| expression, specialized on the operand kind:
 * names use the op chosen by BindNameToSlot (JSOP_FALSE for undeletable
 * bindings), property/element accesses use JSOP_DELPROP/JSOP_DELELEM, and
 * anything else evaluates the operand for side effects (if any) and then
 * pushes JSOP_TRUE, per the |delete non-reference| semantics.
 */
5529 static bool
5530 EmitDelete(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
5533 * Under ECMA 3, deleting a non-reference returns true -- but alas we
5534 * must evaluate the operand if it appears it might have side effects.
5536 ParseNode* pn2 = pn->pn_kid;
5537 switch (pn2->getKind()) {
5538 case PNK_NAME:
5540 if (!BindNameToSlot(cx, bce, pn2))
5541 return false;
5542 JSOp op = pn2->getOp();
// BindNameToSlot sets JSOP_FALSE for bindings that cannot be deleted.
5543 if (op == JSOP_FALSE) {
5544 if (Emit1(cx, bce, op) < 0)
5545 return false;
5546 } else {
5547 if (!EmitAtomOp(cx, pn2, op, bce))
5548 return false;
5550 break;
5552 case PNK_DOT:
5553 if (!EmitPropOp(cx, pn2, JSOP_DELPROP, bce))
5554 return false;
5555 break;
5556 case PNK_ELEM:
5557 if (!EmitElemOp(cx, pn2, JSOP_DELELEM, bce))
5558 return false;
5559 break;
5560 default:
5563 * If useless, just emit JSOP_TRUE; otherwise convert delete foo()
5564 * to foo(), true (a comma expression).
5566 bool useful = false;
5567 if (!CheckSideEffects(cx, bce, pn2, &useful))
5568 return false;
5570 if (useful) {
5571 JS_ASSERT_IF(pn2->isKind(PNK_CALL), !(pn2->pn_xflags & PNX_SETCALL));
5572 if (!EmitTree(cx, bce, pn2))
5573 return false;
5574 if (Emit1(cx, bce, JSOP_POP) < 0)
5575 return false;
5578 if (Emit1(cx, bce, JSOP_TRUE) < 0)
5579 return false;
5583 return true;
// Forward declaration: EmitCallOrNew's spread path packs arguments into an
// array via EmitArray, which is defined later in this file.
5586 static bool
5587 EmitArray(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn, uint32_t count);
/*
 * Emit bytecode for a call, operator-new, or tagged template expression.
 *
 * Order of emission: (1) the callee, specialized on its kind (name, dot,
 * elem, function, other); (2) the implicit |this| (JSOP_UNDEFINED, or
 * JSOP_THIS for genexps) when the callee form supplies none; (3) each
 * argument in source order (or a single array for spread calls); (4) the
 * call/new opcode itself.  Self-hosted code's callFunction(fun, thisArg,
 * ...args) is special-cased to emit a direct call.  Calls to eval get a
 * JSOP_LINENO annotation; PNX_SETCALL calls get a trailing JSOP_SETCALL.
 */
5589 static bool
5590 EmitCallOrNew(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
5592 bool callop = pn->isKind(PNK_CALL) || pn->isKind(PNK_TAGGED_TEMPLATE);
5594 * Emit callable invocation or operator new (constructor call) code.
5595 * First, emit code for the left operand to evaluate the callable or
5596 * constructable object expression.
5598 * For operator new, we emit JSOP_GETPROP instead of JSOP_CALLPROP, etc.
5599 * This is necessary to interpose the lambda-initialized method read
5600 * barrier -- see the code in jsinterp.cpp for JSOP_LAMBDA followed by
5601 * JSOP_{SET,INIT}PROP.
5603 * Then (or in a call case that has no explicit reference-base
5604 * object) we emit JSOP_UNDEFINED to produce the undefined |this|
5605 * value required for calls (which non-strict mode functions
5606 * will box into the global object).
// The list head is the callee; the rest are arguments.
5608 uint32_t argc = pn->pn_count - 1;
5610 if (argc >= ARGC_LIMIT) {
5611 bce->parser->tokenStream.reportError(callop
5612 ? JSMSG_TOO_MANY_FUN_ARGS
5613 : JSMSG_TOO_MANY_CON_ARGS);
5614 return false;
5617 bool emitArgs = true;
5618 ParseNode* pn2 = pn->pn_head;
5619 bool spread = JOF_OPTYPE(pn->getOp()) == JOF_BYTE;
5620 switch (pn2->getKind()) {
5621 case PNK_NAME:
5622 if (bce->emitterMode == BytecodeEmitter::SelfHosting &&
5623 pn2->name() == cx->names().callFunction &&
5624 !spread)
5627 * Special-casing of callFunction to emit bytecode that directly
5628 * invokes the callee with the correct |this| object and arguments.
5629 * callFunction(fun, thisArg, arg0, arg1) thus becomes:
5630 * - emit lookup for fun
5631 * - emit lookup for thisArg
5632 * - emit lookups for arg0, arg1
5634 * argc is set to the amount of actually emitted args and the
5635 * emitting of args below is disabled by setting emitArgs to false.
5637 if (pn->pn_count < 3) {
5638 bce->reportError(pn, JSMSG_MORE_ARGS_NEEDED, "callFunction", "1", "s");
5639 return false;
5641 ParseNode* funNode = pn2->pn_next;
5642 if (!EmitTree(cx, bce, funNode))
5643 return false;
5644 ParseNode* thisArg = funNode->pn_next;
5645 if (!EmitTree(cx, bce, thisArg))
5646 return false;
// Arguments are expression context, never for-in init context.
5647 bool oldEmittingForInit = bce->emittingForInit;
5648 bce->emittingForInit = false;
5649 for (ParseNode* argpn = thisArg->pn_next; argpn; argpn = argpn->pn_next) {
5650 if (!EmitTree(cx, bce, argpn))
5651 return false;
5653 bce->emittingForInit = oldEmittingForInit;
// fun and thisArg were consumed from the list, so drop them from argc.
5654 argc -= 2;
5655 emitArgs = false;
5656 break;
5658 if (!EmitNameOp(cx, bce, pn2, callop))
5659 return false;
5660 break;
5661 case PNK_DOT:
5662 if (!EmitPropOp(cx, pn2, callop ? JSOP_CALLPROP : JSOP_GETPROP, bce))
5663 return false;
5664 break;
5665 case PNK_ELEM:
5666 if (!EmitElemOp(cx, pn2, callop ? JSOP_CALLELEM : JSOP_GETELEM, bce))
5667 return false;
5668 break;
5669 case PNK_FUNCTION:
5671 * Top level lambdas which are immediately invoked should be
5672 * treated as only running once. Every time they execute we will
5673 * create new types and scripts for their contents, to increase
5674 * the quality of type information within them and enable more
5675 * backend optimizations. Note that this does not depend on the
5676 * lambda being invoked at most once (it may be named or be
5677 * accessed via foo.caller indirection), as multiple executions
5678 * will just cause the inner scripts to be repeatedly cloned.
5680 JS_ASSERT(!bce->emittingRunOnceLambda);
5681 if (bce->checkSingletonContext() || (!bce->isInLoop() && bce->isRunOnceLambda())) {
5682 bce->emittingRunOnceLambda = true;
5683 if (!EmitTree(cx, bce, pn2))
5684 return false;
5685 bce->emittingRunOnceLambda = false;
5686 } else {
5687 if (!EmitTree(cx, bce, pn2))
5688 return false;
5690 callop = false;
5691 break;
5692 default:
5693 if (!EmitTree(cx, bce, pn2))
5694 return false;
5695 callop = false; /* trigger JSOP_UNDEFINED after */
5696 break;
5698 if (!callop) {
5699 JSOp thisop = pn->isKind(PNK_GENEXP) ? JSOP_THIS : JSOP_UNDEFINED;
5700 if (Emit1(cx, bce, thisop) < 0)
5701 return false;
5704 if (emitArgs) {
5706 * Emit code for each argument in order, then emit the JSOP_*CALL or
5707 * JSOP_NEW bytecode with a two-byte immediate telling how many args
5708 * were pushed on the operand stack.
5710 bool oldEmittingForInit = bce->emittingForInit;
5711 bce->emittingForInit = false;
5712 if (!spread) {
5713 for (ParseNode* pn3 = pn2->pn_next; pn3; pn3 = pn3->pn_next) {
5714 if (!EmitTree(cx, bce, pn3))
5715 return false;
5717 } else {
// Spread call: collect all arguments into a single array operand.
5718 if (!EmitArray(cx, bce, pn2->pn_next, argc))
5719 return false;
5721 bce->emittingForInit = oldEmittingForInit;
5724 if (!spread) {
5725 if (EmitCall(cx, bce, pn->getOp(), argc, pn) < 0)
5726 return false;
5727 } else {
5728 if (Emit1(cx, bce, pn->getOp()) < 0)
5729 return false;
5731 CheckTypeSet(cx, bce, pn->getOp());
5732 if (pn->isOp(JSOP_EVAL) || pn->isOp(JSOP_SPREADEVAL)) {
// Record the call site's line so eval'd code reports sensible locations.
5733 uint32_t lineNum = bce->parser->tokenStream.srcCoords.lineNum(pn->pn_pos.begin);
5734 EMIT_UINT16_IMM_OP(JSOP_LINENO, lineNum);
5736 if (pn->pn_xflags & PNX_SETCALL) {
5737 if (Emit1(cx, bce, JSOP_SETCALL) < 0)
5738 return false;
5740 return true;
/*
 * Emit short-circuiting && / || chains.
 *
 * Each operand except the last is followed by a JSOP_BACKPATCH placeholder
 * jump plus a JSOP_POP; after the final operand is emitted, every
 * placeholder is rewritten to the real JSOP_AND/JSOP_OR opcode with its
 * offset pointing past the end of the chain.  Binary (two-operand) nodes
 * take the simple path; longer left-associative chains are handled as a
 * PN_LIST with a linked chain of backpatch jumps.
 */
5743 static bool
5744 EmitLogical(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
5747 * JSOP_OR converts the operand on the stack to boolean, leaves the original
5748 * value on the stack and jumps if true; otherwise it falls into the next
5749 * bytecode, which pops the left operand and then evaluates the right operand.
5750 * The jump goes around the right operand evaluation.
5752 * JSOP_AND converts the operand on the stack to boolean and jumps if false;
5753 * otherwise it falls into the right operand's bytecode.
5756 if (pn->isArity(PN_BINARY)) {
5757 if (!EmitTree(cx, bce, pn->pn_left))
5758 return false;
5759 ptrdiff_t top = EmitJump(cx, bce, JSOP_BACKPATCH, 0);
5760 if (top < 0)
5761 return false;
5762 if (Emit1(cx, bce, JSOP_POP) < 0)
5763 return false;
5764 if (!EmitTree(cx, bce, pn->pn_right))
5765 return false;
// Patch the placeholder: set the jump target and the real AND/OR opcode.
5766 ptrdiff_t off = bce->offset();
5767 jsbytecode* pc = bce->code(top);
5768 SET_JUMP_OFFSET(pc, off - top);
5769 *pc = pn->getOp();
5770 return true;
5773 JS_ASSERT(pn->isArity(PN_LIST));
5774 JS_ASSERT(pn->pn_head->pn_next->pn_next);
5776 /* Left-associative operator chain: avoid too much recursion. */
5777 ParseNode* pn2 = pn->pn_head;
5778 if (!EmitTree(cx, bce, pn2))
5779 return false;
5780 ptrdiff_t top = EmitJump(cx, bce, JSOP_BACKPATCH, 0);
5781 if (top < 0)
5782 return false;
5783 if (Emit1(cx, bce, JSOP_POP) < 0)
5784 return false;
5786 /* Emit nodes between the head and the tail. */
// Thread the backpatch jumps together: each placeholder's offset field
// temporarily stores the delta to the next placeholder in the chain.
5787 ptrdiff_t jmp = top;
5788 while ((pn2 = pn2->pn_next)->pn_next) {
5789 if (!EmitTree(cx, bce, pn2))
5790 return false;
5791 ptrdiff_t off = EmitJump(cx, bce, JSOP_BACKPATCH, 0);
5792 if (off < 0)
5793 return false;
5794 if (Emit1(cx, bce, JSOP_POP) < 0)
5795 return false;
5796 SET_JUMP_OFFSET(bce->code(jmp), off - jmp);
5797 jmp = off;
5799 if (!EmitTree(cx, bce, pn2))
5800 return false;
5802 pn2 = pn->pn_head;
5803 ptrdiff_t off = bce->offset();
// Walk the threaded chain, rewriting each placeholder to the real op with
// its final target (the end of the whole chain).
5804 do {
5805 jsbytecode* pc = bce->code(top);
5806 ptrdiff_t tmp = GET_JUMP_OFFSET(pc);
5807 SET_JUMP_OFFSET(pc, off - top);
5808 *pc = pn->getOp();
5809 top += tmp;
5810 } while ((pn2 = pn2->pn_next)->pn_next);
5812 return true;
5816 * Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See
5817 * the comment on EmitSwitch.
/*
 * Emit ++/-- (pre or post), specialized on the lvalue kind: property,
 * element, call (PNX_SETCALL — emitted to throw at runtime), or name.
 * Name lvalues further dispatch on the op BindNameToSlot chose (slot-based
 * vs. atom-based, settable vs. read-only).  For const operands, the value
 * is coerced with JSOP_POS and, for prefix forms, the inc/dec arithmetic is
 * still performed so the expression yields the right value.
 */
5819 MOZ_NEVER_INLINE static bool
5820 EmitIncOrDec(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
5822 /* Emit lvalue-specialized code for ++/-- operators. */
5823 ParseNode* pn2 = pn->pn_kid;
5824 switch (pn2->getKind()) {
5825 case PNK_DOT:
5826 if (!EmitPropIncDec(cx, pn, bce))
5827 return false;
5828 break;
5829 case PNK_ELEM:
5830 if (!EmitElemIncDec(cx, pn, bce))
5831 return false;
5832 break;
5833 case PNK_CALL:
5834 JS_ASSERT(pn2->pn_xflags & PNX_SETCALL);
5835 if (!EmitTree(cx, bce, pn2))
5836 return false;
5837 break;
5838 default:
5839 JS_ASSERT(pn2->isKind(PNK_NAME));
5840 pn2->setOp(JSOP_SETNAME);
5841 if (!BindNameToSlot(cx, bce, pn2))
5842 return false;
5843 JSOp op = pn2->getOp();
// maySet: whether BindNameToSlot resolved to a writable binding.
5844 bool maySet;
5845 switch (op) {
5846 case JSOP_SETLOCAL:
5847 case JSOP_SETARG:
5848 case JSOP_SETALIASEDVAR:
5849 case JSOP_SETNAME:
5850 case JSOP_SETGNAME:
5851 maySet = true;
5852 break;
5853 default:
5854 maySet = false;
5856 if (op == JSOP_CALLEE) {
5857 if (Emit1(cx, bce, op) < 0)
5858 return false;
5859 } else if (!pn2->pn_cookie.isFree()) {
// Binding resolved to a known slot: use the var-op forms.
5860 if (maySet) {
5861 if (!EmitVarIncDec(cx, pn, bce))
5862 return false;
5863 } else {
5864 if (!EmitVarOp(cx, pn2, op, bce))
5865 return false;
5867 } else {
5868 JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
5869 if (maySet) {
5870 if (!EmitNameIncDec(cx, pn, bce))
5871 return false;
5872 } else {
5873 if (!EmitAtomOp(cx, pn2, op, bce))
5874 return false;
5876 break;
// Read-only (const) operand: coerce to number; prefix forms still apply
// the inc/dec arithmetic so the expression's value is correct.
5878 if (pn2->isConst()) {
5879 if (Emit1(cx, bce, JSOP_POS) < 0)
5880 return false;
5881 bool post;
5882 JSOp binop = GetIncDecInfo(pn->getKind(), &post);
5883 if (!post) {
5884 if (Emit1(cx, bce, JSOP_ONE) < 0)
5885 return false;
5886 if (Emit1(cx, bce, binop) < 0)
5887 return false;
5891 return true;
5895 * Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See
5896 * the comment on EmitSwitch.
/*
 * Emit a labeled statement: JSOP_LABEL (patched afterwards to jump past the
 * labeled statement), then the statement itself under an STMT_LABEL
 * statement-info entry so break/continue can find the label.
 */
5898 MOZ_NEVER_INLINE static bool
5899 EmitLabeledStatement(ExclusiveContext* cx, BytecodeEmitter* bce, const LabeledStatement* pn)
5902 * Emit a JSOP_LABEL instruction. The argument is the offset to the statement
5903 * following the labeled statement.
5905 jsatomid index;
5906 if (!bce->makeAtomIndex(pn->label(), &index))
5907 return false;
5909 ptrdiff_t top = EmitJump(cx, bce, JSOP_LABEL, 0);
5910 if (top < 0)
5911 return false;
5913 /* Emit code for the labeled statement. */
5914 StmtInfoBCE stmtInfo(cx);
5915 PushStatementBCE(bce, &stmtInfo, STMT_LABEL, bce->offset());
5916 stmtInfo.label = pn->label();
5917 if (!EmitTree(cx, bce, pn->statement()))
5918 return false;
5919 if (!PopStatementBCE(cx, bce))
5920 return false;
5922 /* Patch the JSOP_LABEL offset. */
5923 SetJumpOffsetAt(bce, top);
5924 return true;
/*
 * Emit a synthesized statement sequence (STMT_SEQ) — like EmitStatementList
 * but pushed as STMT_SEQ rather than STMT_BLOCK.  A leading PNX_DESTRUCT
 * child is skipped, as in EmitStatementList.
 */
5927 static bool
5928 EmitSyntheticStatements(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn, ptrdiff_t top)
5930 JS_ASSERT(pn->isArity(PN_LIST));
5931 StmtInfoBCE stmtInfo(cx);
5932 PushStatementBCE(bce, &stmtInfo, STMT_SEQ, top);
5933 ParseNode* pn2 = pn->pn_head;
5934 if (pn->pn_xflags & PNX_DESTRUCT)
5935 pn2 = pn2->pn_next;
5936 for (; pn2; pn2 = pn2->pn_next) {
5937 if (!EmitTree(cx, bce, pn2))
5938 return false;
5940 return PopStatementBCE(cx, bce);
/*
 * Emit |cond ? then : else|: condition, JSOP_IFEQ to the else branch, then
 * branch, JSOP_GOTO over the else branch, else branch.  A SRC_COND srcnote
 * records the distance from the IFEQ to the GOTO for the decompiler.
 */
5943 static bool
5944 EmitConditionalExpression(ExclusiveContext* cx, BytecodeEmitter* bce, ConditionalExpression& conditional)
5946 /* Emit the condition, then branch if false to the else part. */
5947 if (!EmitTree(cx, bce, &conditional.condition()))
5948 return false;
5949 ptrdiff_t noteIndex = NewSrcNote(cx, bce, SRC_COND);
5950 if (noteIndex < 0)
5951 return false;
5952 ptrdiff_t beq = EmitJump(cx, bce, JSOP_IFEQ, 0);
5953 if (beq < 0 || !EmitTree(cx, bce, &conditional.thenExpression()))
5954 return false;
5956 /* Jump around else, fixup the branch, emit else, fixup jump. */
5957 ptrdiff_t jmp = EmitJump(cx, bce, JSOP_GOTO, 0);
5958 if (jmp < 0)
5959 return false;
5960 SetJumpOffsetAt(bce, beq);
5963 * Because each branch pushes a single value, but our stack budgeting
5964 * analysis ignores branches, we now have to adjust bce->stackDepth to
5965 * ignore the value pushed by the first branch. Execution will follow
5966 * only one path, so we must decrement bce->stackDepth.
5968 * Failing to do this will foil code, such as let expression and block
5969 * code generation, which must use the stack depth to compute local
5970 * stack indexes correctly.
5972 JS_ASSERT(bce->stackDepth > 0);
5973 bce->stackDepth--;
5974 if (!EmitTree(cx, bce, &conditional.elseExpression()))
5975 return false;
5976 SetJumpOffsetAt(bce, jmp);
5977 return SetSrcNoteOffset(cx, bce, noteIndex, 0, jmp - beq);
5981 * Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See
5982 * the comment on EmitSwitch.
/*
 * Emit an object literal.
 *
 * Fully-constant literals in singleton context go through
 * EmitSingletonInitialiser.  Otherwise, emit JSOP_NEWINIT plus an
 * init-op per property (JSOP_INITPROP/INITELEM and their getter/setter
 * variants, or JSOP_MUTATEPROTO for |__proto__: expr|).  In compile-and-go
 * mode a template object is built alongside, mirroring each plain property;
 * if it survives (no computed/getter/setter/proto members, not dictionary
 * mode), the initial JSOP_NEWINIT is rewritten in place to JSOP_NEWOBJECT
 * referencing that template's shape.
 */
5984 MOZ_NEVER_INLINE static bool
5985 EmitObject(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
5987 if (!(pn->pn_xflags & PNX_NONCONST) && pn->pn_head && bce->checkSingletonContext())
5988 return EmitSingletonInitialiser(cx, bce, pn);
5991 * Emit code for {p:a, '%q':b, 2:c} that is equivalent to constructing
5992 * a new object and defining (in source order) each property on the object
5993 * (or mutating the object's [[Prototype]], in the case of __proto__).
// Remember where JSOP_NEWINIT lands so it can be rewritten to JSOP_NEWOBJECT.
5995 ptrdiff_t offset = bce->offset();
5996 if (!EmitNewInit(cx, bce, JSProto_Object))
5997 return false;
6000 * Try to construct the shape of the object as we go, so we can emit a
6001 * JSOP_NEWOBJECT with the final shape instead.
6003 RootedObject obj(cx);
6004 if (bce->script->compileAndGo()) {
6005 gc::AllocKind kind = GuessObjectGCKind(pn->pn_count);
6006 obj = NewBuiltinClassInstance(cx, &JSObject::class_, kind, TenuredObject);
6007 if (!obj)
6008 return false;
6011 for (ParseNode* pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
6012 if (!UpdateSourceCoordNotes(cx, bce, pn2->pn_pos.begin))
6013 return false;
6015 /* Emit an index for t[2] for later consumption by JSOP_INITELEM. */
6016 ParseNode* pn3 = pn2->pn_left;
6017 bool isIndex = false;
6018 if (pn3->isKind(PNK_NUMBER)) {
6019 if (!EmitNumberOp(cx, pn3->pn_dval, bce))
6020 return false;
6021 isIndex = true;
6022 } else if (pn3->isKind(PNK_NAME) || pn3->isKind(PNK_STRING)) {
6023 // The parser already checked for atoms representing indexes and
6024 // used PNK_NUMBER instead, but also watch for ids which TI treats
6025 // as indexes for simpliciation of downstream analysis.
6026 jsid id = NameToId(pn3->pn_atom->asPropertyName());
6027 if (id != types::IdToTypeId(id)) {
6028 if (!EmitTree(cx, bce, pn3))
6029 return false;
6030 isIndex = true;
6032 } else {
// Computed property name: evaluate the key expression onto the stack.
6033 JS_ASSERT(pn3->isKind(PNK_COMPUTED_NAME));
6034 if (!EmitTree(cx, bce, pn3->pn_kid))
6035 return false;
6036 isIndex = true;
6039 /* Emit code for the property initializer. */
6040 if (!EmitTree(cx, bce, pn2->pn_right))
6041 return false;
6043 JSOp op = pn2->getOp();
6044 JS_ASSERT(op == JSOP_INITPROP ||
6045 op == JSOP_INITPROP_GETTER ||
6046 op == JSOP_INITPROP_SETTER);
// Accessor properties defeat the template object's shape prediction.
6048 if (op == JSOP_INITPROP_GETTER || op == JSOP_INITPROP_SETTER)
6049 obj = nullptr;
6051 if (isIndex) {
6052 obj = nullptr;
6053 switch (op) {
6054 case JSOP_INITPROP: op = JSOP_INITELEM; break;
6055 case JSOP_INITPROP_GETTER: op = JSOP_INITELEM_GETTER; break;
6056 case JSOP_INITPROP_SETTER: op = JSOP_INITELEM_SETTER; break;
6057 default: MOZ_CRASH("Invalid op");
6059 if (Emit1(cx, bce, op) < 0)
6060 return false;
6061 } else {
6062 JS_ASSERT(pn3->isKind(PNK_NAME) || pn3->isKind(PNK_STRING));
6064 // If we have { __proto__: expr }, implement prototype mutation.
6065 if (op == JSOP_INITPROP && pn3->pn_atom == cx->names().proto) {
6066 obj = nullptr;
6067 if (Emit1(cx, bce, JSOP_MUTATEPROTO) < 0)
6068 return false;
6069 continue;
6072 jsatomid index;
6073 if (!bce->makeAtomIndex(pn3->pn_atom, &index))
6074 return false;
6076 MOZ_ASSERT(op == JSOP_INITPROP ||
6077 op == JSOP_INITPROP_GETTER ||
6078 op == JSOP_INITPROP_SETTER);
// Mirror the property on the template object to grow its shape.
6080 if (obj) {
6081 JS_ASSERT(!obj->inDictionaryMode());
6082 Rooted<jsid> id(cx, AtomToId(pn3->pn_atom));
6083 RootedValue undefinedValue(cx, UndefinedValue());
6084 if (!DefineNativeProperty(cx, obj, id, undefinedValue, nullptr,
6085 nullptr, JSPROP_ENUMERATE))
6087 return false;
6089 if (obj->inDictionaryMode())
6090 obj = nullptr;
6093 if (!EmitIndex32(cx, op, index, bce))
6094 return false;
6098 if (Emit1(cx, bce, JSOP_ENDINIT) < 0)
6099 return false;
6101 if (obj) {
6103 * The object survived and has a predictable shape: update the original
6104 * bytecode.
6106 ObjectBox* objbox = bce->parser->newObjectBox(obj);
6107 if (!objbox)
6108 return false;
6110 static_assert(JSOP_NEWINIT_LENGTH == JSOP_NEWOBJECT_LENGTH,
6111 "newinit and newobject must have equal length to edit in-place");
// Overwrite JSOP_NEWINIT in place with JSOP_NEWOBJECT + 32-bit object index.
6113 uint32_t index = bce->objectList.add(objbox);
6114 jsbytecode* code = bce->code(offset);
6115 code[0] = JSOP_NEWOBJECT;
6116 code[1] = jsbytecode(index >> 24);
6117 code[2] = jsbytecode(index >> 16);
6118 code[3] = jsbytecode(index >> 8);
6119 code[4] = jsbytecode(index);
6122 return true;
/*
 * Emit an array comprehension: create the result array with JSOP_NEWINIT,
 * then emit the comprehension's PNK_FOR tree, communicating the array's
 * stack slot to the nested PNK_ARRAYPUSH case through bce->arrayCompDepth
 * (saved and restored around the traversal).
 */
6125 static bool
6126 EmitArrayComp(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
6128 if (!EmitNewInit(cx, bce, JSProto_Array))
6129 return false;
6132 * Pass the new array's stack index to the PNK_ARRAYPUSH case via
6133 * bce->arrayCompDepth, then simply traverse the PNK_FOR node and
6134 * its kids under pn2 to generate this comprehension.
6136 JS_ASSERT(bce->stackDepth > 0);
6137 uint32_t saveDepth = bce->arrayCompDepth;
6138 bce->arrayCompDepth = (uint32_t) (bce->stackDepth - 1);
6139 if (!EmitTree(cx, bce, pn->pn_head))
6140 return false;
6141 bce->arrayCompDepth = saveDepth;
6143 /* Emit the usual op needed for decompilation. */
6144 return Emit1(cx, bce, JSOP_ENDINIT) >= 0;
6148 * EmitSpread expects the current index (I) of the array, the array itself and the iterator to be
6149 * on the stack in that order (iterator on the bottom).
6150 * It will pop the iterator and I, then iterate over the iterator by calling |.next()|
6151 * and put the results into the I-th element of array with incrementing I, then
6152 * push the result I (it will be original I + iteration count).
6153 * The stack after iteration will look like |ARRAY INDEX|.
// Implemented as a for-of loop in STMT_SPREAD mode with no loop body node.
6155 static bool
6156 EmitSpread(ExclusiveContext* cx, BytecodeEmitter* bce)
6158 return EmitForOf(cx, bce, STMT_SPREAD, nullptr, -1);
/*
 * Emit an array literal [a, b, ...c].
 *
 * Emits JSOP_NEWARRAY with the minimum guaranteed length (count minus the
 * number of spread elements), then fills elements in source order:
 * JSOP_INITELEM_ARRAY with a fixed index before any spread is seen,
 * switching to a runtime INDEX on the stack (JSOP_INITELEM_INC / EmitSpread)
 * from the first spread element onward.  Elisions emit JSOP_HOLE.
 * |pn| is the first element node; |count| the total element count.
 */
6161 static bool
6162 EmitArray(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn, uint32_t count)
6165 * Emit code for [a, b, c] that is equivalent to constructing a new
6166 * array and in source order evaluating each element value and adding
6167 * it to the array, without invoking latent setters. We use the
6168 * JSOP_NEWINIT and JSOP_INITELEM_ARRAY bytecodes to ignore setters and
6169 * to avoid dup'ing and popping the array as each element is added, as
6170 * JSOP_SETELEM/JSOP_SETPROP would do.
6173 int32_t nspread = 0;
6174 for (ParseNode* elt = pn; elt; elt = elt->pn_next) {
6175 if (elt->isKind(PNK_SPREAD))
6176 nspread++;
6179 ptrdiff_t off = EmitN(cx, bce, JSOP_NEWARRAY, 3); // ARRAY
6180 if (off < 0)
6181 return false;
6182 CheckTypeSet(cx, bce, JSOP_NEWARRAY);
6183 jsbytecode* pc = bce->code(off);
6185 // For arrays with spread, this is a very pessimistic allocation, the
6186 // minimum possible final size.
6187 SET_UINT24(pc, count - nspread);
6189 ParseNode* pn2 = pn;
6190 jsatomid atomIndex;
6191 bool afterSpread = false;
6192 for (atomIndex = 0; pn2; atomIndex++, pn2 = pn2->pn_next) {
// First spread element: push the current index; from here on the index
// lives on the stack and is maintained at runtime.
6193 if (!afterSpread && pn2->isKind(PNK_SPREAD)) {
6194 afterSpread = true;
6195 if (!EmitNumberOp(cx, atomIndex, bce)) // ARRAY INDEX
6196 return false;
6198 if (!UpdateSourceCoordNotes(cx, bce, pn2->pn_pos.begin))
6199 return false;
6200 if (pn2->isKind(PNK_ELISION)) {
6201 if (Emit1(cx, bce, JSOP_HOLE) < 0)
6202 return false;
6203 } else {
6204 ParseNode* expr = pn2->isKind(PNK_SPREAD) ? pn2->pn_kid : pn2;
6205 if (!EmitTree(cx, bce, expr)) // ARRAY INDEX? VALUE
6206 return false;
6208 if (pn2->isKind(PNK_SPREAD)) {
// Rearrange to ITER ARRAY INDEX for EmitSpread's expected stack layout.
6209 if (!EmitIterator(cx, bce)) // ARRAY INDEX ITER
6210 return false;
6211 if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)2) < 0) // INDEX ITER ARRAY
6212 return false;
6213 if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)2) < 0) // ITER ARRAY INDEX
6214 return false;
6215 if (!EmitSpread(cx, bce)) // ARRAY INDEX
6216 return false;
6217 } else if (afterSpread) {
6218 if (Emit1(cx, bce, JSOP_INITELEM_INC) < 0)
6219 return false;
6220 } else {
6221 off = EmitN(cx, bce, JSOP_INITELEM_ARRAY, 3);
6222 if (off < 0)
6223 return false;
6224 SET_UINT24(bce->code(off), atomIndex);
6227 JS_ASSERT(atomIndex == count);
6228 if (afterSpread) {
// Drop the runtime INDEX, leaving just the finished ARRAY.
6229 if (Emit1(cx, bce, JSOP_POP) < 0) // ARRAY
6230 return false;
6233 /* Emit an op to finish the array and aid in decompilation. */
6234 return Emit1(cx, bce, JSOP_ENDINIT) >= 0;
/*
 * Emit a unary operator (including unary +/- and typeof): evaluate the
 * operand, then emit the operator's opcode.  |typeof expr| on a non-name
 * operand becomes JSOP_TYPEOFEXPR (names keep JSOP_TYPEOF, which must not
 * throw on unresolvable names).  emittingForInit is cleared around the
 * operand since it is ordinary expression context.
 */
6237 static bool
6238 EmitUnary(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
6240 if (!UpdateSourceCoordNotes(cx, bce, pn->pn_pos.begin))
6241 return false;
6242 /* Unary op, including unary +/-. */
6243 JSOp op = pn->getOp();
6244 ParseNode* pn2 = pn->pn_kid;
6246 if (op == JSOP_TYPEOF && !pn2->isKind(PNK_NAME))
6247 op = JSOP_TYPEOFEXPR;
6249 bool oldEmittingForInit = bce->emittingForInit;
6250 bce->emittingForInit = false;
6251 if (!EmitTree(cx, bce, pn2))
6252 return false;
6254 bce->emittingForInit = oldEmittingForInit;
6255 return Emit1(cx, bce, op) >= 0;
/*
 * Emit default-parameter initialization for a function's PNK_ARGSBODY list
 * (every child except the last, which is the body).  For each argument
 * flagged PND_DEFAULT, emits the equivalent of:
 *   if (arg === undefined) arg = <default expression>;
 * using GETARG / STRICTEQ / IFEQ and SETARG, with an SRC_IF note so Ion can
 * compile the branch.
 */
6258 static bool
6259 EmitDefaults(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
6261 JS_ASSERT(pn->isKind(PNK_ARGSBODY));
6263 ParseNode* arg, *pnlast = pn->last();
6264 for (arg = pn->pn_head; arg != pnlast; arg = arg->pn_next) {
6265 if (!(arg->pn_dflags & PND_DEFAULT))
6266 continue;
6267 if (!BindNameToSlot(cx, bce, arg))
6268 return false;
// Test |arg === undefined| to decide whether the default applies.
6269 if (!EmitVarOp(cx, arg, JSOP_GETARG, bce))
6270 return false;
6271 if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)
6272 return false;
6273 if (Emit1(cx, bce, JSOP_STRICTEQ) < 0)
6274 return false;
6275 // Emit source note to enable ion compilation.
6276 if (NewSrcNote(cx, bce, SRC_IF) < 0)
6277 return false;
6278 ptrdiff_t jump = EmitJump(cx, bce, JSOP_IFEQ, 0);
6279 if (jump < 0)
6280 return false;
6281 if (!EmitTree(cx, bce, arg->expr()))
6282 return false;
6283 if (!EmitVarOp(cx, arg, JSOP_SETARG, bce))
6284 return false;
6285 if (Emit1(cx, bce, JSOP_POP) < 0)
6286 return false;
// Patch the IFEQ to skip the default assignment when the arg was supplied.
6287 SET_JUMP_OFFSET(bce->code(jump), bce->offset() - jump)
6290 return true;
/*
 * Emit bytecode for the parse tree rooted at pn. This is the emitter's main
 * dispatch: each parse-node kind either has a dedicated Emit* helper or is
 * handled inline below. Returns false on error (which has been reported).
 */
bool
frontend::EmitTree(ExclusiveContext* cx, BytecodeEmitter* bce, ParseNode* pn)
{
    JS_CHECK_RECURSION(cx, return false);

    EmitLevelManager elm(bce);

    bool ok = true;
    ptrdiff_t top = bce->offset();
    pn->pn_offset = top;

    /* Emit notes to tell the current bytecode's source line number. */
    if (!UpdateLineNumberNotes(cx, bce, pn->pn_pos.begin))
        return false;

    switch (pn->getKind()) {
      case PNK_FUNCTION:
        ok = EmitFunc(cx, bce, pn);
        break;

      case PNK_ARGSBODY:
      {
        RootedFunction fun(cx, bce->sc->asFunctionBox()->function());
        ParseNode* pnlast = pn->last();

        // Carefully emit everything in the right order:
        // 1. Destructuring
        // 2. Defaults
        // 3. Functions
        ParseNode* pnchild = pnlast->pn_head;
        if (pnlast->pn_xflags & PNX_DESTRUCT) {
            // Assign the destructuring arguments before defining any functions,
            // see bug 419662.
            JS_ASSERT(pnchild->isKind(PNK_SEMI));
            JS_ASSERT(pnchild->pn_kid->isKind(PNK_VAR) || pnchild->pn_kid->isKind(PNK_CONST));
            if (!EmitTree(cx, bce, pnchild))
                return false;
            pnchild = pnchild->pn_next;
        }
        bool hasDefaults = bce->sc->asFunctionBox()->hasDefaults();
        if (hasDefaults) {
            ParseNode* rest = nullptr;
            bool restIsDefn = false;
            if (fun->hasRest()) {
                JS_ASSERT(!bce->sc->asFunctionBox()->argumentsHasLocalBinding());

                // Defaults with a rest parameter need special handling. The
                // rest parameter needs to be undefined while defaults are being
                // processed. To do this, we create the rest argument and let it
                // sit on the stack while processing defaults. The rest
                // parameter's slot is set to undefined for the course of
                // default processing.
                rest = pn->pn_head;
                while (rest->pn_next != pnlast)
                    rest = rest->pn_next;
                restIsDefn = rest->isDefn();
                if (Emit1(cx, bce, JSOP_REST) < 0)
                    return false;
                CheckTypeSet(cx, bce, JSOP_REST);

                // Only set the rest parameter if it's not aliased by a nested
                // function in the body.
                if (restIsDefn) {
                    if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)
                        return false;
                    if (!BindNameToSlot(cx, bce, rest))
                        return false;
                    if (!EmitVarOp(cx, rest, JSOP_SETARG, bce))
                        return false;
                    if (Emit1(cx, bce, JSOP_POP) < 0)
                        return false;
                }
            }
            if (!EmitDefaults(cx, bce, pn))
                return false;
            if (fun->hasRest()) {
                // Store the rest object (left on the stack above) into its
                // slot now that default processing is done, then pop it.
                if (restIsDefn && !EmitVarOp(cx, rest, JSOP_SETARG, bce))
                    return false;
                if (Emit1(cx, bce, JSOP_POP) < 0)
                    return false;
            }
        }
        for (ParseNode* pn2 = pn->pn_head; pn2 != pnlast; pn2 = pn2->pn_next) {
            // Only bind the parameter if it's not aliased by a nested function
            // in the body.
            if (!pn2->isDefn())
                continue;
            if (!BindNameToSlot(cx, bce, pn2))
                return false;
            if (pn2->pn_next == pnlast && fun->hasRest() && !hasDefaults) {
                // Fill rest parameter. We handled the case with defaults above.
                JS_ASSERT(!bce->sc->asFunctionBox()->argumentsHasLocalBinding());
                bce->switchToProlog();
                if (Emit1(cx, bce, JSOP_REST) < 0)
                    return false;
                CheckTypeSet(cx, bce, JSOP_REST);
                if (!EmitVarOp(cx, pn2, JSOP_SETARG, bce))
                    return false;
                if (Emit1(cx, bce, JSOP_POP) < 0)
                    return false;
                bce->switchToMain();
            }
        }
        if (pnlast->pn_xflags & PNX_FUNCDEFS) {
            // This block contains top-level function definitions. To ensure
            // that we emit the bytecode defining them before the rest of code
            // in the block we use a separate pass over functions. During the
            // main pass later the emitter will add JSOP_NOP with source notes
            // for the function to preserve the original functions position
            // when decompiling.
            //
            // Currently this is used only for functions, as compile-as-we go
            // mode for scripts does not allow separate emitter passes.
            for (ParseNode* pn2 = pnchild; pn2; pn2 = pn2->pn_next) {
                if (pn2->isKind(PNK_FUNCTION) && pn2->functionIsHoisted()) {
                    if (!EmitTree(cx, bce, pn2))
                        return false;
                }
            }
        }
        ok = EmitTree(cx, bce, pnlast);
        break;
      }

      case PNK_IF:
        ok = EmitIf(cx, bce, pn);
        break;

      case PNK_SWITCH:
        ok = EmitSwitch(cx, bce, pn);
        break;

      case PNK_WHILE:
        ok = EmitWhile(cx, bce, pn, top);
        break;

      case PNK_DOWHILE:
        ok = EmitDo(cx, bce, pn);
        break;

      case PNK_FOR:
        ok = EmitFor(cx, bce, pn, top);
        break;

      case PNK_BREAK:
        ok = EmitBreak(cx, bce, pn->as<BreakStatement>().label());
        break;

      case PNK_CONTINUE:
        ok = EmitContinue(cx, bce, pn->as<ContinueStatement>().label());
        break;

      case PNK_WITH:
        ok = EmitWith(cx, bce, pn);
        break;

      case PNK_TRY:
        if (!EmitTry(cx, bce, pn))
            return false;
        break;

      case PNK_CATCH:
        if (!EmitCatch(cx, bce, pn))
            return false;
        break;

      case PNK_VAR:
      case PNK_CONST:
        if (!EmitVariables(cx, bce, pn, InitializeVars))
            return false;
        break;

      case PNK_RETURN:
        ok = EmitReturn(cx, bce, pn);
        break;

      case PNK_YIELD_STAR:
        ok = EmitYieldStar(cx, bce, pn->pn_kid);
        break;

      case PNK_YIELD:
        JS_ASSERT(bce->sc->isFunctionBox());
        // Star generators wrap the yielded value in an iterator result
        // object; legacy generators yield the raw value.
        if (bce->sc->asFunctionBox()->isStarGenerator()) {
            if (!EmitPrepareIteratorResult(cx, bce))
                return false;
        }
        if (pn->pn_kid) {
            if (!EmitTree(cx, bce, pn->pn_kid))
                return false;
        } else {
            // `yield;` with no operand yields undefined.
            if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)
                return false;
        }
        if (bce->sc->asFunctionBox()->isStarGenerator()) {
            if (!EmitFinishIteratorResult(cx, bce, false))
                return false;
        }
        if (Emit1(cx, bce, JSOP_YIELD) < 0)
            return false;
        break;

      case PNK_STATEMENTLIST:
        ok = EmitStatementList(cx, bce, pn, top);
        break;

      case PNK_SEQ:
        ok = EmitSyntheticStatements(cx, bce, pn, top);
        break;

      case PNK_SEMI:
        ok = EmitStatement(cx, bce, pn);
        break;

      case PNK_LABEL:
        ok = EmitLabeledStatement(cx, bce, &pn->as<LabeledStatement>());
        break;

      case PNK_COMMA:
      {
        // Evaluate every operand, popping each value except the last.
        for (ParseNode* pn2 = pn->pn_head; ; pn2 = pn2->pn_next) {
            if (!UpdateSourceCoordNotes(cx, bce, pn2->pn_pos.begin))
                return false;
            if (!EmitTree(cx, bce, pn2))
                return false;
            if (!pn2->pn_next)
                break;
            if (Emit1(cx, bce, JSOP_POP) < 0)
                return false;
        }
        break;
      }

      case PNK_ASSIGN:
      case PNK_ADDASSIGN:
      case PNK_SUBASSIGN:
      case PNK_BITORASSIGN:
      case PNK_BITXORASSIGN:
      case PNK_BITANDASSIGN:
      case PNK_LSHASSIGN:
      case PNK_RSHASSIGN:
      case PNK_URSHASSIGN:
      case PNK_MULASSIGN:
      case PNK_DIVASSIGN:
      case PNK_MODASSIGN:
        if (!EmitAssignment(cx, bce, pn->pn_left, pn->getOp(), pn->pn_right))
            return false;
        break;

      case PNK_CONDITIONAL:
        ok = EmitConditionalExpression(cx, bce, pn->as<ConditionalExpression>());
        break;

      case PNK_OR:
      case PNK_AND:
        ok = EmitLogical(cx, bce, pn);
        break;

      case PNK_ADD:
      case PNK_SUB:
      case PNK_BITOR:
      case PNK_BITXOR:
      case PNK_BITAND:
      case PNK_STRICTEQ:
      case PNK_EQ:
      case PNK_STRICTNE:
      case PNK_NE:
      case PNK_LT:
      case PNK_LE:
      case PNK_GT:
      case PNK_GE:
      case PNK_IN:
      case PNK_INSTANCEOF:
      case PNK_LSH:
      case PNK_RSH:
      case PNK_URSH:
      case PNK_STAR:
      case PNK_DIV:
      case PNK_MOD:
        if (pn->isArity(PN_LIST)) {
            /* Left-associative operator chain: avoid too much recursion. */
            ParseNode* pn2 = pn->pn_head;
            if (!EmitTree(cx, bce, pn2))
                return false;
            JSOp op = pn->getOp();
            while ((pn2 = pn2->pn_next) != nullptr) {
                if (!EmitTree(cx, bce, pn2))
                    return false;
                if (Emit1(cx, bce, op) < 0)
                    return false;
            }
        } else {
            /* Binary operators that evaluate both operands unconditionally. */
            if (!EmitTree(cx, bce, pn->pn_left))
                return false;
            if (!EmitTree(cx, bce, pn->pn_right))
                return false;
            if (Emit1(cx, bce, pn->getOp()) < 0)
                return false;
        }
        break;

      case PNK_THROW:
      case PNK_TYPEOF:
      case PNK_VOID:
      case PNK_NOT:
      case PNK_BITNOT:
      case PNK_POS:
      case PNK_NEG:
        ok = EmitUnary(cx, bce, pn);
        break;

      case PNK_PREINCREMENT:
      case PNK_PREDECREMENT:
      case PNK_POSTINCREMENT:
      case PNK_POSTDECREMENT:
        ok = EmitIncOrDec(cx, bce, pn);
        break;

      case PNK_DELETE:
        ok = EmitDelete(cx, bce, pn);
        break;

      case PNK_DOT:
        ok = EmitPropOp(cx, pn, JSOP_GETPROP, bce);
        break;

      case PNK_ELEM:
        ok = EmitElemOp(cx, pn, JSOP_GETELEM, bce);
        break;

      case PNK_NEW:
      case PNK_TAGGED_TEMPLATE:
      case PNK_CALL:
      case PNK_GENEXP:
        ok = EmitCallOrNew(cx, bce, pn);
        break;

      case PNK_LEXICALSCOPE:
        ok = EmitLexicalScope(cx, bce, pn);
        break;

      case PNK_LET:
        // `let` is either a statement/expression (binary arity) or a
        // declaration list, which is emitted like var/const.
        ok = pn->isArity(PN_BINARY)
             ? EmitLet(cx, bce, pn)
             : EmitVariables(cx, bce, pn, InitializeVars);
        break;

      case PNK_IMPORT:
      case PNK_EXPORT:
        // TODO: Implement emitter support for modules
        bce->reportError(nullptr, JSMSG_MODULES_NOT_IMPLEMENTED);
        return false;

      case PNK_ARRAYPUSH: {
        /*
         * The array object's stack index is in bce->arrayCompDepth. See below
         * under the array initialiser code generator for array comprehension
         * special casing. Note that the array object is a pure stack value,
         * unaliased by blocks, so we can EmitUnaliasedVarOp.
         */
        if (!EmitTree(cx, bce, pn->pn_kid))
            return false;
        if (!EmitDupAt(cx, bce, bce->arrayCompDepth))
            return false;
        if (Emit1(cx, bce, JSOP_ARRAYPUSH) < 0)
            return false;
        break;
      }

      case PNK_CALLSITEOBJ:
        ok = EmitCallSiteObject(cx, bce, pn);
        break;

      case PNK_ARRAY:
        if (!(pn->pn_xflags & PNX_NONCONST) && pn->pn_head) {
            if (bce->checkSingletonContext()) {
                // Bake in the object entirely if it will only be created once.
                ok = EmitSingletonInitialiser(cx, bce, pn);
                break;
            }

            // If the array consists entirely of primitive values, make a
            // template object with copy on write elements that can be reused
            // every time the initializer executes.
            RootedValue value(cx);
            if (bce->emitterMode != BytecodeEmitter::SelfHosting &&
                pn->pn_count != 0 &&
                pn->getConstantValue(cx, ParseNode::DontAllowNestedObjects, &value))
            {
                // Note: the type of the template object might not yet reflect
                // that the object has copy on write elements. When the
                // interpreter or JIT compiler fetches the template, it should
                // use types::GetOrFixupCopyOnWriteObject to make sure the type
                // for the template is accurate. We don't do this here as we
                // want to use types::InitObject, which requires a finished
                // script.
                JSObject* obj = &value.toObject();
                if (!ObjectElements::MakeElementsCopyOnWrite(cx, obj))
                    return false;

                ObjectBox* objbox = bce->parser->newObjectBox(obj);
                if (!objbox)
                    return false;

                ok = EmitObjectOp(cx, objbox, JSOP_NEWARRAY_COPYONWRITE, bce);
                break;
            }
        }

        ok = EmitArray(cx, bce, pn->pn_head, pn->pn_count);
        break;

      case PNK_ARRAYCOMP:
        ok = EmitArrayComp(cx, bce, pn);
        break;

      case PNK_OBJECT:
        ok = EmitObject(cx, bce, pn);
        break;

      case PNK_NAME:
        if (!EmitNameOp(cx, bce, pn, false))
            return false;
        break;

      case PNK_TEMPLATE_STRING_LIST:
        ok = EmitTemplateString(cx, bce, pn);
        break;

      case PNK_TEMPLATE_STRING:
      case PNK_STRING:
        ok = EmitAtomOp(cx, pn, JSOP_STRING, bce);
        break;

      case PNK_NUMBER:
        ok = EmitNumberOp(cx, pn->pn_dval, bce);
        break;

      case PNK_REGEXP:
        ok = EmitRegExp(cx, bce->regexpList.add(pn->as<RegExpLiteral>().objbox()), bce);
        break;

      case PNK_TRUE:
      case PNK_FALSE:
      case PNK_THIS:
      case PNK_NULL:
        // These literals carry their opcode directly on the node.
        if (Emit1(cx, bce, pn->getOp()) < 0)
            return false;
        break;

      case PNK_DEBUGGER:
        if (!UpdateSourceCoordNotes(cx, bce, pn->pn_pos.begin))
            return false;
        if (Emit1(cx, bce, JSOP_DEBUGGER) < 0)
            return false;
        break;

      case PNK_NOP:
        JS_ASSERT(pn->getArity() == PN_NULLARY);
        break;

      default:
        JS_ASSERT(0);
    }

    /* bce->emitLevel == 1 means we're last on the stack, so finish up. */
    if (ok && bce->emitLevel == 1) {
        if (!UpdateSourceCoordNotes(cx, bce, pn->pn_pos.end))
            return false;
    }

    return ok;
}
6767 static int
6768 AllocSrcNote(ExclusiveContext* cx, SrcNotesVector& notes)
6770 // Start it off moderately large to avoid repeated resizings early on.
6771 if (notes.capacity() == 0 && !notes.reserve(1024))
6772 return -1;
6774 jssrcnote dummy = 0;
6775 if (!notes.append(dummy)) {
6776 js_ReportOutOfMemory(cx);
6777 return -1;
6779 return notes.length() - 1;
/*
 * Append a new source note of the given type, preceded by as many SRC_XDELTA
 * notes as are needed to encode the bytecode distance from the previously
 * annotated offset. Zeroed slots for the type's offset operands are appended
 * too. Returns the note's index, or -1 on OOM.
 */
int
frontend::NewSrcNote(ExclusiveContext* cx, BytecodeEmitter* bce, SrcNoteType type)
{
    SrcNotesVector& notes = bce->notes();
    int index;

    index = AllocSrcNote(cx, notes);
    if (index < 0)
        return -1;

    /*
     * Compute delta from the last annotated bytecode's offset. If it's too
     * big to fit in sn, allocate one or more xdelta notes and reset sn.
     */
    ptrdiff_t offset = bce->offset();
    ptrdiff_t delta = offset - bce->lastNoteOffset();
    bce->current->lastNoteOffset = offset;
    if (delta >= SN_DELTA_LIMIT) {
        do {
            // Consume as much of the remaining delta as an xdelta can hold.
            ptrdiff_t xdelta = Min(delta, SN_XDELTA_MASK);
            SN_MAKE_XDELTA(&notes[index], xdelta);
            delta -= xdelta;
            index = AllocSrcNote(cx, notes);
            if (index < 0)
                return -1;
        } while (delta >= SN_DELTA_LIMIT);
    }

    /*
     * Initialize type and delta, then allocate the minimum number of notes
     * needed for type's arity. Usually, we won't need more, but if an offset
     * does take two bytes, SetSrcNoteOffset will grow notes.
     */
    SN_MAKE_NOTE(&notes[index], type, delta);
    for (int n = (int)js_SrcNoteSpec[type].arity; n > 0; n--) {
        if (NewSrcNote(cx, bce, SRC_NULL) < 0)
            return -1;
    }
    return index;
}
6824 frontend::NewSrcNote2(ExclusiveContext* cx, BytecodeEmitter* bce, SrcNoteType type, ptrdiff_t offset)
6826 int index;
6828 index = NewSrcNote(cx, bce, type);
6829 if (index >= 0) {
6830 if (!SetSrcNoteOffset(cx, bce, index, 0, offset))
6831 return -1;
6833 return index;
6837 frontend::NewSrcNote3(ExclusiveContext* cx, BytecodeEmitter* bce, SrcNoteType type, ptrdiff_t offset1,
6838 ptrdiff_t offset2)
6840 int index;
6842 index = NewSrcNote(cx, bce, type);
6843 if (index >= 0) {
6844 if (!SetSrcNoteOffset(cx, bce, index, 0, offset1))
6845 return -1;
6846 if (!SetSrcNoteOffset(cx, bce, index, 1, offset2))
6847 return -1;
6849 return index;
bool
frontend::AddToSrcNoteDelta(ExclusiveContext* cx, BytecodeEmitter* bce, jssrcnote* sn, ptrdiff_t delta)
{
    /*
     * Called only from FinishTakingSrcNotes to add to main script note
     * deltas, and only by a small positive amount.
     */
    JS_ASSERT(bce->current == &bce->main);
    JS_ASSERT((unsigned) delta < (unsigned) SN_XDELTA_LIMIT);

    ptrdiff_t base = SN_DELTA(sn);
    ptrdiff_t limit = SN_IS_XDELTA(sn) ? SN_XDELTA_LIMIT : SN_DELTA_LIMIT;
    ptrdiff_t newdelta = base + delta;
    if (newdelta < limit) {
        // The enlarged delta still fits in this note's delta field.
        SN_SET_DELTA(sn, newdelta);
    } else {
        // Overflow: insert an extended-delta note carrying the increment
        // before this note instead.
        jssrcnote xdelta;
        SN_MAKE_XDELTA(&xdelta, delta);
        if (!(sn = bce->main.notes.insert(sn, xdelta)))
            return false;
    }
    return true;
}
/*
 * Store the which'th offset operand of the source note at notes[index].
 * Offsets are stored either as a single byte or, when inflated, as a
 * flag byte plus three more bytes (big-endian). Returns false on error.
 */
static bool
SetSrcNoteOffset(ExclusiveContext* cx, BytecodeEmitter* bce, unsigned index, unsigned which,
                 ptrdiff_t offset)
{
    if (size_t(offset) > SN_MAX_OFFSET) {
        ReportStatementTooLarge(bce->parser->tokenStream, bce->topStmt);
        return false;
    }

    SrcNotesVector& notes = bce->notes();

    /* Find the offset numbered which (i.e., skip exactly which offsets). */
    jssrcnote* sn = notes.begin() + index;
    JS_ASSERT(SN_TYPE(sn) != SRC_XDELTA);
    JS_ASSERT((int) which < js_SrcNoteSpec[SN_TYPE(sn)].arity);
    for (sn++; which; sn++, which--) {
        if (*sn & SN_4BYTE_OFFSET_FLAG)
            sn += 3;
    }

    /*
     * See if the new offset requires three bytes either by being too big or if
     * the offset has already been inflated (in which case, we need to stay big
     * to not break the srcnote encoding if this isn't the last srcnote).
     */
    if (offset > (ptrdiff_t)SN_4BYTE_OFFSET_MASK || (*sn & SN_4BYTE_OFFSET_FLAG)) {
        /* Maybe this offset was already set to a four-byte value. */
        if (!(*sn & SN_4BYTE_OFFSET_FLAG)) {
            /* Insert three dummy bytes that will be overwritten shortly. */
            jssrcnote dummy = 0;
            if (!(sn = notes.insert(sn, dummy)) ||
                !(sn = notes.insert(sn, dummy)) ||
                !(sn = notes.insert(sn, dummy)))
            {
                js_ReportOutOfMemory(cx);
                return false;
            }
        }
        // Write the top three bytes; the low byte is stored below.
        *sn++ = (jssrcnote)(SN_4BYTE_OFFSET_FLAG | (offset >> 24));
        *sn++ = (jssrcnote)(offset >> 16);
        *sn++ = (jssrcnote)(offset >> 8);
    }
    *sn = (jssrcnote)offset;
    return true;
}
/*
 * Finish taking source notes in cx's notePool.
 * If successful, the final source note count is stored in the out outparam.
 */
bool
frontend::FinishTakingSrcNotes(ExclusiveContext* cx, BytecodeEmitter* bce, uint32_t* out)
{
    JS_ASSERT(bce->current == &bce->main);

    unsigned prologCount = bce->prolog.notes.length();
    if (prologCount && bce->prolog.currentLine != bce->firstLine) {
        // The prolog notes changed the line; reset it for the main notes.
        bce->switchToProlog();
        if (NewSrcNote2(cx, bce, SRC_SETLINE, (ptrdiff_t)bce->firstLine) < 0)
            return false;
        bce->switchToMain();
    } else {
        /*
         * Either no prolog srcnotes, or no line number change over prolog.
         * We don't need a SRC_SETLINE, but we may need to adjust the offset
         * of the first main note, by adding to its delta and possibly even
         * prepending SRC_XDELTA notes to it to account for prolog bytecodes
         * that came at and after the last annotated bytecode.
         */
        ptrdiff_t offset = bce->prologOffset() - bce->prolog.lastNoteOffset;
        JS_ASSERT(offset >= 0);
        if (offset > 0 && bce->main.notes.length() != 0) {
            /* NB: Use as much of the first main note's delta as we can. */
            jssrcnote* sn = bce->main.notes.begin();
            ptrdiff_t delta = SN_IS_XDELTA(sn)
                              ? SN_XDELTA_MASK - (*sn & SN_XDELTA_MASK)
                              : SN_DELTA_MASK - (*sn & SN_DELTA_MASK);
            if (offset < delta)
                delta = offset;
            for (;;) {
                if (!AddToSrcNoteDelta(cx, bce, sn, delta))
                    return false;
                offset -= delta;
                if (offset == 0)
                    break;
                // AddToSrcNoteDelta may have inserted before the first note;
                // re-fetch the head of the vector each round.
                delta = Min(offset, SN_XDELTA_MASK);
                sn = bce->main.notes.begin();
            }
        }
    }

    // The prolog count might have changed, so we can't reuse prologCount.
    // The + 1 is to account for the final SN_MAKE_TERMINATOR that is appended
    // when the notes are copied to their final destination by CopySrcNotes.
    *out = bce->prolog.notes.length() + bce->main.notes.length() + 1;
    return true;
}
6974 void
6975 frontend::CopySrcNotes(BytecodeEmitter* bce, jssrcnote* destination, uint32_t nsrcnotes)
6977 unsigned prologCount = bce->prolog.notes.length();
6978 unsigned mainCount = bce->main.notes.length();
6979 unsigned totalCount = prologCount + mainCount;
6980 MOZ_ASSERT(totalCount == nsrcnotes - 1);
6981 if (prologCount)
6982 PodCopy(destination, bce->prolog.notes.begin(), prologCount);
6983 PodCopy(destination + prologCount, bce->main.notes.begin(), mainCount);
6984 SN_MAKE_TERMINATOR(&destination[totalCount]);
6987 void
6988 CGConstList::finish(ConstArray* array)
6990 JS_ASSERT(length() == array->length);
6992 for (unsigned i = 0; i < length(); i++)
6993 array->vector[i] = list[i];
6997 * Find the index of the given object for code generator.
6999 * Since the emitter refers to each parsed object only once, for the index we
 * use the number of already indexed objects. We also add the object to a list
7001 * to convert the list to a fixed-size array when we complete code generation,
7002 * see js::CGObjectList::finish below.
7004 * Most of the objects go to BytecodeEmitter::objectList but for regexp we use
7005 * a separated BytecodeEmitter::regexpList. In this way the emitted index can
7006 * be directly used to store and fetch a reference to a cloned RegExp object
7007 * that shares the same JSRegExp private data created for the object literal in
7008 * objbox. We need a cloned object to hold lastIndex and other direct
7009 * properties that should not be shared among threads sharing a precompiled
7010 * function or script.
7012 * If the code being compiled is function code, allocate a reserved slot in
7013 * the cloned function object that shares its precompiled script with other
7014 * cloned function objects and with the compiler-created clone-parent. There
7015 * are nregexps = script->regexps()->length such reserved slots in each
7016 * function object cloned from fun->object. NB: during compilation, a funobj
7017 * slots element must never be allocated, because JSObject::allocSlot could
7018 * hand out one of the slots that should be given to a regexp clone.
7020 * If the code being compiled is global code, the cloned regexp are stored in
7021 * fp->vars slot and to protect regexp slots from GC we set fp->nvars to
7022 * nregexps.
7024 * The slots initially contain undefined or null. We populate them lazily when
7025 * JSOP_REGEXP is executed for the first time.
7027 * Why clone regexp objects? ECMA specifies that when a regular expression
7028 * literal is scanned, a RegExp object is created. In the spec, compilation
7029 * and execution happen indivisibly, but in this implementation and many of
7030 * its embeddings, code is precompiled early and re-executed in multiple
7031 * threads, or using multiple global objects, or both, for efficiency.
7033 * In such cases, naively following ECMA leads to wrongful sharing of RegExp
7034 * objects, which makes for collisions on the lastIndex property (especially
7035 * for global regexps) and on any ad-hoc properties. Also, __proto__ refers to
7036 * the pre-compilation prototype, a pigeon-hole problem for instanceof tests.
7038 unsigned
7039 CGObjectList::add(ObjectBox* objbox)
7041 JS_ASSERT(!objbox->emitLink);
7042 objbox->emitLink = lastbox;
7043 lastbox = objbox;
7044 return length++;
7047 unsigned
7048 CGObjectList::indexOf(JSObject* obj)
7050 JS_ASSERT(length > 0);
7051 unsigned index = length - 1;
7052 for (ObjectBox* box = lastbox; box->object != obj; box = box->emitLink)
7053 index--;
7054 return index;
7057 void
7058 CGObjectList::finish(ObjectArray* array)
7060 JS_ASSERT(length <= INDEX_LIMIT);
7061 JS_ASSERT(length == array->length);
7063 js::HeapPtrObject* cursor = array->vector + array->length;
7064 ObjectBox* objbox = lastbox;
7065 do {
7066 --cursor;
7067 JS_ASSERT(!*cursor);
7068 *cursor = objbox->object;
7069 } while ((objbox = objbox->emitLink) != nullptr);
7070 JS_ASSERT(cursor == array->vector);
7073 ObjectBox*
7074 CGObjectList::find(uint32_t index)
7076 JS_ASSERT(index < length);
7077 ObjectBox* box = lastbox;
7078 for (unsigned n = length - 1; n > index; n--)
7079 box = box->emitLink;
7080 return box;
7083 bool
7084 CGTryNoteList::append(JSTryNoteKind kind, uint32_t stackDepth, size_t start, size_t end)
7086 JS_ASSERT(start <= end);
7087 JS_ASSERT(size_t(uint32_t(start)) == start);
7088 JS_ASSERT(size_t(uint32_t(end)) == end);
7090 JSTryNote note;
7091 note.kind = kind;
7092 note.stackDepth = stackDepth;
7093 note.start = uint32_t(start);
7094 note.length = uint32_t(end - start);
7096 return list.append(note);
7099 void
7100 CGTryNoteList::finish(TryNoteArray* array)
7102 JS_ASSERT(length() == array->length);
7104 for (unsigned i = 0; i < length(); i++)
7105 array->vector[i] = list[i];
7108 bool
7109 CGBlockScopeList::append(uint32_t scopeObject, uint32_t offset, uint32_t parent)
7111 BlockScopeNote note;
7112 mozilla::PodZero(&note);
7114 note.index = scopeObject;
7115 note.start = offset;
7116 note.parent = parent;
7118 return list.append(note);
/*
 * Return the index of the nearest enclosing scope note that is still open
 * (length == 0) at the position where note `index` starts, or
 * BlockScopeNote::NoBlockScopeIndex if there is none.
 */
uint32_t
CGBlockScopeList::findEnclosingScope(uint32_t index)
{
    JS_ASSERT(index < length());
    JS_ASSERT(list[index].index != BlockScopeNote::NoBlockScopeIndex);

    DebugOnly<uint32_t> pos = list[index].start;
    while (index--) {
        JS_ASSERT(list[index].start <= pos);
        if (list[index].length == 0) {
            // We are looking for the nearest enclosing live scope. If the
            // scope contains POS, it should still be open, so its length should
            // be zero.
            return list[index].index;
        } else {
            // Conversely, if the length is not zero, it should not contain
            // POS.
            JS_ASSERT(list[index].start + list[index].length <= pos);
        }
    }

    return BlockScopeNote::NoBlockScopeIndex;
}
7145 void
7146 CGBlockScopeList::recordEnd(uint32_t index, uint32_t offset)
7148 JS_ASSERT(index < length());
7149 JS_ASSERT(offset >= list[index].start);
7150 JS_ASSERT(list[index].length == 0);
7152 list[index].length = offset - list[index].start;
7155 void
7156 CGBlockScopeList::finish(BlockScopeArray* array)
7158 JS_ASSERT(length() == array->length);
7160 for (unsigned i = 0; i < length(); i++)
7161 array->vector[i] = list[i];
/*
 * We should try to get rid of offsetBias (always 0 or 1, where 1 is
 * JSOP_{NOP,POP}_LENGTH), which is used only by SRC_FOR.
 */
/* Per-type name/arity table for source notes, indexed by SrcNoteType. */
const JSSrcNoteSpec js_SrcNoteSpec[] = {
#define DEFINE_SRC_NOTE_SPEC(sym, name, arity) { name, arity },
    FOR_EACH_SRC_NOTE_TYPE(DEFINE_SRC_NOTE_SPEC)
#undef DEFINE_SRC_NOTE_SPEC
};
7174 static int
7175 SrcNoteArity(jssrcnote* sn)
7177 JS_ASSERT(SN_TYPE(sn) < SRC_LAST);
7178 return js_SrcNoteSpec[SN_TYPE(sn)].arity;
7181 JS_FRIEND_API(unsigned)
7182 js_SrcNoteLength(jssrcnote* sn)
7184 unsigned arity;
7185 jssrcnote* base;
7187 arity = SrcNoteArity(sn);
7188 for (base = sn++; arity; sn++, arity--) {
7189 if (*sn & SN_4BYTE_OFFSET_FLAG)
7190 sn += 3;
7192 return sn - base;
7195 JS_FRIEND_API(ptrdiff_t)
7196 js_GetSrcNoteOffset(jssrcnote* sn, unsigned which)
7198 /* Find the offset numbered which (i.e., skip exactly which offsets). */
7199 JS_ASSERT(SN_TYPE(sn) != SRC_XDELTA);
7200 JS_ASSERT((int) which < SrcNoteArity(sn));
7201 for (sn++; which; sn++, which--) {
7202 if (*sn & SN_4BYTE_OFFSET_FLAG)
7203 sn += 3;
7205 if (*sn & SN_4BYTE_OFFSET_FLAG) {
7206 return (ptrdiff_t)(((uint32_t)(sn[0] & SN_4BYTE_OFFSET_MASK) << 24)
7207 | (sn[1] << 16)
7208 | (sn[2] << 8)
7209 | sn[3]);
7211 return (ptrdiff_t)*sn;