1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=8 sts=4 et sw=4 tw=99:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
8 * JS bytecode generation.
11 #include "frontend/BytecodeEmitter.h"
13 #include "mozilla/DebugOnly.h"
14 #include "mozilla/FloatingPoint.h"
15 #include "mozilla/PodOperations.h"
16 #include "mozilla/UniquePtr.h"
30 #include "asmjs/AsmJSLink.h"
31 #include "frontend/Parser.h"
32 #include "frontend/TokenStream.h"
33 #include "vm/Debugger.h"
35 #include "jsatominlines.h"
36 #include "jsobjinlines.h"
37 #include "jsscriptinlines.h"
39 #include "frontend/ParseMaps-inl.h"
40 #include "frontend/ParseNode-inl.h"
41 #include "vm/ScopeObject-inl.h"
44 using namespace js::gc
;
45 using namespace js::frontend
;
47 using mozilla::DebugOnly
;
48 using mozilla::NumberIsInt32
;
49 using mozilla::PodCopy
;
50 using mozilla::UniquePtr
;
53 SetSrcNoteOffset(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, unsigned index
, unsigned which
, ptrdiff_t offset
);
56 UpdateSourceCoordNotes(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, uint32_t offset
);
58 struct frontend::StmtInfoBCE
: public StmtInfoBase
60 StmtInfoBCE
* down
; /* info for enclosing statement */
61 StmtInfoBCE
* downScope
; /* next enclosing lexical scope */
63 ptrdiff_t update
; /* loop update offset (top if none) */
64 ptrdiff_t breaks
; /* offset of last break in loop */
65 ptrdiff_t continues
; /* offset of last continue in loop */
66 uint32_t blockScopeIndex
; /* index of scope in BlockScopeArray */
68 explicit StmtInfoBCE(ExclusiveContext
* cx
) : StmtInfoBase(cx
) {}
71 * To reuse space, alias two of the ptrdiff_t fields for use during
72 * try/catch/finally code generation and backpatching.
74 * Only a loop, switch, or label statement info record can have breaks and
75 * continues, and only a for loop has an update backpatch chain, so it's
76 * safe to overlay these for the "trying" StmtTypes.
80 JS_ASSERT(type
== STMT_FINALLY
);
84 ptrdiff_t& guardJump() {
85 JS_ASSERT(type
== STMT_TRY
|| type
== STMT_FINALLY
);
93 struct LoopStmtInfo
: public StmtInfoBCE
95 int32_t stackDepth
; // Stack depth when this loop was pushed.
96 uint32_t loopDepth
; // Loop depth.
98 // Can we OSR into Ion from here? True unless there is non-loop state on the stack.
101 explicit LoopStmtInfo(ExclusiveContext
* cx
) : StmtInfoBCE(cx
) {}
103 static LoopStmtInfo
* fromStmtInfo(StmtInfoBCE
* stmt
) {
104 JS_ASSERT(stmt
->isLoop());
105 return static_cast<LoopStmtInfo
*>(stmt
);
109 } // anonymous namespace
111 BytecodeEmitter::BytecodeEmitter(BytecodeEmitter
* parent
,
112 Parser
<FullParseHandler
>* parser
, SharedContext
* sc
,
113 HandleScript script
, bool insideEval
, HandleScript evalCaller
,
114 bool hasGlobalScope
, uint32_t lineNum
, EmitterMode emitterMode
)
117 script(sc
->context
, script
),
118 prolog(sc
->context
, lineNum
),
119 main(sc
->context
, lineNum
),
122 evalCaller(evalCaller
),
124 topScopeStmt(nullptr),
125 staticScope(sc
->context
),
126 atomIndices(sc
->context
),
128 stackDepth(0), maxStackDepth(0),
131 constList(sc
->context
),
132 tryNoteList(sc
->context
),
133 blockScopeList(sc
->context
),
135 hasSingletons(false),
136 emittingForInit(false),
137 emittingRunOnceLambda(false),
138 lazyRunOnceLambda(false),
139 insideEval(insideEval
),
140 hasGlobalScope(hasGlobalScope
),
141 emitterMode(emitterMode
)
143 JS_ASSERT_IF(evalCaller
, insideEval
);
147 BytecodeEmitter::init()
149 return atomIndices
.ensureMap(sc
->context
);
153 EmitCheck(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ptrdiff_t delta
)
155 ptrdiff_t offset
= bce
->code().length();
157 // Start it off moderately large to avoid repeated resizings early on.
158 if (bce
->code().capacity() == 0 && !bce
->code().reserve(1024))
161 jsbytecode dummy
= 0;
162 if (!bce
->code().appendN(dummy
, delta
)) {
163 js_ReportOutOfMemory(cx
);
170 UpdateDepth(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ptrdiff_t target
)
172 jsbytecode
* pc
= bce
->code(target
);
173 JSOp op
= (JSOp
) *pc
;
174 const JSCodeSpec
* cs
= &js_CodeSpec
[op
];
176 if (cs
->format
& JOF_TMPSLOT_MASK
) {
178 * An opcode may temporarily consume stack space during execution.
179 * Account for this in maxStackDepth separately from uses/defs here.
181 uint32_t depth
= (uint32_t) bce
->stackDepth
+
182 ((cs
->format
& JOF_TMPSLOT_MASK
) >> JOF_TMPSLOT_SHIFT
);
183 if (depth
> bce
->maxStackDepth
)
184 bce
->maxStackDepth
= depth
;
187 int nuses
= StackUses(nullptr, pc
);
188 int ndefs
= StackDefs(nullptr, pc
);
190 bce
->stackDepth
-= nuses
;
191 JS_ASSERT(bce
->stackDepth
>= 0);
192 bce
->stackDepth
+= ndefs
;
193 if ((uint32_t)bce
->stackDepth
> bce
->maxStackDepth
)
194 bce
->maxStackDepth
= bce
->stackDepth
;
198 frontend::Emit1(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, JSOp op
)
200 ptrdiff_t offset
= EmitCheck(cx
, bce
, 1);
204 jsbytecode
* code
= bce
->code(offset
);
205 code
[0] = jsbytecode(op
);
206 UpdateDepth(cx
, bce
, offset
);
211 frontend::Emit2(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, JSOp op
, jsbytecode op1
)
213 ptrdiff_t offset
= EmitCheck(cx
, bce
, 2);
217 jsbytecode
* code
= bce
->code(offset
);
218 code
[0] = jsbytecode(op
);
220 UpdateDepth(cx
, bce
, offset
);
225 frontend::Emit3(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, JSOp op
, jsbytecode op1
,
228 /* These should filter through EmitVarOp. */
229 JS_ASSERT(!IsArgOp(op
));
230 JS_ASSERT(!IsLocalOp(op
));
232 ptrdiff_t offset
= EmitCheck(cx
, bce
, 3);
236 jsbytecode
* code
= bce
->code(offset
);
237 code
[0] = jsbytecode(op
);
240 UpdateDepth(cx
, bce
, offset
);
245 frontend::EmitN(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, JSOp op
, size_t extra
)
247 ptrdiff_t length
= 1 + (ptrdiff_t)extra
;
248 ptrdiff_t offset
= EmitCheck(cx
, bce
, length
);
252 jsbytecode
* code
= bce
->code(offset
);
253 code
[0] = jsbytecode(op
);
254 /* The remaining |extra| bytes are set by the caller */
257 * Don't UpdateDepth if op's use-count comes from the immediate
258 * operand yet to be stored in the extra bytes after op.
260 if (js_CodeSpec
[op
].nuses
>= 0)
261 UpdateDepth(cx
, bce
, offset
);
267 EmitJump(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, JSOp op
, ptrdiff_t off
)
269 ptrdiff_t offset
= EmitCheck(cx
, bce
, 5);
273 jsbytecode
* code
= bce
->code(offset
);
274 code
[0] = jsbytecode(op
);
275 SET_JUMP_OFFSET(code
, off
);
276 UpdateDepth(cx
, bce
, offset
);
281 EmitCall(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, JSOp op
, uint16_t argc
, ParseNode
* pn
=nullptr)
283 if (pn
&& !UpdateSourceCoordNotes(cx
, bce
, pn
->pn_pos
.begin
))
285 return Emit3(cx
, bce
, op
, ARGC_HI(argc
), ARGC_LO(argc
));
288 // Dup the var in operand stack slot "slot". The first item on the operand
289 // stack is one slot past the last fixed slot. The last (most recent) item is
290 // slot bce->stackDepth - 1.
292 // The instruction that is written (JSOP_DUPAT) switches the depth around so
293 // that it is addressed from the sp instead of from the fp. This is useful when
294 // you don't know the size of the fixed stack segment (nfixed), as is the case
295 // when compiling scripts (because each statement is parsed and compiled
296 // separately, but they all together form one script with one fixed stack
299 EmitDupAt(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, unsigned slot
)
301 JS_ASSERT(slot
< unsigned(bce
->stackDepth
));
302 // The slot's position on the operand stack, measured from the top.
303 unsigned slotFromTop
= bce
->stackDepth
- 1 - slot
;
304 if (slotFromTop
>= JS_BIT(24)) {
305 bce
->reportError(nullptr, JSMSG_TOO_MANY_LOCALS
);
308 ptrdiff_t off
= EmitN(cx
, bce
, JSOP_DUPAT
, 3);
311 jsbytecode
* pc
= bce
->code(off
);
312 SET_UINT24(pc
, slotFromTop
);
316 /* XXX too many "... statement" L10N gaffes below -- fix via js.msg! */
317 const char js_with_statement_str
[] = "with statement";
318 const char js_finally_block_str
[] = "finally block";
319 const char js_script_str
[] = "script";
321 static const char * const statementName
[] = {
322 "label statement", /* LABEL */
323 "if statement", /* IF */
324 "else statement", /* ELSE */
325 "destructuring body", /* BODY */
326 "switch statement", /* SWITCH */
328 js_with_statement_str
, /* WITH */
329 "catch block", /* CATCH */
330 "try block", /* TRY */
331 js_finally_block_str
, /* FINALLY */
332 js_finally_block_str
, /* SUBROUTINE */
333 "do loop", /* DO_LOOP */
334 "for loop", /* FOR_LOOP */
335 "for/in loop", /* FOR_IN_LOOP */
336 "for/of loop", /* FOR_OF_LOOP */
337 "while loop", /* WHILE_LOOP */
338 "spread", /* SPREAD */
341 JS_STATIC_ASSERT(JS_ARRAY_LENGTH(statementName
) == STMT_LIMIT
);
344 StatementName(StmtInfoBCE
* topStmt
)
347 return js_script_str
;
348 return statementName
[topStmt
->type
];
352 ReportStatementTooLarge(TokenStream
& ts
, StmtInfoBCE
* topStmt
)
354 ts
.reportError(JSMSG_NEED_DIET
, StatementName(topStmt
));
358 * Emit a backpatch op with offset pointing to the previous jump of this type,
359 * so that we can walk back up the chain fixing up the op and jump offset.
362 EmitBackPatchOp(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ptrdiff_t* lastp
)
364 ptrdiff_t offset
, delta
;
366 offset
= bce
->offset();
367 delta
= offset
- *lastp
;
369 JS_ASSERT(delta
> 0);
370 return EmitJump(cx
, bce
, JSOP_BACKPATCH
, delta
);
373 static inline unsigned
374 LengthOfSetLine(unsigned line
)
376 return 1 /* SN_SETLINE */ + (line
> SN_4BYTE_OFFSET_MASK
? 4 : 1);
379 /* Updates line number notes, not column notes. */
381 UpdateLineNumberNotes(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, uint32_t offset
)
383 TokenStream
* ts
= &bce
->parser
->tokenStream
;
385 if (!ts
->srcCoords
.isOnThisLine(offset
, bce
->currentLine(), &onThisLine
))
386 return ts
->reportError(JSMSG_OUT_OF_MEMORY
);
388 unsigned line
= ts
->srcCoords
.lineNum(offset
);
389 unsigned delta
= line
- bce
->currentLine();
392 * Encode any change in the current source line number by using
393 * either several SRC_NEWLINE notes or just one SRC_SETLINE note,
394 * whichever consumes less space.
396 * NB: We handle backward line number deltas (possible with for
397 * loops where the update part is emitted after the body, but its
398 * line number is <= any line number in the body) here by letting
399 * unsigned delta_ wrap to a very large number, which triggers a
402 bce
->current
->currentLine
= line
;
403 bce
->current
->lastColumn
= 0;
404 if (delta
>= LengthOfSetLine(line
)) {
405 if (NewSrcNote2(cx
, bce
, SRC_SETLINE
, (ptrdiff_t)line
) < 0)
409 if (NewSrcNote(cx
, bce
, SRC_NEWLINE
) < 0)
411 } while (--delta
!= 0);
417 /* Updates the line number and column number information in the source notes. */
419 UpdateSourceCoordNotes(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, uint32_t offset
)
421 if (!UpdateLineNumberNotes(cx
, bce
, offset
))
424 uint32_t columnIndex
= bce
->parser
->tokenStream
.srcCoords
.columnIndex(offset
);
425 ptrdiff_t colspan
= ptrdiff_t(columnIndex
) - ptrdiff_t(bce
->current
->lastColumn
);
428 colspan
+= SN_COLSPAN_DOMAIN
;
429 } else if (colspan
>= SN_COLSPAN_DOMAIN
/ 2) {
430 // If the column span is so large that we can't store it, then just
431 // discard this information because column information would most
432 // likely be useless anyway once the column numbers are ~4000000.
433 // This has been known to happen with scripts that have been
434 // minimized and put into all one line.
437 if (NewSrcNote2(cx
, bce
, SRC_COLSPAN
, colspan
) < 0)
439 bce
->current
->lastColumn
= columnIndex
;
445 EmitLoopHead(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* nextpn
)
449 * Try to give the JSOP_LOOPHEAD the same line number as the next
450 * instruction. nextpn is often a block, in which case the next
451 * instruction typically comes from the first statement inside.
453 JS_ASSERT_IF(nextpn
->isKind(PNK_STATEMENTLIST
), nextpn
->isArity(PN_LIST
));
454 if (nextpn
->isKind(PNK_STATEMENTLIST
) && nextpn
->pn_head
)
455 nextpn
= nextpn
->pn_head
;
456 if (!UpdateSourceCoordNotes(cx
, bce
, nextpn
->pn_pos
.begin
))
460 return Emit1(cx
, bce
, JSOP_LOOPHEAD
);
464 EmitLoopEntry(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* nextpn
)
467 /* Update the line number, as for LOOPHEAD. */
468 JS_ASSERT_IF(nextpn
->isKind(PNK_STATEMENTLIST
), nextpn
->isArity(PN_LIST
));
469 if (nextpn
->isKind(PNK_STATEMENTLIST
) && nextpn
->pn_head
)
470 nextpn
= nextpn
->pn_head
;
471 if (!UpdateSourceCoordNotes(cx
, bce
, nextpn
->pn_pos
.begin
))
475 LoopStmtInfo
* loop
= LoopStmtInfo::fromStmtInfo(bce
->topStmt
);
476 JS_ASSERT(loop
->loopDepth
> 0);
478 uint8_t loopDepthAndFlags
= PackLoopEntryDepthHintAndFlags(loop
->loopDepth
, loop
->canIonOsr
);
479 return Emit2(cx
, bce
, JSOP_LOOPENTRY
, loopDepthAndFlags
) >= 0;
483 * If op is JOF_TYPESET (see the type barriers comment in jsinfer.h), reserve
484 * a type set to store its result.
487 CheckTypeSet(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, JSOp op
)
489 if (js_CodeSpec
[op
].format
& JOF_TYPESET
) {
490 if (bce
->typesetCount
< UINT16_MAX
)
496 * Macro to emit a bytecode followed by a uint16_t immediate operand stored in
499 * NB: We use cx and bce from our caller's lexical environment, and return
502 #define EMIT_UINT16_IMM_OP(op, i) \
504 if (Emit3(cx, bce, op, UINT16_HI(i), UINT16_LO(i)) < 0) \
506 CheckTypeSet(cx, bce, op); \
510 FlushPops(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, int* npops
)
512 JS_ASSERT(*npops
!= 0);
513 EMIT_UINT16_IMM_OP(JSOP_POPN
, *npops
);
519 PopIterator(ExclusiveContext
* cx
, BytecodeEmitter
* bce
)
521 if (Emit1(cx
, bce
, JSOP_ENDITER
) < 0)
528 class NonLocalExitScope
{
529 ExclusiveContext
* cx
;
530 BytecodeEmitter
* bce
;
531 const uint32_t savedScopeIndex
;
532 const int savedDepth
;
533 uint32_t openScopeIndex
;
535 NonLocalExitScope(const NonLocalExitScope
&) MOZ_DELETE
;
538 explicit NonLocalExitScope(ExclusiveContext
* cx_
, BytecodeEmitter
* bce_
)
541 savedScopeIndex(bce
->blockScopeList
.length()),
542 savedDepth(bce
->stackDepth
),
543 openScopeIndex(UINT32_MAX
) {
544 if (bce
->staticScope
) {
545 StmtInfoBCE
* stmt
= bce
->topStmt
;
548 if (stmt
->isNestedScope
) {
549 openScopeIndex
= stmt
->blockScopeIndex
;
557 ~NonLocalExitScope() {
558 for (uint32_t n
= savedScopeIndex
; n
< bce
->blockScopeList
.length(); n
++)
559 bce
->blockScopeList
.recordEnd(n
, bce
->offset());
560 bce
->stackDepth
= savedDepth
;
563 bool popScopeForNonLocalExit(uint32_t blockScopeIndex
) {
564 uint32_t scopeObjectIndex
= bce
->blockScopeList
.findEnclosingScope(blockScopeIndex
);
565 uint32_t parent
= openScopeIndex
;
567 if (!bce
->blockScopeList
.append(scopeObjectIndex
, bce
->offset(), parent
))
569 openScopeIndex
= bce
->blockScopeList
.length() - 1;
573 bool prepareForNonLocalJump(StmtInfoBCE
* toStmt
);
577 * Emit additional bytecode(s) for non-local jumps.
580 NonLocalExitScope::prepareForNonLocalJump(StmtInfoBCE
* toStmt
)
584 #define FLUSH_POPS() if (npops && !FlushPops(cx, bce, &npops)) return false
586 for (StmtInfoBCE
* stmt
= bce
->topStmt
; stmt
!= toStmt
; stmt
= stmt
->down
) {
587 switch (stmt
->type
) {
590 if (EmitBackPatchOp(cx
, bce
, &stmt
->gosubs()) < 0)
595 if (Emit1(cx
, bce
, JSOP_LEAVEWITH
) < 0)
597 JS_ASSERT(stmt
->isNestedScope
);
598 if (!popScopeForNonLocalExit(stmt
->blockScopeIndex
))
602 case STMT_FOR_OF_LOOP
:
606 case STMT_FOR_IN_LOOP
:
608 if (!PopIterator(cx
, bce
))
613 MOZ_ASSERT_UNREACHABLE("can't break/continue/return from inside a spread");
616 case STMT_SUBROUTINE
:
618 * There's a [exception or hole, retsub pc-index] pair on the
619 * stack that we need to pop.
627 if (stmt
->isBlockScope
) {
628 JS_ASSERT(stmt
->isNestedScope
);
629 StaticBlockObject
& blockObj
= stmt
->staticBlock();
630 if (Emit1(cx
, bce
, JSOP_DEBUGLEAVEBLOCK
) < 0)
632 if (!popScopeForNonLocalExit(stmt
->blockScopeIndex
))
634 if (blockObj
.needsClone()) {
635 if (Emit1(cx
, bce
, JSOP_POPBLOCKSCOPE
) < 0)
647 } // anonymous namespace
650 EmitGoto(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, StmtInfoBCE
* toStmt
, ptrdiff_t* lastp
,
651 SrcNoteType noteType
= SRC_NULL
)
653 NonLocalExitScope
nle(cx
, bce
);
655 if (!nle
.prepareForNonLocalJump(toStmt
))
658 if (noteType
!= SRC_NULL
) {
659 if (NewSrcNote(cx
, bce
, noteType
) < 0)
663 return EmitBackPatchOp(cx
, bce
, lastp
);
667 BackPatch(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ptrdiff_t last
, jsbytecode
* target
, jsbytecode op
)
669 jsbytecode
* pc
, *stop
;
670 ptrdiff_t delta
, span
;
672 pc
= bce
->code(last
);
673 stop
= bce
->code(-1);
675 delta
= GET_JUMP_OFFSET(pc
);
677 SET_JUMP_OFFSET(pc
, span
);
684 #define SET_STATEMENT_TOP(stmt, top) \
685 ((stmt)->update = (top), (stmt)->breaks = (stmt)->continues = (-1))
688 PushStatementInner(BytecodeEmitter
* bce
, StmtInfoBCE
* stmt
, StmtType type
, ptrdiff_t top
)
690 SET_STATEMENT_TOP(stmt
, top
);
691 PushStatement(bce
, stmt
, type
);
695 PushStatementBCE(BytecodeEmitter
* bce
, StmtInfoBCE
* stmt
, StmtType type
, ptrdiff_t top
)
697 PushStatementInner(bce
, stmt
, type
, top
);
698 JS_ASSERT(!stmt
->isLoop());
702 PushLoopStatement(BytecodeEmitter
* bce
, LoopStmtInfo
* stmt
, StmtType type
, ptrdiff_t top
)
704 PushStatementInner(bce
, stmt
, type
, top
);
705 JS_ASSERT(stmt
->isLoop());
707 LoopStmtInfo
* downLoop
= nullptr;
708 for (StmtInfoBCE
* outer
= stmt
->down
; outer
; outer
= outer
->down
) {
709 if (outer
->isLoop()) {
710 downLoop
= LoopStmtInfo::fromStmtInfo(outer
);
715 stmt
->stackDepth
= bce
->stackDepth
;
716 stmt
->loopDepth
= downLoop
? downLoop
->loopDepth
+ 1 : 1;
719 if (type
== STMT_SPREAD
)
721 else if (type
== STMT_FOR_OF_LOOP
)
723 else if (type
== STMT_FOR_IN_LOOP
)
728 MOZ_ASSERT(loopSlots
<= stmt
->stackDepth
);
731 stmt
->canIonOsr
= (downLoop
->canIonOsr
&&
732 stmt
->stackDepth
== downLoop
->stackDepth
+ loopSlots
);
734 stmt
->canIonOsr
= stmt
->stackDepth
== loopSlots
;
738 * Return the enclosing lexical scope, which is the innermost enclosing static
739 * block object or compiler created function.
742 EnclosingStaticScope(BytecodeEmitter
* bce
)
744 if (bce
->staticScope
)
745 return bce
->staticScope
;
747 if (!bce
->sc
->isFunctionBox()) {
748 JS_ASSERT(!bce
->parent
);
752 return bce
->sc
->asFunctionBox()->function();
#ifdef DEBUG
// Debug-only: true iff every variable in |obj| is marked aliased.
static bool
AllLocalsAliased(StaticBlockObject& obj)
{
    for (unsigned i = 0; i < obj.numVariables(); i++)
        if (!obj.isAliased(i))
            return false;
    return true;
}
#endif
767 ComputeAliasedSlots(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, Handle
<StaticBlockObject
*> blockObj
)
769 for (unsigned i
= 0; i
< blockObj
->numVariables(); i
++) {
770 Definition
* dn
= blockObj
->definitionParseNode(i
);
772 JS_ASSERT(dn
->isDefn());
773 if (!dn
->pn_cookie
.set(bce
->parser
->tokenStream
, dn
->pn_cookie
.level(),
774 blockObj
->blockIndexToLocalIndex(dn
->frameSlot())))
780 for (ParseNode
* pnu
= dn
->dn_uses
; pnu
; pnu
= pnu
->pn_link
) {
781 JS_ASSERT(pnu
->pn_lexdef
== dn
);
782 JS_ASSERT(!(pnu
->pn_dflags
& PND_BOUND
));
783 JS_ASSERT(pnu
->pn_cookie
.isFree());
787 blockObj
->setAliased(i
, bce
->isAliasedName(dn
));
790 JS_ASSERT_IF(bce
->sc
->allLocalsAliased(), AllLocalsAliased(*blockObj
));
796 EmitInternedObjectOp(ExclusiveContext
* cx
, uint32_t index
, JSOp op
, BytecodeEmitter
* bce
);
798 // In a function, block-scoped locals go after the vars, and form part of the
799 // fixed part of a stack frame. Outside a function, there are no fixed vars,
800 // but block-scoped locals still form part of the fixed part of a stack frame
801 // and are thus addressable via GETLOCAL and friends.
803 ComputeLocalOffset(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, Handle
<StaticBlockObject
*> blockObj
)
805 unsigned nfixedvars
= bce
->sc
->isFunctionBox() ? bce
->script
->bindings
.numVars() : 0;
806 unsigned localOffset
= nfixedvars
;
808 if (bce
->staticScope
) {
809 Rooted
<NestedScopeObject
*> outer(cx
, bce
->staticScope
);
810 for (; outer
; outer
= outer
->enclosingNestedScope()) {
811 if (outer
->is
<StaticBlockObject
>()) {
812 StaticBlockObject
& outerBlock
= outer
->as
<StaticBlockObject
>();
813 localOffset
= outerBlock
.localOffset() + outerBlock
.numVariables();
819 JS_ASSERT(localOffset
+ blockObj
->numVariables()
820 <= nfixedvars
+ bce
->script
->bindings
.numBlockScoped());
822 blockObj
->setLocalOffset(localOffset
);
827 // A nested scope is a region of a compilation unit (function, script, or eval
828 // code) with an additional node on the scope chain. This node may either be a
829 // "with" object or a "block" object. "With" objects represent "with" scopes.
830 // Block objects represent lexical scopes, and contain named block-scoped
831 // bindings, for example "let" bindings or the exception in a catch block.
832 // Those variables may be local and thus accessible directly from the stack, or
833 // "aliased" (accessed by name from nested functions, or dynamically via nested
834 // "eval" or "with") and only accessible through the scope chain.
836 // All nested scopes are present on the "static scope chain". A nested scope
837 // that is a "with" scope will be present on the scope chain at run-time as
838 // well. A block scope may or may not have a corresponding link on the run-time
839 // scope chain; if no variable declared in the block scope is "aliased", then no
840 // scope chain node is allocated.
842 // To help debuggers, the bytecode emitter arranges to record the PC ranges
843 // comprehended by a nested scope, and ultimately attach them to the JSScript.
844 // An element in the "block scope array" specifies the PC range, and links to a
845 // NestedScopeObject in the object list of the script. That scope object is
846 // linked to the previous link in the static scope chain, if any. The static
847 // scope chain at any pre-retire PC can be retrieved using
848 // JSScript::getStaticScope(jsbytecode* pc).
850 // Block scopes store their locals in the fixed part of a stack frame, after the
851 // "fixed var" bindings. A fixed var binding is a "var" or legacy "const"
852 // binding that occurs in a function (as opposed to a script or in eval code).
853 // Only functions have fixed var bindings.
855 // To assist the debugger, we emit a DEBUGLEAVEBLOCK opcode before leaving a
856 // block scope, even if the block has no aliased locals. This allows
857 // DebugScopes to invalidate any association between a debugger scope object,
858 // which can proxy access to unaliased stack locals, and the actual live frame.
859 // In normal, non-debug mode, this opcode does not cause any baseline code to be
862 // Enter a nested scope with EnterNestedScope. It will emit
863 // PUSHBLOCKSCOPE/ENTERWITH if needed, and arrange to record the PC bounds of
864 // the scope. Leave a nested scope with LeaveNestedScope, which, for blocks,
865 // will emit DEBUGLEAVEBLOCK and may emit POPBLOCKSCOPE. (For "with" scopes it
866 // emits LEAVEWITH, of course.) Pass EnterNestedScope a fresh StmtInfoBCE
867 // object, and pass that same object to the corresponding LeaveNestedScope. If
868 // the statement is a block scope, pass STMT_BLOCK as stmtType; otherwise for
869 // with scopes pass STMT_WITH.
872 EnterNestedScope(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, StmtInfoBCE
* stmt
, ObjectBox
* objbox
,
875 Rooted
<NestedScopeObject
*> scopeObj(cx
, &objbox
->object
->as
<NestedScopeObject
>());
876 uint32_t scopeObjectIndex
= bce
->objectList
.add(objbox
);
880 Rooted
<StaticBlockObject
*> blockObj(cx
, &scopeObj
->as
<StaticBlockObject
>());
882 ComputeLocalOffset(cx
, bce
, blockObj
);
884 if (!ComputeAliasedSlots(cx
, bce
, blockObj
))
887 if (blockObj
->needsClone()) {
888 if (!EmitInternedObjectOp(cx
, scopeObjectIndex
, JSOP_PUSHBLOCKSCOPE
, bce
))
894 JS_ASSERT(scopeObj
->is
<StaticWithObject
>());
895 if (!EmitInternedObjectOp(cx
, scopeObjectIndex
, JSOP_ENTERWITH
, bce
))
899 MOZ_CRASH("Unexpected scope statement");
902 uint32_t parent
= BlockScopeNote::NoBlockScopeIndex
;
903 if (StmtInfoBCE
* stmt
= bce
->topScopeStmt
) {
904 for (; stmt
->staticScope
!= bce
->staticScope
; stmt
= stmt
->down
) {}
905 parent
= stmt
->blockScopeIndex
;
908 stmt
->blockScopeIndex
= bce
->blockScopeList
.length();
909 if (!bce
->blockScopeList
.append(scopeObjectIndex
, bce
->offset(), parent
))
912 PushStatementBCE(bce
, stmt
, stmtType
, bce
->offset());
913 scopeObj
->initEnclosingNestedScope(EnclosingStaticScope(bce
));
914 FinishPushNestedScope(bce
, stmt
, *scopeObj
);
915 JS_ASSERT(stmt
->isNestedScope
);
916 stmt
->isBlockScope
= (stmtType
== STMT_BLOCK
);
921 // Patches |breaks| and |continues| unless the top statement info record
922 // represents a try-catch-finally suite. May fail if a jump offset overflows.
924 PopStatementBCE(ExclusiveContext
* cx
, BytecodeEmitter
* bce
)
926 StmtInfoBCE
* stmt
= bce
->topStmt
;
927 if (!stmt
->isTrying() &&
928 (!BackPatch(cx
, bce
, stmt
->breaks
, bce
->code().end(), JSOP_GOTO
) ||
929 !BackPatch(cx
, bce
, stmt
->continues
, bce
->code(stmt
->update
), JSOP_GOTO
)))
934 FinishPopStatement(bce
);
939 LeaveNestedScope(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, StmtInfoBCE
* stmt
)
941 JS_ASSERT(stmt
== bce
->topStmt
);
942 JS_ASSERT(stmt
->isNestedScope
);
943 JS_ASSERT(stmt
->isBlockScope
== !(stmt
->type
== STMT_WITH
));
944 uint32_t blockScopeIndex
= stmt
->blockScopeIndex
;
947 JS_ASSERT(bce
->blockScopeList
.list
[blockScopeIndex
].length
== 0);
948 uint32_t blockObjIndex
= bce
->blockScopeList
.list
[blockScopeIndex
].index
;
949 ObjectBox
* blockObjBox
= bce
->objectList
.find(blockObjIndex
);
950 NestedScopeObject
* staticScope
= &blockObjBox
->object
->as
<NestedScopeObject
>();
951 JS_ASSERT(stmt
->staticScope
== staticScope
);
952 JS_ASSERT(staticScope
== bce
->staticScope
);
953 JS_ASSERT_IF(!stmt
->isBlockScope
, staticScope
->is
<StaticWithObject
>());
956 if (!PopStatementBCE(cx
, bce
))
959 if (Emit1(cx
, bce
, stmt
->isBlockScope
? JSOP_DEBUGLEAVEBLOCK
: JSOP_LEAVEWITH
) < 0)
962 bce
->blockScopeList
.recordEnd(blockScopeIndex
, bce
->offset());
964 if (stmt
->isBlockScope
&& stmt
->staticScope
->as
<StaticBlockObject
>().needsClone()) {
965 if (Emit1(cx
, bce
, JSOP_POPBLOCKSCOPE
) < 0)
973 EmitIndex32(ExclusiveContext
* cx
, JSOp op
, uint32_t index
, BytecodeEmitter
* bce
)
975 const size_t len
= 1 + UINT32_INDEX_LEN
;
976 JS_ASSERT(len
== size_t(js_CodeSpec
[op
].length
));
977 ptrdiff_t offset
= EmitCheck(cx
, bce
, len
);
981 jsbytecode
* code
= bce
->code(offset
);
982 code
[0] = jsbytecode(op
);
983 SET_UINT32_INDEX(code
, index
);
984 UpdateDepth(cx
, bce
, offset
);
985 CheckTypeSet(cx
, bce
, op
);
990 EmitIndexOp(ExclusiveContext
* cx
, JSOp op
, uint32_t index
, BytecodeEmitter
* bce
)
992 const size_t len
= js_CodeSpec
[op
].length
;
993 JS_ASSERT(len
>= 1 + UINT32_INDEX_LEN
);
994 ptrdiff_t offset
= EmitCheck(cx
, bce
, len
);
998 jsbytecode
* code
= bce
->code(offset
);
999 code
[0] = jsbytecode(op
);
1000 SET_UINT32_INDEX(code
, index
);
1001 UpdateDepth(cx
, bce
, offset
);
1002 CheckTypeSet(cx
, bce
, op
);
1007 EmitAtomOp(ExclusiveContext
* cx
, JSAtom
* atom
, JSOp op
, BytecodeEmitter
* bce
)
1009 JS_ASSERT(JOF_OPTYPE(op
) == JOF_ATOM
);
1011 if (op
== JSOP_GETPROP
&& atom
== cx
->names().length
) {
1012 /* Specialize length accesses for the interpreter. */
1017 if (!bce
->makeAtomIndex(atom
, &index
))
1020 return EmitIndexOp(cx
, op
, index
, bce
);
1024 EmitAtomOp(ExclusiveContext
* cx
, ParseNode
* pn
, JSOp op
, BytecodeEmitter
* bce
)
1026 JS_ASSERT(pn
->pn_atom
!= nullptr);
1027 return EmitAtomOp(cx
, pn
->pn_atom
, op
, bce
);
1031 EmitInternedObjectOp(ExclusiveContext
* cx
, uint32_t index
, JSOp op
, BytecodeEmitter
* bce
)
1033 JS_ASSERT(JOF_OPTYPE(op
) == JOF_OBJECT
);
1034 JS_ASSERT(index
< bce
->objectList
.length
);
1035 return EmitIndex32(cx
, op
, index
, bce
);
1039 EmitObjectOp(ExclusiveContext
* cx
, ObjectBox
* objbox
, JSOp op
, BytecodeEmitter
* bce
)
1041 return EmitInternedObjectOp(cx
, bce
->objectList
.add(objbox
), op
, bce
);
1045 EmitObjectPairOp(ExclusiveContext
* cx
, ObjectBox
* objbox1
, ObjectBox
* objbox2
, JSOp op
,
1046 BytecodeEmitter
* bce
)
1048 uint32_t index
= bce
->objectList
.add(objbox1
);
1049 bce
->objectList
.add(objbox2
);
1050 return EmitInternedObjectOp(cx
, index
, op
, bce
);
1054 EmitRegExp(ExclusiveContext
* cx
, uint32_t index
, BytecodeEmitter
* bce
)
1056 return EmitIndex32(cx
, JSOP_REGEXP
, index
, bce
);
1060 * To catch accidental misuse, EMIT_UINT16_IMM_OP/Emit3 assert that they are
1061 * not used to unconditionally emit JSOP_GETLOCAL. Variable access should
1062 * instead be emitted using EmitVarOp. In special cases, when the caller
1063 * definitely knows that a given local slot is unaliased, this function may be
1064 * used as a non-asserting version of EMIT_UINT16_IMM_OP.
1067 EmitUnaliasedVarOp(ExclusiveContext
* cx
, JSOp op
, uint32_t slot
, BytecodeEmitter
* bce
)
1069 JS_ASSERT(JOF_OPTYPE(op
) != JOF_SCOPECOORD
);
1071 if (IsLocalOp(op
)) {
1072 ptrdiff_t off
= EmitN(cx
, bce
, op
, LOCALNO_LEN
);
1076 SET_LOCALNO(bce
->code(off
), slot
);
1080 JS_ASSERT(IsArgOp(op
));
1081 ptrdiff_t off
= EmitN(cx
, bce
, op
, ARGNO_LEN
);
1085 SET_ARGNO(bce
->code(off
), slot
);
1090 EmitAliasedVarOp(ExclusiveContext
* cx
, JSOp op
, ScopeCoordinate sc
, BytecodeEmitter
* bce
)
1092 JS_ASSERT(JOF_OPTYPE(op
) == JOF_SCOPECOORD
);
1094 unsigned n
= SCOPECOORD_HOPS_LEN
+ SCOPECOORD_SLOT_LEN
;
1095 JS_ASSERT(int(n
) + 1 /* op */ == js_CodeSpec
[op
].length
);
1097 ptrdiff_t off
= EmitN(cx
, bce
, op
, n
);
1101 jsbytecode
* pc
= bce
->code(off
);
1102 SET_SCOPECOORD_HOPS(pc
, sc
.hops());
1103 pc
+= SCOPECOORD_HOPS_LEN
;
1104 SET_SCOPECOORD_SLOT(pc
, sc
.slot());
1105 pc
+= SCOPECOORD_SLOT_LEN
;
1106 CheckTypeSet(cx
, bce
, op
);
1110 // Compute the number of nested scope objects that will actually be on the scope
1111 // chain at runtime, given the BCE's current staticScope.
1113 DynamicNestedScopeDepth(BytecodeEmitter
* bce
)
1116 for (NestedScopeObject
* b
= bce
->staticScope
; b
; b
= b
->enclosingNestedScope()) {
1117 if (!b
->is
<StaticBlockObject
>() || b
->as
<StaticBlockObject
>().needsClone())
1125 LookupAliasedName(HandleScript script
, PropertyName
* name
, uint32_t* pslot
)
1128 * Beware: BindingIter may contain more than one Binding for a given name
1129 * (in the case of |function f(x,x) {}|) but only one will be aliased.
1131 uint32_t slot
= CallObject::RESERVED_SLOTS
;
1132 for (BindingIter
bi(script
); !bi
.done(); bi
++) {
1133 if (bi
->aliased()) {
1134 if (bi
->name() == name
) {
1145 LookupAliasedNameSlot(HandleScript script
, PropertyName
* name
, ScopeCoordinate
* sc
)
1148 if (!LookupAliasedName(script
, name
, &slot
))
1156 * Use this function instead of assigning directly to 'hops' to guard for
1157 * uint8_t overflows.
1160 AssignHops(BytecodeEmitter
* bce
, ParseNode
* pn
, unsigned src
, ScopeCoordinate
* dst
)
1162 if (src
> UINT8_MAX
) {
1163 bce
->reportError(pn
, JSMSG_TOO_DEEP
, js_function_str
);
1172 EmitAliasedVarOp(ExclusiveContext
* cx
, JSOp op
, ParseNode
* pn
, BytecodeEmitter
* bce
)
1175 * While pn->pn_cookie tells us how many function scopes are between the use and the def this
1176 * is not the same as how many hops up the dynamic scope chain are needed. In particular:
1177 * - a lexical function scope only contributes a hop if it is "heavyweight" (has a dynamic
1179 * - a heavyweight named function scope contributes an extra scope to the scope chain (a
1180 * DeclEnvObject that holds just the name).
1181 * - all the intervening let/catch blocks must be counted.
1183 unsigned skippedScopes
= 0;
1184 BytecodeEmitter
* bceOfDef
= bce
;
1187 * As explained in BindNameToSlot, the 'level' of a use indicates how
1188 * many function scopes (i.e., BytecodeEmitters) to skip to find the
1189 * enclosing function scope of the definition being accessed.
1191 for (unsigned i
= pn
->pn_cookie
.level(); i
; i
--) {
1192 skippedScopes
+= DynamicNestedScopeDepth(bceOfDef
);
1193 FunctionBox
* funbox
= bceOfDef
->sc
->asFunctionBox();
1194 if (funbox
->isHeavyweight()) {
1196 if (funbox
->function()->isNamedLambda())
1199 bceOfDef
= bceOfDef
->parent
;
1202 JS_ASSERT(pn
->isDefn());
1203 JS_ASSERT(pn
->pn_cookie
.level() == bce
->script
->staticLevel());
1207 * The final part of the skippedScopes computation depends on the type of
1208 * variable. An arg or local variable is at the outer scope of a function
1209 * and so includes the full DynamicNestedScopeDepth. A let/catch-binding
1210 * requires a search of the block chain to see how many (dynamic) block
1214 if (IsArgOp(pn
->getOp())) {
1215 if (!AssignHops(bce
, pn
, skippedScopes
+ DynamicNestedScopeDepth(bceOfDef
), &sc
))
1217 JS_ALWAYS_TRUE(LookupAliasedNameSlot(bceOfDef
->script
, pn
->name(), &sc
));
1219 JS_ASSERT(IsLocalOp(pn
->getOp()) || pn
->isKind(PNK_FUNCTION
));
1220 uint32_t local
= pn
->pn_cookie
.slot();
1221 if (local
< bceOfDef
->script
->bindings
.numVars()) {
1222 if (!AssignHops(bce
, pn
, skippedScopes
+ DynamicNestedScopeDepth(bceOfDef
), &sc
))
1224 JS_ALWAYS_TRUE(LookupAliasedNameSlot(bceOfDef
->script
, pn
->name(), &sc
));
1226 JS_ASSERT_IF(bce
->sc
->isFunctionBox(), local
<= bceOfDef
->script
->bindings
.numLocals());
1227 JS_ASSERT(bceOfDef
->staticScope
->is
<StaticBlockObject
>());
1228 Rooted
<StaticBlockObject
*> b(cx
, &bceOfDef
->staticScope
->as
<StaticBlockObject
>());
1229 while (local
< b
->localOffset()) {
1230 if (b
->needsClone())
1232 b
= &b
->enclosingNestedScope()->as
<StaticBlockObject
>();
1234 if (!AssignHops(bce
, pn
, skippedScopes
, &sc
))
1236 sc
.setSlot(b
->localIndexToSlot(local
));
1240 return EmitAliasedVarOp(cx
, op
, sc
, bce
);
1244 EmitVarOp(ExclusiveContext
* cx
, ParseNode
* pn
, JSOp op
, BytecodeEmitter
* bce
)
1246 JS_ASSERT(pn
->isKind(PNK_FUNCTION
) || pn
->isKind(PNK_NAME
));
1247 JS_ASSERT(!pn
->pn_cookie
.isFree());
1249 if (IsAliasedVarOp(op
)) {
1251 sc
.setHops(pn
->pn_cookie
.level());
1252 sc
.setSlot(pn
->pn_cookie
.slot());
1253 return EmitAliasedVarOp(cx
, op
, sc
, bce
);
1256 JS_ASSERT_IF(pn
->isKind(PNK_NAME
), IsArgOp(op
) || IsLocalOp(op
));
1258 if (!bce
->isAliasedName(pn
)) {
1259 JS_ASSERT(pn
->isUsed() || pn
->isDefn());
1260 JS_ASSERT_IF(pn
->isUsed(), pn
->pn_cookie
.level() == 0);
1261 JS_ASSERT_IF(pn
->isDefn(), pn
->pn_cookie
.level() == bce
->script
->staticLevel());
1262 return EmitUnaliasedVarOp(cx
, op
, pn
->pn_cookie
.slot(), bce
);
1266 case JSOP_GETARG
: case JSOP_GETLOCAL
: op
= JSOP_GETALIASEDVAR
; break;
1267 case JSOP_SETARG
: case JSOP_SETLOCAL
: op
= JSOP_SETALIASEDVAR
; break;
1268 default: MOZ_CRASH("unexpected var op");
1271 return EmitAliasedVarOp(cx
, op
, pn
, bce
);
1275 GetIncDecInfo(ParseNodeKind kind
, bool* post
)
1277 JS_ASSERT(kind
== PNK_POSTINCREMENT
|| kind
== PNK_PREINCREMENT
||
1278 kind
== PNK_POSTDECREMENT
|| kind
== PNK_PREDECREMENT
);
1279 *post
= kind
== PNK_POSTINCREMENT
|| kind
== PNK_POSTDECREMENT
;
1280 return (kind
== PNK_POSTINCREMENT
|| kind
== PNK_PREINCREMENT
) ? JSOP_ADD
: JSOP_SUB
;
1284 EmitVarIncDec(ExclusiveContext
* cx
, ParseNode
* pn
, BytecodeEmitter
* bce
)
1286 JSOp op
= pn
->pn_kid
->getOp();
1287 JS_ASSERT(IsArgOp(op
) || IsLocalOp(op
) || IsAliasedVarOp(op
));
1288 JS_ASSERT(pn
->pn_kid
->isKind(PNK_NAME
));
1289 JS_ASSERT(!pn
->pn_kid
->pn_cookie
.isFree());
1292 JSOp binop
= GetIncDecInfo(pn
->getKind(), &post
);
1295 if (IsLocalOp(op
)) {
1296 getOp
= JSOP_GETLOCAL
;
1297 setOp
= JSOP_SETLOCAL
;
1298 } else if (IsArgOp(op
)) {
1299 getOp
= JSOP_GETARG
;
1300 setOp
= JSOP_SETARG
;
1302 getOp
= JSOP_GETALIASEDVAR
;
1303 setOp
= JSOP_SETALIASEDVAR
;
1306 if (!EmitVarOp(cx
, pn
->pn_kid
, getOp
, bce
)) // V
1308 if (Emit1(cx
, bce
, JSOP_POS
) < 0) // N
1310 if (post
&& Emit1(cx
, bce
, JSOP_DUP
) < 0) // N? N
1312 if (Emit1(cx
, bce
, JSOP_ONE
) < 0) // N? N 1
1314 if (Emit1(cx
, bce
, binop
) < 0) // N? N+1
1316 if (!EmitVarOp(cx
, pn
->pn_kid
, setOp
, bce
)) // N? N+1
1318 if (post
&& Emit1(cx
, bce
, JSOP_POP
) < 0) // RESULT
1325 BytecodeEmitter::isAliasedName(ParseNode
* pn
)
1327 Definition
* dn
= pn
->resolve();
1328 JS_ASSERT(dn
->isDefn());
1329 JS_ASSERT(!dn
->isPlaceholder());
1330 JS_ASSERT(dn
->isBound());
1332 /* If dn is in an enclosing function, it is definitely aliased. */
1333 if (dn
->pn_cookie
.level() != script
->staticLevel())
1336 switch (dn
->kind()) {
1337 case Definition::LET
:
1339 * There are two ways to alias a let variable: nested functions and
1340 * dynamic scope operations. (This is overly conservative since the
1341 * bindingsAccessedDynamically flag, checked by allLocalsAliased, is
1344 * In addition all locals in generators are marked as aliased, to ensure
1345 * that they are allocated on scope chains instead of on the stack. See
1346 * the definition of SharedContext::allLocalsAliased.
1348 return dn
->isClosed() || sc
->allLocalsAliased();
1349 case Definition::ARG
:
1351 * Consult the bindings, since they already record aliasing. We might
1352 * be tempted to use the same definition as VAR/CONST/LET, but there is
1353 * a problem caused by duplicate arguments: only the last argument with
1354 * a given name is aliased. This is necessary to avoid generating a
1355 * shape for the call object with with more than one name for a given
1356 * slot (which violates internal engine invariants). All this means that
1357 * the '|| sc->allLocalsAliased()' disjunct is incorrect since it will
1358 * mark both parameters in function(x,x) as aliased.
1360 return script
->formalIsAliased(pn
->pn_cookie
.slot());
1361 case Definition::VAR
:
1362 case Definition::CONST
:
1363 JS_ASSERT_IF(sc
->allLocalsAliased(), script
->varIsAliased(pn
->pn_cookie
.slot()));
1364 return script
->varIsAliased(pn
->pn_cookie
.slot());
1365 case Definition::PLACEHOLDER
:
1366 case Definition::NAMED_LAMBDA
:
1367 case Definition::MISSING
:
1368 MOZ_CRASH("unexpected dn->kind");
1374 * Try to convert a *NAME op with a free name to a more specialized GNAME,
1375 * INTRINSIC or ALIASEDVAR op, which optimize accesses on that name.
1376 * Return true if a conversion was made.
1379 TryConvertFreeName(BytecodeEmitter
* bce
, ParseNode
* pn
)
1382 * In self-hosting mode, JSOP_*NAME is unconditionally converted to
1383 * JSOP_*INTRINSIC. This causes lookups to be redirected to the special
1384 * intrinsics holder in the global object, into which any missing values are
1385 * cloned lazily upon first access.
1387 if (bce
->emitterMode
== BytecodeEmitter::SelfHosting
) {
1389 switch (pn
->getOp()) {
1390 case JSOP_NAME
: op
= JSOP_GETINTRINSIC
; break;
1391 case JSOP_SETNAME
: op
= JSOP_SETINTRINSIC
; break;
1392 /* Other *NAME ops aren't (yet) supported in self-hosted code. */
1393 default: MOZ_CRASH("intrinsic");
1400 * When parsing inner functions lazily, parse nodes for outer functions no
1401 * longer exist and only the function's scope chain is available for
1402 * resolving upvar accesses within the inner function.
1404 if (bce
->emitterMode
== BytecodeEmitter::LazyFunction
) {
1405 // The only statements within a lazy function which can push lexical
1406 // scopes are try/catch blocks. Use generic ops in this case.
1407 for (StmtInfoBCE
* stmt
= bce
->topStmt
; stmt
; stmt
= stmt
->down
) {
1408 if (stmt
->type
== STMT_CATCH
)
1413 FunctionBox
* funbox
= bce
->sc
->asFunctionBox();
1414 if (funbox
->hasExtensibleScope())
1416 if (funbox
->function()->isNamedLambda() && funbox
->function()->atom() == pn
->pn_atom
)
1418 if (funbox
->isHeavyweight()) {
1420 if (funbox
->function()->isNamedLambda())
1423 if (bce
->script
->directlyInsideEval())
1425 RootedObject
outerScope(bce
->sc
->context
, bce
->script
->enclosingStaticScope());
1426 for (StaticScopeIter
<CanGC
> ssi(bce
->sc
->context
, outerScope
); !ssi
.done(); ssi
++) {
1427 if (ssi
.type() != StaticScopeIter
<CanGC
>::FUNCTION
) {
1428 if (ssi
.type() == StaticScopeIter
<CanGC
>::BLOCK
) {
1429 // Use generic ops if a catch block is encountered.
1432 if (ssi
.hasDynamicScopeObject())
1436 RootedScript
script(bce
->sc
->context
, ssi
.funScript());
1437 if (script
->functionNonDelazifying()->atom() == pn
->pn_atom
)
1439 if (ssi
.hasDynamicScopeObject()) {
1441 if (LookupAliasedName(script
, pn
->pn_atom
->asPropertyName(), &slot
)) {
1443 switch (pn
->getOp()) {
1444 case JSOP_NAME
: op
= JSOP_GETALIASEDVAR
; break;
1445 case JSOP_SETNAME
: op
= JSOP_SETALIASEDVAR
; break;
1446 default: return false;
1449 JS_ALWAYS_TRUE(pn
->pn_cookie
.set(bce
->parser
->tokenStream
, hops
, slot
));
1455 if (script
->funHasExtensibleScope() || script
->directlyInsideEval())
1460 // Unbound names aren't recognizable global-property references if the
1461 // script isn't running against its global object.
1462 if (!bce
->script
->compileAndGo() || !bce
->hasGlobalScope
)
1465 // Deoptimized names also aren't necessarily globals.
1466 if (pn
->isDeoptimized())
1469 if (bce
->sc
->isFunctionBox()) {
1470 // Unbound names in function code may not be globals if new locals can
1471 // be added to this function (or an enclosing one) to alias a global
1473 FunctionBox
* funbox
= bce
->sc
->asFunctionBox();
1474 if (funbox
->mightAliasLocals())
1478 // If this is eval code, being evaluated inside strict mode eval code,
1479 // an "unbound" name might be a binding local to that outer eval:
1481 // var x = "GLOBAL";
1482 // eval('"use strict"; ' +
1484 // 'eval("print(x)");'); // "undefined", not "GLOBAL"
1486 // Given the enclosing eval code's strictness and its bindings (neither is
1487 // readily available now), we could exactly check global-ness, but it's not
1488 // worth the trouble for doubly-nested eval code. So we conservatively
1489 // approximate. If the outer eval code is strict, then this eval code will
1490 // be: thus, don't optimize if we're compiling strict code inside an eval.
1491 if (bce
->insideEval
&& bce
->sc
->strict
)
1495 switch (pn
->getOp()) {
1496 case JSOP_NAME
: op
= JSOP_GETGNAME
; break;
1497 case JSOP_SETNAME
: op
= JSOP_SETGNAME
; break;
1501 default: MOZ_CRASH("gname");
1508 * BindNameToSlotHelper attempts to optimize name gets and sets to stack slot
1509 * loads and stores, given the compile-time information in bce and a PNK_NAME
1510 * node pn. It returns false on error, true on success.
1512 * The caller can test pn->pn_cookie.isFree() to tell whether optimization
1513 * occurred, in which case BindNameToSlotHelper also updated pn->pn_op. If
1514 * pn->pn_cookie.isFree() is still true on return, pn->pn_op still may have
1515 * been optimized, e.g., from JSOP_NAME to JSOP_CALLEE. Whether or not
1516 * pn->pn_op was modified, if this function finds an argument or local variable
1517 * name, PND_CONST will be set in pn_dflags for read-only properties after a
1518 * successful return.
1520 * NB: if you add more opcodes specialized from JSOP_NAME, etc., don't forget
1521 * to update the special cases in EmitFor (for-in) and EmitAssignment (= and
1525 BindNameToSlotHelper(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
1527 JS_ASSERT(pn
->isKind(PNK_NAME
));
1529 JS_ASSERT_IF(pn
->isKind(PNK_FUNCTION
), pn
->isBound());
1531 /* Don't attempt if 'pn' is already bound or deoptimized or a function. */
1532 if (pn
->isBound() || pn
->isDeoptimized())
1535 /* JSOP_CALLEE is pre-bound by definition. */
1536 JSOp op
= pn
->getOp();
1537 JS_ASSERT(op
!= JSOP_CALLEE
);
1538 JS_ASSERT(JOF_OPTYPE(op
) == JOF_ATOM
);
1541 * The parser already linked name uses to definitions when (where not
1542 * prevented by non-lexical constructs like 'with' and 'eval').
1546 JS_ASSERT(pn
->pn_cookie
.isFree());
1548 JS_ASSERT(dn
->isDefn());
1549 pn
->pn_dflags
|= (dn
->pn_dflags
& PND_CONST
);
1550 } else if (pn
->isDefn()) {
1551 dn
= (Definition
*) pn
;
1557 * Turn attempts to mutate const-declared bindings into get ops (for
1558 * pre-increment and pre-decrement ops, our caller will have to emit
1559 * JSOP_POS, JSOP_ONE, and JSOP_ADD as well).
1561 * Turn JSOP_DELNAME into JSOP_FALSE if dn is known, as all declared
1562 * bindings visible to the compiler are permanent in JS unless the
1563 * declaration originates at top level in eval code.
1570 if (pn
->isConst()) {
1571 if (bce
->sc
->needStrictChecks()) {
1572 JSAutoByteString name
;
1573 if (!AtomToPrintableString(cx
, pn
->pn_atom
, &name
) ||
1574 !bce
->reportStrictModeError(pn
, JSMSG_READ_ONLY
, name
.ptr()))
1579 pn
->setOp(op
= JSOP_NAME
);
1583 if (dn
->pn_cookie
.isFree()) {
1584 if (HandleScript caller
= bce
->evalCaller
) {
1585 JS_ASSERT(bce
->script
->compileAndGo());
1588 * Don't generate upvars on the left side of a for loop. See
1591 if (bce
->emittingForInit
)
1595 * If this is an eval in the global scope, then unbound variables
1596 * must be globals, so try to use GNAME ops.
1598 if (!caller
->functionOrCallerFunction() && TryConvertFreeName(bce
, pn
)) {
1599 pn
->pn_dflags
|= PND_BOUND
;
1604 * Out of tricks, so we must rely on PICs to optimize named
1605 * accesses from direct eval called from function code.
1610 /* Optimize accesses to undeclared globals. */
1611 if (!TryConvertFreeName(bce
, pn
))
1614 pn
->pn_dflags
|= PND_BOUND
;
1619 * At this point, we are only dealing with uses that have already been
1620 * bound to definitions via pn_lexdef. The rest of this routine converts
1621 * the parse node of the use from its initial JSOP_*NAME* op to a LOCAL/ARG
1622 * op. This requires setting the node's pn_cookie with a pair (level, slot)
1623 * where 'level' is the number of function scopes between the use and the
1624 * def and 'slot' is the index to emit as the immediate of the ARG/LOCAL
1625 * op. For example, in this code:
1627 * function(a,b,x) { return x }
1628 * function(y) { function() { return y } }
1630 * x will get (level = 0, slot = 2) and y will get (level = 1, slot = 0).
1632 JS_ASSERT(!pn
->isDefn());
1633 JS_ASSERT(pn
->isUsed());
1634 JS_ASSERT(pn
->pn_lexdef
);
1635 JS_ASSERT(pn
->pn_cookie
.isFree());
1638 * We are compiling a function body and may be able to optimize name
1639 * to stack slot. Look for an argument or variable in the function and
1640 * rewrite pn_op and update pn accordingly.
1642 switch (dn
->kind()) {
1643 case Definition::ARG
:
1645 case JSOP_NAME
: op
= JSOP_GETARG
; break;
1646 case JSOP_SETNAME
: op
= JSOP_SETARG
; break;
1647 default: MOZ_CRASH("arg");
1649 JS_ASSERT(!pn
->isConst());
1652 case Definition::VAR
:
1653 case Definition::CONST
:
1654 case Definition::LET
:
1656 case JSOP_NAME
: op
= JSOP_GETLOCAL
; break;
1657 case JSOP_SETNAME
: op
= JSOP_SETLOCAL
; break;
1658 case JSOP_SETCONST
: op
= JSOP_SETLOCAL
; break;
1659 default: MOZ_CRASH("local");
1663 case Definition::NAMED_LAMBDA
: {
1664 JS_ASSERT(dn
->isOp(JSOP_CALLEE
));
1665 JS_ASSERT(op
!= JSOP_CALLEE
);
1668 * Currently, the ALIASEDVAR ops do not support accessing the
1669 * callee of a DeclEnvObject, so use NAME.
1671 if (dn
->pn_cookie
.level() != bce
->script
->staticLevel())
1674 DebugOnly
<JSFunction
*> fun
= bce
->sc
->asFunctionBox()->function();
1675 JS_ASSERT(fun
->isLambda());
1676 JS_ASSERT(pn
->pn_atom
== fun
->atom());
1679 * Leave pn->isOp(JSOP_NAME) if bce->fun is heavyweight to
1680 * address two cases: a new binding introduced by eval, and
1681 * assignment to the name in strict mode.
1683 * var fun = (function f(s) { eval(s); return f; });
1684 * assertEq(fun("var f = 42"), 42);
1686 * ECMAScript specifies that a function expression's name is bound
1687 * in a lexical environment distinct from that used to bind its
1688 * named parameters, the arguments object, and its variables. The
1689 * new binding for "var f = 42" shadows the binding for the
1690 * function itself, so the name of the function will not refer to
1693 * (function f() { "use strict"; f = 12; })();
1695 * Outside strict mode, assignment to a function expression's name
1696 * has no effect. But in strict mode, this attempt to mutate an
1697 * immutable binding must throw a TypeError. We implement this by
1698 * not optimizing such assignments and by marking such functions as
1699 * heavyweight, ensuring that the function name is represented in
1700 * the scope chain so that assignment will throw a TypeError.
1702 if (!bce
->sc
->asFunctionBox()->isHeavyweight()) {
1704 pn
->pn_dflags
|= PND_CONST
;
1708 pn
->pn_dflags
|= PND_BOUND
;
1712 case Definition::PLACEHOLDER
:
1715 case Definition::MISSING
:
1716 MOZ_CRASH("missing");
1720 * The difference between the current static level and the static level of
1721 * the definition is the number of function scopes between the current
1722 * scope and dn's scope.
1724 unsigned skip
= bce
->script
->staticLevel() - dn
->pn_cookie
.level();
1725 JS_ASSERT_IF(skip
, dn
->isClosed());
1728 * Explicitly disallow accessing var/let bindings in global scope from
1729 * nested functions. The reason for this limitation is that, since the
1730 * global script is not included in the static scope chain (1. because it
1731 * has no object to stand in the static scope chain, 2. to minimize memory
1732 * bloat where a single live function keeps its whole global script
1733 * alive.), ScopeCoordinateToTypeSet is not able to find the var/let's
1734 * associated types::TypeSet.
1737 BytecodeEmitter
* bceSkipped
= bce
;
1738 for (unsigned i
= 0; i
< skip
; i
++)
1739 bceSkipped
= bceSkipped
->parent
;
1740 if (!bceSkipped
->sc
->isFunctionBox())
1744 JS_ASSERT(!pn
->isOp(op
));
1746 if (!pn
->pn_cookie
.set(bce
->parser
->tokenStream
, skip
, dn
->pn_cookie
.slot()))
1749 pn
->pn_dflags
|= PND_BOUND
;
1754 * Attempts to bind the name, then checks that no dynamic scope lookup ops are
1755 * emitted in self-hosting mode. NAME ops do lookups off current scope chain,
1756 * and we do not want to allow self-hosted code to use the dynamic scope.
1759 BindNameToSlot(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
1761 if (!BindNameToSlotHelper(cx
, bce
, pn
))
1764 if (bce
->emitterMode
== BytecodeEmitter::SelfHosting
&& !pn
->isBound()) {
1765 bce
->reportError(pn
, JSMSG_SELFHOSTED_UNBOUND_NAME
);
1773 * If pn contains a useful expression, return true with *answer set to true.
1774 * If pn contains a useless expression, return true with *answer set to false.
1775 * Return false on error.
1777 * The caller should initialize *answer to false and invoke this function on
1778 * an expression statement or similar subtree to decide whether the tree could
1779 * produce code that has any side effects. For an expression statement, we
1780 * define useless code as code with no side effects, because the main effect,
1781 * the value left on the stack after the code executes, will be discarded by a
1785 CheckSideEffects(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
, bool* answer
)
1790 switch (pn
->getArity()) {
1793 * A named function, contrary to ES3, is no longer useful, because we
1794 * bind its name lexically (using JSOP_CALLEE) instead of creating an
1795 * Object instance and binding a readonly, permanent property in it
1796 * (the object and binding can be detected and hijacked or captured).
1797 * This is a bug fix to ES3; it is fixed in ES3.1 drafts.
1799 MOZ_ASSERT(*answer
== false);
1803 if (pn
->isOp(JSOP_NOP
) || pn
->isOp(JSOP_OR
) || pn
->isOp(JSOP_AND
) ||
1804 pn
->isOp(JSOP_STRICTEQ
) || pn
->isOp(JSOP_STRICTNE
)) {
1806 * Non-operators along with ||, &&, ===, and !== never invoke
1807 * toString or valueOf.
1810 for (ParseNode
* pn2
= pn
->pn_head
; pn2
; pn2
= pn2
->pn_next
)
1811 ok
&= CheckSideEffects(cx
, bce
, pn2
, answer
);
1815 if (pn
->isKind(PNK_GENEXP
)) {
1816 /* Generator-expressions are harmless if the result is ignored. */
1817 MOZ_ASSERT(*answer
== false);
1822 * All invocation operations (construct: PNK_NEW, call: PNK_CALL)
1823 * are presumed to be useful, because they may have side effects
1824 * even if their main effect (their return value) is discarded.
1826 * PNK_ELEM binary trees of 3+ nodes are flattened into lists to
1827 * avoid too much recursion. All such lists must be presumed to be
1828 * useful because each index operation could invoke a getter.
1830 * Likewise, array and object initialisers may call prototype
1831 * setters (the __defineSetter__ built-in, and writable __proto__
1832 * on Array.prototype create this hazard). Initialiser list nodes
1833 * have JSOP_NEWINIT in their pn_op.
1839 return CheckSideEffects(cx
, bce
, pn
->pn_kid1
, answer
) &&
1840 CheckSideEffects(cx
, bce
, pn
->pn_kid2
, answer
) &&
1841 CheckSideEffects(cx
, bce
, pn
->pn_kid3
, answer
);
1845 if (pn
->isAssignment()) {
1847 * Assignment is presumed to be useful, even if the next operation
1848 * is another assignment overwriting this one's ostensible effect,
1849 * because the left operand may be a property with a setter that
1852 * The only exception is assignment of a useless value to a const
1853 * declared in the function currently being compiled.
1855 ParseNode
* pn2
= pn
->pn_left
;
1856 if (!pn2
->isKind(PNK_NAME
)) {
1859 if (!BindNameToSlot(cx
, bce
, pn2
))
1861 if (!CheckSideEffects(cx
, bce
, pn
->pn_right
, answer
))
1863 if (!*answer
&& (!pn
->isOp(JSOP_NOP
) || !pn2
->isConst()))
1869 if (pn
->isOp(JSOP_OR
) || pn
->isOp(JSOP_AND
) || pn
->isOp(JSOP_STRICTEQ
) ||
1870 pn
->isOp(JSOP_STRICTNE
)) {
1872 * ||, &&, ===, and !== do not convert their operands via
1873 * toString or valueOf method calls.
1875 return CheckSideEffects(cx
, bce
, pn
->pn_left
, answer
) &&
1876 CheckSideEffects(cx
, bce
, pn
->pn_right
, answer
);
1880 * We can't easily prove that neither operand ever denotes an
1881 * object with a toString or valueOf method.
1887 switch (pn
->getKind()) {
1890 ParseNode
* pn2
= pn
->pn_kid
;
1891 switch (pn2
->getKind()) {
1893 if (!BindNameToSlot(cx
, bce
, pn2
))
1895 if (pn2
->isConst()) {
1896 MOZ_ASSERT(*answer
== false);
1903 /* All these delete addressing modes have effects too. */
1907 return CheckSideEffects(cx
, bce
, pn2
, answer
);
1909 MOZ_CRASH("We have a returning default case");
1916 if (pn
->isOp(JSOP_NOT
)) {
1917 /* ! does not convert its operand via toString or valueOf. */
1918 return CheckSideEffects(cx
, bce
, pn
->pn_kid
, answer
);
1924 * All of PNK_INC, PNK_DEC, PNK_THROW, PNK_YIELD, and PNK_YIELD_STAR
1925 * have direct effects. Of the remaining unary-arity node types, we
1926 * can't easily prove that the operand never denotes an object with
1927 * a toString or valueOf method.
1932 MOZ_CRASH("We have a returning default case");
1936 * Take care to avoid trying to bind a label name (labels, both for
1937 * statements and property values in object initialisers, have pn_op
1938 * defaulted to JSOP_NOP).
1940 if (pn
->isKind(PNK_NAME
) && !pn
->isOp(JSOP_NOP
)) {
1941 if (!BindNameToSlot(cx
, bce
, pn
))
1943 if (!pn
->isOp(JSOP_CALLEE
) && pn
->pn_cookie
.isFree()) {
1945 * Not a use of an unshadowed named function expression's given
1946 * name, so this expression could invoke a getter that has side
1952 if (pn
->isKind(PNK_DOT
)) {
1953 /* Dotted property references in general can call getters. */
1956 return CheckSideEffects(cx
, bce
, pn
->maybeExpr(), answer
);
1959 if (pn
->isKind(PNK_DEBUGGER
))
1967 BytecodeEmitter::isInLoop()
1969 for (StmtInfoBCE
* stmt
= topStmt
; stmt
; stmt
= stmt
->down
) {
1977 BytecodeEmitter::checkSingletonContext()
1979 if (!script
->compileAndGo() || sc
->isFunctionBox() || isInLoop())
1981 hasSingletons
= true;
1986 BytecodeEmitter::needsImplicitThis()
1988 if (!script
->compileAndGo())
1991 if (sc
->isFunctionBox()) {
1992 if (sc
->asFunctionBox()->inWith
)
1995 JSObject
* scope
= sc
->asGlobalSharedContext()->scopeChain();
1997 if (scope
->is
<DynamicWithObject
>())
1999 scope
= scope
->enclosingScope();
2003 for (StmtInfoBCE
* stmt
= topStmt
; stmt
; stmt
= stmt
->down
) {
2004 if (stmt
->type
== STMT_WITH
)
2011 BytecodeEmitter::tellDebuggerAboutCompiledScript(ExclusiveContext
* cx
)
2013 // Note: when parsing off thread the resulting scripts need to be handed to
2014 // the debugger after rejoining to the main thread.
2015 if (!cx
->isJSContext())
2018 // Lazy scripts are never top level (despite always being invoked with a
2019 // nullptr parent), and so the hook should never be fired.
2020 if (emitterMode
!= LazyFunction
&& !parent
) {
2021 GlobalObject
* compileAndGoGlobal
= nullptr;
2022 if (script
->compileAndGo())
2023 compileAndGoGlobal
= &script
->global();
2024 Debugger::onNewScript(cx
->asJSContext(), script
, compileAndGoGlobal
);
2029 BytecodeEmitter::tokenStream()
2031 return &parser
->tokenStream
;
2035 BytecodeEmitter::reportError(ParseNode
* pn
, unsigned errorNumber
, ...)
2037 TokenPos pos
= pn
? pn
->pn_pos
: tokenStream()->currentToken().pos
;
2040 va_start(args
, errorNumber
);
2041 bool result
= tokenStream()->reportCompileErrorNumberVA(pos
.begin
, JSREPORT_ERROR
,
2048 BytecodeEmitter::reportStrictWarning(ParseNode
* pn
, unsigned errorNumber
, ...)
2050 TokenPos pos
= pn
? pn
->pn_pos
: tokenStream()->currentToken().pos
;
2053 va_start(args
, errorNumber
);
2054 bool result
= tokenStream()->reportStrictWarningErrorNumberVA(pos
.begin
, errorNumber
, args
);
2060 BytecodeEmitter::reportStrictModeError(ParseNode
* pn
, unsigned errorNumber
, ...)
2062 TokenPos pos
= pn
? pn
->pn_pos
: tokenStream()->currentToken().pos
;
2065 va_start(args
, errorNumber
);
2066 bool result
= tokenStream()->reportStrictModeErrorNumberVA(pos
.begin
, sc
->strict
,
2073 EmitNewInit(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, JSProtoKey key
)
2075 const size_t len
= 1 + UINT32_INDEX_LEN
;
2076 ptrdiff_t offset
= EmitCheck(cx
, bce
, len
);
2080 jsbytecode
* code
= bce
->code(offset
);
2081 code
[0] = JSOP_NEWINIT
;
2082 code
[1] = jsbytecode(key
);
2086 UpdateDepth(cx
, bce
, offset
);
2087 CheckTypeSet(cx
, bce
, JSOP_NEWINIT
);
2092 IteratorResultShape(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, unsigned* shape
)
2094 JS_ASSERT(bce
->script
->compileAndGo());
2096 RootedObject
obj(cx
);
2097 gc::AllocKind kind
= GuessObjectGCKind(2);
2098 obj
= NewBuiltinClassInstance(cx
, &JSObject::class_
, kind
);
2102 Rooted
<jsid
> value_id(cx
, AtomToId(cx
->names().value
));
2103 Rooted
<jsid
> done_id(cx
, AtomToId(cx
->names().done
));
2104 if (!DefineNativeProperty(cx
, obj
, value_id
, UndefinedHandleValue
, nullptr, nullptr,
2107 if (!DefineNativeProperty(cx
, obj
, done_id
, UndefinedHandleValue
, nullptr, nullptr,
2111 ObjectBox
* objbox
= bce
->parser
->newObjectBox(obj
);
2115 *shape
= bce
->objectList
.add(objbox
);
2121 EmitPrepareIteratorResult(ExclusiveContext
* cx
, BytecodeEmitter
* bce
)
2123 if (bce
->script
->compileAndGo()) {
2125 if (!IteratorResultShape(cx
, bce
, &shape
))
2127 return EmitIndex32(cx
, JSOP_NEWOBJECT
, shape
, bce
);
2130 return EmitNewInit(cx
, bce
, JSProto_Object
);
2134 EmitFinishIteratorResult(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, bool done
)
2137 if (!bce
->makeAtomIndex(cx
->names().value
, &value_id
))
2140 if (!bce
->makeAtomIndex(cx
->names().done
, &done_id
))
2143 if (!EmitIndex32(cx
, JSOP_INITPROP
, value_id
, bce
))
2145 if (Emit1(cx
, bce
, done
? JSOP_TRUE
: JSOP_FALSE
) < 0)
2147 if (!EmitIndex32(cx
, JSOP_INITPROP
, done_id
, bce
))
2149 if (Emit1(cx
, bce
, JSOP_ENDINIT
) < 0)
2155 EmitNameOp(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
, bool callContext
)
2157 if (!BindNameToSlot(cx
, bce
, pn
))
2160 JSOp op
= pn
->getOp();
2162 if (op
== JSOP_CALLEE
) {
2163 if (Emit1(cx
, bce
, op
) < 0)
2166 if (!pn
->pn_cookie
.isFree()) {
2167 JS_ASSERT(JOF_OPTYPE(op
) != JOF_ATOM
);
2168 if (!EmitVarOp(cx
, pn
, op
, bce
))
2171 if (!EmitAtomOp(cx
, pn
, op
, bce
))
2176 /* Need to provide |this| value for call */
2178 if (op
== JSOP_NAME
&& bce
->needsImplicitThis()) {
2179 if (!EmitAtomOp(cx
, pn
, JSOP_IMPLICITTHIS
, bce
))
2182 if (Emit1(cx
, bce
, JSOP_UNDEFINED
) < 0)
2191 EmitPropLHS(ExclusiveContext
* cx
, ParseNode
* pn
, JSOp op
, BytecodeEmitter
* bce
)
2193 JS_ASSERT(pn
->isKind(PNK_DOT
));
2194 ParseNode
* pn2
= pn
->maybeExpr();
2197 * If the object operand is also a dotted property reference, reverse the
2198 * list linked via pn_expr temporarily so we can iterate over it from the
2199 * bottom up (reversing again as we go), to avoid excessive recursion.
2201 if (pn2
->isKind(PNK_DOT
)) {
2202 ParseNode
* pndot
= pn2
;
2203 ParseNode
* pnup
= nullptr, *pndown
;
2204 ptrdiff_t top
= bce
->offset();
2206 /* Reverse pndot->pn_expr to point up, not down. */
2207 pndot
->pn_offset
= top
;
2208 JS_ASSERT(!pndot
->isUsed());
2209 pndown
= pndot
->pn_expr
;
2210 pndot
->pn_expr
= pnup
;
2211 if (!pndown
->isKind(PNK_DOT
))
2217 /* pndown is a primary expression, not a dotted property reference. */
2218 if (!EmitTree(cx
, bce
, pndown
))
2222 /* Walk back up the list, emitting annotated name ops. */
2223 if (!EmitAtomOp(cx
, pndot
, JSOP_GETPROP
, bce
))
2226 /* Reverse the pn_expr link again. */
2227 pnup
= pndot
->pn_expr
;
2228 pndot
->pn_expr
= pndown
;
2230 } while ((pndot
= pnup
) != nullptr);
2234 // The non-optimized case.
2235 return EmitTree(cx
, bce
, pn2
);
2239 EmitPropOp(ExclusiveContext
* cx
, ParseNode
* pn
, JSOp op
, BytecodeEmitter
* bce
)
2241 JS_ASSERT(pn
->isArity(PN_NAME
));
2243 if (!EmitPropLHS(cx
, pn
, op
, bce
))
2246 if (op
== JSOP_CALLPROP
&& Emit1(cx
, bce
, JSOP_DUP
) < 0)
2249 if (!EmitAtomOp(cx
, pn
, op
, bce
))
2252 if (op
== JSOP_CALLPROP
&& Emit1(cx
, bce
, JSOP_SWAP
) < 0)
2259 EmitPropIncDec(ExclusiveContext
* cx
, ParseNode
* pn
, BytecodeEmitter
* bce
)
2261 JS_ASSERT(pn
->pn_kid
->getKind() == PNK_DOT
);
2264 JSOp binop
= GetIncDecInfo(pn
->getKind(), &post
);
2266 JSOp get
= JSOP_GETPROP
;
2267 if (!EmitPropLHS(cx
, pn
->pn_kid
, get
, bce
)) // OBJ
2269 if (Emit1(cx
, bce
, JSOP_DUP
) < 0) // OBJ OBJ
2271 if (!EmitAtomOp(cx
, pn
->pn_kid
, JSOP_GETPROP
, bce
)) // OBJ V
2273 if (Emit1(cx
, bce
, JSOP_POS
) < 0) // OBJ N
2275 if (post
&& Emit1(cx
, bce
, JSOP_DUP
) < 0) // OBJ N? N
2277 if (Emit1(cx
, bce
, JSOP_ONE
) < 0) // OBJ N? N 1
2279 if (Emit1(cx
, bce
, binop
) < 0) // OBJ N? N+1
2283 if (Emit2(cx
, bce
, JSOP_PICK
, (jsbytecode
)2) < 0) // N? N+1 OBJ
2285 if (Emit1(cx
, bce
, JSOP_SWAP
) < 0) // N? OBJ N+1
2289 if (!EmitAtomOp(cx
, pn
->pn_kid
, JSOP_SETPROP
, bce
)) // N? N+1
2291 if (post
&& Emit1(cx
, bce
, JSOP_POP
) < 0) // RESULT
2298 EmitNameIncDec(ExclusiveContext
* cx
, ParseNode
* pn
, BytecodeEmitter
* bce
)
2300 const JSCodeSpec
* cs
= &js_CodeSpec
[pn
->pn_kid
->getOp()];
2302 bool global
= (cs
->format
& JOF_GNAME
);
2304 JSOp binop
= GetIncDecInfo(pn
->getKind(), &post
);
2306 if (!EmitAtomOp(cx
, pn
->pn_kid
, global
? JSOP_BINDGNAME
: JSOP_BINDNAME
, bce
)) // OBJ
2308 if (!EmitAtomOp(cx
, pn
->pn_kid
, global
? JSOP_GETGNAME
: JSOP_NAME
, bce
)) // OBJ V
2310 if (Emit1(cx
, bce
, JSOP_POS
) < 0) // OBJ N
2312 if (post
&& Emit1(cx
, bce
, JSOP_DUP
) < 0) // OBJ N? N
2314 if (Emit1(cx
, bce
, JSOP_ONE
) < 0) // OBJ N? N 1
2316 if (Emit1(cx
, bce
, binop
) < 0) // OBJ N? N+1
2320 if (Emit2(cx
, bce
, JSOP_PICK
, (jsbytecode
)2) < 0) // N? N+1 OBJ
2322 if (Emit1(cx
, bce
, JSOP_SWAP
) < 0) // N? OBJ N+1
2326 if (!EmitAtomOp(cx
, pn
->pn_kid
, global
? JSOP_SETGNAME
: JSOP_SETNAME
, bce
)) // N? N+1
2328 if (post
&& Emit1(cx
, bce
, JSOP_POP
) < 0) // RESULT
2335 * Emit bytecode to put operands for a JSOP_GETELEM/CALLELEM/SETELEM/DELELEM
2336 * opcode onto the stack in the right order. In the case of SETELEM, the
2337 * value to be assigned must already be pushed.
2340 EmitElemOperands(ExclusiveContext
* cx
, ParseNode
* pn
, JSOp op
, BytecodeEmitter
* bce
)
2342 JS_ASSERT(pn
->isArity(PN_BINARY
));
2343 if (!EmitTree(cx
, bce
, pn
->pn_left
))
2345 if (op
== JSOP_CALLELEM
&& Emit1(cx
, bce
, JSOP_DUP
) < 0)
2347 if (!EmitTree(cx
, bce
, pn
->pn_right
))
2349 if (op
== JSOP_SETELEM
&& Emit2(cx
, bce
, JSOP_PICK
, (jsbytecode
)2) < 0)
2355 EmitElemOpBase(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, JSOp op
)
2357 if (Emit1(cx
, bce
, op
) < 0)
2359 CheckTypeSet(cx
, bce
, op
);
2361 if (op
== JSOP_CALLELEM
) {
2362 if (Emit1(cx
, bce
, JSOP_SWAP
) < 0)
2369 EmitElemOp(ExclusiveContext
* cx
, ParseNode
* pn
, JSOp op
, BytecodeEmitter
* bce
)
2371 return EmitElemOperands(cx
, pn
, op
, bce
) && EmitElemOpBase(cx
, bce
, op
);
2375 EmitElemIncDec(ExclusiveContext
* cx
, ParseNode
* pn
, BytecodeEmitter
* bce
)
2377 JS_ASSERT(pn
->pn_kid
->getKind() == PNK_ELEM
);
2379 if (!EmitElemOperands(cx
, pn
->pn_kid
, JSOP_GETELEM
, bce
))
2383 JSOp binop
= GetIncDecInfo(pn
->getKind(), &post
);
2386 * We need to convert the key to an object id first, so that we do not do
2387 * it inside both the GETELEM and the SETELEM.
2390 if (Emit1(cx
, bce
, JSOP_TOID
) < 0) // OBJ KEY
2392 if (Emit1(cx
, bce
, JSOP_DUP2
) < 0) // OBJ KEY OBJ KEY
2394 if (!EmitElemOpBase(cx
, bce
, JSOP_GETELEM
)) // OBJ KEY V
2396 if (Emit1(cx
, bce
, JSOP_POS
) < 0) // OBJ KEY N
2398 if (post
&& Emit1(cx
, bce
, JSOP_DUP
) < 0) // OBJ KEY N? N
2400 if (Emit1(cx
, bce
, JSOP_ONE
) < 0) // OBJ KEY N? N 1
2402 if (Emit1(cx
, bce
, binop
) < 0) // OBJ KEY N? N+1
2406 if (Emit2(cx
, bce
, JSOP_PICK
, (jsbytecode
)3) < 0) // KEY N N+1 OBJ
2408 if (Emit2(cx
, bce
, JSOP_PICK
, (jsbytecode
)3) < 0) // N N+1 OBJ KEY
2410 if (Emit2(cx
, bce
, JSOP_PICK
, (jsbytecode
)2) < 0) // N OBJ KEY N+1
2414 if (!EmitElemOpBase(cx
, bce
, JSOP_SETELEM
)) // N? N+1
2416 if (post
&& Emit1(cx
, bce
, JSOP_POP
) < 0) // RESULT
2423 EmitNumberOp(ExclusiveContext
* cx
, double dval
, BytecodeEmitter
* bce
)
2430 if (NumberIsInt32(dval
, &ival
)) {
2432 return Emit1(cx
, bce
, JSOP_ZERO
) >= 0;
2434 return Emit1(cx
, bce
, JSOP_ONE
) >= 0;
2435 if ((int)(int8_t)ival
== ival
)
2436 return Emit2(cx
, bce
, JSOP_INT8
, (jsbytecode
)(int8_t)ival
) >= 0;
2439 if (u
< JS_BIT(16)) {
2440 EMIT_UINT16_IMM_OP(JSOP_UINT16
, u
);
2441 } else if (u
< JS_BIT(24)) {
2442 off
= EmitN(cx
, bce
, JSOP_UINT24
, 3);
2445 pc
= bce
->code(off
);
2448 off
= EmitN(cx
, bce
, JSOP_INT32
, 4);
2451 pc
= bce
->code(off
);
2452 SET_INT32(pc
, ival
);
2457 if (!bce
->constList
.append(DoubleValue(dval
)))
2460 return EmitIndex32(cx
, JSOP_DOUBLE
, bce
->constList
.length() - 1, bce
);
2464 SetJumpOffsetAt(BytecodeEmitter
* bce
, ptrdiff_t off
)
2466 SET_JUMP_OFFSET(bce
->code(off
), bce
->offset() - off
);
2470 PushUndefinedValues(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, unsigned n
)
2472 for (unsigned i
= 0; i
< n
; ++i
) {
2473 if (Emit1(cx
, bce
, JSOP_UNDEFINED
) < 0)
2480 InitializeBlockScopedLocalsFromStack(ExclusiveContext
* cx
, BytecodeEmitter
* bce
,
2481 Handle
<StaticBlockObject
*> blockObj
)
2483 for (unsigned i
= blockObj
->numVariables(); i
> 0; --i
) {
2484 if (blockObj
->isAliased(i
- 1)) {
2487 sc
.setSlot(BlockObject::RESERVED_SLOTS
+ i
- 1);
2488 if (!EmitAliasedVarOp(cx
, JSOP_SETALIASEDVAR
, sc
, bce
))
2491 unsigned local
= blockObj
->blockIndexToLocalIndex(i
- 1);
2492 if (!EmitUnaliasedVarOp(cx
, JSOP_SETLOCAL
, local
, bce
))
2495 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
2502 EnterBlockScope(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, StmtInfoBCE
* stmtInfo
,
2503 ObjectBox
* objbox
, unsigned alreadyPushed
= 0)
2505 // Initial values for block-scoped locals.
2506 Rooted
<StaticBlockObject
*> blockObj(cx
, &objbox
->object
->as
<StaticBlockObject
>());
2507 if (!PushUndefinedValues(cx
, bce
, blockObj
->numVariables() - alreadyPushed
))
2510 if (!EnterNestedScope(cx
, bce
, stmtInfo
, objbox
, STMT_BLOCK
))
2513 if (!InitializeBlockScopedLocalsFromStack(cx
, bce
, blockObj
))
2520 * Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047.
2521 * LLVM is deciding to inline this function which uses a lot of stack space
2522 * into EmitTree which is recursive and uses relatively little stack space.
2524 MOZ_NEVER_INLINE
static bool
2525 EmitSwitch(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
2529 ptrdiff_t top
, off
, defaultOffset
;
2530 ParseNode
* pn2
, *pn3
, *pn4
;
2536 /* Try for most optimal, fall back if not dense ints. */
2537 switchOp
= JSOP_TABLESWITCH
;
2542 JS_ASSERT(pn2
->isKind(PNK_LEXICALSCOPE
) || pn2
->isKind(PNK_STATEMENTLIST
));
2544 /* Push the discriminant. */
2545 if (!EmitTree(cx
, bce
, pn
->pn_left
))
2548 StmtInfoBCE
stmtInfo(cx
);
2549 if (pn2
->isKind(PNK_LEXICALSCOPE
)) {
2550 if (!EnterBlockScope(cx
, bce
, &stmtInfo
, pn2
->pn_objbox
, 0))
2553 stmtInfo
.type
= STMT_SWITCH
;
2554 stmtInfo
.update
= top
= bce
->offset();
2555 /* Advance pn2 to refer to the switch case list. */
2558 JS_ASSERT(pn2
->isKind(PNK_STATEMENTLIST
));
2559 top
= bce
->offset();
2560 PushStatementBCE(bce
, &stmtInfo
, STMT_SWITCH
, top
);
2563 /* Switch bytecodes run from here till end of final case. */
2564 uint32_t caseCount
= pn2
->pn_count
;
2565 uint32_t tableLength
= 0;
2566 UniquePtr
<ParseNode
*[], JS::FreePolicy
> table(nullptr);
2568 if (caseCount
> JS_BIT(16)) {
2569 bce
->parser
->tokenStream
.reportError(JSMSG_TOO_MANY_CASES
);
2573 if (caseCount
== 0 ||
2575 (hasDefault
= (pn2
->pn_head
->isKind(PNK_DEFAULT
))))) {
2581 #define INTMAP_LENGTH 256
2582 jsbitmap intmap_space
[INTMAP_LENGTH
];
2583 jsbitmap
* intmap
= nullptr;
2584 int32_t intmap_bitlen
= 0;
2586 low
= JSVAL_INT_MAX
;
2587 high
= JSVAL_INT_MIN
;
2589 for (pn3
= pn2
->pn_head
; pn3
; pn3
= pn3
->pn_next
) {
2590 if (pn3
->isKind(PNK_DEFAULT
)) {
2592 caseCount
--; /* one of the "cases" was the default */
2596 JS_ASSERT(pn3
->isKind(PNK_CASE
));
2597 if (switchOp
== JSOP_CONDSWITCH
)
2600 JS_ASSERT(switchOp
== JSOP_TABLESWITCH
);
2604 if (pn4
->getKind() != PNK_NUMBER
) {
2605 switchOp
= JSOP_CONDSWITCH
;
2610 if (!NumberIsInt32(pn4
->pn_dval
, &i
)) {
2611 switchOp
= JSOP_CONDSWITCH
;
2615 if ((unsigned)(i
+ (int)JS_BIT(15)) >= (unsigned)JS_BIT(16)) {
2616 switchOp
= JSOP_CONDSWITCH
;
2625 * Check for duplicates, which require a JSOP_CONDSWITCH.
2626 * We bias i by 65536 if it's negative, and hope that's a rare
2627 * case (because it requires a malloc'd bitmap).
2631 if (i
>= intmap_bitlen
) {
2633 size_t(i
) < (INTMAP_LENGTH
* JS_BITMAP_NBITS
)) {
2634 intmap
= intmap_space
;
2635 intmap_bitlen
= INTMAP_LENGTH
* JS_BITMAP_NBITS
;
2637 /* Just grab 8K for the worst-case bitmap. */
2638 intmap_bitlen
= JS_BIT(16);
2639 intmap
= cx
->pod_malloc
<jsbitmap
>(JS_BIT(16) / JS_BITMAP_NBITS
);
2641 js_ReportOutOfMemory(cx
);
2645 memset(intmap
, 0, size_t(intmap_bitlen
) / CHAR_BIT
);
2647 if (JS_TEST_BIT(intmap
, i
)) {
2648 switchOp
= JSOP_CONDSWITCH
;
2651 JS_SET_BIT(intmap
, i
);
2654 if (intmap
&& intmap
!= intmap_space
)
2660 * Compute table length and select condswitch instead if overlarge or
2661 * more than half-sparse.
2663 if (switchOp
== JSOP_TABLESWITCH
) {
2664 tableLength
= (uint32_t)(high
- low
+ 1);
2665 if (tableLength
>= JS_BIT(16) || tableLength
> 2 * caseCount
)
2666 switchOp
= JSOP_CONDSWITCH
;
2671 * The note has one or two offsets: first tells total switch code length;
2672 * second (if condswitch) tells offset to first JSOP_CASE.
2674 if (switchOp
== JSOP_CONDSWITCH
) {
2675 /* 0 bytes of immediate for unoptimized switch. */
2677 noteIndex
= NewSrcNote3(cx
, bce
, SRC_CONDSWITCH
, 0, 0);
2679 JS_ASSERT(switchOp
== JSOP_TABLESWITCH
);
2681 /* 3 offsets (len, low, high) before the table, 1 per entry. */
2682 switchSize
= (size_t)(JUMP_OFFSET_LEN
* (3 + tableLength
));
2683 noteIndex
= NewSrcNote2(cx
, bce
, SRC_TABLESWITCH
, 0);
2688 /* Emit switchOp followed by switchSize bytes of jump or lookup table. */
2689 if (EmitN(cx
, bce
, switchOp
, switchSize
) < 0)
2693 if (switchOp
== JSOP_CONDSWITCH
) {
2694 int caseNoteIndex
= -1;
2695 bool beforeCases
= true;
2697 /* Emit code for evaluating cases and jumping to case statements. */
2698 for (pn3
= pn2
->pn_head
; pn3
; pn3
= pn3
->pn_next
) {
2700 if (pn4
&& !EmitTree(cx
, bce
, pn4
))
2702 if (caseNoteIndex
>= 0) {
2703 /* off is the previous JSOP_CASE's bytecode offset. */
2704 if (!SetSrcNoteOffset(cx
, bce
, (unsigned)caseNoteIndex
, 0, bce
->offset() - off
))
2708 JS_ASSERT(pn3
->isKind(PNK_DEFAULT
));
2711 caseNoteIndex
= NewSrcNote2(cx
, bce
, SRC_NEXTCASE
, 0);
2712 if (caseNoteIndex
< 0)
2714 off
= EmitJump(cx
, bce
, JSOP_CASE
, 0);
2717 pn3
->pn_offset
= off
;
2719 unsigned noteCount
, noteCountDelta
;
2721 /* Switch note's second offset is to first JSOP_CASE. */
2722 noteCount
= bce
->notes().length();
2723 if (!SetSrcNoteOffset(cx
, bce
, (unsigned)noteIndex
, 1, off
- top
))
2725 noteCountDelta
= bce
->notes().length() - noteCount
;
2726 if (noteCountDelta
!= 0)
2727 caseNoteIndex
+= noteCountDelta
;
2728 beforeCases
= false;
2733 * If we didn't have an explicit default (which could fall in between
2734 * cases, preventing us from fusing this SetSrcNoteOffset with the call
2735 * in the loop above), link the last case to the implicit default for
2736 * the benefit of IonBuilder.
2739 caseNoteIndex
>= 0 &&
2740 !SetSrcNoteOffset(cx
, bce
, (unsigned)caseNoteIndex
, 0, bce
->offset() - off
))
2745 /* Emit default even if no explicit default statement. */
2746 defaultOffset
= EmitJump(cx
, bce
, JSOP_DEFAULT
, 0);
2747 if (defaultOffset
< 0)
2750 JS_ASSERT(switchOp
== JSOP_TABLESWITCH
);
2751 pc
= bce
->code(top
+ JUMP_OFFSET_LEN
);
2753 /* Fill in switch bounds, which we know fit in 16-bit offsets. */
2754 SET_JUMP_OFFSET(pc
, low
);
2755 pc
+= JUMP_OFFSET_LEN
;
2756 SET_JUMP_OFFSET(pc
, high
);
2757 pc
+= JUMP_OFFSET_LEN
;
2760 * Use malloc to avoid arena bloat for programs with many switches.
2761 * UniquePtr takes care of freeing it on exit.
2763 if (tableLength
!= 0) {
2764 table
= cx
->make_zeroed_pod_array
<ParseNode
*>(tableLength
);
2767 for (pn3
= pn2
->pn_head
; pn3
; pn3
= pn3
->pn_next
) {
2768 if (pn3
->isKind(PNK_DEFAULT
))
2771 JS_ASSERT(pn3
->isKind(PNK_CASE
));
2774 JS_ASSERT(pn4
->getKind() == PNK_NUMBER
);
2776 int32_t i
= int32_t(pn4
->pn_dval
);
2777 JS_ASSERT(double(i
) == pn4
->pn_dval
);
2780 JS_ASSERT(uint32_t(i
) < tableLength
);
2786 /* Emit code for each case's statements, copying pn_offset up to pn3. */
2787 for (pn3
= pn2
->pn_head
; pn3
; pn3
= pn3
->pn_next
) {
2788 if (switchOp
== JSOP_CONDSWITCH
&& !pn3
->isKind(PNK_DEFAULT
))
2789 SetJumpOffsetAt(bce
, pn3
->pn_offset
);
2790 pn4
= pn3
->pn_right
;
2791 if (!EmitTree(cx
, bce
, pn4
))
2793 pn3
->pn_offset
= pn4
->pn_offset
;
2794 if (pn3
->isKind(PNK_DEFAULT
))
2795 off
= pn3
->pn_offset
- top
;
2799 /* If no default case, offset for default is to end of switch. */
2800 off
= bce
->offset() - top
;
2803 /* We better have set "off" by now. */
2804 JS_ASSERT(off
!= -1);
2806 /* Set the default offset (to end of switch if no default). */
2807 if (switchOp
== JSOP_CONDSWITCH
) {
2809 JS_ASSERT(defaultOffset
!= -1);
2810 SET_JUMP_OFFSET(bce
->code(defaultOffset
), off
- (defaultOffset
- top
));
2812 pc
= bce
->code(top
);
2813 SET_JUMP_OFFSET(pc
, off
);
2814 pc
+= JUMP_OFFSET_LEN
;
2817 /* Set the SRC_SWITCH note's offset operand to tell end of switch. */
2818 off
= bce
->offset() - top
;
2819 if (!SetSrcNoteOffset(cx
, bce
, (unsigned)noteIndex
, 0, off
))
2822 if (switchOp
== JSOP_TABLESWITCH
) {
2823 /* Skip over the already-initialized switch bounds. */
2824 pc
+= 2 * JUMP_OFFSET_LEN
;
2826 /* Fill in the jump table, if there is one. */
2827 for (uint32_t i
= 0; i
< tableLength
; i
++) {
2829 off
= pn3
? pn3
->pn_offset
- top
: 0;
2830 SET_JUMP_OFFSET(pc
, off
);
2831 pc
+= JUMP_OFFSET_LEN
;
2835 if (pn
->pn_right
->isKind(PNK_LEXICALSCOPE
)) {
2836 if (!LeaveNestedScope(cx
, bce
, &stmtInfo
))
2839 if (!PopStatementBCE(cx
, bce
))
2847 BytecodeEmitter::isRunOnceLambda()
2849 // The run once lambda flags set by the parser are approximate, and we look
2850 // at properties of the function itself before deciding to emit a function
2851 // as a run once lambda.
2853 if (!(parent
&& parent
->emittingRunOnceLambda
) && !lazyRunOnceLambda
)
2856 FunctionBox
* funbox
= sc
->asFunctionBox();
2857 return !funbox
->argumentsHasLocalBinding() &&
2858 !funbox
->isGenerator() &&
2859 !funbox
->function()->name();
2863 frontend::EmitFunctionScript(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* body
)
2866 * IonBuilder has assumptions about what may occur immediately after
2867 * script->main (e.g., in the case of destructuring params). Thus, put the
2868 * following ops into the range [script->code, script->main). Note:
2869 * execution starts from script->code, so this has no semantic effect.
2872 FunctionBox
* funbox
= bce
->sc
->asFunctionBox();
2873 if (funbox
->argumentsHasLocalBinding()) {
2874 JS_ASSERT(bce
->offset() == 0); /* See JSScript::argumentsBytecode. */
2875 bce
->switchToProlog();
2876 if (Emit1(cx
, bce
, JSOP_ARGUMENTS
) < 0)
2878 InternalBindingsHandle
bindings(bce
->script
, &bce
->script
->bindings
);
2879 uint32_t varIndex
= Bindings::argumentsVarIndex(cx
, bindings
);
2880 if (bce
->script
->varIsAliased(varIndex
)) {
2883 JS_ALWAYS_TRUE(LookupAliasedNameSlot(bce
->script
, cx
->names().arguments
, &sc
));
2884 if (!EmitAliasedVarOp(cx
, JSOP_SETALIASEDVAR
, sc
, bce
))
2887 if (!EmitUnaliasedVarOp(cx
, JSOP_SETLOCAL
, varIndex
, bce
))
2890 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
2892 bce
->switchToMain();
2895 if (funbox
->isGenerator()) {
2896 bce
->switchToProlog();
2897 if (Emit1(cx
, bce
, JSOP_GENERATOR
) < 0)
2899 bce
->switchToMain();
2903 * Emit a prologue for run-once scripts which will deoptimize JIT code if
2904 * the script ends up running multiple times via foo.caller related
2907 bool runOnce
= bce
->isRunOnceLambda();
2909 bce
->switchToProlog();
2910 if (Emit1(cx
, bce
, JSOP_RUNONCE
) < 0)
2912 bce
->switchToMain();
2915 if (!EmitTree(cx
, bce
, body
))
2918 // If we fall off the end of an ES6 generator, return a boxed iterator
2919 // result object of the form { value: undefined, done: true }.
2920 if (bce
->sc
->isFunctionBox() && bce
->sc
->asFunctionBox()->isStarGenerator()) {
2921 if (!EmitPrepareIteratorResult(cx
, bce
))
2923 if (Emit1(cx
, bce
, JSOP_UNDEFINED
) < 0)
2925 if (!EmitFinishIteratorResult(cx
, bce
, true))
2928 // No need to check for finally blocks, etc as in EmitReturn.
2929 if (Emit1(cx
, bce
, JSOP_RETURN
) < 0)
2934 * Always end the script with a JSOP_RETRVAL. Some other parts of the codebase
2935 * depend on this opcode, e.g. js_InternalInterpret.
2937 if (Emit1(cx
, bce
, JSOP_RETRVAL
) < 0)
2940 if (!JSScript::fullyInitFromEmitter(cx
, bce
->script
, bce
))
2944 * If this function is only expected to run once, mark the script so that
2945 * initializers created within it may be given more precise types.
2948 bce
->script
->setTreatAsRunOnce();
2949 JS_ASSERT(!bce
->script
->hasRunOnce());
2952 /* Initialize fun->script() so that the debugger has a valid fun->script(). */
2953 RootedFunction
fun(cx
, bce
->script
->functionNonDelazifying());
2954 JS_ASSERT(fun
->isInterpreted());
2956 if (fun
->isInterpretedLazy())
2957 fun
->setUnlazifiedScript(bce
->script
);
2959 fun
->setScript(bce
->script
);
2961 bce
->tellDebuggerAboutCompiledScript(cx
);
2967 MaybeEmitVarDecl(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, JSOp prologOp
, ParseNode
* pn
,
2972 if (!pn
->pn_cookie
.isFree()) {
2973 atomIndex
= pn
->pn_cookie
.slot();
2975 if (!bce
->makeAtomIndex(pn
->pn_atom
, &atomIndex
))
2979 if (JOF_OPTYPE(pn
->getOp()) == JOF_ATOM
&&
2980 (!bce
->sc
->isFunctionBox() || bce
->sc
->asFunctionBox()->isHeavyweight()))
2982 bce
->switchToProlog();
2983 if (!UpdateSourceCoordNotes(cx
, bce
, pn
->pn_pos
.begin
))
2985 if (!EmitIndexOp(cx
, prologOp
, atomIndex
, bce
))
2987 bce
->switchToMain();
2991 *result
= atomIndex
;
/*
 * This enum tells EmitVariables and the destructuring functions how emit the
 * given Parser::variables parse tree. In the base case, DefineVars, the caller
 * only wants variables to be defined in the prologue (if necessary). For
 * PushInitialValues, variable initializer expressions are evaluated and left
 * on the stack. For InitializeVars, the initializer expressions values are
 * assigned (to local variables) and popped.
 *
 * NOTE(review): only PushInitialValues = 1 is visible in the garbled listing;
 * DefineVars/InitializeVars values inferred from usage — verify against
 * upstream.
 */
enum VarEmitOption
{
    DefineVars        = 0,
    PushInitialValues = 1,
    InitializeVars    = 2
};
3011 (*DestructuringDeclEmitter
)(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, JSOp prologOp
, ParseNode
* pn
);
3014 EmitDestructuringDecl(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, JSOp prologOp
, ParseNode
* pn
)
3016 JS_ASSERT(pn
->isKind(PNK_NAME
));
3017 if (!BindNameToSlot(cx
, bce
, pn
))
3020 JS_ASSERT(!pn
->isOp(JSOP_CALLEE
));
3021 return MaybeEmitVarDecl(cx
, bce
, prologOp
, pn
, nullptr);
3025 EmitDestructuringDecls(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, JSOp prologOp
,
3028 if (pattern
->isKind(PNK_ARRAY
)) {
3029 for (ParseNode
* element
= pattern
->pn_head
; element
; element
= element
->pn_next
) {
3030 if (element
->isKind(PNK_ELISION
))
3032 ParseNode
* target
= element
;
3033 if (element
->isKind(PNK_SPREAD
)) {
3034 JS_ASSERT(element
->pn_kid
->isKind(PNK_NAME
));
3035 target
= element
->pn_kid
;
3037 DestructuringDeclEmitter emitter
=
3038 target
->isKind(PNK_NAME
) ? EmitDestructuringDecl
: EmitDestructuringDecls
;
3039 if (!emitter(cx
, bce
, prologOp
, target
))
3045 MOZ_ASSERT(pattern
->isKind(PNK_OBJECT
));
3046 for (ParseNode
* member
= pattern
->pn_head
; member
; member
= member
->pn_next
) {
3047 ParseNode
* target
= member
->pn_right
;
3048 DestructuringDeclEmitter emitter
=
3049 target
->isKind(PNK_NAME
) ? EmitDestructuringDecl
: EmitDestructuringDecls
;
3050 if (!emitter(cx
, bce
, prologOp
, target
))
3057 EmitDestructuringOpsHelper(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
,
3058 VarEmitOption emitOption
);
3061 * EmitDestructuringLHS assumes the to-be-destructured value has been pushed on
3062 * the stack and emits code to destructure a single lhs expression (either a
3063 * name or a compound []/{} expression).
3065 * If emitOption is InitializeVars, the to-be-destructured value is assigned to
3066 * locals and ultimately the initial slot is popped (-1 total depth change).
3068 * If emitOption is PushInitialValues, the to-be-destructured value is replaced
3069 * with the initial values of the N (where 0 <= N) variables assigned in the
3070 * lhs expression. (Same post-condition as EmitDestructuringOpsHelper)
3073 EmitDestructuringLHS(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
, VarEmitOption emitOption
)
3075 JS_ASSERT(emitOption
!= DefineVars
);
3077 // Now emit the lvalue opcode sequence. If the lvalue is a nested
3078 // destructuring initialiser-form, call ourselves to handle it, then pop
3079 // the matched value. Otherwise emit an lvalue bytecode sequence followed
3080 // by an assignment op.
3081 if (pn
->isKind(PNK_SPREAD
))
3083 if (pn
->isKind(PNK_ARRAY
) || pn
->isKind(PNK_OBJECT
)) {
3084 if (!EmitDestructuringOpsHelper(cx
, bce
, pn
, emitOption
))
3086 if (emitOption
== InitializeVars
) {
3087 // Per its post-condition, EmitDestructuringOpsHelper has left the
3088 // to-be-destructured value on top of the stack.
3089 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
3092 } else if (emitOption
== PushInitialValues
) {
3093 // The lhs is a simple name so the to-be-destructured value is
3094 // its initial value and there is nothing to do.
3095 JS_ASSERT(pn
->getOp() == JSOP_SETLOCAL
);
3096 JS_ASSERT(pn
->pn_dflags
& PND_BOUND
);
3098 switch (pn
->getKind()) {
3100 if (!BindNameToSlot(cx
, bce
, pn
))
3103 // Allow 'const [x,y] = o', make 'const x,y; [x,y] = o' a nop.
3104 if (pn
->isConst() && !pn
->isDefn())
3105 return Emit1(cx
, bce
, JSOP_POP
) >= 0;
3107 switch (pn
->getOp()) {
3110 case JSOP_SETCONST
: {
3111 // This is like ordinary assignment, but with one difference.
3113 // In `a = b`, we first determine a binding for `a` (using
3114 // JSOP_BINDNAME or JSOP_BINDGNAME), then we evaluate `b`, then
3115 // a JSOP_SETNAME instruction.
3117 // In `[a] = [b]`, per spec, `b` is evaluated first, then we
3118 // determine a binding for `a`. Then we need to do assignment--
3119 // but the operands are on the stack in the wrong order for
3120 // JSOP_SETPROP, so we have to add a JSOP_SWAP.
3122 if (!bce
->makeAtomIndex(pn
->pn_atom
, &atomIndex
))
3125 if (!pn
->isOp(JSOP_SETCONST
)) {
3126 JSOp bindOp
= pn
->isOp(JSOP_SETNAME
) ? JSOP_BINDNAME
: JSOP_BINDGNAME
;
3127 if (!EmitIndex32(cx
, bindOp
, atomIndex
, bce
))
3129 if (Emit1(cx
, bce
, JSOP_SWAP
) < 0)
3133 if (!EmitIndexOp(cx
, pn
->getOp(), atomIndex
, bce
))
3140 if (!EmitVarOp(cx
, pn
, pn
->getOp(), bce
))
3145 MOZ_CRASH("EmitDestructuringLHS: bad name op");
3150 // See the (PNK_NAME, JSOP_SETNAME) case above.
3152 // In `a.x = b`, `a` is evaluated first, then `b`, then a
3153 // JSOP_SETPROP instruction.
3155 // In `[a.x] = [b]`, per spec, `b` is evaluated before `a`. Then we
3156 // need a property set -- but the operands are on the stack in the
3157 // wrong order for JSOP_SETPROP, so we have to add a JSOP_SWAP.
3158 if (!EmitTree(cx
, bce
, pn
->pn_expr
))
3160 if (Emit1(cx
, bce
, JSOP_SWAP
) < 0)
3162 if (!EmitAtomOp(cx
, pn
, JSOP_SETPROP
, bce
))
3167 // See the comment at `case PNK_DOT:` above. This case,
3168 // `[a[x]] = [b]`, is handled much the same way. The JSOP_SWAP
3169 // is emitted by EmitElemOperands.
3170 if (!EmitElemOp(cx
, pn
, JSOP_SETELEM
, bce
))
3175 JS_ASSERT(pn
->pn_xflags
& PNX_SETCALL
);
3176 if (!EmitTree(cx
, bce
, pn
))
3179 // Pop the call return value. Below, we pop the RHS too, balancing
3180 // the stack --- presumably for the benefit of bytecode
3181 // analysis. (The interpreter will never reach these instructions
3182 // since we just emitted JSOP_SETCALL, which always throws. It's
3183 // possible no analyses actually depend on this either.)
3184 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
3189 MOZ_CRASH("EmitDestructuringLHS: bad lhs kind");
3192 // Pop the assigned value.
3193 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
3200 static bool EmitSpread(ExclusiveContext
* cx
, BytecodeEmitter
* bce
);
3201 static bool EmitIterator(ExclusiveContext
* cx
, BytecodeEmitter
* bce
);
3204 * EmitIteratorNext will pop iterator from the top of the stack.
3205 * It will push the result of |.next()| onto the stack.
3208 EmitIteratorNext(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
=nullptr)
3210 if (Emit1(cx
, bce
, JSOP_DUP
) < 0) // ... ITER ITER
3212 if (!EmitAtomOp(cx
, cx
->names().next
, JSOP_CALLPROP
, bce
)) // ... ITER NEXT
3214 if (Emit1(cx
, bce
, JSOP_SWAP
) < 0) // ... NEXT ITER
3216 if (EmitCall(cx
, bce
, JSOP_CALL
, 0, pn
) < 0) // ... RESULT
3218 CheckTypeSet(cx
, bce
, JSOP_CALL
);
3223 * Recursive helper for EmitDestructuringOps.
3224 * EmitDestructuringOpsHelper assumes the to-be-destructured value has been
3225 * pushed on the stack and emits code to destructure each part of a [] or {}
3228 * If emitOption is InitializeVars, the initial to-be-destructured value is
3229 * left untouched on the stack and the overall depth is not changed.
3231 * If emitOption is PushInitialValues, the to-be-destructured value is replaced
3232 * with the initial values of the N (where 0 <= N) variables assigned in the
3233 * lhs expression. (Same post-condition as EmitDestructuringLHS)
3236 EmitDestructuringOpsHelper(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
,
3237 VarEmitOption emitOption
)
3239 JS_ASSERT(emitOption
!= DefineVars
);
3241 ParseNode
* pn2
, *pn3
;
3243 bool needToPopIterator
= false;
3246 int stackDepth
= bce
->stackDepth
;
3247 JS_ASSERT(stackDepth
!= 0);
3248 JS_ASSERT(pn
->isArity(PN_LIST
));
3249 JS_ASSERT(pn
->isKind(PNK_ARRAY
) || pn
->isKind(PNK_OBJECT
));
3253 * When destructuring an array, use an iterator to walk it, instead of index lookup.
3254 * InitializeVars expects us to leave the *original* value on the stack.
3256 if (pn
->isKind(PNK_ARRAY
)) {
3257 if (emitOption
== InitializeVars
) {
3258 if (Emit1(cx
, bce
, JSOP_DUP
) < 0) // OBJ OBJ
3261 if (!EmitIterator(cx
, bce
)) // OBJ? ITER
3263 needToPopIterator
= true;
3266 for (pn2
= pn
->pn_head
; pn2
; pn2
= pn2
->pn_next
) {
3268 * Now push the property name currently being matched, which is the
3269 * current property name "label" on the left of a colon in the object initialiser.
3270 * Set pn3 to the lvalue node, which is in the value-initializing position.
3272 if (pn
->isKind(PNK_OBJECT
)) {
3274 JS_ASSERT(pn2
->isKind(PNK_COLON
) || pn2
->isKind(PNK_SHORTHAND
));
3276 /* Duplicate the value being destructured to use as a reference base. */
3277 if (Emit1(cx
, bce
, JSOP_DUP
) < 0)
3280 ParseNode
* key
= pn2
->pn_left
;
3281 if (key
->isKind(PNK_NUMBER
)) {
3282 if (!EmitNumberOp(cx
, key
->pn_dval
, bce
))
3284 } else if (key
->isKind(PNK_NAME
) || key
->isKind(PNK_STRING
)) {
3285 PropertyName
* name
= key
->pn_atom
->asPropertyName();
3287 // The parser already checked for atoms representing indexes and
3288 // used PNK_NUMBER instead, but also watch for ids which TI treats
3289 // as indexes for simplification of downstream analysis.
3290 jsid id
= NameToId(name
);
3291 if (id
!= types::IdToTypeId(id
)) {
3292 if (!EmitTree(cx
, bce
, key
))
3295 if (!EmitAtomOp(cx
, name
, JSOP_GETPROP
, bce
))
3300 JS_ASSERT(key
->isKind(PNK_COMPUTED_NAME
));
3301 if (!EmitTree(cx
, bce
, key
->pn_kid
))
3307 * Ok, get the value of the matching property name. This leaves
3308 * that value on top of the value being destructured, so the stack
3309 * is one deeper than when we started.
3311 if (!EmitElemOpBase(cx
, bce
, JSOP_GETELEM
))
3313 JS_ASSERT(bce
->stackDepth
>= stackDepth
+ 1);
3316 pn3
= pn2
->pn_right
;
3318 JS_ASSERT(pn
->isKind(PNK_ARRAY
));
3320 if (pn2
->isKind(PNK_SPREAD
)) {
3321 /* Create a new array with the rest of the iterator */
3322 ptrdiff_t off
= EmitN(cx
, bce
, JSOP_NEWARRAY
, 3); // ITER ARRAY
3325 CheckTypeSet(cx
, bce
, JSOP_NEWARRAY
);
3326 jsbytecode
* pc
= bce
->code(off
);
3329 if (!EmitNumberOp(cx
, 0, bce
)) // ITER ARRAY INDEX
3331 if (!EmitSpread(cx
, bce
)) // ARRAY INDEX
3333 if (Emit1(cx
, bce
, JSOP_POP
) < 0) // ARRAY
3335 if (Emit1(cx
, bce
, JSOP_ENDINIT
) < 0)
3337 needToPopIterator
= false;
3339 if (Emit1(cx
, bce
, JSOP_DUP
) < 0) // ITER ITER
3341 if (!EmitIteratorNext(cx
, bce
, pn
)) // ITER RESULT
3343 if (Emit1(cx
, bce
, JSOP_DUP
) < 0) // ITER RESULT RESULT
3345 if (!EmitAtomOp(cx
, cx
->names().done
, JSOP_GETPROP
, bce
)) // ITER RESULT DONE?
3348 // Emit (result.done ? undefined : result.value)
3349 // This is mostly copied from EmitConditionalExpression, except that this code
3350 // does not push new values onto the stack.
3351 ptrdiff_t noteIndex
= NewSrcNote(cx
, bce
, SRC_COND
);
3354 ptrdiff_t beq
= EmitJump(cx
, bce
, JSOP_IFEQ
, 0);
3358 if (Emit1(cx
, bce
, JSOP_POP
) < 0) // ITER
3360 if (Emit1(cx
, bce
, JSOP_UNDEFINED
) < 0) // ITER UNDEFINED
3363 /* Jump around else, fixup the branch, emit else, fixup jump. */
3364 ptrdiff_t jmp
= EmitJump(cx
, bce
, JSOP_GOTO
, 0);
3367 SetJumpOffsetAt(bce
, beq
);
3369 if (!EmitAtomOp(cx
, cx
->names().value
, JSOP_GETPROP
, bce
)) // ITER VALUE
3372 SetJumpOffsetAt(bce
, jmp
);
3373 if (!SetSrcNoteOffset(cx
, bce
, noteIndex
, 0, jmp
- beq
))
3380 /* Elision node makes a hole in the array destructurer. */
3381 if (pn3
->isKind(PNK_ELISION
)) {
3382 JS_ASSERT(pn
->isKind(PNK_ARRAY
));
3383 JS_ASSERT(pn2
== pn3
);
3384 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
3387 int32_t depthBefore
= bce
->stackDepth
;
3388 if (!EmitDestructuringLHS(cx
, bce
, pn3
, emitOption
))
3391 if (emitOption
== PushInitialValues
&&
3392 (pn
->isKind(PNK_OBJECT
) || needToPopIterator
)) {
3394 * After '[x,y]' in 'let ([[x,y], z] = o)', the stack is
3395 * | to-be-destructured-value | x | y |
3398 * so emit a pick to produce the intermediate state
3399 * | x | y | to-be-destructured-value |
3400 * before destructuring z. This gives the loop invariant that
3401 * the to-be-destructured-value is always on top of the stack.
3403 JS_ASSERT((bce
->stackDepth
- bce
->stackDepth
) >= -1);
3404 uint32_t pickDistance
= (uint32_t)((bce
->stackDepth
+ 1) - depthBefore
);
3405 if (pickDistance
> 0) {
3406 if (pickDistance
> UINT8_MAX
) {
3407 bce
->reportError(pn3
, JSMSG_TOO_MANY_LOCALS
);
3410 if (Emit2(cx
, bce
, JSOP_PICK
, (jsbytecode
)pickDistance
) < 0)
3417 if (needToPopIterator
&& Emit1(cx
, bce
, JSOP_POP
) < 0)
3420 if (emitOption
== PushInitialValues
&& pn
->isKind(PNK_OBJECT
)) {
3422 * Per the above loop invariant, to-be-destructured-value is at the top
3423 * of the stack. To achieve the post-condition, pop it.
3424 * In case of array destructuring, the above POP already took care of the iterator.
3426 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
3434 EmitDestructuringOps(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
, bool isLet
= false)
3437 * Call our recursive helper to emit the destructuring assignments and
3438 * related stack manipulations.
3440 VarEmitOption emitOption
= isLet
? PushInitialValues
: InitializeVars
;
3441 return EmitDestructuringOpsHelper(cx
, bce
, pn
, emitOption
);
3445 EmitTemplateString(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
3447 JS_ASSERT(pn
->isArity(PN_LIST
));
3449 for (ParseNode
* pn2
= pn
->pn_head
; pn2
!= NULL
; pn2
= pn2
->pn_next
) {
3450 if (pn2
->getKind() != PNK_STRING
&& pn2
->getKind() != PNK_TEMPLATE_STRING
) {
3451 // We update source notes before emitting the expression
3452 if (!UpdateSourceCoordNotes(cx
, bce
, pn2
->pn_pos
.begin
))
3455 if (!EmitTree(cx
, bce
, pn2
))
3458 if (pn2
->getKind() != PNK_STRING
&& pn2
->getKind() != PNK_TEMPLATE_STRING
) {
3459 // We need to convert the expression to a string
3460 if (Emit1(cx
, bce
, JSOP_TOSTRING
) < 0)
3464 if (pn2
!= pn
->pn_head
) {
3465 // We've pushed two strings onto the stack. Add them together, leaving just one.
3466 if (Emit1(cx
, bce
, JSOP_ADD
) < 0)
3475 EmitVariables(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
, VarEmitOption emitOption
,
3478 JS_ASSERT(pn
->isArity(PN_LIST
));
3479 JS_ASSERT(isLet
== (emitOption
== PushInitialValues
));
3482 for (ParseNode
* pn2
= pn
->pn_head
; ; pn2
= next
) {
3483 if (!UpdateSourceCoordNotes(cx
, bce
, pn2
->pn_pos
.begin
))
3485 next
= pn2
->pn_next
;
3488 if (!pn2
->isKind(PNK_NAME
)) {
3489 if (pn2
->isKind(PNK_ARRAY
) || pn2
->isKind(PNK_OBJECT
)) {
3491 * Emit variable binding ops, but not destructuring ops. The
3492 * parser (see Parser::variables) has ensured that our caller
3493 * will be the PNK_FOR/PNK_FORIN/PNK_FOROF case in EmitTree, and
3494 * that case will emit the destructuring code only after
3495 * emitting an enumerating opcode and a branch that tests
3496 * whether the enumeration ended.
3498 JS_ASSERT(emitOption
== DefineVars
);
3499 JS_ASSERT(pn
->pn_count
== 1);
3500 if (!EmitDestructuringDecls(cx
, bce
, pn
->getOp(), pn2
))
3506 * A destructuring initialiser assignment preceded by var will
3507 * never occur to the left of 'in' in a for-in loop. As with 'for
3508 * (var x = i in o)...', this will cause the entire 'var [a, b] =
3509 * i' to be hoisted out of the loop.
3511 JS_ASSERT(pn2
->isKind(PNK_ASSIGN
));
3512 JS_ASSERT(pn2
->isOp(JSOP_NOP
));
3513 JS_ASSERT(emitOption
!= DefineVars
);
3516 * To allow the front end to rewrite var f = x; as f = x; when a
3517 * function f(){} precedes the var, detect simple name assignment
3518 * here and initialize the name.
3520 if (pn2
->pn_left
->isKind(PNK_NAME
)) {
3521 pn3
= pn2
->pn_right
;
3527 if (!EmitDestructuringDecls(cx
, bce
, pn
->getOp(), pn3
))
3530 if (!EmitTree(cx
, bce
, pn2
->pn_right
))
3533 if (!EmitDestructuringOps(cx
, bce
, pn3
, isLet
))
3536 /* If we are not initializing, nothing to pop. */
3537 if (emitOption
!= InitializeVars
) {
3546 * Load initializer early to share code above that jumps to do_name.
3547 * NB: if this var redeclares an existing binding, then pn2 is linked
3548 * on its definition's use-chain and pn_expr has been overlayed with
3551 pn3
= pn2
->maybeExpr();
3554 if (!BindNameToSlot(cx
, bce
, pn2
))
3560 JS_ASSERT(op
!= JSOP_CALLEE
);
3561 JS_ASSERT(!pn2
->pn_cookie
.isFree() || !pn
->isOp(JSOP_NOP
));
3564 if (!MaybeEmitVarDecl(cx
, bce
, pn
->getOp(), pn2
, &atomIndex
))
3568 JS_ASSERT(emitOption
!= DefineVars
);
3569 if (op
== JSOP_SETNAME
|| op
== JSOP_SETGNAME
|| op
== JSOP_SETINTRINSIC
) {
3570 JS_ASSERT(emitOption
!= PushInitialValues
);
3572 if (op
== JSOP_SETNAME
)
3573 bindOp
= JSOP_BINDNAME
;
3574 else if (op
== JSOP_SETGNAME
)
3575 bindOp
= JSOP_BINDGNAME
;
3577 bindOp
= JSOP_BINDINTRINSIC
;
3578 if (!EmitIndex32(cx
, bindOp
, atomIndex
, bce
))
3582 bool oldEmittingForInit
= bce
->emittingForInit
;
3583 bce
->emittingForInit
= false;
3584 if (!EmitTree(cx
, bce
, pn3
))
3586 bce
->emittingForInit
= oldEmittingForInit
;
3588 /* JSOP_ENTERLETx expects at least 1 slot to have been pushed. */
3589 if (Emit1(cx
, bce
, JSOP_UNDEFINED
) < 0)
3593 /* If we are not initializing, nothing to pop. */
3594 if (emitOption
!= InitializeVars
) {
3600 JS_ASSERT_IF(pn2
->isDefn(), pn3
== pn2
->pn_expr
);
3601 if (!pn2
->pn_cookie
.isFree()) {
3602 if (!EmitVarOp(cx
, pn2
, op
, bce
))
3605 if (!EmitIndexOp(cx
, op
, atomIndex
, bce
))
3612 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
3616 if (pn
->pn_xflags
& PNX_POPVAR
) {
3617 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
3625 EmitAssignment(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* lhs
, JSOp op
, ParseNode
* rhs
)
3628 * Check left operand type and generate specialized code for it.
3629 * Specialize to avoid ECMA "reference type" values on the operand
3630 * stack, which impose pervasive runtime "GetValue" costs.
3632 jsatomid atomIndex
= (jsatomid
) -1;
3633 jsbytecode offset
= 1;
3635 switch (lhs
->getKind()) {
3637 if (!BindNameToSlot(cx
, bce
, lhs
))
3639 if (lhs
->pn_cookie
.isFree()) {
3640 if (!bce
->makeAtomIndex(lhs
->pn_atom
, &atomIndex
))
3642 if (!lhs
->isConst()) {
3644 if (lhs
->isOp(JSOP_SETNAME
))
3645 bindOp
= JSOP_BINDNAME
;
3646 else if (lhs
->isOp(JSOP_SETGNAME
))
3647 bindOp
= JSOP_BINDGNAME
;
3649 bindOp
= JSOP_BINDINTRINSIC
;
3650 if (!EmitIndex32(cx
, bindOp
, atomIndex
, bce
))
3657 if (!EmitTree(cx
, bce
, lhs
->expr()))
3660 if (!bce
->makeAtomIndex(lhs
->pn_atom
, &atomIndex
))
3664 JS_ASSERT(lhs
->isArity(PN_BINARY
));
3665 if (!EmitTree(cx
, bce
, lhs
->pn_left
))
3667 if (!EmitTree(cx
, bce
, lhs
->pn_right
))
3675 JS_ASSERT(lhs
->pn_xflags
& PNX_SETCALL
);
3676 if (!EmitTree(cx
, bce
, lhs
))
3678 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
3685 if (op
!= JSOP_NOP
) {
3687 switch (lhs
->getKind()) {
3689 if (lhs
->isConst()) {
3690 if (lhs
->isOp(JSOP_CALLEE
)) {
3691 if (Emit1(cx
, bce
, JSOP_CALLEE
) < 0)
3693 } else if (lhs
->isOp(JSOP_NAME
) || lhs
->isOp(JSOP_GETGNAME
)) {
3694 if (!EmitIndex32(cx
, lhs
->getOp(), atomIndex
, bce
))
3697 JS_ASSERT(JOF_OPTYPE(lhs
->getOp()) != JOF_ATOM
);
3698 if (!EmitVarOp(cx
, lhs
, lhs
->getOp(), bce
))
3701 } else if (lhs
->isOp(JSOP_SETNAME
)) {
3702 if (Emit1(cx
, bce
, JSOP_DUP
) < 0)
3704 if (!EmitIndex32(cx
, JSOP_GETXPROP
, atomIndex
, bce
))
3706 } else if (lhs
->isOp(JSOP_SETGNAME
)) {
3707 JS_ASSERT(lhs
->pn_cookie
.isFree());
3708 if (!EmitAtomOp(cx
, lhs
, JSOP_GETGNAME
, bce
))
3710 } else if (lhs
->isOp(JSOP_SETINTRINSIC
)) {
3711 JS_ASSERT(lhs
->pn_cookie
.isFree());
3712 if (!EmitAtomOp(cx
, lhs
, JSOP_GETINTRINSIC
, bce
))
3716 switch (lhs
->getOp()) {
3717 case JSOP_SETARG
: op
= JSOP_GETARG
; break;
3718 case JSOP_SETLOCAL
: op
= JSOP_GETLOCAL
; break;
3719 case JSOP_SETALIASEDVAR
: op
= JSOP_GETALIASEDVAR
; break;
3720 default: MOZ_CRASH("Bad op");
3722 if (!EmitVarOp(cx
, lhs
, op
, bce
))
3727 if (Emit1(cx
, bce
, JSOP_DUP
) < 0)
3729 bool isLength
= (lhs
->pn_atom
== cx
->names().length
);
3730 if (!EmitIndex32(cx
, isLength
? JSOP_LENGTH
: JSOP_GETPROP
, atomIndex
, bce
))
3735 if (Emit1(cx
, bce
, JSOP_DUP2
) < 0)
3737 if (!EmitElemOpBase(cx
, bce
, JSOP_GETELEM
))
3742 * We just emitted a JSOP_SETCALL (which will always throw) and
3743 * popped the call's return value. Push a random value to make sure
3744 * the stack depth is correct.
3746 JS_ASSERT(lhs
->pn_xflags
& PNX_SETCALL
);
3747 if (Emit1(cx
, bce
, JSOP_NULL
) < 0)
3754 /* Now emit the right operand (it may affect the namespace). */
3756 if (!EmitTree(cx
, bce
, rhs
))
3760 * The value to assign is the next enumeration value in a for-in or
3761 * for-of loop. That value has already been emitted: by JSOP_ITERNEXT
3762 * in the for-in case, or via a GETPROP "value" on the result object in
3763 * the for-of case. If offset == 1, that slot is already at the top of
3764 * the stack. Otherwise, rearrange the stack to put that value on top.
3766 if (offset
!= 1 && Emit2(cx
, bce
, JSOP_PICK
, offset
- 1) < 0)
3770 /* If += etc., emit the binary operator with a source note. */
3771 if (op
!= JSOP_NOP
) {
3773 * Take care to avoid SRC_ASSIGNOP if the left-hand side is a const
3774 * declared in the current compilation unit, as in this case (just
3775 * a bit further below) we will avoid emitting the assignment op.
3777 if (!lhs
->isKind(PNK_NAME
) || !lhs
->isConst()) {
3778 if (NewSrcNote(cx
, bce
, SRC_ASSIGNOP
) < 0)
3781 if (Emit1(cx
, bce
, op
) < 0)
3785 /* Finally, emit the specialized assignment bytecode. */
3786 switch (lhs
->getKind()) {
3788 if (lhs
->isConst()) {
3790 bce
->reportError(lhs
, JSMSG_BAD_FOR_LEFTSIDE
);
3795 if (lhs
->isOp(JSOP_SETARG
) || lhs
->isOp(JSOP_SETLOCAL
) || lhs
->isOp(JSOP_SETALIASEDVAR
)) {
3796 if (!EmitVarOp(cx
, lhs
, lhs
->getOp(), bce
))
3799 if (!EmitIndexOp(cx
, lhs
->getOp(), atomIndex
, bce
))
3804 if (!EmitIndexOp(cx
, JSOP_SETPROP
, atomIndex
, bce
))
3808 /* Do nothing. The JSOP_SETCALL we emitted will always throw. */
3809 JS_ASSERT(lhs
->pn_xflags
& PNX_SETCALL
);
3812 if (Emit1(cx
, bce
, JSOP_SETELEM
) < 0)
3817 if (!EmitDestructuringOps(cx
, bce
, lhs
))
// Fold a constant literal subtree into *vp: numbers, strings / template
// strings, booleans, and — when allowObjects permits — array, object, and
// call-site-object literals. Nested object literals downgrade allowObjects
// from DontAllowNestedObjects to DontAllowObjects so only one level nests.
// NOTE(review): the embedded original-line numbers are non-contiguous, so
// early-return lines and braces between them are elided from this chunk;
// do not assume the span shown here is complete.
3827 ParseNode::getConstantValue(ExclusiveContext
* cx
, AllowConstantObjects allowObjects
, MutableHandleValue vp
)
3829 switch (getKind()) {
3831 vp
.setNumber(pn_dval
);
3833 case PNK_TEMPLATE_STRING
:
3835 vp
.setString(pn_atom
);
3838 vp
.setBoolean(true);
3841 vp
.setBoolean(false);
3848 case PNK_CALLSITEOBJ
:
3850 RootedValue
value(cx
);
3854 if (allowObjects
== DontAllowObjects
)
3856 if (allowObjects
== DontAllowNestedObjects
)
3857 allowObjects
= DontAllowObjects
;
// Call-site objects skip the raw-strings head node (pn_count - 1 elements).
3859 if (getKind() == PNK_CALLSITEOBJ
) {
3860 count
= pn_count
- 1;
3861 pn
= pn_head
->pn_next
;
3863 JS_ASSERT(isOp(JSOP_NEWINIT
) && !(pn_xflags
& PNX_NONCONST
));
3868 RootedObject
obj(cx
, NewDenseAllocatedArray(cx
, count
, nullptr, MaybeSingletonObject
));
// Recursively fold each array element and define it at its index.
3874 for (; pn
; idx
++, pn
= pn
->pn_next
) {
3875 if (!pn
->getConstantValue(cx
, allowObjects
, &value
))
3877 id
= INT_TO_JSID(idx
);
3878 if (!JSObject::defineGeneric(cx
, obj
, id
, value
, nullptr, nullptr, JSPROP_ENUMERATE
))
3881 JS_ASSERT(idx
== count
);
3883 types::FixArrayType(cx
, obj
);
3888 JS_ASSERT(isOp(JSOP_NEWINIT
));
3889 JS_ASSERT(!(pn_xflags
& PNX_NONCONST
));
3891 if (allowObjects
== DontAllowObjects
)
3893 if (allowObjects
== DontAllowNestedObjects
)
3894 allowObjects
= DontAllowObjects
;
3896 gc::AllocKind kind
= GuessObjectGCKind(pn_count
);
3897 RootedObject
obj(cx
, NewBuiltinClassInstance(cx
, &JSObject::class_
, kind
, MaybeSingletonObject
));
3901 RootedValue
value(cx
), idvalue(cx
);
// Object literal: fold each property value, then define it under a
// numeric index or an atomized property name as appropriate.
3902 for (ParseNode
* pn
= pn_head
; pn
; pn
= pn
->pn_next
) {
3903 if (!pn
->pn_right
->getConstantValue(cx
, allowObjects
, &value
))
3906 ParseNode
* pnid
= pn
->pn_left
;
3907 if (pnid
->isKind(PNK_NUMBER
)) {
3908 idvalue
= NumberValue(pnid
->pn_dval
);
3910 JS_ASSERT(pnid
->isKind(PNK_NAME
) || pnid
->isKind(PNK_STRING
));
3911 JS_ASSERT(pnid
->pn_atom
!= cx
->names().proto
);
3912 idvalue
= StringValue(pnid
->pn_atom
);
3916 if (IsDefinitelyIndex(idvalue
, &index
)) {
3917 if (!JSObject::defineElement(cx
, obj
, index
, value
, nullptr, nullptr,
3926 JSAtom
* name
= ToAtom
<CanGC
>(cx
, idvalue
);
3930 if (name
->isIndex(&index
)) {
3931 if (!JSObject::defineElement(cx
, obj
, index
, value
,
3932 nullptr, nullptr, JSPROP_ENUMERATE
))
3935 if (!JSObject::defineProperty(cx
, obj
, name
->asPropertyName(), value
,
3936 nullptr, nullptr, JSPROP_ENUMERATE
))
3943 types::FixObjectType(cx
, obj
);
3948 MOZ_CRASH("Unexpected node");
// Evaluate pn as a compile-time constant (nested objects allowed), mark
// non-array results as singleton-typed, box the resulting object in the
// parser's object list, and emit JSOP_OBJECT referencing that box.
3954 EmitSingletonInitialiser(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
3956 RootedValue
value(cx
);
3957 if (!pn
->getConstantValue(cx
, ParseNode::AllowObjects
, &value
))
3960 RootedObject
obj(cx
, &value
.toObject());
3961 if (!obj
->is
<ArrayObject
>() && !JSObject::setSingletonType(cx
, obj
))
3964 ObjectBox
* objbox
= bce
->parser
->newObjectBox(obj
);
3968 return EmitObjectOp(cx
, objbox
, JSOP_OBJECT
, bce
);
// Emit a tagged-template call-site object: fold the cooked call-site
// object and its raw-strings array to constants, box both objects, and
// emit JSOP_CALLSITEOBJ referencing the pair.
3972 EmitCallSiteObject(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
3974 RootedValue
value(cx
);
3975 if (!pn
->getConstantValue(cx
, ParseNode::AllowObjects
, &value
))
3978 JS_ASSERT(value
.isObject());
3980 ObjectBox
* objbox1
= bce
->parser
->newObjectBox(&value
.toObject());
// Second box holds the raw-strings array carried by the CallSiteNode.
3984 if (!pn
->as
<CallSiteNode
>().getRawArrayValue(cx
, &value
))
3987 JS_ASSERT(value
.isObject());
3989 ObjectBox
* objbox2
= bce
->parser
->newObjectBox(&value
.toObject());
3993 return EmitObjectPairOp(cx
, objbox1
, objbox2
, JSOP_CALLSITEOBJ
, bce
);
// SRC_FOR srcnote offsets are biased by one opcode length, so both ops
// that may carry the bias must be exactly one byte long.
3996 /* See the SRC_FOR source note offsetBias comments later in this file. */
3997 JS_STATIC_ASSERT(JSOP_NOP_LENGTH
== 1);
3998 JS_STATIC_ASSERT(JSOP_POP_LENGTH
== 1);
// RAII guard: increments bce->emitLevel on construction and decrements it
// on destruction, tracking emitter recursion depth for the guard's scope.
4002 class EmitLevelManager
4004 BytecodeEmitter
* bce
;
4006 explicit EmitLevelManager(BytecodeEmitter
* bce
) : bce(bce
) { bce
->emitLevel
++; }
4007 ~EmitLevelManager() { bce
->emitLevel
--; }
4010 } /* anonymous namespace */
// Emit one catch handler: morph the enclosing block statement into
// STMT_CATCH, bind the pending exception to the catch variable (plain name
// or destructuring pattern), emit the optional catch guard with its
// next-handler jump (backpatched by EmitTry), then emit the catch body.
4013 EmitCatch(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
4016 * Morph STMT_BLOCK to STMT_CATCH, note the block entry code offset,
4017 * and save the block object atom.
4019 StmtInfoBCE
* stmt
= bce
->topStmt
;
4020 JS_ASSERT(stmt
->type
== STMT_BLOCK
&& stmt
->isBlockScope
);
4021 stmt
->type
= STMT_CATCH
;
4023 /* Go up one statement info record to the TRY or FINALLY record. */
4025 JS_ASSERT(stmt
->type
== STMT_TRY
|| stmt
->type
== STMT_FINALLY
);
4027 /* Pick up the pending exception and bind it to the catch variable. */
4028 if (Emit1(cx
, bce
, JSOP_EXCEPTION
) < 0)
4032 * Dup the exception object if there is a guard for rethrowing to use
4033 * it later when rethrowing or in other catches.
4035 if (pn
->pn_kid2
&& Emit1(cx
, bce
, JSOP_DUP
) < 0)
4038 ParseNode
* pn2
= pn
->pn_kid1
;
4039 switch (pn2
->getKind()) {
4042 if (!EmitDestructuringOps(cx
, bce
, pn2
))
4044 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
4049 /* Inline and specialize BindNameToSlot for pn2. */
4050 JS_ASSERT(!pn2
->pn_cookie
.isFree());
4051 if (!EmitVarOp(cx
, pn2
, JSOP_SETLOCAL
, bce
))
4053 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
4061 // If there is a guard expression, emit it and arrange to jump to the next
4062 // catch block if the guard expression is false.
4064 if (!EmitTree(cx
, bce
, pn
->pn_kid2
))
4067 // If the guard expression is false, fall through, pop the block scope,
4068 // and jump to the next catch block. Otherwise jump over that code and
4069 // pop the dupped exception.
4070 ptrdiff_t guardCheck
= EmitJump(cx
, bce
, JSOP_IFNE
, 0);
4075 NonLocalExitScope
nle(cx
, bce
);
4077 // Move exception back to cx->exception to prepare for
4079 if (Emit1(cx
, bce
, JSOP_THROWING
) < 0)
4082 // Leave the scope for this catch block.
4083 if (!nle
.prepareForNonLocalJump(stmt
))
4086 // Jump to the next handler. The jump target is backpatched by EmitTry.
4087 ptrdiff_t guardJump
= EmitJump(cx
, bce
, JSOP_GOTO
, 0);
4090 stmt
->guardJump() = guardJump
;
4093 // Back to normal control flow.
4094 SetJumpOffsetAt(bce
, guardCheck
);
4096 // Pop duplicated exception object as we no longer need it.
4097 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
4101 /* Emit the catch body. */
4102 return EmitTree(cx
, bce
, pn
->pn_kid3
);
// Emit a try statement: pn_kid1 is the try block, pn_kid2 the (possibly
// guarded) catch list, pn_kid3 the optional finally. Records a SRC_TRY
// srcnote, GOSUBs to finally, backpatches end-of-try/catch jumps, and
// appends JSTRY_CATCH / JSTRY_FINALLY try notes last so post-order gives
// the interpreter the right handler ordering.
// NOTE(review): original-line numbering gaps show elided lines (error
// returns, braces) — do not assume this span is complete.
4105 // Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See the
4106 // comment on EmitSwitch.
4108 MOZ_NEVER_INLINE
static bool
4109 EmitTry(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
4111 StmtInfoBCE
stmtInfo(cx
);
4113 // Push stmtInfo to track jumps-over-catches and gosubs-to-finally
4116 // When a finally block is active (STMT_FINALLY in our parse context),
4117 // non-local jumps (including jumps-over-catches) result in a GOSUB
4118 // being written into the bytecode stream and fixed-up later (c.f.
4119 // EmitBackPatchOp and BackPatch).
4121 PushStatementBCE(bce
, &stmtInfo
, pn
->pn_kid3
? STMT_FINALLY
: STMT_TRY
, bce
->offset());
4123 // Since an exception can be thrown at any place inside the try block,
4124 // we need to restore the stack and the scope chain before we transfer
4125 // the control to the exception handler.
4127 // For that we store in a try note associated with the catch or
4128 // finally block the stack depth upon the try entry. The interpreter
4129 // uses this depth to properly unwind the stack and the scope chain.
4131 int depth
= bce
->stackDepth
;
4133 // Record the try location, then emit the try block.
4134 ptrdiff_t noteIndex
= NewSrcNote(cx
, bce
, SRC_TRY
);
4135 if (noteIndex
< 0 || Emit1(cx
, bce
, JSOP_TRY
) < 0)
4137 ptrdiff_t tryStart
= bce
->offset();
4138 if (!EmitTree(cx
, bce
, pn
->pn_kid1
))
4140 JS_ASSERT(depth
== bce
->stackDepth
);
4142 // GOSUB to finally, if present.
4144 if (EmitBackPatchOp(cx
, bce
, &stmtInfo
.gosubs()) < 0)
4148 // Source note points to the jump at the end of the try block.
4149 if (!SetSrcNoteOffset(cx
, bce
, noteIndex
, 0, bce
->offset() - tryStart
+ JSOP_TRY_LENGTH
))
4152 // Emit jump over catch and/or finally.
4153 ptrdiff_t catchJump
= -1;
4154 if (EmitBackPatchOp(cx
, bce
, &catchJump
) < 0)
4157 ptrdiff_t tryEnd
= bce
->offset();
4159 // If this try has a catch block, emit it.
4160 if (ParseNode
* pn2
= pn
->pn_kid2
) {
4161 // The emitted code for a catch block looks like:
4163 // [pushblockscope] only if any local aliased
4165 // if there is a catchguard:
4167 // setlocal 0; pop assign or possibly destructure exception
4168 // if there is a catchguard:
4169 // < catchguard code >
4172 // [popblockscope] only if any local aliased
4173 // throwing pop exception to cx->exception
4174 // goto <next catch block>
4176 // < catch block contents >
4178 // [popblockscope] only if any local aliased
4179 // goto <end of catch blocks> non-local; finally applies
4181 // If there's no catch block without a catchguard, the last <next catch
4182 // block> points to rethrow code. This code will [gosub] to the finally
4183 // code if appropriate, and is also used for the catch-all trynote for
4184 // capturing exceptions thrown from catch{} blocks.
4186 for (ParseNode
* pn3
= pn2
->pn_head
; pn3
; pn3
= pn3
->pn_next
) {
4187 JS_ASSERT(bce
->stackDepth
== depth
);
4189 // Emit the lexical scope and catch body.
4190 JS_ASSERT(pn3
->isKind(PNK_LEXICALSCOPE
));
4191 if (!EmitTree(cx
, bce
, pn3
))
4194 // gosub <finally>, if required.
4196 if (EmitBackPatchOp(cx
, bce
, &stmtInfo
.gosubs()) < 0)
4198 JS_ASSERT(bce
->stackDepth
== depth
);
4201 // Jump over the remaining catch blocks. This will get fixed
4202 // up to jump to after catch/finally.
4203 if (EmitBackPatchOp(cx
, bce
, &catchJump
) < 0)
4206 // If this catch block had a guard clause, patch the guard jump to
4208 if (stmtInfo
.guardJump() != -1) {
4209 SetJumpOffsetAt(bce
, stmtInfo
.guardJump());
4210 stmtInfo
.guardJump() = -1;
4212 // If this catch block is the last one, rethrow, delegating
4213 // execution of any finally block to the exception handler.
4214 if (!pn3
->pn_next
) {
4215 if (Emit1(cx
, bce
, JSOP_EXCEPTION
) < 0)
4217 if (Emit1(cx
, bce
, JSOP_THROW
) < 0)
4224 JS_ASSERT(bce
->stackDepth
== depth
);
4226 // Emit the finally handler, if there is one.
4227 ptrdiff_t finallyStart
= 0;
4229 // Fix up the gosubs that might have been emitted before non-local
4230 // jumps to the finally code.
4231 if (!BackPatch(cx
, bce
, stmtInfo
.gosubs(), bce
->code().end(), JSOP_GOSUB
))
4234 finallyStart
= bce
->offset();
4236 // Indicate that we're emitting a subroutine body.
4237 stmtInfo
.type
= STMT_SUBROUTINE
;
4238 if (!UpdateSourceCoordNotes(cx
, bce
, pn
->pn_kid3
->pn_pos
.begin
))
4240 if (Emit1(cx
, bce
, JSOP_FINALLY
) < 0 ||
4241 !EmitTree(cx
, bce
, pn
->pn_kid3
) ||
4242 Emit1(cx
, bce
, JSOP_RETSUB
) < 0)
4246 JS_ASSERT(bce
->stackDepth
== depth
);
4248 if (!PopStatementBCE(cx
, bce
))
4251 // ReconstructPCStack needs a NOP here to mark the end of the last catch block.
4252 if (Emit1(cx
, bce
, JSOP_NOP
) < 0)
4255 // Fix up the end-of-try/catch jumps to come here.
4256 if (!BackPatch(cx
, bce
, catchJump
, bce
->code().end(), JSOP_GOTO
))
4259 // Add the try note last, to let post-order give us the right ordering
4260 // (first to last for a given nesting level, inner to outer by level).
4261 if (pn
->pn_kid2
&& !bce
->tryNoteList
.append(JSTRY_CATCH
, depth
, tryStart
, tryEnd
))
4264 // If we've got a finally, mark try+catch region with additional
4265 // trynote to catch exceptions (re)thrown from a catch block or
4266 // for the try{}finally{} case.
4267 if (pn
->pn_kid3
&& !bce
->tryNoteList
.append(JSTRY_FINALLY
, depth
, tryStart
, finallyStart
))
// Emit an if/else statement. Else-if chains are flattened iteratively
// (the STMT_ELSE -> STMT_IF mutation below) rather than by recursion, and
// SRC_IF_ELSE srcnotes record the branch-to-jump distance for IonMonkey.
4274 EmitIf(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
4276 StmtInfoBCE
stmtInfo(cx
);
4278 /* Initialize so we can detect else-if chains and avoid recursion. */
4279 stmtInfo
.type
= STMT_IF
;
4282 ptrdiff_t noteIndex
= -1;
4285 /* Emit code for the condition before pushing stmtInfo. */
4286 if (!EmitTree(cx
, bce
, pn
->pn_kid1
))
4288 ptrdiff_t top
= bce
->offset();
4289 if (stmtInfo
.type
== STMT_IF
) {
4290 PushStatementBCE(bce
, &stmtInfo
, STMT_IF
, top
);
4293 * We came here from the goto further below that detects else-if
4294 * chains, so we must mutate stmtInfo back into a STMT_IF record.
4295 * Also we need a note offset for SRC_IF_ELSE to help IonMonkey.
4297 JS_ASSERT(stmtInfo
.type
== STMT_ELSE
);
4298 stmtInfo
.type
= STMT_IF
;
4299 stmtInfo
.update
= top
;
4300 if (!SetSrcNoteOffset(cx
, bce
, noteIndex
, 0, jmp
- beq
))
4304 /* Emit an annotated branch-if-false around the then part. */
4305 ParseNode
* pn3
= pn
->pn_kid3
;
4306 noteIndex
= NewSrcNote(cx
, bce
, pn3
? SRC_IF_ELSE
: SRC_IF
);
4309 beq
= EmitJump(cx
, bce
, JSOP_IFEQ
, 0);
4313 /* Emit code for the then and optional else parts. */
4314 if (!EmitTree(cx
, bce
, pn
->pn_kid2
))
4317 /* Modify stmtInfo so we know we're in the else part. */
4318 stmtInfo
.type
= STMT_ELSE
;
4321 * Emit a JSOP_BACKPATCH op to jump from the end of our then part
4322 * around the else part. The PopStatementBCE call at the bottom of
4323 * this function will fix up the backpatch chain linked from
4326 jmp
= EmitGoto(cx
, bce
, &stmtInfo
, &stmtInfo
.breaks
);
4330 /* Ensure the branch-if-false comes here, then emit the else. */
4331 SetJumpOffsetAt(bce
, beq
);
4332 if (pn3
->isKind(PNK_IF
)) {
4337 if (!EmitTree(cx
, bce
, pn3
))
4341 * Annotate SRC_IF_ELSE with the offset from branch to jump, for
4342 * IonMonkey's benefit. We can't just "back up" from the pc
4343 * of the else clause, because we don't know whether an extended
4344 * jump was required to leap from the end of the then clause over
4347 if (!SetSrcNoteOffset(cx
, bce
, noteIndex
, 0, jmp
- beq
))
4350 /* No else part, fixup the branch-if-false to come here. */
4351 SetJumpOffsetAt(bce
, beq
);
4353 return PopStatementBCE(cx
, bce
);
4357 * pnLet represents one of:
4359 * let-expression: (let (x = y) EXPR)
4360 * let-statement: let (x = y) { ... }
4362 * For a let-expression 'let (x = a, [y,z] = b) e', EmitLet produces:
4364 * bytecode stackDepth srcnotes
4377 * pushblockscope (if needed)
4380 * popblockscope (if needed)
4382 * Note that, since pushblockscope simply changes fp->scopeChain and does not
4383 * otherwise touch the stack, evaluation of the let-var initializers must leave
4384 * the initial value in the let-var's future slot.
4387 * Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See
4388 * the comment on EmitSwitch.
// Emit a let-expression or let-statement (pnLet is PN_BINARY: the var
// list on the left, the lexical-scope body on the right). Initializer
// values are pushed first, then the block scope is entered with those
// values already on the stack (alreadyPushed).
4390 MOZ_NEVER_INLINE
static bool
4391 EmitLet(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pnLet
)
4393 JS_ASSERT(pnLet
->isArity(PN_BINARY
));
4394 ParseNode
* varList
= pnLet
->pn_left
;
4395 JS_ASSERT(varList
->isArity(PN_LIST
));
4396 ParseNode
* letBody
= pnLet
->pn_right
;
4397 JS_ASSERT(letBody
->isLet() && letBody
->isKind(PNK_LEXICALSCOPE
));
4399 int letHeadDepth
= bce
->stackDepth
;
4401 if (!EmitVariables(cx
, bce
, varList
, PushInitialValues
, true))
4404 /* Push storage for hoisted let decls (e.g. 'let (x) { let y }'). */
4405 uint32_t alreadyPushed
= bce
->stackDepth
- letHeadDepth
;
4406 StmtInfoBCE
stmtInfo(cx
);
4407 if (!EnterBlockScope(cx
, bce
, &stmtInfo
, letBody
->pn_objbox
, alreadyPushed
))
4410 if (!EmitTree(cx
, bce
, letBody
->pn_expr
))
4413 if (!LeaveNestedScope(cx
, bce
, &stmtInfo
))
4420 * Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See
4421 * the comment on EmitSwitch.
// Emit a lexical block: enter the block scope described by pn->pn_objbox,
// emit the scoped body, then leave the scope.
4423 MOZ_NEVER_INLINE
static bool
4424 EmitLexicalScope(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
4426 JS_ASSERT(pn
->isKind(PNK_LEXICALSCOPE
));
4428 StmtInfoBCE
stmtInfo(cx
);
4429 if (!EnterBlockScope(cx
, bce
, &stmtInfo
, pn
->pn_objbox
, 0))
4432 if (!EmitTree(cx
, bce
, pn
->pn_expr
))
4435 if (!LeaveNestedScope(cx
, bce
, &stmtInfo
))
// Emit a with statement: evaluate the object expression (pn_left), enter
// a STMT_WITH nested scope for it, emit the body (pn_right), then leave.
4442 EmitWith(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
4444 StmtInfoBCE
stmtInfo(cx
);
4445 if (!EmitTree(cx
, bce
, pn
->pn_left
))
4447 if (!EnterNestedScope(cx
, bce
, &stmtInfo
, pn
->pn_binary_obj
, STMT_WITH
))
4449 if (!EmitTree(cx
, bce
, pn
->pn_right
))
4451 if (!LeaveNestedScope(cx
, bce
, &stmtInfo
))
4457 * EmitIterator expects the iterable to already be on the stack.
4458 * It will replace that stack value with the corresponding iterator
// Replace the iterable on top of the stack with its iterator: look up
// @@iterator via CALLPROP, swap receiver under the callee, and call it
// with zero arguments. Stack comments show the state after each op.
4461 EmitIterator(ExclusiveContext
* cx
, BytecodeEmitter
* bce
)
4463 // Convert iterable to iterator.
4464 if (Emit1(cx
, bce
, JSOP_DUP
) < 0) // OBJ OBJ
4466 if (!EmitAtomOp(cx
, cx
->names().std_iterator
, JSOP_CALLPROP
, bce
)) // OBJ @@ITERATOR
4468 if (Emit1(cx
, bce
, JSOP_SWAP
) < 0) // @@ITERATOR OBJ
4470 if (EmitCall(cx
, bce
, JSOP_CALL
, 0) < 0) // ITER
4472 CheckTypeSet(cx
, bce
, JSOP_CALL
);
4477 * If type is STMT_FOR_OF_LOOP, it emits bytecode for for-of loop.
4478 * pn should be PNK_FOR, and pn->pn_left should be PNK_FOROF.
4480 * If type is STMT_SPREAD, it emits bytecode for spread operator.
4481 * pn should be nullptr.
4482 * Please refer the comment above EmitSpread for additional information about
// Emit either a for-of loop (type == STMT_FOR_OF_LOOP, pn is the PNK_FOR
// node) or the loop backing a spread expression (type == STMT_SPREAD,
// pn == nullptr). The loop runs with the iterator and current result
// object on the stack; spread additionally keeps the target array and
// index. The entry jump goes straight to the condition, per the other
// loop forms.
// NOTE(review): original-line numbering gaps show elided lines (error
// returns, braces) — do not assume this span is complete.
4486 EmitForOf(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, StmtType type
, ParseNode
* pn
, ptrdiff_t top
)
4488 JS_ASSERT(type
== STMT_FOR_OF_LOOP
|| type
== STMT_SPREAD
);
4489 JS_ASSERT_IF(type
== STMT_FOR_OF_LOOP
, pn
&& pn
->pn_left
->isKind(PNK_FOROF
));
4490 JS_ASSERT_IF(type
== STMT_SPREAD
, !pn
);
4492 ParseNode
* forHead
= pn
? pn
->pn_left
: nullptr;
4493 ParseNode
* forBody
= pn
? pn
->pn_right
: nullptr;
4495 ParseNode
* pn1
= forHead
? forHead
->pn_kid1
: nullptr;
4496 bool letDecl
= pn1
&& pn1
->isKind(PNK_LEXICALSCOPE
);
4497 JS_ASSERT_IF(letDecl
, pn1
->isLet());
4499 // If the left part is 'var x', emit code to define x if necessary using a
4500 // prolog opcode, but do not emit a pop.
4502 ParseNode
* decl
= letDecl
? pn1
->pn_expr
: pn1
;
4503 JS_ASSERT(decl
->isKind(PNK_VAR
) || decl
->isKind(PNK_LET
));
4504 bce
->emittingForInit
= true;
4505 if (!EmitVariables(cx
, bce
, decl
, DefineVars
))
4507 bce
->emittingForInit
= false;
4510 if (type
== STMT_FOR_OF_LOOP
) {
4511 // For-of loops run with two values on the stack: the iterator and the
4512 // current result object.
4514 // Compile the object expression to the right of 'of'.
4515 if (!EmitTree(cx
, bce
, forHead
->pn_kid3
))
4517 if (!EmitIterator(cx
, bce
))
4520 // Push a dummy result so that we properly enter iteration midstream.
4521 if (Emit1(cx
, bce
, JSOP_UNDEFINED
) < 0) // ITER RESULT
4525 // Enter the block before the loop body, after evaluating the obj.
4526 StmtInfoBCE
letStmt(cx
);
4528 if (!EnterBlockScope(cx
, bce
, &letStmt
, pn1
->pn_objbox
, 0))
4532 LoopStmtInfo
stmtInfo(cx
);
4533 PushLoopStatement(bce
, &stmtInfo
, type
, top
);
4535 // Jump down to the loop condition to minimize overhead assuming at least
4536 // one iteration, as the other loop forms do. Annotate so IonMonkey can
4537 // find the loop-closing jump.
4538 int noteIndex
= NewSrcNote(cx
, bce
, SRC_FOR_OF
);
4541 ptrdiff_t jmp
= EmitJump(cx
, bce
, JSOP_GOTO
, 0);
4545 top
= bce
->offset();
4546 SET_STATEMENT_TOP(&stmtInfo
, top
);
4547 if (EmitLoopHead(cx
, bce
, nullptr) < 0)
4550 if (type
== STMT_SPREAD
)
4554 int loopDepth
= bce
->stackDepth
;
4557 // Emit code to assign result.value to the iteration variable.
4558 if (type
== STMT_FOR_OF_LOOP
) {
4559 if (Emit1(cx
, bce
, JSOP_DUP
) < 0) // ITER RESULT RESULT
4562 if (!EmitAtomOp(cx
, cx
->names().value
, JSOP_GETPROP
, bce
)) // ... RESULT VALUE
4564 if (type
== STMT_FOR_OF_LOOP
) {
4565 if (!EmitAssignment(cx
, bce
, forHead
->pn_kid2
, JSOP_NOP
, nullptr)) // ITER RESULT VALUE
4567 if (Emit1(cx
, bce
, JSOP_POP
) < 0) // ITER RESULT
4570 // The stack should be balanced around the assignment opcode sequence.
4571 JS_ASSERT(bce
->stackDepth
== loopDepth
);
4573 // Emit code for the loop body.
4574 if (!EmitTree(cx
, bce
, forBody
))
4577 // Set loop and enclosing "update" offsets, for continue.
4578 StmtInfoBCE
* stmt
= &stmtInfo
;
4580 stmt
->update
= bce
->offset();
4581 } while ((stmt
= stmt
->down
) != nullptr && stmt
->type
== STMT_LABEL
);
4583 if (Emit1(cx
, bce
, JSOP_INITELEM_INC
) < 0) // ITER ARR (I+1)
4586 JS_ASSERT(bce
->stackDepth
== loopDepth
- 1);
4588 // STMT_SPREAD never contain continue, so do not set "update" offset.
4591 // COME FROM the beginning of the loop to here.
4592 SetJumpOffsetAt(bce
, jmp
);
4593 if (!EmitLoopEntry(cx
, bce
, nullptr))
4596 if (type
== STMT_FOR_OF_LOOP
) {
4597 if (Emit1(cx
, bce
, JSOP_POP
) < 0) // ITER
4599 if (Emit1(cx
, bce
, JSOP_DUP
) < 0) // ITER ITER
4602 if (!EmitDupAt(cx
, bce
, bce
->stackDepth
- 1 - 2)) // ITER ARR I ITER
4605 if (!EmitIteratorNext(cx
, bce
, forHead
)) // ... RESULT
4607 if (Emit1(cx
, bce
, JSOP_DUP
) < 0) // ... RESULT RESULT
4609 if (!EmitAtomOp(cx
, cx
->names().done
, JSOP_GETPROP
, bce
)) // ... RESULT DONE?
4612 ptrdiff_t beq
= EmitJump(cx
, bce
, JSOP_IFEQ
, top
- bce
->offset()); // ... RESULT
4616 JS_ASSERT(bce
->stackDepth
== loopDepth
);
4618 // Let Ion know where the closing jump of this loop is.
4619 if (!SetSrcNoteOffset(cx
, bce
, (unsigned)noteIndex
, 0, beq
- jmp
))
4622 // Fixup breaks and continues.
4623 // For STMT_SPREAD, just pop pc->topStmt.
4624 if (!PopStatementBCE(cx
, bce
))
4628 if (!LeaveNestedScope(cx
, bce
, &letStmt
))
4632 if (type
== STMT_SPREAD
) {
4633 if (Emit2(cx
, bce
, JSOP_PICK
, (jsbytecode
)3) < 0) // ARR I RESULT ITER
4637 // Pop the result and the iter.
4638 EMIT_UINT16_IMM_OP(JSOP_POPN
, 2);
// Emit a for-in loop: define the loop variable if declared, evaluate the
// object to the right of 'in', convert it with JSOP_ITER, then run the
// ITERNEXT / assign / body / MOREITER cycle, closing with JSOP_ENDITER
// and a JSTRY_ITER try note covering the loop.
// NOTE(review): original-line numbering gaps show elided lines (error
// returns, braces) — do not assume this span is complete.
4644 EmitForIn(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
, ptrdiff_t top
)
4646 ParseNode
* forHead
= pn
->pn_left
;
4647 ParseNode
* forBody
= pn
->pn_right
;
4649 ParseNode
* pn1
= forHead
->pn_kid1
;
4650 bool letDecl
= pn1
&& pn1
->isKind(PNK_LEXICALSCOPE
);
4651 JS_ASSERT_IF(letDecl
, pn1
->isLet());
4654 * If the left part is 'var x', emit code to define x if necessary
4655 * using a prolog opcode, but do not emit a pop. If the left part was
4656 * originally 'var x = i', the parser will have rewritten it; see
4657 * Parser::forStatement. 'for (let x = i in o)' is mercifully banned.
4660 ParseNode
* decl
= letDecl
? pn1
->pn_expr
: pn1
;
4661 JS_ASSERT(decl
->isKind(PNK_VAR
) || decl
->isKind(PNK_LET
));
4662 bce
->emittingForInit
= true;
4663 if (!EmitVariables(cx
, bce
, decl
, DefineVars
))
4665 bce
->emittingForInit
= false;
4668 /* Compile the object expression to the right of 'in'. */
4669 if (!EmitTree(cx
, bce
, forHead
->pn_kid3
))
4673 * Emit a bytecode to convert top of stack value to the iterator
4674 * object depending on the loop variant (for-in, for-each-in, or
4675 * destructuring for-in).
4677 JS_ASSERT(pn
->isOp(JSOP_ITER
));
4678 if (Emit2(cx
, bce
, JSOP_ITER
, (uint8_t) pn
->pn_iflags
) < 0)
4681 /* Enter the block before the loop body, after evaluating the obj. */
4682 StmtInfoBCE
letStmt(cx
);
4684 if (!EnterBlockScope(cx
, bce
, &letStmt
, pn1
->pn_objbox
, 0))
4688 LoopStmtInfo
stmtInfo(cx
);
4689 PushLoopStatement(bce
, &stmtInfo
, STMT_FOR_IN_LOOP
, top
);
4691 /* Annotate so IonMonkey can find the loop-closing jump. */
4692 int noteIndex
= NewSrcNote(cx
, bce
, SRC_FOR_IN
);
4697 * Jump down to the loop condition to minimize overhead assuming at
4698 * least one iteration, as the other loop forms do.
4700 ptrdiff_t jmp
= EmitJump(cx
, bce
, JSOP_GOTO
, 0);
4704 top
= bce
->offset();
4705 SET_STATEMENT_TOP(&stmtInfo
, top
);
4706 if (EmitLoopHead(cx
, bce
, nullptr) < 0)
4710 int loopDepth
= bce
->stackDepth
;
4714 * Emit code to get the next enumeration value and assign it to the
4717 if (Emit1(cx
, bce
, JSOP_ITERNEXT
) < 0)
4719 if (!EmitAssignment(cx
, bce
, forHead
->pn_kid2
, JSOP_NOP
, nullptr))
4722 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
4725 /* The stack should be balanced around the assignment opcode sequence. */
4726 JS_ASSERT(bce
->stackDepth
== loopDepth
);
4728 /* Emit code for the loop body. */
4729 if (!EmitTree(cx
, bce
, forBody
))
4732 /* Set loop and enclosing "update" offsets, for continue. */
4733 StmtInfoBCE
* stmt
= &stmtInfo
;
4735 stmt
->update
= bce
->offset();
4736 } while ((stmt
= stmt
->down
) != nullptr && stmt
->type
== STMT_LABEL
);
4739 * Fixup the goto that starts the loop to jump down to JSOP_MOREITER.
4741 SetJumpOffsetAt(bce
, jmp
);
4742 if (!EmitLoopEntry(cx
, bce
, nullptr))
4744 if (Emit1(cx
, bce
, JSOP_MOREITER
) < 0)
4746 ptrdiff_t beq
= EmitJump(cx
, bce
, JSOP_IFNE
, top
- bce
->offset());
4750 /* Set the srcnote offset so we can find the closing jump. */
4751 if (!SetSrcNoteOffset(cx
, bce
, (unsigned)noteIndex
, 0, beq
- jmp
))
4754 // Fix up breaks and continues.
4755 if (!PopStatementBCE(cx
, bce
))
4758 if (!bce
->tryNoteList
.append(JSTRY_ITER
, bce
->stackDepth
, top
, bce
->offset()))
4760 if (Emit1(cx
, bce
, JSOP_ENDITER
) < 0)
4764 if (!LeaveNestedScope(cx
, bce
, &letStmt
))
// Emit a C-style for (init; cond; update) loop. A SRC_FOR srcnote carries
// three offsets (condition, update, closing jump) for IonBuilder; the
// initial goto targets the condition so at-least-one-iteration loops pay
// no extra branch. Appends a JSTRY_LOOP try note for the body.
// NOTE(review): original-line numbering gaps show elided lines (error
// returns, braces) — do not assume this span is complete.
4772 EmitNormalFor(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
, ptrdiff_t top
)
4774 LoopStmtInfo
stmtInfo(cx
);
4775 PushLoopStatement(bce
, &stmtInfo
, STMT_FOR_LOOP
, top
);
4777 ParseNode
* forHead
= pn
->pn_left
;
4778 ParseNode
* forBody
= pn
->pn_right
;
4780 /* C-style for (init; cond; update) ... loop. */
4782 ParseNode
* pn3
= forHead
->pn_kid1
;
4784 // No initializer, but emit a nop so that there's somewhere to put the
4785 // SRC_FOR annotation that IonBuilder will look for.
4788 bce
->emittingForInit
= true;
4789 if (!UpdateSourceCoordNotes(cx
, bce
, pn3
->pn_pos
.begin
))
4791 if (!EmitTree(cx
, bce
, pn3
))
4793 bce
->emittingForInit
= false;
4797 * NB: the SRC_FOR note has offsetBias 1 (JSOP_{NOP,POP}_LENGTH).
4798 * Use tmp to hold the biased srcnote "top" offset, which differs
4799 * from the top local variable by the length of the JSOP_GOTO
4800 * emitted in between tmp and top if this loop has a condition.
4802 int noteIndex
= NewSrcNote(cx
, bce
, SRC_FOR
);
4803 if (noteIndex
< 0 || Emit1(cx
, bce
, op
) < 0)
4805 ptrdiff_t tmp
= bce
->offset();
4808 if (forHead
->pn_kid2
) {
4809 /* Goto the loop condition, which branches back to iterate. */
4810 jmp
= EmitJump(cx
, bce
, JSOP_GOTO
, 0);
4814 if (op
!= JSOP_NOP
&& Emit1(cx
, bce
, JSOP_NOP
) < 0)
4818 top
= bce
->offset();
4819 SET_STATEMENT_TOP(&stmtInfo
, top
);
4821 /* Emit code for the loop body. */
4822 if (EmitLoopHead(cx
, bce
, forBody
) < 0)
4824 if (jmp
== -1 && !EmitLoopEntry(cx
, bce
, forBody
))
4826 if (!EmitTree(cx
, bce
, forBody
))
4829 /* Set the second note offset so we can find the update part. */
4830 JS_ASSERT(noteIndex
!= -1);
4831 ptrdiff_t tmp2
= bce
->offset();
4833 /* Set loop and enclosing "update" offsets, for continue. */
4834 StmtInfoBCE
* stmt
= &stmtInfo
;
4836 stmt
->update
= bce
->offset();
4837 } while ((stmt
= stmt
->down
) != nullptr && stmt
->type
== STMT_LABEL
);
4839 /* Check for update code to do before the condition (if any). */
4840 pn3
= forHead
->pn_kid3
;
4842 if (!UpdateSourceCoordNotes(cx
, bce
, pn3
->pn_pos
.begin
))
4845 if (!EmitTree(cx
, bce
, pn3
))
4848 /* Always emit the POP or NOP to help IonBuilder. */
4849 if (Emit1(cx
, bce
, op
) < 0)
4852 /* Restore the absolute line number for source note readers. */
4853 uint32_t lineNum
= bce
->parser
->tokenStream
.srcCoords
.lineNum(pn
->pn_pos
.end
);
4854 if (bce
->currentLine() != lineNum
) {
4855 if (NewSrcNote2(cx
, bce
, SRC_SETLINE
, ptrdiff_t(lineNum
)) < 0)
4857 bce
->current
->currentLine
= lineNum
;
4858 bce
->current
->lastColumn
= 0;
4862 ptrdiff_t tmp3
= bce
->offset();
4864 if (forHead
->pn_kid2
) {
4865 /* Fix up the goto from top to target the loop condition. */
4866 JS_ASSERT(jmp
>= 0);
4867 SetJumpOffsetAt(bce
, jmp
);
4868 if (!EmitLoopEntry(cx
, bce
, forHead
->pn_kid2
))
4871 if (!EmitTree(cx
, bce
, forHead
->pn_kid2
))
4875 /* Set the first note offset so we can find the loop condition. */
4876 if (!SetSrcNoteOffset(cx
, bce
, (unsigned)noteIndex
, 0, tmp3
- tmp
))
4878 if (!SetSrcNoteOffset(cx
, bce
, (unsigned)noteIndex
, 1, tmp2
- tmp
))
4880 /* The third note offset helps us find the loop-closing jump. */
4881 if (!SetSrcNoteOffset(cx
, bce
, (unsigned)noteIndex
, 2, bce
->offset() - tmp
))
4884 /* If no loop condition, just emit a loop-closing jump. */
4885 op
= forHead
->pn_kid2
? JSOP_IFNE
: JSOP_GOTO
;
4886 if (EmitJump(cx
, bce
, op
, top
- bce
->offset()) < 0)
4889 if (!bce
->tryNoteList
.append(JSTRY_LOOP
, bce
->stackDepth
, top
, bce
->offset()))
4892 /* Now fixup all breaks and continues. */
4893 return PopStatementBCE(cx
, bce
);
// Dispatch a PNK_FOR node to the matching emitter based on the head kind:
// for-in, for-of, or a C-style for(;;) head.
4897 EmitFor(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
, ptrdiff_t top
)
4899 if (pn
->pn_left
->isKind(PNK_FORIN
))
4900 return EmitForIn(cx
, bce
, pn
, top
);
4902 if (pn
->pn_left
->isKind(PNK_FOROF
))
4903 return EmitForOf(cx
, bce
, STMT_FOR_OF_LOOP
, pn
, top
);
4905 JS_ASSERT(pn
->pn_left
->isKind(PNK_FORHEAD
));
4906 return EmitNormalFor(cx
, bce
, pn
, top
);
4909 static MOZ_NEVER_INLINE
bool
4910 EmitFunc(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
4912 FunctionBox
* funbox
= pn
->pn_funbox
;
4913 RootedFunction
fun(cx
, funbox
->function());
4914 JS_ASSERT_IF(fun
->isInterpretedLazy(), fun
->lazyScript());
4917 * Set the EMITTEDFUNCTION flag in function definitions once they have been
4918 * emitted. Function definitions that need hoisting to the top of the
4919 * function will be seen by EmitFunc in two places.
4921 if (pn
->pn_dflags
& PND_EMITTEDFUNCTION
) {
4922 JS_ASSERT_IF(fun
->hasScript(), fun
->nonLazyScript());
4923 JS_ASSERT(pn
->functionIsHoisted());
4924 JS_ASSERT(bce
->sc
->isFunctionBox());
4928 pn
->pn_dflags
|= PND_EMITTEDFUNCTION
;
4931 * Mark as singletons any function which will only be executed once, or
4932 * which is inner to a lambda we only expect to run once. In the latter
4933 * case, if the lambda runs multiple times then CloneFunctionObject will
4934 * make a deep clone of its contents.
4936 if (fun
->isInterpreted()) {
4938 bce
->script
->compileAndGo() &&
4939 fun
->isInterpreted() &&
4940 (bce
->checkSingletonContext() ||
4941 (!bce
->isInLoop() && bce
->isRunOnceLambda()));
4942 if (!JSFunction::setTypeForScriptedFunction(cx
, fun
, singleton
))
4945 if (fun
->isInterpretedLazy()) {
4946 if (!fun
->lazyScript()->sourceObject()) {
4947 JSObject
* scope
= bce
->staticScope
;
4948 if (!scope
&& bce
->sc
->isFunctionBox())
4949 scope
= bce
->sc
->asFunctionBox()->function();
4950 JSObject
* source
= bce
->script
->sourceObject();
4951 fun
->lazyScript()->setParent(scope
, &source
->as
<ScriptSourceObject
>());
4953 if (bce
->emittingRunOnceLambda
)
4954 fun
->lazyScript()->setTreatAsRunOnce();
4956 SharedContext
* outersc
= bce
->sc
;
4958 if (outersc
->isFunctionBox() && outersc
->asFunctionBox()->mightAliasLocals())
4959 funbox
->setMightAliasLocals(); // inherit mightAliasLocals from parent
4960 JS_ASSERT_IF(outersc
->strict
, funbox
->strict
);
4962 // Inherit most things (principals, version, etc) from the parent.
4963 Rooted
<JSScript
*> parent(cx
, bce
->script
);
4964 CompileOptions
options(cx
, bce
->parser
->options());
4965 options
.setOriginPrincipals(parent
->originPrincipals())
4966 .setCompileAndGo(parent
->compileAndGo())
4967 .setSelfHostingMode(parent
->selfHosted())
4968 .setNoScriptRval(false)
4970 .setVersion(parent
->getVersion());
4972 Rooted
<JSObject
*> enclosingScope(cx
, EnclosingStaticScope(bce
));
4973 Rooted
<JSObject
*> sourceObject(cx
, bce
->script
->sourceObject());
4974 Rooted
<JSScript
*> script(cx
, JSScript::Create(cx
, enclosingScope
, false, options
,
4975 parent
->staticLevel() + 1,
4977 funbox
->bufStart
, funbox
->bufEnd
));
4981 script
->bindings
= funbox
->bindings
;
4983 uint32_t lineNum
= bce
->parser
->tokenStream
.srcCoords
.lineNum(pn
->pn_pos
.begin
);
4984 BytecodeEmitter
bce2(bce
, bce
->parser
, funbox
, script
, bce
->insideEval
,
4985 bce
->evalCaller
, bce
->hasGlobalScope
, lineNum
,
4990 /* We measured the max scope depth when we parsed the function. */
4991 if (!EmitFunctionScript(cx
, &bce2
, pn
->pn_body
))
4994 if (funbox
->usesArguments
&& funbox
->usesApply
)
4995 script
->setUsesArgumentsAndApply();
4998 JS_ASSERT(IsAsmJSModuleNative(fun
->native()));
5001 /* Make the function object a literal in the outer script's pool. */
5002 unsigned index
= bce
->objectList
.add(pn
->pn_funbox
);
5004 /* Non-hoisted functions simply emit their respective op. */
5005 if (!pn
->functionIsHoisted()) {
5006 /* JSOP_LAMBDA_ARROW is always preceded by JSOP_THIS. */
5007 MOZ_ASSERT(fun
->isArrow() == (pn
->getOp() == JSOP_LAMBDA_ARROW
));
5008 if (fun
->isArrow() && Emit1(cx
, bce
, JSOP_THIS
) < 0)
5010 return EmitIndex32(cx
, pn
->getOp(), index
, bce
);
5014 * For a script we emit the code as we parse. Thus the bytecode for
5015 * top-level functions should go in the prolog to predefine their
5016 * names in the variable object before the already-generated main code
5017 * is executed. This extra work for top-level scripts is not necessary
5018 * when we emit the code for a function. It is fully parsed prior to
5019 * invocation of the emitter and calls to EmitTree for function
5020 * definitions can be scheduled before generating the rest of code.
5022 if (!bce
->sc
->isFunctionBox()) {
5023 JS_ASSERT(pn
->pn_cookie
.isFree());
5024 JS_ASSERT(pn
->getOp() == JSOP_NOP
);
5025 JS_ASSERT(!bce
->topStmt
);
5026 bce
->switchToProlog();
5027 if (!EmitIndex32(cx
, JSOP_DEFFUN
, index
, bce
))
5029 if (!UpdateSourceCoordNotes(cx
, bce
, pn
->pn_pos
.begin
))
5031 bce
->switchToMain();
5034 BindingIter
bi(bce
->script
);
5035 while (bi
->name() != fun
->atom())
5037 JS_ASSERT(bi
->kind() == Binding::VARIABLE
|| bi
->kind() == Binding::CONSTANT
||
5038 bi
->kind() == Binding::ARGUMENT
);
5039 JS_ASSERT(bi
.frameIndex() < JS_BIT(20));
5041 pn
->pn_index
= index
;
5042 if (!EmitIndexOp(cx
, JSOP_LAMBDA
, index
, bce
))
5044 JS_ASSERT(pn
->getOp() == JSOP_GETLOCAL
|| pn
->getOp() == JSOP_GETARG
);
5045 JSOp setOp
= pn
->getOp() == JSOP_GETLOCAL
? JSOP_SETLOCAL
: JSOP_SETARG
;
5046 if (!EmitVarOp(cx
, pn
, setOp
, bce
))
5048 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
5056 EmitDo(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
5058 /* Emit an annotated nop so IonBuilder can recognize the 'do' loop. */
5059 ptrdiff_t noteIndex
= NewSrcNote(cx
, bce
, SRC_WHILE
);
5060 if (noteIndex
< 0 || Emit1(cx
, bce
, JSOP_NOP
) < 0)
5063 ptrdiff_t noteIndex2
= NewSrcNote(cx
, bce
, SRC_WHILE
);
5067 /* Compile the loop body. */
5068 ptrdiff_t top
= EmitLoopHead(cx
, bce
, pn
->pn_left
);
5072 LoopStmtInfo
stmtInfo(cx
);
5073 PushLoopStatement(bce
, &stmtInfo
, STMT_DO_LOOP
, top
);
5075 if (!EmitLoopEntry(cx
, bce
, nullptr))
5078 if (!EmitTree(cx
, bce
, pn
->pn_left
))
5081 /* Set loop and enclosing label update offsets, for continue. */
5082 ptrdiff_t off
= bce
->offset();
5083 StmtInfoBCE
* stmt
= &stmtInfo
;
5086 } while ((stmt
= stmt
->down
) != nullptr && stmt
->type
== STMT_LABEL
);
5088 /* Compile the loop condition, now that continues know where to go. */
5089 if (!EmitTree(cx
, bce
, pn
->pn_right
))
5092 ptrdiff_t beq
= EmitJump(cx
, bce
, JSOP_IFNE
, top
- bce
->offset());
5096 if (!bce
->tryNoteList
.append(JSTRY_LOOP
, bce
->stackDepth
, top
, bce
->offset()))
5100 * Update the annotations with the update and back edge positions, for
5103 * Be careful: We must set noteIndex2 before noteIndex in case the noteIndex
5106 if (!SetSrcNoteOffset(cx
, bce
, noteIndex2
, 0, beq
- top
))
5108 if (!SetSrcNoteOffset(cx
, bce
, noteIndex
, 0, 1 + (off
- top
)))
5111 return PopStatementBCE(cx
, bce
);
5115 EmitWhile(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
, ptrdiff_t top
)
5118 * Minimize bytecodes issued for one or more iterations by jumping to
5119 * the condition below the body and closing the loop if the condition
5120 * is true with a backward branch. For iteration count i:
5122 * i test at the top test at the bottom
5123 * = =============== ==================
5124 * 0 ifeq-pass goto; ifne-fail
5125 * 1 ifeq-fail; goto; ifne-pass goto; ifne-pass; ifne-fail
5126 * 2 2*(ifeq-fail; goto); ifeq-pass goto; 2*ifne-pass; ifne-fail
5128 * N N*(ifeq-fail; goto); ifeq-pass goto; N*ifne-pass; ifne-fail
5130 LoopStmtInfo
stmtInfo(cx
);
5131 PushLoopStatement(bce
, &stmtInfo
, STMT_WHILE_LOOP
, top
);
5133 ptrdiff_t noteIndex
= NewSrcNote(cx
, bce
, SRC_WHILE
);
5137 ptrdiff_t jmp
= EmitJump(cx
, bce
, JSOP_GOTO
, 0);
5141 top
= EmitLoopHead(cx
, bce
, pn
->pn_right
);
5145 if (!EmitTree(cx
, bce
, pn
->pn_right
))
5148 SetJumpOffsetAt(bce
, jmp
);
5149 if (!EmitLoopEntry(cx
, bce
, pn
->pn_left
))
5151 if (!EmitTree(cx
, bce
, pn
->pn_left
))
5154 ptrdiff_t beq
= EmitJump(cx
, bce
, JSOP_IFNE
, top
- bce
->offset());
5158 if (!bce
->tryNoteList
.append(JSTRY_LOOP
, bce
->stackDepth
, top
, bce
->offset()))
5161 if (!SetSrcNoteOffset(cx
, bce
, noteIndex
, 0, beq
- jmp
))
5164 return PopStatementBCE(cx
, bce
);
5168 EmitBreak(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, PropertyName
* label
)
5170 StmtInfoBCE
* stmt
= bce
->topStmt
;
5171 SrcNoteType noteType
;
5173 while (stmt
->type
!= STMT_LABEL
|| stmt
->label
!= label
)
5175 noteType
= SRC_BREAK2LABEL
;
5177 while (!stmt
->isLoop() && stmt
->type
!= STMT_SWITCH
)
5179 noteType
= (stmt
->type
== STMT_SWITCH
) ? SRC_SWITCHBREAK
: SRC_BREAK
;
5182 return EmitGoto(cx
, bce
, stmt
, &stmt
->breaks
, noteType
) >= 0;
5186 EmitContinue(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, PropertyName
* label
)
5188 StmtInfoBCE
* stmt
= bce
->topStmt
;
5190 /* Find the loop statement enclosed by the matching label. */
5191 StmtInfoBCE
* loop
= nullptr;
5192 while (stmt
->type
!= STMT_LABEL
|| stmt
->label
!= label
) {
5199 while (!stmt
->isLoop())
5203 return EmitGoto(cx
, bce
, stmt
, &stmt
->continues
, SRC_CONTINUE
) >= 0;
5207 EmitReturn(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
5209 if (!UpdateSourceCoordNotes(cx
, bce
, pn
->pn_pos
.begin
))
5212 if (bce
->sc
->isFunctionBox() && bce
->sc
->asFunctionBox()->isStarGenerator()) {
5213 if (!EmitPrepareIteratorResult(cx
, bce
))
5217 /* Push a return value */
5218 if (ParseNode
* pn2
= pn
->pn_kid
) {
5219 if (!EmitTree(cx
, bce
, pn2
))
5222 /* No explicit return value provided */
5223 if (Emit1(cx
, bce
, JSOP_UNDEFINED
) < 0)
5227 if (bce
->sc
->isFunctionBox() && bce
->sc
->asFunctionBox()->isStarGenerator()) {
5228 if (!EmitFinishIteratorResult(cx
, bce
, true))
5233 * EmitNonLocalJumpFixup may add fixup bytecode to close open try
5234 * blocks having finally clauses and to exit intermingled let blocks.
5235 * We can't simply transfer control flow to our caller in that case,
5236 * because we must gosub to those finally clauses from inner to outer,
5237 * with the correct stack pointer (i.e., after popping any with,
5238 * for/in, etc., slots nested inside the finally's try).
5240 * In this case we mutate JSOP_RETURN into JSOP_SETRVAL and add an
5241 * extra JSOP_RETRVAL after the fixups.
5243 ptrdiff_t top
= bce
->offset();
5245 if (Emit1(cx
, bce
, JSOP_RETURN
) < 0)
5248 NonLocalExitScope
nle(cx
, bce
);
5250 if (!nle
.prepareForNonLocalJump(nullptr))
5253 if (top
+ static_cast<ptrdiff_t>(JSOP_RETURN_LENGTH
) != bce
->offset()) {
5254 bce
->code()[top
] = JSOP_SETRVAL
;
5255 if (Emit1(cx
, bce
, JSOP_RETRVAL
) < 0)
5263 EmitYieldStar(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* iter
)
5265 JS_ASSERT(bce
->sc
->isFunctionBox());
5266 JS_ASSERT(bce
->sc
->asFunctionBox()->isStarGenerator());
5268 if (!EmitTree(cx
, bce
, iter
)) // ITERABLE
5271 // Convert iterable to iterator.
5272 if (Emit1(cx
, bce
, JSOP_DUP
) < 0) // ITERABLE ITERABLE
5274 if (!EmitAtomOp(cx
, cx
->names().std_iterator
, JSOP_CALLPROP
, bce
)) // ITERABLE @@ITERATOR
5276 if (Emit1(cx
, bce
, JSOP_SWAP
) < 0) // @@ITERATOR ITERABLE
5278 if (EmitCall(cx
, bce
, JSOP_CALL
, 0, iter
) < 0) // ITER
5280 CheckTypeSet(cx
, bce
, JSOP_CALL
);
5282 int depth
= bce
->stackDepth
;
5283 JS_ASSERT(depth
>= 1);
5285 // Initial send value is undefined.
5286 if (Emit1(cx
, bce
, JSOP_UNDEFINED
) < 0) // ITER RECEIVED
5288 ptrdiff_t initialSend
= -1;
5289 if (EmitBackPatchOp(cx
, bce
, &initialSend
) < 0) // goto initialSend
5292 // Try prologue. // ITER RESULT
5293 StmtInfoBCE
stmtInfo(cx
);
5294 PushStatementBCE(bce
, &stmtInfo
, STMT_TRY
, bce
->offset());
5295 ptrdiff_t noteIndex
= NewSrcNote(cx
, bce
, SRC_TRY
);
5296 if (noteIndex
< 0 || Emit1(cx
, bce
, JSOP_TRY
) < 0)
5298 ptrdiff_t tryStart
= bce
->offset(); // tryStart:
5299 JS_ASSERT(bce
->stackDepth
== depth
+ 1);
5301 // Yield RESULT as-is, without re-boxing.
5302 if (Emit1(cx
, bce
, JSOP_YIELD
) < 0) // ITER RECEIVED
5306 if (!SetSrcNoteOffset(cx
, bce
, noteIndex
, 0, bce
->offset() - tryStart
+ JSOP_TRY_LENGTH
))
5308 ptrdiff_t subsequentSend
= -1;
5309 if (EmitBackPatchOp(cx
, bce
, &subsequentSend
) < 0) // goto subsequentSend
5311 ptrdiff_t tryEnd
= bce
->offset(); // tryEnd:
5314 // THROW? = 'throw' in ITER // ITER
5315 bce
->stackDepth
= (uint32_t) depth
;
5316 if (Emit1(cx
, bce
, JSOP_EXCEPTION
) < 0) // ITER EXCEPTION
5318 if (Emit1(cx
, bce
, JSOP_SWAP
) < 0) // EXCEPTION ITER
5320 if (Emit1(cx
, bce
, JSOP_DUP
) < 0) // EXCEPTION ITER ITER
5322 if (!EmitAtomOp(cx
, cx
->names().throw_
, JSOP_STRING
, bce
)) // EXCEPTION ITER ITER "throw"
5324 if (Emit1(cx
, bce
, JSOP_SWAP
) < 0) // EXCEPTION ITER "throw" ITER
5326 if (Emit1(cx
, bce
, JSOP_IN
) < 0) // EXCEPTION ITER THROW?
5328 // if (THROW?) goto delegate
5329 ptrdiff_t checkThrow
= EmitJump(cx
, bce
, JSOP_IFNE
, 0); // EXCEPTION ITER
5332 if (Emit1(cx
, bce
, JSOP_POP
) < 0) // EXCEPTION
5334 if (Emit1(cx
, bce
, JSOP_THROW
) < 0) // throw EXCEPTION
5337 SetJumpOffsetAt(bce
, checkThrow
); // delegate:
5338 // RESULT = ITER.throw(EXCEPTION) // EXCEPTION ITER
5339 bce
->stackDepth
= (uint32_t) depth
+ 1;
5340 if (Emit1(cx
, bce
, JSOP_DUP
) < 0) // EXCEPTION ITER ITER
5342 if (Emit1(cx
, bce
, JSOP_DUP
) < 0) // EXCEPTION ITER ITER ITER
5344 if (!EmitAtomOp(cx
, cx
->names().throw_
, JSOP_CALLPROP
, bce
)) // EXCEPTION ITER ITER THROW
5346 if (Emit1(cx
, bce
, JSOP_SWAP
) < 0) // EXCEPTION ITER THROW ITER
5348 if (Emit2(cx
, bce
, JSOP_PICK
, (jsbytecode
)3) < 0) // ITER THROW ITER EXCEPTION
5350 if (EmitCall(cx
, bce
, JSOP_CALL
, 1, iter
) < 0) // ITER RESULT
5352 CheckTypeSet(cx
, bce
, JSOP_CALL
);
5353 JS_ASSERT(bce
->stackDepth
== depth
+ 1);
5354 ptrdiff_t checkResult
= -1;
5355 if (EmitBackPatchOp(cx
, bce
, &checkResult
) < 0) // goto checkResult
5359 if (!PopStatementBCE(cx
, bce
))
5361 // This is a peace offering to ReconstructPCStack. See the note in EmitTry.
5362 if (Emit1(cx
, bce
, JSOP_NOP
) < 0)
5364 if (!bce
->tryNoteList
.append(JSTRY_CATCH
, depth
, tryStart
, tryEnd
))
5367 // After the try/catch block: send the received value to the iterator.
5368 if (!BackPatch(cx
, bce
, initialSend
, bce
->code().end(), JSOP_GOTO
)) // initialSend:
5370 if (!BackPatch(cx
, bce
, subsequentSend
, bce
->code().end(), JSOP_GOTO
)) // subsequentSend:
5374 // result = iter.next(received) // ITER RECEIVED
5375 if (Emit1(cx
, bce
, JSOP_SWAP
) < 0) // RECEIVED ITER
5377 if (Emit1(cx
, bce
, JSOP_DUP
) < 0) // RECEIVED ITER ITER
5379 if (Emit1(cx
, bce
, JSOP_DUP
) < 0) // RECEIVED ITER ITER ITER
5381 if (!EmitAtomOp(cx
, cx
->names().next
, JSOP_CALLPROP
, bce
)) // RECEIVED ITER ITER NEXT
5383 if (Emit1(cx
, bce
, JSOP_SWAP
) < 0) // RECEIVED ITER NEXT ITER
5385 if (Emit2(cx
, bce
, JSOP_PICK
, (jsbytecode
)3) < 0) // ITER NEXT ITER RECEIVED
5387 if (EmitCall(cx
, bce
, JSOP_CALL
, 1, iter
) < 0) // ITER RESULT
5389 CheckTypeSet(cx
, bce
, JSOP_CALL
);
5390 JS_ASSERT(bce
->stackDepth
== depth
+ 1);
5392 if (!BackPatch(cx
, bce
, checkResult
, bce
->code().end(), JSOP_GOTO
)) // checkResult:
5394 // if (!result.done) goto tryStart; // ITER RESULT
5395 if (Emit1(cx
, bce
, JSOP_DUP
) < 0) // ITER RESULT RESULT
5397 if (!EmitAtomOp(cx
, cx
->names().done
, JSOP_GETPROP
, bce
)) // ITER RESULT DONE
5399 // if (!DONE) goto tryStart;
5400 if (EmitJump(cx
, bce
, JSOP_IFEQ
, tryStart
- bce
->offset()) < 0) // ITER RESULT
5404 if (Emit1(cx
, bce
, JSOP_SWAP
) < 0) // RESULT ITER
5406 if (Emit1(cx
, bce
, JSOP_POP
) < 0) // RESULT
5408 if (!EmitAtomOp(cx
, cx
->names().value
, JSOP_GETPROP
, bce
)) // VALUE
5411 JS_ASSERT(bce
->stackDepth
== depth
);
5417 EmitStatementList(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
, ptrdiff_t top
)
5419 JS_ASSERT(pn
->isArity(PN_LIST
));
5421 StmtInfoBCE
stmtInfo(cx
);
5422 PushStatementBCE(bce
, &stmtInfo
, STMT_BLOCK
, top
);
5424 ParseNode
* pnchild
= pn
->pn_head
;
5426 if (pn
->pn_xflags
& PNX_DESTRUCT
)
5427 pnchild
= pnchild
->pn_next
;
5429 for (ParseNode
* pn2
= pnchild
; pn2
; pn2
= pn2
->pn_next
) {
5430 if (!EmitTree(cx
, bce
, pn2
))
5434 return PopStatementBCE(cx
, bce
);
5438 EmitStatement(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
5440 JS_ASSERT(pn
->isKind(PNK_SEMI
));
5442 ParseNode
* pn2
= pn
->pn_kid
;
5446 if (!UpdateSourceCoordNotes(cx
, bce
, pn
->pn_pos
.begin
))
5450 * Top-level or called-from-a-native JS_Execute/EvaluateScript,
5451 * debugger, and eval frames may need the value of the ultimate
5452 * expression statement as the script's result, despite the fact
5453 * that it appears useless to the compiler.
5455 * API users may also set the JSOPTION_NO_SCRIPT_RVAL option when
5456 * calling JS_Compile* to suppress JSOP_SETRVAL.
5458 bool wantval
= false;
5459 bool useful
= false;
5460 if (bce
->sc
->isFunctionBox()) {
5461 JS_ASSERT(!bce
->script
->noScriptRval());
5463 useful
= wantval
= !bce
->script
->noScriptRval();
5466 /* Don't eliminate expressions with side effects. */
5468 if (!CheckSideEffects(cx
, bce
, pn2
, &useful
))
5472 * Don't eliminate apparently useless expressions if they are
5473 * labeled expression statements. The pc->topStmt->update test
5474 * catches the case where we are nesting in EmitTree for a labeled
5475 * compound statement.
5478 bce
->topStmt
->type
== STMT_LABEL
&&
5479 bce
->topStmt
->update
>= bce
->offset())
5486 JSOp op
= wantval
? JSOP_SETRVAL
: JSOP_POP
;
5487 JS_ASSERT_IF(pn2
->isKind(PNK_ASSIGN
), pn2
->isOp(JSOP_NOP
));
5488 if (!EmitTree(cx
, bce
, pn2
))
5490 if (Emit1(cx
, bce
, op
) < 0)
5492 } else if (pn
->isDirectivePrologueMember()) {
5493 // Don't complain about directive prologue members; just don't emit
5496 if (JSAtom
* atom
= pn
->isStringExprStatement()) {
5497 // Warn if encountering a non-directive prologue member string
5498 // expression statement, that is inconsistent with the current
5499 // directive prologue. That is, a script *not* starting with
5500 // "use strict" should warn for any "use strict" statements seen
5501 // later in the script, because such statements are misleading.
5502 const char* directive
= nullptr;
5503 if (atom
== cx
->names().useStrict
) {
5504 if (!bce
->sc
->strict
)
5505 directive
= js_useStrict_str
;
5506 } else if (atom
== cx
->names().useAsm
) {
5507 if (bce
->sc
->isFunctionBox()) {
5508 JSFunction
* fun
= bce
->sc
->asFunctionBox()->function();
5509 if (fun
->isNative() && IsAsmJSModuleNative(fun
->native()))
5510 directive
= js_useAsm_str
;
5515 if (!bce
->reportStrictWarning(pn2
, JSMSG_CONTRARY_NONDIRECTIVE
, directive
))
5519 bce
->current
->currentLine
= bce
->parser
->tokenStream
.srcCoords
.lineNum(pn2
->pn_pos
.begin
);
5520 bce
->current
->lastColumn
= 0;
5521 if (!bce
->reportStrictWarning(pn2
, JSMSG_USELESS_EXPR
))
5530 EmitDelete(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
5533 * Under ECMA 3, deleting a non-reference returns true -- but alas we
5534 * must evaluate the operand if it appears it might have side effects.
5536 ParseNode
* pn2
= pn
->pn_kid
;
5537 switch (pn2
->getKind()) {
5540 if (!BindNameToSlot(cx
, bce
, pn2
))
5542 JSOp op
= pn2
->getOp();
5543 if (op
== JSOP_FALSE
) {
5544 if (Emit1(cx
, bce
, op
) < 0)
5547 if (!EmitAtomOp(cx
, pn2
, op
, bce
))
5553 if (!EmitPropOp(cx
, pn2
, JSOP_DELPROP
, bce
))
5557 if (!EmitElemOp(cx
, pn2
, JSOP_DELELEM
, bce
))
5563 * If useless, just emit JSOP_TRUE; otherwise convert delete foo()
5564 * to foo(), true (a comma expression).
5566 bool useful
= false;
5567 if (!CheckSideEffects(cx
, bce
, pn2
, &useful
))
5571 JS_ASSERT_IF(pn2
->isKind(PNK_CALL
), !(pn2
->pn_xflags
& PNX_SETCALL
));
5572 if (!EmitTree(cx
, bce
, pn2
))
5574 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
5578 if (Emit1(cx
, bce
, JSOP_TRUE
) < 0)
5587 EmitArray(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
, uint32_t count
);
5590 EmitCallOrNew(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
5592 bool callop
= pn
->isKind(PNK_CALL
) || pn
->isKind(PNK_TAGGED_TEMPLATE
);
5594 * Emit callable invocation or operator new (constructor call) code.
5595 * First, emit code for the left operand to evaluate the callable or
5596 * constructable object expression.
5598 * For operator new, we emit JSOP_GETPROP instead of JSOP_CALLPROP, etc.
5599 * This is necessary to interpose the lambda-initialized method read
5600 * barrier -- see the code in jsinterp.cpp for JSOP_LAMBDA followed by
5601 * JSOP_{SET,INIT}PROP.
5603 * Then (or in a call case that has no explicit reference-base
5604 * object) we emit JSOP_UNDEFINED to produce the undefined |this|
5605 * value required for calls (which non-strict mode functions
5606 * will box into the global object).
5608 uint32_t argc
= pn
->pn_count
- 1;
5610 if (argc
>= ARGC_LIMIT
) {
5611 bce
->parser
->tokenStream
.reportError(callop
5612 ? JSMSG_TOO_MANY_FUN_ARGS
5613 : JSMSG_TOO_MANY_CON_ARGS
);
5617 bool emitArgs
= true;
5618 ParseNode
* pn2
= pn
->pn_head
;
5619 bool spread
= JOF_OPTYPE(pn
->getOp()) == JOF_BYTE
;
5620 switch (pn2
->getKind()) {
5622 if (bce
->emitterMode
== BytecodeEmitter::SelfHosting
&&
5623 pn2
->name() == cx
->names().callFunction
&&
5627 * Special-casing of callFunction to emit bytecode that directly
5628 * invokes the callee with the correct |this| object and arguments.
5629 * callFunction(fun, thisArg, arg0, arg1) thus becomes:
5630 * - emit lookup for fun
5631 * - emit lookup for thisArg
5632 * - emit lookups for arg0, arg1
5634 * argc is set to the amount of actually emitted args and the
5635 * emitting of args below is disabled by setting emitArgs to false.
5637 if (pn
->pn_count
< 3) {
5638 bce
->reportError(pn
, JSMSG_MORE_ARGS_NEEDED
, "callFunction", "1", "s");
5641 ParseNode
* funNode
= pn2
->pn_next
;
5642 if (!EmitTree(cx
, bce
, funNode
))
5644 ParseNode
* thisArg
= funNode
->pn_next
;
5645 if (!EmitTree(cx
, bce
, thisArg
))
5647 bool oldEmittingForInit
= bce
->emittingForInit
;
5648 bce
->emittingForInit
= false;
5649 for (ParseNode
* argpn
= thisArg
->pn_next
; argpn
; argpn
= argpn
->pn_next
) {
5650 if (!EmitTree(cx
, bce
, argpn
))
5653 bce
->emittingForInit
= oldEmittingForInit
;
5658 if (!EmitNameOp(cx
, bce
, pn2
, callop
))
5662 if (!EmitPropOp(cx
, pn2
, callop
? JSOP_CALLPROP
: JSOP_GETPROP
, bce
))
5666 if (!EmitElemOp(cx
, pn2
, callop
? JSOP_CALLELEM
: JSOP_GETELEM
, bce
))
5671 * Top level lambdas which are immediately invoked should be
5672 * treated as only running once. Every time they execute we will
5673 * create new types and scripts for their contents, to increase
5674 * the quality of type information within them and enable more
5675 * backend optimizations. Note that this does not depend on the
5676 * lambda being invoked at most once (it may be named or be
5677 * accessed via foo.caller indirection), as multiple executions
5678 * will just cause the inner scripts to be repeatedly cloned.
5680 JS_ASSERT(!bce
->emittingRunOnceLambda
);
5681 if (bce
->checkSingletonContext() || (!bce
->isInLoop() && bce
->isRunOnceLambda())) {
5682 bce
->emittingRunOnceLambda
= true;
5683 if (!EmitTree(cx
, bce
, pn2
))
5685 bce
->emittingRunOnceLambda
= false;
5687 if (!EmitTree(cx
, bce
, pn2
))
5693 if (!EmitTree(cx
, bce
, pn2
))
5695 callop
= false; /* trigger JSOP_UNDEFINED after */
5699 JSOp thisop
= pn
->isKind(PNK_GENEXP
) ? JSOP_THIS
: JSOP_UNDEFINED
;
5700 if (Emit1(cx
, bce
, thisop
) < 0)
5706 * Emit code for each argument in order, then emit the JSOP_*CALL or
5707 * JSOP_NEW bytecode with a two-byte immediate telling how many args
5708 * were pushed on the operand stack.
5710 bool oldEmittingForInit
= bce
->emittingForInit
;
5711 bce
->emittingForInit
= false;
5713 for (ParseNode
* pn3
= pn2
->pn_next
; pn3
; pn3
= pn3
->pn_next
) {
5714 if (!EmitTree(cx
, bce
, pn3
))
5718 if (!EmitArray(cx
, bce
, pn2
->pn_next
, argc
))
5721 bce
->emittingForInit
= oldEmittingForInit
;
5725 if (EmitCall(cx
, bce
, pn
->getOp(), argc
, pn
) < 0)
5728 if (Emit1(cx
, bce
, pn
->getOp()) < 0)
5731 CheckTypeSet(cx
, bce
, pn
->getOp());
5732 if (pn
->isOp(JSOP_EVAL
) || pn
->isOp(JSOP_SPREADEVAL
)) {
5733 uint32_t lineNum
= bce
->parser
->tokenStream
.srcCoords
.lineNum(pn
->pn_pos
.begin
);
5734 EMIT_UINT16_IMM_OP(JSOP_LINENO
, lineNum
);
5736 if (pn
->pn_xflags
& PNX_SETCALL
) {
5737 if (Emit1(cx
, bce
, JSOP_SETCALL
) < 0)
5744 EmitLogical(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
5747 * JSOP_OR converts the operand on the stack to boolean, leaves the original
5748 * value on the stack and jumps if true; otherwise it falls into the next
5749 * bytecode, which pops the left operand and then evaluates the right operand.
5750 * The jump goes around the right operand evaluation.
5752 * JSOP_AND converts the operand on the stack to boolean and jumps if false;
5753 * otherwise it falls into the right operand's bytecode.
5756 if (pn
->isArity(PN_BINARY
)) {
5757 if (!EmitTree(cx
, bce
, pn
->pn_left
))
5759 ptrdiff_t top
= EmitJump(cx
, bce
, JSOP_BACKPATCH
, 0);
5762 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
5764 if (!EmitTree(cx
, bce
, pn
->pn_right
))
5766 ptrdiff_t off
= bce
->offset();
5767 jsbytecode
* pc
= bce
->code(top
);
5768 SET_JUMP_OFFSET(pc
, off
- top
);
5773 JS_ASSERT(pn
->isArity(PN_LIST
));
5774 JS_ASSERT(pn
->pn_head
->pn_next
->pn_next
);
5776 /* Left-associative operator chain: avoid too much recursion. */
5777 ParseNode
* pn2
= pn
->pn_head
;
5778 if (!EmitTree(cx
, bce
, pn2
))
5780 ptrdiff_t top
= EmitJump(cx
, bce
, JSOP_BACKPATCH
, 0);
5783 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
5786 /* Emit nodes between the head and the tail. */
5787 ptrdiff_t jmp
= top
;
5788 while ((pn2
= pn2
->pn_next
)->pn_next
) {
5789 if (!EmitTree(cx
, bce
, pn2
))
5791 ptrdiff_t off
= EmitJump(cx
, bce
, JSOP_BACKPATCH
, 0);
5794 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
5796 SET_JUMP_OFFSET(bce
->code(jmp
), off
- jmp
);
5799 if (!EmitTree(cx
, bce
, pn2
))
5803 ptrdiff_t off
= bce
->offset();
5805 jsbytecode
* pc
= bce
->code(top
);
5806 ptrdiff_t tmp
= GET_JUMP_OFFSET(pc
);
5807 SET_JUMP_OFFSET(pc
, off
- top
);
5810 } while ((pn2
= pn2
->pn_next
)->pn_next
);
5816 * Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See
5817 * the comment on EmitSwitch.
5819 MOZ_NEVER_INLINE
static bool
5820 EmitIncOrDec(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
5822 /* Emit lvalue-specialized code for ++/-- operators. */
5823 ParseNode
* pn2
= pn
->pn_kid
;
5824 switch (pn2
->getKind()) {
5826 if (!EmitPropIncDec(cx
, pn
, bce
))
5830 if (!EmitElemIncDec(cx
, pn
, bce
))
5834 JS_ASSERT(pn2
->pn_xflags
& PNX_SETCALL
);
5835 if (!EmitTree(cx
, bce
, pn2
))
5839 JS_ASSERT(pn2
->isKind(PNK_NAME
));
5840 pn2
->setOp(JSOP_SETNAME
);
5841 if (!BindNameToSlot(cx
, bce
, pn2
))
5843 JSOp op
= pn2
->getOp();
5848 case JSOP_SETALIASEDVAR
:
5856 if (op
== JSOP_CALLEE
) {
5857 if (Emit1(cx
, bce
, op
) < 0)
5859 } else if (!pn2
->pn_cookie
.isFree()) {
5861 if (!EmitVarIncDec(cx
, pn
, bce
))
5864 if (!EmitVarOp(cx
, pn2
, op
, bce
))
5868 JS_ASSERT(JOF_OPTYPE(op
) == JOF_ATOM
);
5870 if (!EmitNameIncDec(cx
, pn
, bce
))
5873 if (!EmitAtomOp(cx
, pn2
, op
, bce
))
5878 if (pn2
->isConst()) {
5879 if (Emit1(cx
, bce
, JSOP_POS
) < 0)
5882 JSOp binop
= GetIncDecInfo(pn
->getKind(), &post
);
5884 if (Emit1(cx
, bce
, JSOP_ONE
) < 0)
5886 if (Emit1(cx
, bce
, binop
) < 0)
5895 * Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See
5896 * the comment on EmitSwitch.
5898 MOZ_NEVER_INLINE
static bool
5899 EmitLabeledStatement(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, const LabeledStatement
* pn
)
5902 * Emit a JSOP_LABEL instruction. The argument is the offset to the statement
5903 * following the labeled statement.
5906 if (!bce
->makeAtomIndex(pn
->label(), &index
))
5909 ptrdiff_t top
= EmitJump(cx
, bce
, JSOP_LABEL
, 0);
5913 /* Emit code for the labeled statement. */
5914 StmtInfoBCE
stmtInfo(cx
);
5915 PushStatementBCE(bce
, &stmtInfo
, STMT_LABEL
, bce
->offset());
5916 stmtInfo
.label
= pn
->label();
5917 if (!EmitTree(cx
, bce
, pn
->statement()))
5919 if (!PopStatementBCE(cx
, bce
))
5922 /* Patch the JSOP_LABEL offset. */
5923 SetJumpOffsetAt(bce
, top
);
5928 EmitSyntheticStatements(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
, ptrdiff_t top
)
5930 JS_ASSERT(pn
->isArity(PN_LIST
));
5931 StmtInfoBCE
stmtInfo(cx
);
5932 PushStatementBCE(bce
, &stmtInfo
, STMT_SEQ
, top
);
5933 ParseNode
* pn2
= pn
->pn_head
;
5934 if (pn
->pn_xflags
& PNX_DESTRUCT
)
5936 for (; pn2
; pn2
= pn2
->pn_next
) {
5937 if (!EmitTree(cx
, bce
, pn2
))
5940 return PopStatementBCE(cx
, bce
);
5944 EmitConditionalExpression(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ConditionalExpression
& conditional
)
5946 /* Emit the condition, then branch if false to the else part. */
5947 if (!EmitTree(cx
, bce
, &conditional
.condition()))
5949 ptrdiff_t noteIndex
= NewSrcNote(cx
, bce
, SRC_COND
);
5952 ptrdiff_t beq
= EmitJump(cx
, bce
, JSOP_IFEQ
, 0);
5953 if (beq
< 0 || !EmitTree(cx
, bce
, &conditional
.thenExpression()))
5956 /* Jump around else, fixup the branch, emit else, fixup jump. */
5957 ptrdiff_t jmp
= EmitJump(cx
, bce
, JSOP_GOTO
, 0);
5960 SetJumpOffsetAt(bce
, beq
);
5963 * Because each branch pushes a single value, but our stack budgeting
5964 * analysis ignores branches, we now have to adjust bce->stackDepth to
5965 * ignore the value pushed by the first branch. Execution will follow
5966 * only one path, so we must decrement bce->stackDepth.
5968 * Failing to do this will foil code, such as let expression and block
5969 * code generation, which must use the stack depth to compute local
5970 * stack indexes correctly.
5972 JS_ASSERT(bce
->stackDepth
> 0);
5974 if (!EmitTree(cx
, bce
, &conditional
.elseExpression()))
5976 SetJumpOffsetAt(bce
, jmp
);
5977 return SetSrcNoteOffset(cx
, bce
, noteIndex
, 0, jmp
- beq
);
5981 * Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See
5982 * the comment on EmitSwitch.
5984 MOZ_NEVER_INLINE
static bool
5985 EmitObject(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
5987 if (!(pn
->pn_xflags
& PNX_NONCONST
) && pn
->pn_head
&& bce
->checkSingletonContext())
5988 return EmitSingletonInitialiser(cx
, bce
, pn
);
5991 * Emit code for {p:a, '%q':b, 2:c} that is equivalent to constructing
5992 * a new object and defining (in source order) each property on the object
5993 * (or mutating the object's [[Prototype]], in the case of __proto__).
5995 ptrdiff_t offset
= bce
->offset();
5996 if (!EmitNewInit(cx
, bce
, JSProto_Object
))
6000 * Try to construct the shape of the object as we go, so we can emit a
6001 * JSOP_NEWOBJECT with the final shape instead.
6003 RootedObject
obj(cx
);
6004 if (bce
->script
->compileAndGo()) {
6005 gc::AllocKind kind
= GuessObjectGCKind(pn
->pn_count
);
6006 obj
= NewBuiltinClassInstance(cx
, &JSObject::class_
, kind
, TenuredObject
);
6011 for (ParseNode
* pn2
= pn
->pn_head
; pn2
; pn2
= pn2
->pn_next
) {
6012 if (!UpdateSourceCoordNotes(cx
, bce
, pn2
->pn_pos
.begin
))
6015 /* Emit an index for t[2] for later consumption by JSOP_INITELEM. */
6016 ParseNode
* pn3
= pn2
->pn_left
;
6017 bool isIndex
= false;
6018 if (pn3
->isKind(PNK_NUMBER
)) {
6019 if (!EmitNumberOp(cx
, pn3
->pn_dval
, bce
))
6022 } else if (pn3
->isKind(PNK_NAME
) || pn3
->isKind(PNK_STRING
)) {
6023 // The parser already checked for atoms representing indexes and
6024 // used PNK_NUMBER instead, but also watch for ids which TI treats
6025 // as indexes for simpliciation of downstream analysis.
6026 jsid id
= NameToId(pn3
->pn_atom
->asPropertyName());
6027 if (id
!= types::IdToTypeId(id
)) {
6028 if (!EmitTree(cx
, bce
, pn3
))
6033 JS_ASSERT(pn3
->isKind(PNK_COMPUTED_NAME
));
6034 if (!EmitTree(cx
, bce
, pn3
->pn_kid
))
6039 /* Emit code for the property initializer. */
6040 if (!EmitTree(cx
, bce
, pn2
->pn_right
))
6043 JSOp op
= pn2
->getOp();
6044 JS_ASSERT(op
== JSOP_INITPROP
||
6045 op
== JSOP_INITPROP_GETTER
||
6046 op
== JSOP_INITPROP_SETTER
);
6048 if (op
== JSOP_INITPROP_GETTER
|| op
== JSOP_INITPROP_SETTER
)
6054 case JSOP_INITPROP
: op
= JSOP_INITELEM
; break;
6055 case JSOP_INITPROP_GETTER
: op
= JSOP_INITELEM_GETTER
; break;
6056 case JSOP_INITPROP_SETTER
: op
= JSOP_INITELEM_SETTER
; break;
6057 default: MOZ_CRASH("Invalid op");
6059 if (Emit1(cx
, bce
, op
) < 0)
6062 JS_ASSERT(pn3
->isKind(PNK_NAME
) || pn3
->isKind(PNK_STRING
));
6064 // If we have { __proto__: expr }, implement prototype mutation.
6065 if (op
== JSOP_INITPROP
&& pn3
->pn_atom
== cx
->names().proto
) {
6067 if (Emit1(cx
, bce
, JSOP_MUTATEPROTO
) < 0)
6073 if (!bce
->makeAtomIndex(pn3
->pn_atom
, &index
))
6076 MOZ_ASSERT(op
== JSOP_INITPROP
||
6077 op
== JSOP_INITPROP_GETTER
||
6078 op
== JSOP_INITPROP_SETTER
);
6081 JS_ASSERT(!obj
->inDictionaryMode());
6082 Rooted
<jsid
> id(cx
, AtomToId(pn3
->pn_atom
));
6083 RootedValue
undefinedValue(cx
, UndefinedValue());
6084 if (!DefineNativeProperty(cx
, obj
, id
, undefinedValue
, nullptr,
6085 nullptr, JSPROP_ENUMERATE
))
6089 if (obj
->inDictionaryMode())
6093 if (!EmitIndex32(cx
, op
, index
, bce
))
6098 if (Emit1(cx
, bce
, JSOP_ENDINIT
) < 0)
6103 * The object survived and has a predictable shape: update the original
6106 ObjectBox
* objbox
= bce
->parser
->newObjectBox(obj
);
6110 static_assert(JSOP_NEWINIT_LENGTH
== JSOP_NEWOBJECT_LENGTH
,
6111 "newinit and newobject must have equal length to edit in-place");
6113 uint32_t index
= bce
->objectList
.add(objbox
);
6114 jsbytecode
* code
= bce
->code(offset
);
6115 code
[0] = JSOP_NEWOBJECT
;
6116 code
[1] = jsbytecode(index
>> 24);
6117 code
[2] = jsbytecode(index
>> 16);
6118 code
[3] = jsbytecode(index
>> 8);
6119 code
[4] = jsbytecode(index
);
6126 EmitArrayComp(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
6128 if (!EmitNewInit(cx
, bce
, JSProto_Array
))
6132 * Pass the new array's stack index to the PNK_ARRAYPUSH case via
6133 * bce->arrayCompDepth, then simply traverse the PNK_FOR node and
6134 * its kids under pn2 to generate this comprehension.
6136 JS_ASSERT(bce
->stackDepth
> 0);
6137 uint32_t saveDepth
= bce
->arrayCompDepth
;
6138 bce
->arrayCompDepth
= (uint32_t) (bce
->stackDepth
- 1);
6139 if (!EmitTree(cx
, bce
, pn
->pn_head
))
6141 bce
->arrayCompDepth
= saveDepth
;
6143 /* Emit the usual op needed for decompilation. */
6144 return Emit1(cx
, bce
, JSOP_ENDINIT
) >= 0;
6148 * EmitSpread expects the current index (I) of the array, the array itself and the iterator to be
6149 * on the stack in that order (iterator on the bottom).
6150 * It will pop the iterator and I, then iterate over the iterator by calling |.next()|
6151 * and put the results into the I-th element of array with incrementing I, then
6152 * push the result I (it will be original I + iteration count).
6153 * The stack after iteration will look like |ARRAY INDEX|.
6156 EmitSpread(ExclusiveContext
* cx
, BytecodeEmitter
* bce
)
6158 return EmitForOf(cx
, bce
, STMT_SPREAD
, nullptr, -1);
6162 EmitArray(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
, uint32_t count
)
6165 * Emit code for [a, b, c] that is equivalent to constructing a new
6166 * array and in source order evaluating each element value and adding
6167 * it to the array, without invoking latent setters. We use the
6168 * JSOP_NEWINIT and JSOP_INITELEM_ARRAY bytecodes to ignore setters and
6169 * to avoid dup'ing and popping the array as each element is added, as
6170 * JSOP_SETELEM/JSOP_SETPROP would do.
6173 int32_t nspread
= 0;
6174 for (ParseNode
* elt
= pn
; elt
; elt
= elt
->pn_next
) {
6175 if (elt
->isKind(PNK_SPREAD
))
6179 ptrdiff_t off
= EmitN(cx
, bce
, JSOP_NEWARRAY
, 3); // ARRAY
6182 CheckTypeSet(cx
, bce
, JSOP_NEWARRAY
);
6183 jsbytecode
* pc
= bce
->code(off
);
6185 // For arrays with spread, this is a very pessimistic allocation, the
6186 // minimum possible final size.
6187 SET_UINT24(pc
, count
- nspread
);
6189 ParseNode
* pn2
= pn
;
6191 bool afterSpread
= false;
6192 for (atomIndex
= 0; pn2
; atomIndex
++, pn2
= pn2
->pn_next
) {
6193 if (!afterSpread
&& pn2
->isKind(PNK_SPREAD
)) {
6195 if (!EmitNumberOp(cx
, atomIndex
, bce
)) // ARRAY INDEX
6198 if (!UpdateSourceCoordNotes(cx
, bce
, pn2
->pn_pos
.begin
))
6200 if (pn2
->isKind(PNK_ELISION
)) {
6201 if (Emit1(cx
, bce
, JSOP_HOLE
) < 0)
6204 ParseNode
* expr
= pn2
->isKind(PNK_SPREAD
) ? pn2
->pn_kid
: pn2
;
6205 if (!EmitTree(cx
, bce
, expr
)) // ARRAY INDEX? VALUE
6208 if (pn2
->isKind(PNK_SPREAD
)) {
6209 if (!EmitIterator(cx
, bce
)) // ARRAY INDEX ITER
6211 if (Emit2(cx
, bce
, JSOP_PICK
, (jsbytecode
)2) < 0) // INDEX ITER ARRAY
6213 if (Emit2(cx
, bce
, JSOP_PICK
, (jsbytecode
)2) < 0) // ITER ARRAY INDEX
6215 if (!EmitSpread(cx
, bce
)) // ARRAY INDEX
6217 } else if (afterSpread
) {
6218 if (Emit1(cx
, bce
, JSOP_INITELEM_INC
) < 0)
6221 off
= EmitN(cx
, bce
, JSOP_INITELEM_ARRAY
, 3);
6224 SET_UINT24(bce
->code(off
), atomIndex
);
6227 JS_ASSERT(atomIndex
== count
);
6229 if (Emit1(cx
, bce
, JSOP_POP
) < 0) // ARRAY
6233 /* Emit an op to finish the array and aid in decompilation. */
6234 return Emit1(cx
, bce
, JSOP_ENDINIT
) >= 0;
6238 EmitUnary(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
6240 if (!UpdateSourceCoordNotes(cx
, bce
, pn
->pn_pos
.begin
))
6242 /* Unary op, including unary +/-. */
6243 JSOp op
= pn
->getOp();
6244 ParseNode
* pn2
= pn
->pn_kid
;
6246 if (op
== JSOP_TYPEOF
&& !pn2
->isKind(PNK_NAME
))
6247 op
= JSOP_TYPEOFEXPR
;
6249 bool oldEmittingForInit
= bce
->emittingForInit
;
6250 bce
->emittingForInit
= false;
6251 if (!EmitTree(cx
, bce
, pn2
))
6254 bce
->emittingForInit
= oldEmittingForInit
;
6255 return Emit1(cx
, bce
, op
) >= 0;
6259 EmitDefaults(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
6261 JS_ASSERT(pn
->isKind(PNK_ARGSBODY
));
6263 ParseNode
* arg
, *pnlast
= pn
->last();
6264 for (arg
= pn
->pn_head
; arg
!= pnlast
; arg
= arg
->pn_next
) {
6265 if (!(arg
->pn_dflags
& PND_DEFAULT
))
6267 if (!BindNameToSlot(cx
, bce
, arg
))
6269 if (!EmitVarOp(cx
, arg
, JSOP_GETARG
, bce
))
6271 if (Emit1(cx
, bce
, JSOP_UNDEFINED
) < 0)
6273 if (Emit1(cx
, bce
, JSOP_STRICTEQ
) < 0)
6275 // Emit source note to enable ion compilation.
6276 if (NewSrcNote(cx
, bce
, SRC_IF
) < 0)
6278 ptrdiff_t jump
= EmitJump(cx
, bce
, JSOP_IFEQ
, 0);
6281 if (!EmitTree(cx
, bce
, arg
->expr()))
6283 if (!EmitVarOp(cx
, arg
, JSOP_SETARG
, bce
))
6285 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
6287 SET_JUMP_OFFSET(bce
->code(jump
), bce
->offset() - jump
);
6294 frontend::EmitTree(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
6296 JS_CHECK_RECURSION(cx
, return false);
6298 EmitLevelManager
elm(bce
);
6301 ptrdiff_t top
= bce
->offset();
6302 pn
->pn_offset
= top
;
6304 /* Emit notes to tell the current bytecode's source line number. */
6305 if (!UpdateLineNumberNotes(cx
, bce
, pn
->pn_pos
.begin
))
6308 switch (pn
->getKind()) {
6310 ok
= EmitFunc(cx
, bce
, pn
);
6315 RootedFunction
fun(cx
, bce
->sc
->asFunctionBox()->function());
6316 ParseNode
* pnlast
= pn
->last();
6318 // Carefully emit everything in the right order:
6322 ParseNode
* pnchild
= pnlast
->pn_head
;
6323 if (pnlast
->pn_xflags
& PNX_DESTRUCT
) {
6324 // Assign the destructuring arguments before defining any functions,
6326 JS_ASSERT(pnchild
->isKind(PNK_SEMI
));
6327 JS_ASSERT(pnchild
->pn_kid
->isKind(PNK_VAR
) || pnchild
->pn_kid
->isKind(PNK_CONST
));
6328 if (!EmitTree(cx
, bce
, pnchild
))
6330 pnchild
= pnchild
->pn_next
;
6332 bool hasDefaults
= bce
->sc
->asFunctionBox()->hasDefaults();
6334 ParseNode
* rest
= nullptr;
6335 bool restIsDefn
= false;
6336 if (fun
->hasRest()) {
6337 JS_ASSERT(!bce
->sc
->asFunctionBox()->argumentsHasLocalBinding());
6339 // Defaults with a rest parameter need special handling. The
6340 // rest parameter needs to be undefined while defaults are being
6341 // processed. To do this, we create the rest argument and let it
6342 // sit on the stack while processing defaults. The rest
6343 // parameter's slot is set to undefined for the course of
6344 // default processing.
6346 while (rest
->pn_next
!= pnlast
)
6347 rest
= rest
->pn_next
;
6348 restIsDefn
= rest
->isDefn();
6349 if (Emit1(cx
, bce
, JSOP_REST
) < 0)
6351 CheckTypeSet(cx
, bce
, JSOP_REST
);
6353 // Only set the rest parameter if it's not aliased by a nested
6354 // function in the body.
6356 if (Emit1(cx
, bce
, JSOP_UNDEFINED
) < 0)
6358 if (!BindNameToSlot(cx
, bce
, rest
))
6360 if (!EmitVarOp(cx
, rest
, JSOP_SETARG
, bce
))
6362 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
6366 if (!EmitDefaults(cx
, bce
, pn
))
6368 if (fun
->hasRest()) {
6369 if (restIsDefn
&& !EmitVarOp(cx
, rest
, JSOP_SETARG
, bce
))
6371 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
6375 for (ParseNode
* pn2
= pn
->pn_head
; pn2
!= pnlast
; pn2
= pn2
->pn_next
) {
6376 // Only bind the parameter if it's not aliased by a nested function
6380 if (!BindNameToSlot(cx
, bce
, pn2
))
6382 if (pn2
->pn_next
== pnlast
&& fun
->hasRest() && !hasDefaults
) {
6383 // Fill rest parameter. We handled the case with defaults above.
6384 JS_ASSERT(!bce
->sc
->asFunctionBox()->argumentsHasLocalBinding());
6385 bce
->switchToProlog();
6386 if (Emit1(cx
, bce
, JSOP_REST
) < 0)
6388 CheckTypeSet(cx
, bce
, JSOP_REST
);
6389 if (!EmitVarOp(cx
, pn2
, JSOP_SETARG
, bce
))
6391 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
6393 bce
->switchToMain();
6396 if (pnlast
->pn_xflags
& PNX_FUNCDEFS
) {
6397 // This block contains top-level function definitions. To ensure
6398 // that we emit the bytecode defining them before the rest of code
6399 // in the block we use a separate pass over functions. During the
6400 // main pass later the emitter will add JSOP_NOP with source notes
6401 // for the function to preserve the original functions position
6402 // when decompiling.
6404 // Currently this is used only for functions, as compile-as-we go
6405 // mode for scripts does not allow separate emitter passes.
6406 for (ParseNode
* pn2
= pnchild
; pn2
; pn2
= pn2
->pn_next
) {
6407 if (pn2
->isKind(PNK_FUNCTION
) && pn2
->functionIsHoisted()) {
6408 if (!EmitTree(cx
, bce
, pn2
))
6413 ok
= EmitTree(cx
, bce
, pnlast
);
6418 ok
= EmitIf(cx
, bce
, pn
);
6422 ok
= EmitSwitch(cx
, bce
, pn
);
6426 ok
= EmitWhile(cx
, bce
, pn
, top
);
6430 ok
= EmitDo(cx
, bce
, pn
);
6434 ok
= EmitFor(cx
, bce
, pn
, top
);
6438 ok
= EmitBreak(cx
, bce
, pn
->as
<BreakStatement
>().label());
6442 ok
= EmitContinue(cx
, bce
, pn
->as
<ContinueStatement
>().label());
6446 ok
= EmitWith(cx
, bce
, pn
);
6450 if (!EmitTry(cx
, bce
, pn
))
6455 if (!EmitCatch(cx
, bce
, pn
))
6461 if (!EmitVariables(cx
, bce
, pn
, InitializeVars
))
6466 ok
= EmitReturn(cx
, bce
, pn
);
6469 case PNK_YIELD_STAR
:
6470 ok
= EmitYieldStar(cx
, bce
, pn
->pn_kid
);
6474 JS_ASSERT(bce
->sc
->isFunctionBox());
6475 if (bce
->sc
->asFunctionBox()->isStarGenerator()) {
6476 if (!EmitPrepareIteratorResult(cx
, bce
))
6480 if (!EmitTree(cx
, bce
, pn
->pn_kid
))
6483 if (Emit1(cx
, bce
, JSOP_UNDEFINED
) < 0)
6486 if (bce
->sc
->asFunctionBox()->isStarGenerator()) {
6487 if (!EmitFinishIteratorResult(cx
, bce
, false))
6490 if (Emit1(cx
, bce
, JSOP_YIELD
) < 0)
6494 case PNK_STATEMENTLIST
:
6495 ok
= EmitStatementList(cx
, bce
, pn
, top
);
6499 ok
= EmitSyntheticStatements(cx
, bce
, pn
, top
);
6503 ok
= EmitStatement(cx
, bce
, pn
);
6507 ok
= EmitLabeledStatement(cx
, bce
, &pn
->as
<LabeledStatement
>());
6512 for (ParseNode
* pn2
= pn
->pn_head
; ; pn2
= pn2
->pn_next
) {
6513 if (!UpdateSourceCoordNotes(cx
, bce
, pn2
->pn_pos
.begin
))
6515 if (!EmitTree(cx
, bce
, pn2
))
6519 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
6528 case PNK_BITORASSIGN
:
6529 case PNK_BITXORASSIGN
:
6530 case PNK_BITANDASSIGN
:
6533 case PNK_URSHASSIGN
:
6537 if (!EmitAssignment(cx
, bce
, pn
->pn_left
, pn
->getOp(), pn
->pn_right
))
6541 case PNK_CONDITIONAL
:
6542 ok
= EmitConditionalExpression(cx
, bce
, pn
->as
<ConditionalExpression
>());
6547 ok
= EmitLogical(cx
, bce
, pn
);
6564 case PNK_INSTANCEOF
:
6571 if (pn
->isArity(PN_LIST
)) {
6572 /* Left-associative operator chain: avoid too much recursion. */
6573 ParseNode
* pn2
= pn
->pn_head
;
6574 if (!EmitTree(cx
, bce
, pn2
))
6576 JSOp op
= pn
->getOp();
6577 while ((pn2
= pn2
->pn_next
) != nullptr) {
6578 if (!EmitTree(cx
, bce
, pn2
))
6580 if (Emit1(cx
, bce
, op
) < 0)
6584 /* Binary operators that evaluate both operands unconditionally. */
6585 if (!EmitTree(cx
, bce
, pn
->pn_left
))
6587 if (!EmitTree(cx
, bce
, pn
->pn_right
))
6589 if (Emit1(cx
, bce
, pn
->getOp()) < 0)
6601 ok
= EmitUnary(cx
, bce
, pn
);
6604 case PNK_PREINCREMENT
:
6605 case PNK_PREDECREMENT
:
6606 case PNK_POSTINCREMENT
:
6607 case PNK_POSTDECREMENT
:
6608 ok
= EmitIncOrDec(cx
, bce
, pn
);
6612 ok
= EmitDelete(cx
, bce
, pn
);
6616 ok
= EmitPropOp(cx
, pn
, JSOP_GETPROP
, bce
);
6620 ok
= EmitElemOp(cx
, pn
, JSOP_GETELEM
, bce
);
6624 case PNK_TAGGED_TEMPLATE
:
6627 ok
= EmitCallOrNew(cx
, bce
, pn
);
6630 case PNK_LEXICALSCOPE
:
6631 ok
= EmitLexicalScope(cx
, bce
, pn
);
6635 ok
= pn
->isArity(PN_BINARY
)
6636 ? EmitLet(cx
, bce
, pn
)
6637 : EmitVariables(cx
, bce
, pn
, InitializeVars
);
6642 // TODO: Implement emitter support for modules
6643 bce
->reportError(nullptr, JSMSG_MODULES_NOT_IMPLEMENTED
);
6646 case PNK_ARRAYPUSH
: {
6648 * The array object's stack index is in bce->arrayCompDepth. See below
6649 * under the array initialiser code generator for array comprehension
6650 * special casing. Note that the array object is a pure stack value,
6651 * unaliased by blocks, so we can EmitUnaliasedVarOp.
6653 if (!EmitTree(cx
, bce
, pn
->pn_kid
))
6655 if (!EmitDupAt(cx
, bce
, bce
->arrayCompDepth
))
6657 if (Emit1(cx
, bce
, JSOP_ARRAYPUSH
) < 0)
6662 case PNK_CALLSITEOBJ
:
6663 ok
= EmitCallSiteObject(cx
, bce
, pn
);
6667 if (!(pn
->pn_xflags
& PNX_NONCONST
) && pn
->pn_head
) {
6668 if (bce
->checkSingletonContext()) {
6669 // Bake in the object entirely if it will only be created once.
6670 ok
= EmitSingletonInitialiser(cx
, bce
, pn
);
6674 // If the array consists entirely of primitive values, make a
6675 // template object with copy on write elements that can be reused
6676 // every time the initializer executes.
6677 RootedValue
value(cx
);
6678 if (bce
->emitterMode
!= BytecodeEmitter::SelfHosting
&&
6679 pn
->pn_count
!= 0 &&
6680 pn
->getConstantValue(cx
, ParseNode::DontAllowNestedObjects
, &value
))
6682 // Note: the type of the template object might not yet reflect
6683 // that the object has copy on write elements. When the
6684 // interpreter or JIT compiler fetches the template, it should
6685 // use types::GetOrFixupCopyOnWriteObject to make sure the type
6686 // for the template is accurate. We don't do this here as we
6687 // want to use types::InitObject, which requires a finished
6689 JSObject
* obj
= &value
.toObject();
6690 if (!ObjectElements::MakeElementsCopyOnWrite(cx
, obj
))
6693 ObjectBox
* objbox
= bce
->parser
->newObjectBox(obj
);
6697 ok
= EmitObjectOp(cx
, objbox
, JSOP_NEWARRAY_COPYONWRITE
, bce
);
6702 ok
= EmitArray(cx
, bce
, pn
->pn_head
, pn
->pn_count
);
6706 ok
= EmitArrayComp(cx
, bce
, pn
);
6710 ok
= EmitObject(cx
, bce
, pn
);
6714 if (!EmitNameOp(cx
, bce
, pn
, false))
6718 case PNK_TEMPLATE_STRING_LIST
:
6719 ok
= EmitTemplateString(cx
, bce
, pn
);
6722 case PNK_TEMPLATE_STRING
:
6724 ok
= EmitAtomOp(cx
, pn
, JSOP_STRING
, bce
);
6728 ok
= EmitNumberOp(cx
, pn
->pn_dval
, bce
);
6732 ok
= EmitRegExp(cx
, bce
->regexpList
.add(pn
->as
<RegExpLiteral
>().objbox()), bce
);
6739 if (Emit1(cx
, bce
, pn
->getOp()) < 0)
6744 if (!UpdateSourceCoordNotes(cx
, bce
, pn
->pn_pos
.begin
))
6746 if (Emit1(cx
, bce
, JSOP_DEBUGGER
) < 0)
6751 JS_ASSERT(pn
->getArity() == PN_NULLARY
);
6758 /* bce->emitLevel == 1 means we're last on the stack, so finish up. */
6759 if (ok
&& bce
->emitLevel
== 1) {
6760 if (!UpdateSourceCoordNotes(cx
, bce
, pn
->pn_pos
.end
))
6768 AllocSrcNote(ExclusiveContext
* cx
, SrcNotesVector
& notes
)
6770 // Start it off moderately large to avoid repeated resizings early on.
6771 if (notes
.capacity() == 0 && !notes
.reserve(1024))
6774 jssrcnote dummy
= 0;
6775 if (!notes
.append(dummy
)) {
6776 js_ReportOutOfMemory(cx
);
6779 return notes
.length() - 1;
6783 frontend::NewSrcNote(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, SrcNoteType type
)
6785 SrcNotesVector
& notes
= bce
->notes();
6788 index
= AllocSrcNote(cx
, notes
);
6793 * Compute delta from the last annotated bytecode's offset. If it's too
6794 * big to fit in sn, allocate one or more xdelta notes and reset sn.
6796 ptrdiff_t offset
= bce
->offset();
6797 ptrdiff_t delta
= offset
- bce
->lastNoteOffset();
6798 bce
->current
->lastNoteOffset
= offset
;
6799 if (delta
>= SN_DELTA_LIMIT
) {
6801 ptrdiff_t xdelta
= Min(delta
, SN_XDELTA_MASK
);
6802 SN_MAKE_XDELTA(¬es
[index
], xdelta
);
6804 index
= AllocSrcNote(cx
, notes
);
6807 } while (delta
>= SN_DELTA_LIMIT
);
6811 * Initialize type and delta, then allocate the minimum number of notes
6812 * needed for type's arity. Usually, we won't need more, but if an offset
6813 * does take two bytes, SetSrcNoteOffset will grow notes.
6815 SN_MAKE_NOTE(¬es
[index
], type
, delta
);
6816 for (int n
= (int)js_SrcNoteSpec
[type
].arity
; n
> 0; n
--) {
6817 if (NewSrcNote(cx
, bce
, SRC_NULL
) < 0)
6824 frontend::NewSrcNote2(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, SrcNoteType type
, ptrdiff_t offset
)
6828 index
= NewSrcNote(cx
, bce
, type
);
6830 if (!SetSrcNoteOffset(cx
, bce
, index
, 0, offset
))
6837 frontend::NewSrcNote3(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, SrcNoteType type
, ptrdiff_t offset1
,
6842 index
= NewSrcNote(cx
, bce
, type
);
6844 if (!SetSrcNoteOffset(cx
, bce
, index
, 0, offset1
))
6846 if (!SetSrcNoteOffset(cx
, bce
, index
, 1, offset2
))
6853 frontend::AddToSrcNoteDelta(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, jssrcnote
* sn
, ptrdiff_t delta
)
6856 * Called only from FinishTakingSrcNotes to add to main script note
6857 * deltas, and only by a small positive amount.
6859 JS_ASSERT(bce
->current
== &bce
->main
);
6860 JS_ASSERT((unsigned) delta
< (unsigned) SN_XDELTA_LIMIT
);
6862 ptrdiff_t base
= SN_DELTA(sn
);
6863 ptrdiff_t limit
= SN_IS_XDELTA(sn
) ? SN_XDELTA_LIMIT
: SN_DELTA_LIMIT
;
6864 ptrdiff_t newdelta
= base
+ delta
;
6865 if (newdelta
< limit
) {
6866 SN_SET_DELTA(sn
, newdelta
);
6869 SN_MAKE_XDELTA(&xdelta
, delta
);
6870 if (!(sn
= bce
->main
.notes
.insert(sn
, xdelta
)))
6877 SetSrcNoteOffset(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, unsigned index
, unsigned which
,
6880 if (size_t(offset
) > SN_MAX_OFFSET
) {
6881 ReportStatementTooLarge(bce
->parser
->tokenStream
, bce
->topStmt
);
6885 SrcNotesVector
& notes
= bce
->notes();
6887 /* Find the offset numbered which (i.e., skip exactly which offsets). */
6888 jssrcnote
* sn
= notes
.begin() + index
;
6889 JS_ASSERT(SN_TYPE(sn
) != SRC_XDELTA
);
6890 JS_ASSERT((int) which
< js_SrcNoteSpec
[SN_TYPE(sn
)].arity
);
6891 for (sn
++; which
; sn
++, which
--) {
6892 if (*sn
& SN_4BYTE_OFFSET_FLAG
)
6897 * See if the new offset requires three bytes either by being too big or if
6898 * the offset has already been inflated (in which case, we need to stay big
6899 * to not break the srcnote encoding if this isn't the last srcnote).
6901 if (offset
> (ptrdiff_t)SN_4BYTE_OFFSET_MASK
|| (*sn
& SN_4BYTE_OFFSET_FLAG
)) {
6902 /* Maybe this offset was already set to a three-byte value. */
6903 if (!(*sn
& SN_4BYTE_OFFSET_FLAG
)) {
6904 /* Insert two dummy bytes that will be overwritten shortly. */
6905 jssrcnote dummy
= 0;
6906 if (!(sn
= notes
.insert(sn
, dummy
)) ||
6907 !(sn
= notes
.insert(sn
, dummy
)) ||
6908 !(sn
= notes
.insert(sn
, dummy
)))
6910 js_ReportOutOfMemory(cx
);
6914 *sn
++ = (jssrcnote
)(SN_4BYTE_OFFSET_FLAG
| (offset
>> 24));
6915 *sn
++ = (jssrcnote
)(offset
>> 16);
6916 *sn
++ = (jssrcnote
)(offset
>> 8);
6918 *sn
= (jssrcnote
)offset
;
6923 * Finish taking source notes in cx's notePool.
6924 * If successful, the final source note count is stored in the out outparam.
6927 frontend::FinishTakingSrcNotes(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, uint32_t* out
)
6929 JS_ASSERT(bce
->current
== &bce
->main
);
6931 unsigned prologCount
= bce
->prolog
.notes
.length();
6932 if (prologCount
&& bce
->prolog
.currentLine
!= bce
->firstLine
) {
6933 bce
->switchToProlog();
6934 if (NewSrcNote2(cx
, bce
, SRC_SETLINE
, (ptrdiff_t)bce
->firstLine
) < 0)
6936 bce
->switchToMain();
6939 * Either no prolog srcnotes, or no line number change over prolog.
6940 * We don't need a SRC_SETLINE, but we may need to adjust the offset
6941 * of the first main note, by adding to its delta and possibly even
6942 * prepending SRC_XDELTA notes to it to account for prolog bytecodes
6943 * that came at and after the last annotated bytecode.
6945 ptrdiff_t offset
= bce
->prologOffset() - bce
->prolog
.lastNoteOffset
;
6946 JS_ASSERT(offset
>= 0);
6947 if (offset
> 0 && bce
->main
.notes
.length() != 0) {
6948 /* NB: Use as much of the first main note's delta as we can. */
6949 jssrcnote
* sn
= bce
->main
.notes
.begin();
6950 ptrdiff_t delta
= SN_IS_XDELTA(sn
)
6951 ? SN_XDELTA_MASK
- (*sn
& SN_XDELTA_MASK
)
6952 : SN_DELTA_MASK
- (*sn
& SN_DELTA_MASK
);
6956 if (!AddToSrcNoteDelta(cx
, bce
, sn
, delta
))
6961 delta
= Min(offset
, SN_XDELTA_MASK
);
6962 sn
= bce
->main
.notes
.begin();
6967 // The prolog count might have changed, so we can't reuse prologCount.
6968 // The + 1 is to account for the final SN_MAKE_TERMINATOR that is appended
6969 // when the notes are copied to their final destination by CopySrcNotes.
6970 *out
= bce
->prolog
.notes
.length() + bce
->main
.notes
.length() + 1;
6975 frontend::CopySrcNotes(BytecodeEmitter
* bce
, jssrcnote
* destination
, uint32_t nsrcnotes
)
6977 unsigned prologCount
= bce
->prolog
.notes
.length();
6978 unsigned mainCount
= bce
->main
.notes
.length();
6979 unsigned totalCount
= prologCount
+ mainCount
;
6980 MOZ_ASSERT(totalCount
== nsrcnotes
- 1);
6982 PodCopy(destination
, bce
->prolog
.notes
.begin(), prologCount
);
6983 PodCopy(destination
+ prologCount
, bce
->main
.notes
.begin(), mainCount
);
6984 SN_MAKE_TERMINATOR(&destination
[totalCount
]);
6988 CGConstList::finish(ConstArray
* array
)
6990 JS_ASSERT(length() == array
->length
);
6992 for (unsigned i
= 0; i
< length(); i
++)
6993 array
->vector
[i
] = list
[i
];
6997 * Find the index of the given object for code generator.
6999 * Since the emitter refers to each parsed object only once, for the index we
7000 * use the number of already indexes objects. We also add the object to a list
7001 * to convert the list to a fixed-size array when we complete code generation,
7002 * see js::CGObjectList::finish below.
7004 * Most of the objects go to BytecodeEmitter::objectList but for regexp we use
7005 * a separated BytecodeEmitter::regexpList. In this way the emitted index can
7006 * be directly used to store and fetch a reference to a cloned RegExp object
7007 * that shares the same JSRegExp private data created for the object literal in
7008 * objbox. We need a cloned object to hold lastIndex and other direct
7009 * properties that should not be shared among threads sharing a precompiled
7010 * function or script.
7012 * If the code being compiled is function code, allocate a reserved slot in
7013 * the cloned function object that shares its precompiled script with other
7014 * cloned function objects and with the compiler-created clone-parent. There
7015 * are nregexps = script->regexps()->length such reserved slots in each
7016 * function object cloned from fun->object. NB: during compilation, a funobj
7017 * slots element must never be allocated, because JSObject::allocSlot could
7018 * hand out one of the slots that should be given to a regexp clone.
7020 * If the code being compiled is global code, the cloned regexp are stored in
7021 * fp->vars slot and to protect regexp slots from GC we set fp->nvars to
7024 * The slots initially contain undefined or null. We populate them lazily when
7025 * JSOP_REGEXP is executed for the first time.
7027 * Why clone regexp objects? ECMA specifies that when a regular expression
7028 * literal is scanned, a RegExp object is created. In the spec, compilation
7029 * and execution happen indivisibly, but in this implementation and many of
7030 * its embeddings, code is precompiled early and re-executed in multiple
7031 * threads, or using multiple global objects, or both, for efficiency.
7033 * In such cases, naively following ECMA leads to wrongful sharing of RegExp
7034 * objects, which makes for collisions on the lastIndex property (especially
7035 * for global regexps) and on any ad-hoc properties. Also, __proto__ refers to
7036 * the pre-compilation prototype, a pigeon-hole problem for instanceof tests.
7039 CGObjectList::add(ObjectBox
* objbox
)
7041 JS_ASSERT(!objbox
->emitLink
);
7042 objbox
->emitLink
= lastbox
;
7048 CGObjectList::indexOf(JSObject
* obj
)
7050 JS_ASSERT(length
> 0);
7051 unsigned index
= length
- 1;
7052 for (ObjectBox
* box
= lastbox
; box
->object
!= obj
; box
= box
->emitLink
)
7058 CGObjectList::finish(ObjectArray
* array
)
7060 JS_ASSERT(length
<= INDEX_LIMIT
);
7061 JS_ASSERT(length
== array
->length
);
7063 js::HeapPtrObject
* cursor
= array
->vector
+ array
->length
;
7064 ObjectBox
* objbox
= lastbox
;
7067 JS_ASSERT(!*cursor
);
7068 *cursor
= objbox
->object
;
7069 } while ((objbox
= objbox
->emitLink
) != nullptr);
7070 JS_ASSERT(cursor
== array
->vector
);
7074 CGObjectList::find(uint32_t index
)
7076 JS_ASSERT(index
< length
);
7077 ObjectBox
* box
= lastbox
;
7078 for (unsigned n
= length
- 1; n
> index
; n
--)
7079 box
= box
->emitLink
;
7084 CGTryNoteList::append(JSTryNoteKind kind
, uint32_t stackDepth
, size_t start
, size_t end
)
7086 JS_ASSERT(start
<= end
);
7087 JS_ASSERT(size_t(uint32_t(start
)) == start
);
7088 JS_ASSERT(size_t(uint32_t(end
)) == end
);
7092 note
.stackDepth
= stackDepth
;
7093 note
.start
= uint32_t(start
);
7094 note
.length
= uint32_t(end
- start
);
7096 return list
.append(note
);
7100 CGTryNoteList::finish(TryNoteArray
* array
)
7102 JS_ASSERT(length() == array
->length
);
7104 for (unsigned i
= 0; i
< length(); i
++)
7105 array
->vector
[i
] = list
[i
];
7109 CGBlockScopeList::append(uint32_t scopeObject
, uint32_t offset
, uint32_t parent
)
7111 BlockScopeNote note
;
7112 mozilla::PodZero(¬e
);
7114 note
.index
= scopeObject
;
7115 note
.start
= offset
;
7116 note
.parent
= parent
;
7118 return list
.append(note
);
7122 CGBlockScopeList::findEnclosingScope(uint32_t index
)
7124 JS_ASSERT(index
< length());
7125 JS_ASSERT(list
[index
].index
!= BlockScopeNote::NoBlockScopeIndex
);
7127 DebugOnly
<uint32_t> pos
= list
[index
].start
;
7129 JS_ASSERT(list
[index
].start
<= pos
);
7130 if (list
[index
].length
== 0) {
7131 // We are looking for the nearest enclosing live scope. If the
7132 // scope contains POS, it should still be open, so its length should
7134 return list
[index
].index
;
7136 // Conversely, if the length is not zero, it should not contain
7138 JS_ASSERT(list
[index
].start
+ list
[index
].length
<= pos
);
7142 return BlockScopeNote::NoBlockScopeIndex
;
7146 CGBlockScopeList::recordEnd(uint32_t index
, uint32_t offset
)
7148 JS_ASSERT(index
< length());
7149 JS_ASSERT(offset
>= list
[index
].start
);
7150 JS_ASSERT(list
[index
].length
== 0);
7152 list
[index
].length
= offset
- list
[index
].start
;
7156 CGBlockScopeList::finish(BlockScopeArray
* array
)
7158 JS_ASSERT(length() == array
->length
);
7160 for (unsigned i
= 0; i
< length(); i
++)
7161 array
->vector
[i
] = list
[i
];
7165 * We should try to get rid of offsetBias (always 0 or 1, where 1 is
7166 * JSOP_{NOP,POP}_LENGTH), which is used only by SRC_FOR.
7168 const JSSrcNoteSpec js_SrcNoteSpec
[] = {
7169 #define DEFINE_SRC_NOTE_SPEC(sym, name, arity) { name, arity },
7170 FOR_EACH_SRC_NOTE_TYPE(DEFINE_SRC_NOTE_SPEC
)
7171 #undef DEFINE_SRC_NOTE_SPEC
7175 SrcNoteArity(jssrcnote
* sn
)
7177 JS_ASSERT(SN_TYPE(sn
) < SRC_LAST
);
7178 return js_SrcNoteSpec
[SN_TYPE(sn
)].arity
;
7181 JS_FRIEND_API(unsigned)
7182 js_SrcNoteLength(jssrcnote
* sn
)
7187 arity
= SrcNoteArity(sn
);
7188 for (base
= sn
++; arity
; sn
++, arity
--) {
7189 if (*sn
& SN_4BYTE_OFFSET_FLAG
)
7195 JS_FRIEND_API(ptrdiff_t)
7196 js_GetSrcNoteOffset(jssrcnote
* sn
, unsigned which
)
7198 /* Find the offset numbered which (i.e., skip exactly which offsets). */
7199 JS_ASSERT(SN_TYPE(sn
) != SRC_XDELTA
);
7200 JS_ASSERT((int) which
< SrcNoteArity(sn
));
7201 for (sn
++; which
; sn
++, which
--) {
7202 if (*sn
& SN_4BYTE_OFFSET_FLAG
)
7205 if (*sn
& SN_4BYTE_OFFSET_FLAG
) {
7206 return (ptrdiff_t)(((uint32_t)(sn
[0] & SN_4BYTE_OFFSET_MASK
) << 24)
7211 return (ptrdiff_t)*sn
;