1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=8 sw=4 et tw=99:
4 * ***** BEGIN LICENSE BLOCK *****
5 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
7 * The contents of this file are subject to the Mozilla Public License Version
8 * 1.1 (the "License"); you may not use this file except in compliance with
9 * the License. You may obtain a copy of the License at
10 * http://www.mozilla.org/MPL/
12 * Software distributed under the License is distributed on an "AS IS" basis,
13 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
17 * The Original Code is Mozilla Communicator client code, released
20 * The Initial Developer of the Original Code is
21 * Netscape Communications Corporation.
22 * Portions created by the Initial Developer are Copyright (C) 1998
23 * the Initial Developer. All Rights Reserved.
27 * Alternatively, the contents of this file may be used under the terms of
28 * either of the GNU General Public License Version 2 or later (the "GPL"),
29 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
30 * in which case the provisions of the GPL or the LGPL are applicable instead
31 * of those above. If you wish to allow use of your version of this file only
32 * under the terms of either the GPL or the LGPL, and not to allow others to
33 * use your version of this file under the terms of the MPL, indicate your
34 * decision by deleting the provisions above and replace them with the notice
35 * and other provisions required by the GPL or the LGPL. If you do not delete
36 * the provisions above, a recipient may use your version of this file under
37 * the terms of any one of the MPL, the GPL or the LGPL.
39 * ***** END LICENSE BLOCK ***** */
42 * JS bytecode generation.
51 #include "jsarena.h" /* Added by JSIFY */
52 #include "jsutil.h" /* Added by JSIFY */
59 #include "jsversion.h"
69 #include "jsautooplen.h" // generated headers last
70 #include "jsstaticcheck.h"
72 #include "jsobjinlines.h"
73 #include "jsscopeinlines.h"
/* Allocation chunk counts, must be powers of two in general. */
#define BYTECODE_CHUNK  256     /* code allocation increment */
#define SRCNOTE_CHUNK   64      /* initial srcnote allocation increment;
                                   must be a power of two: the generator
                                   uses SRCNOTE_CHUNK - 1 as a note mask */
#define TRYNOTE_CHUNK   64      /* trynote allocation increment */

/* Macros to compute byte sizes from typed element counts. */
#define BYTECODE_SIZE(n)        ((n) * sizeof(jsbytecode))
#define SRCNOTE_SIZE(n)         ((n) * sizeof(jssrcnote))
#define TRYNOTE_SIZE(n)         ((n) * sizeof(JSTryNote))
88 NewTryNote(JSContext
*cx
, JSCodeGenerator
*cg
, JSTryNoteKind kind
,
89 uintN stackDepth
, size_t start
, size_t end
);
91 JSCodeGenerator::JSCodeGenerator(Parser
*parser
,
92 JSArenaPool
*cpool
, JSArenaPool
*npool
,
94 : JSTreeContext(parser
),
95 codePool(cpool
), notePool(npool
),
96 codeMark(JS_ARENA_MARK(cpool
)), noteMark(JS_ARENA_MARK(npool
)),
97 stackDepth(0), maxStackDepth(0),
98 ntrynotes(0), lastTryNode(NULL
),
99 spanDeps(NULL
), jumpTargets(NULL
), jtFreeList(NULL
),
100 numSpanDeps(0), numJumpTargets(0), spanDepTodo(0),
103 constMap(parser
->context
),
104 constList(parser
->context
)
106 flags
= TCF_COMPILING
;
107 memset(&prolog
, 0, sizeof prolog
);
108 memset(&main
, 0, sizeof main
);
110 firstLine
= prolog
.currentLine
= main
.currentLine
= lineno
;
111 prolog
.noteMask
= main
.noteMask
= SRCNOTE_CHUNK
- 1;
112 memset(&upvarMap
, 0, sizeof upvarMap
);
115 bool JSCodeGenerator::init()
117 return constMap
.init();
120 JSCodeGenerator::~JSCodeGenerator()
122 JS_ARENA_RELEASE(codePool
, codeMark
);
123 JS_ARENA_RELEASE(notePool
, noteMark
);
125 /* NB: non-null only after OOM. */
127 parser
->context
->free(spanDeps
);
130 parser
->context
->free(upvarMap
.vector
);
134 EmitCheck(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp op
, ptrdiff_t delta
)
136 jsbytecode
*base
, *limit
, *next
;
137 ptrdiff_t offset
, length
;
142 limit
= CG_LIMIT(cg
);
143 offset
= next
- base
;
144 if (next
+ delta
> limit
) {
145 length
= offset
+ delta
;
146 length
= (length
<= BYTECODE_CHUNK
)
148 : JS_BIT(JS_CeilingLog2(length
));
149 incr
= BYTECODE_SIZE(length
);
151 JS_ARENA_ALLOCATE_CAST(base
, jsbytecode
*, cg
->codePool
, incr
);
153 size
= BYTECODE_SIZE(limit
- base
);
155 JS_ARENA_GROW_CAST(base
, jsbytecode
*, cg
->codePool
, size
, incr
);
158 js_ReportOutOfScriptQuota(cx
);
162 CG_LIMIT(cg
) = base
+ length
;
163 CG_NEXT(cg
) = base
+ offset
;
169 UpdateDepth(JSContext
*cx
, JSCodeGenerator
*cg
, ptrdiff_t target
)
173 const JSCodeSpec
*cs
;
174 uintN extra
, depth
, nuses
;
177 pc
= CG_CODE(cg
, target
);
179 cs
= &js_CodeSpec
[op
];
181 extern uint8 js_opcode2extra
[];
182 extra
= js_opcode2extra
[op
];
186 if ((cs
->format
& JOF_TMPSLOT_MASK
) || extra
) {
187 depth
= (uintN
) cg
->stackDepth
+
188 ((cs
->format
& JOF_TMPSLOT_MASK
) >> JOF_TMPSLOT_SHIFT
) +
190 if (depth
> cg
->maxStackDepth
)
191 cg
->maxStackDepth
= depth
;
194 nuses
= js_GetStackUses(cs
, op
, pc
);
195 cg
->stackDepth
-= nuses
;
196 JS_ASSERT(cg
->stackDepth
>= 0);
197 if (cg
->stackDepth
< 0) {
201 JS_snprintf(numBuf
, sizeof numBuf
, "%d", target
);
202 ts
= &cg
->parser
->tokenStream
;
203 JS_ReportErrorFlagsAndNumber(cx
, JSREPORT_WARNING
,
204 js_GetErrorMessage
, NULL
,
205 JSMSG_STACK_UNDERFLOW
,
206 ts
->getFilename() ? ts
->getFilename() : "stdin",
213 /* We just executed IndexParsedObject */
214 JS_ASSERT(op
== JSOP_ENTERBLOCK
);
215 JS_ASSERT(nuses
== 0);
216 blockObj
= cg
->objectList
.lastbox
->object
;
217 JS_ASSERT(blockObj
->getClass() == &js_BlockClass
);
218 JS_ASSERT(blockObj
->fslots
[JSSLOT_BLOCK_DEPTH
].isUndefined());
220 OBJ_SET_BLOCK_DEPTH(cx
, blockObj
, cg
->stackDepth
);
221 ndefs
= OBJ_BLOCK_COUNT(cx
, blockObj
);
223 cg
->stackDepth
+= ndefs
;
224 if ((uintN
)cg
->stackDepth
> cg
->maxStackDepth
)
225 cg
->maxStackDepth
= cg
->stackDepth
;
229 js_Emit1(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp op
)
231 ptrdiff_t offset
= EmitCheck(cx
, cg
, op
, 1);
234 *CG_NEXT(cg
)++ = (jsbytecode
)op
;
235 UpdateDepth(cx
, cg
, offset
);
241 js_Emit2(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp op
, jsbytecode op1
)
243 ptrdiff_t offset
= EmitCheck(cx
, cg
, op
, 2);
246 jsbytecode
*next
= CG_NEXT(cg
);
247 next
[0] = (jsbytecode
)op
;
249 CG_NEXT(cg
) = next
+ 2;
250 UpdateDepth(cx
, cg
, offset
);
256 js_Emit3(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp op
, jsbytecode op1
,
259 ptrdiff_t offset
= EmitCheck(cx
, cg
, op
, 3);
262 jsbytecode
*next
= CG_NEXT(cg
);
263 next
[0] = (jsbytecode
)op
;
266 CG_NEXT(cg
) = next
+ 3;
267 UpdateDepth(cx
, cg
, offset
);
273 js_EmitN(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp op
, size_t extra
)
275 ptrdiff_t length
= 1 + (ptrdiff_t)extra
;
276 ptrdiff_t offset
= EmitCheck(cx
, cg
, op
, length
);
279 jsbytecode
*next
= CG_NEXT(cg
);
280 *next
= (jsbytecode
)op
;
281 memset(next
+ 1, 0, BYTECODE_SIZE(extra
));
282 CG_NEXT(cg
) = next
+ length
;
285 * Don't UpdateDepth if op's use-count comes from the immediate
286 * operand yet to be stored in the extra bytes after op.
288 if (js_CodeSpec
[op
].nuses
>= 0)
289 UpdateDepth(cx
, cg
, offset
);
/* XXX too many "... statement" L10N gaffes below -- fix via js.msg! */
/*
 * Shared statement-name strings, referenced by the statementName table
 * below and exported for use in diagnostics elsewhere.
 */
const char js_with_statement_str[] = "with statement";
const char js_finally_block_str[]  = "finally block";
const char js_script_str[]         = "script";
299 static const char *statementName
[] = {
300 "label statement", /* LABEL */
301 "if statement", /* IF */
302 "else statement", /* ELSE */
303 "destructuring body", /* BODY */
304 "switch statement", /* SWITCH */
306 js_with_statement_str
, /* WITH */
307 "catch block", /* CATCH */
308 "try block", /* TRY */
309 js_finally_block_str
, /* FINALLY */
310 js_finally_block_str
, /* SUBROUTINE */
311 "do loop", /* DO_LOOP */
312 "for loop", /* FOR_LOOP */
313 "for/in loop", /* FOR_IN_LOOP */
314 "while loop", /* WHILE_LOOP */
317 JS_STATIC_ASSERT(JS_ARRAY_LENGTH(statementName
) == STMT_LIMIT
);
320 StatementName(JSCodeGenerator
*cg
)
323 return js_script_str
;
324 return statementName
[cg
->topStmt
->type
];
328 ReportStatementTooLarge(JSContext
*cx
, JSCodeGenerator
*cg
)
330 JS_ReportErrorNumber(cx
, js_GetErrorMessage
, NULL
, JSMSG_NEED_DIET
,
335 Span-dependent instructions in JS bytecode consist of the jump (JOF_JUMP)
336 and switch (JOF_LOOKUPSWITCH, JOF_TABLESWITCH) format opcodes, subdivided
337 into unconditional (gotos and gosubs), and conditional jumps or branches
338 (which pop a value, test it, and jump depending on its value). Most jumps
339 have just one immediate operand, a signed offset from the jump opcode's pc
340 to the target bytecode. The lookup and table switch opcodes may contain
343 Mozilla bug #80981 (http://bugzilla.mozilla.org/show_bug.cgi?id=80981) was
344 fixed by adding extended "X" counterparts to the opcodes/formats (NB: X is
345 suffixed to prefer JSOP_ORX thereby avoiding a JSOP_XOR name collision for
346 the extended form of the JSOP_OR branch opcode). The unextended or short
347 formats have 16-bit signed immediate offset operands, the extended or long
348 formats have 32-bit signed immediates. The span-dependency problem consists
349 of selecting as few long instructions as possible, or about as few -- since
350 jumps can span other jumps, extending one jump may cause another to need to
353 Most JS scripts are short, so need no extended jumps. We optimize for this
354 case by generating short jumps until we know a long jump is needed. After
355 that point, we keep generating short jumps, but each jump's 16-bit immediate
356 offset operand is actually an unsigned index into cg->spanDeps, an array of
357 JSSpanDep structs. Each struct tells the top offset in the script of the
358 opcode, the "before" offset of the jump (which will be the same as top for
359 simplex jumps, but which will index further into the bytecode array for a
360 non-initial jump offset in a lookup or table switch), the after "offset"
361 adjusted during span-dependent instruction selection (initially the same
362 value as the "before" offset), and the jump target (more below).
364 Since we generate cg->spanDeps lazily, from within js_SetJumpOffset, we must
365 ensure that all bytecode generated so far can be inspected to discover where
366 the jump offset immediate operands lie within CG_CODE(cg). But the bonus is
367 that we generate span-dependency records sorted by their offsets, so we can
368 binary-search when trying to find a JSSpanDep for a given bytecode offset,
369 or the nearest JSSpanDep at or above a given pc.
371 To avoid limiting scripts to 64K jumps, if the cg->spanDeps index overflows
372 65534, we store SPANDEP_INDEX_HUGE in the jump's immediate operand. This
373 tells us that we need to binary-search for the cg->spanDeps entry by the
374 jump opcode's bytecode offset (sd->before).
376 Jump targets need to be maintained in a data structure that lets us look
377 up an already-known target by its address (jumps may have a common target),
378 and that also lets us update the addresses (script-relative, a.k.a. absolute
379 offsets) of targets that come after a jump target (for when a jump below
380 that target needs to be extended). We use an AVL tree, implemented using
381 recursion, but with some tricky optimizations to its height-balancing code
382 (see http://www.cmcrossroads.com/bradapp/ftp/src/libs/C++/AvlTrees.html).
384 A final wrinkle: backpatch chains are linked by jump-to-jump offsets with
385 positive sign, even though they link "backward" (i.e., toward lower bytecode
386 address). We don't want to waste space and search time in the AVL tree for
387 such temporary backpatch deltas, so we use a single-bit wildcard scheme to
388 tag true JSJumpTarget pointers and encode untagged, signed (positive) deltas
389 in JSSpanDep.target pointers, depending on whether the JSSpanDep has a known
390 target, or is still awaiting backpatching.
392 Note that backpatch chains would present a problem for BuildSpanDepTable,
393 which inspects bytecode to build cg->spanDeps on demand, when the first
394 short jump offset overflows. To solve this temporary problem, we emit a
395 proxy bytecode (JSOP_BACKPATCH; JSOP_BACKPATCH_POP for branch ops) whose
396 nuses/ndefs counts help keep the stack balanced, but whose opcode format
397 distinguishes its backpatch delta immediate operand from a normal jump
401 BalanceJumpTargets(JSJumpTarget
**jtp
)
403 JSJumpTarget
*jt
, *jt2
, *root
;
404 int dir
, otherDir
, heightChanged
;
408 JS_ASSERT(jt
->balance
!= 0);
410 if (jt
->balance
< -1) {
412 doubleRotate
= (jt
->kids
[JT_LEFT
]->balance
> 0);
413 } else if (jt
->balance
> 1) {
415 doubleRotate
= (jt
->kids
[JT_RIGHT
]->balance
< 0);
420 otherDir
= JT_OTHER_DIR(dir
);
422 jt2
= jt
->kids
[otherDir
];
423 *jtp
= root
= jt2
->kids
[dir
];
425 jt
->kids
[otherDir
] = root
->kids
[dir
];
426 root
->kids
[dir
] = jt
;
428 jt2
->kids
[dir
] = root
->kids
[otherDir
];
429 root
->kids
[otherDir
] = jt2
;
432 root
->kids
[JT_LEFT
]->balance
= -JS_MAX(root
->balance
, 0);
433 root
->kids
[JT_RIGHT
]->balance
= -JS_MIN(root
->balance
, 0);
436 *jtp
= root
= jt
->kids
[otherDir
];
437 jt
->kids
[otherDir
] = root
->kids
[dir
];
438 root
->kids
[dir
] = jt
;
440 heightChanged
= (root
->balance
!= 0);
441 jt
->balance
= -((dir
== JT_LEFT
) ? --root
->balance
: ++root
->balance
);
444 return heightChanged
;
447 typedef struct AddJumpTargetArgs
{
455 AddJumpTarget(AddJumpTargetArgs
*args
, JSJumpTarget
**jtp
)
462 JSCodeGenerator
*cg
= args
->cg
;
466 cg
->jtFreeList
= jt
->kids
[JT_LEFT
];
468 JS_ARENA_ALLOCATE_CAST(jt
, JSJumpTarget
*, &args
->cx
->tempPool
,
471 js_ReportOutOfScriptQuota(args
->cx
);
475 jt
->offset
= args
->offset
;
477 jt
->kids
[JT_LEFT
] = jt
->kids
[JT_RIGHT
] = NULL
;
478 cg
->numJumpTargets
++;
484 if (jt
->offset
== args
->offset
) {
489 if (args
->offset
< jt
->offset
)
490 balanceDelta
= -AddJumpTarget(args
, &jt
->kids
[JT_LEFT
]);
492 balanceDelta
= AddJumpTarget(args
, &jt
->kids
[JT_RIGHT
]);
496 jt
->balance
+= balanceDelta
;
497 return (balanceDelta
&& jt
->balance
)
498 ? 1 - BalanceJumpTargets(jtp
)
503 static int AVLCheck(JSJumpTarget
*jt
)
508 JS_ASSERT(-1 <= jt
->balance
&& jt
->balance
<= 1);
509 lh
= AVLCheck(jt
->kids
[JT_LEFT
]);
510 rh
= AVLCheck(jt
->kids
[JT_RIGHT
]);
511 JS_ASSERT(jt
->balance
== rh
- lh
);
512 return 1 + JS_MAX(lh
, rh
);
517 SetSpanDepTarget(JSContext
*cx
, JSCodeGenerator
*cg
, JSSpanDep
*sd
,
520 AddJumpTargetArgs args
;
522 if (off
< JUMPX_OFFSET_MIN
|| JUMPX_OFFSET_MAX
< off
) {
523 ReportStatementTooLarge(cx
, cg
);
529 args
.offset
= sd
->top
+ off
;
531 AddJumpTarget(&args
, &cg
->jumpTargets
);
536 AVLCheck(cg
->jumpTargets
);
539 SD_SET_TARGET(sd
, args
.node
);
/*
 * Span-dependency array sizing: the array starts at SPANDEPS_MIN entries
 * and is grown by doubling (the allocator reallocs to size + size), so
 * SPANDEPS_MIN should be a power of two.
 */
#define SPANDEPS_MIN            256
#define SPANDEPS_SIZE(n)        ((n) * sizeof(JSSpanDep))
#define SPANDEPS_SIZE_MIN       SPANDEPS_SIZE(SPANDEPS_MIN)
548 AddSpanDep(JSContext
*cx
, JSCodeGenerator
*cg
, jsbytecode
*pc
, jsbytecode
*pc2
,
552 JSSpanDep
*sdbase
, *sd
;
555 index
= cg
->numSpanDeps
;
556 if (index
+ 1 == 0) {
557 ReportStatementTooLarge(cx
, cg
);
561 if ((index
& (index
- 1)) == 0 &&
562 (!(sdbase
= cg
->spanDeps
) || index
>= SPANDEPS_MIN
)) {
563 size
= sdbase
? SPANDEPS_SIZE(index
) : SPANDEPS_SIZE_MIN
/ 2;
564 sdbase
= (JSSpanDep
*) cx
->realloc(sdbase
, size
+ size
);
567 cg
->spanDeps
= sdbase
;
570 cg
->numSpanDeps
= index
+ 1;
571 sd
= cg
->spanDeps
+ index
;
572 sd
->top
= pc
- CG_BASE(cg
);
573 sd
->offset
= sd
->before
= pc2
- CG_BASE(cg
);
575 if (js_CodeSpec
[*pc
].format
& JOF_BACKPATCH
) {
576 /* Jump offset will be backpatched if off is a non-zero "bpdelta". */
578 JS_ASSERT(off
>= 1 + JUMP_OFFSET_LEN
);
579 if (off
> BPDELTA_MAX
) {
580 ReportStatementTooLarge(cx
, cg
);
584 SD_SET_BPDELTA(sd
, off
);
585 } else if (off
== 0) {
586 /* Jump offset will be patched directly, without backpatch chaining. */
587 SD_SET_TARGET(sd
, 0);
589 /* The jump offset in off is non-zero, therefore it's already known. */
590 if (!SetSpanDepTarget(cx
, cg
, sd
, off
))
594 if (index
> SPANDEP_INDEX_MAX
)
595 index
= SPANDEP_INDEX_HUGE
;
596 SET_SPANDEP_INDEX(pc2
, index
);
601 AddSwitchSpanDeps(JSContext
*cx
, JSCodeGenerator
*cg
, jsbytecode
*pc
)
607 uintN njumps
, indexlen
;
610 JS_ASSERT(op
== JSOP_TABLESWITCH
|| op
== JSOP_LOOKUPSWITCH
);
612 off
= GET_JUMP_OFFSET(pc2
);
613 if (!AddSpanDep(cx
, cg
, pc
, pc2
, off
))
615 pc2
+= JUMP_OFFSET_LEN
;
616 if (op
== JSOP_TABLESWITCH
) {
617 low
= GET_JUMP_OFFSET(pc2
);
618 pc2
+= JUMP_OFFSET_LEN
;
619 high
= GET_JUMP_OFFSET(pc2
);
620 pc2
+= JUMP_OFFSET_LEN
;
621 njumps
= (uintN
) (high
- low
+ 1);
624 njumps
= GET_UINT16(pc2
);
626 indexlen
= INDEX_LEN
;
631 off
= GET_JUMP_OFFSET(pc2
);
632 if (!AddSpanDep(cx
, cg
, pc
, pc2
, off
))
634 pc2
+= JUMP_OFFSET_LEN
;
640 BuildSpanDepTable(JSContext
*cx
, JSCodeGenerator
*cg
)
642 jsbytecode
*pc
, *end
;
644 const JSCodeSpec
*cs
;
647 pc
= CG_BASE(cg
) + cg
->spanDepTodo
;
652 cs
= &js_CodeSpec
[op
];
654 switch (JOF_TYPE(cs
->format
)) {
655 case JOF_TABLESWITCH
:
656 case JOF_LOOKUPSWITCH
:
657 pc
= AddSwitchSpanDeps(cx
, cg
, pc
);
663 off
= GET_JUMP_OFFSET(pc
);
664 if (!AddSpanDep(cx
, cg
, pc
, pc
, off
))
677 GetSpanDep(JSCodeGenerator
*cg
, jsbytecode
*pc
)
684 index
= GET_SPANDEP_INDEX(pc
);
685 if (index
!= SPANDEP_INDEX_HUGE
)
686 return cg
->spanDeps
+ index
;
688 offset
= pc
- CG_BASE(cg
);
690 hi
= cg
->numSpanDeps
- 1;
693 sd
= cg
->spanDeps
+ mid
;
694 if (sd
->before
== offset
)
696 if (sd
->before
< offset
)
707 SetBackPatchDelta(JSContext
*cx
, JSCodeGenerator
*cg
, jsbytecode
*pc
,
712 JS_ASSERT(delta
>= 1 + JUMP_OFFSET_LEN
);
713 if (!cg
->spanDeps
&& delta
< JUMP_OFFSET_MAX
) {
714 SET_JUMP_OFFSET(pc
, delta
);
718 if (delta
> BPDELTA_MAX
) {
719 ReportStatementTooLarge(cx
, cg
);
723 if (!cg
->spanDeps
&& !BuildSpanDepTable(cx
, cg
))
726 sd
= GetSpanDep(cg
, pc
);
727 JS_ASSERT(SD_GET_BPDELTA(sd
) == 0);
728 SD_SET_BPDELTA(sd
, delta
);
733 UpdateJumpTargets(JSJumpTarget
*jt
, ptrdiff_t pivot
, ptrdiff_t delta
)
735 if (jt
->offset
> pivot
) {
737 if (jt
->kids
[JT_LEFT
])
738 UpdateJumpTargets(jt
->kids
[JT_LEFT
], pivot
, delta
);
740 if (jt
->kids
[JT_RIGHT
])
741 UpdateJumpTargets(jt
->kids
[JT_RIGHT
], pivot
, delta
);
745 FindNearestSpanDep(JSCodeGenerator
*cg
, ptrdiff_t offset
, int lo
,
749 JSSpanDep
*sdbase
, *sd
;
751 num
= cg
->numSpanDeps
;
754 sdbase
= cg
->spanDeps
;
758 if (sd
->before
== offset
)
760 if (sd
->before
< offset
)
768 JS_ASSERT(sd
->before
>= offset
&& (lo
== 0 || sd
[-1].before
< offset
));
773 FreeJumpTargets(JSCodeGenerator
*cg
, JSJumpTarget
*jt
)
775 if (jt
->kids
[JT_LEFT
])
776 FreeJumpTargets(cg
, jt
->kids
[JT_LEFT
]);
777 if (jt
->kids
[JT_RIGHT
])
778 FreeJumpTargets(cg
, jt
->kids
[JT_RIGHT
]);
779 jt
->kids
[JT_LEFT
] = cg
->jtFreeList
;
784 OptimizeSpanDeps(JSContext
*cx
, JSCodeGenerator
*cg
)
786 jsbytecode
*pc
, *oldpc
, *base
, *limit
, *next
;
787 JSSpanDep
*sd
, *sd2
, *sdbase
, *sdlimit
, *sdtop
, guard
;
788 ptrdiff_t offset
, growth
, delta
, top
, pivot
, span
, length
, target
;
793 jssrcnote
*sn
, *snlimit
;
795 uintN i
, n
, noteIndex
;
802 sdbase
= cg
->spanDeps
;
803 sdlimit
= sdbase
+ cg
->numSpanDeps
;
804 offset
= CG_OFFSET(cg
);
819 for (sd
= sdbase
; sd
< sdlimit
; sd
++) {
820 JS_ASSERT(JT_HAS_TAG(sd
->target
));
823 if (sd
->top
!= top
) {
826 JS_ASSERT(top
== sd
->before
);
830 type
= JOF_OPTYPE(op
);
831 if (JOF_TYPE_IS_EXTENDED_JUMP(type
)) {
833 * We already extended all the jump offset operands for
834 * the opcode at sd->top. Jumps and branches have only
835 * one jump offset operand, but switches have many, all
836 * of which are adjacent in cg->spanDeps.
841 JS_ASSERT(type
== JOF_JUMP
||
842 type
== JOF_TABLESWITCH
||
843 type
== JOF_LOOKUPSWITCH
);
846 if (!JOF_TYPE_IS_EXTENDED_JUMP(type
)) {
847 span
= SD_SPAN(sd
, pivot
);
848 if (span
< JUMP_OFFSET_MIN
|| JUMP_OFFSET_MAX
< span
) {
849 ptrdiff_t deltaFromTop
= 0;
854 case JSOP_GOTO
: op
= JSOP_GOTOX
; break;
855 case JSOP_IFEQ
: op
= JSOP_IFEQX
; break;
856 case JSOP_IFNE
: op
= JSOP_IFNEX
; break;
857 case JSOP_OR
: op
= JSOP_ORX
; break;
858 case JSOP_AND
: op
= JSOP_ANDX
; break;
859 case JSOP_GOSUB
: op
= JSOP_GOSUBX
; break;
860 case JSOP_CASE
: op
= JSOP_CASEX
; break;
861 case JSOP_DEFAULT
: op
= JSOP_DEFAULTX
; break;
862 case JSOP_TABLESWITCH
: op
= JSOP_TABLESWITCHX
; break;
863 case JSOP_LOOKUPSWITCH
: op
= JSOP_LOOKUPSWITCHX
; break;
865 ReportStatementTooLarge(cx
, cg
);
868 *pc
= (jsbytecode
) op
;
870 for (sd2
= sdtop
; sd2
< sdlimit
&& sd2
->top
== top
; sd2
++) {
873 * sd2->offset already includes delta as it stood
874 * before we entered this loop, but it must also
875 * include the delta relative to top due to all the
876 * extended jump offset immediates for the opcode
877 * starting at top, which we extend in this loop.
879 * If there is only one extended jump offset, then
880 * sd2->offset won't change and this for loop will
883 sd2
->offset
+= deltaFromTop
;
884 deltaFromTop
+= JUMPX_OFFSET_LEN
- JUMP_OFFSET_LEN
;
887 * sd2 comes after sd, and won't be revisited by
888 * the outer for loop, so we have to increase its
889 * offset by delta, not merely by deltaFromTop.
891 sd2
->offset
+= delta
;
894 delta
+= JUMPX_OFFSET_LEN
- JUMP_OFFSET_LEN
;
895 UpdateJumpTargets(cg
->jumpTargets
, sd2
->offset
,
896 JUMPX_OFFSET_LEN
- JUMP_OFFSET_LEN
);
908 TokenStream
*ts
= &cg
->parser
->tokenStream
;
910 printf("%s:%u: %u/%u jumps extended in %d passes (%d=%d+%d)\n",
911 ts
->filename
? ts
->filename
: "stdin", cg
->firstLine
,
912 growth
/ (JUMPX_OFFSET_LEN
- JUMP_OFFSET_LEN
), cg
->numSpanDeps
,
913 passes
, offset
+ growth
, offset
, growth
);
917 * Ensure that we have room for the extended jumps, but don't round up
918 * to a power of two -- we're done generating code, so we cut to fit.
920 limit
= CG_LIMIT(cg
);
921 length
= offset
+ growth
;
922 next
= base
+ length
;
924 JS_ASSERT(length
> BYTECODE_CHUNK
);
925 size
= BYTECODE_SIZE(limit
- base
);
926 incr
= BYTECODE_SIZE(length
) - size
;
927 JS_ARENA_GROW_CAST(base
, jsbytecode
*, cg
->codePool
, size
, incr
);
929 js_ReportOutOfScriptQuota(cx
);
933 CG_LIMIT(cg
) = next
= base
+ length
;
938 * Set up a fake span dependency record to guard the end of the code
939 * being generated. This guard record is returned as a fencepost by
940 * FindNearestSpanDep if there is no real spandep at or above a given
941 * unextended code offset.
944 guard
.offset
= offset
+ growth
;
945 guard
.before
= offset
;
950 * Now work backwards through the span dependencies, copying chunks of
951 * bytecode between each extended jump toward the end of the grown code
952 * space, and restoring immediate offset operands for all jump bytecodes.
953 * The first chunk of bytecodes, starting at base and ending at the first
954 * extended jump offset (NB: this chunk includes the operation bytecode
955 * just before that immediate jump offset), doesn't need to be copied.
957 JS_ASSERT(sd
== sdlimit
);
959 while (--sd
>= sdbase
) {
960 if (sd
->top
!= top
) {
962 op
= (JSOp
) base
[top
];
963 type
= JOF_OPTYPE(op
);
965 for (sd2
= sd
- 1; sd2
>= sdbase
&& sd2
->top
== top
; sd2
--)
969 JS_ASSERT(top
== sd2
->before
);
972 oldpc
= base
+ sd
->before
;
973 span
= SD_SPAN(sd
, pivot
);
976 * If this jump didn't need to be extended, restore its span immediate
977 * offset operand now, overwriting the index of sd within cg->spanDeps
978 * that was stored temporarily after *pc when BuildSpanDepTable ran.
980 * Note that span might fit in 16 bits even for an extended jump op,
981 * if the op has multiple span operands, not all of which overflowed
982 * (e.g. JSOP_LOOKUPSWITCH or JSOP_TABLESWITCH where some cases are in
983 * range for a short jump, but others are not).
985 if (!JOF_TYPE_IS_EXTENDED_JUMP(type
)) {
986 JS_ASSERT(JUMP_OFFSET_MIN
<= span
&& span
<= JUMP_OFFSET_MAX
);
987 SET_JUMP_OFFSET(oldpc
, span
);
992 * Set up parameters needed to copy the next run of bytecode starting
993 * at offset (which is a cursor into the unextended, original bytecode
994 * vector), down to sd->before (a cursor of the same scale as offset,
995 * it's the index of the original jump pc). Reuse delta to count the
996 * nominal number of bytes to copy.
998 pc
= base
+ sd
->offset
;
999 delta
= offset
- sd
->before
;
1000 JS_ASSERT(delta
>= 1 + JUMP_OFFSET_LEN
);
1003 * Don't bother copying the jump offset we're about to reset, but do
1004 * copy the bytecode at oldpc (which comes just before its immediate
1005 * jump offset operand), on the next iteration through the loop, by
1006 * including it in offset's new value.
1008 offset
= sd
->before
+ 1;
1009 size
= BYTECODE_SIZE(delta
- (1 + JUMP_OFFSET_LEN
));
1011 memmove(pc
+ 1 + JUMPX_OFFSET_LEN
,
1012 oldpc
+ 1 + JUMP_OFFSET_LEN
,
1016 SET_JUMPX_OFFSET(pc
, span
);
1021 * Fix source note deltas. Don't hardwire the delta fixup adjustment,
1022 * even though currently it must be JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN
1023 * at each sd that moved. The future may bring different offset sizes
1024 * for span-dependent instruction operands. However, we fix only main
1025 * notes here, not prolog notes -- we know that prolog opcodes are not
1026 * span-dependent, and aren't likely ever to be.
1028 offset
= growth
= 0;
1030 for (sn
= cg
->main
.notes
, snlimit
= sn
+ cg
->main
.noteCount
;
1034 * Recall that the offset of a given note includes its delta, and
1035 * tells the offset of the annotated bytecode from the main entry
1036 * point of the script.
1038 offset
+= SN_DELTA(sn
);
1039 while (sd
< sdlimit
&& sd
->before
< offset
) {
1041 * To compute the delta to add to sn, we need to look at the
1042 * spandep after sd, whose offset - (before + growth) tells by
1043 * how many bytes sd's instruction grew.
1048 delta
= sd2
->offset
- (sd2
->before
+ growth
);
1050 JS_ASSERT(delta
== JUMPX_OFFSET_LEN
- JUMP_OFFSET_LEN
);
1051 sn
= js_AddToSrcNoteDelta(cx
, cg
, sn
, delta
);
1054 snlimit
= cg
->main
.notes
+ cg
->main
.noteCount
;
1061 * If sn has span-dependent offset operands, check whether each
1062 * covers further span-dependencies, and increase those operands
1063 * accordingly. Some source notes measure offset not from the
1064 * annotated pc, but from that pc plus some small bias. NB: we
1065 * assume that spec->offsetBias can't itself span span-dependent
1068 spec
= &js_SrcNoteSpec
[SN_TYPE(sn
)];
1069 if (spec
->isSpanDep
) {
1070 pivot
= offset
+ spec
->offsetBias
;
1072 for (i
= 0; i
< n
; i
++) {
1073 span
= js_GetSrcNoteOffset(sn
, i
);
1076 target
= pivot
+ span
* spec
->isSpanDep
;
1077 sd2
= FindNearestSpanDep(cg
, target
,
1084 * Increase target by sd2's before-vs-after offset delta,
1085 * which is absolute (i.e., relative to start of script,
1086 * as is target). Recompute the span by subtracting its
1087 * adjusted pivot from target.
1089 target
+= sd2
->offset
- sd2
->before
;
1090 span
= target
- (pivot
+ growth
);
1091 span
*= spec
->isSpanDep
;
1092 noteIndex
= sn
- cg
->main
.notes
;
1093 if (!js_SetSrcNoteOffset(cx
, cg
, noteIndex
, i
, span
))
1095 sn
= cg
->main
.notes
+ noteIndex
;
1096 snlimit
= cg
->main
.notes
+ cg
->main
.noteCount
;
1100 cg
->main
.lastNoteOffset
+= growth
;
1103 * Fix try/catch notes (O(numTryNotes * log2(numSpanDeps)), but it's
1104 * not clear how we can beat that).
1106 for (tryNode
= cg
->lastTryNode
; tryNode
; tryNode
= tryNode
->prev
) {
1108 * First, look for the nearest span dependency at/above tn->start.
1109 * There may not be any such spandep, in which case the guard will
1112 offset
= tryNode
->note
.start
;
1113 sd
= FindNearestSpanDep(cg
, offset
, 0, &guard
);
1114 delta
= sd
->offset
- sd
->before
;
1115 tryNode
->note
.start
= offset
+ delta
;
1118 * Next, find the nearest spandep at/above tn->start + tn->length.
1119 * Use its delta minus tn->start's delta to increase tn->length.
1121 length
= tryNode
->note
.length
;
1122 sd2
= FindNearestSpanDep(cg
, offset
+ length
, sd
- sdbase
, &guard
);
1124 tryNode
->note
.length
=
1125 length
+ sd2
->offset
- sd2
->before
- delta
;
1130 #ifdef DEBUG_brendan
1134 for (sd
= sdbase
; sd
< sdlimit
; sd
++) {
1135 offset
= sd
->offset
;
1137 /* NB: sd->top cursors into the original, unextended bytecode vector. */
1138 if (sd
->top
!= top
) {
1139 JS_ASSERT(top
== -1 ||
1140 !JOF_TYPE_IS_EXTENDED_JUMP(type
) ||
1144 JS_ASSERT(top
== sd
->before
);
1145 op
= (JSOp
) base
[offset
];
1146 type
= JOF_OPTYPE(op
);
1147 JS_ASSERT(type
== JOF_JUMP
||
1148 type
== JOF_JUMPX
||
1149 type
== JOF_TABLESWITCH
||
1150 type
== JOF_TABLESWITCHX
||
1151 type
== JOF_LOOKUPSWITCH
||
1152 type
== JOF_LOOKUPSWITCHX
);
1157 if (JOF_TYPE_IS_EXTENDED_JUMP(type
)) {
1158 span
= GET_JUMPX_OFFSET(pc
);
1159 if (span
< JUMP_OFFSET_MIN
|| JUMP_OFFSET_MAX
< span
) {
1162 JS_ASSERT(type
== JOF_TABLESWITCHX
||
1163 type
== JOF_LOOKUPSWITCHX
);
1166 span
= GET_JUMP_OFFSET(pc
);
1168 JS_ASSERT(SD_SPAN(sd
, pivot
) == span
);
1170 JS_ASSERT(!JOF_TYPE_IS_EXTENDED_JUMP(type
) || bigspans
!= 0);
1175 * Reset so we optimize at most once -- cg may be used for further code
1176 * generation of successive, independent, top-level statements. No jump
1177 * can span top-level statements, because JS lacks goto.
1179 size
= SPANDEPS_SIZE(JS_BIT(JS_CeilingLog2(cg
->numSpanDeps
)));
1180 cx
->free(cg
->spanDeps
);
1181 cg
->spanDeps
= NULL
;
1182 FreeJumpTargets(cg
, cg
->jumpTargets
);
1183 cg
->jumpTargets
= NULL
;
1184 cg
->numSpanDeps
= cg
->numJumpTargets
= 0;
1185 cg
->spanDepTodo
= CG_OFFSET(cg
);
1190 EmitJump(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp op
, ptrdiff_t off
)
1196 extend
= off
< JUMP_OFFSET_MIN
|| JUMP_OFFSET_MAX
< off
;
1197 if (extend
&& !cg
->spanDeps
&& !BuildSpanDepTable(cx
, cg
))
1200 jmp
= js_Emit3(cx
, cg
, op
, JUMP_OFFSET_HI(off
), JUMP_OFFSET_LO(off
));
1201 if (jmp
>= 0 && (extend
|| cg
->spanDeps
)) {
1202 pc
= CG_CODE(cg
, jmp
);
1203 if (!AddSpanDep(cx
, cg
, pc
, pc
, off
))
1210 GetJumpOffset(JSCodeGenerator
*cg
, jsbytecode
*pc
)
1217 return GET_JUMP_OFFSET(pc
);
1219 sd
= GetSpanDep(cg
, pc
);
1221 if (!JT_HAS_TAG(jt
))
1222 return JT_TO_BPDELTA(jt
);
1225 while (--sd
>= cg
->spanDeps
&& sd
->top
== top
)
1228 return JT_CLR_TAG(jt
)->offset
- sd
->offset
;
1232 js_SetJumpOffset(JSContext
*cx
, JSCodeGenerator
*cg
, jsbytecode
*pc
,
1235 if (!cg
->spanDeps
) {
1236 if (JUMP_OFFSET_MIN
<= off
&& off
<= JUMP_OFFSET_MAX
) {
1237 SET_JUMP_OFFSET(pc
, off
);
1241 if (!BuildSpanDepTable(cx
, cg
))
1245 return SetSpanDepTarget(cx
, cg
, GetSpanDep(cg
, pc
), off
);
1249 JSTreeContext::inStatement(JSStmtType type
)
1251 for (JSStmtInfo
*stmt
= topStmt
; stmt
; stmt
= stmt
->down
) {
1252 if (stmt
->type
== type
)
1259 JSTreeContext::ensureSharpSlots()
1261 #if JS_HAS_SHARP_VARS
1262 JS_STATIC_ASSERT(SHARP_NSLOTS
== 2);
1264 if (sharpSlotBase
>= 0) {
1265 JS_ASSERT(flags
& TCF_HAS_SHARPS
);
1269 JS_ASSERT(!(flags
& TCF_HAS_SHARPS
));
1271 JSContext
*cx
= parser
->context
;
1272 JSAtom
*sharpArrayAtom
= js_Atomize(cx
, "#array", 6, 0);
1273 JSAtom
*sharpDepthAtom
= js_Atomize(cx
, "#depth", 6, 0);
1274 if (!sharpArrayAtom
|| !sharpDepthAtom
)
1277 sharpSlotBase
= fun
->u
.i
.nvars
;
1278 if (!js_AddLocal(cx
, fun
, sharpArrayAtom
, JSLOCAL_VAR
))
1280 if (!js_AddLocal(cx
, fun
, sharpDepthAtom
, JSLOCAL_VAR
))
1284 * Compiler::compileScript will rebase immediate operands indexing
1285 * the sharp slots to come at the end of the global script's |nfixed|
1286 * slots storage, after gvars and regexps.
1290 flags
|= TCF_HAS_SHARPS
;
1296 JSTreeContext::skipSpansGenerator(unsigned skip
)
1298 JSTreeContext
*tc
= this;
1299 for (unsigned i
= 0; i
< skip
; ++i
, tc
= tc
->parent
) {
1302 if (tc
->flags
& TCF_FUN_IS_GENERATOR
)
1309 js_PushStatement(JSTreeContext
*tc
, JSStmtInfo
*stmt
, JSStmtType type
,
1314 stmt
->blockid
= tc
->blockid();
1315 SET_STATEMENT_TOP(stmt
, top
);
1317 JS_ASSERT(!stmt
->blockObj
);
1318 stmt
->down
= tc
->topStmt
;
1320 if (STMT_LINKS_SCOPE(stmt
)) {
1321 stmt
->downScope
= tc
->topScopeStmt
;
1322 tc
->topScopeStmt
= stmt
;
1324 stmt
->downScope
= NULL
;
1329 js_PushBlockScope(JSTreeContext
*tc
, JSStmtInfo
*stmt
, JSObject
*blockObj
,
1332 js_PushStatement(tc
, stmt
, STMT_BLOCK
, top
);
1333 stmt
->flags
|= SIF_SCOPE
;
1334 blockObj
->setParent(tc
->blockChain
);
1335 stmt
->downScope
= tc
->topScopeStmt
;
1336 tc
->topScopeStmt
= stmt
;
1337 tc
->blockChain
= blockObj
;
1338 stmt
->blockObj
= blockObj
;
1342 * Emit a backpatch op with offset pointing to the previous jump of this type,
1343 * so that we can walk back up the chain fixing up the op and jump offset.
1346 EmitBackPatchOp(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp op
, ptrdiff_t *lastp
)
1348 ptrdiff_t offset
, delta
;
1350 offset
= CG_OFFSET(cg
);
1351 delta
= offset
- *lastp
;
1353 JS_ASSERT(delta
> 0);
1354 return EmitJump(cx
, cg
, op
, delta
);
1358 * Macro to emit a bytecode followed by a uint16 immediate operand stored in
1359 * big-endian order, used for arg and var numbers as well as for atomIndexes.
1360 * NB: We use cx and cg from our caller's lexical environment, and return
1363 #define EMIT_UINT16_IMM_OP(op, i) \
1365 if (js_Emit3(cx, cg, op, UINT16_HI(i), UINT16_LO(i)) < 0) \
1369 #define EMIT_UINT16PAIR_IMM_OP(op, i, j) \
1371 ptrdiff_t off_ = js_EmitN(cx, cg, op, 2 * UINT16_LEN); \
1374 jsbytecode *pc_ = CG_CODE(cg, off_); \
1375 SET_UINT16(pc_, i); \
1376 pc_ += UINT16_LEN; \
1377 SET_UINT16(pc_, j); \
1381 FlushPops(JSContext
*cx
, JSCodeGenerator
*cg
, intN
*npops
)
1383 JS_ASSERT(*npops
!= 0);
1384 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0)
1386 EMIT_UINT16_IMM_OP(JSOP_POPN
, *npops
);
1392 * Emit additional bytecode(s) for non-local jumps.
1395 EmitNonLocalJumpFixup(JSContext
*cx
, JSCodeGenerator
*cg
, JSStmtInfo
*toStmt
)
1401 * The non-local jump fixup we emit will unbalance cg->stackDepth, because
1402 * the fixup replicates balanced code such as JSOP_LEAVEWITH emitted at the
1403 * end of a with statement, so we save cg->stackDepth here and restore it
1404 * just before a successful return.
1406 depth
= cg
->stackDepth
;
1409 #define FLUSH_POPS() if (npops && !FlushPops(cx, cg, &npops)) return JS_FALSE
1411 for (stmt
= cg
->topStmt
; stmt
!= toStmt
; stmt
= stmt
->down
) {
1412 switch (stmt
->type
) {
1415 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0)
1417 if (EmitBackPatchOp(cx
, cg
, JSOP_BACKPATCH
, &GOSUBS(*stmt
)) < 0)
1422 /* There's a With object on the stack that we need to pop. */
1424 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0)
1426 if (js_Emit1(cx
, cg
, JSOP_LEAVEWITH
) < 0)
1430 case STMT_FOR_IN_LOOP
:
1432 * The iterator and the object being iterated need to be popped.
1435 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0)
1437 if (js_Emit1(cx
, cg
, JSOP_ENDITER
) < 0)
1441 case STMT_SUBROUTINE
:
1443 * There's a [exception or hole, retsub pc-index] pair on the
1444 * stack that we need to pop.
1452 if (stmt
->flags
& SIF_SCOPE
) {
1455 /* There is a Block object with locals on the stack to pop. */
1457 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0)
1459 i
= OBJ_BLOCK_COUNT(cx
, stmt
->blockObj
);
1460 EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK
, i
);
1465 cg
->stackDepth
= depth
;
1472 EmitGoto(JSContext
*cx
, JSCodeGenerator
*cg
, JSStmtInfo
*toStmt
,
1473 ptrdiff_t *lastp
, JSAtomListElement
*label
, JSSrcNoteType noteType
)
1477 if (!EmitNonLocalJumpFixup(cx
, cg
, toStmt
))
1481 index
= js_NewSrcNote2(cx
, cg
, noteType
, (ptrdiff_t) ALE_INDEX(label
));
1482 else if (noteType
!= SRC_NULL
)
1483 index
= js_NewSrcNote(cx
, cg
, noteType
);
1489 return EmitBackPatchOp(cx
, cg
, JSOP_BACKPATCH
, lastp
);
1493 BackPatch(JSContext
*cx
, JSCodeGenerator
*cg
, ptrdiff_t last
,
1494 jsbytecode
*target
, jsbytecode op
)
1496 jsbytecode
*pc
, *stop
;
1497 ptrdiff_t delta
, span
;
1499 pc
= CG_CODE(cg
, last
);
1500 stop
= CG_CODE(cg
, -1);
1501 while (pc
!= stop
) {
1502 delta
= GetJumpOffset(cg
, pc
);
1504 CHECK_AND_SET_JUMP_OFFSET(cx
, cg
, pc
, span
);
1507 * Set *pc after jump offset in case bpdelta didn't overflow, but span
1508 * does (if so, CHECK_AND_SET_JUMP_OFFSET might call BuildSpanDepTable
1509 * and need to see the JSOP_BACKPATCH* op at *pc).
1518 js_PopStatement(JSTreeContext
*tc
)
1523 tc
->topStmt
= stmt
->down
;
1524 if (STMT_LINKS_SCOPE(stmt
)) {
1525 tc
->topScopeStmt
= stmt
->downScope
;
1526 if (stmt
->flags
& SIF_SCOPE
) {
1527 tc
->blockChain
= stmt
->blockObj
->getParent();
1528 JS_SCOPE_DEPTH_METERING(--tc
->scopeDepth
);
1534 js_PopStatementCG(JSContext
*cx
, JSCodeGenerator
*cg
)
1539 if (!STMT_IS_TRYING(stmt
) &&
1540 (!BackPatch(cx
, cg
, stmt
->breaks
, CG_NEXT(cg
), JSOP_GOTO
) ||
1541 !BackPatch(cx
, cg
, stmt
->continues
, CG_CODE(cg
, stmt
->update
),
1545 js_PopStatement(cg
);
1550 js_DefineCompileTimeConstant(JSContext
*cx
, JSCodeGenerator
*cg
, JSAtom
*atom
,
1553 /* XXX just do numbers for now */
1554 if (pn
->pn_type
== TOK_NUMBER
) {
1555 if (!cg
->constMap
.put(atom
, NumberValue(pn
->pn_dval
)))
1562 js_LexicalLookup(JSTreeContext
*tc
, JSAtom
*atom
, jsint
*slotp
, JSStmtInfo
*stmt
)
1566 JSScopeProperty
*sprop
;
1569 stmt
= tc
->topScopeStmt
;
1570 for (; stmt
; stmt
= stmt
->downScope
) {
1571 if (stmt
->type
== STMT_WITH
)
1574 /* Skip "maybe scope" statements that don't contain let bindings. */
1575 if (!(stmt
->flags
& SIF_SCOPE
))
1578 obj
= stmt
->blockObj
;
1579 JS_ASSERT(obj
->getClass() == &js_BlockClass
);
1580 scope
= obj
->scope();
1581 sprop
= scope
->lookup(ATOM_TO_JSID(atom
));
1583 JS_ASSERT(sprop
->hasShortID());
1586 JS_ASSERT(obj
->fslots
[JSSLOT_BLOCK_DEPTH
].isInt32());
1587 *slotp
= obj
->fslots
[JSSLOT_BLOCK_DEPTH
].toInt32() +
1600 * The function sets vp to NO_CONSTANT when the atom does not corresponds to a
1601 * name defining a constant.
1604 LookupCompileTimeConstant(JSContext
*cx
, JSCodeGenerator
*cg
, JSAtom
*atom
,
1611 * Chase down the cg stack, but only until we reach the outermost cg.
1612 * This enables propagating consts from top-level into switch cases in a
1613 * function compiled along with the top-level script.
1615 constp
->setMagic(JS_NO_CONSTANT
);
1617 if (cg
->inFunction() || cg
->compileAndGo()) {
1618 /* XXX this will need revising if 'const' becomes block-scoped. */
1619 stmt
= js_LexicalLookup(cg
, atom
, NULL
);
1623 if (JSCodeGenerator::ConstMap::Ptr p
= cg
->constMap
.lookup(atom
)) {
1624 JS_ASSERT(!p
->value
.isMagic(JS_NO_CONSTANT
));
1630 * Try looking in the variable object for a direct property that
1631 * is readonly and permanent. We know such a property can't be
1632 * shadowed by another property on obj's prototype chain, or a
1633 * with object or catch variable; nor can prop's value be changed,
1634 * nor can prop be deleted.
1636 if (cg
->inFunction()) {
1637 if (js_LookupLocal(cx
, cg
->fun
, atom
, NULL
) != JSLOCAL_NONE
)
1640 JS_ASSERT(cg
->compileAndGo());
1641 obj
= cg
->scopeChain
;
1643 JS_LOCK_OBJ(cx
, obj
);
1644 JSScope
*scope
= obj
->scope();
1645 JSScopeProperty
*sprop
= scope
->lookup(ATOM_TO_JSID(atom
));
1648 * We're compiling code that will be executed immediately,
1649 * not re-executed against a different scope chain and/or
1650 * variable object. Therefore we can get constant values
1651 * from our variable object here.
1653 if (!sprop
->writable() && !sprop
->configurable() &&
1654 sprop
->hasDefaultGetter() && SPROP_HAS_VALID_SLOT(sprop
, scope
)) {
1655 *constp
= obj
->lockedGetSlot(sprop
->slot
);
1658 JS_UNLOCK_SCOPE(cx
, scope
);
1664 } while ((cg
= (JSCodeGenerator
*) cg
->parent
) != NULL
);
1669 * Return JSOP_NOP to indicate that index fits 2 bytes and no index segment
1670 * reset instruction is necessary, JSOP_FALSE to indicate an error or either
1671 * JSOP_RESETBASE0 or JSOP_RESETBASE1 to indicate the reset bytecode to issue
1672 * after the main bytecode sequence.
1675 EmitBigIndexPrefix(JSContext
*cx
, JSCodeGenerator
*cg
, uintN index
)
1680 * We have max 3 bytes for indexes and check for INDEX_LIMIT overflow only
1683 JS_STATIC_ASSERT(INDEX_LIMIT
<= JS_BIT(24));
1684 JS_STATIC_ASSERT(INDEX_LIMIT
>=
1685 (JSOP_INDEXBASE3
- JSOP_INDEXBASE1
+ 2) << 16);
1687 if (index
< JS_BIT(16))
1689 indexBase
= index
>> 16;
1690 if (indexBase
<= JSOP_INDEXBASE3
- JSOP_INDEXBASE1
+ 1) {
1691 if (js_Emit1(cx
, cg
, (JSOp
)(JSOP_INDEXBASE1
+ indexBase
- 1)) < 0)
1693 return JSOP_RESETBASE0
;
1696 if (index
>= INDEX_LIMIT
) {
1697 JS_ReportErrorNumber(cx
, js_GetErrorMessage
, NULL
,
1698 JSMSG_TOO_MANY_LITERALS
);
1702 if (js_Emit2(cx
, cg
, JSOP_INDEXBASE
, (JSOp
)indexBase
) < 0)
1704 return JSOP_RESETBASE
;
1708 * Emit a bytecode and its 2-byte constant index immediate operand. If the
1709 * index requires more than 2 bytes, emit a prefix op whose 8-bit immediate
1710 * operand effectively extends the 16-bit immediate of the prefixed opcode,
1711 * by changing index "segment" (see jsinterp.c). We optimize segments 1-3
1712 * with single-byte JSOP_INDEXBASE[123] codes.
1714 * Such prefixing currently requires a suffix to restore the "zero segment"
1715 * register setting, but this could be optimized further.
1718 EmitIndexOp(JSContext
*cx
, JSOp op
, uintN index
, JSCodeGenerator
*cg
)
1722 bigSuffix
= EmitBigIndexPrefix(cx
, cg
, index
);
1723 if (bigSuffix
== JSOP_FALSE
)
1725 EMIT_UINT16_IMM_OP(op
, index
);
1726 return bigSuffix
== JSOP_NOP
|| js_Emit1(cx
, cg
, bigSuffix
) >= 0;
1730 * Slight sugar for EmitIndexOp, again accessing cx and cg from the macro
1731 * caller's lexical environment, and embedding a false return on error.
1733 #define EMIT_INDEX_OP(op, index) \
1735 if (!EmitIndexOp(cx, op, index, cg)) \
1740 EmitAtomOp(JSContext
*cx
, JSParseNode
*pn
, JSOp op
, JSCodeGenerator
*cg
)
1742 JSAtomListElement
*ale
;
1744 JS_ASSERT(JOF_OPTYPE(op
) == JOF_ATOM
);
1745 if (op
== JSOP_GETPROP
&&
1746 pn
->pn_atom
== cx
->runtime
->atomState
.lengthAtom
) {
1747 return js_Emit1(cx
, cg
, JSOP_LENGTH
) >= 0;
1749 ale
= cg
->atomList
.add(cg
->parser
, pn
->pn_atom
);
1752 return EmitIndexOp(cx
, op
, ALE_INDEX(ale
), cg
);
1756 EmitObjectOp(JSContext
*cx
, JSObjectBox
*objbox
, JSOp op
,
1757 JSCodeGenerator
*cg
)
1759 JS_ASSERT(JOF_OPTYPE(op
) == JOF_OBJECT
);
1760 return EmitIndexOp(cx
, op
, cg
->objectList
.index(objbox
), cg
);
1764 * What good are ARGNO_LEN and SLOTNO_LEN, you ask? The answer is that, apart
1765 * from EmitSlotIndexOp, they abstract out the detail that both are 2, and in
1766 * other parts of the code there's no necessary relationship between the two.
1767 * The abstraction cracks here in order to share EmitSlotIndexOp code among
1768 * the JSOP_DEFLOCALFUN and JSOP_GET{ARG,VAR,LOCAL}PROP cases.
1770 JS_STATIC_ASSERT(ARGNO_LEN
== 2);
1771 JS_STATIC_ASSERT(SLOTNO_LEN
== 2);
1774 EmitSlotIndexOp(JSContext
*cx
, JSOp op
, uintN slot
, uintN index
,
1775 JSCodeGenerator
*cg
)
1781 JS_ASSERT(JOF_OPTYPE(op
) == JOF_SLOTATOM
||
1782 JOF_OPTYPE(op
) == JOF_SLOTOBJECT
);
1783 bigSuffix
= EmitBigIndexPrefix(cx
, cg
, index
);
1784 if (bigSuffix
== JSOP_FALSE
)
1787 /* Emit [op, slot, index]. */
1788 off
= js_EmitN(cx
, cg
, op
, 2 + INDEX_LEN
);
1791 pc
= CG_CODE(cg
, off
);
1792 SET_UINT16(pc
, slot
);
1794 SET_INDEX(pc
, index
);
1795 return bigSuffix
== JSOP_NOP
|| js_Emit1(cx
, cg
, bigSuffix
) >= 0;
1799 * Adjust the slot for a block local to account for the number of variables
1800 * that share the same index space with locals. Due to the incremental code
1801 * generation for top-level script, we do the adjustment via code patching in
1802 * Compiler::compileScript; see comments there.
1804 * The function returns -1 on failures.
1807 AdjustBlockSlot(JSContext
*cx
, JSCodeGenerator
*cg
, jsint slot
)
1809 JS_ASSERT((jsuint
) slot
< cg
->maxStackDepth
);
1810 if (cg
->inFunction()) {
1811 slot
+= cg
->fun
->u
.i
.nvars
;
1812 if ((uintN
) slot
>= SLOTNO_LIMIT
) {
1813 ReportCompileErrorNumber(cx
, CG_TS(cg
), NULL
, JSREPORT_ERROR
, JSMSG_TOO_MANY_LOCALS
);
1821 EmitEnterBlock(JSContext
*cx
, JSParseNode
*pn
, JSCodeGenerator
*cg
)
1823 JS_ASSERT(PN_TYPE(pn
) == TOK_LEXICALSCOPE
);
1824 if (!EmitObjectOp(cx
, pn
->pn_objbox
, JSOP_ENTERBLOCK
, cg
))
1827 JSObject
*blockObj
= pn
->pn_objbox
->object
;
1828 jsint depth
= AdjustBlockSlot(cx
, cg
, OBJ_BLOCK_DEPTH(cx
, blockObj
));
1832 uintN base
= JSSLOT_FREE(&js_BlockClass
);
1833 for (uintN slot
= base
, limit
= base
+ OBJ_BLOCK_COUNT(cx
, blockObj
); slot
< limit
; slot
++) {
1834 const Value
&v
= blockObj
->getSlot(slot
);
1836 /* Beware the empty destructuring dummy. */
1837 if (v
.isUndefined()) {
1838 JS_ASSERT(slot
+ 1 <= limit
);
1842 JSDefinition
*dn
= (JSDefinition
*) v
.toPrivate();
1843 JS_ASSERT(dn
->pn_defn
);
1844 JS_ASSERT(uintN(dn
->frameSlot() + depth
) < JS_BIT(16));
1845 dn
->pn_cookie
.set(dn
->pn_cookie
.level(), dn
->frameSlot() + depth
);
1847 for (JSParseNode
*pnu
= dn
->dn_uses
; pnu
; pnu
= pnu
->pn_link
) {
1848 JS_ASSERT(pnu
->pn_lexdef
== dn
);
1849 JS_ASSERT(!(pnu
->pn_dflags
& PND_BOUND
));
1850 JS_ASSERT(pnu
->pn_cookie
.isFree());
1855 blockObj
->scope()->freeslot
= base
;
1856 return blockObj
->growSlots(cx
, base
);
1860 * When eval is called from a function, the eval code or function code it
1861 * compiles may reference upvars that live in the eval-calling function. The
1862 * eval-invoked compiler does not have explicit definitions for these upvars
1863 * and we do not attempt to create them a-priori (by inspecting the function's
1864 * args and vars) -- we could, but we'd take an avoidable penalty for each
1865 * function local not referenced by any upvar. Instead, we map such upvars
1866 * lazily, growing upvarMap.vector by powers of two.
1868 * This function knows that it is called with pn pointing to a PN_NAME-arity
1869 * node, and cg->parser->callerFrame having a non-null fun member, and the
1870 * static level of cg at least one greater than the eval-calling function's
1874 MakeUpvarForEval(JSParseNode
*pn
, JSCodeGenerator
*cg
)
1876 JSContext
*cx
= cg
->parser
->context
;
1877 JSFunction
*fun
= cg
->parser
->callerFrame
->fun
;
1878 uintN upvarLevel
= fun
->u
.i
.script
->staticLevel
;
1880 JSFunctionBox
*funbox
= cg
->funbox
;
1883 * Treat top-level function definitions as escaping (i.e., as funargs),
1884 * required since we compile each such top level function or statement
1885 * and throw away the AST, so we can't yet see all funarg uses of this
1886 * function being compiled (cg->funbox->object). See bug 493177.
1888 if (funbox
->level
== fun
->u
.i
.script
->staticLevel
+ 1U &&
1889 !(((JSFunction
*) funbox
->object
)->flags
& JSFUN_LAMBDA
)) {
1890 JS_ASSERT_IF(cx
->options
& JSOPTION_ANONFUNFIX
,
1891 ((JSFunction
*) funbox
->object
)->atom
);
1895 while (funbox
->level
>= upvarLevel
) {
1896 if (funbox
->node
->pn_dflags
& PND_FUNARG
)
1898 funbox
= funbox
->parent
;
1904 JSAtom
*atom
= pn
->pn_atom
;
1907 JSLocalKind localKind
= js_LookupLocal(cx
, fun
, atom
, &index
);
1908 if (localKind
== JSLOCAL_NONE
)
1911 JS_ASSERT(cg
->staticLevel
> upvarLevel
);
1912 if (cg
->staticLevel
>= UpvarCookie::UPVAR_LEVEL_LIMIT
)
1915 JSAtomListElement
*ale
= cg
->upvarList
.lookup(atom
);
1917 if (cg
->inFunction() &&
1918 !js_AddLocal(cx
, cg
->fun
, atom
, JSLOCAL_UPVAR
)) {
1922 ale
= cg
->upvarList
.add(cg
->parser
, atom
);
1925 JS_ASSERT(ALE_INDEX(ale
) == cg
->upvarList
.count
- 1);
1927 UpvarCookie
*vector
= cg
->upvarMap
.vector
;
1928 uint32 length
= cg
->upvarMap
.length
;
1930 JS_ASSERT(ALE_INDEX(ale
) <= length
);
1931 if (ALE_INDEX(ale
) == length
) {
1932 length
= 2 * JS_MAX(2, length
);
1933 vector
= reinterpret_cast<UpvarCookie
*>(cx
->realloc(vector
, length
* sizeof *vector
));
1936 cg
->upvarMap
.vector
= vector
;
1937 cg
->upvarMap
.length
= length
;
1940 if (localKind
!= JSLOCAL_ARG
)
1941 index
+= fun
->nargs
;
1942 JS_ASSERT(index
< JS_BIT(16));
1944 uintN skip
= cg
->staticLevel
- upvarLevel
;
1945 vector
[ALE_INDEX(ale
)].set(skip
, index
);
1948 pn
->pn_op
= JSOP_GETUPVAR
;
1949 pn
->pn_cookie
.set(cg
->staticLevel
, ALE_INDEX(ale
));
1950 pn
->pn_dflags
|= PND_BOUND
;
1955 * BindNameToSlot attempts to optimize name gets and sets to stack slot loads
1956 * and stores, given the compile-time information in cg and a TOK_NAME node pn.
1957 * It returns false on error, true on success.
1959 * The caller can inspect pn->pn_cookie for FREE_UPVAR_COOKIE to tell whether
1960 * optimization occurred, in which case BindNameToSlot also updated pn->pn_op.
1961 * If pn->pn_cookie is still FREE_UPVAR_COOKIE on return, pn->pn_op still may
1962 * have been optimized, e.g., from JSOP_NAME to JSOP_CALLEE. Whether or not
1963 * pn->pn_op was modified, if this function finds an argument or local variable
1964 * name, PND_CONST will be set in pn_dflags for read-only properties after a
1965 * successful return.
1967 * NB: if you add more opcodes specialized from JSOP_NAME, etc., don't forget
1968 * to update the TOK_FOR (for-in) and TOK_ASSIGN (op=, e.g. +=) special cases
1972 BindNameToSlot(JSContext
*cx
, JSCodeGenerator
*cg
, JSParseNode
*pn
)
1977 JSDefinition::Kind dn_kind
;
1978 JSAtomListElement
*ale
;
1981 JS_ASSERT(pn
->pn_type
== TOK_NAME
);
1983 /* Idempotency tests come first, since we may be called more than once. */
1984 if (pn
->pn_dflags
& PND_BOUND
)
1987 /* No cookie initialized for these two, they're pre-bound by definition. */
1988 JS_ASSERT(pn
->pn_op
!= JSOP_ARGUMENTS
&& pn
->pn_op
!= JSOP_CALLEE
);
1991 * The parser linked all uses (including forward references) to their
1992 * definitions, unless a with statement or direct eval intervened.
1995 JS_ASSERT(pn
->pn_cookie
.isFree());
1997 JS_ASSERT(dn
->pn_defn
);
1998 if (pn
->isDeoptimized())
2000 pn
->pn_dflags
|= (dn
->pn_dflags
& PND_CONST
);
2004 dn
= (JSDefinition
*) pn
;
2011 JS_ASSERT(JOF_OPTYPE(op
) == JOF_ATOM
);
2013 UpvarCookie cookie
= dn
->pn_cookie
;
2014 dn_kind
= dn
->kind();
2017 * Turn attempts to mutate const-declared bindings into get ops (for
2018 * pre-increment and pre-decrement ops, our caller will have to emit
2019 * JSOP_POS, JSOP_ONE, and JSOP_ADD as well).
2021 * Turn JSOP_DELNAME into JSOP_FALSE if dn is known, as all declared
2022 * bindings visible to the compiler are permanent in JS unless the
2023 * declaration originates in eval code. We detect eval code by testing
2024 * cg->parser->callerFrame, which is set only by eval or a debugger
2027 * Note that this callerFrame non-null test must be qualified by testing
2028 * !cg->funbox to exclude function code nested in eval code, which is not
2029 * subject to the deletable binding exception.
2036 if (dn_kind
!= JSDefinition::UNKNOWN
) {
2037 if (cg
->parser
->callerFrame
&& !cg
->funbox
)
2038 JS_ASSERT(cg
->compileAndGo());
2040 pn
->pn_op
= JSOP_FALSE
;
2041 pn
->pn_dflags
|= PND_BOUND
;
2047 pn
->pn_op
= op
= JSOP_NAME
;
2050 if (cookie
.isFree()) {
2051 JSStackFrame
*caller
= cg
->parser
->callerFrame
;
2053 JS_ASSERT(cg
->compileAndGo());
2056 * Don't generate upvars on the left side of a for loop. See
2059 if (cg
->flags
& TCF_IN_FOR_INIT
)
2062 JS_ASSERT(caller
->script
);
2067 * Make sure the variable object used by the compiler to initialize
2068 * parent links matches the caller's varobj. Compile-n-go compiler-
2069 * created function objects have the top-level cg's scopeChain set
2070 * as their parent by Parser::newFunction.
2072 JSObject
*scopeobj
= cg
->inFunction()
2073 ? FUN_OBJECT(cg
->fun
)->getParent()
2075 if (scopeobj
!= cg
->parser
->callerVarObj
)
2079 * We are compiling eval or debug script inside a function frame
2080 * and the scope chain matches the function's variable object.
2081 * Optimize access to function's arguments and variable and the
2084 if (op
!= JSOP_NAME
)
2088 * Generator functions may be resumed from any call stack, which
2089 * defeats the display optimization to static link searching used
2090 * by JSOP_{GET,CALL}UPVAR.
2092 JSFunction
*fun
= cg
->parser
->callerFrame
->fun
;
2093 JS_ASSERT(cg
->staticLevel
>= fun
->u
.i
.script
->staticLevel
);
2094 unsigned skip
= cg
->staticLevel
- fun
->u
.i
.script
->staticLevel
;
2095 if (cg
->skipSpansGenerator(skip
))
2098 return MakeUpvarForEval(pn
, cg
);
2103 if (dn
->pn_dflags
& PND_GVAR
) {
2105 * If this is a global reference from within a function, leave pn_op as
2106 * JSOP_NAME, etc. We could emit JSOP_*GVAR ops within function code if
2107 * only we could depend on the global frame's slots being valid for all
2108 * calls to the function, and if we could equate the atom index in the
2109 * function's atom map for every global name with its frame slot.
2111 if (cg
->inFunction())
2115 * We are optimizing global variables and there may be no pre-existing
2116 * global property named atom when this global script runs. If atom was
2117 * declared via const or var, optimize pn to access fp->vars using the
2118 * appropriate JSOP_*GVAR op.
2120 * FIXME: should be able to optimize global function access too.
2122 JS_ASSERT(dn_kind
== JSDefinition::VAR
|| dn_kind
== JSDefinition::CONST
);
2125 case JSOP_NAME
: op
= JSOP_GETGVAR
; break;
2126 case JSOP_SETNAME
: op
= JSOP_SETGVAR
; break;
2127 case JSOP_SETCONST
: /* NB: no change */ break;
2128 case JSOP_INCNAME
: op
= JSOP_INCGVAR
; break;
2129 case JSOP_NAMEINC
: op
= JSOP_GVARINC
; break;
2130 case JSOP_DECNAME
: op
= JSOP_DECGVAR
; break;
2131 case JSOP_NAMEDEC
: op
= JSOP_GVARDEC
; break;
2132 case JSOP_FORNAME
: /* NB: no change */ break;
2133 case JSOP_DELNAME
: /* NB: no change */ break;
2134 default: JS_NOT_REACHED("gvar");
2137 pn
->pn_cookie
.set(cookie
);
2138 pn
->pn_dflags
|= PND_BOUND
;
2142 uintN level
= cookie
.level();
2143 JS_ASSERT(cg
->staticLevel
>= level
);
2146 * A JSDefinition witnessed as a declaration by the parser cannot be an
2147 * upvar, unless it is the degenerate kind of upvar selected above (in the
2148 * code before the PND_GVAR test) for the special case of compile-and-go
2149 * code generated from eval called from a function, where the eval code
2150 * uses local vars defined in the function. We detect this upvar-for-eval
2151 * case by checking dn's op.
2153 if (PN_OP(dn
) == JSOP_GETUPVAR
) {
2154 JS_ASSERT(cg
->staticLevel
>= level
);
2155 if (op
!= JSOP_NAME
)
2159 JSStackFrame
*caller
= cg
->parser
->callerFrame
;
2162 JS_ASSERT(caller
->script
);
2164 JSTreeContext
*tc
= cg
;
2165 while (tc
->staticLevel
!= level
)
2167 JS_ASSERT(tc
->compiling());
2169 JSCodeGenerator
*evalcg
= (JSCodeGenerator
*) tc
;
2170 JS_ASSERT(evalcg
->compileAndGo());
2171 JS_ASSERT(caller
->fun
&& cg
->parser
->callerVarObj
== evalcg
->scopeChain
);
2174 * Don't generate upvars on the left side of a for loop. See
2175 * bug 470758 and bug 520513.
2177 if (evalcg
->flags
& TCF_IN_FOR_INIT
)
2180 if (cg
->staticLevel
== level
) {
2181 pn
->pn_op
= JSOP_GETUPVAR
;
2182 pn
->pn_cookie
= cookie
;
2183 pn
->pn_dflags
|= PND_BOUND
;
2187 return MakeUpvarForEval(pn
, cg
);
2190 const uintN skip
= cg
->staticLevel
- level
;
2192 JS_ASSERT(cg
->inFunction());
2193 JS_ASSERT_IF(cookie
.slot() != UpvarCookie::CALLEE_SLOT
, cg
->lexdeps
.lookup(atom
));
2194 JS_ASSERT(JOF_OPTYPE(op
) == JOF_ATOM
);
2195 JS_ASSERT(cg
->fun
->u
.i
.skipmin
<= skip
);
2198 * If op is a mutating opcode, this upvar's lookup skips too many levels,
2199 * or the function is heavyweight, we fall back on JSOP_*NAME*.
2201 if (op
!= JSOP_NAME
)
2203 if (level
>= UpvarCookie::UPVAR_LEVEL_LIMIT
)
2205 if (cg
->flags
& TCF_FUN_HEAVYWEIGHT
)
2208 if (FUN_FLAT_CLOSURE(cg
->fun
)) {
2212 * The function we're compiling may not be heavyweight, but if it
2213 * escapes as a funarg, we can't use JSOP_GETUPVAR/JSOP_CALLUPVAR.
2214 * Parser::analyzeFunctions has arranged for this function's
2215 * enclosing functions to be heavyweight, so we can safely stick
2216 * with JSOP_NAME/JSOP_CALLNAME.
2218 if (cg
->funbox
->node
->pn_dflags
& PND_FUNARG
)
2222 * Generator functions may be resumed from any call stack, which
2223 * defeats the display optimization to static link searching used
2224 * by JSOP_{GET,CALL}UPVAR.
2226 if (cg
->skipSpansGenerator(skip
))
2232 ale
= cg
->upvarList
.lookup(atom
);
2234 index
= ALE_INDEX(ale
);
2236 if (!js_AddLocal(cx
, cg
->fun
, atom
, JSLOCAL_UPVAR
))
2239 ale
= cg
->upvarList
.add(cg
->parser
, atom
);
2242 index
= ALE_INDEX(ale
);
2243 JS_ASSERT(index
== cg
->upvarList
.count
- 1);
2245 UpvarCookie
*vector
= cg
->upvarMap
.vector
;
2247 uint32 length
= cg
->lexdeps
.count
;
2249 vector
= (UpvarCookie
*) js_calloc(length
* sizeof *vector
);
2251 JS_ReportOutOfMemory(cx
);
2254 cg
->upvarMap
.vector
= vector
;
2255 cg
->upvarMap
.length
= length
;
2258 uintN slot
= cookie
.slot();
2259 if (slot
!= UpvarCookie::CALLEE_SLOT
&& dn_kind
!= JSDefinition::ARG
) {
2260 JSTreeContext
*tc
= cg
;
2263 } while (tc
->staticLevel
!= level
);
2264 if (tc
->inFunction())
2265 slot
+= tc
->fun
->nargs
;
2268 vector
[index
].set(skip
, slot
);
2272 JS_ASSERT((index
& JS_BITMASK(16)) == index
);
2273 pn
->pn_cookie
.set(0, index
);
2274 pn
->pn_dflags
|= PND_BOUND
;
2279 * We are compiling a function body and may be able to optimize name
2280 * to stack slot. Look for an argument or variable in the function and
2281 * rewrite pn_op and update pn accordingly.
2284 case JSDefinition::UNKNOWN
:
2287 case JSDefinition::LET
:
2289 case JSOP_NAME
: op
= JSOP_GETLOCAL
; break;
2290 case JSOP_SETNAME
: op
= JSOP_SETLOCAL
; break;
2291 case JSOP_INCNAME
: op
= JSOP_INCLOCAL
; break;
2292 case JSOP_NAMEINC
: op
= JSOP_LOCALINC
; break;
2293 case JSOP_DECNAME
: op
= JSOP_DECLOCAL
; break;
2294 case JSOP_NAMEDEC
: op
= JSOP_LOCALDEC
; break;
2295 case JSOP_FORNAME
: op
= JSOP_FORLOCAL
; break;
2296 default: JS_NOT_REACHED("let");
2300 case JSDefinition::ARG
:
2302 case JSOP_NAME
: op
= JSOP_GETARG
; break;
2303 case JSOP_SETNAME
: op
= JSOP_SETARG
; break;
2304 case JSOP_INCNAME
: op
= JSOP_INCARG
; break;
2305 case JSOP_NAMEINC
: op
= JSOP_ARGINC
; break;
2306 case JSOP_DECNAME
: op
= JSOP_DECARG
; break;
2307 case JSOP_NAMEDEC
: op
= JSOP_ARGDEC
; break;
2308 case JSOP_FORNAME
: op
= JSOP_FORARG
; break;
2309 default: JS_NOT_REACHED("arg");
2311 JS_ASSERT(!pn
->isConst());
2314 case JSDefinition::VAR
:
2315 if (PN_OP(dn
) == JSOP_CALLEE
) {
2316 JS_ASSERT(op
!= JSOP_CALLEE
);
2317 JS_ASSERT((cg
->fun
->flags
& JSFUN_LAMBDA
) && atom
== cg
->fun
->atom
);
2320 * Leave pn->pn_op == JSOP_NAME if cg->fun is heavyweight, as we
2321 * cannot be sure cg->fun is not something of the form:
2323 * var ff = (function f(s) { eval(s); return f; });
2325 * where a caller invokes ff("var f = 42"). The result returned for
2326 * such an invocation must be 42, since the callee name is
2327 * lexically bound in an outer declarative environment from the
2328 * function's activation. See jsfun.cpp:call_resolve.
2330 JS_ASSERT(op
!= JSOP_DELNAME
);
2331 if (!(cg
->flags
& TCF_FUN_HEAVYWEIGHT
)) {
2333 pn
->pn_dflags
|= PND_CONST
;
2337 pn
->pn_dflags
|= PND_BOUND
;
2343 JS_ASSERT_IF(dn_kind
!= JSDefinition::FUNCTION
,
2344 dn_kind
== JSDefinition::VAR
||
2345 dn_kind
== JSDefinition::CONST
);
2347 case JSOP_NAME
: op
= JSOP_GETLOCAL
; break;
2348 case JSOP_SETNAME
: op
= JSOP_SETLOCAL
; break;
2349 case JSOP_SETCONST
: op
= JSOP_SETLOCAL
; break;
2350 case JSOP_INCNAME
: op
= JSOP_INCLOCAL
; break;
2351 case JSOP_NAMEINC
: op
= JSOP_LOCALINC
; break;
2352 case JSOP_DECNAME
: op
= JSOP_DECLOCAL
; break;
2353 case JSOP_NAMEDEC
: op
= JSOP_LOCALDEC
; break;
2354 case JSOP_FORNAME
: op
= JSOP_FORLOCAL
; break;
2355 default: JS_NOT_REACHED("local");
2357 JS_ASSERT_IF(dn_kind
== JSDefinition::CONST
, pn
->pn_dflags
& PND_CONST
);
2361 JS_ASSERT(op
!= PN_OP(pn
));
2363 pn
->pn_cookie
.set(0, cookie
.slot());
2364 pn
->pn_dflags
|= PND_BOUND
;
2369 * If pn contains a useful expression, return true with *answer set to true.
2370 * If pn contains a useless expression, return true with *answer set to false.
2371 * Return false on error.
2373 * The caller should initialize *answer to false and invoke this function on
2374 * an expression statement or similar subtree to decide whether the tree could
2375 * produce code that has any side effects. For an expression statement, we
2376 * define useless code as code with no side effects, because the main effect,
2377 * the value left on the stack after the code executes, will be discarded by a
2381 CheckSideEffects(JSContext
*cx
, JSCodeGenerator
*cg
, JSParseNode
*pn
,
2391 switch (pn
->pn_arity
) {
2394 * A named function, contrary to ES3, is no longer useful, because we
2395 * bind its name lexically (using JSOP_CALLEE) instead of creating an
2396 * Object instance and binding a readonly, permanent property in it
2397 * (the object and binding can be detected and hijacked or captured).
2398 * This is a bug fix to ES3; it is fixed in ES3.1 drafts.
2404 if (pn
->pn_op
== JSOP_NOP
||
2405 pn
->pn_op
== JSOP_OR
|| pn
->pn_op
== JSOP_AND
||
2406 pn
->pn_op
== JSOP_STRICTEQ
|| pn
->pn_op
== JSOP_STRICTNE
) {
2408 * Non-operators along with ||, &&, ===, and !== never invoke
2409 * toString or valueOf.
2411 for (pn2
= pn
->pn_head
; pn2
; pn2
= pn2
->pn_next
)
2412 ok
&= CheckSideEffects(cx
, cg
, pn2
, answer
);
2415 * All invocation operations (construct: TOK_NEW, call: TOK_LP)
2416 * are presumed to be useful, because they may have side effects
2417 * even if their main effect (their return value) is discarded.
2419 * TOK_LB binary trees of 3 or more nodes are flattened into lists
2420 * to avoid too much recursion. All such lists must be presumed
2421 * to be useful because each index operation could invoke a getter
2422 * (the JSOP_ARGUMENTS special case below, in the PN_BINARY case,
2423 * does not apply here: arguments[i][j] might invoke a getter).
2425 * Likewise, array and object initialisers may call prototype
2426 * setters (the __defineSetter__ built-in, and writable __proto__
2427 * on Array.prototype create this hazard). Initialiser list nodes
2428 * have JSOP_NEWINIT in their pn_op.
2435 ok
= CheckSideEffects(cx
, cg
, pn
->pn_kid1
, answer
) &&
2436 CheckSideEffects(cx
, cg
, pn
->pn_kid2
, answer
) &&
2437 CheckSideEffects(cx
, cg
, pn
->pn_kid3
, answer
);
2441 if (pn
->pn_type
== TOK_ASSIGN
) {
2443 * Assignment is presumed to be useful, even if the next operation
2444 * is another assignment overwriting this one's ostensible effect,
2445 * because the left operand may be a property with a setter that
2448 * The only exception is assignment of a useless value to a const
2449 * declared in the function currently being compiled.
2452 if (pn2
->pn_type
!= TOK_NAME
) {
2455 if (!BindNameToSlot(cx
, cg
, pn2
))
2457 if (!CheckSideEffects(cx
, cg
, pn
->pn_right
, answer
))
2459 if (!*answer
&& (pn
->pn_op
!= JSOP_NOP
|| !pn2
->isConst()))
2463 if (pn
->pn_op
== JSOP_OR
|| pn
->pn_op
== JSOP_AND
||
2464 pn
->pn_op
== JSOP_STRICTEQ
|| pn
->pn_op
== JSOP_STRICTNE
) {
2466 * ||, &&, ===, and !== do not convert their operands via
2467 * toString or valueOf method calls.
2469 ok
= CheckSideEffects(cx
, cg
, pn
->pn_left
, answer
) &&
2470 CheckSideEffects(cx
, cg
, pn
->pn_right
, answer
);
2473 * We can't easily prove that neither operand ever denotes an
2474 * object with a toString or valueOf method.
2482 switch (pn
->pn_type
) {
2485 switch (pn2
->pn_type
) {
2487 if (!BindNameToSlot(cx
, cg
, pn2
))
2489 if (pn2
->isConst()) {
2495 #if JS_HAS_XML_SUPPORT
2500 /* All these delete addressing modes have effects too. */
2504 ok
= CheckSideEffects(cx
, cg
, pn2
, answer
);
2510 if (pn
->pn_op
== JSOP_NOT
) {
2511 /* ! does not convert its operand via toString or valueOf. */
2512 ok
= CheckSideEffects(cx
, cg
, pn
->pn_kid
, answer
);
2519 * All of TOK_INC, TOK_DEC, TOK_THROW, TOK_YIELD, and TOK_DEFSHARP
2520 * have direct effects. Of the remaining unary-arity node types,
2521 * we can't easily prove that the operand never denotes an object
2522 * with a toString or valueOf method.
2531 * Take care to avoid trying to bind a label name (labels, both for
2532 * statements and property values in object initialisers, have pn_op
2533 * defaulted to JSOP_NOP).
2535 if (pn
->pn_type
== TOK_NAME
&& pn
->pn_op
!= JSOP_NOP
) {
2536 if (!BindNameToSlot(cx
, cg
, pn
))
2538 if (pn
->pn_op
!= JSOP_ARGUMENTS
&& pn
->pn_op
!= JSOP_CALLEE
&&
2539 pn
->pn_cookie
.isFree()) {
2541 * Not an argument or local variable use, and not a use of a
2542 * unshadowed named function expression's given name, so this
2543 * expression could invoke a getter that has side effects.
2548 pn2
= pn
->maybeExpr();
2549 if (pn
->pn_type
== TOK_DOT
) {
2550 if (pn2
->pn_type
== TOK_NAME
&& !BindNameToSlot(cx
, cg
, pn2
))
2552 if (!(pn2
->pn_op
== JSOP_ARGUMENTS
&&
2553 pn
->pn_atom
== cx
->runtime
->atomState
.lengthAtom
)) {
2555 * Any dotted property reference could call a getter, except
2556 * for arguments.length where arguments is unambiguous.
2561 ok
= CheckSideEffects(cx
, cg
, pn2
, answer
);
2565 ok
= CheckSideEffects(cx
, cg
, pn
->pn_tree
, answer
);
2569 if (pn
->pn_type
== TOK_DEBUGGER
)
2577 EmitNameOp(JSContext
*cx
, JSCodeGenerator
*cg
, JSParseNode
*pn
,
2582 if (!BindNameToSlot(cx
, cg
, pn
))
2592 JS_ASSERT(!cg
->funbox
);
2599 op
= JSOP_CALLLOCAL
;
2602 op
= JSOP_CALLUPVAR
;
2605 op
= JSOP_CALLDSLOT
;
2608 JS_ASSERT(op
== JSOP_ARGUMENTS
|| op
== JSOP_CALLEE
);
2613 if (op
== JSOP_ARGUMENTS
|| op
== JSOP_CALLEE
) {
2614 if (js_Emit1(cx
, cg
, op
) < 0)
2616 if (callContext
&& js_Emit1(cx
, cg
, JSOP_NULL
) < 0)
2619 if (!pn
->pn_cookie
.isFree()) {
2620 EMIT_UINT16_IMM_OP(op
, pn
->pn_cookie
.asInteger());
2622 if (!EmitAtomOp(cx
, pn
, op
, cg
))
2630 #if JS_HAS_XML_SUPPORT
2632 EmitXMLName(JSContext
*cx
, JSParseNode
*pn
, JSOp op
, JSCodeGenerator
*cg
)
2637 JS_ASSERT(pn
->pn_type
== TOK_UNARYOP
);
2638 JS_ASSERT(pn
->pn_op
== JSOP_XMLNAME
);
2639 JS_ASSERT(op
== JSOP_XMLNAME
|| op
== JSOP_CALLXMLNAME
);
2642 oldflags
= cg
->flags
;
2643 cg
->flags
&= ~TCF_IN_FOR_INIT
;
2644 if (!js_EmitTree(cx
, cg
, pn2
))
2646 cg
->flags
|= oldflags
& TCF_IN_FOR_INIT
;
2647 if (js_NewSrcNote2(cx
, cg
, SRC_PCBASE
,
2648 CG_OFFSET(cg
) - pn2
->pn_offset
) < 0) {
2652 return js_Emit1(cx
, cg
, op
) >= 0;
2657 EmitSpecialPropOp(JSContext
*cx
, JSParseNode
*pn
, JSOp op
, JSCodeGenerator
*cg
)
2660 * Special case for obj.__proto__ to deoptimize away from fast paths in the
2661 * interpreter and trace recorder, which skip dense array instances by
2662 * going up to Array.prototype before looking up the property name.
2664 JSAtomListElement
*ale
= cg
->atomList
.add(cg
->parser
, pn
->pn_atom
);
2667 if (!EmitIndexOp(cx
, JSOP_QNAMEPART
, ALE_INDEX(ale
), cg
))
2669 if (js_Emit1(cx
, cg
, op
) < 0)
2675 EmitPropOp(JSContext
*cx
, JSParseNode
*pn
, JSOp op
, JSCodeGenerator
*cg
,
2678 JSParseNode
*pn2
, *pndot
, *pnup
, *pndown
;
2681 JS_ASSERT(pn
->pn_arity
== PN_NAME
);
2682 pn2
= pn
->maybeExpr();
2684 /* Special case deoptimization for __proto__. */
2685 if ((op
== JSOP_GETPROP
|| op
== JSOP_CALLPROP
) &&
2686 pn
->pn_atom
== cx
->runtime
->atomState
.protoAtom
) {
2687 if (pn2
&& !js_EmitTree(cx
, cg
, pn2
))
2689 return EmitSpecialPropOp(cx
, pn
, callContext
? JSOP_CALLELEM
: JSOP_GETELEM
, cg
);
2693 JS_ASSERT(pn
->pn_type
== TOK_DOT
);
2694 JS_ASSERT(op
== JSOP_GETPROP
);
2696 } else if (op
== JSOP_GETPROP
&& pn
->pn_type
== TOK_DOT
) {
2697 if (pn2
->pn_op
== JSOP_THIS
) {
2698 if (pn
->pn_atom
!= cx
->runtime
->atomState
.lengthAtom
) {
2699 /* Fast path for gets of |this.foo|. */
2700 return EmitAtomOp(cx
, pn
, JSOP_GETTHISPROP
, cg
);
2702 } else if (pn2
->pn_type
== TOK_NAME
) {
2705 * - arguments.length into JSOP_ARGCNT
2706 * - argname.prop into JSOP_GETARGPROP
2707 * - localname.prop into JSOP_GETLOCALPROP
2708 * but don't do this if the property is 'length' -- prefer to emit
2709 * JSOP_GETARG, etc., and then JSOP_LENGTH.
2711 if (!BindNameToSlot(cx
, cg
, pn2
))
2713 if (pn
->pn_atom
== cx
->runtime
->atomState
.lengthAtom
) {
2714 if (pn2
->pn_op
== JSOP_ARGUMENTS
)
2715 return js_Emit1(cx
, cg
, JSOP_ARGCNT
) >= 0;
2717 switch (pn2
->pn_op
) {
2719 op
= JSOP_GETARGPROP
;
2722 op
= JSOP_GETLOCALPROP
;
2724 JSAtomListElement
*ale
;
2727 ale
= cg
->atomList
.add(cg
->parser
, pn
->pn_atom
);
2730 atomIndex
= ALE_INDEX(ale
);
2731 return EmitSlotIndexOp(cx
, op
, pn2
->pn_cookie
.asInteger(), atomIndex
, cg
);
2741 * If the object operand is also a dotted property reference, reverse the
2742 * list linked via pn_expr temporarily so we can iterate over it from the
2743 * bottom up (reversing again as we go), to avoid excessive recursion.
2745 if (pn2
->pn_type
== TOK_DOT
) {
2748 top
= CG_OFFSET(cg
);
2750 /* Reverse pndot->pn_expr to point up, not down. */
2751 pndot
->pn_offset
= top
;
2752 JS_ASSERT(!pndot
->pn_used
);
2753 pndown
= pndot
->pn_expr
;
2754 pndot
->pn_expr
= pnup
;
2755 if (pndown
->pn_type
!= TOK_DOT
)
2761 /* pndown is a primary expression, not a dotted property reference. */
2762 if (!js_EmitTree(cx
, cg
, pndown
))
2766 /* Walk back up the list, emitting annotated name ops. */
2767 if (js_NewSrcNote2(cx
, cg
, SRC_PCBASE
,
2768 CG_OFFSET(cg
) - pndown
->pn_offset
) < 0) {
2772 /* Special case deoptimization on __proto__, as above. */
2773 if (pndot
->pn_arity
== PN_NAME
&& pndot
->pn_atom
== cx
->runtime
->atomState
.protoAtom
) {
2774 if (!EmitSpecialPropOp(cx
, pndot
, JSOP_GETELEM
, cg
))
2776 } else if (!EmitAtomOp(cx
, pndot
, PN_OP(pndot
), cg
)) {
2780 /* Reverse the pn_expr link again. */
2781 pnup
= pndot
->pn_expr
;
2782 pndot
->pn_expr
= pndown
;
2784 } while ((pndot
= pnup
) != NULL
);
2786 if (!js_EmitTree(cx
, cg
, pn2
))
2790 if (js_NewSrcNote2(cx
, cg
, SRC_PCBASE
,
2791 CG_OFFSET(cg
) - pn2
->pn_offset
) < 0) {
2795 return EmitAtomOp(cx
, pn
, op
, cg
);
2799 EmitElemOp(JSContext
*cx
, JSParseNode
*pn
, JSOp op
, JSCodeGenerator
*cg
)
2802 JSParseNode
*left
, *right
, *next
, ltmp
, rtmp
;
2805 top
= CG_OFFSET(cg
);
2806 if (pn
->pn_arity
== PN_LIST
) {
2807 /* Left-associative operator chain to avoid too much recursion. */
2808 JS_ASSERT(pn
->pn_op
== JSOP_GETELEM
);
2809 JS_ASSERT(pn
->pn_count
>= 3);
2812 next
= left
->pn_next
;
2813 JS_ASSERT(next
!= right
);
2816 * Try to optimize arguments[0][j]... into JSOP_ARGSUB<0> followed by
2817 * one or more index expression and JSOP_GETELEM op pairs.
2819 if (left
->pn_type
== TOK_NAME
&& next
->pn_type
== TOK_NUMBER
) {
2820 if (!BindNameToSlot(cx
, cg
, left
))
2822 if (left
->pn_op
== JSOP_ARGUMENTS
&&
2823 JSDOUBLE_IS_INT32(next
->pn_dval
, &slot
) &&
2824 (jsuint
)slot
< JS_BIT(16)) {
2826 * arguments[i]() requires arguments object as "this".
2827 * Check that we never generates list for that usage.
2829 JS_ASSERT(op
!= JSOP_CALLELEM
|| next
->pn_next
);
2830 left
->pn_offset
= next
->pn_offset
= top
;
2831 EMIT_UINT16_IMM_OP(JSOP_ARGSUB
, (jsatomid
)slot
);
2833 next
= left
->pn_next
;
2838 * Check whether we generated JSOP_ARGSUB, just above, and have only
2839 * one more index expression to emit. Given arguments[0][j], we must
2840 * skip the while loop altogether, falling through to emit code for j
2841 * (in the subtree referenced by right), followed by the annotated op,
2842 * at the bottom of this function.
2844 JS_ASSERT(next
!= right
|| pn
->pn_count
== 3);
2845 if (left
== pn
->pn_head
) {
2846 if (!js_EmitTree(cx
, cg
, left
))
2849 while (next
!= right
) {
2850 if (!js_EmitTree(cx
, cg
, next
))
2852 if (js_NewSrcNote2(cx
, cg
, SRC_PCBASE
, CG_OFFSET(cg
) - top
) < 0)
2854 if (js_Emit1(cx
, cg
, JSOP_GETELEM
) < 0)
2856 next
= next
->pn_next
;
2859 if (pn
->pn_arity
== PN_NAME
) {
2861 * Set left and right so pn appears to be a TOK_LB node, instead
2862 * of a TOK_DOT node. See the TOK_FOR/IN case in js_EmitTree, and
2863 * EmitDestructuringOps nearer below. In the destructuring case,
2864 * the base expression (pn_expr) of the name may be null, which
2865 * means we have to emit a JSOP_BINDNAME.
2867 left
= pn
->maybeExpr();
2870 left
->pn_type
= TOK_STRING
;
2871 left
->pn_op
= JSOP_BINDNAME
;
2872 left
->pn_arity
= PN_NULLARY
;
2873 left
->pn_pos
= pn
->pn_pos
;
2874 left
->pn_atom
= pn
->pn_atom
;
2877 right
->pn_type
= TOK_STRING
;
2878 right
->pn_op
= js_IsIdentifier(ATOM_TO_STRING(pn
->pn_atom
))
2881 right
->pn_arity
= PN_NULLARY
;
2882 right
->pn_pos
= pn
->pn_pos
;
2883 right
->pn_atom
= pn
->pn_atom
;
2885 JS_ASSERT(pn
->pn_arity
== PN_BINARY
);
2887 right
= pn
->pn_right
;
2890 /* Try to optimize arguments[0] (e.g.) into JSOP_ARGSUB<0>. */
2891 if (op
== JSOP_GETELEM
&&
2892 left
->pn_type
== TOK_NAME
&&
2893 right
->pn_type
== TOK_NUMBER
) {
2894 if (!BindNameToSlot(cx
, cg
, left
))
2896 if (left
->pn_op
== JSOP_ARGUMENTS
&&
2897 JSDOUBLE_IS_INT32(right
->pn_dval
, &slot
) &&
2898 (jsuint
)slot
< JS_BIT(16)) {
2899 left
->pn_offset
= right
->pn_offset
= top
;
2900 EMIT_UINT16_IMM_OP(JSOP_ARGSUB
, (jsatomid
)slot
);
2905 if (!js_EmitTree(cx
, cg
, left
))
2909 /* The right side of the descendant operator is implicitly quoted. */
2910 JS_ASSERT(op
!= JSOP_DESCENDANTS
|| right
->pn_type
!= TOK_STRING
||
2911 right
->pn_op
== JSOP_QNAMEPART
);
2912 if (!js_EmitTree(cx
, cg
, right
))
2914 if (js_NewSrcNote2(cx
, cg
, SRC_PCBASE
, CG_OFFSET(cg
) - top
) < 0)
2916 return js_Emit1(cx
, cg
, op
) >= 0;
2920 EmitNumberOp(JSContext
*cx
, jsdouble dval
, JSCodeGenerator
*cg
)
2927 if (JSDOUBLE_IS_INT32(dval
, &ival
)) {
2929 return js_Emit1(cx
, cg
, JSOP_ZERO
) >= 0;
2931 return js_Emit1(cx
, cg
, JSOP_ONE
) >= 0;
2932 if ((jsint
)(int8
)ival
== ival
)
2933 return js_Emit2(cx
, cg
, JSOP_INT8
, (jsbytecode
)(int8
)ival
) >= 0;
2936 if (u
< JS_BIT(16)) {
2937 EMIT_UINT16_IMM_OP(JSOP_UINT16
, u
);
2938 } else if (u
< JS_BIT(24)) {
2939 off
= js_EmitN(cx
, cg
, JSOP_UINT24
, 3);
2942 pc
= CG_CODE(cg
, off
);
2945 off
= js_EmitN(cx
, cg
, JSOP_INT32
, 4);
2948 pc
= CG_CODE(cg
, off
);
2949 SET_INT32(pc
, ival
);
2954 if (!cg
->constList
.append(DoubleValue(dval
)))
2957 return EmitIndexOp(cx
, JSOP_DOUBLE
, cg
->constList
.length() - 1, cg
);
2961 * To avoid bloating all parse nodes for the special case of switch, values are
2962 * allocated in the temp pool and pointed to by the parse node. These values
2963 * are not currently recycled (like parse nodes) and the temp pool is only
2964 * flushed at the end of compiling a script, so these values are technically
2965 * leaked. This would only be a problem for scripts containing a large number
2966 * of large switches, which seems unlikely.
2969 AllocateSwitchConstant(JSContext
*cx
)
2972 JS_ARENA_ALLOCATE_TYPE(pv
, Value
, &cx
->tempPool
);
2977 EmitSwitch(JSContext
*cx
, JSCodeGenerator
*cg
, JSParseNode
*pn
,
2978 JSStmtInfo
*stmtInfo
)
2981 JSBool ok
, hasDefault
, constPropagated
;
2982 ptrdiff_t top
, off
, defaultOffset
;
2983 JSParseNode
*pn2
, *pn3
, *pn4
;
2984 uint32 caseCount
, tableLength
;
2985 JSParseNode
**table
;
2986 int32_t i
, low
, high
;
2987 JSAtomListElement
*ale
;
2989 size_t switchSize
, tableSize
;
2990 jsbytecode
*pc
, *savepc
;
2991 #if JS_HAS_BLOCK_SCOPE
2995 /* Try for most optimal, fall back if not dense ints, and per ECMAv2. */
2996 switchOp
= JSOP_TABLESWITCH
;
2998 hasDefault
= constPropagated
= JS_FALSE
;
3002 * If the switch contains let variables scoped by its body, model the
3003 * resulting block on the stack first, before emitting the discriminant's
3004 * bytecode (in case the discriminant contains a stack-model dependency
3005 * such as a let expression).
3008 #if JS_HAS_BLOCK_SCOPE
3009 if (pn2
->pn_type
== TOK_LEXICALSCOPE
) {
3011 * Push the body's block scope before discriminant code-gen for proper
3012 * static block scope linkage in case the discriminant contains a let
3013 * expression. The block's locals must lie under the discriminant on
3014 * the stack so that case-dispatch bytecodes can find the discriminant
3017 count
= OBJ_BLOCK_COUNT(cx
, pn2
->pn_objbox
->object
);
3018 js_PushBlockScope(cg
, stmtInfo
, pn2
->pn_objbox
->object
, -1);
3019 stmtInfo
->type
= STMT_SWITCH
;
3021 /* Emit JSOP_ENTERBLOCK before code to evaluate the discriminant. */
3022 if (!EmitEnterBlock(cx
, pn2
, cg
))
3026 * Pop the switch's statement info around discriminant code-gen. Note
3027 * how this leaves cg->blockChain referencing the switch's
3028 * block scope object, which is necessary for correct block parenting
3029 * in the case where the discriminant contains a let expression.
3031 cg
->topStmt
= stmtInfo
->down
;
3032 cg
->topScopeStmt
= stmtInfo
->downScope
;
3042 * Emit code for the discriminant first (or nearly first, in the case of a
3043 * switch whose body is a block scope).
3045 if (!js_EmitTree(cx
, cg
, pn
->pn_left
))
3048 /* Switch bytecodes run from here till end of final case. */
3049 top
= CG_OFFSET(cg
);
3050 #if !JS_HAS_BLOCK_SCOPE
3051 js_PushStatement(cg
, stmtInfo
, STMT_SWITCH
, top
);
3053 if (pn2
->pn_type
== TOK_LC
) {
3054 js_PushStatement(cg
, stmtInfo
, STMT_SWITCH
, top
);
3056 /* Re-push the switch's statement info record. */
3057 cg
->topStmt
= cg
->topScopeStmt
= stmtInfo
;
3059 /* Set the statement info record's idea of top. */
3060 stmtInfo
->update
= top
;
3062 /* Advance pn2 to refer to the switch case list. */
3067 caseCount
= pn2
->pn_count
;
3071 if (caseCount
== 0 ||
3073 (hasDefault
= (pn2
->pn_head
->pn_type
== TOK_DEFAULT
)))) {
3078 #define INTMAP_LENGTH 256
3079 jsbitmap intmap_space
[INTMAP_LENGTH
];
3080 jsbitmap
*intmap
= NULL
;
3081 int32 intmap_bitlen
= 0;
3083 low
= JSVAL_INT_MAX
;
3084 high
= JSVAL_INT_MIN
;
3086 for (pn3
= pn2
->pn_head
; pn3
; pn3
= pn3
->pn_next
) {
3087 if (pn3
->pn_type
== TOK_DEFAULT
) {
3088 hasDefault
= JS_TRUE
;
3089 caseCount
--; /* one of the "cases" was the default */
3093 JS_ASSERT(pn3
->pn_type
== TOK_CASE
);
3094 if (switchOp
== JSOP_CONDSWITCH
)
3098 while (pn4
->pn_type
== TOK_RP
)
3102 switch (pn4
->pn_type
) {
3104 constVal
.setNumber(pn4
->pn_dval
);
3107 constVal
.setString(ATOM_TO_STRING(pn4
->pn_atom
));
3110 if (!pn4
->maybeExpr()) {
3111 ok
= LookupCompileTimeConstant(cx
, cg
, pn4
->pn_atom
, &constVal
);
3114 if (!constVal
.isMagic(JS_NO_CONSTANT
)) {
3115 if (constVal
.isObject()) {
3117 * XXX JSOP_LOOKUPSWITCH does not support const-
3118 * propagated object values, see bug 407186.
3120 switchOp
= JSOP_CONDSWITCH
;
3123 constPropagated
= JS_TRUE
;
3129 if (pn4
->pn_op
== JSOP_TRUE
) {
3130 constVal
.setBoolean(true);
3133 if (pn4
->pn_op
== JSOP_FALSE
) {
3134 constVal
.setBoolean(false);
3137 if (pn4
->pn_op
== JSOP_NULL
) {
3143 switchOp
= JSOP_CONDSWITCH
;
3146 JS_ASSERT(constVal
.isPrimitive());
3148 pn3
->pn_pval
= AllocateSwitchConstant(cx
);
3149 if (!pn3
->pn_pval
) {
3154 *pn3
->pn_pval
= constVal
;
3156 if (switchOp
!= JSOP_TABLESWITCH
)
3158 if (!pn3
->pn_pval
->isInt32()) {
3159 switchOp
= JSOP_LOOKUPSWITCH
;
3162 i
= pn3
->pn_pval
->toInt32();
3163 if ((jsuint
)(i
+ (jsint
)JS_BIT(15)) >= (jsuint
)JS_BIT(16)) {
3164 switchOp
= JSOP_LOOKUPSWITCH
;
3173 * Check for duplicates, which require a JSOP_LOOKUPSWITCH.
3174 * We bias i by 65536 if it's negative, and hope that's a rare
3175 * case (because it requires a malloc'd bitmap).
3179 if (i
>= intmap_bitlen
) {
3181 i
< (INTMAP_LENGTH
<< JS_BITS_PER_WORD_LOG2
)) {
3182 intmap
= intmap_space
;
3183 intmap_bitlen
= INTMAP_LENGTH
<< JS_BITS_PER_WORD_LOG2
;
3185 /* Just grab 8K for the worst-case bitmap. */
3186 intmap_bitlen
= JS_BIT(16);
3187 intmap
= (jsbitmap
*)
3188 cx
->malloc((JS_BIT(16) >> JS_BITS_PER_WORD_LOG2
)
3189 * sizeof(jsbitmap
));
3191 JS_ReportOutOfMemory(cx
);
3195 memset(intmap
, 0, intmap_bitlen
>> JS_BITS_PER_BYTE_LOG2
);
3197 if (JS_TEST_BIT(intmap
, i
)) {
3198 switchOp
= JSOP_LOOKUPSWITCH
;
3201 JS_SET_BIT(intmap
, i
);
3205 if (intmap
&& intmap
!= intmap_space
)
3211 * Compute table length and select lookup instead if overlarge or
3212 * more than half-sparse.
3214 if (switchOp
== JSOP_TABLESWITCH
) {
3215 tableLength
= (uint32
)(high
- low
+ 1);
3216 if (tableLength
>= JS_BIT(16) || tableLength
> 2 * caseCount
)
3217 switchOp
= JSOP_LOOKUPSWITCH
;
3218 } else if (switchOp
== JSOP_LOOKUPSWITCH
) {
3220 * Lookup switch supports only atom indexes below 64K limit.
3221 * Conservatively estimate the maximum possible index during
3222 * switch generation and use conditional switch if it exceeds
3225 if (caseCount
+ cg
->constList
.length() > JS_BIT(16))
3226 switchOp
= JSOP_CONDSWITCH
;
3231 * Emit a note with two offsets: first tells total switch code length,
3232 * second tells offset to first JSOP_CASE if condswitch.
3234 noteIndex
= js_NewSrcNote3(cx
, cg
, SRC_SWITCH
, 0, 0);
3238 if (switchOp
== JSOP_CONDSWITCH
) {
3240 * 0 bytes of immediate for unoptimized ECMAv2 switch.
3243 } else if (switchOp
== JSOP_TABLESWITCH
) {
3245 * 3 offsets (len, low, high) before the table, 1 per entry.
3247 switchSize
= (size_t)(JUMP_OFFSET_LEN
* (3 + tableLength
));
3250 * JSOP_LOOKUPSWITCH:
3251 * 1 offset (len) and 1 atom index (npairs) before the table,
3252 * 1 atom index and 1 jump offset per entry.
3254 switchSize
= (size_t)(JUMP_OFFSET_LEN
+ INDEX_LEN
+
3255 (INDEX_LEN
+ JUMP_OFFSET_LEN
) * caseCount
);
3259 * Emit switchOp followed by switchSize bytes of jump or lookup table.
3261 * If switchOp is JSOP_LOOKUPSWITCH or JSOP_TABLESWITCH, it is crucial
3262 * to emit the immediate operand(s) by which bytecode readers such as
3263 * BuildSpanDepTable discover the length of the switch opcode *before*
3264 * calling js_SetJumpOffset (which may call BuildSpanDepTable). It's
3265 * also important to zero all unknown jump offset immediate operands,
3266 * so they can be converted to span dependencies with null targets to
3267 * be computed later (js_EmitN zeros switchSize bytes after switchOp).
3269 if (js_EmitN(cx
, cg
, switchOp
, switchSize
) < 0)
3273 if (switchOp
== JSOP_CONDSWITCH
) {
3274 intN caseNoteIndex
= -1;
3275 JSBool beforeCases
= JS_TRUE
;
3277 /* Emit code for evaluating cases and jumping to case statements. */
3278 for (pn3
= pn2
->pn_head
; pn3
; pn3
= pn3
->pn_next
) {
3280 if (pn4
&& !js_EmitTree(cx
, cg
, pn4
))
3282 if (caseNoteIndex
>= 0) {
3283 /* off is the previous JSOP_CASE's bytecode offset. */
3284 if (!js_SetSrcNoteOffset(cx
, cg
, (uintN
)caseNoteIndex
, 0,
3285 CG_OFFSET(cg
) - off
)) {
3290 JS_ASSERT(pn3
->pn_type
== TOK_DEFAULT
);
3293 caseNoteIndex
= js_NewSrcNote2(cx
, cg
, SRC_PCDELTA
, 0);
3294 if (caseNoteIndex
< 0)
3296 off
= EmitJump(cx
, cg
, JSOP_CASE
, 0);
3299 pn3
->pn_offset
= off
;
3301 uintN noteCount
, noteCountDelta
;
3303 /* Switch note's second offset is to first JSOP_CASE. */
3304 noteCount
= CG_NOTE_COUNT(cg
);
3305 if (!js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 1,
3309 noteCountDelta
= CG_NOTE_COUNT(cg
) - noteCount
;
3310 if (noteCountDelta
!= 0)
3311 caseNoteIndex
+= noteCountDelta
;
3312 beforeCases
= JS_FALSE
;
3317 * If we didn't have an explicit default (which could fall in between
3318 * cases, preventing us from fusing this js_SetSrcNoteOffset with the
3319 * call in the loop above), link the last case to the implicit default
3320 * for the decompiler.
3323 caseNoteIndex
>= 0 &&
3324 !js_SetSrcNoteOffset(cx
, cg
, (uintN
)caseNoteIndex
, 0,
3325 CG_OFFSET(cg
) - off
)) {
3329 /* Emit default even if no explicit default statement. */
3330 defaultOffset
= EmitJump(cx
, cg
, JSOP_DEFAULT
, 0);
3331 if (defaultOffset
< 0)
3334 pc
= CG_CODE(cg
, top
+ JUMP_OFFSET_LEN
);
3336 if (switchOp
== JSOP_TABLESWITCH
) {
3337 /* Fill in switch bounds, which we know fit in 16-bit offsets. */
3338 SET_JUMP_OFFSET(pc
, low
);
3339 pc
+= JUMP_OFFSET_LEN
;
3340 SET_JUMP_OFFSET(pc
, high
);
3341 pc
+= JUMP_OFFSET_LEN
;
3344 * Use malloc to avoid arena bloat for programs with many switches.
3345 * We free table if non-null at label out, so all control flow must
3346 * exit this function through goto out or goto bad.
3348 if (tableLength
!= 0) {
3349 tableSize
= (size_t)tableLength
* sizeof *table
;
3350 table
= (JSParseNode
**) cx
->malloc(tableSize
);
3353 memset(table
, 0, tableSize
);
3354 for (pn3
= pn2
->pn_head
; pn3
; pn3
= pn3
->pn_next
) {
3355 if (pn3
->pn_type
== TOK_DEFAULT
)
3357 i
= pn3
->pn_pval
->toInt32();
3359 JS_ASSERT((uint32
)i
< tableLength
);
3364 JS_ASSERT(switchOp
== JSOP_LOOKUPSWITCH
);
3366 /* Fill in the number of cases. */
3367 SET_INDEX(pc
, caseCount
);
3372 * After this point, all control flow involving JSOP_TABLESWITCH
3373 * must set ok and goto out to exit this function. To keep things
3374 * simple, all switchOp cases exit that way.
3376 MUST_FLOW_THROUGH("out");
3379 * We have already generated at least one big jump so we must
3380 * explicitly add span dependencies for the switch jumps. When
3381 * called below, js_SetJumpOffset can only do it when patching
3382 * the first big jump or when cg->spanDeps is null.
3384 if (!AddSwitchSpanDeps(cx
, cg
, CG_CODE(cg
, top
)))
3388 if (constPropagated
) {
3390 * Skip switchOp, as we are not setting jump offsets in the two
3391 * for loops below. We'll restore CG_NEXT(cg) from savepc after,
3392 * unless there was an error.
3394 savepc
= CG_NEXT(cg
);
3395 CG_NEXT(cg
) = pc
+ 1;
3396 if (switchOp
== JSOP_TABLESWITCH
) {
3397 for (i
= 0; i
< (jsint
)tableLength
; i
++) {
3400 (pn4
= pn3
->pn_left
) != NULL
&&
3401 pn4
->pn_type
== TOK_NAME
) {
3402 /* Note a propagated constant with the const's name. */
3403 JS_ASSERT(!pn4
->maybeExpr());
3404 ale
= cg
->atomList
.add(cg
->parser
, pn4
->pn_atom
);
3408 if (js_NewSrcNote2(cx
, cg
, SRC_LABEL
, (ptrdiff_t)
3409 ALE_INDEX(ale
)) < 0) {
3413 pc
+= JUMP_OFFSET_LEN
;
3416 for (pn3
= pn2
->pn_head
; pn3
; pn3
= pn3
->pn_next
) {
3418 if (pn4
&& pn4
->pn_type
== TOK_NAME
) {
3419 /* Note a propagated constant with the const's name. */
3420 JS_ASSERT(!pn4
->maybeExpr());
3421 ale
= cg
->atomList
.add(cg
->parser
, pn4
->pn_atom
);
3425 if (js_NewSrcNote2(cx
, cg
, SRC_LABEL
, (ptrdiff_t)
3426 ALE_INDEX(ale
)) < 0) {
3430 pc
+= INDEX_LEN
+ JUMP_OFFSET_LEN
;
3433 CG_NEXT(cg
) = savepc
;
3437 /* Emit code for each case's statements, copying pn_offset up to pn3. */
3438 for (pn3
= pn2
->pn_head
; pn3
; pn3
= pn3
->pn_next
) {
3439 if (switchOp
== JSOP_CONDSWITCH
&& pn3
->pn_type
!= TOK_DEFAULT
)
3440 CHECK_AND_SET_JUMP_OFFSET_AT_CUSTOM(cx
, cg
, pn3
->pn_offset
, goto bad
);
3441 pn4
= pn3
->pn_right
;
3442 ok
= js_EmitTree(cx
, cg
, pn4
);
3445 pn3
->pn_offset
= pn4
->pn_offset
;
3446 if (pn3
->pn_type
== TOK_DEFAULT
)
3447 off
= pn3
->pn_offset
- top
;
3451 /* If no default case, offset for default is to end of switch. */
3452 off
= CG_OFFSET(cg
) - top
;
3455 /* We better have set "off" by now. */
3456 JS_ASSERT(off
!= -1);
3458 /* Set the default offset (to end of switch if no default). */
3459 if (switchOp
== JSOP_CONDSWITCH
) {
3461 JS_ASSERT(defaultOffset
!= -1);
3462 ok
= js_SetJumpOffset(cx
, cg
, CG_CODE(cg
, defaultOffset
),
3463 off
- (defaultOffset
- top
));
3467 pc
= CG_CODE(cg
, top
);
3468 ok
= js_SetJumpOffset(cx
, cg
, pc
, off
);
3471 pc
+= JUMP_OFFSET_LEN
;
3474 /* Set the SRC_SWITCH note's offset operand to tell end of switch. */
3475 off
= CG_OFFSET(cg
) - top
;
3476 ok
= js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 0, off
);
3480 if (switchOp
== JSOP_TABLESWITCH
) {
3481 /* Skip over the already-initialized switch bounds. */
3482 pc
+= 2 * JUMP_OFFSET_LEN
;
3484 /* Fill in the jump table, if there is one. */
3485 for (i
= 0; i
< (jsint
)tableLength
; i
++) {
3487 off
= pn3
? pn3
->pn_offset
- top
: 0;
3488 ok
= js_SetJumpOffset(cx
, cg
, pc
, off
);
3491 pc
+= JUMP_OFFSET_LEN
;
3493 } else if (switchOp
== JSOP_LOOKUPSWITCH
) {
3494 /* Skip over the already-initialized number of cases. */
3497 for (pn3
= pn2
->pn_head
; pn3
; pn3
= pn3
->pn_next
) {
3498 if (pn3
->pn_type
== TOK_DEFAULT
)
3500 if (!cg
->constList
.append(*pn3
->pn_pval
))
3502 SET_INDEX(pc
, cg
->constList
.length() - 1);
3505 off
= pn3
->pn_offset
- top
;
3506 ok
= js_SetJumpOffset(cx
, cg
, pc
, off
);
3509 pc
+= JUMP_OFFSET_LEN
;
3517 ok
= js_PopStatementCG(cx
, cg
);
3519 #if JS_HAS_BLOCK_SCOPE
3520 if (ok
&& pn
->pn_right
->pn_type
== TOK_LEXICALSCOPE
)
3521 EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK
, count
);
3532 js_EmitFunctionScript(JSContext
*cx
, JSCodeGenerator
*cg
, JSParseNode
*body
)
3534 if (cg
->flags
& TCF_FUN_IS_GENERATOR
) {
3535 /* JSOP_GENERATOR must be the first instruction. */
3536 CG_SWITCH_TO_PROLOG(cg
);
3537 JS_ASSERT(CG_NEXT(cg
) == CG_BASE(cg
));
3538 if (js_Emit1(cx
, cg
, JSOP_GENERATOR
) < 0)
3540 CG_SWITCH_TO_MAIN(cg
);
3543 * Emit a trace hint opcode only if not in a generator, since generators
3544 * are not yet traced and both want to be the first instruction.
3546 if (js_Emit1(cx
, cg
, JSOP_TRACE
) < 0)
3550 if (cg
->flags
& TCF_FUN_UNBRAND_THIS
) {
3551 if (js_Emit1(cx
, cg
, JSOP_UNBRANDTHIS
) < 0)
3555 return js_EmitTree(cx
, cg
, body
) &&
3556 js_Emit1(cx
, cg
, JSOP_STOP
) >= 0 &&
3557 js_NewScriptFromCG(cx
, cg
);
3560 /* A macro for inlining at the top of js_EmitTree (whence it came). */
3561 #define UPDATE_LINE_NUMBER_NOTES(cx, cg, line) \
3563 uintN line_ = (line); \
3564 uintN delta_ = line_ - CG_CURRENT_LINE(cg); \
3565 if (delta_ != 0) { \
3567 * Encode any change in the current source line number by using \
3568 * either several SRC_NEWLINE notes or just one SRC_SETLINE note, \
3569 * whichever consumes less space. \
3571 * NB: We handle backward line number deltas (possible with for \
3572 * loops where the update part is emitted after the body, but its \
3573 * line number is <= any line number in the body) here by letting \
3574 * unsigned delta_ wrap to a very large number, which triggers a \
3577 CG_CURRENT_LINE(cg) = line_; \
3578 if (delta_ >= (uintN)(2 + ((line_ > SN_3BYTE_OFFSET_MASK)<<1))) { \
3579 if (js_NewSrcNote2(cx, cg, SRC_SETLINE, (ptrdiff_t)line_) < 0)\
3583 if (js_NewSrcNote(cx, cg, SRC_NEWLINE) < 0) \
3585 } while (--delta_ != 0); \
3590 /* A function, so that we avoid macro-bloating all the other callsites. */
3592 UpdateLineNumberNotes(JSContext
*cx
, JSCodeGenerator
*cg
, uintN line
)
3594 UPDATE_LINE_NUMBER_NOTES(cx
, cg
, line
);
3599 MaybeEmitVarDecl(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp prologOp
,
3600 JSParseNode
*pn
, jsatomid
*result
)
3603 JSAtomListElement
*ale
;
3605 if (!pn
->pn_cookie
.isFree()) {
3606 atomIndex
= (jsatomid
) pn
->pn_cookie
.slot();
3608 ale
= cg
->atomList
.add(cg
->parser
, pn
->pn_atom
);
3611 atomIndex
= ALE_INDEX(ale
);
3614 if (JOF_OPTYPE(pn
->pn_op
) == JOF_ATOM
&&
3615 (!cg
->inFunction() || (cg
->flags
& TCF_FUN_HEAVYWEIGHT
))) {
3616 CG_SWITCH_TO_PROLOG(cg
);
3617 if (!UpdateLineNumberNotes(cx
, cg
, pn
->pn_pos
.begin
.lineno
))
3619 EMIT_INDEX_OP(prologOp
, atomIndex
);
3620 CG_SWITCH_TO_MAIN(cg
);
3624 *result
= atomIndex
;
3628 #if JS_HAS_DESTRUCTURING
3631 (*DestructuringDeclEmitter
)(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp prologOp
,
3635 EmitDestructuringDecl(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp prologOp
,
3638 JS_ASSERT(pn
->pn_type
== TOK_NAME
);
3639 if (!BindNameToSlot(cx
, cg
, pn
))
3642 JS_ASSERT(PN_OP(pn
) != JSOP_ARGUMENTS
&& PN_OP(pn
) != JSOP_CALLEE
);
3643 return MaybeEmitVarDecl(cx
, cg
, prologOp
, pn
, NULL
);
3647 EmitDestructuringDecls(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp prologOp
,
3650 JSParseNode
*pn2
, *pn3
;
3651 DestructuringDeclEmitter emitter
;
3653 if (pn
->pn_type
== TOK_RB
) {
3654 for (pn2
= pn
->pn_head
; pn2
; pn2
= pn2
->pn_next
) {
3655 if (pn2
->pn_type
== TOK_COMMA
)
3657 emitter
= (pn2
->pn_type
== TOK_NAME
)
3658 ? EmitDestructuringDecl
3659 : EmitDestructuringDecls
;
3660 if (!emitter(cx
, cg
, prologOp
, pn2
))
3664 JS_ASSERT(pn
->pn_type
== TOK_RC
);
3665 for (pn2
= pn
->pn_head
; pn2
; pn2
= pn2
->pn_next
) {
3666 pn3
= pn2
->pn_right
;
3667 emitter
= (pn3
->pn_type
== TOK_NAME
)
3668 ? EmitDestructuringDecl
3669 : EmitDestructuringDecls
;
3670 if (!emitter(cx
, cg
, prologOp
, pn3
))
3678 EmitDestructuringOpsHelper(JSContext
*cx
, JSCodeGenerator
*cg
, JSParseNode
*pn
);
3681 EmitDestructuringLHS(JSContext
*cx
, JSCodeGenerator
*cg
, JSParseNode
*pn
)
3684 * Now emit the lvalue opcode sequence. If the lvalue is a nested
3685 * destructuring initialiser-form, call ourselves to handle it, then
3686 * pop the matched value. Otherwise emit an lvalue bytecode sequence
3687 * ending with a JSOP_ENUMELEM or equivalent op.
3689 if (pn
->pn_type
== TOK_RB
|| pn
->pn_type
== TOK_RC
) {
3690 if (!EmitDestructuringOpsHelper(cx
, cg
, pn
))
3692 if (js_Emit1(cx
, cg
, JSOP_POP
) < 0)
3695 if (pn
->pn_type
== TOK_NAME
) {
3696 if (!BindNameToSlot(cx
, cg
, pn
))
3698 if (pn
->isConst() && !pn
->isInitialized())
3699 return js_Emit1(cx
, cg
, JSOP_POP
) >= 0;
3702 switch (pn
->pn_op
) {
3705 * NB: pn is a PN_NAME node, not a PN_BINARY. Nevertheless,
3706 * we want to emit JSOP_ENUMELEM, which has format JOF_ELEM.
3707 * So here and for JSOP_ENUMCONSTELEM, we use EmitElemOp.
3709 if (!EmitElemOp(cx
, pn
, JSOP_ENUMELEM
, cg
))
3714 if (!EmitElemOp(cx
, pn
, JSOP_ENUMCONSTELEM
, cg
))
3720 jsuint slot
= pn
->pn_cookie
.asInteger();
3721 EMIT_UINT16_IMM_OP(JSOP_SETLOCALPOP
, slot
);
3728 jsuint slot
= pn
->pn_cookie
.asInteger();
3729 EMIT_UINT16_IMM_OP(PN_OP(pn
), slot
);
3730 if (js_Emit1(cx
, cg
, JSOP_POP
) < 0)
3739 top
= CG_OFFSET(cg
);
3740 if (!js_EmitTree(cx
, cg
, pn
))
3742 if (js_NewSrcNote2(cx
, cg
, SRC_PCBASE
, CG_OFFSET(cg
) - top
) < 0)
3744 if (js_Emit1(cx
, cg
, JSOP_ENUMELEM
) < 0)
3758 * Recursive helper for EmitDestructuringOps.
3760 * Given a value to destructure on the stack, walk over an object or array
3761 * initialiser at pn, emitting bytecodes to match property values and store
3762 * them in the lvalues identified by the matched property names.
3765 EmitDestructuringOpsHelper(JSContext
*cx
, JSCodeGenerator
*cg
, JSParseNode
*pn
)
3768 JSParseNode
*pn2
, *pn3
;
3772 intN stackDepth
= cg
->stackDepth
;
3773 JS_ASSERT(stackDepth
!= 0);
3774 JS_ASSERT(pn
->pn_arity
== PN_LIST
);
3775 JS_ASSERT(pn
->pn_type
== TOK_RB
|| pn
->pn_type
== TOK_RC
);
3778 if (pn
->pn_count
== 0) {
3779 /* Emit a DUP;POP sequence for the decompiler. */
3780 return js_Emit1(cx
, cg
, JSOP_DUP
) >= 0 &&
3781 js_Emit1(cx
, cg
, JSOP_POP
) >= 0;
3785 for (pn2
= pn
->pn_head
; pn2
; pn2
= pn2
->pn_next
) {
3787 * Duplicate the value being destructured to use as a reference base.
3788 * If dup is not the first one, annotate it for the decompiler.
3790 if (pn2
!= pn
->pn_head
&& js_NewSrcNote(cx
, cg
, SRC_CONTINUE
) < 0)
3792 if (js_Emit1(cx
, cg
, JSOP_DUP
) < 0)
3796 * Now push the property name currently being matched, which is either
3797 * the array initialiser's current index, or the current property name
3798 * "label" on the left of a colon in the object initialiser. Set pn3
3799 * to the lvalue node, which is in the value-initializing position.
3802 if (pn
->pn_type
== TOK_RB
) {
3803 if (!EmitNumberOp(cx
, index
, cg
))
3807 JS_ASSERT(pn
->pn_type
== TOK_RC
);
3808 JS_ASSERT(pn2
->pn_type
== TOK_COLON
);
3810 if (pn3
->pn_type
== TOK_NUMBER
) {
3812 * If we are emitting an object destructuring initialiser,
3813 * annotate the index op with SRC_INITPROP so we know we are
3814 * not decompiling an array initialiser.
3816 if (js_NewSrcNote(cx
, cg
, SRC_INITPROP
) < 0)
3818 if (!EmitNumberOp(cx
, pn3
->pn_dval
, cg
))
3821 JS_ASSERT(pn3
->pn_type
== TOK_STRING
||
3822 pn3
->pn_type
== TOK_NAME
);
3823 if (!EmitAtomOp(cx
, pn3
, JSOP_GETPROP
, cg
))
3825 doElemOp
= JS_FALSE
;
3827 pn3
= pn2
->pn_right
;
3832 * Ok, get the value of the matching property name. This leaves
3833 * that value on top of the value being destructured, so the stack
3834 * is one deeper than when we started.
3836 if (js_Emit1(cx
, cg
, JSOP_GETELEM
) < 0)
3838 JS_ASSERT(cg
->stackDepth
== stackDepth
+ 1);
3841 /* Nullary comma node makes a hole in the array destructurer. */
3842 if (pn3
->pn_type
== TOK_COMMA
&& pn3
->pn_arity
== PN_NULLARY
) {
3843 JS_ASSERT(pn
->pn_type
== TOK_RB
);
3844 JS_ASSERT(pn2
== pn3
);
3845 if (js_Emit1(cx
, cg
, JSOP_POP
) < 0)
3848 if (!EmitDestructuringLHS(cx
, cg
, pn3
))
3852 JS_ASSERT(cg
->stackDepth
== stackDepth
);
3860 OpToDeclType(JSOp op
)
3864 return SRC_DECL_LET
;
3866 return SRC_DECL_CONST
;
3868 return SRC_DECL_VAR
;
3870 return SRC_DECL_NONE
;
3875 EmitDestructuringOps(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp prologOp
,
3879 * If we're called from a variable declaration, help the decompiler by
3880 * annotating the first JSOP_DUP that EmitDestructuringOpsHelper emits.
3881 * If the destructuring initialiser is empty, our helper will emit a
3882 * JSOP_DUP followed by a JSOP_POP for the decompiler.
3884 if (js_NewSrcNote2(cx
, cg
, SRC_DESTRUCT
, OpToDeclType(prologOp
)) < 0)
3888 * Call our recursive helper to emit the destructuring assignments and
3889 * related stack manipulations.
3891 return EmitDestructuringOpsHelper(cx
, cg
, pn
);
3895 EmitGroupAssignment(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp prologOp
,
3896 JSParseNode
*lhs
, JSParseNode
*rhs
)
3898 jsuint depth
, limit
, i
, nslots
;
3901 depth
= limit
= (uintN
) cg
->stackDepth
;
3902 for (pn
= rhs
->pn_head
; pn
; pn
= pn
->pn_next
) {
3903 if (limit
== JS_BIT(16)) {
3904 ReportCompileErrorNumber(cx
, CG_TS(cg
), rhs
, JSREPORT_ERROR
, JSMSG_ARRAY_INIT_TOO_BIG
);
3908 /* MaybeEmitGroupAssignment won't call us if rhs is holey. */
3909 JS_ASSERT(!(pn
->pn_type
== TOK_COMMA
&& pn
->pn_arity
== PN_NULLARY
));
3910 if (!js_EmitTree(cx
, cg
, pn
))
3915 if (js_NewSrcNote2(cx
, cg
, SRC_GROUPASSIGN
, OpToDeclType(prologOp
)) < 0)
3919 for (pn
= lhs
->pn_head
; pn
; pn
= pn
->pn_next
, ++i
) {
3920 /* MaybeEmitGroupAssignment requires lhs->pn_count <= rhs->pn_count. */
3921 JS_ASSERT(i
< limit
);
3922 jsint slot
= AdjustBlockSlot(cx
, cg
, i
);
3925 EMIT_UINT16_IMM_OP(JSOP_GETLOCAL
, slot
);
3927 if (pn
->pn_type
== TOK_COMMA
&& pn
->pn_arity
== PN_NULLARY
) {
3928 if (js_Emit1(cx
, cg
, JSOP_POP
) < 0)
3931 if (!EmitDestructuringLHS(cx
, cg
, pn
))
3936 nslots
= limit
- depth
;
3937 EMIT_UINT16_IMM_OP(JSOP_POPN
, nslots
);
3938 cg
->stackDepth
= (uintN
) depth
;
3943 * Helper called with pop out param initialized to a JSOP_POP* opcode. If we
3944 * can emit a group assignment sequence, which results in 0 stack depth delta,
3945 * we set *pop to JSOP_NOP so callers can veto emitting pn followed by a pop.
3948 MaybeEmitGroupAssignment(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp prologOp
,
3949 JSParseNode
*pn
, JSOp
*pop
)
3951 JSParseNode
*lhs
, *rhs
;
3953 JS_ASSERT(pn
->pn_type
== TOK_ASSIGN
);
3954 JS_ASSERT(*pop
== JSOP_POP
|| *pop
== JSOP_POPV
);
3957 if (lhs
->pn_type
== TOK_RB
&& rhs
->pn_type
== TOK_RB
&&
3958 !(rhs
->pn_xflags
& PNX_HOLEY
) &&
3959 lhs
->pn_count
<= rhs
->pn_count
) {
3960 if (!EmitGroupAssignment(cx
, cg
, prologOp
, lhs
, rhs
))
3967 #endif /* JS_HAS_DESTRUCTURING */
3970 EmitVariables(JSContext
*cx
, JSCodeGenerator
*cg
, JSParseNode
*pn
,
3971 JSBool inLetHead
, ptrdiff_t *headNoteIndex
)
3973 bool let
, forInVar
, first
;
3974 #if JS_HAS_BLOCK_SCOPE
3975 bool forInLet
, popScope
;
3976 JSStmtInfo
*stmt
, *scopeStmt
;
3978 ptrdiff_t off
, noteIndex
, tmp
;
3979 JSParseNode
*pn2
, *pn3
, *next
;
3984 /* Default in case of JS_HAS_BLOCK_SCOPE early return, below. */
3985 *headNoteIndex
= -1;
3988 * Let blocks and expressions have a parenthesized head in which the new
3989 * scope is not yet open. Initializer evaluation uses the parent node's
3990 * lexical scope. If popScope is true below, then we hide the top lexical
3991 * block from any calls to BindNameToSlot hiding in pn2->pn_expr so that
3992 * it won't find any names in the new let block.
3994 * The same goes for let declarations in the head of any kind of for loop.
3995 * Unlike a let declaration 'let x = i' within a block, where x is hoisted
3996 * to the start of the block, a 'for (let x = i...) ...' loop evaluates i
3997 * in the containing scope, and puts x in the loop body's scope.
3999 let
= (pn
->pn_op
== JSOP_NOP
);
4000 forInVar
= (pn
->pn_xflags
& PNX_FORINVAR
) != 0;
4001 #if JS_HAS_BLOCK_SCOPE
4002 forInLet
= let
&& forInVar
;
4003 popScope
= (inLetHead
|| (let
&& (cg
->flags
& TCF_IN_FOR_INIT
)));
4006 scopeStmt
= cg
->topScopeStmt
;
4009 else stmt
= scopeStmt
= NULL
; /* quell GCC overwarning */
4011 JS_ASSERT(!popScope
|| let
);
4014 off
= noteIndex
= -1;
4015 for (pn2
= pn
->pn_head
; ; pn2
= next
) {
4016 first
= pn2
== pn
->pn_head
;
4017 next
= pn2
->pn_next
;
4019 if (pn2
->pn_type
!= TOK_NAME
) {
4020 #if JS_HAS_DESTRUCTURING
4021 if (pn2
->pn_type
== TOK_RB
|| pn2
->pn_type
== TOK_RC
) {
4023 * Emit variable binding ops, but not destructuring ops.
4024 * The parser (see Variables, jsparse.c) has ensured that
4025 * our caller will be the TOK_FOR/TOK_IN case in js_EmitTree,
4026 * and that case will emit the destructuring code only after
4027 * emitting an enumerating opcode and a branch that tests
4028 * whether the enumeration ended.
4030 JS_ASSERT(forInVar
);
4031 JS_ASSERT(pn
->pn_count
== 1);
4032 if (!EmitDestructuringDecls(cx
, cg
, PN_OP(pn
), pn2
))
4039 * A destructuring initialiser assignment preceded by var will
4040 * never occur to the left of 'in' in a for-in loop. As with 'for
4041 * (var x = i in o)...', this will cause the entire 'var [a, b] =
4042 * i' to be hoisted out of the loop.
4044 JS_ASSERT(pn2
->pn_type
== TOK_ASSIGN
);
4045 JS_ASSERT(!forInVar
);
4048 * To allow the front end to rewrite var f = x; as f = x; when a
4049 * function f(){} precedes the var, detect simple name assignment
4050 * here and initialize the name.
4052 #if !JS_HAS_DESTRUCTURING
4053 JS_ASSERT(pn2
->pn_left
->pn_type
== TOK_NAME
);
4055 if (pn2
->pn_left
->pn_type
== TOK_NAME
)
4058 pn3
= pn2
->pn_right
;
4063 #if JS_HAS_DESTRUCTURING
4064 if (pn
->pn_count
== 1) {
4066 * If this is the only destructuring assignment in the list,
4067 * try to optimize to a group assignment. If we're in a let
4068 * head, pass JSOP_POP rather than the pseudo-prolog JSOP_NOP
4069 * in pn->pn_op, to suppress a second (and misplaced) 'let'.
4071 JS_ASSERT(noteIndex
< 0 && !pn2
->pn_next
);
4073 if (!MaybeEmitGroupAssignment(cx
, cg
,
4074 inLetHead
? JSOP_POP
: PN_OP(pn
),
4078 if (op
== JSOP_NOP
) {
4079 pn
->pn_xflags
= (pn
->pn_xflags
& ~PNX_POPVAR
) | PNX_GROUPINIT
;
4085 if (!EmitDestructuringDecls(cx
, cg
, PN_OP(pn
), pn3
))
4088 if (!js_EmitTree(cx
, cg
, pn2
->pn_right
))
4092 * Veto pn->pn_op if inLetHead to avoid emitting a SRC_DESTRUCT
4093 * that's redundant with respect to the SRC_DECL/SRC_DECL_LET that
4094 * we will emit at the bottom of this function.
4096 if (!EmitDestructuringOps(cx
, cg
,
4097 inLetHead
? JSOP_POP
: PN_OP(pn
),
4106 * Load initializer early to share code above that jumps to do_name.
4107 * NB: if this var redeclares an existing binding, then pn2 is linked
4108 * on its definition's use-chain and pn_expr has been overlayed with
4111 pn3
= pn2
->maybeExpr();
4114 if (!BindNameToSlot(cx
, cg
, pn2
))
4118 if (op
== JSOP_ARGUMENTS
) {
4119 /* JSOP_ARGUMENTS => no initializer */
4120 JS_ASSERT(!pn3
&& !let
);
4123 atomIndex
= 0; /* quell GCC overwarning */
4126 JS_ASSERT(op
!= JSOP_CALLEE
);
4127 JS_ASSERT(!pn2
->pn_cookie
.isFree() || !let
);
4128 if (!MaybeEmitVarDecl(cx
, cg
, PN_OP(pn
), pn2
, &atomIndex
))
4132 JS_ASSERT(!forInVar
);
4133 if (op
== JSOP_SETNAME
) {
4135 EMIT_INDEX_OP(JSOP_BINDNAME
, atomIndex
);
4137 if (pn
->pn_op
== JSOP_DEFCONST
&&
4138 !js_DefineCompileTimeConstant(cx
, cg
, pn2
->pn_atom
, pn3
)) {
4142 #if JS_HAS_BLOCK_SCOPE
4143 /* Evaluate expr in the outer lexical scope if requested. */
4145 cg
->topStmt
= stmt
->down
;
4146 cg
->topScopeStmt
= scopeStmt
->downScope
;
4150 oldflags
= cg
->flags
;
4151 cg
->flags
&= ~TCF_IN_FOR_INIT
;
4152 if (!js_EmitTree(cx
, cg
, pn3
))
4154 cg
->flags
|= oldflags
& TCF_IN_FOR_INIT
;
4156 #if JS_HAS_BLOCK_SCOPE
4159 cg
->topScopeStmt
= scopeStmt
;
4166 * The parser rewrites 'for (var x = i in o)' to hoist 'var x = i' --
4167 * likewise 'for (let x = i in o)' becomes 'i; for (let x in o)' using
4168 * a TOK_SEQ node to make the two statements appear as one. Therefore
4169 * if this declaration is part of a for-in loop head, we do not need to
4170 * emit op or any source note. Our caller, the TOK_FOR/TOK_IN case in
4171 * js_EmitTree, will annotate appropriately.
4173 JS_ASSERT_IF(pn2
->pn_defn
, pn3
== pn2
->pn_expr
);
4175 JS_ASSERT(pn
->pn_count
== 1);
4182 js_NewSrcNote2(cx
, cg
, SRC_DECL
,
4183 (pn
->pn_op
== JSOP_DEFCONST
)
4185 : (pn
->pn_op
== JSOP_DEFVAR
)
4187 : SRC_DECL_LET
) < 0) {
4190 if (op
== JSOP_ARGUMENTS
) {
4191 if (js_Emit1(cx
, cg
, op
) < 0)
4193 } else if (!pn2
->pn_cookie
.isFree()) {
4194 EMIT_UINT16_IMM_OP(op
, atomIndex
);
4196 EMIT_INDEX_OP(op
, atomIndex
);
4199 #if JS_HAS_DESTRUCTURING
4202 tmp
= CG_OFFSET(cg
);
4203 if (noteIndex
>= 0) {
4204 if (!js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 0, tmp
-off
))
4210 noteIndex
= js_NewSrcNote2(cx
, cg
, SRC_PCDELTA
, 0);
4211 if (noteIndex
< 0 || js_Emit1(cx
, cg
, JSOP_POP
) < 0)
4215 /* If this is a let head, emit and return a srcnote on the pop. */
4217 *headNoteIndex
= js_NewSrcNote(cx
, cg
, SRC_DECL
);
4218 if (*headNoteIndex
< 0)
4220 if (!(pn
->pn_xflags
& PNX_POPVAR
))
4221 return js_Emit1(cx
, cg
, JSOP_NOP
) >= 0;
4224 return !(pn
->pn_xflags
& PNX_POPVAR
) || js_Emit1(cx
, cg
, JSOP_POP
) >= 0;
4227 #if defined DEBUG_brendan || defined DEBUG_mrbkap
4229 GettableNoteForNextOp(JSCodeGenerator
*cg
)
4231 ptrdiff_t offset
, target
;
4232 jssrcnote
*sn
, *end
;
4235 target
= CG_OFFSET(cg
);
4236 for (sn
= CG_NOTES(cg
), end
= sn
+ CG_NOTE_COUNT(cg
); sn
< end
;
4238 if (offset
== target
&& SN_IS_GETTABLE(sn
))
4240 offset
+= SN_DELTA(sn
);
4246 /* Top-level named functions need a nop for decompilation. */
4248 EmitFunctionDefNop(JSContext
*cx
, JSCodeGenerator
*cg
, uintN index
)
4250 return js_NewSrcNote2(cx
, cg
, SRC_FUNCDEF
, (ptrdiff_t)index
) >= 0 &&
4251 js_Emit1(cx
, cg
, JSOP_NOP
) >= 0;
4255 EmitNewInit(JSContext
*cx
, JSCodeGenerator
*cg
, JSProtoKey key
, JSParseNode
*pn
, int sharpnum
)
4257 if (js_Emit2(cx
, cg
, JSOP_NEWINIT
, (jsbytecode
) key
) < 0)
4259 #if JS_HAS_SHARP_VARS
4260 if (cg
->hasSharps()) {
4261 if (pn
->pn_count
!= 0)
4262 EMIT_UINT16_IMM_OP(JSOP_SHARPINIT
, cg
->sharpSlotBase
);
4264 EMIT_UINT16PAIR_IMM_OP(JSOP_DEFSHARP
, cg
->sharpSlotBase
, sharpnum
);
4266 JS_ASSERT(sharpnum
< 0);
4273 EmitEndInit(JSContext
*cx
, JSCodeGenerator
*cg
, uint32 count
)
4275 #if JS_HAS_SHARP_VARS
4276 /* Emit an op for sharp array cleanup and decompilation. */
4277 if (cg
->hasSharps() && count
!= 0)
4278 EMIT_UINT16_IMM_OP(JSOP_SHARPINIT
, cg
->sharpSlotBase
);
4280 return js_Emit1(cx
, cg
, JSOP_ENDINIT
) >= 0;
4283 /* See the SRC_FOR source note offsetBias comments later in this file. */
4284 JS_STATIC_ASSERT(JSOP_NOP_LENGTH
== 1);
4285 JS_STATIC_ASSERT(JSOP_POP_LENGTH
== 1);
4288 js_EmitTree(JSContext
*cx
, JSCodeGenerator
*cg
, JSParseNode
*pn
)
4290 JSBool ok
, useful
, wantval
;
4291 JSStmtInfo
*stmt
, stmtInfo
;
4292 ptrdiff_t top
, off
, tmp
, beq
, jmp
;
4293 JSParseNode
*pn2
, *pn3
;
4295 JSAtomListElement
*ale
;
4298 ptrdiff_t noteIndex
;
4299 JSSrcNoteType noteType
;
4304 #if JS_HAS_SHARP_VARS
4308 JS_CHECK_RECURSION(cx
, return JS_FALSE
);
4312 pn
->pn_offset
= top
= CG_OFFSET(cg
);
4314 /* Emit notes to tell the current bytecode's source line number. */
4315 UPDATE_LINE_NUMBER_NOTES(cx
, cg
, pn
->pn_pos
.begin
.lineno
);
4317 switch (pn
->pn_type
) {
4323 #if JS_HAS_XML_SUPPORT
4324 if (pn
->pn_arity
== PN_NULLARY
) {
4325 if (js_Emit1(cx
, cg
, JSOP_GETFUNNS
) < 0)
4331 fun
= (JSFunction
*) pn
->pn_funbox
->object
;
4332 JS_ASSERT(FUN_INTERPRETED(fun
));
4333 if (fun
->u
.i
.script
) {
4335 * This second pass is needed to emit JSOP_NOP with a source note
4336 * for the already-emitted function definition prolog opcode. See
4337 * comments in the TOK_LC case.
4339 JS_ASSERT(pn
->pn_op
== JSOP_NOP
);
4340 JS_ASSERT(cg
->inFunction());
4341 if (!EmitFunctionDefNop(cx
, cg
, pn
->pn_index
))
4346 JS_ASSERT_IF(cx
->options
& JSOPTION_ANONFUNFIX
,
4348 (!pn
->pn_used
&& !pn
->isTopLevel()) ||
4349 (fun
->flags
& JSFUN_LAMBDA
));
4351 JS_ASSERT_IF(pn
->pn_funbox
->tcflags
& TCF_FUN_HEAVYWEIGHT
,
4352 FUN_KIND(fun
) == JSFUN_INTERPRETED
);
4354 /* Generate code for the function's body. */
4355 void *cg2mark
= JS_ARENA_MARK(cg
->codePool
);
4357 JS_ARENA_ALLOCATE_TYPE(cg2space
, JSCodeGenerator
, cg
->codePool
);
4359 js_ReportOutOfScriptQuota(cx
);
4362 JSCodeGenerator
*cg2
=
4363 new (cg2space
) JSCodeGenerator(cg
->parser
,
4364 cg
->codePool
, cg
->notePool
,
4365 pn
->pn_pos
.begin
.lineno
);
4370 cg2
->flags
= pn
->pn_funbox
->tcflags
| TCF_IN_FUNCTION
;
4371 #if JS_HAS_SHARP_VARS
4372 if (cg2
->flags
& TCF_HAS_SHARPS
) {
4373 cg2
->sharpSlotBase
= fun
->sharpSlotBase(cx
);
4374 if (cg2
->sharpSlotBase
< 0)
4379 cg2
->funbox
= pn
->pn_funbox
;
4383 * jsparse.cpp:SetStaticLevel limited static nesting depth to fit in 16
4384 * bits and to reserve the all-ones value, thereby reserving the magic
4385 * FREE_UPVAR_COOKIE value. Note the cg2->staticLevel assignment below.
4387 JS_ASSERT(cg
->staticLevel
< JS_BITMASK(16) - 1);
4388 cg2
->staticLevel
= cg
->staticLevel
+ 1;
4390 /* We measured the max scope depth when we parsed the function. */
4391 JS_SCOPE_DEPTH_METERING(cg2
->maxScopeDepth
= uint16(-1));
4392 if (!js_EmitFunctionScript(cx
, cg2
, pn
->pn_body
))
4395 cg2
->~JSCodeGenerator();
4396 JS_ARENA_RELEASE(cg
->codePool
, cg2mark
);
4401 /* Make the function object a literal in the outer script's pool. */
4402 index
= cg
->objectList
.index(pn
->pn_funbox
);
4404 /* Emit a bytecode pointing to the closure object in its immediate. */
4406 if (op
!= JSOP_NOP
) {
4407 if ((pn
->pn_funbox
->tcflags
& TCF_GENEXP_LAMBDA
) &&
4408 js_NewSrcNote(cx
, cg
, SRC_GENEXP
) < 0) {
4411 EMIT_INDEX_OP(op
, index
);
4416 * For a script we emit the code as we parse. Thus the bytecode for
4417 * top-level functions should go in the prolog to predefine their
4418 * names in the variable object before the already-generated main code
4419 * is executed. This extra work for top-level scripts is not necessary
4420 * when we emit the code for a function. It is fully parsed prior to
4421 * invocation of the emitter and calls to js_EmitTree for function
4422 * definitions can be scheduled before generating the rest of code.
4424 if (!cg
->inFunction()) {
4425 JS_ASSERT(!cg
->topStmt
);
4426 CG_SWITCH_TO_PROLOG(cg
);
4427 op
= FUN_FLAT_CLOSURE(fun
) ? JSOP_DEFFUN_FC
: JSOP_DEFFUN
;
4428 EMIT_INDEX_OP(op
, index
);
4429 CG_SWITCH_TO_MAIN(cg
);
4431 /* Emit NOP for the decompiler. */
4432 if (!EmitFunctionDefNop(cx
, cg
, index
))
4436 JSLocalKind localKind
=
4438 js_LookupLocal(cx
, cg
->fun
, fun
->atom
, &slot
);
4439 JS_ASSERT(localKind
== JSLOCAL_VAR
|| localKind
== JSLOCAL_CONST
);
4440 JS_ASSERT(index
< JS_BIT(20));
4441 pn
->pn_index
= index
;
4442 op
= FUN_FLAT_CLOSURE(fun
) ? JSOP_DEFLOCALFUN_FC
: JSOP_DEFLOCALFUN
;
4443 if (!EmitSlotIndexOp(cx
, op
, slot
, index
, cg
))
4450 ok
= js_EmitTree(cx
, cg
, pn
->last());
4454 JS_ASSERT(cg
->lexdeps
.count
== 0);
4455 JS_ASSERT(pn
->pn_names
.count
!= 0);
4456 cg
->lexdeps
= pn
->pn_names
;
4457 ok
= js_EmitTree(cx
, cg
, pn
->pn_tree
);
4461 /* Initialize so we can detect else-if chains and avoid recursion. */
4462 stmtInfo
.type
= STMT_IF
;
4467 /* Emit code for the condition before pushing stmtInfo. */
4468 if (!js_EmitTree(cx
, cg
, pn
->pn_kid1
))
4470 top
= CG_OFFSET(cg
);
4471 if (stmtInfo
.type
== STMT_IF
) {
4472 js_PushStatement(cg
, &stmtInfo
, STMT_IF
, top
);
4475 * We came here from the goto further below that detects else-if
4476 * chains, so we must mutate stmtInfo back into a STMT_IF record.
4477 * Also (see below for why) we need a note offset for SRC_IF_ELSE
4478 * to help the decompiler. Actually, we need two offsets, one for
4479 * decompiling any else clause and the second for decompiling an
4480 * else-if chain without bracing, overindenting, or incorrectly
4481 * scoping let declarations.
4483 JS_ASSERT(stmtInfo
.type
== STMT_ELSE
);
4484 stmtInfo
.type
= STMT_IF
;
4485 stmtInfo
.update
= top
;
4486 if (!js_SetSrcNoteOffset(cx
, cg
, noteIndex
, 0, jmp
- beq
))
4488 if (!js_SetSrcNoteOffset(cx
, cg
, noteIndex
, 1, top
- beq
))
4492 /* Emit an annotated branch-if-false around the then part. */
4494 noteIndex
= js_NewSrcNote(cx
, cg
, pn3
? SRC_IF_ELSE
: SRC_IF
);
4497 beq
= EmitJump(cx
, cg
, JSOP_IFEQ
, 0);
4501 /* Emit code for the then and optional else parts. */
4502 if (!js_EmitTree(cx
, cg
, pn
->pn_kid2
))
4505 /* Modify stmtInfo so we know we're in the else part. */
4506 stmtInfo
.type
= STMT_ELSE
;
4509 * Emit a JSOP_BACKPATCH op to jump from the end of our then part
4510 * around the else part. The js_PopStatementCG call at the bottom
4511 * of this switch case will fix up the backpatch chain linked from
4514 jmp
= EmitGoto(cx
, cg
, &stmtInfo
, &stmtInfo
.breaks
, NULL
, SRC_NULL
);
4518 /* Ensure the branch-if-false comes here, then emit the else. */
4519 CHECK_AND_SET_JUMP_OFFSET_AT(cx
, cg
, beq
);
4520 if (pn3
->pn_type
== TOK_IF
) {
4525 if (!js_EmitTree(cx
, cg
, pn3
))
4529 * Annotate SRC_IF_ELSE with the offset from branch to jump, for
4530 * the decompiler's benefit. We can't just "back up" from the pc
4531 * of the else clause, because we don't know whether an extended
4532 * jump was required to leap from the end of the then clause over
4535 if (!js_SetSrcNoteOffset(cx
, cg
, noteIndex
, 0, jmp
- beq
))
4538 /* No else part, fixup the branch-if-false to come here. */
4539 CHECK_AND_SET_JUMP_OFFSET_AT(cx
, cg
, beq
);
4541 ok
= js_PopStatementCG(cx
, cg
);
4545 /* Out of line to avoid bloating js_EmitTree's stack frame size. */
4546 ok
= EmitSwitch(cx
, cg
, pn
, &stmtInfo
);
4551 * Minimize bytecodes issued for one or more iterations by jumping to
4552 * the condition below the body and closing the loop if the condition
4553 * is true with a backward branch. For iteration count i:
4555 * i test at the top test at the bottom
4556 * = =============== ==================
4557 * 0 ifeq-pass goto; ifne-fail
4558 * 1 ifeq-fail; goto; ifne-pass goto; ifne-pass; ifne-fail
4559 * 2 2*(ifeq-fail; goto); ifeq-pass goto; 2*ifne-pass; ifne-fail
4561 * N N*(ifeq-fail; goto); ifeq-pass goto; N*ifne-pass; ifne-fail
4563 * SpiderMonkey, pre-mozilla.org, emitted while parsing and so used
4564 * test at the top. When JSParseNode trees were added during the ES3
4565 * work (1998-9), the code generation scheme was not optimized, and
4566 * the decompiler continued to take advantage of the branch and jump
4567 * that bracketed the body. But given the SRC_WHILE note, it is easy
4568 * to support the more efficient scheme.
4570 js_PushStatement(cg
, &stmtInfo
, STMT_WHILE_LOOP
, top
);
4571 noteIndex
= js_NewSrcNote(cx
, cg
, SRC_WHILE
);
4574 jmp
= EmitJump(cx
, cg
, JSOP_GOTO
, 0);
4577 top
= js_Emit1(cx
, cg
, JSOP_TRACE
);
4580 if (!js_EmitTree(cx
, cg
, pn
->pn_right
))
4582 CHECK_AND_SET_JUMP_OFFSET_AT(cx
, cg
, jmp
);
4583 if (!js_EmitTree(cx
, cg
, pn
->pn_left
))
4585 beq
= EmitJump(cx
, cg
, JSOP_IFNE
, top
- CG_OFFSET(cg
));
4588 if (!js_SetSrcNoteOffset(cx
, cg
, noteIndex
, 0, beq
- jmp
))
4590 ok
= js_PopStatementCG(cx
, cg
);
4594 /* Emit an annotated nop so we know to decompile a 'do' keyword. */
4595 noteIndex
= js_NewSrcNote(cx
, cg
, SRC_WHILE
);
4596 if (noteIndex
< 0 || js_Emit1(cx
, cg
, JSOP_NOP
) < 0)
4599 /* Compile the loop body. */
4600 top
= js_Emit1(cx
, cg
, JSOP_TRACE
);
4603 js_PushStatement(cg
, &stmtInfo
, STMT_DO_LOOP
, top
);
4604 if (!js_EmitTree(cx
, cg
, pn
->pn_left
))
4607 /* Set loop and enclosing label update offsets, for continue. */
4610 stmt
->update
= CG_OFFSET(cg
);
4611 } while ((stmt
= stmt
->down
) != NULL
&& stmt
->type
== STMT_LABEL
);
4613 /* Compile the loop condition, now that continues know where to go. */
4614 if (!js_EmitTree(cx
, cg
, pn
->pn_right
))
4618 * Since we use JSOP_IFNE for other purposes as well as for do-while
4619 * loops, we must store 1 + (beq - top) in the SRC_WHILE note offset,
4620 * and the decompiler must get that delta and decompile recursively.
4622 beq
= EmitJump(cx
, cg
, JSOP_IFNE
, top
- CG_OFFSET(cg
));
4625 if (!js_SetSrcNoteOffset(cx
, cg
, noteIndex
, 0, 1 + (beq
- top
)))
4627 ok
= js_PopStatementCG(cx
, cg
);
4631 beq
= 0; /* suppress gcc warnings */
4634 js_PushStatement(cg
, &stmtInfo
, STMT_FOR_LOOP
, top
);
4636 if (pn2
->pn_type
== TOK_IN
) {
4637 /* Set stmtInfo type for later testing. */
4638 stmtInfo
.type
= STMT_FOR_IN_LOOP
;
4641 * If the left part is 'var x', emit code to define x if necessary
4642 * using a prolog opcode, but do not emit a pop. If the left part
4643 * is 'var x = i', emit prolog code to define x if necessary; then
4644 * emit code to evaluate i, assign the result to x, and pop the
4645 * result off the stack.
4647 * All the logic to do this is implemented in the outer switch's
4648 * TOK_VAR case, conditioned on pn_xflags flags set by the parser.
4650 * In the 'for (var x = i in o) ...' case, the js_EmitTree(...pn3)
4651 * called here will generate the proper note for the assignment
4652 * op that sets x = i, hoisting the initialized var declaration
4653 * out of the loop: 'var x = i; for (x in o) ...'.
4655 * In the 'for (var x in o) ...' case, nothing but the prolog op
4656 * (if needed) should be generated here, we must emit the note
4657 * just before the JSOP_FOR* opcode in the switch on pn3->pn_type
4658 * a bit below, so nothing is hoisted: 'for (var x in o) ...'.
4660 * A 'for (let x = i in o)' loop must not be hoisted, since in
4661 * this form the let variable is scoped by the loop body (but not
4662 * the head). The initializer expression i must be evaluated for
4663 * any side effects. So we hoist only i in the let case.
4666 type
= PN_TYPE(pn3
);
4667 cg
->flags
|= TCF_IN_FOR_INIT
;
4668 if (TokenKindIsDecl(type
) && !js_EmitTree(cx
, cg
, pn3
))
4670 cg
->flags
&= ~TCF_IN_FOR_INIT
;
4672 /* Compile the object expression to the right of 'in'. */
4673 if (!js_EmitTree(cx
, cg
, pn2
->pn_right
))
4677 * Emit a bytecode to convert top of stack value to the iterator
4678 * object depending on the loop variant (for-in, for-each-in, or
4679 * destructuring for-in).
4681 JS_ASSERT(pn
->pn_op
== JSOP_ITER
);
4682 if (js_Emit2(cx
, cg
, JSOP_ITER
, (uint8
) pn
->pn_iflags
) < 0)
4685 /* Annotate so the decompiler can find the loop-closing jump. */
4686 noteIndex
= js_NewSrcNote(cx
, cg
, SRC_FOR_IN
);
4691 * Jump down to the loop condition to minimize overhead assuming at
4692 * least one iteration, as the other loop forms do.
4694 jmp
= EmitJump(cx
, cg
, JSOP_GOTO
, 0);
4698 top
= CG_OFFSET(cg
);
4699 SET_STATEMENT_TOP(&stmtInfo
, top
);
4700 if (js_Emit1(cx
, cg
, JSOP_TRACE
) < 0)
4704 intN loopDepth
= cg
->stackDepth
;
4708 * Compile a JSOP_FOR* bytecode based on the left hand side.
4710 * Initialize op to JSOP_SETNAME in case of |for ([a, b] in o)...|
4711 * or similar, to signify assignment, rather than declaration, to
4712 * the decompiler. EmitDestructuringOps takes a prolog bytecode
4713 * parameter and emits the appropriate source note, defaulting to
4714 * assignment, so JSOP_SETNAME is not critical here; many similar
4715 * ops could be used -- just not JSOP_NOP (which means 'let').
4719 #if JS_HAS_BLOCK_SCOPE
4723 JS_ASSERT(pn3
->pn_arity
== PN_LIST
&& pn3
->pn_count
== 1);
4725 #if JS_HAS_DESTRUCTURING
4726 if (pn3
->pn_type
== TOK_ASSIGN
) {
4728 JS_ASSERT(pn3
->pn_type
== TOK_RB
|| pn3
->pn_type
== TOK_RC
);
4730 if (pn3
->pn_type
== TOK_RB
|| pn3
->pn_type
== TOK_RC
) {
4731 op
= PN_OP(pn2
->pn_left
);
4732 goto destructuring_for
;
4735 JS_ASSERT(pn3
->pn_type
== TOK_NAME
);
4741 * Always annotate JSOP_FORLOCAL if given input of the form
4742 * 'for (let x in * o)' -- the decompiler must not hoist the
4743 * 'let x' out of the loop head, or x will be bound in the
4744 * wrong scope. Likewise, but in this case only for the sake
4745 * of higher decompilation fidelity only, do not hoist 'var x'
4746 * when given 'for (var x in o)'.
4749 #if JS_HAS_BLOCK_SCOPE
4752 (type
== TOK_VAR
&& !pn3
->maybeExpr())) &&
4753 js_NewSrcNote2(cx
, cg
, SRC_DECL
,
4756 : SRC_DECL_LET
) < 0) {
4759 if (!pn3
->pn_cookie
.isFree()) {
4762 case JSOP_GETARG
: /* FALL THROUGH */
4763 case JSOP_SETARG
: op
= JSOP_FORARG
; break;
4764 case JSOP_GETGVAR
: /* FALL THROUGH */
4765 case JSOP_SETGVAR
: op
= JSOP_FORNAME
; break;
4766 case JSOP_GETLOCAL
: /* FALL THROUGH */
4767 case JSOP_SETLOCAL
: op
= JSOP_FORLOCAL
; break;
4768 default: JS_ASSERT(0);
4771 pn3
->pn_op
= JSOP_FORNAME
;
4772 if (!BindNameToSlot(cx
, cg
, pn3
))
4776 if (pn3
->isConst()) {
4777 ReportCompileErrorNumber(cx
, CG_TS(cg
), pn3
, JSREPORT_ERROR
,
4778 JSMSG_BAD_FOR_LEFTSIDE
);
4781 if (!pn3
->pn_cookie
.isFree()) {
4782 atomIndex
= (jsatomid
) pn3
->pn_cookie
.asInteger();
4783 EMIT_UINT16_IMM_OP(op
, atomIndex
);
4785 if (!EmitAtomOp(cx
, pn3
, op
, cg
))
4792 * 'for (o.p in q)' can use JSOP_FORPROP only if evaluating 'o'
4793 * has no side effects.
4796 if (!CheckSideEffects(cx
, cg
, pn3
->expr(), &useful
))
4799 if (!EmitPropOp(cx
, pn3
, JSOP_FORPROP
, cg
, JS_FALSE
))
4805 #if JS_HAS_DESTRUCTURING
4809 if (js_Emit1(cx
, cg
, JSOP_FORELEM
) < 0)
4811 JS_ASSERT(cg
->stackDepth
>= 2);
4813 #if JS_HAS_DESTRUCTURING
4814 if (pn3
->pn_type
== TOK_RB
|| pn3
->pn_type
== TOK_RC
) {
4815 if (!EmitDestructuringOps(cx
, cg
, op
, pn3
))
4817 if (js_Emit1(cx
, cg
, JSOP_POP
) < 0)
4821 if (pn3
->pn_type
== TOK_LP
) {
4822 JS_ASSERT(pn3
->pn_op
== JSOP_SETCALL
);
4823 if (!js_EmitTree(cx
, cg
, pn3
))
4825 if (js_Emit1(cx
, cg
, JSOP_ENUMELEM
) < 0)
4828 #if JS_HAS_XML_SUPPORT
4829 if (pn3
->pn_type
== TOK_UNARYOP
) {
4830 JS_ASSERT(pn3
->pn_op
== JSOP_BINDXMLNAME
);
4831 if (!js_EmitTree(cx
, cg
, pn3
))
4833 if (js_Emit1(cx
, cg
, JSOP_ENUMELEM
) < 0)
4837 if (!EmitElemOp(cx
, pn3
, JSOP_ENUMELEM
, cg
))
4842 /* The stack should be balanced around the JSOP_FOR* opcode sequence. */
4843 JS_ASSERT(cg
->stackDepth
== loopDepth
);
4845 /* Set the first srcnote offset so we can find the start of the loop body. */
4846 if (!js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 0, CG_OFFSET(cg
) - jmp
))
4849 /* Emit code for the loop body. */
4850 if (!js_EmitTree(cx
, cg
, pn
->pn_right
))
4853 /* Set loop and enclosing "update" offsets, for continue. */
4856 stmt
->update
= CG_OFFSET(cg
);
4857 } while ((stmt
= stmt
->down
) != NULL
&& stmt
->type
== STMT_LABEL
);
4860 * Fixup the goto that starts the loop to jump down to JSOP_MOREITER.
4862 CHECK_AND_SET_JUMP_OFFSET_AT(cx
, cg
, jmp
);
4863 if (js_Emit1(cx
, cg
, JSOP_MOREITER
) < 0)
4865 beq
= EmitJump(cx
, cg
, JSOP_IFNE
, top
- CG_OFFSET(cg
));
4869 /* Set the second srcnote offset so we can find the closing jump. */
4870 if (!js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 1, beq
- jmp
))
4873 /* C-style for (init; cond; update) ... loop. */
4877 /* No initializer: emit an annotated nop for the decompiler. */
4880 cg
->flags
|= TCF_IN_FOR_INIT
;
4881 #if JS_HAS_DESTRUCTURING
4882 if (pn3
->pn_type
== TOK_ASSIGN
&&
4883 !MaybeEmitGroupAssignment(cx
, cg
, op
, pn3
, &op
)) {
4887 if (op
== JSOP_POP
) {
4888 if (!js_EmitTree(cx
, cg
, pn3
))
4890 if (TokenKindIsDecl(PN_TYPE(pn3
))) {
4892 * Check whether a destructuring-initialized var decl
4893 * was optimized to a group assignment. If so, we do
4894 * not need to emit a pop below, so switch to a nop,
4895 * just for the decompiler.
4897 JS_ASSERT(pn3
->pn_arity
== PN_LIST
);
4898 if (pn3
->pn_xflags
& PNX_GROUPINIT
)
4902 cg
->flags
&= ~TCF_IN_FOR_INIT
;
4906 * NB: the SRC_FOR note has offsetBias 1 (JSOP_{NOP,POP}_LENGTH).
4907 * Use tmp to hold the biased srcnote "top" offset, which differs
4908 * from the top local variable by the length of the JSOP_GOTO{,X}
4909 * emitted in between tmp and top if this loop has a condition.
4911 noteIndex
= js_NewSrcNote(cx
, cg
, SRC_FOR
);
4912 if (noteIndex
< 0 || js_Emit1(cx
, cg
, op
) < 0)
4914 tmp
= CG_OFFSET(cg
);
4917 /* Goto the loop condition, which branches back to iterate. */
4918 jmp
= EmitJump(cx
, cg
, JSOP_GOTO
, 0);
4923 top
= CG_OFFSET(cg
);
4924 SET_STATEMENT_TOP(&stmtInfo
, top
);
4926 /* Emit code for the loop body. */
4927 if (js_Emit1(cx
, cg
, JSOP_TRACE
) < 0)
4929 if (!js_EmitTree(cx
, cg
, pn
->pn_right
))
4932 /* Set the second note offset so we can find the update part. */
4933 JS_ASSERT(noteIndex
!= -1);
4934 if (!js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 1,
4935 CG_OFFSET(cg
) - tmp
)) {
4939 /* Set loop and enclosing "update" offsets, for continue. */
4942 stmt
->update
= CG_OFFSET(cg
);
4943 } while ((stmt
= stmt
->down
) != NULL
&& stmt
->type
== STMT_LABEL
);
4945 /* Check for update code to do before the condition (if any). */
4949 #if JS_HAS_DESTRUCTURING
4950 if (pn3
->pn_type
== TOK_ASSIGN
&&
4951 !MaybeEmitGroupAssignment(cx
, cg
, op
, pn3
, &op
)) {
4955 if (op
== JSOP_POP
&& !js_EmitTree(cx
, cg
, pn3
))
4958 /* Always emit the POP or NOP, to help the decompiler. */
4959 if (js_Emit1(cx
, cg
, op
) < 0)
4962 /* Restore the absolute line number for source note readers. */
4963 off
= (ptrdiff_t) pn
->pn_pos
.end
.lineno
;
4964 if (CG_CURRENT_LINE(cg
) != (uintN
) off
) {
4965 if (js_NewSrcNote2(cx
, cg
, SRC_SETLINE
, off
) < 0)
4967 CG_CURRENT_LINE(cg
) = (uintN
) off
;
4971 /* Set the first note offset so we can find the loop condition. */
4972 if (!js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 0,
4973 CG_OFFSET(cg
) - tmp
)) {
4978 /* Fix up the goto from top to target the loop condition. */
4979 JS_ASSERT(jmp
>= 0);
4980 CHECK_AND_SET_JUMP_OFFSET_AT(cx
, cg
, jmp
);
4982 if (!js_EmitTree(cx
, cg
, pn2
->pn_kid2
))
4986 /* The third note offset helps us find the loop-closing jump. */
4987 if (!js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 2,
4988 CG_OFFSET(cg
) - tmp
)) {
4993 beq
= EmitJump(cx
, cg
, JSOP_IFNE
, top
- CG_OFFSET(cg
));
4997 /* No loop condition -- emit the loop-closing jump. */
4998 jmp
= EmitJump(cx
, cg
, JSOP_GOTO
, top
- CG_OFFSET(cg
));
5004 /* Now fixup all breaks and continues (before for/in's JSOP_ENDITER). */
5005 if (!js_PopStatementCG(cx
, cg
))
5008 if (pn2
->pn_type
== TOK_IN
) {
5009 if (!NewTryNote(cx
, cg
, JSTRY_ITER
, cg
->stackDepth
, top
, CG_OFFSET(cg
)) ||
5010 js_Emit1(cx
, cg
, JSOP_ENDITER
) < 0) {
5020 ale
= cg
->atomList
.add(cg
->parser
, atom
);
5023 while (stmt
->type
!= STMT_LABEL
|| stmt
->label
!= atom
)
5025 noteType
= SRC_BREAK2LABEL
;
5028 while (!STMT_IS_LOOP(stmt
) && stmt
->type
!= STMT_SWITCH
)
5030 noteType
= (stmt
->type
== STMT_SWITCH
) ? SRC_NULL
: SRC_BREAK
;
5033 if (EmitGoto(cx
, cg
, stmt
, &stmt
->breaks
, ale
, noteType
) < 0)
5041 /* Find the loop statement enclosed by the matching label. */
5042 JSStmtInfo
*loop
= NULL
;
5043 ale
= cg
->atomList
.add(cg
->parser
, atom
);
5046 while (stmt
->type
!= STMT_LABEL
|| stmt
->label
!= atom
) {
5047 if (STMT_IS_LOOP(stmt
))
5052 noteType
= SRC_CONT2LABEL
;
5055 while (!STMT_IS_LOOP(stmt
))
5057 noteType
= SRC_CONTINUE
;
5060 if (EmitGoto(cx
, cg
, stmt
, &stmt
->continues
, ale
, noteType
) < 0)
5065 if (!js_EmitTree(cx
, cg
, pn
->pn_left
))
5067 js_PushStatement(cg
, &stmtInfo
, STMT_WITH
, CG_OFFSET(cg
));
5068 if (js_Emit1(cx
, cg
, JSOP_ENTERWITH
) < 0)
5070 if (!js_EmitTree(cx
, cg
, pn
->pn_right
))
5072 if (js_Emit1(cx
, cg
, JSOP_LEAVEWITH
) < 0)
5074 ok
= js_PopStatementCG(cx
, cg
);
5079 ptrdiff_t tryStart
, tryEnd
, catchJump
, finallyStart
;
5081 JSParseNode
*lastCatch
;
5086 * Push stmtInfo to track jumps-over-catches and gosubs-to-finally
5089 * When a finally block is active (STMT_FINALLY in our tree context),
5090 * non-local jumps (including jumps-over-catches) result in a GOSUB
5091 * being written into the bytecode stream and fixed-up later (c.f.
5092 * EmitBackPatchOp and BackPatch).
5094 js_PushStatement(cg
, &stmtInfo
,
5095 pn
->pn_kid3
? STMT_FINALLY
: STMT_TRY
,
5099 * Since an exception can be thrown at any place inside the try block,
5100 * we need to restore the stack and the scope chain before we transfer
5101 * the control to the exception handler.
5103 * For that we store in a try note associated with the catch or
5104 * finally block the stack depth upon the try entry. The interpreter
5105 * uses this depth to properly unwind the stack and the scope chain.
5107 depth
= cg
->stackDepth
;
5109 /* Mark try location for decompilation, then emit try block. */
5110 if (js_Emit1(cx
, cg
, JSOP_TRY
) < 0)
5112 tryStart
= CG_OFFSET(cg
);
5113 if (!js_EmitTree(cx
, cg
, pn
->pn_kid1
))
5115 JS_ASSERT(depth
== cg
->stackDepth
);
5117 /* GOSUB to finally, if present. */
5119 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0)
5121 jmp
= EmitBackPatchOp(cx
, cg
, JSOP_BACKPATCH
, &GOSUBS(stmtInfo
));
5126 /* Emit (hidden) jump over catch and/or finally. */
5127 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0)
5129 jmp
= EmitBackPatchOp(cx
, cg
, JSOP_BACKPATCH
, &catchJump
);
5133 tryEnd
= CG_OFFSET(cg
);
5135 /* If this try has a catch block, emit it. */
5139 jsint count
= 0; /* previous catch block's population */
5142 * The emitted code for a catch block looks like:
5144 * [throwing] only if 2nd+ catch block
5145 * [leaveblock] only if 2nd+ catch block
5146 * enterblock with SRC_CATCH
5148 * [dup] only if catchguard
5149 * setlocalpop <slot> or destructuring code
5150 * [< catchguard code >] if there's a catchguard
5151 * [ifeq <offset to next catch block>] " "
5152 * [pop] only if catchguard
5153 * < catch block contents >
5155 * goto <end of catch blocks> non-local; finally applies
5157 * If there's no catch block without a catchguard, the last
5158 * <offset to next catch block> points to rethrow code. This
5159 * code will [gosub] to the finally code if appropriate, and is
5160 * also used for the catch-all trynote for capturing exceptions
5161 * thrown from catch{} blocks.
5163 for (pn3
= pn2
->pn_head
; pn3
; pn3
= pn3
->pn_next
) {
5164 ptrdiff_t guardJump
, catchNote
;
5166 JS_ASSERT(cg
->stackDepth
== depth
);
5167 guardJump
= GUARDJUMP(stmtInfo
);
5168 if (guardJump
!= -1) {
5169 /* Fix up and clean up previous catch block. */
5170 CHECK_AND_SET_JUMP_OFFSET_AT(cx
, cg
, guardJump
);
5173 * Account for JSOP_ENTERBLOCK (whose block object count
5174 * is saved below) and pushed exception object that we
5175 * still have after the jumping from the previous guard.
5177 cg
->stackDepth
= depth
+ count
+ 1;
5180 * Move exception back to cx->exception to prepare for
5181 * the next catch. We hide [throwing] from the decompiler
5182 * since it compensates for the hidden JSOP_DUP at the
5183 * start of the previous guarded catch.
5185 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0 ||
5186 js_Emit1(cx
, cg
, JSOP_THROWING
) < 0) {
5189 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0)
5191 EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK
, count
);
5192 JS_ASSERT(cg
->stackDepth
== depth
);
5196 * Annotate the JSOP_ENTERBLOCK that's about to be generated
5197 * by the call to js_EmitTree immediately below. Save this
5198 * source note's index in stmtInfo for use by the TOK_CATCH:
5199 * case, where the length of the catch guard is set as the
5202 catchNote
= js_NewSrcNote2(cx
, cg
, SRC_CATCH
, 0);
5205 CATCHNOTE(stmtInfo
) = catchNote
;
5208 * Emit the lexical scope and catch body. Save the catch's
5209 * block object population via count, for use when targeting
5210 * guardJump at the next catch (the guard mismatch case).
5212 JS_ASSERT(pn3
->pn_type
== TOK_LEXICALSCOPE
);
5213 count
= OBJ_BLOCK_COUNT(cx
, pn3
->pn_objbox
->object
);
5214 if (!js_EmitTree(cx
, cg
, pn3
))
5217 /* gosub <finally>, if required */
5219 jmp
= EmitBackPatchOp(cx
, cg
, JSOP_BACKPATCH
,
5223 JS_ASSERT(cg
->stackDepth
== depth
);
5227 * Jump over the remaining catch blocks. This will get fixed
5228 * up to jump to after catch/finally.
5230 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0)
5232 jmp
= EmitBackPatchOp(cx
, cg
, JSOP_BACKPATCH
, &catchJump
);
5237 * Save a pointer to the last catch node to handle try-finally
5238 * and try-catch(guard)-finally special cases.
5240 lastCatch
= pn3
->expr();
5245 * Last catch guard jumps to the rethrow code sequence if none of the
5246 * guards match. Target guardJump at the beginning of the rethrow
5247 * sequence, just in case a guard expression throws and leaves the
5250 if (lastCatch
&& lastCatch
->pn_kid2
) {
5251 CHECK_AND_SET_JUMP_OFFSET_AT(cx
, cg
, GUARDJUMP(stmtInfo
));
5253 /* Sync the stack to take into account pushed exception. */
5254 JS_ASSERT(cg
->stackDepth
== depth
);
5255 cg
->stackDepth
= depth
+ 1;
5258 * Rethrow the exception, delegating executing of finally if any
5259 * to the exception handler.
5261 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0 ||
5262 js_Emit1(cx
, cg
, JSOP_THROW
) < 0) {
5267 JS_ASSERT(cg
->stackDepth
== depth
);
5269 /* Emit finally handler if any. */
5270 finallyStart
= 0; /* to quell GCC uninitialized warnings */
5273 * Fix up the gosubs that might have been emitted before non-local
5274 * jumps to the finally code.
5276 if (!BackPatch(cx
, cg
, GOSUBS(stmtInfo
), CG_NEXT(cg
), JSOP_GOSUB
))
5279 finallyStart
= CG_OFFSET(cg
);
5281 /* Indicate that we're emitting a subroutine body. */
5282 stmtInfo
.type
= STMT_SUBROUTINE
;
5283 if (!UpdateLineNumberNotes(cx
, cg
, pn
->pn_kid3
->pn_pos
.begin
.lineno
))
5285 if (js_Emit1(cx
, cg
, JSOP_FINALLY
) < 0 ||
5286 !js_EmitTree(cx
, cg
, pn
->pn_kid3
) ||
5287 js_Emit1(cx
, cg
, JSOP_RETSUB
) < 0) {
5290 JS_ASSERT(cg
->stackDepth
== depth
);
5292 if (!js_PopStatementCG(cx
, cg
))
5295 if (js_NewSrcNote(cx
, cg
, SRC_ENDBRACE
) < 0 ||
5296 js_Emit1(cx
, cg
, JSOP_NOP
) < 0) {
5300 /* Fix up the end-of-try/catch jumps to come here. */
5301 if (!BackPatch(cx
, cg
, catchJump
, CG_NEXT(cg
), JSOP_GOTO
))
5305 * Add the try note last, to let post-order give us the right ordering
5306 * (first to last for a given nesting level, inner to outer by level).
5309 !NewTryNote(cx
, cg
, JSTRY_CATCH
, depth
, tryStart
, tryEnd
)) {
5314 * If we've got a finally, mark try+catch region with additional
5315 * trynote to catch exceptions (re)thrown from a catch block or
5316 * for the try{}finally{} case.
5319 !NewTryNote(cx
, cg
, JSTRY_FINALLY
, depth
, tryStart
, finallyStart
)) {
5327 ptrdiff_t catchStart
, guardJump
;
5331 * Morph STMT_BLOCK to STMT_CATCH, note the block entry code offset,
5332 * and save the block object atom.
5335 JS_ASSERT(stmt
->type
== STMT_BLOCK
&& (stmt
->flags
& SIF_SCOPE
));
5336 stmt
->type
= STMT_CATCH
;
5337 catchStart
= stmt
->update
;
5338 blockObj
= stmt
->blockObj
;
5340 /* Go up one statement info record to the TRY or FINALLY record. */
5342 JS_ASSERT(stmt
->type
== STMT_TRY
|| stmt
->type
== STMT_FINALLY
);
5344 /* Pick up the pending exception and bind it to the catch variable. */
5345 if (js_Emit1(cx
, cg
, JSOP_EXCEPTION
) < 0)
5349 * Dup the exception object if there is a guard for rethrowing to use
5350 * it later when rethrowing or in other catches.
5352 if (pn
->pn_kid2
&& js_Emit1(cx
, cg
, JSOP_DUP
) < 0)
5356 switch (pn2
->pn_type
) {
5357 #if JS_HAS_DESTRUCTURING
5360 if (!EmitDestructuringOps(cx
, cg
, JSOP_NOP
, pn2
))
5362 if (js_Emit1(cx
, cg
, JSOP_POP
) < 0)
5368 /* Inline and specialize BindNameToSlot for pn2. */
5369 JS_ASSERT(!pn2
->pn_cookie
.isFree());
5370 EMIT_UINT16_IMM_OP(JSOP_SETLOCALPOP
, pn2
->pn_cookie
.asInteger());
5377 /* Emit the guard expression, if there is one. */
5379 if (!js_EmitTree(cx
, cg
, pn
->pn_kid2
))
5381 if (!js_SetSrcNoteOffset(cx
, cg
, CATCHNOTE(*stmt
), 0,
5382 CG_OFFSET(cg
) - catchStart
)) {
5385 /* ifeq <next block> */
5386 guardJump
= EmitJump(cx
, cg
, JSOP_IFEQ
, 0);
5389 GUARDJUMP(*stmt
) = guardJump
;
5391 /* Pop duplicated exception object as we no longer need it. */
5392 if (js_Emit1(cx
, cg
, JSOP_POP
) < 0)
5396 /* Emit the catch body. */
5397 if (!js_EmitTree(cx
, cg
, pn
->pn_kid3
))
5401 * Annotate the JSOP_LEAVEBLOCK that will be emitted as we unwind via
5402 * our TOK_LEXICALSCOPE parent, so the decompiler knows to pop.
5404 off
= cg
->stackDepth
;
5405 if (js_NewSrcNote2(cx
, cg
, SRC_CATCH
, off
) < 0)
5411 if (!EmitVariables(cx
, cg
, pn
, JS_FALSE
, ¬eIndex
))
5416 /* Push a return value */
5419 if (!js_EmitTree(cx
, cg
, pn2
))
5422 if (js_Emit1(cx
, cg
, JSOP_PUSH
) < 0)
5427 * EmitNonLocalJumpFixup may add fixup bytecode to close open try
5428 * blocks having finally clauses and to exit intermingled let blocks.
5429 * We can't simply transfer control flow to our caller in that case,
5430 * because we must gosub to those finally clauses from inner to outer,
5431 * with the correct stack pointer (i.e., after popping any with,
5432 * for/in, etc., slots nested inside the finally's try).
5434 * In this case we mutate JSOP_RETURN into JSOP_SETRVAL and add an
5435 * extra JSOP_RETRVAL after the fixups.
5437 top
= CG_OFFSET(cg
);
5438 if (js_Emit1(cx
, cg
, JSOP_RETURN
) < 0)
5440 if (!EmitNonLocalJumpFixup(cx
, cg
, NULL
))
5442 if (top
+ JSOP_RETURN_LENGTH
!= CG_OFFSET(cg
)) {
5443 CG_BASE(cg
)[top
] = JSOP_SETRVAL
;
5444 if (js_Emit1(cx
, cg
, JSOP_RETRVAL
) < 0)
5449 #if JS_HAS_GENERATORS
5451 if (!cg
->inFunction()) {
5452 ReportCompileErrorNumber(cx
, CG_TS(cg
), pn
, JSREPORT_ERROR
,
5453 JSMSG_BAD_RETURN_OR_YIELD
,
5458 if (!js_EmitTree(cx
, cg
, pn
->pn_kid
))
5461 if (js_Emit1(cx
, cg
, JSOP_PUSH
) < 0)
5464 if (pn
->pn_hidden
&& js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0)
5466 if (js_Emit1(cx
, cg
, JSOP_YIELD
) < 0)
5473 #if JS_HAS_XML_SUPPORT
5474 if (pn
->pn_arity
== PN_UNARY
) {
5475 if (!js_EmitTree(cx
, cg
, pn
->pn_kid
))
5477 if (js_Emit1(cx
, cg
, PN_OP(pn
)) < 0)
5483 JS_ASSERT(pn
->pn_arity
== PN_LIST
);
5486 tmp
= CG_OFFSET(cg
);
5487 if (pn
->pn_xflags
& PNX_NEEDBRACES
) {
5488 noteIndex
= js_NewSrcNote2(cx
, cg
, SRC_BRACE
, 0);
5489 if (noteIndex
< 0 || js_Emit1(cx
, cg
, JSOP_NOP
) < 0)
5493 js_PushStatement(cg
, &stmtInfo
, STMT_BLOCK
, top
);
5495 JSParseNode
*pnchild
= pn
->pn_head
;
5496 if (pn
->pn_xflags
& PNX_FUNCDEFS
) {
5498 * This block contains top-level function definitions. To ensure
5499 * that we emit the bytecode defining them before the rest of code
5500 * in the block we use a separate pass over functions. During the
5501 * main pass later the emitter will add JSOP_NOP with source notes
5502 * for the function to preserve the original functions position
5505 * Currently this is used only for functions, as compile-as-we go
5506 * mode for scripts does not allow separate emitter passes.
5508 JS_ASSERT(cg
->inFunction());
5509 if (pn
->pn_xflags
& PNX_DESTRUCT
) {
5511 * Assign the destructuring arguments before defining any
5512 * functions, see bug 419662.
5514 JS_ASSERT(pnchild
->pn_type
== TOK_SEMI
);
5515 JS_ASSERT(pnchild
->pn_kid
->pn_type
== TOK_COMMA
);
5516 if (!js_EmitTree(cx
, cg
, pnchild
))
5518 pnchild
= pnchild
->pn_next
;
5521 for (pn2
= pnchild
; pn2
; pn2
= pn2
->pn_next
) {
5522 if (pn2
->pn_type
== TOK_FUNCTION
) {
5523 if (pn2
->pn_op
== JSOP_NOP
) {
5524 if (!js_EmitTree(cx
, cg
, pn2
))
5528 * JSOP_DEFFUN in a top-level block with function
5529 * definitions appears, for example, when "if (true)"
5530 * is optimized away from "if (true) function x() {}".
5533 JS_ASSERT(pn2
->pn_op
== JSOP_DEFFUN
);
5538 for (pn2
= pnchild
; pn2
; pn2
= pn2
->pn_next
) {
5539 if (!js_EmitTree(cx
, cg
, pn2
))
5543 if (noteIndex
>= 0 &&
5544 !js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 0,
5545 CG_OFFSET(cg
) - tmp
)) {
5549 ok
= js_PopStatementCG(cx
, cg
);
5554 JS_ASSERT(pn
->pn_arity
== PN_LIST
);
5555 js_PushStatement(cg
, &stmtInfo
, STMT_SEQ
, top
);
5556 for (pn2
= pn
->pn_head
; pn2
; pn2
= pn2
->pn_next
) {
5557 if (!js_EmitTree(cx
, cg
, pn2
))
5560 ok
= js_PopStatementCG(cx
, cg
);
5567 * Top-level or called-from-a-native JS_Execute/EvaluateScript,
5568 * debugger, and eval frames may need the value of the ultimate
5569 * expression statement as the script's result, despite the fact
5570 * that it appears useless to the compiler.
5572 * API users may also set the JSOPTION_NO_SCRIPT_RVAL option when
5573 * calling JS_Compile* to suppress JSOP_POPV.
5575 useful
= wantval
= !(cg
->flags
& (TCF_IN_FUNCTION
| TCF_NO_SCRIPT_RVAL
));
5577 if (!CheckSideEffects(cx
, cg
, pn2
, &useful
))
5582 * Don't eliminate apparently useless expressions if they are
5583 * labeled expression statements. The tc->topStmt->update test
5584 * catches the case where we are nesting in js_EmitTree for a
5585 * labeled compound statement.
5589 cg
->topStmt
->type
!= STMT_LABEL
||
5590 cg
->topStmt
->update
< CG_OFFSET(cg
))) {
5591 CG_CURRENT_LINE(cg
) = pn2
->pn_pos
.begin
.lineno
;
5592 if (!ReportCompileErrorNumber(cx
, CG_TS(cg
), pn2
,
5593 JSREPORT_WARNING
| JSREPORT_STRICT
,
5594 JSMSG_USELESS_EXPR
)) {
5598 op
= wantval
? JSOP_POPV
: JSOP_POP
;
5599 #if JS_HAS_DESTRUCTURING
5601 pn2
->pn_type
== TOK_ASSIGN
&&
5602 !MaybeEmitGroupAssignment(cx
, cg
, op
, pn2
, &op
)) {
5606 if (op
!= JSOP_NOP
) {
5608 * Specialize JSOP_SETPROP to JSOP_SETMETHOD to defer or
5609 * avoid null closure cloning. Do this only for assignment
5610 * statements that are not completion values wanted by a
5611 * script evaluator, to ensure that the joined function
5612 * can't escape directly.
5615 PN_TYPE(pn2
) == TOK_ASSIGN
&&
5616 PN_OP(pn2
) == JSOP_NOP
&&
5617 PN_OP(pn2
->pn_left
) == JSOP_SETPROP
&&
5618 PN_OP(pn2
->pn_right
) == JSOP_LAMBDA
&&
5619 pn2
->pn_right
->pn_funbox
->joinable()) {
5620 pn2
->pn_left
->pn_op
= JSOP_SETMETHOD
;
5622 if (!js_EmitTree(cx
, cg
, pn2
))
5624 if (js_Emit1(cx
, cg
, op
) < 0)
5632 /* Emit an annotated nop so we know to decompile a label. */
5634 ale
= cg
->atomList
.add(cg
->parser
, atom
);
5638 noteType
= (pn2
->pn_type
== TOK_LC
||
5639 (pn2
->pn_type
== TOK_LEXICALSCOPE
&&
5640 pn2
->expr()->pn_type
== TOK_LC
))
5643 noteIndex
= js_NewSrcNote2(cx
, cg
, noteType
,
5644 (ptrdiff_t) ALE_INDEX(ale
));
5645 if (noteIndex
< 0 ||
5646 js_Emit1(cx
, cg
, JSOP_NOP
) < 0) {
5650 /* Emit code for the labeled statement. */
5651 js_PushStatement(cg
, &stmtInfo
, STMT_LABEL
, CG_OFFSET(cg
));
5652 stmtInfo
.label
= atom
;
5653 if (!js_EmitTree(cx
, cg
, pn2
))
5655 if (!js_PopStatementCG(cx
, cg
))
5658 /* If the statement was compound, emit a note for the end brace. */
5659 if (noteType
== SRC_LABELBRACE
) {
5660 if (js_NewSrcNote(cx
, cg
, SRC_ENDBRACE
) < 0 ||
5661 js_Emit1(cx
, cg
, JSOP_NOP
) < 0) {
5669 * Emit SRC_PCDELTA notes on each JSOP_POP between comma operands.
5670 * These notes help the decompiler bracket the bytecodes generated
5671 * from each sub-expression that follows a comma.
5673 off
= noteIndex
= -1;
5674 for (pn2
= pn
->pn_head
; ; pn2
= pn2
->pn_next
) {
5675 if (!js_EmitTree(cx
, cg
, pn2
))
5677 tmp
= CG_OFFSET(cg
);
5678 if (noteIndex
>= 0) {
5679 if (!js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 0, tmp
-off
))
5685 noteIndex
= js_NewSrcNote2(cx
, cg
, SRC_PCDELTA
, 0);
5686 if (noteIndex
< 0 ||
5687 js_Emit1(cx
, cg
, JSOP_POP
) < 0) {
5695 * Check left operand type and generate specialized code for it.
5696 * Specialize to avoid ECMA "reference type" values on the operand
5697 * stack, which impose pervasive runtime "GetValue" costs.
5700 atomIndex
= (jsatomid
) -1; /* quell GCC overwarning */
5701 switch (PN_TYPE(pn2
)) {
5703 if (!BindNameToSlot(cx
, cg
, pn2
))
5705 if (!pn2
->pn_cookie
.isFree()) {
5706 atomIndex
= (jsatomid
) pn2
->pn_cookie
.asInteger();
5708 ale
= cg
->atomList
.add(cg
->parser
, pn2
->pn_atom
);
5711 atomIndex
= ALE_INDEX(ale
);
5712 if (!pn2
->isConst())
5713 EMIT_INDEX_OP(JSOP_BINDNAME
, atomIndex
);
5717 if (!js_EmitTree(cx
, cg
, pn2
->expr()))
5719 ale
= cg
->atomList
.add(cg
->parser
, pn2
->pn_atom
);
5722 atomIndex
= ALE_INDEX(ale
);
5725 JS_ASSERT(pn2
->pn_arity
== PN_BINARY
);
5726 if (!js_EmitTree(cx
, cg
, pn2
->pn_left
))
5728 if (!js_EmitTree(cx
, cg
, pn2
->pn_right
))
5731 #if JS_HAS_DESTRUCTURING
5737 if (!js_EmitTree(cx
, cg
, pn2
))
5740 #if JS_HAS_XML_SUPPORT
5742 JS_ASSERT(pn2
->pn_op
== JSOP_SETXMLNAME
);
5743 if (!js_EmitTree(cx
, cg
, pn2
->pn_kid
))
5745 if (js_Emit1(cx
, cg
, JSOP_BINDXMLNAME
) < 0)
5754 if (op
!= JSOP_NOP
) {
5755 switch (pn2
->pn_type
) {
5757 if (pn2
->isConst()) {
5758 if (PN_OP(pn2
) == JSOP_CALLEE
) {
5759 if (js_Emit1(cx
, cg
, JSOP_CALLEE
) < 0)
5762 EMIT_INDEX_OP(PN_OP(pn2
), atomIndex
);
5764 } else if (PN_OP(pn2
) == JSOP_SETNAME
) {
5765 if (js_Emit1(cx
, cg
, JSOP_DUP
) < 0)
5767 EMIT_INDEX_OP(JSOP_GETXPROP
, atomIndex
);
5769 JS_ASSERT(PN_OP(pn2
) != JSOP_GETUPVAR
);
5770 EMIT_UINT16_IMM_OP((PN_OP(pn2
) == JSOP_SETGVAR
)
5772 : (PN_OP(pn2
) == JSOP_SETARG
)
5779 if (js_Emit1(cx
, cg
, JSOP_DUP
) < 0)
5781 if (pn2
->pn_atom
== cx
->runtime
->atomState
.lengthAtom
) {
5782 if (js_Emit1(cx
, cg
, JSOP_LENGTH
) < 0)
5784 } else if (pn2
->pn_atom
== cx
->runtime
->atomState
.protoAtom
) {
5785 if (!EmitIndexOp(cx
, JSOP_QNAMEPART
, atomIndex
, cg
))
5787 if (js_Emit1(cx
, cg
, JSOP_GETELEM
) < 0)
5790 EMIT_INDEX_OP(JSOP_GETPROP
, atomIndex
);
5795 #if JS_HAS_XML_SUPPORT
5798 if (js_Emit1(cx
, cg
, JSOP_DUP2
) < 0)
5800 if (js_Emit1(cx
, cg
, JSOP_GETELEM
) < 0)
5807 /* Now emit the right operand (it may affect the namespace). */
5808 if (!js_EmitTree(cx
, cg
, pn
->pn_right
))
5811 /* If += etc., emit the binary operator with a decompiler note. */
5812 if (op
!= JSOP_NOP
) {
5814 * Take care to avoid SRC_ASSIGNOP if the left-hand side is a const
5815 * declared in the current compilation unit, as in this case (just
5816 * a bit further below) we will avoid emitting the assignment op.
5818 if (pn2
->pn_type
!= TOK_NAME
|| !pn2
->isConst()) {
5819 if (js_NewSrcNote(cx
, cg
, SRC_ASSIGNOP
) < 0)
5822 if (js_Emit1(cx
, cg
, op
) < 0)
5826 /* Left parts such as a.b.c and a[b].c need a decompiler note. */
5827 if (pn2
->pn_type
!= TOK_NAME
&&
5828 #if JS_HAS_DESTRUCTURING
5829 pn2
->pn_type
!= TOK_RB
&&
5830 pn2
->pn_type
!= TOK_RC
&&
5832 js_NewSrcNote2(cx
, cg
, SRC_PCBASE
, CG_OFFSET(cg
) - top
) < 0) {
5836 /* Finally, emit the specialized assignment bytecode. */
5837 switch (pn2
->pn_type
) {
5843 EMIT_INDEX_OP(PN_OP(pn2
), atomIndex
);
5847 if (js_Emit1(cx
, cg
, JSOP_SETELEM
) < 0)
5850 #if JS_HAS_DESTRUCTURING
5853 if (!EmitDestructuringOps(cx
, cg
, JSOP_SETNAME
, pn2
))
5857 #if JS_HAS_XML_SUPPORT
5859 if (js_Emit1(cx
, cg
, JSOP_SETXMLNAME
) < 0)
5869 /* Emit the condition, then branch if false to the else part. */
5870 if (!js_EmitTree(cx
, cg
, pn
->pn_kid1
))
5872 noteIndex
= js_NewSrcNote(cx
, cg
, SRC_COND
);
5875 beq
= EmitJump(cx
, cg
, JSOP_IFEQ
, 0);
5876 if (beq
< 0 || !js_EmitTree(cx
, cg
, pn
->pn_kid2
))
5879 /* Jump around else, fixup the branch, emit else, fixup jump. */
5880 jmp
= EmitJump(cx
, cg
, JSOP_GOTO
, 0);
5883 CHECK_AND_SET_JUMP_OFFSET_AT(cx
, cg
, beq
);
5886 * Because each branch pushes a single value, but our stack budgeting
5887 * analysis ignores branches, we now have to adjust cg->stackDepth to
5888 * ignore the value pushed by the first branch. Execution will follow
5889 * only one path, so we must decrement cg->stackDepth.
5891 * Failing to do this will foil code, such as the try/catch/finally
5892 * exception handling code generator, that samples cg->stackDepth for
5893 * use at runtime (JSOP_SETSP), or in let expression and block code
5894 * generation, which must use the stack depth to compute local stack
5895 * indexes correctly.
5897 JS_ASSERT(cg
->stackDepth
> 0);
5899 if (!js_EmitTree(cx
, cg
, pn
->pn_kid3
))
5901 CHECK_AND_SET_JUMP_OFFSET_AT(cx
, cg
, jmp
);
5902 if (!js_SetSrcNoteOffset(cx
, cg
, noteIndex
, 0, jmp
- beq
))
5909 * JSOP_OR converts the operand on the stack to boolean, and if true,
5910 * leaves the original operand value on the stack and jumps; otherwise
5911 * it pops and falls into the next bytecode, which evaluates the right
5912 * operand. The jump goes around the right operand evaluation.
5914 * JSOP_AND converts the operand on the stack to boolean, and if false,
5915 * leaves the original operand value on the stack and jumps; otherwise
5916 * it pops and falls into the right operand's bytecode.
5918 if (pn
->pn_arity
== PN_BINARY
) {
5919 if (!js_EmitTree(cx
, cg
, pn
->pn_left
))
5921 top
= EmitJump(cx
, cg
, JSOP_BACKPATCH_POP
, 0);
5924 if (!js_EmitTree(cx
, cg
, pn
->pn_right
))
5926 off
= CG_OFFSET(cg
);
5927 pc
= CG_CODE(cg
, top
);
5928 CHECK_AND_SET_JUMP_OFFSET(cx
, cg
, pc
, off
- top
);
5931 JS_ASSERT(pn
->pn_arity
== PN_LIST
);
5932 JS_ASSERT(pn
->pn_head
->pn_next
->pn_next
);
5934 /* Left-associative operator chain: avoid too much recursion. */
5936 if (!js_EmitTree(cx
, cg
, pn2
))
5938 top
= EmitJump(cx
, cg
, JSOP_BACKPATCH_POP
, 0);
5942 /* Emit nodes between the head and the tail. */
5944 while ((pn2
= pn2
->pn_next
)->pn_next
) {
5945 if (!js_EmitTree(cx
, cg
, pn2
))
5947 off
= EmitJump(cx
, cg
, JSOP_BACKPATCH_POP
, 0);
5950 if (!SetBackPatchDelta(cx
, cg
, CG_CODE(cg
, jmp
), off
- jmp
))
5955 if (!js_EmitTree(cx
, cg
, pn2
))
5959 off
= CG_OFFSET(cg
);
5961 pc
= CG_CODE(cg
, top
);
5962 tmp
= GetJumpOffset(cg
, pc
);
5963 CHECK_AND_SET_JUMP_OFFSET(cx
, cg
, pc
, off
- top
);
5966 } while ((pn2
= pn2
->pn_next
)->pn_next
);
5971 /* For TCF_IN_FUNCTION test, see TOK_RB concerning JSOP_NEWARRAY. */
5972 if (pn
->pn_arity
== PN_LIST
&& pn
->pn_count
< JS_BIT(16) &&
5974 /* Emit up to the first string literal conventionally. */
5975 for (pn2
= pn
->pn_head
; pn2
; pn2
= pn2
->pn_next
) {
5976 if (pn2
->pn_type
== TOK_STRING
)
5978 if (!js_EmitTree(cx
, cg
, pn2
))
5980 if (pn2
!= pn
->pn_head
&& js_Emit1(cx
, cg
, JSOP_ADD
) < 0)
5988 * Having seen a string literal, we know statically that the rest
5989 * of the additions are string concatenation, so we emit them as a
5990 * single concatn. First, do string conversion on the result of the
5991 * preceding zero or more additions so that any side effects of
5992 * string conversion occur before the next operand begins.
5994 if (pn2
== pn
->pn_head
) {
5997 if (!js_Emit1(cx
, cg
, JSOP_OBJTOSTR
))
6002 for (; pn2
; pn2
= pn2
->pn_next
, index
++) {
6003 if (!js_EmitTree(cx
, cg
, pn2
))
6005 if (!pn2
->isLiteral() && js_Emit1(cx
, cg
, JSOP_OBJTOSTR
) < 0)
6009 EMIT_UINT16_IMM_OP(JSOP_CONCATN
, index
);
6018 case TOK_INSTANCEOF
:
6023 if (pn
->pn_arity
== PN_LIST
) {
6024 /* Left-associative operator chain: avoid too much recursion. */
6026 if (!js_EmitTree(cx
, cg
, pn2
))
6029 while ((pn2
= pn2
->pn_next
) != NULL
) {
6030 if (!js_EmitTree(cx
, cg
, pn2
))
6032 if (js_Emit1(cx
, cg
, op
) < 0)
6036 #if JS_HAS_XML_SUPPORT
6040 if (pn
->pn_arity
== PN_NAME
) {
6041 if (!js_EmitTree(cx
, cg
, pn
->expr()))
6043 if (!EmitAtomOp(cx
, pn
, PN_OP(pn
), cg
))
6049 * Binary :: has a right operand that brackets arbitrary code,
6050 * possibly including a let (a = b) ... expression. We must clear
6051 * TCF_IN_FOR_INIT to avoid mis-compiling such beasts.
6053 oldflags
= cg
->flags
;
6054 cg
->flags
&= ~TCF_IN_FOR_INIT
;
6057 /* Binary operators that evaluate both operands unconditionally. */
6058 if (!js_EmitTree(cx
, cg
, pn
->pn_left
))
6060 if (!js_EmitTree(cx
, cg
, pn
->pn_right
))
6062 #if JS_HAS_XML_SUPPORT
6063 cg
->flags
|= oldflags
& TCF_IN_FOR_INIT
;
6065 if (js_Emit1(cx
, cg
, PN_OP(pn
)) < 0)
6071 #if JS_HAS_XML_SUPPORT
6074 JS_ASSERT(pn
->pn_arity
== PN_UNARY
);
6081 /* Unary op, including unary +/-. */
6083 #if JS_HAS_XML_SUPPORT
6084 if (op
== JSOP_XMLNAME
) {
6085 if (!EmitXMLName(cx
, pn
, op
, cg
))
6092 if (op
== JSOP_TYPEOF
&& pn2
->pn_type
!= TOK_NAME
)
6093 op
= JSOP_TYPEOFEXPR
;
6095 oldflags
= cg
->flags
;
6096 cg
->flags
&= ~TCF_IN_FOR_INIT
;
6097 if (!js_EmitTree(cx
, cg
, pn2
))
6099 cg
->flags
|= oldflags
& TCF_IN_FOR_INIT
;
6100 if (js_Emit1(cx
, cg
, op
) < 0)
6107 /* Emit lvalue-specialized code for ++/-- operators. */
6109 JS_ASSERT(pn2
->pn_type
!= TOK_RP
);
6111 switch (pn2
->pn_type
) {
6113 JS_ASSERT(pn2
->pn_type
== TOK_NAME
);
6115 if (!BindNameToSlot(cx
, cg
, pn2
))
6118 if (op
== JSOP_CALLEE
) {
6119 if (js_Emit1(cx
, cg
, op
) < 0)
6121 } else if (!pn2
->pn_cookie
.isFree()) {
6122 atomIndex
= (jsatomid
) pn2
->pn_cookie
.asInteger();
6123 EMIT_UINT16_IMM_OP(op
, atomIndex
);
6125 JS_ASSERT(JOF_OPTYPE(op
) == JOF_ATOM
);
6126 if (!EmitAtomOp(cx
, pn2
, op
, cg
))
6130 if (pn2
->isConst()) {
6131 if (js_Emit1(cx
, cg
, JSOP_POS
) < 0)
6134 if (!(js_CodeSpec
[op
].format
& JOF_POST
)) {
6135 if (js_Emit1(cx
, cg
, JSOP_ONE
) < 0)
6137 op
= (js_CodeSpec
[op
].format
& JOF_INC
) ? JSOP_ADD
: JSOP_SUB
;
6138 if (js_Emit1(cx
, cg
, op
) < 0)
6144 if (!EmitPropOp(cx
, pn2
, op
, cg
, JS_FALSE
))
6148 if (!EmitElemOp(cx
, pn2
, op
, cg
))
6152 if (!js_EmitTree(cx
, cg
, pn2
))
6154 if (js_NewSrcNote2(cx
, cg
, SRC_PCBASE
,
6155 CG_OFFSET(cg
) - pn2
->pn_offset
) < 0) {
6158 if (js_Emit1(cx
, cg
, op
) < 0)
6161 #if JS_HAS_XML_SUPPORT
6163 JS_ASSERT(pn2
->pn_op
== JSOP_SETXMLNAME
);
6164 if (!js_EmitTree(cx
, cg
, pn2
->pn_kid
))
6166 if (js_Emit1(cx
, cg
, JSOP_BINDXMLNAME
) < 0)
6168 if (js_Emit1(cx
, cg
, op
) < 0)
6177 * Under ECMA 3, deleting a non-reference returns true -- but alas we
6178 * must evaluate the operand if it appears it might have side effects.
6181 switch (pn2
->pn_type
) {
6183 if (!BindNameToSlot(cx
, cg
, pn2
))
6186 if (op
== JSOP_FALSE
) {
6187 if (js_Emit1(cx
, cg
, op
) < 0)
6190 if (!EmitAtomOp(cx
, pn2
, op
, cg
))
6195 if (!EmitPropOp(cx
, pn2
, JSOP_DELPROP
, cg
, JS_FALSE
))
6198 #if JS_HAS_XML_SUPPORT
6200 if (!EmitElemOp(cx
, pn2
, JSOP_DELDESC
, cg
))
6205 if (!EmitElemOp(cx
, pn2
, JSOP_DELELEM
, cg
))
6210 * If useless, just emit JSOP_TRUE; otherwise convert delete foo()
6211 * to foo(), true (a comma expression, requiring SRC_PCDELTA).
6214 if (!CheckSideEffects(cx
, cg
, pn2
, &useful
))
6217 off
= noteIndex
= -1;
6219 if (pn2
->pn_op
== JSOP_SETCALL
)
6220 pn2
->pn_op
= JSOP_CALL
;
6221 if (!js_EmitTree(cx
, cg
, pn2
))
6223 off
= CG_OFFSET(cg
);
6224 noteIndex
= js_NewSrcNote2(cx
, cg
, SRC_PCDELTA
, 0);
6225 if (noteIndex
< 0 || js_Emit1(cx
, cg
, JSOP_POP
) < 0)
6228 if (js_Emit1(cx
, cg
, JSOP_TRUE
) < 0)
6230 if (noteIndex
>= 0) {
6231 tmp
= CG_OFFSET(cg
);
6232 if (!js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 0, tmp
-off
))
6238 #if JS_HAS_XML_SUPPORT
6240 if (!js_EmitTree(cx
, cg
, pn
->pn_left
))
6242 jmp
= EmitJump(cx
, cg
, JSOP_FILTER
, 0);
6245 top
= js_Emit1(cx
, cg
, JSOP_TRACE
);
6248 if (!js_EmitTree(cx
, cg
, pn
->pn_right
))
6250 CHECK_AND_SET_JUMP_OFFSET_AT(cx
, cg
, jmp
);
6251 if (EmitJump(cx
, cg
, JSOP_ENDFILTER
, top
- CG_OFFSET(cg
)) < 0)
6258 * Pop a stack operand, convert it to object, get a property named by
6259 * this bytecode's immediate-indexed atom operand, and push its value
6260 * (not a reference to it).
6262 ok
= EmitPropOp(cx
, pn
, PN_OP(pn
), cg
, JS_FALSE
);
6266 #if JS_HAS_XML_SUPPORT
6270 * Pop two operands, convert the left one to object and the right one
6271 * to property name (atom or tagged int), get the named property, and
6272 * push its value. Set the "obj" register to the result of ToObject
6273 * on the left operand.
6275 ok
= EmitElemOp(cx
, pn
, PN_OP(pn
), cg
);
6281 bool callop
= (PN_TYPE(pn
) == TOK_LP
);
6284 * Emit callable invocation or operator new (constructor call) code.
6285 * First, emit code for the left operand to evaluate the callable or
6286 * constructable object expression.
6288 * For operator new applied to other expressions than E4X ones, we emit
6289 * JSOP_GETPROP instead of JSOP_CALLPROP, etc. This is necessary to
6290 * interpose the lambda-initialized method read barrier -- see the code
6291 * in jsinterp.cpp for JSOP_LAMBDA followed by JSOP_{SET,INIT}PROP.
6293 * Then (or in a call case that has no explicit reference-base object)
6294 * we emit JSOP_NULL as a placeholder local GC root to hold the |this|
6295 * parameter: in the operator new case, the newborn instance; in the
6296 * base-less call case, a cookie meaning "use the global object as the
6297 * |this| value" (or in ES5 strict mode, "use undefined", so we should
6298 * use JSOP_PUSH instead of JSOP_NULL -- see bug 514570).
6301 switch (pn2
->pn_type
) {
6303 if (!EmitNameOp(cx
, cg
, pn2
, callop
))
6307 if (!EmitPropOp(cx
, pn2
, PN_OP(pn2
), cg
, callop
))
6311 JS_ASSERT(pn2
->pn_op
== JSOP_GETELEM
);
6312 if (!EmitElemOp(cx
, pn2
, callop
? JSOP_CALLELEM
: JSOP_GETELEM
, cg
))
6316 #if JS_HAS_XML_SUPPORT
6317 if (pn2
->pn_op
== JSOP_XMLNAME
) {
6318 if (!EmitXMLName(cx
, pn2
, JSOP_CALLXMLNAME
, cg
))
6320 callop
= true; /* suppress JSOP_NULL after */
6327 * Push null as a placeholder for the global object, per ECMA-262
6330 if (!js_EmitTree(cx
, cg
, pn2
))
6332 callop
= false; /* trigger JSOP_NULL after */
6335 if (!callop
&& js_Emit1(cx
, cg
, JSOP_NULL
) < 0)
6338 /* Remember start of callable-object bytecode for decompilation hint. */
6342 * Emit code for each argument in order, then emit the JSOP_*CALL or
6343 * JSOP_NEW bytecode with a two-byte immediate telling how many args
6344 * were pushed on the operand stack.
6346 uintN oldflags
= cg
->flags
;
6347 cg
->flags
&= ~TCF_IN_FOR_INIT
;
6348 for (pn3
= pn2
->pn_next
; pn3
; pn3
= pn3
->pn_next
) {
6349 if (!js_EmitTree(cx
, cg
, pn3
))
6352 cg
->flags
|= oldflags
& TCF_IN_FOR_INIT
;
6353 if (js_NewSrcNote2(cx
, cg
, SRC_PCBASE
, CG_OFFSET(cg
) - off
) < 0)
6356 argc
= pn
->pn_count
- 1;
6357 if (js_Emit3(cx
, cg
, PN_OP(pn
), ARGC_HI(argc
), ARGC_LO(argc
)) < 0)
6359 if (PN_OP(pn
) == JSOP_CALL
) {
6360 /* Add a trace hint opcode for recursion. */
6361 if (js_Emit1(cx
, cg
, JSOP_TRACE
) < 0)
6364 if (PN_OP(pn
) == JSOP_EVAL
)
6365 EMIT_UINT16_IMM_OP(JSOP_LINENO
, pn
->pn_pos
.begin
.lineno
);
6369 case TOK_LEXICALSCOPE
:
6371 JSObjectBox
*objbox
;
6374 objbox
= pn
->pn_objbox
;
6375 js_PushBlockScope(cg
, &stmtInfo
, objbox
->object
, CG_OFFSET(cg
));
6378 * If this lexical scope is not for a catch block, let block or let
6379 * expression, or any kind of for loop (where the scope starts in the
6380 * head after the first part if for (;;), else in the body if for-in);
6381 * and if our container is top-level but not a function body, or else
6382 * a block statement; then emit a SRC_BRACE note. All other container
6383 * statements get braces by default from the decompiler.
6386 type
= PN_TYPE(pn
->expr());
6387 if (type
!= TOK_CATCH
&& type
!= TOK_LET
&& type
!= TOK_FOR
&&
6388 (!(stmt
= stmtInfo
.down
)
6390 : stmt
->type
== STMT_BLOCK
)) {
6391 #if defined DEBUG_brendan || defined DEBUG_mrbkap
6392 /* There must be no source note already output for the next op. */
6393 JS_ASSERT(CG_NOTE_COUNT(cg
) == 0 ||
6394 CG_LAST_NOTE_OFFSET(cg
) != CG_OFFSET(cg
) ||
6395 !GettableNoteForNextOp(cg
));
6397 noteIndex
= js_NewSrcNote2(cx
, cg
, SRC_BRACE
, 0);
6402 JS_ASSERT(CG_OFFSET(cg
) == top
);
6403 if (!EmitEnterBlock(cx
, pn
, cg
))
6406 if (!js_EmitTree(cx
, cg
, pn
->pn_expr
))
6410 if (op
== JSOP_LEAVEBLOCKEXPR
) {
6411 if (js_NewSrcNote2(cx
, cg
, SRC_PCBASE
, CG_OFFSET(cg
) - top
) < 0)
6414 if (noteIndex
>= 0 &&
6415 !js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 0,
6416 CG_OFFSET(cg
) - top
)) {
6421 /* Emit the JSOP_LEAVEBLOCK or JSOP_LEAVEBLOCKEXPR opcode. */
6422 count
= OBJ_BLOCK_COUNT(cx
, objbox
->object
);
6423 EMIT_UINT16_IMM_OP(op
, count
);
6425 ok
= js_PopStatementCG(cx
, cg
);
6429 #if JS_HAS_BLOCK_SCOPE
6431 /* Let statements have their variable declarations on the left. */
6432 if (pn
->pn_arity
== PN_BINARY
) {
6439 /* Non-null pn2 means that pn is the variable list from a let head. */
6440 JS_ASSERT(pn
->pn_arity
== PN_LIST
);
6441 if (!EmitVariables(cx
, cg
, pn
, pn2
!= NULL
, ¬eIndex
))
6444 /* Thus non-null pn2 is the body of the let block or expression. */
6445 tmp
= CG_OFFSET(cg
);
6446 if (pn2
&& !js_EmitTree(cx
, cg
, pn2
))
6449 if (noteIndex
>= 0 &&
6450 !js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 0,
6451 CG_OFFSET(cg
) - tmp
)) {
6455 #endif /* JS_HAS_BLOCK_SCOPE */
6457 #if JS_HAS_GENERATORS
6458 case TOK_ARRAYPUSH
: {
6462 * The array object's stack index is in cg->arrayCompDepth. See below
6463 * under the array initialiser code generator for array comprehension
6466 if (!js_EmitTree(cx
, cg
, pn
->pn_kid
))
6468 slot
= AdjustBlockSlot(cx
, cg
, cg
->arrayCompDepth
);
6471 EMIT_UINT16_IMM_OP(PN_OP(pn
), slot
);
6477 #if JS_HAS_GENERATORS
6481 * Emit code for [a, b, c] that is equivalent to constructing a new
6482 * array and in source order evaluating each element value and adding
6483 * it to the array, without invoking latent setters. We use the
6484 * JSOP_NEWINIT and JSOP_INITELEM bytecodes to ignore setters and to
6485 * avoid dup'ing and popping the array as each element is added, as
6486 * JSOP_SETELEM/JSOP_SETPROP would do.
6488 * If no sharp variable is defined, the initializer is not for an array
6489 * comprehension, the initializer is not overlarge, and the initializer
6490 * is not in global code (whose stack growth cannot be precisely modeled
6491 * due to the need to reserve space for global variables and regular
6492 * expressions), use JSOP_NEWARRAY to minimize opcodes and to create the
6493 * array using a fast, all-at-once process rather than a slow, element-
6494 * by-element process.
6496 #if JS_HAS_SHARP_VARS
6501 op
= (JS_LIKELY(pn
->pn_count
< JS_BIT(16)) && cg
->inFunction())
6505 #if JS_HAS_GENERATORS
6506 if (pn
->pn_type
== TOK_ARRAYCOMP
)
6509 #if JS_HAS_SHARP_VARS
6510 JS_ASSERT_IF(sharpnum
>= 0, cg
->hasSharps());
6511 if (cg
->hasSharps())
6515 if (op
== JSOP_NEWINIT
&& !EmitNewInit(cx
, cg
, JSProto_Array
, pn
, sharpnum
))
6518 #if JS_HAS_GENERATORS
6519 if (pn
->pn_type
== TOK_ARRAYCOMP
) {
6523 * Pass the new array's stack index to the TOK_ARRAYPUSH case via
6524 * cg->arrayCompDepth, then simply traverse the TOK_FOR node and
6525 * its kids under pn2 to generate this comprehension.
6527 JS_ASSERT(cg
->stackDepth
> 0);
6528 saveDepth
= cg
->arrayCompDepth
;
6529 cg
->arrayCompDepth
= (uint32
) (cg
->stackDepth
- 1);
6530 if (!js_EmitTree(cx
, cg
, pn
->pn_head
))
6532 cg
->arrayCompDepth
= saveDepth
;
6534 /* Emit the usual op needed for decompilation. */
6535 if (!EmitEndInit(cx
, cg
, 1))
6539 #endif /* JS_HAS_GENERATORS */
6542 for (atomIndex
= 0; pn2
; atomIndex
++, pn2
= pn2
->pn_next
) {
6543 if (op
== JSOP_NEWINIT
&& !EmitNumberOp(cx
, atomIndex
, cg
))
6545 if (pn2
->pn_type
== TOK_COMMA
&& pn2
->pn_arity
== PN_NULLARY
) {
6546 if (js_Emit1(cx
, cg
, JSOP_HOLE
) < 0)
6549 if (!js_EmitTree(cx
, cg
, pn2
))
6552 if (op
== JSOP_NEWINIT
&& js_Emit1(cx
, cg
, JSOP_INITELEM
) < 0)
6555 JS_ASSERT(atomIndex
== pn
->pn_count
);
6557 if (pn
->pn_xflags
& PNX_ENDCOMMA
) {
6558 /* Emit a source note so we know to decompile an extra comma. */
6559 if (js_NewSrcNote(cx
, cg
, SRC_CONTINUE
) < 0)
6563 if (op
== JSOP_NEWINIT
) {
6565 * Emit an op to finish the array and, secondarily, to aid in sharp
6566 * array cleanup (if JS_HAS_SHARP_VARS) and decompilation.
6568 if (!EmitEndInit(cx
, cg
, atomIndex
))
6573 JS_ASSERT(atomIndex
< JS_BIT(16));
6574 EMIT_UINT16_IMM_OP(JSOP_NEWARRAY
, atomIndex
);
6578 #if JS_HAS_SHARP_VARS
6582 #if JS_HAS_DESTRUCTURING_SHORTHAND
6583 if (pn
->pn_xflags
& PNX_DESTRUCT
) {
6584 ReportCompileErrorNumber(cx
, CG_TS(cg
), pn
, JSREPORT_ERROR
, JSMSG_BAD_OBJECT_INIT
);
6589 * Emit code for {p:a, '%q':b, 2:c} that is equivalent to constructing
6590 * a new object and in source order evaluating each property value and
6591 * adding the property to the object, without invoking latent setters.
6592 * We use the JSOP_NEWINIT and JSOP_INITELEM/JSOP_INITPROP bytecodes to
6593 * ignore setters and to avoid dup'ing and popping the object as each
6594 * property is added, as JSOP_SETELEM/JSOP_SETPROP would do.
6596 if (!EmitNewInit(cx
, cg
, JSProto_Object
, pn
, sharpnum
))
6599 uintN methodInits
= 0, slowMethodInits
= 0;
6600 for (pn2
= pn
->pn_head
; pn2
; pn2
= pn2
->pn_next
) {
6601 /* Emit an index for t[2] for later consumption by JSOP_INITELEM. */
6603 if (pn3
->pn_type
== TOK_NUMBER
) {
6604 if (!EmitNumberOp(cx
, pn3
->pn_dval
, cg
))
6608 /* Emit code for the property initializer. */
6609 if (!js_EmitTree(cx
, cg
, pn2
->pn_right
))
6613 if (op
== JSOP_GETTER
|| op
== JSOP_SETTER
) {
6614 if (js_Emit1(cx
, cg
, op
) < 0)
6618 /* Annotate JSOP_INITELEM so we decompile 2:c and not just c. */
6619 if (pn3
->pn_type
== TOK_NUMBER
) {
6620 if (js_NewSrcNote(cx
, cg
, SRC_INITPROP
) < 0)
6622 if (js_Emit1(cx
, cg
, JSOP_INITELEM
) < 0)
6625 JS_ASSERT(pn3
->pn_type
== TOK_NAME
||
6626 pn3
->pn_type
== TOK_STRING
);
6627 ale
= cg
->atomList
.add(cg
->parser
, pn3
->pn_atom
);
6631 /* Check whether we can optimize to JSOP_INITMETHOD. */
6632 JSParseNode
*init
= pn2
->pn_right
;
6633 bool lambda
= PN_OP(init
) == JSOP_LAMBDA
;
6636 if (op
== JSOP_INITPROP
&& lambda
&& init
->pn_funbox
->joinable())
6638 op
= JSOP_INITMETHOD
;
6639 pn2
->pn_op
= uint8(op
);
6646 EMIT_INDEX_OP(op
, ALE_INDEX(ale
));
6650 if (cg
->funbox
&& cg
->funbox
->shouldUnbrand(methodInits
, slowMethodInits
)) {
6651 if (js_Emit1(cx
, cg
, JSOP_UNBRAND
) < 0)
6654 if (!EmitEndInit(cx
, cg
, pn
->pn_count
))
6659 #if JS_HAS_SHARP_VARS
6661 JS_ASSERT(cg
->hasSharps());
6662 sharpnum
= pn
->pn_num
;
6664 if (pn
->pn_type
== TOK_RB
)
6666 # if JS_HAS_GENERATORS
6667 if (pn
->pn_type
== TOK_ARRAYCOMP
)
6670 if (pn
->pn_type
== TOK_RC
)
6671 goto do_emit_object
;
6673 if (!js_EmitTree(cx
, cg
, pn
))
6675 EMIT_UINT16PAIR_IMM_OP(JSOP_DEFSHARP
, cg
->sharpSlotBase
, (jsatomid
) sharpnum
);
6679 JS_ASSERT(cg
->hasSharps());
6680 EMIT_UINT16PAIR_IMM_OP(JSOP_USESHARP
, cg
->sharpSlotBase
, (jsatomid
) pn
->pn_num
);
6682 #endif /* JS_HAS_SHARP_VARS */
6686 * Cope with a left-over function definition that was replaced by a use
6687 * of a later function definition of the same name. See FunctionDef and
6688 * MakeDefIntoUse in jsparse.cpp.
6690 if (pn
->pn_op
== JSOP_NOP
)
6692 if (!EmitNameOp(cx
, cg
, pn
, JS_FALSE
))
6696 #if JS_HAS_XML_SUPPORT
6701 case TOK_XMLCOMMENT
:
6704 ok
= EmitAtomOp(cx
, pn
, PN_OP(pn
), cg
);
6708 ok
= EmitNumberOp(cx
, pn
->pn_dval
, cg
);
6713 * If the regexp's script is one-shot and the regexp is not used in a
6714 * loop, we can avoid the extra fork-on-exec costs of JSOP_REGEXP by
6715 * selecting JSOP_OBJECT. Otherwise, to avoid incorrect proto, parent,
6716 * and lastIndex sharing, select JSOP_REGEXP.
6718 JS_ASSERT(pn
->pn_op
== JSOP_REGEXP
);
6719 bool singleton
= !cg
->fun
&& cg
->compileAndGo();
6721 for (JSStmtInfo
*stmt
= cg
->topStmt
; stmt
; stmt
= stmt
->down
) {
6722 if (STMT_IS_LOOP(stmt
)) {
6729 ok
= EmitObjectOp(cx
, pn
->pn_objbox
, JSOP_OBJECT
, cg
);
6731 ok
= EmitIndexOp(cx
, JSOP_REGEXP
,
6732 cg
->regexpList
.index(pn
->pn_objbox
),
6738 #if JS_HAS_XML_SUPPORT
6742 if (js_Emit1(cx
, cg
, PN_OP(pn
)) < 0)
6746 #if JS_HAS_DEBUGGER_KEYWORD
6748 if (js_Emit1(cx
, cg
, JSOP_DEBUGGER
) < 0)
6751 #endif /* JS_HAS_DEBUGGER_KEYWORD */
6753 #if JS_HAS_XML_SUPPORT
6756 JS_ASSERT(PN_TYPE(pn
) == TOK_XMLLIST
|| pn
->pn_count
!= 0);
6757 switch (pn
->pn_head
? PN_TYPE(pn
->pn_head
) : TOK_XMLLIST
) {
6765 if (js_Emit1(cx
, cg
, JSOP_STARTXML
) < 0)
6769 for (pn2
= pn
->pn_head
; pn2
; pn2
= pn2
->pn_next
) {
6770 if (pn2
->pn_type
== TOK_LC
&&
6771 js_Emit1(cx
, cg
, JSOP_STARTXMLEXPR
) < 0) {
6774 if (!js_EmitTree(cx
, cg
, pn2
))
6776 if (pn2
!= pn
->pn_head
&& js_Emit1(cx
, cg
, JSOP_ADD
) < 0)
6780 if (pn
->pn_xflags
& PNX_XMLROOT
) {
6781 if (pn
->pn_count
== 0) {
6782 JS_ASSERT(pn
->pn_type
== TOK_XMLLIST
);
6783 atom
= cx
->runtime
->atomState
.emptyAtom
;
6784 ale
= cg
->atomList
.add(cg
->parser
, atom
);
6787 EMIT_INDEX_OP(JSOP_STRING
, ALE_INDEX(ale
));
6789 if (js_Emit1(cx
, cg
, PN_OP(pn
)) < 0)
6794 JS_ASSERT(pn
->pn_count
!= 0);
6804 if (js_Emit1(cx
, cg
, JSOP_STARTXML
) < 0)
6807 ale
= cg
->atomList
.add(cg
->parser
,
6808 (pn
->pn_type
== TOK_XMLETAGO
)
6809 ? cx
->runtime
->atomState
.etagoAtom
6810 : cx
->runtime
->atomState
.stagoAtom
);
6813 EMIT_INDEX_OP(JSOP_STRING
, ALE_INDEX(ale
));
6815 JS_ASSERT(pn
->pn_count
!= 0);
6817 if (pn2
->pn_type
== TOK_LC
&& js_Emit1(cx
, cg
, JSOP_STARTXMLEXPR
) < 0)
6819 if (!js_EmitTree(cx
, cg
, pn2
))
6821 if (js_Emit1(cx
, cg
, JSOP_ADD
) < 0)
6824 for (pn2
= pn2
->pn_next
, i
= 0; pn2
; pn2
= pn2
->pn_next
, i
++) {
6825 if (pn2
->pn_type
== TOK_LC
&&
6826 js_Emit1(cx
, cg
, JSOP_STARTXMLEXPR
) < 0) {
6829 if (!js_EmitTree(cx
, cg
, pn2
))
6831 if ((i
& 1) && pn2
->pn_type
== TOK_LC
) {
6832 if (js_Emit1(cx
, cg
, JSOP_TOATTRVAL
) < 0)
6835 if (js_Emit1(cx
, cg
,
6836 (i
& 1) ? JSOP_ADDATTRVAL
: JSOP_ADDATTRNAME
) < 0) {
6841 ale
= cg
->atomList
.add(cg
->parser
,
6842 (pn
->pn_type
== TOK_XMLPTAGC
)
6843 ? cx
->runtime
->atomState
.ptagcAtom
6844 : cx
->runtime
->atomState
.tagcAtom
);
6847 EMIT_INDEX_OP(JSOP_STRING
, ALE_INDEX(ale
));
6848 if (js_Emit1(cx
, cg
, JSOP_ADD
) < 0)
6851 if ((pn
->pn_xflags
& PNX_XMLROOT
) && js_Emit1(cx
, cg
, PN_OP(pn
)) < 0)
6857 if (pn
->pn_arity
== PN_LIST
) {
6858 JS_ASSERT(pn
->pn_count
!= 0);
6859 for (pn2
= pn
->pn_head
; pn2
; pn2
= pn2
->pn_next
) {
6860 if (pn2
->pn_type
== TOK_LC
&&
6861 js_Emit1(cx
, cg
, JSOP_STARTXMLEXPR
) < 0) {
6864 if (!js_EmitTree(cx
, cg
, pn2
))
6866 if (pn2
!= pn
->pn_head
&& js_Emit1(cx
, cg
, JSOP_ADD
) < 0)
6870 JS_ASSERT(pn
->pn_arity
== PN_NULLARY
);
6871 ok
= (pn
->pn_op
== JSOP_OBJECT
)
6872 ? EmitObjectOp(cx
, pn
->pn_objbox
, PN_OP(pn
), cg
)
6873 : EmitAtomOp(cx
, pn
, PN_OP(pn
), cg
);
6878 ale
= cg
->atomList
.add(cg
->parser
, pn
->pn_atom2
);
6881 if (!EmitIndexOp(cx
, JSOP_QNAMEPART
, ALE_INDEX(ale
), cg
))
6883 if (!EmitAtomOp(cx
, pn
, JSOP_XMLPI
, cg
))
6886 #endif /* JS_HAS_XML_SUPPORT */
6892 if (ok
&& --cg
->emitLevel
== 0) {
6894 ok
= OptimizeSpanDeps(cx
, cg
);
6895 if (!UpdateLineNumberNotes(cx
, cg
, pn
->pn_pos
.end
.lineno
))
6903 * We should try to get rid of offsetBias (always 0 or 1, where 1 is
6904 * JSOP_{NOP,POP}_LENGTH), which is used only by SRC_FOR and SRC_DECL.
6906 JS_FRIEND_DATA(JSSrcNoteSpec
) js_SrcNoteSpec
[] = {
6909 {"if-else", 2, 0, 1},
6912 {"continue", 0, 0, 0},
6914 {"pcdelta", 1, 0, 1},
6915 {"assignop", 0, 0, 0},
6918 {"hidden", 0, 0, 0},
6919 {"pcbase", 1, 0, -1},
6921 {"labelbrace", 1, 0, 0},
6922 {"endbrace", 0, 0, 0},
6923 {"break2label", 1, 0, 0},
6924 {"cont2label", 1, 0, 0},
6925 {"switch", 2, 0, 1},
6926 {"funcdef", 1, 0, 0},
6928 {"extended", -1, 0, 0},
6929 {"newline", 0, 0, 0},
6930 {"setline", 1, 0, 0},
6931 {"xdelta", 0, 0, 0},
6935 AllocSrcNote(JSContext
*cx
, JSCodeGenerator
*cg
)
6941 index
= CG_NOTE_COUNT(cg
);
6942 if (((uintN
)index
& CG_NOTE_MASK(cg
)) == 0) {
6943 pool
= cg
->notePool
;
6944 size
= SRCNOTE_SIZE(CG_NOTE_MASK(cg
) + 1);
6945 if (!CG_NOTES(cg
)) {
6946 /* Allocate the first note array lazily; leave noteMask alone. */
6947 JS_ARENA_ALLOCATE_CAST(CG_NOTES(cg
), jssrcnote
*, pool
, size
);
6949 /* Grow by doubling note array size; update noteMask on success. */
6950 JS_ARENA_GROW_CAST(CG_NOTES(cg
), jssrcnote
*, pool
, size
, size
);
6952 CG_NOTE_MASK(cg
) = (CG_NOTE_MASK(cg
) << 1) | 1;
6954 if (!CG_NOTES(cg
)) {
6955 js_ReportOutOfScriptQuota(cx
);
6960 CG_NOTE_COUNT(cg
) = index
+ 1;
6965 js_NewSrcNote(JSContext
*cx
, JSCodeGenerator
*cg
, JSSrcNoteType type
)
6969 ptrdiff_t offset
, delta
, xdelta
;
6972 * Claim a note slot in CG_NOTES(cg) by growing it if necessary and then
6973 * incrementing CG_NOTE_COUNT(cg).
6975 index
= AllocSrcNote(cx
, cg
);
6978 sn
= &CG_NOTES(cg
)[index
];
6981 * Compute delta from the last annotated bytecode's offset. If it's too
6982 * big to fit in sn, allocate one or more xdelta notes and reset sn.
6984 offset
= CG_OFFSET(cg
);
6985 delta
= offset
- CG_LAST_NOTE_OFFSET(cg
);
6986 CG_LAST_NOTE_OFFSET(cg
) = offset
;
6987 if (delta
>= SN_DELTA_LIMIT
) {
6989 xdelta
= JS_MIN(delta
, SN_XDELTA_MASK
);
6990 SN_MAKE_XDELTA(sn
, xdelta
);
6992 index
= AllocSrcNote(cx
, cg
);
6995 sn
= &CG_NOTES(cg
)[index
];
6996 } while (delta
>= SN_DELTA_LIMIT
);
7000 * Initialize type and delta, then allocate the minimum number of notes
7001 * needed for type's arity. Usually, we won't need more, but if an offset
7002 * does take two bytes, js_SetSrcNoteOffset will grow CG_NOTES(cg).
7004 SN_MAKE_NOTE(sn
, type
, delta
);
7005 for (n
= (intN
)js_SrcNoteSpec
[type
].arity
; n
> 0; n
--) {
7006 if (js_NewSrcNote(cx
, cg
, SRC_NULL
) < 0)
7013 js_NewSrcNote2(JSContext
*cx
, JSCodeGenerator
*cg
, JSSrcNoteType type
,
7018 index
= js_NewSrcNote(cx
, cg
, type
);
7020 if (!js_SetSrcNoteOffset(cx
, cg
, index
, 0, offset
))
7027 js_NewSrcNote3(JSContext
*cx
, JSCodeGenerator
*cg
, JSSrcNoteType type
,
7028 ptrdiff_t offset1
, ptrdiff_t offset2
)
7032 index
= js_NewSrcNote(cx
, cg
, type
);
7034 if (!js_SetSrcNoteOffset(cx
, cg
, index
, 0, offset1
))
7036 if (!js_SetSrcNoteOffset(cx
, cg
, index
, 1, offset2
))
7043 GrowSrcNotes(JSContext
*cx
, JSCodeGenerator
*cg
)
7048 /* Grow by doubling note array size; update noteMask on success. */
7049 pool
= cg
->notePool
;
7050 size
= SRCNOTE_SIZE(CG_NOTE_MASK(cg
) + 1);
7051 JS_ARENA_GROW_CAST(CG_NOTES(cg
), jssrcnote
*, pool
, size
, size
);
7052 if (!CG_NOTES(cg
)) {
7053 js_ReportOutOfScriptQuota(cx
);
7056 CG_NOTE_MASK(cg
) = (CG_NOTE_MASK(cg
) << 1) | 1;
7061 js_AddToSrcNoteDelta(JSContext
*cx
, JSCodeGenerator
*cg
, jssrcnote
*sn
,
7064 ptrdiff_t base
, limit
, newdelta
, diff
;
7068 * Called only from OptimizeSpanDeps and js_FinishTakingSrcNotes to add to
7069 * main script note deltas, and only by a small positive amount.
7071 JS_ASSERT(cg
->current
== &cg
->main
);
7072 JS_ASSERT((unsigned) delta
< (unsigned) SN_XDELTA_LIMIT
);
7074 base
= SN_DELTA(sn
);
7075 limit
= SN_IS_XDELTA(sn
) ? SN_XDELTA_LIMIT
: SN_DELTA_LIMIT
;
7076 newdelta
= base
+ delta
;
7077 if (newdelta
< limit
) {
7078 SN_SET_DELTA(sn
, newdelta
);
7080 index
= sn
- cg
->main
.notes
;
7081 if ((cg
->main
.noteCount
& cg
->main
.noteMask
) == 0) {
7082 if (!GrowSrcNotes(cx
, cg
))
7084 sn
= cg
->main
.notes
+ index
;
7086 diff
= cg
->main
.noteCount
- index
;
7087 cg
->main
.noteCount
++;
7088 memmove(sn
+ 1, sn
, SRCNOTE_SIZE(diff
));
7089 SN_MAKE_XDELTA(sn
, delta
);
7095 JS_FRIEND_API(uintN
)
7096 js_SrcNoteLength(jssrcnote
*sn
)
7101 arity
= (intN
)js_SrcNoteSpec
[SN_TYPE(sn
)].arity
;
7102 for (base
= sn
++; arity
; sn
++, arity
--) {
7103 if (*sn
& SN_3BYTE_OFFSET_FLAG
)
7109 JS_FRIEND_API(ptrdiff_t)
7110 js_GetSrcNoteOffset(jssrcnote
*sn
, uintN which
)
7112 /* Find the offset numbered which (i.e., skip exactly which offsets). */
7113 JS_ASSERT(SN_TYPE(sn
) != SRC_XDELTA
);
7114 JS_ASSERT((intN
) which
< js_SrcNoteSpec
[SN_TYPE(sn
)].arity
);
7115 for (sn
++; which
; sn
++, which
--) {
7116 if (*sn
& SN_3BYTE_OFFSET_FLAG
)
7119 if (*sn
& SN_3BYTE_OFFSET_FLAG
) {
7120 return (ptrdiff_t)(((uint32
)(sn
[0] & SN_3BYTE_OFFSET_MASK
) << 16)
7124 return (ptrdiff_t)*sn
;
7128 js_SetSrcNoteOffset(JSContext
*cx
, JSCodeGenerator
*cg
, uintN index
,
7129 uintN which
, ptrdiff_t offset
)
7134 if ((jsuword
)offset
>= (jsuword
)((ptrdiff_t)SN_3BYTE_OFFSET_FLAG
<< 16)) {
7135 ReportStatementTooLarge(cx
, cg
);
7139 /* Find the offset numbered which (i.e., skip exactly which offsets). */
7140 sn
= &CG_NOTES(cg
)[index
];
7141 JS_ASSERT(SN_TYPE(sn
) != SRC_XDELTA
);
7142 JS_ASSERT((intN
) which
< js_SrcNoteSpec
[SN_TYPE(sn
)].arity
);
7143 for (sn
++; which
; sn
++, which
--) {
7144 if (*sn
& SN_3BYTE_OFFSET_FLAG
)
7148 /* See if the new offset requires three bytes. */
7149 if (offset
> (ptrdiff_t)SN_3BYTE_OFFSET_MASK
) {
7150 /* Maybe this offset was already set to a three-byte value. */
7151 if (!(*sn
& SN_3BYTE_OFFSET_FLAG
)) {
7152 /* Losing, need to insert another two bytes for this offset. */
7153 index
= sn
- CG_NOTES(cg
);
7156 * Simultaneously test to see if the source note array must grow to
7157 * accommodate either the first or second byte of additional storage
7158 * required by this 3-byte offset.
7160 if (((CG_NOTE_COUNT(cg
) + 1) & CG_NOTE_MASK(cg
)) <= 1) {
7161 if (!GrowSrcNotes(cx
, cg
))
7163 sn
= CG_NOTES(cg
) + index
;
7165 CG_NOTE_COUNT(cg
) += 2;
7167 diff
= CG_NOTE_COUNT(cg
) - (index
+ 3);
7168 JS_ASSERT(diff
>= 0);
7170 memmove(sn
+ 3, sn
+ 1, SRCNOTE_SIZE(diff
));
7172 *sn
++ = (jssrcnote
)(SN_3BYTE_OFFSET_FLAG
| (offset
>> 16));
7173 *sn
++ = (jssrcnote
)(offset
>> 8);
7175 *sn
= (jssrcnote
)offset
;
#ifdef DEBUG_notme
#define DEBUG_srcnotesize
#endif

#ifdef DEBUG_srcnotesize
#define NBINS 10
/* Histogram of final srcnote-vector sizes, bucketed by power of two. */
static uint32 hist[NBINS];

/*
 * Debug-only: append the srcnote size histogram to /tmp/srcnotes.hist.  The
 * FILE* is opened once, unbuffered, and deliberately left open for the life
 * of the process.
 */
void DumpSrcNoteSizeHist()
{
    static FILE *fp;
    int i, n;

    if (!fp) {
        fp = fopen("/tmp/srcnotes.hist", "w");
        if (!fp)
            return;
        setvbuf(fp, NULL, _IONBF, 0);
    }
    fprintf(fp, "SrcNote size histogram:\n");
    for (i = 0; i < NBINS; i++) {
        fprintf(fp, "%4u %4u ", JS_BIT(i), hist[i]);
        /* One star per ten hits, rounded up. */
        for (n = (int) JS_HOWMANY(hist[i], 10); n > 0; --n)
            fputc('*', fp);
        fputc('\n', fp);
    }
    fputc('\n', fp);
}
#endif
7210 * Fill in the storage at notes with prolog and main srcnotes; the space at
7211 * notes was allocated using the CG_COUNT_FINAL_SRCNOTES macro from jsemit.h.
7212 * SO DON'T CHANGE THIS FUNCTION WITHOUT AT LEAST CHECKING WHETHER jsemit.h's
7213 * CG_COUNT_FINAL_SRCNOTES MACRO NEEDS CORRESPONDING CHANGES!
7216 js_FinishTakingSrcNotes(JSContext
*cx
, JSCodeGenerator
*cg
, jssrcnote
*notes
)
7218 uintN prologCount
, mainCount
, totalCount
;
7219 ptrdiff_t offset
, delta
;
7222 JS_ASSERT(cg
->current
== &cg
->main
);
7224 prologCount
= cg
->prolog
.noteCount
;
7225 if (prologCount
&& cg
->prolog
.currentLine
!= cg
->firstLine
) {
7226 CG_SWITCH_TO_PROLOG(cg
);
7227 if (js_NewSrcNote2(cx
, cg
, SRC_SETLINE
, (ptrdiff_t)cg
->firstLine
) < 0)
7229 prologCount
= cg
->prolog
.noteCount
;
7230 CG_SWITCH_TO_MAIN(cg
);
7233 * Either no prolog srcnotes, or no line number change over prolog.
7234 * We don't need a SRC_SETLINE, but we may need to adjust the offset
7235 * of the first main note, by adding to its delta and possibly even
7236 * prepending SRC_XDELTA notes to it to account for prolog bytecodes
7237 * that came at and after the last annotated bytecode.
7239 offset
= CG_PROLOG_OFFSET(cg
) - cg
->prolog
.lastNoteOffset
;
7240 JS_ASSERT(offset
>= 0);
7241 if (offset
> 0 && cg
->main
.noteCount
!= 0) {
7242 /* NB: Use as much of the first main note's delta as we can. */
7243 sn
= cg
->main
.notes
;
7244 delta
= SN_IS_XDELTA(sn
)
7245 ? SN_XDELTA_MASK
- (*sn
& SN_XDELTA_MASK
)
7246 : SN_DELTA_MASK
- (*sn
& SN_DELTA_MASK
);
7250 if (!js_AddToSrcNoteDelta(cx
, cg
, sn
, delta
))
7255 delta
= JS_MIN(offset
, SN_XDELTA_MASK
);
7256 sn
= cg
->main
.notes
;
7261 mainCount
= cg
->main
.noteCount
;
7262 totalCount
= prologCount
+ mainCount
;
7264 memcpy(notes
, cg
->prolog
.notes
, SRCNOTE_SIZE(prologCount
));
7265 memcpy(notes
+ prologCount
, cg
->main
.notes
, SRCNOTE_SIZE(mainCount
));
7266 SN_MAKE_TERMINATOR(¬es
[totalCount
]);
7269 { int bin
= JS_CeilingLog2(totalCount
);
7279 NewTryNote(JSContext
*cx
, JSCodeGenerator
*cg
, JSTryNoteKind kind
,
7280 uintN stackDepth
, size_t start
, size_t end
)
7284 JS_ASSERT((uintN
)(uint16
)stackDepth
== stackDepth
);
7285 JS_ASSERT(start
<= end
);
7286 JS_ASSERT((size_t)(uint32
)start
== start
);
7287 JS_ASSERT((size_t)(uint32
)end
== end
);
7289 JS_ARENA_ALLOCATE_TYPE(tryNode
, JSTryNode
, &cx
->tempPool
);
7291 js_ReportOutOfScriptQuota(cx
);
7295 tryNode
->note
.kind
= kind
;
7296 tryNode
->note
.stackDepth
= (uint16
)stackDepth
;
7297 tryNode
->note
.start
= (uint32
)start
;
7298 tryNode
->note
.length
= (uint32
)(end
- start
);
7299 tryNode
->prev
= cg
->lastTryNode
;
7300 cg
->lastTryNode
= tryNode
;
7306 js_FinishTakingTryNotes(JSCodeGenerator
*cg
, JSTryNoteArray
*array
)
7311 JS_ASSERT(array
->length
> 0 && array
->length
== cg
->ntrynotes
);
7312 tn
= array
->vector
+ array
->length
;
7313 tryNode
= cg
->lastTryNode
;
7315 *--tn
= tryNode
->note
;
7316 } while ((tryNode
= tryNode
->prev
) != NULL
);
7317 JS_ASSERT(tn
== array
->vector
);
7321 * Find the index of the given object for code generator.
 * Since the emitter refers to each parsed object only once, for the index we
 * use the number of already indexed objects. We also add the object to a list
 * to convert the list to a fixed-size array when we complete code generation;
 * see JSCGObjectList::finish below.
7328 * Most of the objects go to JSCodeGenerator.objectList but for regexp we use a
7329 * separated JSCodeGenerator.regexpList. In this way the emitted index can be
7330 * directly used to store and fetch a reference to a cloned RegExp object that
7331 * shares the same JSRegExp private data created for the object literal in
7332 * objbox. We need a cloned object to hold lastIndex and other direct properties
7333 * that should not be shared among threads sharing a precompiled function or
7336 * If the code being compiled is function code, allocate a reserved slot in
7337 * the cloned function object that shares its precompiled script with other
7338 * cloned function objects and with the compiler-created clone-parent. There
7339 * are nregexps = script->regexps()->length such reserved slots in each
7340 * function object cloned from fun->object. NB: during compilation, a funobj
7341 * slots element must never be allocated, because js_AllocSlot could hand out
7342 * one of the slots that should be given to a regexp clone.
7344 * If the code being compiled is global code, the cloned regexp are stored in
7345 * fp->vars slot after cg->ngvars and to protect regexp slots from GC we set
7346 * fp->nvars to ngvars + nregexps.
7348 * The slots initially contain undefined or null. We populate them lazily when
7349 * JSOP_REGEXP is executed for the first time.
7351 * Why clone regexp objects? ECMA specifies that when a regular expression
7352 * literal is scanned, a RegExp object is created. In the spec, compilation
7353 * and execution happen indivisibly, but in this implementation and many of
7354 * its embeddings, code is precompiled early and re-executed in multiple
7355 * threads, or using multiple global objects, or both, for efficiency.
7357 * In such cases, naively following ECMA leads to wrongful sharing of RegExp
7358 * objects, which makes for collisions on the lastIndex property (especially
7359 * for global regexps) and on any ad-hoc properties. Also, __proto__ refers to
7360 * the pre-compilation prototype, a pigeon-hole problem for instanceof tests.
7363 JSCGObjectList::index(JSObjectBox
*objbox
)
7365 JS_ASSERT(!objbox
->emitLink
);
7366 objbox
->emitLink
= lastbox
;
7372 JSCGObjectList::finish(JSObjectArray
*array
)
7375 JSObjectBox
*objbox
;
7377 JS_ASSERT(length
<= INDEX_LIMIT
);
7378 JS_ASSERT(length
== array
->length
);
7380 cursor
= array
->vector
+ array
->length
;
7384 JS_ASSERT(!*cursor
);
7385 *cursor
= objbox
->object
;
7386 } while ((objbox
= objbox
->emitLink
) != NULL
);
7387 JS_ASSERT(cursor
== array
->vector
);
7391 JSGCConstList::finish(JSConstArray
*array
)
7393 JS_ASSERT(array
->length
== list
.length());
7394 Value
*src
= list
.begin(), *srcend
= list
.end();
7395 Value
*dst
= array
->vector
;
7396 for (; src
!= srcend
; ++src
, ++dst
)