1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=8 sw=4 et tw=99:
4 * ***** BEGIN LICENSE BLOCK *****
5 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
7 * The contents of this file are subject to the Mozilla Public License Version
8 * 1.1 (the "License"); you may not use this file except in compliance with
9 * the License. You may obtain a copy of the License at
10 * http://www.mozilla.org/MPL/
12 * Software distributed under the License is distributed on an "AS IS" basis,
13 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 * for the specific language governing rights and limitations under the
17 * The Original Code is Mozilla Communicator client code, released
20 * The Initial Developer of the Original Code is
21 * Netscape Communications Corporation.
22 * Portions created by the Initial Developer are Copyright (C) 1998
23 * the Initial Developer. All Rights Reserved.
27 * Alternatively, the contents of this file may be used under the terms of
28 * either of the GNU General Public License Version 2 or later (the "GPL"),
29 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
30 * in which case the provisions of the GPL or the LGPL are applicable instead
31 * of those above. If you wish to allow use of your version of this file only
32 * under the terms of either the GPL or the LGPL, and not to allow others to
33 * use your version of this file under the terms of the MPL, indicate your
34 * decision by deleting the provisions above and replace them with the notice
35 * and other provisions required by the GPL or the LGPL. If you do not delete
36 * the provisions above, a recipient may use your version of this file under
37 * the terms of any one of the MPL, the GPL or the LGPL.
39 * ***** END LICENSE BLOCK ***** */
42 * JS bytecode generation.
51 #include "jsarena.h" /* Added by JSIFY */
52 #include "jsutil.h" /* Added by JSIFY */
59 #include "jsversion.h"
69 #include "jsautooplen.h"
70 #include "jsstaticcheck.h"
72 /* Allocation chunk counts, must be powers of two in general. */
73 #define BYTECODE_CHUNK 256 /* code allocation increment */
74 #define SRCNOTE_CHUNK 64 /* initial srcnote allocation increment */
75 #define TRYNOTE_CHUNK 64 /* trynote allocation increment */
77 /* Macros to compute byte sizes from typed element counts. */
78 #define BYTECODE_SIZE(n) ((n) * sizeof(jsbytecode))
79 #define SRCNOTE_SIZE(n) ((n) * sizeof(jssrcnote))
80 #define TRYNOTE_SIZE(n) ((n) * sizeof(JSTryNote))
83 NewTryNote(JSContext
*cx
, JSCodeGenerator
*cg
, JSTryNoteKind kind
,
84 uintN stackDepth
, size_t start
, size_t end
);
86 JSCodeGenerator::JSCodeGenerator(JSCompiler
*jsc
,
87 JSArenaPool
*cpool
, JSArenaPool
*npool
,
90 codePool(cpool
), notePool(npool
),
91 codeMark(JS_ARENA_MARK(cpool
)), noteMark(JS_ARENA_MARK(npool
)),
92 stackDepth(0), maxStackDepth(0),
93 ntrynotes(0), lastTryNode(NULL
),
94 spanDeps(NULL
), jumpTargets(NULL
), jtFreeList(NULL
),
95 numSpanDeps(0), numJumpTargets(0), spanDepTodo(0),
99 flags
= TCF_COMPILING
;
100 memset(&prolog
, 0, sizeof prolog
);
101 memset(&main
, 0, sizeof main
);
103 firstLine
= prolog
.currentLine
= main
.currentLine
= lineno
;
104 prolog
.noteMask
= main
.noteMask
= SRCNOTE_CHUNK
- 1;
105 memset(&upvarMap
, 0, sizeof upvarMap
);
108 JSCodeGenerator::~JSCodeGenerator()
110 JS_ARENA_RELEASE(codePool
, codeMark
);
111 JS_ARENA_RELEASE(notePool
, noteMark
);
113 /* NB: non-null only after OOM. */
115 compiler
->context
->free(spanDeps
);
118 compiler
->context
->free(upvarMap
.vector
);
122 EmitCheck(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp op
, ptrdiff_t delta
)
124 jsbytecode
*base
, *limit
, *next
;
125 ptrdiff_t offset
, length
;
130 limit
= CG_LIMIT(cg
);
131 offset
= next
- base
;
132 if (next
+ delta
> limit
) {
133 length
= offset
+ delta
;
134 length
= (length
<= BYTECODE_CHUNK
)
136 : JS_BIT(JS_CeilingLog2(length
));
137 incr
= BYTECODE_SIZE(length
);
139 JS_ARENA_ALLOCATE_CAST(base
, jsbytecode
*, cg
->codePool
, incr
);
141 size
= BYTECODE_SIZE(limit
- base
);
143 JS_ARENA_GROW_CAST(base
, jsbytecode
*, cg
->codePool
, size
, incr
);
146 js_ReportOutOfScriptQuota(cx
);
150 CG_LIMIT(cg
) = base
+ length
;
151 CG_NEXT(cg
) = base
+ offset
;
157 UpdateDepth(JSContext
*cx
, JSCodeGenerator
*cg
, ptrdiff_t target
)
161 const JSCodeSpec
*cs
;
162 uintN extra
, depth
, nuses
;
165 pc
= CG_CODE(cg
, target
);
167 cs
= &js_CodeSpec
[op
];
169 extern uint8 js_opcode2extra
[];
170 extra
= js_opcode2extra
[op
];
174 if ((cs
->format
& JOF_TMPSLOT_MASK
) || extra
) {
175 depth
= (uintN
) cg
->stackDepth
+
176 ((cs
->format
& JOF_TMPSLOT_MASK
) >> JOF_TMPSLOT_SHIFT
) +
178 if (depth
> cg
->maxStackDepth
)
179 cg
->maxStackDepth
= depth
;
182 nuses
= js_GetStackUses(cs
, op
, pc
);
183 cg
->stackDepth
-= nuses
;
184 JS_ASSERT(cg
->stackDepth
>= 0);
185 if (cg
->stackDepth
< 0) {
189 JS_snprintf(numBuf
, sizeof numBuf
, "%d", target
);
190 ts
= &cg
->compiler
->tokenStream
;
191 JS_ReportErrorFlagsAndNumber(cx
, JSREPORT_WARNING
,
192 js_GetErrorMessage
, NULL
,
193 JSMSG_STACK_UNDERFLOW
,
194 ts
->filename
? ts
->filename
: "stdin",
201 /* We just executed IndexParsedObject */
202 JS_ASSERT(op
== JSOP_ENTERBLOCK
);
203 JS_ASSERT(nuses
== 0);
204 blockObj
= cg
->objectList
.lastbox
->object
;
205 JS_ASSERT(STOBJ_GET_CLASS(blockObj
) == &js_BlockClass
);
206 JS_ASSERT(JSVAL_IS_VOID(blockObj
->fslots
[JSSLOT_BLOCK_DEPTH
]));
208 OBJ_SET_BLOCK_DEPTH(cx
, blockObj
, cg
->stackDepth
);
209 ndefs
= OBJ_BLOCK_COUNT(cx
, blockObj
);
211 cg
->stackDepth
+= ndefs
;
212 if ((uintN
)cg
->stackDepth
> cg
->maxStackDepth
)
213 cg
->maxStackDepth
= cg
->stackDepth
;
217 js_Emit1(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp op
)
219 ptrdiff_t offset
= EmitCheck(cx
, cg
, op
, 1);
222 *CG_NEXT(cg
)++ = (jsbytecode
)op
;
223 UpdateDepth(cx
, cg
, offset
);
229 js_Emit2(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp op
, jsbytecode op1
)
231 ptrdiff_t offset
= EmitCheck(cx
, cg
, op
, 2);
234 jsbytecode
*next
= CG_NEXT(cg
);
235 next
[0] = (jsbytecode
)op
;
237 CG_NEXT(cg
) = next
+ 2;
238 UpdateDepth(cx
, cg
, offset
);
244 js_Emit3(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp op
, jsbytecode op1
,
247 ptrdiff_t offset
= EmitCheck(cx
, cg
, op
, 3);
250 jsbytecode
*next
= CG_NEXT(cg
);
251 next
[0] = (jsbytecode
)op
;
254 CG_NEXT(cg
) = next
+ 3;
255 UpdateDepth(cx
, cg
, offset
);
261 js_EmitN(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp op
, size_t extra
)
263 ptrdiff_t length
= 1 + (ptrdiff_t)extra
;
264 ptrdiff_t offset
= EmitCheck(cx
, cg
, op
, length
);
267 jsbytecode
*next
= CG_NEXT(cg
);
268 *next
= (jsbytecode
)op
;
269 memset(next
+ 1, 0, BYTECODE_SIZE(extra
));
270 CG_NEXT(cg
) = next
+ length
;
273 * Don't UpdateDepth if op's use-count comes from the immediate
274 * operand yet to be stored in the extra bytes after op.
276 if (js_CodeSpec
[op
].nuses
>= 0)
277 UpdateDepth(cx
, cg
, offset
);
282 /* XXX too many "... statement" L10N gaffes below -- fix via js.msg! */
283 const char js_with_statement_str
[] = "with statement";
284 const char js_finally_block_str
[] = "finally block";
285 const char js_script_str
[] = "script";
287 static const char *statementName
[] = {
288 "label statement", /* LABEL */
289 "if statement", /* IF */
290 "else statement", /* ELSE */
291 "destructuring body", /* BODY */
292 "switch statement", /* SWITCH */
294 js_with_statement_str
, /* WITH */
295 "catch block", /* CATCH */
296 "try block", /* TRY */
297 js_finally_block_str
, /* FINALLY */
298 js_finally_block_str
, /* SUBROUTINE */
299 "do loop", /* DO_LOOP */
300 "for loop", /* FOR_LOOP */
301 "for/in loop", /* FOR_IN_LOOP */
302 "while loop", /* WHILE_LOOP */
305 JS_STATIC_ASSERT(JS_ARRAY_LENGTH(statementName
) == STMT_LIMIT
);
308 StatementName(JSCodeGenerator
*cg
)
311 return js_script_str
;
312 return statementName
[cg
->topStmt
->type
];
316 ReportStatementTooLarge(JSContext
*cx
, JSCodeGenerator
*cg
)
318 JS_ReportErrorNumber(cx
, js_GetErrorMessage
, NULL
, JSMSG_NEED_DIET
,
323 Span-dependent instructions in JS bytecode consist of the jump (JOF_JUMP)
324 and switch (JOF_LOOKUPSWITCH, JOF_TABLESWITCH) format opcodes, subdivided
325 into unconditional (gotos and gosubs), and conditional jumps or branches
326 (which pop a value, test it, and jump depending on its value). Most jumps
327 have just one immediate operand, a signed offset from the jump opcode's pc
328 to the target bytecode. The lookup and table switch opcodes may contain
331 Mozilla bug #80981 (http://bugzilla.mozilla.org/show_bug.cgi?id=80981) was
332 fixed by adding extended "X" counterparts to the opcodes/formats (NB: X is
333 suffixed to prefer JSOP_ORX thereby avoiding a JSOP_XOR name collision for
334 the extended form of the JSOP_OR branch opcode). The unextended or short
335 formats have 16-bit signed immediate offset operands, the extended or long
336 formats have 32-bit signed immediates. The span-dependency problem consists
337 of selecting as few long instructions as possible, or about as few -- since
338 jumps can span other jumps, extending one jump may cause another to need to
341 Most JS scripts are short, so need no extended jumps. We optimize for this
342 case by generating short jumps until we know a long jump is needed. After
343 that point, we keep generating short jumps, but each jump's 16-bit immediate
344 offset operand is actually an unsigned index into cg->spanDeps, an array of
345 JSSpanDep structs. Each struct tells the top offset in the script of the
346 opcode, the "before" offset of the jump (which will be the same as top for
347 simplex jumps, but which will index further into the bytecode array for a
348 non-initial jump offset in a lookup or table switch), the after "offset"
349 adjusted during span-dependent instruction selection (initially the same
350 value as the "before" offset), and the jump target (more below).
352 Since we generate cg->spanDeps lazily, from within js_SetJumpOffset, we must
353 ensure that all bytecode generated so far can be inspected to discover where
354 the jump offset immediate operands lie within CG_CODE(cg). But the bonus is
355 that we generate span-dependency records sorted by their offsets, so we can
356 binary-search when trying to find a JSSpanDep for a given bytecode offset,
357 or the nearest JSSpanDep at or above a given pc.
359 To avoid limiting scripts to 64K jumps, if the cg->spanDeps index overflows
360 65534, we store SPANDEP_INDEX_HUGE in the jump's immediate operand. This
361 tells us that we need to binary-search for the cg->spanDeps entry by the
362 jump opcode's bytecode offset (sd->before).
364 Jump targets need to be maintained in a data structure that lets us look
365 up an already-known target by its address (jumps may have a common target),
366 and that also lets us update the addresses (script-relative, a.k.a. absolute
367 offsets) of targets that come after a jump target (for when a jump below
368 that target needs to be extended). We use an AVL tree, implemented using
369 recursion, but with some tricky optimizations to its height-balancing code
370 (see http://www.cmcrossroads.com/bradapp/ftp/src/libs/C++/AvlTrees.html).
372 A final wrinkle: backpatch chains are linked by jump-to-jump offsets with
373 positive sign, even though they link "backward" (i.e., toward lower bytecode
374 address). We don't want to waste space and search time in the AVL tree for
375 such temporary backpatch deltas, so we use a single-bit wildcard scheme to
376 tag true JSJumpTarget pointers and encode untagged, signed (positive) deltas
377 in JSSpanDep.target pointers, depending on whether the JSSpanDep has a known
378 target, or is still awaiting backpatching.
380 Note that backpatch chains would present a problem for BuildSpanDepTable,
381 which inspects bytecode to build cg->spanDeps on demand, when the first
382 short jump offset overflows. To solve this temporary problem, we emit a
383 proxy bytecode (JSOP_BACKPATCH; JSOP_BACKPATCH_POP for branch ops) whose
384 nuses/ndefs counts help keep the stack balanced, but whose opcode format
385 distinguishes its backpatch delta immediate operand from a normal jump
389 BalanceJumpTargets(JSJumpTarget
**jtp
)
391 JSJumpTarget
*jt
, *jt2
, *root
;
392 int dir
, otherDir
, heightChanged
;
396 JS_ASSERT(jt
->balance
!= 0);
398 if (jt
->balance
< -1) {
400 doubleRotate
= (jt
->kids
[JT_LEFT
]->balance
> 0);
401 } else if (jt
->balance
> 1) {
403 doubleRotate
= (jt
->kids
[JT_RIGHT
]->balance
< 0);
408 otherDir
= JT_OTHER_DIR(dir
);
410 jt2
= jt
->kids
[otherDir
];
411 *jtp
= root
= jt2
->kids
[dir
];
413 jt
->kids
[otherDir
] = root
->kids
[dir
];
414 root
->kids
[dir
] = jt
;
416 jt2
->kids
[dir
] = root
->kids
[otherDir
];
417 root
->kids
[otherDir
] = jt2
;
420 root
->kids
[JT_LEFT
]->balance
= -JS_MAX(root
->balance
, 0);
421 root
->kids
[JT_RIGHT
]->balance
= -JS_MIN(root
->balance
, 0);
424 *jtp
= root
= jt
->kids
[otherDir
];
425 jt
->kids
[otherDir
] = root
->kids
[dir
];
426 root
->kids
[dir
] = jt
;
428 heightChanged
= (root
->balance
!= 0);
429 jt
->balance
= -((dir
== JT_LEFT
) ? --root
->balance
: ++root
->balance
);
432 return heightChanged
;
435 typedef struct AddJumpTargetArgs
{
443 AddJumpTarget(AddJumpTargetArgs
*args
, JSJumpTarget
**jtp
)
450 JSCodeGenerator
*cg
= args
->cg
;
454 cg
->jtFreeList
= jt
->kids
[JT_LEFT
];
456 JS_ARENA_ALLOCATE_CAST(jt
, JSJumpTarget
*, &args
->cx
->tempPool
,
459 js_ReportOutOfScriptQuota(args
->cx
);
463 jt
->offset
= args
->offset
;
465 jt
->kids
[JT_LEFT
] = jt
->kids
[JT_RIGHT
] = NULL
;
466 cg
->numJumpTargets
++;
472 if (jt
->offset
== args
->offset
) {
477 if (args
->offset
< jt
->offset
)
478 balanceDelta
= -AddJumpTarget(args
, &jt
->kids
[JT_LEFT
]);
480 balanceDelta
= AddJumpTarget(args
, &jt
->kids
[JT_RIGHT
]);
484 jt
->balance
+= balanceDelta
;
485 return (balanceDelta
&& jt
->balance
)
486 ? 1 - BalanceJumpTargets(jtp
)
491 static int AVLCheck(JSJumpTarget
*jt
)
496 JS_ASSERT(-1 <= jt
->balance
&& jt
->balance
<= 1);
497 lh
= AVLCheck(jt
->kids
[JT_LEFT
]);
498 rh
= AVLCheck(jt
->kids
[JT_RIGHT
]);
499 JS_ASSERT(jt
->balance
== rh
- lh
);
500 return 1 + JS_MAX(lh
, rh
);
505 SetSpanDepTarget(JSContext
*cx
, JSCodeGenerator
*cg
, JSSpanDep
*sd
,
508 AddJumpTargetArgs args
;
510 if (off
< JUMPX_OFFSET_MIN
|| JUMPX_OFFSET_MAX
< off
) {
511 ReportStatementTooLarge(cx
, cg
);
517 args
.offset
= sd
->top
+ off
;
519 AddJumpTarget(&args
, &cg
->jumpTargets
);
524 AVLCheck(cg
->jumpTargets
);
527 SD_SET_TARGET(sd
, args
.node
);
531 #define SPANDEPS_MIN 256
532 #define SPANDEPS_SIZE(n) ((n) * sizeof(JSSpanDep))
533 #define SPANDEPS_SIZE_MIN SPANDEPS_SIZE(SPANDEPS_MIN)
536 AddSpanDep(JSContext
*cx
, JSCodeGenerator
*cg
, jsbytecode
*pc
, jsbytecode
*pc2
,
540 JSSpanDep
*sdbase
, *sd
;
543 index
= cg
->numSpanDeps
;
544 if (index
+ 1 == 0) {
545 ReportStatementTooLarge(cx
, cg
);
549 if ((index
& (index
- 1)) == 0 &&
550 (!(sdbase
= cg
->spanDeps
) || index
>= SPANDEPS_MIN
)) {
551 size
= sdbase
? SPANDEPS_SIZE(index
) : SPANDEPS_SIZE_MIN
/ 2;
552 sdbase
= (JSSpanDep
*) cx
->realloc(sdbase
, size
+ size
);
555 cg
->spanDeps
= sdbase
;
558 cg
->numSpanDeps
= index
+ 1;
559 sd
= cg
->spanDeps
+ index
;
560 sd
->top
= pc
- CG_BASE(cg
);
561 sd
->offset
= sd
->before
= pc2
- CG_BASE(cg
);
563 if (js_CodeSpec
[*pc
].format
& JOF_BACKPATCH
) {
564 /* Jump offset will be backpatched if off is a non-zero "bpdelta". */
566 JS_ASSERT(off
>= 1 + JUMP_OFFSET_LEN
);
567 if (off
> BPDELTA_MAX
) {
568 ReportStatementTooLarge(cx
, cg
);
572 SD_SET_BPDELTA(sd
, off
);
573 } else if (off
== 0) {
574 /* Jump offset will be patched directly, without backpatch chaining. */
575 SD_SET_TARGET(sd
, 0);
577 /* The jump offset in off is non-zero, therefore it's already known. */
578 if (!SetSpanDepTarget(cx
, cg
, sd
, off
))
582 if (index
> SPANDEP_INDEX_MAX
)
583 index
= SPANDEP_INDEX_HUGE
;
584 SET_SPANDEP_INDEX(pc2
, index
);
589 AddSwitchSpanDeps(JSContext
*cx
, JSCodeGenerator
*cg
, jsbytecode
*pc
)
595 uintN njumps
, indexlen
;
598 JS_ASSERT(op
== JSOP_TABLESWITCH
|| op
== JSOP_LOOKUPSWITCH
);
600 off
= GET_JUMP_OFFSET(pc2
);
601 if (!AddSpanDep(cx
, cg
, pc
, pc2
, off
))
603 pc2
+= JUMP_OFFSET_LEN
;
604 if (op
== JSOP_TABLESWITCH
) {
605 low
= GET_JUMP_OFFSET(pc2
);
606 pc2
+= JUMP_OFFSET_LEN
;
607 high
= GET_JUMP_OFFSET(pc2
);
608 pc2
+= JUMP_OFFSET_LEN
;
609 njumps
= (uintN
) (high
- low
+ 1);
612 njumps
= GET_UINT16(pc2
);
614 indexlen
= INDEX_LEN
;
619 off
= GET_JUMP_OFFSET(pc2
);
620 if (!AddSpanDep(cx
, cg
, pc
, pc2
, off
))
622 pc2
+= JUMP_OFFSET_LEN
;
628 BuildSpanDepTable(JSContext
*cx
, JSCodeGenerator
*cg
)
630 jsbytecode
*pc
, *end
;
632 const JSCodeSpec
*cs
;
635 pc
= CG_BASE(cg
) + cg
->spanDepTodo
;
640 cs
= &js_CodeSpec
[op
];
642 switch (JOF_TYPE(cs
->format
)) {
643 case JOF_TABLESWITCH
:
644 case JOF_LOOKUPSWITCH
:
645 pc
= AddSwitchSpanDeps(cx
, cg
, pc
);
651 off
= GET_JUMP_OFFSET(pc
);
652 if (!AddSpanDep(cx
, cg
, pc
, pc
, off
))
665 GetSpanDep(JSCodeGenerator
*cg
, jsbytecode
*pc
)
672 index
= GET_SPANDEP_INDEX(pc
);
673 if (index
!= SPANDEP_INDEX_HUGE
)
674 return cg
->spanDeps
+ index
;
676 offset
= pc
- CG_BASE(cg
);
678 hi
= cg
->numSpanDeps
- 1;
681 sd
= cg
->spanDeps
+ mid
;
682 if (sd
->before
== offset
)
684 if (sd
->before
< offset
)
695 SetBackPatchDelta(JSContext
*cx
, JSCodeGenerator
*cg
, jsbytecode
*pc
,
700 JS_ASSERT(delta
>= 1 + JUMP_OFFSET_LEN
);
701 if (!cg
->spanDeps
&& delta
< JUMP_OFFSET_MAX
) {
702 SET_JUMP_OFFSET(pc
, delta
);
706 if (delta
> BPDELTA_MAX
) {
707 ReportStatementTooLarge(cx
, cg
);
711 if (!cg
->spanDeps
&& !BuildSpanDepTable(cx
, cg
))
714 sd
= GetSpanDep(cg
, pc
);
715 JS_ASSERT(SD_GET_BPDELTA(sd
) == 0);
716 SD_SET_BPDELTA(sd
, delta
);
721 UpdateJumpTargets(JSJumpTarget
*jt
, ptrdiff_t pivot
, ptrdiff_t delta
)
723 if (jt
->offset
> pivot
) {
725 if (jt
->kids
[JT_LEFT
])
726 UpdateJumpTargets(jt
->kids
[JT_LEFT
], pivot
, delta
);
728 if (jt
->kids
[JT_RIGHT
])
729 UpdateJumpTargets(jt
->kids
[JT_RIGHT
], pivot
, delta
);
733 FindNearestSpanDep(JSCodeGenerator
*cg
, ptrdiff_t offset
, int lo
,
737 JSSpanDep
*sdbase
, *sd
;
739 num
= cg
->numSpanDeps
;
742 sdbase
= cg
->spanDeps
;
746 if (sd
->before
== offset
)
748 if (sd
->before
< offset
)
756 JS_ASSERT(sd
->before
>= offset
&& (lo
== 0 || sd
[-1].before
< offset
));
761 FreeJumpTargets(JSCodeGenerator
*cg
, JSJumpTarget
*jt
)
763 if (jt
->kids
[JT_LEFT
])
764 FreeJumpTargets(cg
, jt
->kids
[JT_LEFT
]);
765 if (jt
->kids
[JT_RIGHT
])
766 FreeJumpTargets(cg
, jt
->kids
[JT_RIGHT
]);
767 jt
->kids
[JT_LEFT
] = cg
->jtFreeList
;
772 OptimizeSpanDeps(JSContext
*cx
, JSCodeGenerator
*cg
)
774 jsbytecode
*pc
, *oldpc
, *base
, *limit
, *next
;
775 JSSpanDep
*sd
, *sd2
, *sdbase
, *sdlimit
, *sdtop
, guard
;
776 ptrdiff_t offset
, growth
, delta
, top
, pivot
, span
, length
, target
;
781 jssrcnote
*sn
, *snlimit
;
783 uintN i
, n
, noteIndex
;
790 sdbase
= cg
->spanDeps
;
791 sdlimit
= sdbase
+ cg
->numSpanDeps
;
792 offset
= CG_OFFSET(cg
);
807 for (sd
= sdbase
; sd
< sdlimit
; sd
++) {
808 JS_ASSERT(JT_HAS_TAG(sd
->target
));
811 if (sd
->top
!= top
) {
814 JS_ASSERT(top
== sd
->before
);
818 type
= JOF_OPTYPE(op
);
819 if (JOF_TYPE_IS_EXTENDED_JUMP(type
)) {
821 * We already extended all the jump offset operands for
822 * the opcode at sd->top. Jumps and branches have only
823 * one jump offset operand, but switches have many, all
824 * of which are adjacent in cg->spanDeps.
829 JS_ASSERT(type
== JOF_JUMP
||
830 type
== JOF_TABLESWITCH
||
831 type
== JOF_LOOKUPSWITCH
);
834 if (!JOF_TYPE_IS_EXTENDED_JUMP(type
)) {
835 span
= SD_SPAN(sd
, pivot
);
836 if (span
< JUMP_OFFSET_MIN
|| JUMP_OFFSET_MAX
< span
) {
837 ptrdiff_t deltaFromTop
= 0;
842 case JSOP_GOTO
: op
= JSOP_GOTOX
; break;
843 case JSOP_IFEQ
: op
= JSOP_IFEQX
; break;
844 case JSOP_IFNE
: op
= JSOP_IFNEX
; break;
845 case JSOP_OR
: op
= JSOP_ORX
; break;
846 case JSOP_AND
: op
= JSOP_ANDX
; break;
847 case JSOP_GOSUB
: op
= JSOP_GOSUBX
; break;
848 case JSOP_CASE
: op
= JSOP_CASEX
; break;
849 case JSOP_DEFAULT
: op
= JSOP_DEFAULTX
; break;
850 case JSOP_TABLESWITCH
: op
= JSOP_TABLESWITCHX
; break;
851 case JSOP_LOOKUPSWITCH
: op
= JSOP_LOOKUPSWITCHX
; break;
853 ReportStatementTooLarge(cx
, cg
);
856 *pc
= (jsbytecode
) op
;
858 for (sd2
= sdtop
; sd2
< sdlimit
&& sd2
->top
== top
; sd2
++) {
861 * sd2->offset already includes delta as it stood
862 * before we entered this loop, but it must also
863 * include the delta relative to top due to all the
864 * extended jump offset immediates for the opcode
865 * starting at top, which we extend in this loop.
867 * If there is only one extended jump offset, then
868 * sd2->offset won't change and this for loop will
871 sd2
->offset
+= deltaFromTop
;
872 deltaFromTop
+= JUMPX_OFFSET_LEN
- JUMP_OFFSET_LEN
;
875 * sd2 comes after sd, and won't be revisited by
876 * the outer for loop, so we have to increase its
877 * offset by delta, not merely by deltaFromTop.
879 sd2
->offset
+= delta
;
882 delta
+= JUMPX_OFFSET_LEN
- JUMP_OFFSET_LEN
;
883 UpdateJumpTargets(cg
->jumpTargets
, sd2
->offset
,
884 JUMPX_OFFSET_LEN
- JUMP_OFFSET_LEN
);
896 JSTokenStream
*ts
= &cg
->compiler
->tokenStream
;
898 printf("%s:%u: %u/%u jumps extended in %d passes (%d=%d+%d)\n",
899 ts
->filename
? ts
->filename
: "stdin", cg
->firstLine
,
900 growth
/ (JUMPX_OFFSET_LEN
- JUMP_OFFSET_LEN
), cg
->numSpanDeps
,
901 passes
, offset
+ growth
, offset
, growth
);
905 * Ensure that we have room for the extended jumps, but don't round up
906 * to a power of two -- we're done generating code, so we cut to fit.
908 limit
= CG_LIMIT(cg
);
909 length
= offset
+ growth
;
910 next
= base
+ length
;
912 JS_ASSERT(length
> BYTECODE_CHUNK
);
913 size
= BYTECODE_SIZE(limit
- base
);
914 incr
= BYTECODE_SIZE(length
) - size
;
915 JS_ARENA_GROW_CAST(base
, jsbytecode
*, cg
->codePool
, size
, incr
);
917 js_ReportOutOfScriptQuota(cx
);
921 CG_LIMIT(cg
) = next
= base
+ length
;
926 * Set up a fake span dependency record to guard the end of the code
927 * being generated. This guard record is returned as a fencepost by
928 * FindNearestSpanDep if there is no real spandep at or above a given
929 * unextended code offset.
932 guard
.offset
= offset
+ growth
;
933 guard
.before
= offset
;
938 * Now work backwards through the span dependencies, copying chunks of
939 * bytecode between each extended jump toward the end of the grown code
940 * space, and restoring immediate offset operands for all jump bytecodes.
941 * The first chunk of bytecodes, starting at base and ending at the first
942 * extended jump offset (NB: this chunk includes the operation bytecode
943 * just before that immediate jump offset), doesn't need to be copied.
945 JS_ASSERT(sd
== sdlimit
);
947 while (--sd
>= sdbase
) {
948 if (sd
->top
!= top
) {
950 op
= (JSOp
) base
[top
];
951 type
= JOF_OPTYPE(op
);
953 for (sd2
= sd
- 1; sd2
>= sdbase
&& sd2
->top
== top
; sd2
--)
957 JS_ASSERT(top
== sd2
->before
);
960 oldpc
= base
+ sd
->before
;
961 span
= SD_SPAN(sd
, pivot
);
964 * If this jump didn't need to be extended, restore its span immediate
965 * offset operand now, overwriting the index of sd within cg->spanDeps
966 * that was stored temporarily after *pc when BuildSpanDepTable ran.
968 * Note that span might fit in 16 bits even for an extended jump op,
969 * if the op has multiple span operands, not all of which overflowed
970 * (e.g. JSOP_LOOKUPSWITCH or JSOP_TABLESWITCH where some cases are in
971 * range for a short jump, but others are not).
973 if (!JOF_TYPE_IS_EXTENDED_JUMP(type
)) {
974 JS_ASSERT(JUMP_OFFSET_MIN
<= span
&& span
<= JUMP_OFFSET_MAX
);
975 SET_JUMP_OFFSET(oldpc
, span
);
980 * Set up parameters needed to copy the next run of bytecode starting
981 * at offset (which is a cursor into the unextended, original bytecode
982 * vector), down to sd->before (a cursor of the same scale as offset,
983 * it's the index of the original jump pc). Reuse delta to count the
984 * nominal number of bytes to copy.
986 pc
= base
+ sd
->offset
;
987 delta
= offset
- sd
->before
;
988 JS_ASSERT(delta
>= 1 + JUMP_OFFSET_LEN
);
991 * Don't bother copying the jump offset we're about to reset, but do
992 * copy the bytecode at oldpc (which comes just before its immediate
993 * jump offset operand), on the next iteration through the loop, by
994 * including it in offset's new value.
996 offset
= sd
->before
+ 1;
997 size
= BYTECODE_SIZE(delta
- (1 + JUMP_OFFSET_LEN
));
999 memmove(pc
+ 1 + JUMPX_OFFSET_LEN
,
1000 oldpc
+ 1 + JUMP_OFFSET_LEN
,
1004 SET_JUMPX_OFFSET(pc
, span
);
1009 * Fix source note deltas. Don't hardwire the delta fixup adjustment,
1010 * even though currently it must be JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN
1011 * at each sd that moved. The future may bring different offset sizes
1012 * for span-dependent instruction operands. However, we fix only main
1013 * notes here, not prolog notes -- we know that prolog opcodes are not
1014 * span-dependent, and aren't likely ever to be.
1016 offset
= growth
= 0;
1018 for (sn
= cg
->main
.notes
, snlimit
= sn
+ cg
->main
.noteCount
;
1022 * Recall that the offset of a given note includes its delta, and
1023 * tells the offset of the annotated bytecode from the main entry
1024 * point of the script.
1026 offset
+= SN_DELTA(sn
);
1027 while (sd
< sdlimit
&& sd
->before
< offset
) {
1029 * To compute the delta to add to sn, we need to look at the
1030 * spandep after sd, whose offset - (before + growth) tells by
1031 * how many bytes sd's instruction grew.
1036 delta
= sd2
->offset
- (sd2
->before
+ growth
);
1038 JS_ASSERT(delta
== JUMPX_OFFSET_LEN
- JUMP_OFFSET_LEN
);
1039 sn
= js_AddToSrcNoteDelta(cx
, cg
, sn
, delta
);
1042 snlimit
= cg
->main
.notes
+ cg
->main
.noteCount
;
1049 * If sn has span-dependent offset operands, check whether each
1050 * covers further span-dependencies, and increase those operands
1051 * accordingly. Some source notes measure offset not from the
1052 * annotated pc, but from that pc plus some small bias. NB: we
1053 * assume that spec->offsetBias can't itself span span-dependent
1056 spec
= &js_SrcNoteSpec
[SN_TYPE(sn
)];
1057 if (spec
->isSpanDep
) {
1058 pivot
= offset
+ spec
->offsetBias
;
1060 for (i
= 0; i
< n
; i
++) {
1061 span
= js_GetSrcNoteOffset(sn
, i
);
1064 target
= pivot
+ span
* spec
->isSpanDep
;
1065 sd2
= FindNearestSpanDep(cg
, target
,
1072 * Increase target by sd2's before-vs-after offset delta,
1073 * which is absolute (i.e., relative to start of script,
1074 * as is target). Recompute the span by subtracting its
1075 * adjusted pivot from target.
1077 target
+= sd2
->offset
- sd2
->before
;
1078 span
= target
- (pivot
+ growth
);
1079 span
*= spec
->isSpanDep
;
1080 noteIndex
= sn
- cg
->main
.notes
;
1081 if (!js_SetSrcNoteOffset(cx
, cg
, noteIndex
, i
, span
))
1083 sn
= cg
->main
.notes
+ noteIndex
;
1084 snlimit
= cg
->main
.notes
+ cg
->main
.noteCount
;
1088 cg
->main
.lastNoteOffset
+= growth
;
1091 * Fix try/catch notes (O(numTryNotes * log2(numSpanDeps)), but it's
1092 * not clear how we can beat that).
1094 for (tryNode
= cg
->lastTryNode
; tryNode
; tryNode
= tryNode
->prev
) {
1096 * First, look for the nearest span dependency at/above tn->start.
1097 * There may not be any such spandep, in which case the guard will
1100 offset
= tryNode
->note
.start
;
1101 sd
= FindNearestSpanDep(cg
, offset
, 0, &guard
);
1102 delta
= sd
->offset
- sd
->before
;
1103 tryNode
->note
.start
= offset
+ delta
;
1106 * Next, find the nearest spandep at/above tn->start + tn->length.
1107 * Use its delta minus tn->start's delta to increase tn->length.
1109 length
= tryNode
->note
.length
;
1110 sd2
= FindNearestSpanDep(cg
, offset
+ length
, sd
- sdbase
, &guard
);
1112 tryNode
->note
.length
=
1113 length
+ sd2
->offset
- sd2
->before
- delta
;
1118 #ifdef DEBUG_brendan
1122 for (sd
= sdbase
; sd
< sdlimit
; sd
++) {
1123 offset
= sd
->offset
;
1125 /* NB: sd->top cursors into the original, unextended bytecode vector. */
1126 if (sd
->top
!= top
) {
1127 JS_ASSERT(top
== -1 ||
1128 !JOF_TYPE_IS_EXTENDED_JUMP(type
) ||
1132 JS_ASSERT(top
== sd
->before
);
1133 op
= (JSOp
) base
[offset
];
1134 type
= JOF_OPTYPE(op
);
1135 JS_ASSERT(type
== JOF_JUMP
||
1136 type
== JOF_JUMPX
||
1137 type
== JOF_TABLESWITCH
||
1138 type
== JOF_TABLESWITCHX
||
1139 type
== JOF_LOOKUPSWITCH
||
1140 type
== JOF_LOOKUPSWITCHX
);
1145 if (JOF_TYPE_IS_EXTENDED_JUMP(type
)) {
1146 span
= GET_JUMPX_OFFSET(pc
);
1147 if (span
< JUMP_OFFSET_MIN
|| JUMP_OFFSET_MAX
< span
) {
1150 JS_ASSERT(type
== JOF_TABLESWITCHX
||
1151 type
== JOF_LOOKUPSWITCHX
);
1154 span
= GET_JUMP_OFFSET(pc
);
1156 JS_ASSERT(SD_SPAN(sd
, pivot
) == span
);
1158 JS_ASSERT(!JOF_TYPE_IS_EXTENDED_JUMP(type
) || bigspans
!= 0);
1163 * Reset so we optimize at most once -- cg may be used for further code
1164 * generation of successive, independent, top-level statements. No jump
1165 * can span top-level statements, because JS lacks goto.
1167 size
= SPANDEPS_SIZE(JS_BIT(JS_CeilingLog2(cg
->numSpanDeps
)));
1168 cx
->free(cg
->spanDeps
);
1169 cg
->spanDeps
= NULL
;
1170 FreeJumpTargets(cg
, cg
->jumpTargets
);
1171 cg
->jumpTargets
= NULL
;
1172 cg
->numSpanDeps
= cg
->numJumpTargets
= 0;
1173 cg
->spanDepTodo
= CG_OFFSET(cg
);
1178 EmitJump(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp op
, ptrdiff_t off
)
1184 extend
= off
< JUMP_OFFSET_MIN
|| JUMP_OFFSET_MAX
< off
;
1185 if (extend
&& !cg
->spanDeps
&& !BuildSpanDepTable(cx
, cg
))
1188 jmp
= js_Emit3(cx
, cg
, op
, JUMP_OFFSET_HI(off
), JUMP_OFFSET_LO(off
));
1189 if (jmp
>= 0 && (extend
|| cg
->spanDeps
)) {
1190 pc
= CG_CODE(cg
, jmp
);
1191 if (!AddSpanDep(cx
, cg
, pc
, pc
, off
))
1198 GetJumpOffset(JSCodeGenerator
*cg
, jsbytecode
*pc
)
1205 return GET_JUMP_OFFSET(pc
);
1207 sd
= GetSpanDep(cg
, pc
);
1209 if (!JT_HAS_TAG(jt
))
1210 return JT_TO_BPDELTA(jt
);
1213 while (--sd
>= cg
->spanDeps
&& sd
->top
== top
)
1216 return JT_CLR_TAG(jt
)->offset
- sd
->offset
;
1220 js_SetJumpOffset(JSContext
*cx
, JSCodeGenerator
*cg
, jsbytecode
*pc
,
1223 if (!cg
->spanDeps
) {
1224 if (JUMP_OFFSET_MIN
<= off
&& off
<= JUMP_OFFSET_MAX
) {
1225 SET_JUMP_OFFSET(pc
, off
);
1229 if (!BuildSpanDepTable(cx
, cg
))
1233 return SetSpanDepTarget(cx
, cg
, GetSpanDep(cg
, pc
), off
);
1237 JSTreeContext::inStatement(JSStmtType type
)
1239 for (JSStmtInfo
*stmt
= topStmt
; stmt
; stmt
= stmt
->down
) {
1240 if (stmt
->type
== type
)
1247 js_PushStatement(JSTreeContext
*tc
, JSStmtInfo
*stmt
, JSStmtType type
,
1252 stmt
->blockid
= tc
->blockid();
1253 SET_STATEMENT_TOP(stmt
, top
);
1255 JS_ASSERT(!stmt
->blockObj
);
1256 stmt
->down
= tc
->topStmt
;
1258 if (STMT_LINKS_SCOPE(stmt
)) {
1259 stmt
->downScope
= tc
->topScopeStmt
;
1260 tc
->topScopeStmt
= stmt
;
1262 stmt
->downScope
= NULL
;
1267 js_PushBlockScope(JSTreeContext
*tc
, JSStmtInfo
*stmt
, JSObject
*blockObj
,
1270 js_PushStatement(tc
, stmt
, STMT_BLOCK
, top
);
1271 stmt
->flags
|= SIF_SCOPE
;
1272 STOBJ_SET_PARENT(blockObj
, tc
->blockChain
);
1273 stmt
->downScope
= tc
->topScopeStmt
;
1274 tc
->topScopeStmt
= stmt
;
1275 tc
->blockChain
= blockObj
;
1276 stmt
->blockObj
= blockObj
;
1280 * Emit a backpatch op with offset pointing to the previous jump of this type,
1281 * so that we can walk back up the chain fixing up the op and jump offset.
1284 EmitBackPatchOp(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp op
, ptrdiff_t *lastp
)
1286 ptrdiff_t offset
, delta
;
1288 offset
= CG_OFFSET(cg
);
1289 delta
= offset
- *lastp
;
1291 JS_ASSERT(delta
> 0);
1292 return EmitJump(cx
, cg
, op
, delta
);
1296 * Macro to emit a bytecode followed by a uint16 immediate operand stored in
1297 * big-endian order, used for arg and var numbers as well as for atomIndexes.
1298 * NB: We use cx and cg from our caller's lexical environment, and return
1301 #define EMIT_UINT16_IMM_OP(op, i) \
1303 if (js_Emit3(cx, cg, op, UINT16_HI(i), UINT16_LO(i)) < 0) \
1308 FlushPops(JSContext
*cx
, JSCodeGenerator
*cg
, intN
*npops
)
1310 JS_ASSERT(*npops
!= 0);
1311 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0)
1313 EMIT_UINT16_IMM_OP(JSOP_POPN
, *npops
);
1319 * Emit additional bytecode(s) for non-local jumps.
1322 EmitNonLocalJumpFixup(JSContext
*cx
, JSCodeGenerator
*cg
, JSStmtInfo
*toStmt
)
1328 * The non-local jump fixup we emit will unbalance cg->stackDepth, because
1329 * the fixup replicates balanced code such as JSOP_LEAVEWITH emitted at the
1330 * end of a with statement, so we save cg->stackDepth here and restore it
1331 * just before a successful return.
1333 depth
= cg
->stackDepth
;
1336 #define FLUSH_POPS() if (npops && !FlushPops(cx, cg, &npops)) return JS_FALSE
1338 for (stmt
= cg
->topStmt
; stmt
!= toStmt
; stmt
= stmt
->down
) {
1339 switch (stmt
->type
) {
1342 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0)
1344 if (EmitBackPatchOp(cx
, cg
, JSOP_BACKPATCH
, &GOSUBS(*stmt
)) < 0)
1349 /* There's a With object on the stack that we need to pop. */
1351 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0)
1353 if (js_Emit1(cx
, cg
, JSOP_LEAVEWITH
) < 0)
1357 case STMT_FOR_IN_LOOP
:
1359 * The iterator and the object being iterated need to be popped.
1362 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0)
1364 if (js_Emit1(cx
, cg
, JSOP_ENDITER
) < 0)
1368 case STMT_SUBROUTINE
:
1370 * There's a [exception or hole, retsub pc-index] pair on the
1371 * stack that we need to pop.
1379 if (stmt
->flags
& SIF_SCOPE
) {
1382 /* There is a Block object with locals on the stack to pop. */
1384 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0)
1386 i
= OBJ_BLOCK_COUNT(cx
, stmt
->blockObj
);
1387 EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK
, i
);
1392 cg
->stackDepth
= depth
;
1399 EmitGoto(JSContext
*cx
, JSCodeGenerator
*cg
, JSStmtInfo
*toStmt
,
1400 ptrdiff_t *lastp
, JSAtomListElement
*label
, JSSrcNoteType noteType
)
1404 if (!EmitNonLocalJumpFixup(cx
, cg
, toStmt
))
1408 index
= js_NewSrcNote2(cx
, cg
, noteType
, (ptrdiff_t) ALE_INDEX(label
));
1409 else if (noteType
!= SRC_NULL
)
1410 index
= js_NewSrcNote(cx
, cg
, noteType
);
1416 return EmitBackPatchOp(cx
, cg
, JSOP_BACKPATCH
, lastp
);
1420 BackPatch(JSContext
*cx
, JSCodeGenerator
*cg
, ptrdiff_t last
,
1421 jsbytecode
*target
, jsbytecode op
)
1423 jsbytecode
*pc
, *stop
;
1424 ptrdiff_t delta
, span
;
1426 pc
= CG_CODE(cg
, last
);
1427 stop
= CG_CODE(cg
, -1);
1428 while (pc
!= stop
) {
1429 delta
= GetJumpOffset(cg
, pc
);
1431 CHECK_AND_SET_JUMP_OFFSET(cx
, cg
, pc
, span
);
1434 * Set *pc after jump offset in case bpdelta didn't overflow, but span
1435 * does (if so, CHECK_AND_SET_JUMP_OFFSET might call BuildSpanDepTable
1436 * and need to see the JSOP_BACKPATCH* op at *pc).
1445 js_PopStatement(JSTreeContext
*tc
)
1450 tc
->topStmt
= stmt
->down
;
1451 if (STMT_LINKS_SCOPE(stmt
)) {
1452 tc
->topScopeStmt
= stmt
->downScope
;
1453 if (stmt
->flags
& SIF_SCOPE
) {
1454 tc
->blockChain
= STOBJ_GET_PARENT(stmt
->blockObj
);
1455 JS_SCOPE_DEPTH_METERING(--tc
->scopeDepth
);
1461 js_PopStatementCG(JSContext
*cx
, JSCodeGenerator
*cg
)
1466 if (!STMT_IS_TRYING(stmt
) &&
1467 (!BackPatch(cx
, cg
, stmt
->breaks
, CG_NEXT(cg
), JSOP_GOTO
) ||
1468 !BackPatch(cx
, cg
, stmt
->continues
, CG_CODE(cg
, stmt
->update
),
1472 js_PopStatement(cg
);
1477 js_DefineCompileTimeConstant(JSContext
*cx
, JSCodeGenerator
*cg
, JSAtom
*atom
,
1484 JSAtomListElement
*ale
;
1486 /* XXX just do numbers for now */
1487 if (pn
->pn_type
== TOK_NUMBER
) {
1489 if (JSDOUBLE_IS_INT(dval
, ival
) && INT_FITS_IN_JSVAL(ival
)) {
1490 v
= INT_TO_JSVAL(ival
);
1493 * We atomize double to root a jsdouble instance that we wrap as
1494 * jsval and store in cg->constList. This works because atoms are
1495 * protected from GC during compilation.
1497 valueAtom
= js_AtomizeDouble(cx
, dval
);
1500 v
= ATOM_KEY(valueAtom
);
1502 ale
= cg
->constList
.add(cg
->compiler
, atom
);
1505 ALE_SET_VALUE(ale
, v
);
1511 js_LexicalLookup(JSTreeContext
*tc
, JSAtom
*atom
, jsint
*slotp
, JSStmtInfo
*stmt
)
1515 JSScopeProperty
*sprop
;
1518 stmt
= tc
->topScopeStmt
;
1519 for (; stmt
; stmt
= stmt
->downScope
) {
1520 if (stmt
->type
== STMT_WITH
)
1523 /* Skip "maybe scope" statements that don't contain let bindings. */
1524 if (!(stmt
->flags
& SIF_SCOPE
))
1527 obj
= stmt
->blockObj
;
1528 JS_ASSERT(LOCKED_OBJ_GET_CLASS(obj
) == &js_BlockClass
);
1529 scope
= OBJ_SCOPE(obj
);
1530 sprop
= scope
->lookup(ATOM_TO_JSID(atom
));
1532 JS_ASSERT(sprop
->flags
& SPROP_HAS_SHORTID
);
1535 JS_ASSERT(JSVAL_IS_INT(obj
->fslots
[JSSLOT_BLOCK_DEPTH
]));
1536 *slotp
= JSVAL_TO_INT(obj
->fslots
[JSSLOT_BLOCK_DEPTH
]) +
1549 * Check if the attributes describe a property holding a compile-time constant
1550 * or a permanent, read-only property without a getter.
1552 #define IS_CONSTANT_PROPERTY(attrs) \
1553 (((attrs) & (JSPROP_READONLY | JSPROP_PERMANENT | JSPROP_GETTER)) == \
1554 (JSPROP_READONLY | JSPROP_PERMANENT))
1557 * The function sets vp to JSVAL_HOLE when the atom does not corresponds to a
1558 * name defining a constant.
1561 LookupCompileTimeConstant(JSContext
*cx
, JSCodeGenerator
*cg
, JSAtom
*atom
,
1566 JSAtomListElement
*ale
;
1567 JSObject
*obj
, *objbox
;
1572 * Chase down the cg stack, but only until we reach the outermost cg.
1573 * This enables propagating consts from top-level into switch cases in a
1574 * function compiled along with the top-level script.
1578 if (cg
->flags
& (TCF_IN_FUNCTION
| TCF_COMPILE_N_GO
)) {
1579 /* XXX this will need revising if 'const' becomes block-scoped. */
1580 stmt
= js_LexicalLookup(cg
, atom
, NULL
);
1584 ale
= cg
->constList
.lookup(atom
);
1586 JS_ASSERT(ALE_VALUE(ale
) != JSVAL_HOLE
);
1587 *vp
= ALE_VALUE(ale
);
1592 * Try looking in the variable object for a direct property that
1593 * is readonly and permanent. We know such a property can't be
1594 * shadowed by another property on obj's prototype chain, or a
1595 * with object or catch variable; nor can prop's value be changed,
1596 * nor can prop be deleted.
1598 if (cg
->flags
& TCF_IN_FUNCTION
) {
1599 if (js_LookupLocal(cx
, cg
->fun
, atom
, NULL
) != JSLOCAL_NONE
)
1602 JS_ASSERT(cg
->flags
& TCF_COMPILE_N_GO
);
1603 obj
= cg
->scopeChain
;
1604 ok
= OBJ_LOOKUP_PROPERTY(cx
, obj
, ATOM_TO_JSID(atom
), &objbox
,
1608 if (objbox
== obj
) {
1610 * We're compiling code that will be executed immediately,
1611 * not re-executed against a different scope chain and/or
1612 * variable object. Therefore we can get constant values
1613 * from our variable object here.
1615 ok
= OBJ_GET_ATTRIBUTES(cx
, obj
, ATOM_TO_JSID(atom
), prop
,
1617 if (ok
&& IS_CONSTANT_PROPERTY(attrs
)) {
1618 ok
= OBJ_GET_PROPERTY(cx
, obj
, ATOM_TO_JSID(atom
), vp
);
1619 JS_ASSERT_IF(ok
, *vp
!= JSVAL_HOLE
);
1623 OBJ_DROP_PROPERTY(cx
, objbox
, prop
);
1630 } while ((cg
= (JSCodeGenerator
*) cg
->parent
) != NULL
);
1635 * Return JSOP_NOP to indicate that index fits 2 bytes and no index segment
1636 * reset instruction is necessary, JSOP_FALSE to indicate an error or either
1637 * JSOP_RESETBASE0 or JSOP_RESETBASE1 to indicate the reset bytecode to issue
1638 * after the main bytecode sequence.
1641 EmitBigIndexPrefix(JSContext
*cx
, JSCodeGenerator
*cg
, uintN index
)
1646 * We have max 3 bytes for indexes and check for INDEX_LIMIT overflow only
1649 JS_STATIC_ASSERT(INDEX_LIMIT
<= JS_BIT(24));
1650 JS_STATIC_ASSERT(INDEX_LIMIT
>=
1651 (JSOP_INDEXBASE3
- JSOP_INDEXBASE1
+ 2) << 16);
1653 if (index
< JS_BIT(16))
1655 indexBase
= index
>> 16;
1656 if (indexBase
<= JSOP_INDEXBASE3
- JSOP_INDEXBASE1
+ 1) {
1657 if (js_Emit1(cx
, cg
, (JSOp
)(JSOP_INDEXBASE1
+ indexBase
- 1)) < 0)
1659 return JSOP_RESETBASE0
;
1662 if (index
>= INDEX_LIMIT
) {
1663 JS_ReportErrorNumber(cx
, js_GetErrorMessage
, NULL
,
1664 JSMSG_TOO_MANY_LITERALS
);
1668 if (js_Emit2(cx
, cg
, JSOP_INDEXBASE
, (JSOp
)indexBase
) < 0)
1670 return JSOP_RESETBASE
;
1674 * Emit a bytecode and its 2-byte constant index immediate operand. If the
1675 * index requires more than 2 bytes, emit a prefix op whose 8-bit immediate
1676 * operand effectively extends the 16-bit immediate of the prefixed opcode,
1677 * by changing index "segment" (see jsinterp.c). We optimize segments 1-3
1678 * with single-byte JSOP_INDEXBASE[123] codes.
1680 * Such prefixing currently requires a suffix to restore the "zero segment"
1681 * register setting, but this could be optimized further.
1684 EmitIndexOp(JSContext
*cx
, JSOp op
, uintN index
, JSCodeGenerator
*cg
)
1688 bigSuffix
= EmitBigIndexPrefix(cx
, cg
, index
);
1689 if (bigSuffix
== JSOP_FALSE
)
1691 EMIT_UINT16_IMM_OP(op
, index
);
1692 return bigSuffix
== JSOP_NOP
|| js_Emit1(cx
, cg
, bigSuffix
) >= 0;
1696 * Slight sugar for EmitIndexOp, again accessing cx and cg from the macro
1697 * caller's lexical environment, and embedding a false return on error.
1699 #define EMIT_INDEX_OP(op, index) \
1701 if (!EmitIndexOp(cx, op, index, cg)) \
1706 EmitAtomOp(JSContext
*cx
, JSParseNode
*pn
, JSOp op
, JSCodeGenerator
*cg
)
1708 JSAtomListElement
*ale
;
1710 JS_ASSERT(JOF_OPTYPE(op
) == JOF_ATOM
);
1711 if (op
== JSOP_GETPROP
&&
1712 pn
->pn_atom
== cx
->runtime
->atomState
.lengthAtom
) {
1713 return js_Emit1(cx
, cg
, JSOP_LENGTH
) >= 0;
1715 ale
= cg
->atomList
.add(cg
->compiler
, pn
->pn_atom
);
1718 return EmitIndexOp(cx
, op
, ALE_INDEX(ale
), cg
);
1722 EmitObjectOp(JSContext
*cx
, JSObjectBox
*objbox
, JSOp op
,
1723 JSCodeGenerator
*cg
)
1725 JS_ASSERT(JOF_OPTYPE(op
) == JOF_OBJECT
);
1726 return EmitIndexOp(cx
, op
, cg
->objectList
.index(objbox
), cg
);
1730 * What good are ARGNO_LEN and SLOTNO_LEN, you ask? The answer is that, apart
1731 * from EmitSlotIndexOp, they abstract out the detail that both are 2, and in
1732 * other parts of the code there's no necessary relationship between the two.
1733 * The abstraction cracks here in order to share EmitSlotIndexOp code among
1734 * the JSOP_DEFLOCALFUN and JSOP_GET{ARG,VAR,LOCAL}PROP cases.
1736 JS_STATIC_ASSERT(ARGNO_LEN
== 2);
1737 JS_STATIC_ASSERT(SLOTNO_LEN
== 2);
1740 EmitSlotIndexOp(JSContext
*cx
, JSOp op
, uintN slot
, uintN index
,
1741 JSCodeGenerator
*cg
)
1747 JS_ASSERT(JOF_OPTYPE(op
) == JOF_SLOTATOM
||
1748 JOF_OPTYPE(op
) == JOF_SLOTOBJECT
);
1749 bigSuffix
= EmitBigIndexPrefix(cx
, cg
, index
);
1750 if (bigSuffix
== JSOP_FALSE
)
1753 /* Emit [op, slot, index]. */
1754 off
= js_EmitN(cx
, cg
, op
, 2 + INDEX_LEN
);
1757 pc
= CG_CODE(cg
, off
);
1758 SET_UINT16(pc
, slot
);
1760 SET_INDEX(pc
, index
);
1761 return bigSuffix
== JSOP_NOP
|| js_Emit1(cx
, cg
, bigSuffix
) >= 0;
1765 * Adjust the slot for a block local to account for the number of variables
1766 * that share the same index space with locals. Due to the incremental code
1767 * generation for top-level script, we do the adjustment via code patching in
1768 * JSCompiler::compileScript; see comments there.
1770 * The function returns -1 on failures.
1773 AdjustBlockSlot(JSContext
*cx
, JSCodeGenerator
*cg
, jsint slot
)
1775 JS_ASSERT((jsuint
) slot
< cg
->maxStackDepth
);
1776 if (cg
->flags
& TCF_IN_FUNCTION
) {
1777 slot
+= cg
->fun
->u
.i
.nvars
;
1778 if ((uintN
) slot
>= SLOTNO_LIMIT
) {
1779 js_ReportCompileErrorNumber(cx
, CG_TS(cg
), NULL
,
1781 JSMSG_TOO_MANY_LOCALS
);
1789 EmitEnterBlock(JSContext
*cx
, JSParseNode
*pn
, JSCodeGenerator
*cg
)
1791 JS_ASSERT(PN_TYPE(pn
) == TOK_LEXICALSCOPE
);
1792 if (!EmitObjectOp(cx
, pn
->pn_objbox
, JSOP_ENTERBLOCK
, cg
))
1795 JSObject
*blockObj
= pn
->pn_objbox
->object
;
1796 jsint depth
= AdjustBlockSlot(cx
, cg
, OBJ_BLOCK_DEPTH(cx
, blockObj
));
1800 for (uintN slot
= JSSLOT_FREE(&js_BlockClass
),
1801 limit
= slot
+ OBJ_BLOCK_COUNT(cx
, blockObj
);
1802 slot
< limit
; slot
++) {
1803 jsval v
= STOBJ_GET_SLOT(blockObj
, slot
);
1805 /* Beware the empty destructuring dummy. */
1806 if (JSVAL_IS_VOID(v
)) {
1807 JS_ASSERT(slot
+ 1 <= limit
);
1811 JSDefinition
*dn
= (JSDefinition
*) JSVAL_TO_PRIVATE(v
);
1812 JS_ASSERT(dn
->pn_defn
);
1813 JS_ASSERT(uintN(dn
->frameSlot() + depth
) < JS_BIT(16));
1814 dn
->pn_cookie
+= depth
;
1816 for (JSParseNode
*pnu
= dn
->dn_uses
; pnu
; pnu
= pnu
->pn_link
) {
1817 JS_ASSERT(pnu
->pn_lexdef
== dn
);
1818 JS_ASSERT(!(pnu
->pn_dflags
& PND_BOUND
));
1819 JS_ASSERT(pnu
->pn_cookie
== FREE_UPVAR_COOKIE
);
1824 OBJ_SCOPE(blockObj
)->freeslot
= JSSLOT_FREE(&js_BlockClass
);
1825 return js_GrowSlots(cx
, blockObj
, JSSLOT_FREE(&js_BlockClass
));
1829 * When eval is called from a function, the eval code or function code it
1830 * compiles may reference upvars that live in the eval-calling function. The
1831 * eval-invoked compiler does not have explicit definitions for these upvars
1832 * and we do not attempt to create them a-priori (by inspecting the function's
1833 * args and vars) -- we could, but we'd take an avoidable penalty for each
1834 * function local not referenced by any upvar. Instead, we map such upvars
1835 * lazily, growing upvarMap.vector by powers of two.
1837 * This function knows that it is called with pn pointing to a PN_NAME-arity
1838 * node, and cg->compiler->callerFrame having a non-null fun member, and the
1839 * static level of cg at least one greater than the eval-calling function's
1843 MakeUpvarForEval(JSParseNode
*pn
, JSCodeGenerator
*cg
)
1845 JSContext
*cx
= cg
->compiler
->context
;
1846 JSFunction
*fun
= cg
->compiler
->callerFrame
->fun
;
1847 uintN upvarLevel
= fun
->u
.i
.script
->staticLevel
;
1849 JSFunctionBox
*funbox
= cg
->funbox
;
1852 * Treat top-level function definitions as escaping (i.e., as funargs),
1853 * required since we compile each such top level function or statement
1854 * and throw away the AST, so we can't yet see all funarg uses of this
1855 * function being compiled (cg->funbox->object). See bug 493177.
1857 if (funbox
->level
== fun
->u
.i
.script
->staticLevel
+ 1U &&
1858 !(((JSFunction
*) funbox
->object
)->flags
& JSFUN_LAMBDA
)) {
1859 JS_ASSERT_IF(cx
->options
& JSOPTION_ANONFUNFIX
,
1860 ((JSFunction
*) funbox
->object
)->atom
);
1864 while (funbox
->level
>= upvarLevel
) {
1865 if (funbox
->node
->pn_dflags
& PND_FUNARG
)
1867 funbox
= funbox
->parent
;
1873 JSAtom
*atom
= pn
->pn_atom
;
1876 JSLocalKind localKind
= js_LookupLocal(cx
, fun
, atom
, &index
);
1877 if (localKind
== JSLOCAL_NONE
)
1880 JS_ASSERT(cg
->staticLevel
> upvarLevel
);
1881 if (cg
->staticLevel
>= JS_DISPLAY_SIZE
|| upvarLevel
>= JS_DISPLAY_SIZE
)
1884 JSAtomListElement
*ale
= cg
->upvarList
.lookup(atom
);
1886 if ((cg
->flags
& TCF_IN_FUNCTION
) &&
1887 !js_AddLocal(cx
, cg
->fun
, atom
, JSLOCAL_UPVAR
)) {
1891 ale
= cg
->upvarList
.add(cg
->compiler
, atom
);
1894 JS_ASSERT(ALE_INDEX(ale
) == cg
->upvarList
.count
- 1);
1896 uint32
*vector
= cg
->upvarMap
.vector
;
1897 uint32 length
= cg
->upvarMap
.length
;
1899 JS_ASSERT(ALE_INDEX(ale
) <= length
);
1900 if (ALE_INDEX(ale
) == length
) {
1901 length
= 2 * JS_MAX(2, length
);
1902 vector
= (uint32
*) cx
->realloc(vector
, length
* sizeof *vector
);
1905 cg
->upvarMap
.vector
= vector
;
1906 cg
->upvarMap
.length
= length
;
1909 if (localKind
!= JSLOCAL_ARG
)
1910 index
+= fun
->nargs
;
1911 JS_ASSERT(index
< JS_BIT(16));
1913 uintN skip
= cg
->staticLevel
- upvarLevel
;
1914 vector
[ALE_INDEX(ale
)] = MAKE_UPVAR_COOKIE(skip
, index
);
1917 pn
->pn_op
= JSOP_GETUPVAR
;
1918 pn
->pn_cookie
= MAKE_UPVAR_COOKIE(cg
->staticLevel
, ALE_INDEX(ale
));
1919 pn
->pn_dflags
|= PND_BOUND
;
1924 * BindNameToSlot attempts to optimize name gets and sets to stack slot loads
1925 * and stores, given the compile-time information in cg and a TOK_NAME node pn.
1926 * It returns false on error, true on success.
1928 * The caller can inspect pn->pn_cookie for FREE_UPVAR_COOKIE to tell whether
1929 * optimization occurred, in which case BindNameToSlot also updated pn->pn_op.
1930 * If pn->pn_cookie is still FREE_UPVAR_COOKIE on return, pn->pn_op still may
1931 * have been optimized, e.g., from JSOP_NAME to JSOP_CALLEE. Whether or not
1932 * pn->pn_op was modified, if this function finds an argument or local variable
1933 * name, PND_CONST will be set in pn_dflags for read-only properties after a
1934 * successful return.
1936 * NB: if you add more opcodes specialized from JSOP_NAME, etc., don't forget
1937 * to update the TOK_FOR (for-in) and TOK_ASSIGN (op=, e.g. +=) special cases
1941 BindNameToSlot(JSContext
*cx
, JSCodeGenerator
*cg
, JSParseNode
*pn
)
1947 JSDefinition::Kind dn_kind
;
1948 JSAtomListElement
*ale
;
1951 JS_ASSERT(pn
->pn_type
== TOK_NAME
);
1953 /* Idempotency tests come first, since we may be called more than once. */
1954 if (pn
->pn_dflags
& PND_BOUND
)
1957 /* No cookie initialized for these two, they're pre-bound by definition. */
1958 JS_ASSERT(pn
->pn_op
!= JSOP_ARGUMENTS
&& pn
->pn_op
!= JSOP_CALLEE
);
1961 * The parser linked all uses (including forward references) to their
1962 * definitions, unless a with statement or direct eval intervened.
1965 JS_ASSERT(pn
->pn_cookie
== FREE_UPVAR_COOKIE
);
1967 JS_ASSERT(dn
->pn_defn
);
1968 pn
->pn_dflags
|= (dn
->pn_dflags
& PND_CONST
);
1972 dn
= (JSDefinition
*) pn
;
1979 JS_ASSERT(JOF_OPTYPE(op
) == JOF_ATOM
);
1981 cookie
= dn
->pn_cookie
;
1982 dn_kind
= dn
->kind();
1985 * Turn attempts to mutate const-declared bindings into get ops (for
1986 * pre-increment and pre-decrement ops, our caller will have to emit
1987 * JSOP_POS, JSOP_ONE, and JSOP_ADD as well).
1989 * Turn JSOP_DELNAME into JSOP_FALSE if dn is known, as all declared
1990 * bindings visible to the compiler are permanent in JS unless the
1991 * declaration originates in eval code. We detect eval code by testing
1992 * cg->compiler->callerFrame, which is set only by eval or a debugger
1995 * Note that this callerFrame non-null test must be qualified by testing
1996 * !cg->funbox to exclude function code nested in eval code, which is not
1997 * subject to the deletable binding exception.
2004 if (dn_kind
!= JSDefinition::UNKNOWN
) {
2005 if (cg
->compiler
->callerFrame
&& !cg
->funbox
)
2006 JS_ASSERT(cg
->flags
& TCF_COMPILE_N_GO
);
2008 pn
->pn_op
= JSOP_FALSE
;
2009 pn
->pn_dflags
|= PND_BOUND
;
2015 pn
->pn_op
= op
= JSOP_NAME
;
2018 if (cookie
== FREE_UPVAR_COOKIE
) {
2019 JSStackFrame
*caller
= cg
->compiler
->callerFrame
;
2021 JS_ASSERT(cg
->flags
& TCF_COMPILE_N_GO
);
2024 * Don't generate upvars on the left side of a for loop. See
2027 if (cg
->flags
& TCF_IN_FOR_INIT
)
2030 JS_ASSERT(caller
->script
);
2035 * Make sure the variable object used by the compiler to initialize
2036 * parent links matches the caller's varobj. Compile-n-go compiler-
2037 * created function objects have the top-level cg's scopeChain set
2038 * as their parent by JSCompiler::newFunction.
2040 JSObject
*scopeobj
= (cg
->flags
& TCF_IN_FUNCTION
)
2041 ? STOBJ_GET_PARENT(FUN_OBJECT(cg
->fun
))
2043 if (scopeobj
!= caller
->varobj
)
2047 * We are compiling eval or debug script inside a function frame
2048 * and the scope chain matches the function's variable object.
2049 * Optimize access to function's arguments and variable and the
2052 if (op
!= JSOP_NAME
)
2055 return MakeUpvarForEval(pn
, cg
);
2060 if (dn
->pn_dflags
& PND_GVAR
) {
2062 * If this is a global reference from within a function, leave pn_op as
2063 * JSOP_NAME, etc. We could emit JSOP_*GVAR ops within function code if
2064 * only we could depend on the global frame's slots being valid for all
2065 * calls to the function.
2067 if (cg
->flags
& TCF_IN_FUNCTION
)
2071 * We are optimizing global variables and there may be no pre-existing
2072 * global property named atom when this global script runs. If atom was
2073 * declared via const or var, optimize pn to access fp->vars using the
2074 * appropriate JSOP_*GVAR op.
2076 * FIXME: should be able to optimize global function access too.
2078 JS_ASSERT(dn_kind
== JSDefinition::VAR
|| dn_kind
== JSDefinition::CONST
);
2081 case JSOP_NAME
: op
= JSOP_GETGVAR
; break;
2082 case JSOP_SETNAME
: op
= JSOP_SETGVAR
; break;
2083 case JSOP_SETCONST
: /* NB: no change */ break;
2084 case JSOP_INCNAME
: op
= JSOP_INCGVAR
; break;
2085 case JSOP_NAMEINC
: op
= JSOP_GVARINC
; break;
2086 case JSOP_DECNAME
: op
= JSOP_DECGVAR
; break;
2087 case JSOP_NAMEDEC
: op
= JSOP_GVARDEC
; break;
2088 case JSOP_FORNAME
: /* NB: no change */ break;
2089 case JSOP_DELNAME
: /* NB: no change */ break;
2090 default: JS_NOT_REACHED("gvar");
2093 pn
->pn_cookie
= cookie
;
2094 pn
->pn_dflags
|= PND_BOUND
;
2098 uintN level
= UPVAR_FRAME_SKIP(cookie
);
2099 JS_ASSERT(cg
->staticLevel
>= level
);
2102 * A JSDefinition witnessed as a declaration by the parser cannot be an
2103 * upvar, unless it is the degenerate kind of upvar selected above (in the
2104 * code before the PND_GVAR test) for the special case of compile-and-go
2105 * code generated from eval called from a function, where the eval code
2106 * uses local vars defined in the function. We detect this upvar-for-eval
2107 * case by checking dn's op.
2109 if (PN_OP(dn
) == JSOP_GETUPVAR
) {
2110 JS_ASSERT(cg
->staticLevel
>= level
);
2111 if (op
!= JSOP_NAME
)
2115 JSStackFrame
*caller
= cg
->compiler
->callerFrame
;
2118 JSTreeContext
*tc
= cg
;
2119 while (tc
->staticLevel
!= level
)
2121 JS_ASSERT(tc
->flags
& TCF_COMPILING
);
2123 JSCodeGenerator
*evalcg
= (JSCodeGenerator
*) tc
;
2124 JS_ASSERT(evalcg
->flags
& TCF_COMPILE_N_GO
);
2125 JS_ASSERT(!(evalcg
->flags
& TCF_IN_FOR_INIT
));
2126 JS_ASSERT(caller
->script
);
2127 JS_ASSERT(caller
->fun
&& caller
->varobj
== evalcg
->scopeChain
);
2130 if (cg
->staticLevel
== level
) {
2131 pn
->pn_op
= JSOP_GETUPVAR
;
2132 pn
->pn_cookie
= cookie
;
2133 pn
->pn_dflags
|= PND_BOUND
;
2137 return MakeUpvarForEval(pn
, cg
);
2140 uintN skip
= cg
->staticLevel
- level
;
2142 JS_ASSERT(cg
->flags
& TCF_IN_FUNCTION
);
2143 JS_ASSERT(cg
->lexdeps
.lookup(atom
));
2144 JS_ASSERT(JOF_OPTYPE(op
) == JOF_ATOM
);
2145 JS_ASSERT(cg
->fun
->u
.i
.skipmin
<= skip
);
2148 * If op is a mutating opcode, this upvar's static level is too big to
2149 * index into the display, or the function is heavyweight, we fall back
2152 if (op
!= JSOP_NAME
)
2154 if (level
>= JS_DISPLAY_SIZE
)
2156 if (cg
->flags
& TCF_FUN_HEAVYWEIGHT
)
2159 if (FUN_FLAT_CLOSURE(cg
->fun
)) {
2163 * The function we're compiling may not be heavyweight, but if it
2164 * escapes as a funarg, we can't use JSOP_GETUPVAR/JSOP_CALLUPVAR.
2165 * JSCompiler::analyzeFunctions has arranged for this function's
2166 * enclosing functions to be heavyweight, so we can safely stick
2167 * with JSOP_NAME/JSOP_CALLNAME.
2169 if (cg
->funbox
->node
->pn_dflags
& PND_FUNARG
)
2173 * Generator functions may be resumed from any call stack, which
2174 * defeats the display optimization to static link searching used
2175 * by JSOP_{GET,CALL}UPVAR.
2177 if (cg
->flags
& TCF_FUN_IS_GENERATOR
)
2183 ale
= cg
->upvarList
.lookup(atom
);
2185 index
= ALE_INDEX(ale
);
2187 if (!js_AddLocal(cx
, cg
->fun
, atom
, JSLOCAL_UPVAR
))
2190 ale
= cg
->upvarList
.add(cg
->compiler
, atom
);
2193 index
= ALE_INDEX(ale
);
2194 JS_ASSERT(index
== cg
->upvarList
.count
- 1);
2196 uint32
*vector
= cg
->upvarMap
.vector
;
2198 uint32 length
= cg
->lexdeps
.count
;
2200 vector
= (uint32
*) js_calloc(length
* sizeof *vector
);
2202 JS_ReportOutOfMemory(cx
);
2205 cg
->upvarMap
.vector
= vector
;
2206 cg
->upvarMap
.length
= length
;
2209 uintN slot
= UPVAR_FRAME_SLOT(cookie
);
2210 if (dn_kind
!= JSDefinition::ARG
) {
2211 JSTreeContext
*tc
= cg
;
2214 } while (tc
->staticLevel
!= level
);
2215 if (tc
->flags
& TCF_IN_FUNCTION
)
2216 slot
+= tc
->fun
->nargs
;
2219 vector
[index
] = MAKE_UPVAR_COOKIE(skip
, slot
);
2223 pn
->pn_cookie
= index
;
2224 pn
->pn_dflags
|= PND_BOUND
;
        return JS_TRUE;
    }

    /*
     * We are compiling a function body and may be able to optimize name
     * to stack slot. Look for an argument or variable in the function and
     * rewrite pn_op and update pn accordingly.
     */
    switch (dn_kind) {
      case JSDefinition::UNKNOWN:
        return JS_TRUE;

      case JSDefinition::LET:
        switch (op) {
          case JSOP_NAME:     op = JSOP_GETLOCAL; break;
          case JSOP_SETNAME:  op = JSOP_SETLOCAL; break;
          case JSOP_INCNAME:  op = JSOP_INCLOCAL; break;
          case JSOP_NAMEINC:  op = JSOP_LOCALINC; break;
          case JSOP_DECNAME:  op = JSOP_DECLOCAL; break;
          case JSOP_NAMEDEC:  op = JSOP_LOCALDEC; break;
          case JSOP_FORNAME:  op = JSOP_FORLOCAL; break;
          default: JS_NOT_REACHED("let");
        }
        break;

      case JSDefinition::ARG:
        switch (op) {
          case JSOP_NAME:     op = JSOP_GETARG; break;
          case JSOP_SETNAME:  op = JSOP_SETARG; break;
          case JSOP_INCNAME:  op = JSOP_INCARG; break;
          case JSOP_NAMEINC:  op = JSOP_ARGINC; break;
          case JSOP_DECNAME:  op = JSOP_DECARG; break;
          case JSOP_NAMEDEC:  op = JSOP_ARGDEC; break;
          case JSOP_FORNAME:  op = JSOP_FORARG; break;
          default: JS_NOT_REACHED("arg");
        }
        JS_ASSERT(!pn->isConst());
        break;

      case JSDefinition::VAR:
        if (PN_OP(dn) == JSOP_CALLEE) {
            JS_ASSERT(op != JSOP_CALLEE);
            JS_ASSERT((cg->fun->flags & JSFUN_LAMBDA) && atom == cg->fun->atom);

            /*
             * Leave pn->pn_op == JSOP_NAME if cg->fun is heavyweight, as
             * we cannot be sure cg->fun is not something of the form:
             *
             *   var ff = (function f(s) { eval(s); return f; });
             *
             * where a caller invokes ff("var f = 42"). The result returned
             * for such an invocation must be 42, since the callee name is
             * lexically bound in an outer declarative environment from the
             * function's activation. See jsfun.cpp:call_resolve.
             */
            JS_ASSERT(op != JSOP_DELNAME);
            if (!(cg->flags & TCF_FUN_HEAVYWEIGHT)) {
                op = JSOP_CALLEE;
                pn->pn_dflags |= PND_CONST;
            }

            pn->pn_op = op;
            pn->pn_dflags |= PND_BOUND;
            return JS_TRUE;
        }
        /* FALL THROUGH */

      default:
        JS_ASSERT_IF(dn_kind != JSDefinition::FUNCTION,
                     dn_kind == JSDefinition::VAR ||
                     dn_kind == JSDefinition::CONST);
        switch (op) {
          case JSOP_NAME:     op = JSOP_GETLOCAL; break;
          case JSOP_SETNAME:  op = JSOP_SETLOCAL; break;
          case JSOP_SETCONST: op = JSOP_SETLOCAL; break;
          case JSOP_INCNAME:  op = JSOP_INCLOCAL; break;
          case JSOP_NAMEINC:  op = JSOP_LOCALINC; break;
          case JSOP_DECNAME:  op = JSOP_DECLOCAL; break;
          case JSOP_NAMEDEC:  op = JSOP_LOCALDEC; break;
          case JSOP_FORNAME:  op = JSOP_FORLOCAL; break;
          default: JS_NOT_REACHED("local");
        }
        JS_ASSERT_IF(dn_kind == JSDefinition::CONST, pn->pn_dflags & PND_CONST);
        break;
    }

    JS_ASSERT(op != PN_OP(pn));
    pn->pn_op = op;
    pn->pn_cookie = UPVAR_FRAME_SLOT(cookie);
    pn->pn_dflags |= PND_BOUND;
    return JS_TRUE;
}
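
/*
 * Illustrative sketch (not emitted by this file verbatim): for a function
 * such as
 *
 *   function f(a) { var x = a; return x; }
 *
 * BindNameToSlot is expected to rewrite the name uses so that 'a' becomes a
 * JSOP_GETARG with the argument's slot in pn_cookie and 'x' becomes a
 * JSOP_GETLOCAL/JSOP_SETLOCAL pair, each marked PND_BOUND. The exact opcodes
 * and slots depend on the surrounding flags (heavyweight function, eval
 * caller, upvar skip level) handled above.
 */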
/*
 * If pn contains a useful expression, return true with *answer set to true.
 * If pn contains a useless expression, return true with *answer set to false.
 * Return false on error.
 *
 * The caller should initialize *answer to false and invoke this function on
 * an expression statement or similar subtree to decide whether the tree could
 * produce code that has any side effects.  For an expression statement, we
 * define useless code as code with no side effects, because the main effect,
 * the value left on the stack after the code executes, will be discarded by a
 * pop bytecode.
 */
static JSBool
CheckSideEffects(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
                 JSBool *answer)
{
    JSBool ok;
    JSParseNode *pn2;

    ok = JS_TRUE;
    if (!pn || *answer)
        return ok;

    switch (pn->pn_arity) {
      case PN_FUNC:
        /*
         * A named function, contrary to ES3, is no longer useful, because we
         * bind its name lexically (using JSOP_CALLEE) instead of creating an
         * Object instance and binding a readonly, permanent property in it
         * (the object and binding can be detected and hijacked or captured).
         * This is a bug fix to ES3; it is fixed in ES3.1 drafts.
         */
        *answer = JS_FALSE;
        break;

      case PN_LIST:
        if (pn->pn_op == JSOP_NOP ||
            pn->pn_op == JSOP_OR || pn->pn_op == JSOP_AND ||
            pn->pn_op == JSOP_STRICTEQ || pn->pn_op == JSOP_STRICTNE) {
            /*
             * Non-operators along with ||, &&, ===, and !== never invoke
             * toString or valueOf.
             */
            for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next)
                ok &= CheckSideEffects(cx, cg, pn2, answer);
        } else {
            /*
             * All invocation operations (construct: TOK_NEW, call: TOK_LP)
             * are presumed to be useful, because they may have side effects
             * even if their main effect (their return value) is discarded.
             *
             * TOK_LB binary trees of 3 or more nodes are flattened into lists
             * to avoid too much recursion.  All such lists must be presumed
             * to be useful because each index operation could invoke a getter
             * (the JSOP_ARGUMENTS special case below, in the PN_BINARY case,
             * does not apply here: arguments[i][j] might invoke a getter).
             *
             * Likewise, array and object initialisers may call prototype
             * setters (the __defineSetter__ built-in, and writable __proto__
             * on Array.prototype create this hazard). Initialiser list nodes
             * have JSOP_NEWINIT in their pn_op.
             */
            *answer = JS_TRUE;
        }
        break;

      case PN_TERNARY:
        ok = CheckSideEffects(cx, cg, pn->pn_kid1, answer) &&
             CheckSideEffects(cx, cg, pn->pn_kid2, answer) &&
             CheckSideEffects(cx, cg, pn->pn_kid3, answer);
        break;

      case PN_BINARY:
        if (pn->pn_type == TOK_ASSIGN) {
            /*
             * Assignment is presumed to be useful, even if the next operation
             * is another assignment overwriting this one's ostensible effect,
             * because the left operand may be a property with a setter that
             * has side effects.
             *
             * The only exception is assignment of a useless value to a const
             * declared in the function currently being compiled.
             */
            pn2 = pn->pn_left;
            if (pn2->pn_type != TOK_NAME) {
                *answer = JS_TRUE;
            } else {
                if (!BindNameToSlot(cx, cg, pn2))
                    return JS_FALSE;
                if (!CheckSideEffects(cx, cg, pn->pn_right, answer))
                    return JS_FALSE;
                if (!*answer && (pn->pn_op != JSOP_NOP || !pn2->isConst()))
                    *answer = JS_TRUE;
            }
        } else {
            if (pn->pn_op == JSOP_OR || pn->pn_op == JSOP_AND ||
                pn->pn_op == JSOP_STRICTEQ || pn->pn_op == JSOP_STRICTNE) {
                /*
                 * ||, &&, ===, and !== do not convert their operands via
                 * toString or valueOf method calls.
                 */
                ok = CheckSideEffects(cx, cg, pn->pn_left, answer) &&
                     CheckSideEffects(cx, cg, pn->pn_right, answer);
            } else {
                /*
                 * We can't easily prove that neither operand ever denotes an
                 * object with a toString or valueOf method.
                 */
                *answer = JS_TRUE;
            }
        }
        break;

      case PN_UNARY:
        switch (pn->pn_type) {
          case TOK_RP:
            ok = CheckSideEffects(cx, cg, pn->pn_kid, answer);
            break;

          case TOK_DELETE:
            pn2 = pn->pn_kid;
            switch (pn2->pn_type) {
              case TOK_NAME:
                if (!BindNameToSlot(cx, cg, pn2))
                    return JS_FALSE;
                if (pn2->isConst()) {
                    *answer = JS_FALSE;
                    break;
                }
                /* FALL THROUGH */
              case TOK_DOT:
#if JS_HAS_XML_SUPPORT
              case TOK_DBLDOT:
#endif
#if JS_HAS_LVALUE_RETURN
              case TOK_LP:
#endif
              case TOK_LB:
                /* All these delete addressing modes have effects too. */
                *answer = JS_TRUE;
                break;
              default:
                ok = CheckSideEffects(cx, cg, pn2, answer);
                break;
            }
            break;

          case TOK_UNARYOP:
            if (pn->pn_op == JSOP_NOT) {
                /* ! does not convert its operand via toString or valueOf. */
                ok = CheckSideEffects(cx, cg, pn->pn_kid, answer);
                break;
            }
            /* FALL THROUGH */

          default:
            /*
             * All of TOK_INC, TOK_DEC, TOK_THROW, TOK_YIELD, and TOK_DEFSHARP
             * have direct effects. Of the remaining unary-arity node types,
             * we can't easily prove that the operand never denotes an object
             * with a toString or valueOf method.
             */
            *answer = JS_TRUE;
            break;
        }
        break;

      case PN_NAME:
        /*
         * Take care to avoid trying to bind a label name (labels, both for
         * statements and property values in object initialisers, have pn_op
         * defaulted to JSOP_NOP).
         */
        if (pn->pn_type == TOK_NAME && pn->pn_op != JSOP_NOP) {
            if (!BindNameToSlot(cx, cg, pn))
                return JS_FALSE;
            if (pn->pn_op != JSOP_ARGUMENTS && pn->pn_op != JSOP_CALLEE &&
                pn->pn_cookie == FREE_UPVAR_COOKIE) {
                /*
                 * Not an argument or local variable use, and not a use of a
                 * unshadowed named function expression's given name, so this
                 * expression could invoke a getter that has side effects.
                 */
                *answer = JS_TRUE;
            }
        }
        pn2 = pn->maybeExpr();
        if (pn->pn_type == TOK_DOT) {
            if (pn2->pn_type == TOK_NAME && !BindNameToSlot(cx, cg, pn2))
                return JS_FALSE;
            if (!(pn2->pn_op == JSOP_ARGUMENTS &&
                  pn->pn_atom == cx->runtime->atomState.lengthAtom)) {
                /*
                 * Any dotted property reference could call a getter, except
                 * for arguments.length where arguments is unambiguous.
                 */
                *answer = JS_TRUE;
            }
        }
        ok = CheckSideEffects(cx, cg, pn2, answer);
        break;

      case PN_NAMESET:
        ok = CheckSideEffects(cx, cg, pn->pn_tree, answer);
        break;

      case PN_NULLARY:
        if (pn->pn_type == TOK_DEBUGGER)
            *answer = JS_TRUE;
        break;
    }

    return ok;
}
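
/*
 * Illustrative examples (assumptions, not an exhaustive list): for the
 * expression statement 'x;' where x binds to an argument or local,
 * CheckSideEffects should leave *answer false (useless), while 'x.y;' or
 * 'f();' should set *answer true because a getter or call may have effects.
 * Callers typically use the useless case to warn and skip emission.
 */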
static JSBool
EmitNameOp(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
           JSBool callContext)
{
    JSOp op;

    if (!BindNameToSlot(cx, cg, pn))
        return JS_FALSE;
    op = PN_OP(pn);

    if (callContext) {
        switch (op) {
          case JSOP_NAME:     op = JSOP_CALLNAME; break;
          case JSOP_GETGVAR:  op = JSOP_CALLGVAR; break;
          case JSOP_GETARG:   op = JSOP_CALLARG; break;
          case JSOP_GETLOCAL:
            op = JSOP_CALLLOCAL;
            break;
          case JSOP_GETUPVAR:
            op = JSOP_CALLUPVAR;
            break;
          case JSOP_GETDSLOT:
            op = JSOP_CALLDSLOT;
            break;
          default:
            JS_ASSERT(op == JSOP_ARGUMENTS || op == JSOP_CALLEE);
            break;
        }
    }

    if (op == JSOP_ARGUMENTS || op == JSOP_CALLEE) {
        if (js_Emit1(cx, cg, op) < 0)
            return JS_FALSE;
        if (callContext && js_Emit1(cx, cg, JSOP_NULL) < 0)
            return JS_FALSE;
    } else {
        if (pn->pn_cookie != FREE_UPVAR_COOKIE) {
            EMIT_UINT16_IMM_OP(op, pn->pn_cookie);
        } else {
            if (!EmitAtomOp(cx, pn, op, cg))
                return JS_FALSE;
        }
    }

    return JS_TRUE;
}
#if JS_HAS_XML_SUPPORT
static JSBool
EmitXMLName(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
{
    JSParseNode *pn2;
    uintN oldflags;

    JS_ASSERT(pn->pn_type == TOK_UNARYOP);
    JS_ASSERT(pn->pn_op == JSOP_XMLNAME);
    JS_ASSERT(op == JSOP_XMLNAME || op == JSOP_CALLXMLNAME);

    pn2 = pn->pn_kid;
    oldflags = cg->flags;
    cg->flags &= ~TCF_IN_FOR_INIT;
    if (!js_EmitTree(cx, cg, pn2))
        return JS_FALSE;
    cg->flags |= oldflags & TCF_IN_FOR_INIT;
    if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
                       CG_OFFSET(cg) - pn2->pn_offset) < 0) {
        return JS_FALSE;
    }

    return js_Emit1(cx, cg, op) >= 0;
}
#endif
static JSBool
EmitSpecialPropOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
{
    /*
     * Special case for obj.__proto__, obj.__parent__, obj.__count__ to
     * deoptimize away from fast paths in the interpreter and trace recorder,
     * which skip dense array instances by going up to Array.prototype before
     * looking up the property name.
     */
    JSAtomListElement *ale = cg->atomList.add(cg->compiler, pn->pn_atom);
    if (!ale)
        return JS_FALSE;
    if (!EmitIndexOp(cx, JSOP_QNAMEPART, ALE_INDEX(ale), cg))
        return JS_FALSE;
    if (js_Emit1(cx, cg, op) < 0)
        return JS_FALSE;
    return JS_TRUE;
}
static JSBool
EmitPropOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg,
           JSBool callContext)
{
    JSParseNode *pn2, *pndot, *pnup, *pndown;
    ptrdiff_t top;

    JS_ASSERT(pn->pn_arity == PN_NAME);
    pn2 = pn->maybeExpr();

    /* Special case deoptimization on __proto__, __count__ and __parent__. */
    if ((op == JSOP_GETPROP || op == JSOP_CALLPROP) &&
        (pn->pn_atom == cx->runtime->atomState.protoAtom ||
         pn->pn_atom == cx->runtime->atomState.parentAtom ||
         pn->pn_atom == cx->runtime->atomState.countAtom)) {
        if (pn2 && !js_EmitTree(cx, cg, pn2))
            return JS_FALSE;
        return EmitSpecialPropOp(cx, pn, callContext ? JSOP_CALLELEM : JSOP_GETELEM, cg);
    }

    if (callContext) {
        JS_ASSERT(pn->pn_type == TOK_DOT);
        JS_ASSERT(op == JSOP_GETPROP);
        op = JSOP_CALLPROP;
    } else if (op == JSOP_GETPROP && pn->pn_type == TOK_DOT) {
        if (pn2->pn_op == JSOP_THIS) {
            if (pn->pn_atom != cx->runtime->atomState.lengthAtom) {
                /* Fast path for gets of |this.foo|. */
                return EmitAtomOp(cx, pn, JSOP_GETTHISPROP, cg);
            }
        } else if (pn2->pn_type == TOK_NAME) {
            /*
             * Try to optimize:
             *  - arguments.length into JSOP_ARGCNT
             *  - argname.prop into JSOP_GETARGPROP
             *  - localname.prop into JSOP_GETLOCALPROP
             * but don't do this if the property is 'length' -- prefer to emit
             * JSOP_GETARG, etc., and then JSOP_LENGTH.
             */
            if (!BindNameToSlot(cx, cg, pn2))
                return JS_FALSE;
            if (pn->pn_atom == cx->runtime->atomState.lengthAtom) {
                if (pn2->pn_op == JSOP_ARGUMENTS)
                    return js_Emit1(cx, cg, JSOP_ARGCNT) >= 0;
            } else {
                switch (pn2->pn_op) {
                  case JSOP_GETARG:
                    op = JSOP_GETARGPROP;
                    goto do_indexconst;
                  case JSOP_GETLOCAL:
                    op = JSOP_GETLOCALPROP;
                  do_indexconst: {
                    JSAtomListElement *ale;
                    jsatomid atomIndex;

                    ale = cg->atomList.add(cg->compiler, pn->pn_atom);
                    if (!ale)
                        return JS_FALSE;
                    atomIndex = ALE_INDEX(ale);
                    return EmitSlotIndexOp(cx, op, pn2->pn_cookie, atomIndex, cg);
                  }

                  default:;
                }
            }
        }
    }

    /*
     * If the object operand is also a dotted property reference, reverse the
     * list linked via pn_expr temporarily so we can iterate over it from the
     * bottom up (reversing again as we go), to avoid excessive recursion.
     */
    if (pn2->pn_type == TOK_DOT) {
        pndot = pn2;
        pnup = NULL;
        top = CG_OFFSET(cg);
        for (;;) {
            /* Reverse pndot->pn_expr to point up, not down. */
            pndot->pn_offset = top;
            JS_ASSERT(!pndot->pn_used);
            pndown = pndot->pn_expr;
            pndot->pn_expr = pnup;
            if (pndown->pn_type != TOK_DOT)
                break;
            pnup = pndot;
            pndot = pndown;
        }

        /* pndown is a primary expression, not a dotted property reference. */
        if (!js_EmitTree(cx, cg, pndown))
            return JS_FALSE;

        do {
            /* Walk back up the list, emitting annotated name ops. */
            if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
                               CG_OFFSET(cg) - pndown->pn_offset) < 0) {
                return JS_FALSE;
            }

            /*
             * Special case deoptimization on __proto__, __count__ and
             * __parent__, as above.
             */
            if (pndot->pn_arity == PN_NAME &&
                (pndot->pn_atom == cx->runtime->atomState.protoAtom ||
                 pndot->pn_atom == cx->runtime->atomState.parentAtom ||
                 pndot->pn_atom == cx->runtime->atomState.countAtom)) {
                if (!EmitSpecialPropOp(cx, pndot, JSOP_GETELEM, cg))
                    return JS_FALSE;
            } else if (!EmitAtomOp(cx, pndot, PN_OP(pndot), cg)) {
                return JS_FALSE;
            }

            /* Reverse the pn_expr link again. */
            pnup = pndot->pn_expr;
            pndot->pn_expr = pndown;
            pndown = pndot;
        } while ((pndot = pnup) != NULL);
    } else {
        if (!js_EmitTree(cx, cg, pn2))
            return JS_FALSE;
    }

    if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
                       CG_OFFSET(cg) - pn2->pn_offset) < 0) {
        return JS_FALSE;
    }

    return EmitAtomOp(cx, pn, op, cg);
}
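
/*
 * Rough examples of the special cases above (actual output may vary with
 * flags and shadowing):
 *   this.foo          => JSOP_GETTHISPROP "foo"   (unless foo is 'length')
 *   arguments.length  => JSOP_ARGCNT
 *   argname.prop      => JSOP_GETARGPROP <slot> "prop"
 *   obj.__proto__     => deoptimized through EmitSpecialPropOp / JSOP_GETELEM
 */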
static JSBool
EmitElemOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
{
    ptrdiff_t top;
    JSParseNode *left, *right, *next, ltmp, rtmp;
    jsint slot;

    top = CG_OFFSET(cg);
    if (pn->pn_arity == PN_LIST) {
        /* Left-associative operator chain to avoid too much recursion. */
        JS_ASSERT(pn->pn_op == JSOP_GETELEM);
        JS_ASSERT(pn->pn_count >= 3);
        left = pn->pn_head;
        right = pn->last();
        next = left->pn_next;
        JS_ASSERT(next != right);

        /*
         * Try to optimize arguments[0][j]... into JSOP_ARGSUB<0> followed by
         * one or more index expression and JSOP_GETELEM op pairs.
         */
        if (left->pn_type == TOK_NAME && next->pn_type == TOK_NUMBER) {
            if (!BindNameToSlot(cx, cg, left))
                return JS_FALSE;
            if (left->pn_op == JSOP_ARGUMENTS &&
                JSDOUBLE_IS_INT(next->pn_dval, slot) &&
                (jsuint)slot < JS_BIT(16)) {
                /*
                 * arguments[i]() requires arguments object as "this".
                 * Check that we never generates list for that usage.
                 */
                JS_ASSERT(op != JSOP_CALLELEM || next->pn_next);
                left->pn_offset = next->pn_offset = top;
                EMIT_UINT16_IMM_OP(JSOP_ARGSUB, (jsatomid)slot);
                left = next;
                next = left->pn_next;
            }
        }

        /*
         * Check whether we generated JSOP_ARGSUB, just above, and have only
         * one more index expression to emit.  Given arguments[0][j], we must
         * skip the while loop altogether, falling through to emit code for j
         * (in the subtree referenced by right), followed by the annotated op,
         * at the bottom of this function.
         */
        JS_ASSERT(next != right || pn->pn_count == 3);
        if (left == pn->pn_head) {
            if (!js_EmitTree(cx, cg, left))
                return JS_FALSE;
        }
        while (next != right) {
            if (!js_EmitTree(cx, cg, next))
                return JS_FALSE;
            if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
                return JS_FALSE;
            if (js_Emit1(cx, cg, JSOP_GETELEM) < 0)
                return JS_FALSE;
            next = next->pn_next;
        }
    } else {
        if (pn->pn_arity == PN_NAME) {
            /*
             * Set left and right so pn appears to be a TOK_LB node, instead
             * of a TOK_DOT node.  See the TOK_FOR/IN case in js_EmitTree, and
             * EmitDestructuringOps nearer below.  In the destructuring case,
             * the base expression (pn_expr) of the name may be null, which
             * means we have to emit a JSOP_BINDNAME.
             */
            left = pn->maybeExpr();
            if (!left) {
                left = &ltmp;
                left->pn_type = TOK_STRING;
                left->pn_op = JSOP_BINDNAME;
                left->pn_arity = PN_NULLARY;
                left->pn_pos = pn->pn_pos;
                left->pn_atom = pn->pn_atom;
            }
            right = &rtmp;
            right->pn_type = TOK_STRING;
            JS_ASSERT(ATOM_IS_STRING(pn->pn_atom));
            right->pn_op = js_IsIdentifier(ATOM_TO_STRING(pn->pn_atom))
                           ? JSOP_QNAMEPART
                           : JSOP_STRING;
            right->pn_arity = PN_NULLARY;
            right->pn_pos = pn->pn_pos;
            right->pn_atom = pn->pn_atom;
        } else {
            JS_ASSERT(pn->pn_arity == PN_BINARY);
            left = pn->pn_left;
            right = pn->pn_right;
        }

        /* Try to optimize arguments[0] (e.g.) into JSOP_ARGSUB<0>. */
        if (op == JSOP_GETELEM &&
            left->pn_type == TOK_NAME &&
            right->pn_type == TOK_NUMBER) {
            if (!BindNameToSlot(cx, cg, left))
                return JS_FALSE;
            if (left->pn_op == JSOP_ARGUMENTS &&
                JSDOUBLE_IS_INT(right->pn_dval, slot) &&
                (jsuint)slot < JS_BIT(16)) {
                left->pn_offset = right->pn_offset = top;
                EMIT_UINT16_IMM_OP(JSOP_ARGSUB, (jsatomid)slot);
                return JS_TRUE;
            }
        }

        if (!js_EmitTree(cx, cg, left))
            return JS_FALSE;
    }

    /* The right side of the descendant operator is implicitly quoted. */
    JS_ASSERT(op != JSOP_DESCENDANTS || right->pn_type != TOK_STRING ||
              right->pn_op == JSOP_QNAMEPART);
    if (!js_EmitTree(cx, cg, right))
        return JS_FALSE;
    if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
        return JS_FALSE;
    return js_Emit1(cx, cg, op) >= 0;
}
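
/*
 * Rough example of the JSOP_ARGSUB optimization above (assuming 'arguments'
 * is not shadowed and the index is a small non-negative integer literal):
 *   arguments[0]     => JSOP_ARGSUB 0
 *   arguments[0][j]  => JSOP_ARGSUB 0; <code for j>; annotated JSOP_GETELEM
 * Other bases and indexes fall through to the generic left; right; op form.
 */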
static JSBool
EmitNumberOp(JSContext *cx, jsdouble dval, JSCodeGenerator *cg)
{
    jsint ival;
    uint32 u;
    ptrdiff_t off;
    jsbytecode *pc;
    JSAtom *atom;
    JSAtomListElement *ale;

    if (JSDOUBLE_IS_INT(dval, ival) && INT_FITS_IN_JSVAL(ival)) {
        if (ival == 0)
            return js_Emit1(cx, cg, JSOP_ZERO) >= 0;
        if (ival == 1)
            return js_Emit1(cx, cg, JSOP_ONE) >= 0;
        if ((jsint)(int8)ival == ival)
            return js_Emit2(cx, cg, JSOP_INT8, (jsbytecode)(int8)ival) >= 0;

        u = (uint32)ival;
        if (u < JS_BIT(16)) {
            EMIT_UINT16_IMM_OP(JSOP_UINT16, u);
        } else if (u < JS_BIT(24)) {
            off = js_EmitN(cx, cg, JSOP_UINT24, 3);
            if (off < 0)
                return JS_FALSE;
            pc = CG_CODE(cg, off);
            SET_UINT24(pc, u);
        } else {
            off = js_EmitN(cx, cg, JSOP_INT32, 4);
            if (off < 0)
                return JS_FALSE;
            pc = CG_CODE(cg, off);
            SET_INT32(pc, ival);
        }
        return JS_TRUE;
    }

    atom = js_AtomizeDouble(cx, dval);
    if (!atom)
        return JS_FALSE;

    ale = cg->atomList.add(cg->compiler, atom);
    if (!ale)
        return JS_FALSE;
    return EmitIndexOp(cx, JSOP_DOUBLE, ALE_INDEX(ale), cg);
}
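
/*
 * Illustrative opcode selection for numeric literals, per the tests above:
 *   0       => JSOP_ZERO        1        => JSOP_ONE
 *   -7      => JSOP_INT8        1000     => JSOP_UINT16
 *   100000  => JSOP_UINT24      -100000  => JSOP_INT32
 *   3.14    => JSOP_DOUBLE, with an atomized double in the atom map
 * Boundary cases depend on INT_FITS_IN_JSVAL and the JS_BIT limits used above.
 */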
static JSBool
EmitSwitch(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
           JSStmtInfo *stmtInfo)
{
    JSOp switchOp;
    JSBool ok, hasDefault, constPropagated;
    ptrdiff_t top, off, defaultOffset;
    JSParseNode *pn2, *pn3, *pn4;
    uint32 caseCount, tableLength;
    JSParseNode **table;
    jsdouble d;
    jsint i, low, high;
    jsval v;
    JSAtom *atom;
    JSAtomListElement *ale;
    intN noteIndex;
    size_t switchSize, tableSize;
    jsbytecode *pc, *savepc;
#if JS_HAS_BLOCK_SCOPE
    jsint count;
#endif

    /* Try for most optimal, fall back if not dense ints, and per ECMAv2. */
    switchOp = JSOP_TABLESWITCH;
    ok = JS_TRUE;
    hasDefault = constPropagated = JS_FALSE;
    defaultOffset = -1;

    /*
     * If the switch contains let variables scoped by its body, model the
     * resulting block on the stack first, before emitting the discriminant's
     * bytecode (in case the discriminant contains a stack-model dependency
     * such as a let expression).
     */
    pn2 = pn->pn_right;
#if JS_HAS_BLOCK_SCOPE
    if (pn2->pn_type == TOK_LEXICALSCOPE) {
        /*
         * Push the body's block scope before discriminant code-gen for proper
         * static block scope linkage in case the discriminant contains a let
         * expression.  The block's locals must lie under the discriminant on
         * the stack so that case-dispatch bytecodes can find the discriminant
         * on top of stack.
         */
        count = OBJ_BLOCK_COUNT(cx, pn2->pn_objbox->object);
        js_PushBlockScope(cg, stmtInfo, pn2->pn_objbox->object, -1);
        stmtInfo->type = STMT_SWITCH;

        /* Emit JSOP_ENTERBLOCK before code to evaluate the discriminant. */
        if (!EmitEnterBlock(cx, pn2, cg))
            return JS_FALSE;

        /*
         * Pop the switch's statement info around discriminant code-gen. Note
         * how this leaves cg->blockChain referencing the switch's
         * block scope object, which is necessary for correct block parenting
         * in the case where the discriminant contains a let expression.
         */
        cg->topStmt = stmtInfo->down;
        cg->topScopeStmt = stmtInfo->downScope;
    }
#endif

    /*
     * Emit code for the discriminant first (or nearly first, in the case of a
     * switch whose body is a block scope).
     */
    if (!js_EmitTree(cx, cg, pn->pn_left))
        return JS_FALSE;

    /* Switch bytecodes run from here till end of final case. */
    top = CG_OFFSET(cg);
#if !JS_HAS_BLOCK_SCOPE
    js_PushStatement(cg, stmtInfo, STMT_SWITCH, top);
#else
    if (pn2->pn_type == TOK_LC) {
        js_PushStatement(cg, stmtInfo, STMT_SWITCH, top);
    } else {
        /* Re-push the switch's statement info record. */
        cg->topStmt = cg->topScopeStmt = stmtInfo;

        /* Set the statement info record's idea of top. */
        stmtInfo->update = top;

        /* Advance pn2 to refer to the switch case list. */
        pn2 = pn2->expr();
    }
#endif

    caseCount = pn2->pn_count;
    tableLength = 0;
    table = NULL;

    if (caseCount == 0 ||
        (caseCount == 1 &&
         (hasDefault = (pn2->pn_head->pn_type == TOK_DEFAULT)))) {
        caseCount = 0;
        low = 0;
        high = -1;
    } else {
#define INTMAP_LENGTH   256
        jsbitmap intmap_space[INTMAP_LENGTH];
        jsbitmap *intmap = NULL;
        int32 intmap_bitlen = 0;

        low  = JSVAL_INT_MAX;
        high = JSVAL_INT_MIN;

        for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
            if (pn3->pn_type == TOK_DEFAULT) {
                hasDefault = JS_TRUE;
                caseCount--;    /* one of the "cases" was the default */
                continue;
            }

            JS_ASSERT(pn3->pn_type == TOK_CASE);
            if (switchOp == JSOP_CONDSWITCH)
                continue;

            pn4 = pn3->pn_left;
            while (pn4->pn_type == TOK_RP)
                pn4 = pn4->pn_kid;
            switch (pn4->pn_type) {
              case TOK_NUMBER:
                d = pn4->pn_dval;
                if (JSDOUBLE_IS_INT(d, i) && INT_FITS_IN_JSVAL(i)) {
                    pn3->pn_val = INT_TO_JSVAL(i);
                } else {
                    atom = js_AtomizeDouble(cx, d);
                    if (!atom) {
                        ok = JS_FALSE;
                        goto release;
                    }
                    pn3->pn_val = ATOM_KEY(atom);
                }
                break;
              case TOK_STRING:
                pn3->pn_val = ATOM_KEY(pn4->pn_atom);
                break;
              case TOK_NAME:
                if (!pn4->maybeExpr()) {
                    ok = LookupCompileTimeConstant(cx, cg, pn4->pn_atom, &v);
                    if (!ok)
                        goto release;
                    if (v != JSVAL_HOLE) {
                        if (!JSVAL_IS_PRIMITIVE(v)) {
                            /*
                             * XXX JSOP_LOOKUPSWITCH does not support const-
                             * propagated object values, see bug 407186.
                             */
                            switchOp = JSOP_CONDSWITCH;
                            continue;
                        }
                        pn3->pn_val = v;
                        constPropagated = JS_TRUE;
                        break;
                    }
                }
                /* FALL THROUGH */
              case TOK_PRIMARY:
                if (pn4->pn_op == JSOP_TRUE) {
                    pn3->pn_val = JSVAL_TRUE;
                    break;
                }
                if (pn4->pn_op == JSOP_FALSE) {
                    pn3->pn_val = JSVAL_FALSE;
                    break;
                }
                /* FALL THROUGH */
              default:
                switchOp = JSOP_CONDSWITCH;
                continue;
            }

            JS_ASSERT(JSVAL_IS_PRIMITIVE(pn3->pn_val));

            if (switchOp != JSOP_TABLESWITCH)
                continue;
            if (!JSVAL_IS_INT(pn3->pn_val)) {
                switchOp = JSOP_LOOKUPSWITCH;
                continue;
            }
            i = JSVAL_TO_INT(pn3->pn_val);
            if ((jsuint)(i + (jsint)JS_BIT(15)) >= (jsuint)JS_BIT(16)) {
                switchOp = JSOP_LOOKUPSWITCH;
                continue;
            }
            if (i < low)
                low = i;
            if (high < i)
                high = i;

            /*
             * Check for duplicates, which require a JSOP_LOOKUPSWITCH.
             * We bias i by 65536 if it's negative, and hope that's a rare
             * case (because it requires a malloc'd bitmap).
             */
            if (i < 0)
                i += JS_BIT(16);
            if (i >= intmap_bitlen) {
                if (!intmap &&
                    i < (INTMAP_LENGTH << JS_BITS_PER_WORD_LOG2)) {
                    intmap = intmap_space;
                    intmap_bitlen = INTMAP_LENGTH << JS_BITS_PER_WORD_LOG2;
                } else {
                    /* Just grab 8K for the worst-case bitmap. */
                    intmap_bitlen = JS_BIT(16);
                    intmap = (jsbitmap *)
                        cx->malloc((JS_BIT(16) >> JS_BITS_PER_WORD_LOG2)
                                   * sizeof(jsbitmap));
                    if (!intmap) {
                        JS_ReportOutOfMemory(cx);
                        return JS_FALSE;
                    }
                }
                memset(intmap, 0, intmap_bitlen >> JS_BITS_PER_BYTE_LOG2);
            }
            if (JS_TEST_BIT(intmap, i)) {
                switchOp = JSOP_LOOKUPSWITCH;
                continue;
            }
            JS_SET_BIT(intmap, i);
        }

      release:
        if (intmap && intmap != intmap_space)
            cx->free(intmap);
        if (!ok)
            return JS_FALSE;

        /*
         * Compute table length and select lookup instead if overlarge or
         * more than half-sparse.
         */
        if (switchOp == JSOP_TABLESWITCH) {
            tableLength = (uint32)(high - low + 1);
            if (tableLength >= JS_BIT(16) || tableLength > 2 * caseCount)
                switchOp = JSOP_LOOKUPSWITCH;
        } else if (switchOp == JSOP_LOOKUPSWITCH) {
            /*
             * Lookup switch supports only atom indexes below 64K limit.
             * Conservatively estimate the maximum possible index during
             * switch generation and use conditional switch if it exceeds
             * the limit.
             */
            if (caseCount + cg->atomList.count > JS_BIT(16))
                switchOp = JSOP_CONDSWITCH;
        }
    }

    /*
     * Emit a note with two offsets: first tells total switch code length,
     * second tells offset to first JSOP_CASE if condswitch.
     */
    noteIndex = js_NewSrcNote3(cx, cg, SRC_SWITCH, 0, 0);
    if (noteIndex < 0)
        return JS_FALSE;

    if (switchOp == JSOP_CONDSWITCH) {
        /*
         * 0 bytes of immediate for unoptimized ECMAv2 switch.
         */
        switchSize = 0;
    } else if (switchOp == JSOP_TABLESWITCH) {
        /*
         * 3 offsets (len, low, high) before the table, 1 per entry.
         */
        switchSize = (size_t)(JUMP_OFFSET_LEN * (3 + tableLength));
    } else {
        /*
         * JSOP_LOOKUPSWITCH:
         * 1 offset (len) and 1 atom index (npairs) before the table,
         * 1 atom index and 1 jump offset per entry.
         */
        switchSize = (size_t)(JUMP_OFFSET_LEN + INDEX_LEN +
                              (INDEX_LEN + JUMP_OFFSET_LEN) * caseCount);
    }

    /*
     * Emit switchOp followed by switchSize bytes of jump or lookup table.
     *
     * If switchOp is JSOP_LOOKUPSWITCH or JSOP_TABLESWITCH, it is crucial
     * to emit the immediate operand(s) by which bytecode readers such as
     * BuildSpanDepTable discover the length of the switch opcode *before*
     * calling js_SetJumpOffset (which may call BuildSpanDepTable).  It's
     * also important to zero all unknown jump offset immediate operands,
     * so they can be converted to span dependencies with null targets to
     * be computed later (js_EmitN zeros switchSize bytes after switchOp).
     */
    if (js_EmitN(cx, cg, switchOp, switchSize) < 0)
        return JS_FALSE;

    off = -1;
    if (switchOp == JSOP_CONDSWITCH) {
        intN caseNoteIndex = -1;
        JSBool beforeCases = JS_TRUE;

        /* Emit code for evaluating cases and jumping to case statements. */
        for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
            pn4 = pn3->pn_left;
            if (pn4 && !js_EmitTree(cx, cg, pn4))
                return JS_FALSE;
            if (caseNoteIndex >= 0) {
                /* off is the previous JSOP_CASE's bytecode offset. */
                if (!js_SetSrcNoteOffset(cx, cg, (uintN)caseNoteIndex, 0,
                                         CG_OFFSET(cg) - off)) {
                    return JS_FALSE;
                }
            }
            if (!pn4) {
                JS_ASSERT(pn3->pn_type == TOK_DEFAULT);
                continue;
            }
            caseNoteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
            if (caseNoteIndex < 0)
                return JS_FALSE;
            off = EmitJump(cx, cg, JSOP_CASE, 0);
            if (off < 0)
                return JS_FALSE;
            pn3->pn_offset = off;
            if (beforeCases) {
                uintN noteCount, noteCountDelta;

                /* Switch note's second offset is to first JSOP_CASE. */
                noteCount = CG_NOTE_COUNT(cg);
                if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 1,
                                         off - top)) {
                    return JS_FALSE;
                }
                noteCountDelta = CG_NOTE_COUNT(cg) - noteCount;
                if (noteCountDelta != 0)
                    caseNoteIndex += noteCountDelta;
                beforeCases = JS_FALSE;
            }
        }

        /*
         * If we didn't have an explicit default (which could fall in between
         * cases, preventing us from fusing this js_SetSrcNoteOffset with the
         * call in the loop above), link the last case to the implicit default
         * for the decompiler.
         */
        if (!hasDefault &&
            caseNoteIndex >= 0 &&
            !js_SetSrcNoteOffset(cx, cg, (uintN)caseNoteIndex, 0,
                                 CG_OFFSET(cg) - off)) {
            return JS_FALSE;
        }

        /* Emit default even if no explicit default statement. */
        defaultOffset = EmitJump(cx, cg, JSOP_DEFAULT, 0);
        if (defaultOffset < 0)
            return JS_FALSE;
    } else {
        pc = CG_CODE(cg, top + JUMP_OFFSET_LEN);

        if (switchOp == JSOP_TABLESWITCH) {
            /* Fill in switch bounds, which we know fit in 16-bit offsets. */
            SET_JUMP_OFFSET(pc, low);
            pc += JUMP_OFFSET_LEN;
            SET_JUMP_OFFSET(pc, high);
            pc += JUMP_OFFSET_LEN;

            /*
             * Use malloc to avoid arena bloat for programs with many switches.
             * We free table if non-null at label out, so all control flow must
             * exit this function through goto out or goto bad.
             */
            if (tableLength != 0) {
                tableSize = (size_t)tableLength * sizeof *table;
                table = (JSParseNode **) cx->malloc(tableSize);
                if (!table)
                    return JS_FALSE;
                memset(table, 0, tableSize);
                for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
                    if (pn3->pn_type == TOK_DEFAULT)
                        continue;
                    i = JSVAL_TO_INT(pn3->pn_val);
                    i -= low;
                    JS_ASSERT((uint32)i < tableLength);
                    table[i] = pn3;
                }
            }
        } else {
            JS_ASSERT(switchOp == JSOP_LOOKUPSWITCH);

            /* Fill in the number of cases. */
            SET_INDEX(pc, caseCount);
            pc += INDEX_LEN;
        }

        /*
         * After this point, all control flow involving JSOP_TABLESWITCH
         * must set ok and goto out to exit this function.  To keep things
         * simple, all switchOp cases exit that way.
         */
        MUST_FLOW_THROUGH("out");
        if (cg->spanDeps) {
            /*
             * We have already generated at least one big jump so we must
             * explicitly add span dependencies for the switch jumps. When
             * called below, js_SetJumpOffset can only do it when patching
             * the first big jump or when cg->spanDeps is null.
             */
            if (!AddSwitchSpanDeps(cx, cg, CG_CODE(cg, top)))
                goto bad;
        }

        if (constPropagated) {
            /*
             * Skip switchOp, as we are not setting jump offsets in the two
             * for loops below.  We'll restore CG_NEXT(cg) from savepc after,
             * unless there was an error.
             */
            savepc = CG_NEXT(cg);
            CG_NEXT(cg) = pc + 1;
            if (switchOp == JSOP_TABLESWITCH) {
                for (i = 0; i < (jsint)tableLength; i++) {
                    pn3 = table[i];
                    if (pn3 &&
                        (pn4 = pn3->pn_left) != NULL &&
                        pn4->pn_type == TOK_NAME) {
                        /* Note a propagated constant with the const's name. */
                        JS_ASSERT(!pn4->maybeExpr());
                        ale = cg->atomList.add(cg->compiler, pn4->pn_atom);
                        if (!ale)
                            goto bad;
                        CG_NEXT(cg) = pc;
                        if (js_NewSrcNote2(cx, cg, SRC_LABEL, (ptrdiff_t)
                                           ALE_INDEX(ale)) < 0) {
                            goto bad;
                        }
                    }
                    pc += JUMP_OFFSET_LEN;
                }
            } else {
                for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
                    pn4 = pn3->pn_left;
                    if (pn4 && pn4->pn_type == TOK_NAME) {
                        /* Note a propagated constant with the const's name. */
                        JS_ASSERT(!pn4->maybeExpr());
                        ale = cg->atomList.add(cg->compiler, pn4->pn_atom);
                        if (!ale)
                            goto bad;
                        CG_NEXT(cg) = pc;
                        if (js_NewSrcNote2(cx, cg, SRC_LABEL, (ptrdiff_t)
                                           ALE_INDEX(ale)) < 0) {
                            goto bad;
                        }
                    }
                    pc += INDEX_LEN + JUMP_OFFSET_LEN;
                }
            }
            CG_NEXT(cg) = savepc;
        }
    }

    /* Emit code for each case's statements, copying pn_offset up to pn3. */
    for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
        if (switchOp == JSOP_CONDSWITCH && pn3->pn_type != TOK_DEFAULT)
            CHECK_AND_SET_JUMP_OFFSET_AT_CUSTOM(cx, cg, pn3->pn_offset, goto bad);
        pn4 = pn3->pn_right;
        ok = js_EmitTree(cx, cg, pn4);
        if (!ok)
            goto out;
        pn3->pn_offset = pn4->pn_offset;
        if (pn3->pn_type == TOK_DEFAULT)
            off = pn3->pn_offset - top;
    }

    if (!hasDefault) {
        /* If no default case, offset for default is to end of switch. */
        off = CG_OFFSET(cg) - top;
    }

    /* We better have set "off" by now. */
    JS_ASSERT(off != -1);

    /* Set the default offset (to end of switch if no default). */
    if (switchOp == JSOP_CONDSWITCH) {
        pc = NULL;
        JS_ASSERT(defaultOffset != -1);
        ok = js_SetJumpOffset(cx, cg, CG_CODE(cg, defaultOffset),
                              off - (defaultOffset - top));
        if (!ok)
            goto out;
    } else {
        pc = CG_CODE(cg, top);
        ok = js_SetJumpOffset(cx, cg, pc, off);
        if (!ok)
            goto out;
        pc += JUMP_OFFSET_LEN;
    }

    /* Set the SRC_SWITCH note's offset operand to tell end of switch. */
    off = CG_OFFSET(cg) - top;
    ok = js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, off);
    if (!ok)
        goto out;

    if (switchOp == JSOP_TABLESWITCH) {
        /* Skip over the already-initialized switch bounds. */
        pc += 2 * JUMP_OFFSET_LEN;

        /* Fill in the jump table, if there is one. */
        for (i = 0; i < (jsint)tableLength; i++) {
            pn3 = table[i];
            off = pn3 ? pn3->pn_offset - top : 0;
            ok = js_SetJumpOffset(cx, cg, pc, off);
            if (!ok)
                goto out;
            pc += JUMP_OFFSET_LEN;
        }
    } else if (switchOp == JSOP_LOOKUPSWITCH) {
        /* Skip over the already-initialized number of cases. */
        pc += INDEX_LEN;

        for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
            if (pn3->pn_type == TOK_DEFAULT)
                continue;
            if (!js_AtomizePrimitiveValue(cx, pn3->pn_val, &atom))
                goto bad;
            ale = cg->atomList.add(cg->compiler, atom);
            if (!ale)
                goto bad;
            SET_INDEX(pc, ALE_INDEX(ale));
            pc += INDEX_LEN;

            off = pn3->pn_offset - top;
            ok = js_SetJumpOffset(cx, cg, pc, off);
            if (!ok)
                goto out;
            pc += JUMP_OFFSET_LEN;
        }
    }

out:
    if (table)
        cx->free(table);
    if (ok) {
        ok = js_PopStatementCG(cx, cg);

#if JS_HAS_BLOCK_SCOPE
        if (ok && pn->pn_right->pn_type == TOK_LEXICALSCOPE)
            EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK, count);
#endif
    }
    return ok;

bad:
    ok = JS_FALSE;
    goto out;
}
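
/*
 * Illustrative dispatch choices made above (thresholds are as coded, this is
 * only a sketch):
 *   switch (x) { case 1: case 2: case 3: ... }  => JSOP_TABLESWITCH (dense)
 *   switch (x) { case 1: case 1000000: ... }    => JSOP_LOOKUPSWITCH (sparse)
 *   switch (x) { case "a": case "b": ... }      => JSOP_LOOKUPSWITCH
 * Non-constant or non-primitive case values force JSOP_CONDSWITCH.
 */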
JSBool
js_EmitFunctionScript(JSContext *cx, JSCodeGenerator *cg, JSParseNode *body)
{
    if (cg->flags & TCF_FUN_IS_GENERATOR) {
        /* JSOP_GENERATOR must be the first instruction. */
        CG_SWITCH_TO_PROLOG(cg);
        JS_ASSERT(CG_NEXT(cg) == CG_BASE(cg));
        if (js_Emit1(cx, cg, JSOP_GENERATOR) < 0)
            return JS_FALSE;
        CG_SWITCH_TO_MAIN(cg);
    }

    return js_EmitTree(cx, cg, body) &&
           js_Emit1(cx, cg, JSOP_STOP) >= 0 &&
           js_NewScriptFromCG(cx, cg);
}
/* A macro for inlining at the top of js_EmitTree (whence it came). */
#define UPDATE_LINE_NUMBER_NOTES(cx, cg, line)                                \
    JS_BEGIN_MACRO                                                            \
        uintN line_ = (line);                                                 \
        uintN delta_ = line_ - CG_CURRENT_LINE(cg);                           \
        if (delta_ != 0) {                                                    \
            /*                                                                \
             * Encode any change in the current source line number by using  \
             * either several SRC_NEWLINE notes or just one SRC_SETLINE note, \
             * whichever consumes less space.                                 \
             *                                                                \
             * NB: We handle backward line number deltas (possible with for   \
             * loops where the update part is emitted after the body, but its \
             * line number is <= any line number in the body) here by letting \
             * unsigned delta_ wrap to a very large number, which triggers a  \
             * SRC_SETLINE.                                                   \
             */                                                               \
            CG_CURRENT_LINE(cg) = line_;                                      \
            if (delta_ >= (uintN)(2 + ((line_ > SN_3BYTE_OFFSET_MASK)<<1))) { \
                if (js_NewSrcNote2(cx, cg, SRC_SETLINE, (ptrdiff_t)line_) < 0)\
                    return JS_FALSE;                                          \
            } else {                                                          \
                do {                                                          \
                    if (js_NewSrcNote(cx, cg, SRC_NEWLINE) < 0)               \
                        return JS_FALSE;                                      \
                } while (--delta_ != 0);                                      \
            }                                                                 \
        }                                                                     \
    JS_END_MACRO

/* A function, so that we avoid macro-bloating all the other callsites. */
static JSBool
UpdateLineNumberNotes(JSContext *cx, JSCodeGenerator *cg, uintN line)
{
    UPDATE_LINE_NUMBER_NOTES(cx, cg, line);
    return JS_TRUE;
}
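
/*
 * Example of the encoding chosen above (a sketch; exact sizes depend on
 * SN_3BYTE_OFFSET_MASK): advancing from line 10 to line 11 or 12 emits one
 * or two SRC_NEWLINE notes, while a jump from line 10 to line 200 -- or any
 * backward delta, which wraps around as a huge unsigned value -- emits a
 * single SRC_SETLINE note carrying the absolute line number.
 */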
static JSBool
MaybeEmitVarDecl(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
                 JSParseNode *pn, jsatomid *result)
{
    jsatomid atomIndex;
    JSAtomListElement *ale;

    if (pn->pn_cookie != FREE_UPVAR_COOKIE) {
        atomIndex = (jsatomid) UPVAR_FRAME_SLOT(pn->pn_cookie);
    } else {
        ale = cg->atomList.add(cg->compiler, pn->pn_atom);
        if (!ale)
            return JS_FALSE;
        atomIndex = ALE_INDEX(ale);
    }

    if (JOF_OPTYPE(pn->pn_op) == JOF_ATOM &&
        (!(cg->flags & TCF_IN_FUNCTION) || (cg->flags & TCF_FUN_HEAVYWEIGHT))) {
        CG_SWITCH_TO_PROLOG(cg);
        if (!UpdateLineNumberNotes(cx, cg, pn->pn_pos.begin.lineno))
            return JS_FALSE;
        EMIT_INDEX_OP(prologOp, atomIndex);
        CG_SWITCH_TO_MAIN(cg);
    }

    if (result)
        *result = atomIndex;
    return JS_TRUE;
}
#if JS_HAS_DESTRUCTURING

typedef JSBool
(*DestructuringDeclEmitter)(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
                            JSParseNode *pn);

static JSBool
EmitDestructuringDecl(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
                      JSParseNode *pn)
{
    JS_ASSERT(pn->pn_type == TOK_NAME);
    if (!BindNameToSlot(cx, cg, pn))
        return JS_FALSE;

    JS_ASSERT(PN_OP(pn) != JSOP_ARGUMENTS && PN_OP(pn) != JSOP_CALLEE);
    return MaybeEmitVarDecl(cx, cg, prologOp, pn, NULL);
}

static JSBool
EmitDestructuringDecls(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
                       JSParseNode *pn)
{
    JSParseNode *pn2, *pn3;
    DestructuringDeclEmitter emitter;

    if (pn->pn_type == TOK_RB) {
        for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
            if (pn2->pn_type == TOK_COMMA)
                continue;
            emitter = (pn2->pn_type == TOK_NAME)
                      ? EmitDestructuringDecl
                      : EmitDestructuringDecls;
            if (!emitter(cx, cg, prologOp, pn2))
                return JS_FALSE;
        }
    } else {
        JS_ASSERT(pn->pn_type == TOK_RC);
        for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
            pn3 = pn2->pn_right;
            emitter = (pn3->pn_type == TOK_NAME)
                      ? EmitDestructuringDecl
                      : EmitDestructuringDecls;
            if (!emitter(cx, cg, prologOp, pn3))
                return JS_FALSE;
        }
    }
    return JS_TRUE;
}
static JSBool
EmitDestructuringOpsHelper(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn);

static JSBool
EmitDestructuringLHS(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
{
    jsuint slot;

    /* Skip any parenthesization. */
    while (pn->pn_type == TOK_RP)
        pn = pn->pn_kid;

    /*
     * Now emit the lvalue opcode sequence.  If the lvalue is a nested
     * destructuring initialiser-form, call ourselves to handle it, then
     * pop the matched value.  Otherwise emit an lvalue bytecode sequence
     * ending with a JSOP_ENUMELEM or equivalent op.
     */
    if (pn->pn_type == TOK_RB || pn->pn_type == TOK_RC) {
        if (!EmitDestructuringOpsHelper(cx, cg, pn))
            return JS_FALSE;
        if (js_Emit1(cx, cg, JSOP_POP) < 0)
            return JS_FALSE;
    } else {
        if (pn->pn_type == TOK_NAME) {
            if (!BindNameToSlot(cx, cg, pn))
                return JS_FALSE;
            if (pn->isConst() && !pn->isInitialized())
                return js_Emit1(cx, cg, JSOP_POP) >= 0;
        }

        switch (pn->pn_op) {
          case JSOP_SETNAME:
            /*
             * NB: pn is a PN_NAME node, not a PN_BINARY.  Nevertheless,
             * we want to emit JSOP_ENUMELEM, which has format JOF_ELEM.
             * So here and for JSOP_ENUMCONSTELEM, we use EmitElemOp.
             */
            if (!EmitElemOp(cx, pn, JSOP_ENUMELEM, cg))
                return JS_FALSE;
            break;

          case JSOP_SETCONST:
            if (!EmitElemOp(cx, pn, JSOP_ENUMCONSTELEM, cg))
                return JS_FALSE;
            break;

          case JSOP_SETLOCAL:
            slot = (jsuint) pn->pn_cookie;
            EMIT_UINT16_IMM_OP(JSOP_SETLOCALPOP, slot);
            break;

          case JSOP_SETARG:
          case JSOP_SETGVAR:
            slot = (jsuint) pn->pn_cookie;
            EMIT_UINT16_IMM_OP(PN_OP(pn), slot);
            if (js_Emit1(cx, cg, JSOP_POP) < 0)
                return JS_FALSE;
            break;

#if JS_HAS_LVALUE_RETURN || JS_HAS_XML_SUPPORT
          default:
          {
            ptrdiff_t top;

            top = CG_OFFSET(cg);
            if (!js_EmitTree(cx, cg, pn))
                return JS_FALSE;
            if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
                return JS_FALSE;
            if (js_Emit1(cx, cg, JSOP_ENUMELEM) < 0)
                return JS_FALSE;
            break;
          }
#else
          default:
            JS_ASSERT(0);
#endif
        }
    }

    return JS_TRUE;
}
/*
 * Recursive helper for EmitDestructuringOps.
 *
 * Given a value to destructure on the stack, walk over an object or array
 * initialiser at pn, emitting bytecodes to match property values and store
 * them in the lvalues identified by the matched property names.
 */
static JSBool
EmitDestructuringOpsHelper(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
{
    jsuint index;
    JSParseNode *pn2, *pn3;
    JSBool doElemOp;
    intN stackDepth = cg->stackDepth;

    JS_ASSERT(stackDepth != 0);
    JS_ASSERT(pn->pn_arity == PN_LIST);
    JS_ASSERT(pn->pn_type == TOK_RB || pn->pn_type == TOK_RC);

    if (pn->pn_count == 0) {
        /* Emit a DUP;POP sequence for the decompiler. */
        return js_Emit1(cx, cg, JSOP_DUP) >= 0 &&
               js_Emit1(cx, cg, JSOP_POP) >= 0;
    }

    index = 0;
    for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
        /*
         * Duplicate the value being destructured to use as a reference base.
         * If dup is not the first one, annotate it for the decompiler.
         */
        if (pn2 != pn->pn_head && js_NewSrcNote(cx, cg, SRC_CONTINUE) < 0)
            return JS_FALSE;
        if (js_Emit1(cx, cg, JSOP_DUP) < 0)
            return JS_FALSE;

        /*
         * Now push the property name currently being matched, which is either
         * the array initialiser's current index, or the current property name
         * "label" on the left of a colon in the object initialiser.  Set pn3
         * to the lvalue node, which is in the value-initializing position.
         */
        doElemOp = JS_TRUE;
        if (pn->pn_type == TOK_RB) {
            if (!EmitNumberOp(cx, index, cg))
                return JS_FALSE;
            pn3 = pn2;
        } else {
            JS_ASSERT(pn->pn_type == TOK_RC);
            JS_ASSERT(pn2->pn_type == TOK_COLON);
            pn3 = pn2->pn_left;
            if (pn3->pn_type == TOK_NUMBER) {
                /*
                 * If we are emitting an object destructuring initialiser,
                 * annotate the index op with SRC_INITPROP so we know we are
                 * not decompiling an array initialiser.
                 */
                if (js_NewSrcNote(cx, cg, SRC_INITPROP) < 0)
                    return JS_FALSE;
                if (!EmitNumberOp(cx, pn3->pn_dval, cg))
                    return JS_FALSE;
            } else {
                JS_ASSERT(pn3->pn_type == TOK_STRING ||
                          pn3->pn_type == TOK_NAME);
                if (!EmitAtomOp(cx, pn3, JSOP_GETPROP, cg))
                    return JS_FALSE;
                doElemOp = JS_FALSE;
            }
            pn3 = pn2->pn_right;
        }

        if (doElemOp) {
            /*
             * Ok, get the value of the matching property name.  This leaves
             * that value on top of the value being destructured, so the stack
             * is one deeper than when we started.
             */
            if (js_Emit1(cx, cg, JSOP_GETELEM) < 0)
                return JS_FALSE;
            JS_ASSERT(cg->stackDepth == stackDepth + 1);
        }

        /* Nullary comma node makes a hole in the array destructurer. */
        if (pn3->pn_type == TOK_COMMA && pn3->pn_arity == PN_NULLARY) {
            JS_ASSERT(pn->pn_type == TOK_RB);
            JS_ASSERT(pn2 == pn3);
            if (js_Emit1(cx, cg, JSOP_POP) < 0)
                return JS_FALSE;
        } else {
            if (!EmitDestructuringLHS(cx, cg, pn3))
                return JS_FALSE;
        }

        JS_ASSERT(cg->stackDepth == stackDepth);
        ++index;
    }

    return JS_TRUE;
}
static ptrdiff_t
OpToDeclType(JSOp op)
{
    switch (op) {
      case JSOP_NOP:
        return SRC_DECL_LET;
      case JSOP_DEFCONST:
        return SRC_DECL_CONST;
      case JSOP_DEFVAR:
        return SRC_DECL_VAR;
      default:
        return SRC_DECL_NONE;
    }
}

static JSBool
EmitDestructuringOps(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
                     JSParseNode *pn)
{
    /*
     * If we're called from a variable declaration, help the decompiler by
     * annotating the first JSOP_DUP that EmitDestructuringOpsHelper emits.
     * If the destructuring initialiser is empty, our helper will emit a
     * JSOP_DUP followed by a JSOP_POP for the decompiler.
     */
    if (js_NewSrcNote2(cx, cg, SRC_DESTRUCT, OpToDeclType(prologOp)) < 0)
        return JS_FALSE;

    /*
     * Call our recursive helper to emit the destructuring assignments and
     * related stack manipulations.
     */
    return EmitDestructuringOpsHelper(cx, cg, pn);
}
static JSBool
EmitGroupAssignment(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
                    JSParseNode *lhs, JSParseNode *rhs)
{
    jsuint depth, limit, i, nslots;
    JSParseNode *pn;

    depth = limit = (uintN) cg->stackDepth;
    for (pn = rhs->pn_head; pn; pn = pn->pn_next) {
        if (limit == JS_BIT(16)) {
            js_ReportCompileErrorNumber(cx, CG_TS(cg), rhs, JSREPORT_ERROR,
                                        JSMSG_ARRAY_INIT_TOO_BIG);
            return JS_FALSE;
        }

        /* MaybeEmitGroupAssignment won't call us if rhs is holey. */
        JS_ASSERT(pn->pn_type != TOK_COMMA);
        if (!js_EmitTree(cx, cg, pn))
            return JS_FALSE;
        ++limit;
    }

    if (js_NewSrcNote2(cx, cg, SRC_GROUPASSIGN, OpToDeclType(prologOp)) < 0)
        return JS_FALSE;

    i = depth;
    for (pn = lhs->pn_head; pn; pn = pn->pn_next, ++i) {
        /* MaybeEmitGroupAssignment requires lhs->pn_count <= rhs->pn_count. */
        JS_ASSERT(i < limit);
        jsint slot = AdjustBlockSlot(cx, cg, i);
        if (slot < 0)
            return JS_FALSE;
        EMIT_UINT16_IMM_OP(JSOP_GETLOCAL, slot);

        if (pn->pn_type == TOK_COMMA && pn->pn_arity == PN_NULLARY) {
            if (js_Emit1(cx, cg, JSOP_POP) < 0)
                return JS_FALSE;
        } else {
            if (!EmitDestructuringLHS(cx, cg, pn))
                return JS_FALSE;
        }
    }

    nslots = limit - depth;
    EMIT_UINT16_IMM_OP(JSOP_POPN, nslots);
    cg->stackDepth = (uintN) depth;
    return JS_TRUE;
}

/*
 * Helper called with pop out param initialized to a JSOP_POP* opcode.  If we
 * can emit a group assignment sequence, which results in 0 stack depth delta,
 * we set *pop to JSOP_NOP so callers can veto emitting pn followed by a pop.
 */
static JSBool
MaybeEmitGroupAssignment(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
                         JSParseNode *pn, JSOp *pop)
{
    JSParseNode *lhs, *rhs;

    JS_ASSERT(pn->pn_type == TOK_ASSIGN);
    JS_ASSERT(*pop == JSOP_POP || *pop == JSOP_POPV);
    lhs = pn->pn_left;
    rhs = pn->pn_right;
    if (lhs->pn_type == TOK_RB && rhs->pn_type == TOK_RB &&
        !(rhs->pn_xflags & PNX_HOLEY) &&
        lhs->pn_count <= rhs->pn_count) {
        if (!EmitGroupAssignment(cx, cg, prologOp, lhs, rhs))
            return JS_FALSE;
        *pop = JSOP_NOP;
    }
    return JS_TRUE;
}

#endif /* JS_HAS_DESTRUCTURING */
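
/*
 * Illustrative case for the group-assignment fast path above: a statement
 * such as
 *
 *   [a, b] = [b, a];
 *
 * where the right side is a non-holey array literal at least as long as the
 * left side, evaluates the right-hand elements onto the stack, then for each
 * left-hand element loads the corresponding temporary (JSOP_GETLOCAL) and
 * stores it via EmitDestructuringLHS, finally popping the temporaries with
 * JSOP_POPN. The net stack delta is zero, so the caller's trailing pop is
 * vetoed (*pop set to JSOP_NOP).
 */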
static JSBool
EmitVariables(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
              JSBool inLetHead, ptrdiff_t *headNoteIndex)
{
    bool let, forInVar, first;
#if JS_HAS_BLOCK_SCOPE
    bool forInLet, popScope;
    JSStmtInfo *stmt, *scopeStmt;
#endif
    ptrdiff_t off, noteIndex, tmp;
    JSParseNode *pn2, *pn3, *next;
    JSOp op;
    jsatomid atomIndex;
    uintN oldflags;

    /* Default in case of JS_HAS_BLOCK_SCOPE early return, below. */
    *headNoteIndex = -1;

    /*
     * Let blocks and expressions have a parenthesized head in which the new
     * scope is not yet open. Initializer evaluation uses the parent node's
     * lexical scope. If popScope is true below, then we hide the top lexical
     * block from any calls to BindNameToSlot hiding in pn2->pn_expr so that
     * it won't find any names in the new let block.
     *
     * The same goes for let declarations in the head of any kind of for loop.
     * Unlike a let declaration 'let x = i' within a block, where x is hoisted
     * to the start of the block, a 'for (let x = i...) ...' loop evaluates i
     * in the containing scope, and puts x in the loop body's scope.
     */
    let = (pn->pn_op == JSOP_NOP);
    forInVar = (pn->pn_xflags & PNX_FORINVAR) != 0;
#if JS_HAS_BLOCK_SCOPE
    forInLet = let && forInVar;
    popScope = (inLetHead || (let && (cg->flags & TCF_IN_FOR_INIT)));
    if (popScope) {
        stmt = cg->topStmt;
        scopeStmt = cg->topScopeStmt;
    }
    else stmt = scopeStmt = NULL;   /* quell GCC overwarning */
    JS_ASSERT(!popScope || let);
#endif

    off = noteIndex = -1;
    for (pn2 = pn->pn_head; ; pn2 = next) {
        first = pn2 == pn->pn_head;
        next = pn2->pn_next;

        if (pn2->pn_type != TOK_NAME) {
#if JS_HAS_DESTRUCTURING
            if (pn2->pn_type == TOK_RB || pn2->pn_type == TOK_RC) {
                /*
                 * Emit variable binding ops, but not destructuring ops.
                 * The parser (see Variables, jsparse.c) has ensured that
                 * our caller will be the TOK_FOR/TOK_IN case in js_EmitTree,
                 * and that case will emit the destructuring code only after
                 * emitting an enumerating opcode and a branch that tests
                 * whether the enumeration ended.
                 */
                JS_ASSERT(forInVar);
                JS_ASSERT(pn->pn_count == 1);
                if (!EmitDestructuringDecls(cx, cg, PN_OP(pn), pn2))
                    return JS_FALSE;
                break;
            }
#endif

            /*
             * A destructuring initialiser assignment preceded by var will
             * never occur to the left of 'in' in a for-in loop.  As with 'for
             * (var x = i in o)...', this will cause the entire 'var [a, b] =
             * i' to be hoisted out of the loop.
             */
            JS_ASSERT(pn2->pn_type == TOK_ASSIGN);
            JS_ASSERT(!forInVar);

            /*
             * To allow the front end to rewrite var f = x; as f = x; when a
             * function f(){} precedes the var, detect simple name assignment
             * here and initialize the name.
             */
#if !JS_HAS_DESTRUCTURING
            JS_ASSERT(pn2->pn_left->pn_type == TOK_NAME);
#else
            if (pn2->pn_left->pn_type == TOK_NAME)
#endif
            {
                pn3 = pn2->pn_right;
                pn2 = pn2->pn_left;
                goto do_name;
            }

#if JS_HAS_DESTRUCTURING
            if (pn->pn_count == 1) {
                /*
                 * If this is the only destructuring assignment in the list,
                 * try to optimize to a group assignment.  If we're in a let
                 * head, pass JSOP_POP rather than the pseudo-prolog JSOP_NOP
                 * in pn->pn_op, to suppress a second (and misplaced) 'let'.
                 */
                JS_ASSERT(noteIndex < 0 && !pn2->pn_next);
                op = JSOP_POP;
                if (!MaybeEmitGroupAssignment(cx, cg,
                                              inLetHead ? JSOP_POP : PN_OP(pn),
                                              pn2, &op)) {
                    return JS_FALSE;
                }
                if (op == JSOP_NOP) {
                    pn->pn_xflags = (pn->pn_xflags & ~PNX_POPVAR) | PNX_GROUPINIT;
                    break;
                }
            }

            pn3 = pn2->pn_left;
            if (!EmitDestructuringDecls(cx, cg, PN_OP(pn), pn3))
                return JS_FALSE;

            if (!js_EmitTree(cx, cg, pn2->pn_right))
                return JS_FALSE;

            /*
             * Veto pn->pn_op if inLetHead to avoid emitting a SRC_DESTRUCT
             * that's redundant with respect to the SRC_DECL/SRC_DECL_LET that
             * we will emit at the bottom of this function.
             */
            if (!EmitDestructuringOps(cx, cg,
                                      inLetHead ? JSOP_POP : PN_OP(pn),
                                      pn3)) {
                return JS_FALSE;
            }
            goto emit_note_pop;
#endif
        }

        /*
         * Load initializer early to share code above that jumps to do_name.
         * NB: if this var redeclares an existing binding, then pn2 is linked
         * on its definition's use-chain and pn_expr has been overlayed with
         * pn_lexdef.
         */
        pn3 = pn2->maybeExpr();

      do_name:
        if (!BindNameToSlot(cx, cg, pn2))
            return JS_FALSE;

        op = PN_OP(pn2);
        if (op == JSOP_ARGUMENTS) {
            /* JSOP_ARGUMENTS => no initializer */
            JS_ASSERT(!pn3 && !let);
            atomIndex = 0;      /* quell GCC overwarning */
        } else {
            JS_ASSERT(op != JSOP_CALLEE);
            JS_ASSERT(pn2->pn_cookie != FREE_UPVAR_COOKIE || !let);
            if (!MaybeEmitVarDecl(cx, cg, PN_OP(pn), pn2, &atomIndex))
                return JS_FALSE;

            if (pn3) {
                JS_ASSERT(!forInVar);
                if (op == JSOP_SETNAME) {
                    JS_ASSERT(!let);
                    EMIT_INDEX_OP(JSOP_BINDNAME, atomIndex);
                }
                if (pn->pn_op == JSOP_DEFCONST &&
                    !js_DefineCompileTimeConstant(cx, cg, pn2->pn_atom, pn3)) {
                    return JS_FALSE;
                }

#if JS_HAS_BLOCK_SCOPE
                /* Evaluate expr in the outer lexical scope if requested. */
                if (popScope) {
                    cg->topStmt = stmt->down;
                    cg->topScopeStmt = scopeStmt->downScope;
                }
#endif

                oldflags = cg->flags;
                cg->flags &= ~TCF_IN_FOR_INIT;
                if (!js_EmitTree(cx, cg, pn3))
                    return JS_FALSE;
                cg->flags |= oldflags & TCF_IN_FOR_INIT;

#if JS_HAS_BLOCK_SCOPE
                if (popScope) {
                    cg->topStmt = stmt;
                    cg->topScopeStmt = scopeStmt;
                }
#endif
            }
        }

        /*
         * The parser rewrites 'for (var x = i in o)' to hoist 'var x = i' --
         * likewise 'for (let x = i in o)' becomes 'i; for (let x in o)' using
         * a TOK_SEQ node to make the two statements appear as one. Therefore
         * if this declaration is part of a for-in loop head, we do not need to
         * emit op or any source note. Our caller, the TOK_FOR/TOK_IN case in
         * js_EmitTree, will annotate appropriately.
         */
        JS_ASSERT_IF(pn2->pn_defn, pn3 == pn2->pn_expr);
        if (forInVar) {
            JS_ASSERT(pn->pn_count == 1);
            JS_ASSERT(!pn3);
            break;
        }

        if (first &&
            !inLetHead &&
            js_NewSrcNote2(cx, cg, SRC_DECL,
                           (pn->pn_op == JSOP_DEFCONST)
                           ? SRC_DECL_CONST
                           : (pn->pn_op == JSOP_DEFVAR)
                           ? SRC_DECL_VAR
                           : SRC_DECL_LET) < 0) {
            return JS_FALSE;
        }
        if (op == JSOP_ARGUMENTS) {
            if (js_Emit1(cx, cg, op) < 0)
                return JS_FALSE;
        } else if (pn2->pn_cookie != FREE_UPVAR_COOKIE) {
            EMIT_UINT16_IMM_OP(op, atomIndex);
        } else {
            EMIT_INDEX_OP(op, atomIndex);
        }

#if JS_HAS_DESTRUCTURING
      emit_note_pop:
#endif
        tmp = CG_OFFSET(cg);
        if (noteIndex >= 0) {
            if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, tmp-off))
                return JS_FALSE;
        }
        if (!next)
            break;
        off = tmp;
        noteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
        if (noteIndex < 0 || js_Emit1(cx, cg, JSOP_POP) < 0)
            return JS_FALSE;
    }

    /* If this is a let head, emit and return a srcnote on the pop. */
    if (inLetHead) {
        *headNoteIndex = js_NewSrcNote(cx, cg, SRC_DECL);
        if (*headNoteIndex < 0)
            return JS_FALSE;
        if (!(pn->pn_xflags & PNX_POPVAR))
            return js_Emit1(cx, cg, JSOP_NOP) >= 0;
    }

    return !(pn->pn_xflags & PNX_POPVAR) || js_Emit1(cx, cg, JSOP_POP) >= 0;
}
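
/*
 * Rough emission shape for a simple declaration handled above (an
 * illustration; exact opcodes depend on scope and heavyweight flags):
 * 'var x = 1, y = 2;' at top level emits a prolog JSOP_DEFVAR per name, then
 * in the main section a SRC_DECL note, code for each initializer followed by
 * its set op (JSOP_SETNAME with a preceding JSOP_BINDNAME, or JSOP_SETLOCAL
 * inside a function), a SRC_PCDELTA note and JSOP_POP between declarators,
 * and a final pop if PNX_POPVAR is set.
 */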
#if defined DEBUG_brendan || defined DEBUG_mrbkap
static JSBool
GettableNoteForNextOp(JSCodeGenerator *cg)
{
    ptrdiff_t offset, target;
    jssrcnote *sn, *end;

    offset = 0;
    target = CG_OFFSET(cg);
    for (sn = CG_NOTES(cg), end = sn + CG_NOTE_COUNT(cg); sn < end;
         sn = SN_NEXT(sn)) {
        if (offset == target && SN_IS_GETTABLE(sn))
            return JS_TRUE;
        offset += SN_DELTA(sn);
    }
    return JS_FALSE;
}
#endif
/* Top-level named functions need a nop for decompilation. */
static JSBool
EmitFunctionDefNop(JSContext *cx, JSCodeGenerator *cg, uintN index)
{
    return js_NewSrcNote2(cx, cg, SRC_FUNCDEF, (ptrdiff_t)index) >= 0 &&
           js_Emit1(cx, cg, JSOP_NOP) >= 0;
}

/* See the SRC_FOR source note offsetBias comments later in this file. */
JS_STATIC_ASSERT(JSOP_NOP_LENGTH == 1);
JS_STATIC_ASSERT(JSOP_POP_LENGTH == 1);
4213 js_EmitTree(JSContext
*cx
, JSCodeGenerator
*cg
, JSParseNode
*pn
)
4215 JSBool ok
, useful
, wantval
;
4216 JSStmtInfo
*stmt
, stmtInfo
;
4217 ptrdiff_t top
, off
, tmp
, beq
, jmp
;
4218 JSParseNode
*pn2
, *pn3
;
4220 JSAtomListElement
*ale
;
4223 ptrdiff_t noteIndex
;
4224 JSSrcNoteType noteType
;
4229 #if JS_HAS_SHARP_VARS
4233 JS_CHECK_RECURSION(cx
, return JS_FALSE
);
4237 pn
->pn_offset
= top
= CG_OFFSET(cg
);
4239 /* Emit notes to tell the current bytecode's source line number. */
4240 UPDATE_LINE_NUMBER_NOTES(cx
, cg
, pn
->pn_pos
.begin
.lineno
);
4242 switch (pn
->pn_type
) {
4248 #if JS_HAS_XML_SUPPORT
4249 if (pn
->pn_arity
== PN_NULLARY
) {
4250 if (js_Emit1(cx
, cg
, JSOP_GETFUNNS
) < 0)
4256 fun
= (JSFunction
*) pn
->pn_funbox
->object
;
4257 JS_ASSERT(FUN_INTERPRETED(fun
));
4258 if (fun
->u
.i
.script
) {
4260 * This second pass is needed to emit JSOP_NOP with a source note
4261 * for the already-emitted function definition prolog opcode. See
4262 * comments in the TOK_LC case.
4264 JS_ASSERT(pn
->pn_op
== JSOP_NOP
);
4265 JS_ASSERT(cg
->flags
& TCF_IN_FUNCTION
);
4266 if (!EmitFunctionDefNop(cx
, cg
, pn
->pn_index
))
4271 JS_ASSERT_IF(cx
->options
& JSOPTION_ANONFUNFIX
,
4273 (!pn
->pn_used
&& !pn
->isTopLevel()) ||
4274 (fun
->flags
& JSFUN_LAMBDA
));
4276 JS_ASSERT_IF(pn
->pn_funbox
->tcflags
& TCF_FUN_HEAVYWEIGHT
,
4277 FUN_KIND(fun
) == JSFUN_INTERPRETED
);
4279 /* Generate code for the function's body. */
4280 void *cg2mark
= JS_ARENA_MARK(cg
->codePool
);
4282 JS_ARENA_ALLOCATE_TYPE(cg2space
, JSCodeGenerator
, cg
->codePool
);
4284 js_ReportOutOfScriptQuota(cx
);
4287 JSCodeGenerator
*cg2
=
4288 new (cg2space
) JSCodeGenerator(cg
->compiler
,
4289 cg
->codePool
, cg
->notePool
,
4290 pn
->pn_pos
.begin
.lineno
);
4291 cg2
->flags
= (uint16
) (pn
->pn_funbox
->tcflags
| TCF_IN_FUNCTION
);
4293 cg2
->funbox
= pn
->pn_funbox
;
4297 * jsparse.cpp:SetStaticLevel limited static nesting depth to fit in 16
4298 * bits and to reserve the all-ones value, thereby reserving the magic
4299 * FREE_UPVAR_COOKIE value. Note the cg2->staticLevel assignment below.
4301 JS_ASSERT(cg
->staticLevel
< JS_BITMASK(16) - 1);
4302 cg2
->staticLevel
= cg
->staticLevel
+ 1;
4304 /* We measured the max scope depth when we parsed the function. */
4305 JS_SCOPE_DEPTH_METERING(cg2
->maxScopeDepth
= (uintN
) -1);
4306 if (!js_EmitFunctionScript(cx
, cg2
, pn
->pn_body
))
4309 cg2
->~JSCodeGenerator();
4310 JS_ARENA_RELEASE(cg
->codePool
, cg2mark
);
4315 /* Make the function object a literal in the outer script's pool. */
4316 index
= cg
->objectList
.index(pn
->pn_funbox
);
4318 /* Emit a bytecode pointing to the closure object in its immediate. */
4320 if (op
!= JSOP_NOP
) {
4321 if ((pn
->pn_funbox
->tcflags
& TCF_GENEXP_LAMBDA
) &&
4322 js_NewSrcNote(cx
, cg
, SRC_GENEXP
) < 0) {
4325 EMIT_INDEX_OP(op
, index
);
4330 * For a script we emit the code as we parse. Thus the bytecode for
4331 * top-level functions should go in the prolog to predefine their
4332 * names in the variable object before the already-generated main code
4333 * is executed. This extra work for top-level scripts is not necessary
4334 * when we emit the code for a function. It is fully parsed prior to
4335 * invocation of the emitter and calls to js_EmitTree for function
4336 * definitions can be scheduled before generating the rest of code.
4338 if (!(cg
->flags
& TCF_IN_FUNCTION
)) {
4339 JS_ASSERT(!cg
->topStmt
);
4340 CG_SWITCH_TO_PROLOG(cg
);
4341 op
= FUN_FLAT_CLOSURE(fun
) ? JSOP_DEFFUN_FC
: JSOP_DEFFUN
;
4342 EMIT_INDEX_OP(op
, index
);
4343 CG_SWITCH_TO_MAIN(cg
);
4345 /* Emit NOP for the decompiler. */
4346 if (!EmitFunctionDefNop(cx
, cg
, index
))
4350 JSLocalKind localKind
=
4352 js_LookupLocal(cx
, cg
->fun
, fun
->atom
, &slot
);
4353 JS_ASSERT(localKind
== JSLOCAL_VAR
|| localKind
== JSLOCAL_CONST
);
4354 JS_ASSERT(index
< JS_BIT(20));
4355 pn
->pn_index
= index
;
4356 op
= FUN_FLAT_CLOSURE(fun
) ? JSOP_DEFLOCALFUN_FC
: JSOP_DEFLOCALFUN
;
4357 if (!EmitSlotIndexOp(cx
, op
, slot
, index
, cg
))
4364 ok
= js_EmitTree(cx
, cg
, pn
->last());
4368 JS_ASSERT(cg
->lexdeps
.count
== 0);
4369 JS_ASSERT(pn
->pn_names
.count
!= 0);
4370 cg
->lexdeps
= pn
->pn_names
;
4371 ok
= js_EmitTree(cx
, cg
, pn
->pn_tree
);
4375 /* Initialize so we can detect else-if chains and avoid recursion. */
4376 stmtInfo
.type
= STMT_IF
;
4381 /* Emit code for the condition before pushing stmtInfo. */
4382 if (!js_EmitTree(cx
, cg
, pn
->pn_kid1
))
4384 top
= CG_OFFSET(cg
);
4385 if (stmtInfo
.type
== STMT_IF
) {
4386 js_PushStatement(cg
, &stmtInfo
, STMT_IF
, top
);
4389 * We came here from the goto further below that detects else-if
4390 * chains, so we must mutate stmtInfo back into a STMT_IF record.
4391 * Also (see below for why) we need a note offset for SRC_IF_ELSE
4392 * to help the decompiler. Actually, we need two offsets, one for
4393 * decompiling any else clause and the second for decompiling an
4394 * else-if chain without bracing, overindenting, or incorrectly
4395 * scoping let declarations.
4397 JS_ASSERT(stmtInfo
.type
== STMT_ELSE
);
4398 stmtInfo
.type
= STMT_IF
;
4399 stmtInfo
.update
= top
;
4400 if (!js_SetSrcNoteOffset(cx
, cg
, noteIndex
, 0, jmp
- beq
))
4402 if (!js_SetSrcNoteOffset(cx
, cg
, noteIndex
, 1, top
- jmp
))
4406 /* Emit an annotated branch-if-false around the then part. */
4408 noteIndex
= js_NewSrcNote(cx
, cg
, pn3
? SRC_IF_ELSE
: SRC_IF
);
4411 beq
= EmitJump(cx
, cg
, JSOP_IFEQ
, 0);
4415 /* Emit code for the then and optional else parts. */
4416 if (!js_EmitTree(cx
, cg
, pn
->pn_kid2
))
4419 /* Modify stmtInfo so we know we're in the else part. */
4420 stmtInfo
.type
= STMT_ELSE
;
4423 * Emit a JSOP_BACKPATCH op to jump from the end of our then part
4424 * around the else part. The js_PopStatementCG call at the bottom
4425 * of this switch case will fix up the backpatch chain linked from
4428 jmp
= EmitGoto(cx
, cg
, &stmtInfo
, &stmtInfo
.breaks
, NULL
, SRC_NULL
);
4432 /* Ensure the branch-if-false comes here, then emit the else. */
4433 CHECK_AND_SET_JUMP_OFFSET_AT(cx
, cg
, beq
);
4434 if (pn3
->pn_type
== TOK_IF
) {
4439 if (!js_EmitTree(cx
, cg
, pn3
))
4443 * Annotate SRC_IF_ELSE with the offset from branch to jump, for
4444 * the decompiler's benefit. We can't just "back up" from the pc
4445 * of the else clause, because we don't know whether an extended
4446 * jump was required to leap from the end of the then clause over
4449 if (!js_SetSrcNoteOffset(cx
, cg
, noteIndex
, 0, jmp
- beq
))
4452 /* No else part, fixup the branch-if-false to come here. */
4453 CHECK_AND_SET_JUMP_OFFSET_AT(cx
, cg
, beq
);
4455 ok
= js_PopStatementCG(cx
, cg
);
4459 /* Out of line to avoid bloating js_EmitTree's stack frame size. */
4460 ok
= EmitSwitch(cx
, cg
, pn
, &stmtInfo
);
4465 * Minimize bytecodes issued for one or more iterations by jumping to
4466 * the condition below the body and closing the loop if the condition
4467 * is true with a backward branch. For iteration count i:
4469 * i test at the top test at the bottom
4470 * = =============== ==================
4471 * 0 ifeq-pass goto; ifne-fail
4472 * 1 ifeq-fail; goto; ifne-pass goto; ifne-pass; ifne-fail
4473 * 2 2*(ifeq-fail; goto); ifeq-pass goto; 2*ifne-pass; ifne-fail
4475 * N N*(ifeq-fail; goto); ifeq-pass goto; N*ifne-pass; ifne-fail
4477 * SpiderMonkey, pre-mozilla.org, emitted while parsing and so used
4478 * test at the top. When JSParseNode trees were added during the ES3
4479 * work (1998-9), the code generation scheme was not optimized, and
4480 * the decompiler continued to take advantage of the branch and jump
4481 * that bracketed the body. But given the SRC_WHILE note, it is easy
4482 * to support the more efficient scheme.
4484 js_PushStatement(cg
, &stmtInfo
, STMT_WHILE_LOOP
, top
);
4485 noteIndex
= js_NewSrcNote(cx
, cg
, SRC_WHILE
);
4488 jmp
= EmitJump(cx
, cg
, JSOP_GOTO
, 0);
4491 top
= CG_OFFSET(cg
);
4492 if (!js_Emit1(cx
, cg
, JSOP_LOOP
))
4494 if (!js_EmitTree(cx
, cg
, pn
->pn_right
))
4496 CHECK_AND_SET_JUMP_OFFSET_AT(cx
, cg
, jmp
);
4497 if (!js_EmitTree(cx
, cg
, pn
->pn_left
))
4499 beq
= EmitJump(cx
, cg
, JSOP_IFNE
, top
- CG_OFFSET(cg
));
4502 if (!js_SetSrcNoteOffset(cx
, cg
, noteIndex
, 0, beq
- jmp
))
4504 ok
= js_PopStatementCG(cx
, cg
);
4508 /* Emit an annotated nop so we know to decompile a 'do' keyword. */
4509 noteIndex
= js_NewSrcNote(cx
, cg
, SRC_WHILE
);
4510 if (noteIndex
< 0 || js_Emit1(cx
, cg
, JSOP_NOP
) < 0)
4513 /* Compile the loop body. */
4514 top
= CG_OFFSET(cg
);
4515 if (!js_Emit1(cx
, cg
, JSOP_LOOP
))
4517 js_PushStatement(cg
, &stmtInfo
, STMT_DO_LOOP
, top
);
4518 if (!js_EmitTree(cx
, cg
, pn
->pn_left
))
4521 /* Set loop and enclosing label update offsets, for continue. */
4524 stmt
->update
= CG_OFFSET(cg
);
4525 } while ((stmt
= stmt
->down
) != NULL
&& stmt
->type
== STMT_LABEL
);
4527 /* Compile the loop condition, now that continues know where to go. */
4528 if (!js_EmitTree(cx
, cg
, pn
->pn_right
))
4532 * Since we use JSOP_IFNE for other purposes as well as for do-while
4533 * loops, we must store 1 + (beq - top) in the SRC_WHILE note offset,
4534 * and the decompiler must get that delta and decompile recursively.
4536 beq
= EmitJump(cx
, cg
, JSOP_IFNE
, top
- CG_OFFSET(cg
));
4539 if (!js_SetSrcNoteOffset(cx
, cg
, noteIndex
, 0, 1 + (beq
- top
)))
4541 ok
= js_PopStatementCG(cx
, cg
);
      case TOK_FOR:
        beq = 0;                /* suppress gcc warnings */
        pn2 = pn->pn_left;
        js_PushStatement(cg, &stmtInfo, STMT_FOR_LOOP, top);

        if (pn2->pn_type == TOK_IN) {
            /* Set stmtInfo type for later testing. */
            stmtInfo.type = STMT_FOR_IN_LOOP;

            /*
             * If the left part is 'var x', emit code to define x if necessary
             * using a prolog opcode, but do not emit a pop. If the left part
             * is 'var x = i', emit prolog code to define x if necessary; then
             * emit code to evaluate i, assign the result to x, and pop the
             * result off the stack.
             *
             * All the logic to do this is implemented in the outer switch's
             * TOK_VAR case, conditioned on pn_xflags flags set by the parser.
             *
             * In the 'for (var x = i in o) ...' case, the js_EmitTree(...pn3)
             * called here will generate the proper note for the assignment
             * op that sets x = i, hoisting the initialized var declaration
             * out of the loop: 'var x = i; for (x in o) ...'.
             *
             * In the 'for (var x in o) ...' case, nothing but the prolog op
             * (if needed) should be generated here, we must emit the note
             * just before the JSOP_FOR* opcode in the switch on pn3->pn_type
             * a bit below, so nothing is hoisted: 'for (var x in o) ...'.
             *
             * A 'for (let x = i in o)' loop must not be hoisted, since in
             * this form the let variable is scoped by the loop body (but not
             * the head). The initializer expression i must be evaluated for
             * any side effects. So we hoist only i in the let case.
             */
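            /*
             * Hypothetical source examples restating the cases above:
             *
             *     for (var x = i in o) body;  // hoists to: var x = i; for (x in o) body;
             *     for (var x in o) body;      // nothing hoisted
             *     for (let x = i in o) body;  // only i's side effects hoisted;
             *                                 // x stays scoped to the loop body
             */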
            pn3 = pn2->pn_left;
            type = PN_TYPE(pn3);
            cg->flags |= TCF_IN_FOR_INIT;
            if (TOKEN_TYPE_IS_DECL(type) && !js_EmitTree(cx, cg, pn3))
                return JS_FALSE;
            cg->flags &= ~TCF_IN_FOR_INIT;

            /* Compile the object expression to the right of 'in'. */
            if (!js_EmitTree(cx, cg, pn2->pn_right))
                return JS_FALSE;

            /*
             * Emit a bytecode to convert top of stack value to the iterator
             * object depending on the loop variant (for-in, for-each-in, or
             * destructuring for-in).
             */
            JS_ASSERT(pn->pn_op == JSOP_ITER);
            if (js_Emit2(cx, cg, JSOP_ITER, (uint8) pn->pn_iflags) < 0)
                return JS_FALSE;

            /* Annotate so the decompiler can find the loop-closing jump. */
            noteIndex = js_NewSrcNote(cx, cg, SRC_FOR_IN);
            if (noteIndex < 0)
                return JS_FALSE;

            /*
             * Jump down to the loop condition to minimize overhead assuming at
             * least one iteration, as the other loop forms do.
             */
            jmp = EmitJump(cx, cg, JSOP_GOTO, 0);
            if (jmp < 0)
                return JS_FALSE;

            top = CG_OFFSET(cg);
            SET_STATEMENT_TOP(&stmtInfo, top);
            if (!js_Emit1(cx, cg, JSOP_LOOP))
                return JS_FALSE;

            intN loopDepth = cg->stackDepth;
4622 * Compile a JSOP_FOR* bytecode based on the left hand side.
4624 * Initialize op to JSOP_SETNAME in case of |for ([a, b] in o)...|
4625 * or similar, to signify assignment, rather than declaration, to
4626 * the decompiler. EmitDestructuringOps takes a prolog bytecode
4627 * parameter and emits the appropriate source note, defaulting to
4628 * assignment, so JSOP_SETNAME is not critical here; many similar
4629 * ops could be used -- just not JSOP_NOP (which means 'let').
4633 #if JS_HAS_BLOCK_SCOPE
4637 JS_ASSERT(pn3
->pn_arity
== PN_LIST
&& pn3
->pn_count
== 1);
4639 #if JS_HAS_DESTRUCTURING
4640 if (pn3
->pn_type
== TOK_ASSIGN
) {
4642 JS_ASSERT(pn3
->pn_type
== TOK_RB
|| pn3
->pn_type
== TOK_RC
);
4644 if (pn3
->pn_type
== TOK_RB
|| pn3
->pn_type
== TOK_RC
) {
4645 op
= PN_OP(pn2
->pn_left
);
4646 goto destructuring_for
;
4649 JS_ASSERT(pn3
->pn_type
== TOK_NAME
);
4655 * Always annotate JSOP_FORLOCAL if given input of the form
4656 * 'for (let x in * o)' -- the decompiler must not hoist the
4657 * 'let x' out of the loop head, or x will be bound in the
4658 * wrong scope. Likewise, but in this case only for the sake
4659 * of higher decompilation fidelity only, do not hoist 'var x'
4660 * when given 'for (var x in o)'.
4663 #if JS_HAS_BLOCK_SCOPE
4666 (type
== TOK_VAR
&& !pn3
->maybeExpr())) &&
4667 js_NewSrcNote2(cx
, cg
, SRC_DECL
,
4670 : SRC_DECL_LET
) < 0) {
4673 if (pn3
->pn_cookie
!= FREE_UPVAR_COOKIE
) {
4676 case JSOP_GETARG
: /* FALL THROUGH */
4677 case JSOP_SETARG
: op
= JSOP_FORARG
; break;
4678 case JSOP_GETGVAR
: /* FALL THROUGH */
4679 case JSOP_SETGVAR
: op
= JSOP_FORNAME
; break;
4680 case JSOP_GETLOCAL
: /* FALL THROUGH */
4681 case JSOP_SETLOCAL
: op
= JSOP_FORLOCAL
; break;
4682 default: JS_ASSERT(0);
4685 pn3
->pn_op
= JSOP_FORNAME
;
4686 if (!BindNameToSlot(cx
, cg
, pn3
))
4690 if (pn3
->isConst()) {
4691 js_ReportCompileErrorNumber(cx
, CG_TS(cg
), pn3
, JSREPORT_ERROR
,
4692 JSMSG_BAD_FOR_LEFTSIDE
);
4695 if (pn3
->pn_cookie
!= FREE_UPVAR_COOKIE
) {
4696 atomIndex
= (jsatomid
) pn3
->pn_cookie
;
4697 EMIT_UINT16_IMM_OP(op
, atomIndex
);
4699 if (!EmitAtomOp(cx
, pn3
, op
, cg
))
4706 * 'for (o.p in q)' can use JSOP_FORPROP only if evaluating 'o'
4707 * has no side effects.
4710 if (!CheckSideEffects(cx
, cg
, pn3
->expr(), &useful
))
4713 if (!EmitPropOp(cx
, pn3
, JSOP_FORPROP
, cg
, JS_FALSE
))
4719 #if JS_HAS_DESTRUCTURING
4723 if (js_Emit1(cx
, cg
, JSOP_FORELEM
) < 0)
4725 JS_ASSERT(cg
->stackDepth
>= 3);
4727 #if JS_HAS_DESTRUCTURING
4728 if (pn3
->pn_type
== TOK_RB
|| pn3
->pn_type
== TOK_RC
) {
4729 if (!EmitDestructuringOps(cx
, cg
, op
, pn3
))
4731 if (js_Emit1(cx
, cg
, JSOP_POP
) < 0)
4735 #if JS_HAS_LVALUE_RETURN
4736 if (pn3
->pn_type
== TOK_LP
) {
4737 JS_ASSERT(pn3
->pn_op
== JSOP_SETCALL
);
4738 if (!js_EmitTree(cx
, cg
, pn3
))
4740 if (js_Emit1(cx
, cg
, JSOP_ENUMELEM
) < 0)
4744 #if JS_HAS_XML_SUPPORT
4745 if (pn3
->pn_type
== TOK_UNARYOP
) {
4746 JS_ASSERT(pn3
->pn_op
== JSOP_BINDXMLNAME
);
4747 if (!js_EmitTree(cx
, cg
, pn3
))
4749 if (js_Emit1(cx
, cg
, JSOP_ENUMELEM
) < 0)
4753 if (!EmitElemOp(cx
, pn3
, JSOP_ENUMELEM
, cg
))
4758 /* The stack should be balanced around the JSOP_FOR* opcode sequence. */
4759 JS_ASSERT(cg
->stackDepth
== loopDepth
);
4761 /* Set the first srcnote offset so we can find the start of the loop body. */
4762 if (!js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 0, CG_OFFSET(cg
) - jmp
))
4765 /* Emit code for the loop body. */
4766 if (!js_EmitTree(cx
, cg
, pn
->pn_right
))
4769 /* Set loop and enclosing "update" offsets, for continue. */
4772 stmt
->update
= CG_OFFSET(cg
);
4773 } while ((stmt
= stmt
->down
) != NULL
&& stmt
->type
== STMT_LABEL
);
4776 * Fixup the goto that starts the loop to jump down to JSOP_NEXTITER.
4778 CHECK_AND_SET_JUMP_OFFSET_AT(cx
, cg
, jmp
);
4779 if (js_Emit1(cx
, cg
, JSOP_NEXTITER
) < 0)
4781 beq
= EmitJump(cx
, cg
, JSOP_IFNE
, top
- CG_OFFSET(cg
));
4785 /* Set the second srcnote offset so we can find the closing jump. */
4786 if (!js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 1, beq
- jmp
))
        } else {
            /* C-style for (init; cond; update) ... loop. */
            op = JSOP_POP;
            pn3 = pn2->pn_kid1;
            if (!pn3) {
                /* No initializer: emit an annotated nop for the decompiler. */
                op = JSOP_NOP;
            } else {
                cg->flags |= TCF_IN_FOR_INIT;
#if JS_HAS_DESTRUCTURING
                if (pn3->pn_type == TOK_ASSIGN &&
                    !MaybeEmitGroupAssignment(cx, cg, op, pn3, &op)) {
                    return JS_FALSE;
                }
#endif
                if (op == JSOP_POP) {
                    if (!js_EmitTree(cx, cg, pn3))
                        return JS_FALSE;
                    if (TOKEN_TYPE_IS_DECL(pn3->pn_type)) {
                        /*
                         * Check whether a destructuring-initialized var decl
                         * was optimized to a group assignment. If so, we do
                         * not need to emit a pop below, so switch to a nop,
                         * just for the decompiler.
                         */
                        JS_ASSERT(pn3->pn_arity == PN_LIST);
                        if (pn3->pn_xflags & PNX_GROUPINIT)
                            op = JSOP_NOP;
                    }
                }
                cg->flags &= ~TCF_IN_FOR_INIT;
            }

            /*
             * NB: the SRC_FOR note has offsetBias 1 (JSOP_{NOP,POP}_LENGTH).
             * Use tmp to hold the biased srcnote "top" offset, which differs
             * from the top local variable by the length of the JSOP_GOTO{,X}
             * emitted in between tmp and top if this loop has a condition.
             */
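            /*
             * Orientation sketch (assumed layout, not literal output) for
             * "for (init; cond; update) body;":
             *
             *     <init>
             *     nop or pop       # carries the SRC_FOR note; tmp points past it
             *     goto C           # only when a condition is present
             *   top: <body>
             *     <update>
             *   C: <cond>
             *     ifne top
             *
             * The SRC_FOR note's three offsets, all measured from tmp, locate
             * the condition, the update part, and the loop-closing jump; they
             * are filled in by the js_SetSrcNoteOffset calls below.
             */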
4827 noteIndex
= js_NewSrcNote(cx
, cg
, SRC_FOR
);
4828 if (noteIndex
< 0 || js_Emit1(cx
, cg
, op
) < 0)
4830 tmp
= CG_OFFSET(cg
);
4833 /* Goto the loop condition, which branches back to iterate. */
4834 jmp
= EmitJump(cx
, cg
, JSOP_GOTO
, 0);
4839 top
= CG_OFFSET(cg
);
4840 SET_STATEMENT_TOP(&stmtInfo
, top
);
4842 /* Emit code for the loop body. */
4843 if (!js_Emit1(cx
, cg
, JSOP_LOOP
))
4845 if (!js_EmitTree(cx
, cg
, pn
->pn_right
))
4848 /* Set the second note offset so we can find the update part. */
4849 JS_ASSERT(noteIndex
!= -1);
4850 if (!js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 1,
4851 CG_OFFSET(cg
) - tmp
)) {
4855 /* Set loop and enclosing "update" offsets, for continue. */
4858 stmt
->update
= CG_OFFSET(cg
);
4859 } while ((stmt
= stmt
->down
) != NULL
&& stmt
->type
== STMT_LABEL
);
4861 /* Check for update code to do before the condition (if any). */
4865 #if JS_HAS_DESTRUCTURING
4866 if (pn3
->pn_type
== TOK_ASSIGN
&&
4867 !MaybeEmitGroupAssignment(cx
, cg
, op
, pn3
, &op
)) {
4871 if (op
== JSOP_POP
&& !js_EmitTree(cx
, cg
, pn3
))
4874 /* Always emit the POP or NOP, to help the decompiler. */
4875 if (js_Emit1(cx
, cg
, op
) < 0)
4878 /* Restore the absolute line number for source note readers. */
4879 off
= (ptrdiff_t) pn
->pn_pos
.end
.lineno
;
4880 if (CG_CURRENT_LINE(cg
) != (uintN
) off
) {
4881 if (js_NewSrcNote2(cx
, cg
, SRC_SETLINE
, off
) < 0)
4883 CG_CURRENT_LINE(cg
) = (uintN
) off
;
4887 /* Set the first note offset so we can find the loop condition. */
4888 if (!js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 0,
4889 CG_OFFSET(cg
) - tmp
)) {
4894 /* Fix up the goto from top to target the loop condition. */
4895 JS_ASSERT(jmp
>= 0);
4896 CHECK_AND_SET_JUMP_OFFSET_AT(cx
, cg
, jmp
);
4898 if (!js_EmitTree(cx
, cg
, pn2
->pn_kid2
))
4902 /* The third note offset helps us find the loop-closing jump. */
4903 if (!js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 2,
4904 CG_OFFSET(cg
) - tmp
)) {
4909 beq
= EmitJump(cx
, cg
, JSOP_IFNE
, top
- CG_OFFSET(cg
));
4913 /* No loop condition -- emit the loop-closing jump. */
4914 jmp
= EmitJump(cx
, cg
, JSOP_GOTO
, top
- CG_OFFSET(cg
));
        /* Now fixup all breaks and continues (before for/in's JSOP_ENDITER). */
        if (!js_PopStatementCG(cx, cg))
            return JS_FALSE;

        if (pn2->pn_type == TOK_IN) {
            /*
             * JSOP_ENDITER must have a slot to save an exception thrown from
             * the body of for-in loop when closing the iterator object, and
             * fortunately it does: the slot that was set by JSOP_NEXTITER to
             * the return value of iterator.next().
             */
            JS_ASSERT(js_CodeSpec[JSOP_ENDITER].nuses == 2);
            if (!NewTryNote(cx, cg, JSTRY_ITER, cg->stackDepth, top, CG_OFFSET(cg)) ||
                js_Emit1(cx, cg, JSOP_ENDITER) < 0) {
                return JS_FALSE;
            }
        }
        break;
      case TOK_BREAK:
        stmt = cg->topStmt;
        atom = pn->pn_atom;
        if (atom) {
            ale = cg->atomList.add(cg->compiler, atom);
            if (!ale)
                return JS_FALSE;
            while (stmt->type != STMT_LABEL || stmt->label != atom)
                stmt = stmt->down;
            noteType = SRC_BREAK2LABEL;
        } else {
            ale = NULL;
            while (!STMT_IS_LOOP(stmt) && stmt->type != STMT_SWITCH)
                stmt = stmt->down;
            noteType = (stmt->type == STMT_SWITCH) ? SRC_NULL : SRC_BREAK;
        }

        if (EmitGoto(cx, cg, stmt, &stmt->breaks, ale, noteType) < 0)
            return JS_FALSE;
        break;

      case TOK_CONTINUE:
        stmt = cg->topStmt;
        atom = pn->pn_atom;
        if (atom) {
            /* Find the loop statement enclosed by the matching label. */
            JSStmtInfo *loop = NULL;
            ale = cg->atomList.add(cg->compiler, atom);
            if (!ale)
                return JS_FALSE;
            while (stmt->type != STMT_LABEL || stmt->label != atom) {
                if (STMT_IS_LOOP(stmt))
                    loop = stmt;
                stmt = stmt->down;
            }
            stmt = loop;
            noteType = SRC_CONT2LABEL;
        } else {
            ale = NULL;
            while (!STMT_IS_LOOP(stmt))
                stmt = stmt->down;
            noteType = SRC_CONTINUE;
        }

        if (EmitGoto(cx, cg, stmt, &stmt->continues, ale, noteType) < 0)
            return JS_FALSE;
        break;

      case TOK_WITH:
        if (!js_EmitTree(cx, cg, pn->pn_left))
            return JS_FALSE;
        js_PushStatement(cg, &stmtInfo, STMT_WITH, CG_OFFSET(cg));
        if (js_Emit1(cx, cg, JSOP_ENTERWITH) < 0)
            return JS_FALSE;
        if (!js_EmitTree(cx, cg, pn->pn_right))
            return JS_FALSE;
        if (js_Emit1(cx, cg, JSOP_LEAVEWITH) < 0)
            return JS_FALSE;
        ok = js_PopStatementCG(cx, cg);
        break;
      case TOK_TRY:
      {
        ptrdiff_t tryStart, tryEnd, catchJump, finallyStart;
        intN depth;
        JSParseNode *lastCatch;

        catchJump = -1;

        /*
         * Push stmtInfo to track jumps-over-catches and gosubs-to-finally
         * for later fixup.
         *
         * When a finally block is active (STMT_FINALLY in our tree context),
         * non-local jumps (including jumps-over-catches) result in a GOSUB
         * being written into the bytecode stream and fixed-up later (c.f.
         * EmitBackPatchOp and BackPatch).
         */
        js_PushStatement(cg, &stmtInfo,
                         pn->pn_kid3 ? STMT_FINALLY : STMT_TRY,
                         CG_OFFSET(cg));

        /*
         * Since an exception can be thrown at any place inside the try block,
         * we need to restore the stack and the scope chain before we transfer
         * the control to the exception handler.
         *
         * For that we store in a try note associated with the catch or
         * finally block the stack depth upon the try entry. The interpreter
         * uses this depth to properly unwind the stack and the scope chain.
         */
        depth = cg->stackDepth;
5032 /* Mark try location for decompilation, then emit try block. */
5033 if (js_Emit1(cx
, cg
, JSOP_TRY
) < 0)
5035 tryStart
= CG_OFFSET(cg
);
5036 if (!js_EmitTree(cx
, cg
, pn
->pn_kid1
))
5038 JS_ASSERT(depth
== cg
->stackDepth
);
5040 /* GOSUB to finally, if present. */
5042 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0)
5044 jmp
= EmitBackPatchOp(cx
, cg
, JSOP_BACKPATCH
, &GOSUBS(stmtInfo
));
5049 /* Emit (hidden) jump over catch and/or finally. */
5050 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0)
5052 jmp
= EmitBackPatchOp(cx
, cg
, JSOP_BACKPATCH
, &catchJump
);
5056 tryEnd
= CG_OFFSET(cg
);
        /* If this try has a catch block, emit it. */
        pn2 = pn->pn_kid2;
        lastCatch = NULL;
        if (pn2) {
            jsint count = 0;    /* previous catch block's population */

            /*
             * The emitted code for a catch block looks like:
             *
             * [throwing]                          only if 2nd+ catch block
             * [leaveblock]                        only if 2nd+ catch block
             * enterblock                          with SRC_CATCH
             * exception
             * [dup]                               only if catchguard
             * setlocalpop <slot>                  or destructuring code
             * [< catchguard code >]               if there's a catchguard
             * [ifeq <offset to next catch block>]         " "
             * [pop]                               only if catchguard
             * < catch block contents >
             * leaveblock
             * goto <end of catch blocks>          non-local; finally applies
             *
             * If there's no catch block without a catchguard, the last
             * <offset to next catch block> points to rethrow code. This
             * code will [gosub] to the finally code if appropriate, and is
             * also used for the catch-all trynote for capturing exceptions
             * thrown from catch{} blocks.
             */
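            /*
             * Hypothetical source using SpiderMonkey's catchguard extension,
             * just to exercise the layout above:
             *
             *     try {
             *         f();
             *     } catch (e if e instanceof TypeError) {
             *         g(e);
             *     } catch (e) {
             *         h(e);
             *     }
             *
             * Only the first, guarded catch needs the dup/ifeq/pop
             * bookkeeping; the unguarded catch is the final fall-through
             * target, so no trailing rethrow sequence is required.
             */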
5086 for (pn3
= pn2
->pn_head
; pn3
; pn3
= pn3
->pn_next
) {
5087 ptrdiff_t guardJump
, catchNote
;
5089 JS_ASSERT(cg
->stackDepth
== depth
);
5090 guardJump
= GUARDJUMP(stmtInfo
);
5091 if (guardJump
!= -1) {
5092 /* Fix up and clean up previous catch block. */
5093 CHECK_AND_SET_JUMP_OFFSET_AT(cx
, cg
, guardJump
);
5096 * Account for JSOP_ENTERBLOCK (whose block object count
5097 * is saved below) and pushed exception object that we
5098 * still have after the jumping from the previous guard.
5100 cg
->stackDepth
= depth
+ count
+ 1;
5103 * Move exception back to cx->exception to prepare for
5104 * the next catch. We hide [throwing] from the decompiler
5105 * since it compensates for the hidden JSOP_DUP at the
5106 * start of the previous guarded catch.
5108 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0 ||
5109 js_Emit1(cx
, cg
, JSOP_THROWING
) < 0) {
5112 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0)
5114 EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK
, count
);
5115 JS_ASSERT(cg
->stackDepth
== depth
);
5119 * Annotate the JSOP_ENTERBLOCK that's about to be generated
5120 * by the call to js_EmitTree immediately below. Save this
5121 * source note's index in stmtInfo for use by the TOK_CATCH:
5122 * case, where the length of the catch guard is set as the
5125 catchNote
= js_NewSrcNote2(cx
, cg
, SRC_CATCH
, 0);
5128 CATCHNOTE(stmtInfo
) = catchNote
;
5131 * Emit the lexical scope and catch body. Save the catch's
5132 * block object population via count, for use when targeting
5133 * guardJump at the next catch (the guard mismatch case).
5135 JS_ASSERT(pn3
->pn_type
== TOK_LEXICALSCOPE
);
5136 count
= OBJ_BLOCK_COUNT(cx
, pn3
->pn_objbox
->object
);
5137 if (!js_EmitTree(cx
, cg
, pn3
))
5140 /* gosub <finally>, if required */
5142 jmp
= EmitBackPatchOp(cx
, cg
, JSOP_BACKPATCH
,
5146 JS_ASSERT(cg
->stackDepth
== depth
);
5150 * Jump over the remaining catch blocks. This will get fixed
5151 * up to jump to after catch/finally.
5153 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0)
5155 jmp
= EmitBackPatchOp(cx
, cg
, JSOP_BACKPATCH
, &catchJump
);
5160 * Save a pointer to the last catch node to handle try-finally
5161 * and try-catch(guard)-finally special cases.
5163 lastCatch
= pn3
->expr();
5168 * Last catch guard jumps to the rethrow code sequence if none of the
5169 * guards match. Target guardJump at the beginning of the rethrow
5170 * sequence, just in case a guard expression throws and leaves the
5173 if (lastCatch
&& lastCatch
->pn_kid2
) {
5174 CHECK_AND_SET_JUMP_OFFSET_AT(cx
, cg
, GUARDJUMP(stmtInfo
));
5176 /* Sync the stack to take into account pushed exception. */
5177 JS_ASSERT(cg
->stackDepth
== depth
);
5178 cg
->stackDepth
= depth
+ 1;
5181 * Rethrow the exception, delegating executing of finally if any
5182 * to the exception handler.
5184 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0 ||
5185 js_Emit1(cx
, cg
, JSOP_THROW
) < 0) {
5190 JS_ASSERT(cg
->stackDepth
== depth
);
5192 /* Emit finally handler if any. */
5193 finallyStart
= 0; /* to quell GCC uninitialized warnings */
5196 * Fix up the gosubs that might have been emitted before non-local
5197 * jumps to the finally code.
5199 if (!BackPatch(cx
, cg
, GOSUBS(stmtInfo
), CG_NEXT(cg
), JSOP_GOSUB
))
5202 finallyStart
= CG_OFFSET(cg
);
5204 /* Indicate that we're emitting a subroutine body. */
5205 stmtInfo
.type
= STMT_SUBROUTINE
;
5206 if (!UpdateLineNumberNotes(cx
, cg
, pn
->pn_kid3
->pn_pos
.begin
.lineno
))
5208 if (js_Emit1(cx
, cg
, JSOP_FINALLY
) < 0 ||
5209 !js_EmitTree(cx
, cg
, pn
->pn_kid3
) ||
5210 js_Emit1(cx
, cg
, JSOP_RETSUB
) < 0) {
5213 JS_ASSERT(cg
->stackDepth
== depth
);
5215 if (!js_PopStatementCG(cx
, cg
))
5218 if (js_NewSrcNote(cx
, cg
, SRC_ENDBRACE
) < 0 ||
5219 js_Emit1(cx
, cg
, JSOP_NOP
) < 0) {
5223 /* Fix up the end-of-try/catch jumps to come here. */
5224 if (!BackPatch(cx
, cg
, catchJump
, CG_NEXT(cg
), JSOP_GOTO
))
5228 * Add the try note last, to let post-order give us the right ordering
5229 * (first to last for a given nesting level, inner to outer by level).
5232 !NewTryNote(cx
, cg
, JSTRY_CATCH
, depth
, tryStart
, tryEnd
)) {
5237 * If we've got a finally, mark try+catch region with additional
5238 * trynote to catch exceptions (re)thrown from a catch block or
5239 * for the try{}finally{} case.
5242 !NewTryNote(cx
, cg
, JSTRY_FINALLY
, depth
, tryStart
, finallyStart
)) {
5250 ptrdiff_t catchStart
, guardJump
;
5254 * Morph STMT_BLOCK to STMT_CATCH, note the block entry code offset,
5255 * and save the block object atom.
5258 JS_ASSERT(stmt
->type
== STMT_BLOCK
&& (stmt
->flags
& SIF_SCOPE
));
5259 stmt
->type
= STMT_CATCH
;
5260 catchStart
= stmt
->update
;
5261 blockObj
= stmt
->blockObj
;
5263 /* Go up one statement info record to the TRY or FINALLY record. */
5265 JS_ASSERT(stmt
->type
== STMT_TRY
|| stmt
->type
== STMT_FINALLY
);
5267 /* Pick up the pending exception and bind it to the catch variable. */
5268 if (js_Emit1(cx
, cg
, JSOP_EXCEPTION
) < 0)
5272 * Dup the exception object if there is a guard for rethrowing to use
5273 * it later when rethrowing or in other catches.
5275 if (pn
->pn_kid2
&& js_Emit1(cx
, cg
, JSOP_DUP
) < 0)
5279 switch (pn2
->pn_type
) {
5280 #if JS_HAS_DESTRUCTURING
5283 if (!EmitDestructuringOps(cx
, cg
, JSOP_NOP
, pn2
))
5285 if (js_Emit1(cx
, cg
, JSOP_POP
) < 0)
5291 /* Inline and specialize BindNameToSlot for pn2. */
5292 JS_ASSERT(pn2
->pn_cookie
!= FREE_UPVAR_COOKIE
);
5293 EMIT_UINT16_IMM_OP(JSOP_SETLOCALPOP
, pn2
->pn_cookie
);
5300 /* Emit the guard expression, if there is one. */
5302 if (!js_EmitTree(cx
, cg
, pn
->pn_kid2
))
5304 if (!js_SetSrcNoteOffset(cx
, cg
, CATCHNOTE(*stmt
), 0,
5305 CG_OFFSET(cg
) - catchStart
)) {
5308 /* ifeq <next block> */
5309 guardJump
= EmitJump(cx
, cg
, JSOP_IFEQ
, 0);
5312 GUARDJUMP(*stmt
) = guardJump
;
5314 /* Pop duplicated exception object as we no longer need it. */
5315 if (js_Emit1(cx
, cg
, JSOP_POP
) < 0)
5319 /* Emit the catch body. */
5320 if (!js_EmitTree(cx
, cg
, pn
->pn_kid3
))
5324 * Annotate the JSOP_LEAVEBLOCK that will be emitted as we unwind via
5325 * our TOK_LEXICALSCOPE parent, so the decompiler knows to pop.
5327 off
= cg
->stackDepth
;
5328 if (js_NewSrcNote2(cx
, cg
, SRC_CATCH
, off
) < 0)
      case TOK_VAR:
        if (!EmitVariables(cx, cg, pn, JS_FALSE, &noteIndex))
            return JS_FALSE;
        break;

      case TOK_RETURN:
        /* Push a return value */
        pn2 = pn->pn_kid;
        if (pn2) {
            if (!js_EmitTree(cx, cg, pn2))
                return JS_FALSE;
        } else {
            if (js_Emit1(cx, cg, JSOP_PUSH) < 0)
                return JS_FALSE;
        }

        /*
         * EmitNonLocalJumpFixup may add fixup bytecode to close open try
         * blocks having finally clauses and to exit intermingled let blocks.
         * We can't simply transfer control flow to our caller in that case,
         * because we must gosub to those finally clauses from inner to outer,
         * with the correct stack pointer (i.e., after popping any with,
         * for/in, etc., slots nested inside the finally's try).
         *
         * In this case we mutate JSOP_RETURN into JSOP_SETRVAL and add an
         * extra JSOP_RETRVAL after the fixups.
         */
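        /*
         * Sketch of the rewrite described above (opcode names only, offsets
         * omitted): when fixup code is emitted, a plain
         *
         *     return                      # JSOP_RETURN
         *
         * effectively becomes
         *
         *     setrval                     # JSOP_SETRVAL patched over the return
         *     gosub <finally> / fixups    # emitted by EmitNonLocalJumpFixup
         *     retrval                     # JSOP_RETRVAL appended afterwards
         */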
        top = CG_OFFSET(cg);
        if (js_Emit1(cx, cg, JSOP_RETURN) < 0)
            return JS_FALSE;
        if (!EmitNonLocalJumpFixup(cx, cg, NULL))
            return JS_FALSE;
        if (top + JSOP_RETURN_LENGTH != CG_OFFSET(cg)) {
            CG_BASE(cg)[top] = JSOP_SETRVAL;
            if (js_Emit1(cx, cg, JSOP_RETRVAL) < 0)
                return JS_FALSE;
        }
        break;
5372 #if JS_HAS_GENERATORS
5374 if (!(cg
->flags
& TCF_IN_FUNCTION
)) {
5375 js_ReportCompileErrorNumber(cx
, CG_TS(cg
), pn
, JSREPORT_ERROR
,
5376 JSMSG_BAD_RETURN_OR_YIELD
,
5381 if (!js_EmitTree(cx
, cg
, pn
->pn_kid
))
5384 if (js_Emit1(cx
, cg
, JSOP_PUSH
) < 0)
5387 if (pn
->pn_hidden
&& js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0)
5389 if (js_Emit1(cx
, cg
, JSOP_YIELD
) < 0)
5396 #if JS_HAS_XML_SUPPORT
5397 if (pn
->pn_arity
== PN_UNARY
) {
5398 if (!js_EmitTree(cx
, cg
, pn
->pn_kid
))
5400 if (js_Emit1(cx
, cg
, PN_OP(pn
)) < 0)
5406 JS_ASSERT(pn
->pn_arity
== PN_LIST
);
5409 tmp
= CG_OFFSET(cg
);
5410 if (pn
->pn_xflags
& PNX_NEEDBRACES
) {
5411 noteIndex
= js_NewSrcNote2(cx
, cg
, SRC_BRACE
, 0);
5412 if (noteIndex
< 0 || js_Emit1(cx
, cg
, JSOP_NOP
) < 0)
5416 js_PushStatement(cg
, &stmtInfo
, STMT_BLOCK
, top
);
5418 JSParseNode
*pnchild
= pn
->pn_head
;
5419 if (pn
->pn_xflags
& PNX_FUNCDEFS
) {
5421 * This block contains top-level function definitions. To ensure
5422 * that we emit the bytecode defining them before the rest of code
5423 * in the block we use a separate pass over functions. During the
5424 * main pass later the emitter will add JSOP_NOP with source notes
5425 * for the function to preserve the original functions position
5428 * Currently this is used only for functions, as compile-as-we go
5429 * mode for scripts does not allow separate emitter passes.
5431 JS_ASSERT(cg
->flags
& TCF_IN_FUNCTION
);
5432 if (pn
->pn_xflags
& PNX_DESTRUCT
) {
5434 * Assign the destructuring arguments before defining any
5435 * functions, see bug 419662.
5437 JS_ASSERT(pnchild
->pn_type
== TOK_SEMI
);
5438 JS_ASSERT(pnchild
->pn_kid
->pn_type
== TOK_COMMA
);
5439 if (!js_EmitTree(cx
, cg
, pnchild
))
5441 pnchild
= pnchild
->pn_next
;
5444 for (pn2
= pnchild
; pn2
; pn2
= pn2
->pn_next
) {
5445 if (pn2
->pn_type
== TOK_FUNCTION
) {
5446 if (pn2
->pn_op
== JSOP_NOP
) {
5447 if (!js_EmitTree(cx
, cg
, pn2
))
5451 * JSOP_DEFFUN in a top-level block with function
5452 * definitions appears, for example, when "if (true)"
5453 * is optimized away from "if (true) function x() {}".
5456 JS_ASSERT(pn2
->pn_op
== JSOP_DEFFUN
);
5461 for (pn2
= pnchild
; pn2
; pn2
= pn2
->pn_next
) {
5462 if (!js_EmitTree(cx
, cg
, pn2
))
5466 if (noteIndex
>= 0 &&
5467 !js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 0,
5468 CG_OFFSET(cg
) - tmp
)) {
5472 ok
= js_PopStatementCG(cx
, cg
);
5477 JS_ASSERT(pn
->pn_arity
== PN_LIST
);
5478 js_PushStatement(cg
, &stmtInfo
, STMT_SEQ
, top
);
5479 for (pn2
= pn
->pn_head
; pn2
; pn2
= pn2
->pn_next
) {
5480 if (!js_EmitTree(cx
, cg
, pn2
))
5483 ok
= js_PopStatementCG(cx
, cg
);
5490 * Top-level or called-from-a-native JS_Execute/EvaluateScript,
5491 * debugger, and eval frames may need the value of the ultimate
5492 * expression statement as the script's result, despite the fact
5493 * that it appears useless to the compiler.
5495 * API users may also set the JSOPTION_NO_SCRIPT_RVAL option when
5496 * calling JS_Compile* to suppress JSOP_POPV.
5498 useful
= wantval
= !(cg
->flags
& (TCF_IN_FUNCTION
| TCF_NO_SCRIPT_RVAL
));
5500 if (!CheckSideEffects(cx
, cg
, pn2
, &useful
))
5505 * Don't eliminate apparently useless expressions if they are
5506 * labeled expression statements. The tc->topStmt->update test
5507 * catches the case where we are nesting in js_EmitTree for a
5508 * labeled compound statement.
5512 cg
->topStmt
->type
!= STMT_LABEL
||
5513 cg
->topStmt
->update
< CG_OFFSET(cg
))) {
5514 CG_CURRENT_LINE(cg
) = pn2
->pn_pos
.begin
.lineno
;
5515 if (!js_ReportCompileErrorNumber(cx
, CG_TS(cg
), pn2
,
5518 JSMSG_USELESS_EXPR
)) {
5522 op
= wantval
? JSOP_POPV
: JSOP_POP
;
5523 #if JS_HAS_DESTRUCTURING
5525 pn2
->pn_type
== TOK_ASSIGN
&&
5526 !MaybeEmitGroupAssignment(cx
, cg
, op
, pn2
, &op
)) {
5530 if (op
!= JSOP_NOP
) {
5531 if (!js_EmitTree(cx
, cg
, pn2
))
5533 if (js_Emit1(cx
, cg
, op
) < 0)
5541 /* Emit an annotated nop so we know to decompile a label. */
5543 ale
= cg
->atomList
.add(cg
->compiler
, atom
);
5547 noteType
= (pn2
->pn_type
== TOK_LC
||
5548 (pn2
->pn_type
== TOK_LEXICALSCOPE
&&
5549 pn2
->expr()->pn_type
== TOK_LC
))
5552 noteIndex
= js_NewSrcNote2(cx
, cg
, noteType
,
5553 (ptrdiff_t) ALE_INDEX(ale
));
5554 if (noteIndex
< 0 ||
5555 js_Emit1(cx
, cg
, JSOP_NOP
) < 0) {
5559 /* Emit code for the labeled statement. */
5560 js_PushStatement(cg
, &stmtInfo
, STMT_LABEL
, CG_OFFSET(cg
));
5561 stmtInfo
.label
= atom
;
5562 if (!js_EmitTree(cx
, cg
, pn2
))
5564 if (!js_PopStatementCG(cx
, cg
))
5567 /* If the statement was compound, emit a note for the end brace. */
5568 if (noteType
== SRC_LABELBRACE
) {
5569 if (js_NewSrcNote(cx
, cg
, SRC_ENDBRACE
) < 0 ||
5570 js_Emit1(cx
, cg
, JSOP_NOP
) < 0) {
5578 * Emit SRC_PCDELTA notes on each JSOP_POP between comma operands.
5579 * These notes help the decompiler bracket the bytecodes generated
5580 * from each sub-expression that follows a comma.
5582 off
= noteIndex
= -1;
5583 for (pn2
= pn
->pn_head
; ; pn2
= pn2
->pn_next
) {
5584 if (!js_EmitTree(cx
, cg
, pn2
))
5586 tmp
= CG_OFFSET(cg
);
5587 if (noteIndex
>= 0) {
5588 if (!js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 0, tmp
-off
))
5594 noteIndex
= js_NewSrcNote2(cx
, cg
, SRC_PCDELTA
, 0);
5595 if (noteIndex
< 0 ||
5596 js_Emit1(cx
, cg
, JSOP_POP
) < 0) {
5604 * Check left operand type and generate specialized code for it.
5605 * Specialize to avoid ECMA "reference type" values on the operand
5606 * stack, which impose pervasive runtime "GetValue" costs.
5609 JS_ASSERT(pn2
->pn_type
!= TOK_RP
);
5610 atomIndex
= (jsatomid
) -1; /* quell GCC overwarning */
5611 switch (pn2
->pn_type
) {
5613 if (!BindNameToSlot(cx
, cg
, pn2
))
5615 if (pn2
->pn_cookie
!= FREE_UPVAR_COOKIE
) {
5616 atomIndex
= (jsatomid
) pn2
->pn_cookie
;
5618 ale
= cg
->atomList
.add(cg
->compiler
, pn2
->pn_atom
);
5621 atomIndex
= ALE_INDEX(ale
);
5622 if (!pn2
->isConst())
5623 EMIT_INDEX_OP(JSOP_BINDNAME
, atomIndex
);
5627 if (!js_EmitTree(cx
, cg
, pn2
->expr()))
5629 ale
= cg
->atomList
.add(cg
->compiler
, pn2
->pn_atom
);
5632 atomIndex
= ALE_INDEX(ale
);
5635 JS_ASSERT(pn2
->pn_arity
== PN_BINARY
);
5636 if (!js_EmitTree(cx
, cg
, pn2
->pn_left
))
5638 if (!js_EmitTree(cx
, cg
, pn2
->pn_right
))
5641 #if JS_HAS_DESTRUCTURING
5646 #if JS_HAS_LVALUE_RETURN
5648 if (!js_EmitTree(cx
, cg
, pn2
))
5652 #if JS_HAS_XML_SUPPORT
5654 JS_ASSERT(pn2
->pn_op
== JSOP_SETXMLNAME
);
5655 if (!js_EmitTree(cx
, cg
, pn2
->pn_kid
))
5657 if (js_Emit1(cx
, cg
, JSOP_BINDXMLNAME
) < 0)
5666 #if JS_HAS_GETTER_SETTER
5667 if (op
== JSOP_GETTER
|| op
== JSOP_SETTER
) {
5668 if (pn2
->pn_type
== TOK_NAME
&& PN_OP(pn2
) != JSOP_SETNAME
) {
5670 * x getter = y where x is a local or let variable is not
5673 js_ReportCompileErrorNumber(cx
,
5675 pn2
, JSREPORT_ERROR
,
5676 JSMSG_BAD_GETTER_OR_SETTER
,
5683 /* We'll emit these prefix bytecodes after emitting the r.h.s. */
5686 /* If += or similar, dup the left operand and get its value. */
5687 if (op
!= JSOP_NOP
) {
5688 switch (pn2
->pn_type
) {
5690 if (pn2
->isConst()) {
5691 if (PN_OP(pn2
) == JSOP_CALLEE
) {
5692 if (js_Emit1(cx
, cg
, JSOP_CALLEE
) < 0)
5695 EMIT_INDEX_OP(PN_OP(pn2
), atomIndex
);
5697 } else if (PN_OP(pn2
) == JSOP_SETNAME
) {
5698 if (js_Emit1(cx
, cg
, JSOP_DUP
) < 0)
5700 EMIT_INDEX_OP(JSOP_GETXPROP
, atomIndex
);
5702 EMIT_UINT16_IMM_OP((PN_OP(pn2
) == JSOP_SETGVAR
)
5704 : (PN_OP(pn2
) == JSOP_GETUPVAR
)
5706 : (PN_OP(pn2
) == JSOP_SETARG
)
5713 if (js_Emit1(cx
, cg
, JSOP_DUP
) < 0)
5715 if (pn2
->pn_atom
== cx
->runtime
->atomState
.lengthAtom
) {
5716 if (js_Emit1(cx
, cg
, JSOP_LENGTH
) < 0)
5718 } else if (pn2
->pn_atom
== cx
->runtime
->atomState
.protoAtom
) {
5719 if (!EmitIndexOp(cx
, JSOP_QNAMEPART
, atomIndex
, cg
))
5721 if (js_Emit1(cx
, cg
, JSOP_GETELEM
) < 0)
5724 EMIT_INDEX_OP(JSOP_GETPROP
, atomIndex
);
5728 #if JS_HAS_LVALUE_RETURN
5731 #if JS_HAS_XML_SUPPORT
5734 if (js_Emit1(cx
, cg
, JSOP_DUP2
) < 0)
5736 if (js_Emit1(cx
, cg
, JSOP_GETELEM
) < 0)
5743 /* Now emit the right operand (it may affect the namespace). */
5744 if (!js_EmitTree(cx
, cg
, pn
->pn_right
))
5747 /* If += etc., emit the binary operator with a decompiler note. */
5748 if (op
!= JSOP_NOP
) {
5750 * Take care to avoid SRC_ASSIGNOP if the left-hand side is a const
5751 * declared in the current compilation unit, as in this case (just
5752 * a bit further below) we will avoid emitting the assignment op.
5754 if (pn2
->pn_type
!= TOK_NAME
|| !pn2
->isConst()) {
5755 if (js_NewSrcNote(cx
, cg
, SRC_ASSIGNOP
) < 0)
5758 if (js_Emit1(cx
, cg
, op
) < 0)
5762 /* Left parts such as a.b.c and a[b].c need a decompiler note. */
5763 if (pn2
->pn_type
!= TOK_NAME
&&
5764 #if JS_HAS_DESTRUCTURING
5765 pn2
->pn_type
!= TOK_RB
&&
5766 pn2
->pn_type
!= TOK_RC
&&
5768 js_NewSrcNote2(cx
, cg
, SRC_PCBASE
, CG_OFFSET(cg
) - top
) < 0) {
5772 /* Finally, emit the specialized assignment bytecode. */
5773 switch (pn2
->pn_type
) {
5779 EMIT_INDEX_OP(PN_OP(pn2
), atomIndex
);
5782 #if JS_HAS_LVALUE_RETURN
5785 if (js_Emit1(cx
, cg
, JSOP_SETELEM
) < 0)
5788 #if JS_HAS_DESTRUCTURING
5791 if (!EmitDestructuringOps(cx
, cg
, JSOP_SETNAME
, pn2
))
5795 #if JS_HAS_XML_SUPPORT
5797 if (js_Emit1(cx
, cg
, JSOP_SETXMLNAME
) < 0)
5807 /* Emit the condition, then branch if false to the else part. */
5808 if (!js_EmitTree(cx
, cg
, pn
->pn_kid1
))
5810 noteIndex
= js_NewSrcNote(cx
, cg
, SRC_COND
);
5813 beq
= EmitJump(cx
, cg
, JSOP_IFEQ
, 0);
5814 if (beq
< 0 || !js_EmitTree(cx
, cg
, pn
->pn_kid2
))
5817 /* Jump around else, fixup the branch, emit else, fixup jump. */
5818 jmp
= EmitJump(cx
, cg
, JSOP_GOTO
, 0);
5821 CHECK_AND_SET_JUMP_OFFSET_AT(cx
, cg
, beq
);
5824 * Because each branch pushes a single value, but our stack budgeting
5825 * analysis ignores branches, we now have to adjust cg->stackDepth to
5826 * ignore the value pushed by the first branch. Execution will follow
5827 * only one path, so we must decrement cg->stackDepth.
5829 * Failing to do this will foil code, such as the try/catch/finally
5830 * exception handling code generator, that samples cg->stackDepth for
5831 * use at runtime (JSOP_SETSP), or in let expression and block code
5832 * generation, which must use the stack depth to compute local stack
5833 * indexes correctly.
5835 JS_ASSERT(cg
->stackDepth
> 0);
5837 if (!js_EmitTree(cx
, cg
, pn
->pn_kid3
))
5839 CHECK_AND_SET_JUMP_OFFSET_AT(cx
, cg
, jmp
);
5840 if (!js_SetSrcNoteOffset(cx
, cg
, noteIndex
, 0, jmp
- beq
))
5847 * JSOP_OR converts the operand on the stack to boolean, and if true,
5848 * leaves the original operand value on the stack and jumps; otherwise
5849 * it pops and falls into the next bytecode, which evaluates the right
5850 * operand. The jump goes around the right operand evaluation.
5852 * JSOP_AND converts the operand on the stack to boolean, and if false,
5853 * leaves the original operand value on the stack and jumps; otherwise
5854 * it pops and falls into the right operand's bytecode.
5856 if (pn
->pn_arity
== PN_BINARY
) {
5857 if (!js_EmitTree(cx
, cg
, pn
->pn_left
))
5859 top
= EmitJump(cx
, cg
, JSOP_BACKPATCH_POP
, 0);
5862 if (!js_EmitTree(cx
, cg
, pn
->pn_right
))
5864 off
= CG_OFFSET(cg
);
5865 pc
= CG_CODE(cg
, top
);
5866 CHECK_AND_SET_JUMP_OFFSET(cx
, cg
, pc
, off
- top
);
5869 JS_ASSERT(pn
->pn_arity
== PN_LIST
);
5870 JS_ASSERT(pn
->pn_head
->pn_next
->pn_next
);
5872 /* Left-associative operator chain: avoid too much recursion. */
5874 if (!js_EmitTree(cx
, cg
, pn2
))
5876 top
= EmitJump(cx
, cg
, JSOP_BACKPATCH_POP
, 0);
5880 /* Emit nodes between the head and the tail. */
5882 while ((pn2
= pn2
->pn_next
)->pn_next
) {
5883 if (!js_EmitTree(cx
, cg
, pn2
))
5885 off
= EmitJump(cx
, cg
, JSOP_BACKPATCH_POP
, 0);
5888 if (!SetBackPatchDelta(cx
, cg
, CG_CODE(cg
, jmp
), off
- jmp
))
5893 if (!js_EmitTree(cx
, cg
, pn2
))
5897 off
= CG_OFFSET(cg
);
5899 pc
= CG_CODE(cg
, top
);
5900 tmp
= GetJumpOffset(cg
, pc
);
5901 CHECK_AND_SET_JUMP_OFFSET(cx
, cg
, pc
, off
- top
);
5904 } while ((pn2
= pn2
->pn_next
)->pn_next
);
5914 case TOK_INSTANCEOF
:
5920 if (pn
->pn_arity
== PN_LIST
) {
5921 /* Left-associative operator chain: avoid too much recursion. */
5923 if (!js_EmitTree(cx
, cg
, pn2
))
5926 while ((pn2
= pn2
->pn_next
) != NULL
) {
5927 if (!js_EmitTree(cx
, cg
, pn2
))
5929 if (js_Emit1(cx
, cg
, op
) < 0)
5933 #if JS_HAS_XML_SUPPORT
5937 if (pn
->pn_arity
== PN_NAME
) {
5938 if (!js_EmitTree(cx
, cg
, pn
->expr()))
5940 if (!EmitAtomOp(cx
, pn
, PN_OP(pn
), cg
))
5946 * Binary :: has a right operand that brackets arbitrary code,
5947 * possibly including a let (a = b) ... expression. We must clear
5948 * TCF_IN_FOR_INIT to avoid mis-compiling such beasts.
5950 oldflags
= cg
->flags
;
5951 cg
->flags
&= ~TCF_IN_FOR_INIT
;
5954 /* Binary operators that evaluate both operands unconditionally. */
5955 if (!js_EmitTree(cx
, cg
, pn
->pn_left
))
5957 if (!js_EmitTree(cx
, cg
, pn
->pn_right
))
5959 #if JS_HAS_XML_SUPPORT
5960 cg
->flags
|= oldflags
& TCF_IN_FOR_INIT
;
5962 if (js_Emit1(cx
, cg
, PN_OP(pn
)) < 0)
5968 #if JS_HAS_XML_SUPPORT
5971 JS_ASSERT(pn
->pn_arity
== PN_UNARY
);
5978 /* Unary op, including unary +/-. */
5980 #if JS_HAS_XML_SUPPORT
5981 if (op
== JSOP_XMLNAME
) {
5982 if (!EmitXMLName(cx
, pn
, op
, cg
))
5988 if (op
== JSOP_TYPEOF
) {
5989 for (pn3
= pn2
; pn3
->pn_type
== TOK_RP
; pn3
= pn3
->pn_kid
)
5991 if (pn3
->pn_type
!= TOK_NAME
)
5992 op
= JSOP_TYPEOFEXPR
;
5994 oldflags
= cg
->flags
;
5995 cg
->flags
&= ~TCF_IN_FOR_INIT
;
5996 if (!js_EmitTree(cx
, cg
, pn2
))
5998 cg
->flags
|= oldflags
& TCF_IN_FOR_INIT
;
5999 if (js_Emit1(cx
, cg
, op
) < 0)
6006 /* Emit lvalue-specialized code for ++/-- operators. */
6008 JS_ASSERT(pn2
->pn_type
!= TOK_RP
);
6010 switch (pn2
->pn_type
) {
6012 JS_ASSERT(pn2
->pn_type
== TOK_NAME
);
6014 if (!BindNameToSlot(cx
, cg
, pn2
))
6017 if (op
== JSOP_CALLEE
) {
6018 if (js_Emit1(cx
, cg
, op
) < 0)
6020 } else if (pn2
->pn_cookie
!= FREE_UPVAR_COOKIE
) {
6021 atomIndex
= (jsatomid
) pn2
->pn_cookie
;
6022 EMIT_UINT16_IMM_OP(op
, atomIndex
);
6024 JS_ASSERT(JOF_OPTYPE(op
) == JOF_ATOM
);
6025 if (!EmitAtomOp(cx
, pn2
, op
, cg
))
6029 if (pn2
->isConst()) {
6030 if (js_Emit1(cx
, cg
, JSOP_POS
) < 0)
6033 if (!(js_CodeSpec
[op
].format
& JOF_POST
)) {
6034 if (js_Emit1(cx
, cg
, JSOP_ONE
) < 0)
6036 op
= (js_CodeSpec
[op
].format
& JOF_INC
) ? JSOP_ADD
: JSOP_SUB
;
6037 if (js_Emit1(cx
, cg
, op
) < 0)
6043 if (!EmitPropOp(cx
, pn2
, op
, cg
, JS_FALSE
))
6047 if (!EmitElemOp(cx
, pn2
, op
, cg
))
6050 #if JS_HAS_LVALUE_RETURN
6052 if (!js_EmitTree(cx
, cg
, pn2
))
6054 if (js_NewSrcNote2(cx
, cg
, SRC_PCBASE
,
6055 CG_OFFSET(cg
) - pn2
->pn_offset
) < 0) {
6058 if (js_Emit1(cx
, cg
, op
) < 0)
6062 #if JS_HAS_XML_SUPPORT
6064 JS_ASSERT(pn2
->pn_op
== JSOP_SETXMLNAME
);
6065 if (!js_EmitTree(cx
, cg
, pn2
->pn_kid
))
6067 if (js_Emit1(cx
, cg
, JSOP_BINDXMLNAME
) < 0)
6069 if (js_Emit1(cx
, cg
, op
) < 0)
6078 * Under ECMA 3, deleting a non-reference returns true -- but alas we
6079 * must evaluate the operand if it appears it might have side effects.
6082 switch (pn2
->pn_type
) {
6084 if (!BindNameToSlot(cx
, cg
, pn2
))
6087 if (op
== JSOP_FALSE
) {
6088 if (js_Emit1(cx
, cg
, op
) < 0)
6091 if (!EmitAtomOp(cx
, pn2
, op
, cg
))
6096 if (!EmitPropOp(cx
, pn2
, JSOP_DELPROP
, cg
, JS_FALSE
))
6099 #if JS_HAS_XML_SUPPORT
6101 if (!EmitElemOp(cx
, pn2
, JSOP_DELDESC
, cg
))
6105 #if JS_HAS_LVALUE_RETURN
6107 top
= CG_OFFSET(cg
);
6108 if (!js_EmitTree(cx
, cg
, pn2
))
6110 if (js_NewSrcNote2(cx
, cg
, SRC_PCBASE
, CG_OFFSET(cg
) - top
) < 0)
6112 if (js_Emit1(cx
, cg
, JSOP_DELELEM
) < 0)
6117 if (!EmitElemOp(cx
, pn2
, JSOP_DELELEM
, cg
))
6122 * If useless, just emit JSOP_TRUE; otherwise convert delete foo()
6123 * to foo(), true (a comma expression, requiring SRC_PCDELTA).
6126 if (!CheckSideEffects(cx
, cg
, pn2
, &useful
))
6129 off
= noteIndex
= -1;
6131 if (!js_EmitTree(cx
, cg
, pn2
))
6133 off
= CG_OFFSET(cg
);
6134 noteIndex
= js_NewSrcNote2(cx
, cg
, SRC_PCDELTA
, 0);
6135 if (noteIndex
< 0 || js_Emit1(cx
, cg
, JSOP_POP
) < 0)
6138 if (js_Emit1(cx
, cg
, JSOP_TRUE
) < 0)
6140 if (noteIndex
>= 0) {
6141 tmp
= CG_OFFSET(cg
);
6142 if (!js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 0, tmp
-off
))
6148 #if JS_HAS_XML_SUPPORT
6150 if (!js_EmitTree(cx
, cg
, pn
->pn_left
))
6152 jmp
= js_Emit3(cx
, cg
, JSOP_FILTER
, 0, 0);
6155 top
= CG_OFFSET(cg
);
6156 if (!js_Emit1(cx
, cg
, JSOP_LOOP
))
6158 if (!js_EmitTree(cx
, cg
, pn
->pn_right
))
6160 CHECK_AND_SET_JUMP_OFFSET_AT(cx
, cg
, jmp
);
6161 if (EmitJump(cx
, cg
, JSOP_ENDFILTER
, top
- CG_OFFSET(cg
)) < 0)
6168 * Pop a stack operand, convert it to object, get a property named by
6169 * this bytecode's immediate-indexed atom operand, and push its value
6170 * (not a reference to it).
6172 ok
= EmitPropOp(cx
, pn
, PN_OP(pn
), cg
, JS_FALSE
);
6176 #if JS_HAS_XML_SUPPORT
6180 * Pop two operands, convert the left one to object and the right one
6181 * to property name (atom or tagged int), get the named property, and
6182 * push its value. Set the "obj" register to the result of ToObject
6183 * on the left operand.
6185 ok
= EmitElemOp(cx
, pn
, PN_OP(pn
), cg
);
6194 * Emit function call or operator new (constructor call) code.
6195 * First, emit code for the left operand to evaluate the callable or
6196 * constructable object expression.
6199 switch (pn2
->pn_type
) {
6201 if (!EmitNameOp(cx
, cg
, pn2
, JS_TRUE
))
6205 if (!EmitPropOp(cx
, pn2
, PN_OP(pn2
), cg
, JS_TRUE
))
6209 JS_ASSERT(pn2
->pn_op
== JSOP_GETELEM
);
6210 if (!EmitElemOp(cx
, pn2
, JSOP_CALLELEM
, cg
))
6214 #if JS_HAS_XML_SUPPORT
6215 if (pn2
->pn_op
== JSOP_XMLNAME
) {
6216 if (!EmitXMLName(cx
, pn2
, JSOP_CALLXMLNAME
, cg
))
6224 * Push null as a placeholder for the global object, per ECMA-262
6227 if (!js_EmitTree(cx
, cg
, pn2
))
6229 if (js_Emit1(cx
, cg
, JSOP_NULL
) < 0)
6233 /* Remember start of callable-object bytecode for decompilation hint. */
6237 * Emit code for each argument in order, then emit the JSOP_*CALL or
6238 * JSOP_NEW bytecode with a two-byte immediate telling how many args
6239 * were pushed on the operand stack.
6241 oldflags
= cg
->flags
;
6242 cg
->flags
&= ~TCF_IN_FOR_INIT
;
6243 for (pn3
= pn2
->pn_next
; pn3
; pn3
= pn3
->pn_next
) {
6244 if (!js_EmitTree(cx
, cg
, pn3
))
6247 cg
->flags
|= oldflags
& TCF_IN_FOR_INIT
;
6248 if (js_NewSrcNote2(cx
, cg
, SRC_PCBASE
, CG_OFFSET(cg
) - off
) < 0)
6251 argc
= pn
->pn_count
- 1;
6252 if (js_Emit3(cx
, cg
, PN_OP(pn
), ARGC_HI(argc
), ARGC_LO(argc
)) < 0)
6254 if (PN_OP(pn
) == JSOP_EVAL
)
6255 EMIT_UINT16_IMM_OP(JSOP_LINENO
, pn
->pn_pos
.begin
.lineno
);
6259 case TOK_LEXICALSCOPE
:
6261 JSObjectBox
*objbox
;
6264 objbox
= pn
->pn_objbox
;
6265 js_PushBlockScope(cg
, &stmtInfo
, objbox
->object
, CG_OFFSET(cg
));
6268 * If this lexical scope is not for a catch block, let block or let
6269 * expression, or any kind of for loop (where the scope starts in the
6270 * head after the first part if for (;;), else in the body if for-in);
6271 * and if our container is top-level but not a function body, or else
6272 * a block statement; then emit a SRC_BRACE note. All other container
6273 * statements get braces by default from the decompiler.
6276 type
= PN_TYPE(pn
->expr());
6277 if (type
!= TOK_CATCH
&& type
!= TOK_LET
&& type
!= TOK_FOR
&&
6278 (!(stmt
= stmtInfo
.down
)
6279 ? !(cg
->flags
& TCF_IN_FUNCTION
)
6280 : stmt
->type
== STMT_BLOCK
)) {
6281 #if defined DEBUG_brendan || defined DEBUG_mrbkap
6282 /* There must be no source note already output for the next op. */
6283 JS_ASSERT(CG_NOTE_COUNT(cg
) == 0 ||
6284 CG_LAST_NOTE_OFFSET(cg
) != CG_OFFSET(cg
) ||
6285 !GettableNoteForNextOp(cg
));
6287 noteIndex
= js_NewSrcNote2(cx
, cg
, SRC_BRACE
, 0);
6292 JS_ASSERT(CG_OFFSET(cg
) == top
);
6293 if (!EmitEnterBlock(cx
, pn
, cg
))
6296 if (!js_EmitTree(cx
, cg
, pn
->pn_expr
))
6300 if (op
== JSOP_LEAVEBLOCKEXPR
) {
6301 if (js_NewSrcNote2(cx
, cg
, SRC_PCBASE
, CG_OFFSET(cg
) - top
) < 0)
6304 if (noteIndex
>= 0 &&
6305 !js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 0,
6306 CG_OFFSET(cg
) - top
)) {
6311 /* Emit the JSOP_LEAVEBLOCK or JSOP_LEAVEBLOCKEXPR opcode. */
6312 count
= OBJ_BLOCK_COUNT(cx
, objbox
->object
);
6313 EMIT_UINT16_IMM_OP(op
, count
);
6315 ok
= js_PopStatementCG(cx
, cg
);
6319 #if JS_HAS_BLOCK_SCOPE
6321 /* Let statements have their variable declarations on the left. */
6322 if (pn
->pn_arity
== PN_BINARY
) {
6329 /* Non-null pn2 means that pn is the variable list from a let head. */
6330 JS_ASSERT(pn
->pn_arity
== PN_LIST
);
6331 if (!EmitVariables(cx
, cg
, pn
, pn2
!= NULL
, ¬eIndex
))
6334 /* Thus non-null pn2 is the body of the let block or expression. */
6335 tmp
= CG_OFFSET(cg
);
6336 if (pn2
&& !js_EmitTree(cx
, cg
, pn2
))
6339 if (noteIndex
>= 0 &&
6340 !js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 0,
6341 CG_OFFSET(cg
) - tmp
)) {
6345 #endif /* JS_HAS_BLOCK_SCOPE */
6347 #if JS_HAS_GENERATORS
6348 case TOK_ARRAYPUSH
: {
6352 * The array object's stack index is in cg->arrayCompDepth. See below
6353 * under the array initialiser code generator for array comprehension
6356 if (!js_EmitTree(cx
, cg
, pn
->pn_kid
))
6358 slot
= AdjustBlockSlot(cx
, cg
, cg
->arrayCompDepth
);
6361 EMIT_UINT16_IMM_OP(PN_OP(pn
), slot
);
#if JS_HAS_GENERATORS
      case TOK_ARRAYCOMP:
#endif
      case TOK_RB:
        /*
         * Emit code for [a, b, c] that is equivalent to constructing a new
         * array and in source order evaluating each element value and adding
         * it to the array, without invoking latent setters. We use the
         * JSOP_NEWINIT and JSOP_INITELEM bytecodes to ignore setters and to
         * avoid dup'ing and popping the array as each element is added, as
         * JSOP_SETELEM/JSOP_SETPROP would do.
         *
         * If no sharp variable is defined, the initializer is not for an array
         * comprehension, the initializer is not overlarge, and the initializer
         * is not in global code (whose stack growth cannot be precisely modeled
         * due to the need to reserve space for global variables and regular
         * expressions), use JSOP_NEWARRAY to minimize opcodes and to create the
         * array using a fast, all-at-once process rather than a slow, element-
         * by-element process.
         */
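        /*
         * Approximate shapes, not literal disassembly, for "[a, b, c]":
         *
         *   element-by-element (JSOP_NEWINIT):    all-at-once (JSOP_NEWARRAY):
         *     newinit Array                         <a>; <b>; <c>
         *     0; <a>; initelem                      newarray 3
         *     1; <b>; initelem
         *     2; <c>; initelem
         *     endinit
         *
         * The fast path pops all the element values at once, which is why the
         * count must fit in JSOP_NEWARRAY's 16-bit immediate (the JS_BIT(16)
         * test below).
         */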
6386 #if JS_HAS_SHARP_VARS
6391 op
= (JS_LIKELY(pn
->pn_count
< JS_BIT(16)) && (cg
->flags
& TCF_IN_FUNCTION
))
6395 #if JS_HAS_GENERATORS
6396 if (pn
->pn_type
== TOK_ARRAYCOMP
)
6399 #if JS_HAS_SHARP_VARS
6400 JS_ASSERT_IF(sharpnum
>= 0, cg
->flags
& TCF_HAS_SHARPS
);
6401 if (cg
->flags
& TCF_HAS_SHARPS
)
6405 if (op
== JSOP_NEWINIT
) {
6406 if (js_Emit2(cx
, cg
, op
, (jsbytecode
) JSProto_Array
) < 0)
6408 #if JS_HAS_SHARP_VARS
6410 EMIT_UINT16_IMM_OP(JSOP_DEFSHARP
, (jsatomid
) sharpnum
);
6414 #if JS_HAS_GENERATORS
6415 if (pn
->pn_type
== TOK_ARRAYCOMP
) {
6419 * Pass the new array's stack index to the TOK_ARRAYPUSH case via
6420 * cg->arrayCompDepth, then simply traverse the TOK_FOR node and
6421 * its kids under pn2 to generate this comprehension.
6423 JS_ASSERT(cg
->stackDepth
> 0);
6424 saveDepth
= cg
->arrayCompDepth
;
6425 cg
->arrayCompDepth
= (uint32
) (cg
->stackDepth
- 1);
6426 if (!js_EmitTree(cx
, cg
, pn
->pn_head
))
6428 cg
->arrayCompDepth
= saveDepth
;
6430 /* Emit the usual op needed for decompilation. */
6431 if (js_Emit1(cx
, cg
, JSOP_ENDINIT
) < 0)
6435 #endif /* JS_HAS_GENERATORS */
6438 for (atomIndex
= 0; pn2
; atomIndex
++, pn2
= pn2
->pn_next
) {
6439 if (op
== JSOP_NEWINIT
&& !EmitNumberOp(cx
, atomIndex
, cg
))
6441 if (pn2
->pn_type
== TOK_COMMA
) {
6442 if (js_Emit1(cx
, cg
, JSOP_HOLE
) < 0)
6445 if (!js_EmitTree(cx
, cg
, pn2
))
6448 if (op
== JSOP_NEWINIT
&& js_Emit1(cx
, cg
, JSOP_INITELEM
) < 0)
6451 JS_ASSERT(atomIndex
== pn
->pn_count
);
6453 if (pn
->pn_xflags
& PNX_ENDCOMMA
) {
6454 /* Emit a source note so we know to decompile an extra comma. */
6455 if (js_NewSrcNote(cx
, cg
, SRC_CONTINUE
) < 0)
6459 if (op
== JSOP_NEWINIT
) {
6461 * Emit an op to finish the array and, secondarily, to aid in sharp
6462 * array cleanup (if JS_HAS_SHARP_VARS) and decompilation.
6464 if (js_Emit1(cx
, cg
, JSOP_ENDINIT
) < 0)
6469 JS_ASSERT(atomIndex
< JS_BIT(16));
6470 EMIT_UINT16_IMM_OP(JSOP_NEWARRAY
, atomIndex
);
6474 #if JS_HAS_SHARP_VARS
#if JS_HAS_DESTRUCTURING_SHORTHAND
        if (pn->pn_xflags & PNX_DESTRUCT) {
            js_ReportCompileErrorNumber(cx, CG_TS(cg), pn, JSREPORT_ERROR,
                                        JSMSG_BAD_OBJECT_INIT);
            return JS_FALSE;
        }
#endif

        /*
         * Emit code for {p:a, '%q':b, 2:c} that is equivalent to constructing
         * a new object and in source order evaluating each property value and
         * adding the property to the object, without invoking latent setters.
         * We use the JSOP_NEWINIT and JSOP_INITELEM/JSOP_INITPROP bytecodes to
         * ignore setters and to avoid dup'ing and popping the object as each
         * property is added, as JSOP_SETELEM/JSOP_SETPROP would do.
         */
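        /*
         * Approximate shape, not literal disassembly, for "{p:a, 2:c}":
         *
         *     newinit Object
         *     <a>; initprop p      # named key uses JSOP_INITPROP
         *     2; <c>; initelem     # numeric key pushes the index, then JSOP_INITELEM
         *     endinit
         *
         * The SRC_INITPROP note added below on the numeric case lets the
         * decompiler print "2:c" rather than just "c".
         */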
6493 if (js_Emit2(cx
, cg
, JSOP_NEWINIT
, (jsbytecode
) JSProto_Object
) < 0)
6496 #if JS_HAS_SHARP_VARS
6498 EMIT_UINT16_IMM_OP(JSOP_DEFSHARP
, (jsatomid
) sharpnum
);
6501 for (pn2
= pn
->pn_head
; pn2
; pn2
= pn2
->pn_next
) {
6502 /* Emit an index for t[2] for later consumption by JSOP_INITELEM. */
6504 if (pn3
->pn_type
== TOK_NUMBER
) {
6505 if (!EmitNumberOp(cx
, pn3
->pn_dval
, cg
))
6509 /* Emit code for the property initializer. */
6510 if (!js_EmitTree(cx
, cg
, pn2
->pn_right
))
6513 #if JS_HAS_GETTER_SETTER
6515 if (op
== JSOP_GETTER
|| op
== JSOP_SETTER
) {
6516 if (js_Emit1(cx
, cg
, op
) < 0)
6520 /* Annotate JSOP_INITELEM so we decompile 2:c and not just c. */
6521 if (pn3
->pn_type
== TOK_NUMBER
) {
6522 if (js_NewSrcNote(cx
, cg
, SRC_INITPROP
) < 0)
6524 if (js_Emit1(cx
, cg
, JSOP_INITELEM
) < 0)
6527 JS_ASSERT(pn3
->pn_type
== TOK_NAME
||
6528 pn3
->pn_type
== TOK_STRING
);
6529 ale
= cg
->atomList
.add(cg
->compiler
, pn3
->pn_atom
);
6532 EMIT_INDEX_OP(JSOP_INITPROP
, ALE_INDEX(ale
));
6536 /* Emit an op for sharpArray cleanup and decompilation. */
6537 if (js_Emit1(cx
, cg
, JSOP_ENDINIT
) < 0)
6541 #if JS_HAS_SHARP_VARS
6543 JS_ASSERT(cg
->flags
& TCF_HAS_SHARPS
);
6544 sharpnum
= pn
->pn_num
;
6546 if (pn
->pn_type
== TOK_RB
)
6548 # if JS_HAS_GENERATORS
6549 if (pn
->pn_type
== TOK_ARRAYCOMP
)
6552 if (pn
->pn_type
== TOK_RC
)
6553 goto do_emit_object
;
6555 if (!js_EmitTree(cx
, cg
, pn
))
6557 EMIT_UINT16_IMM_OP(JSOP_DEFSHARP
, (jsatomid
) sharpnum
);
6561 JS_ASSERT(cg
->flags
& TCF_HAS_SHARPS
);
6562 EMIT_UINT16_IMM_OP(JSOP_USESHARP
, (jsatomid
) pn
->pn_num
);
6564 #endif /* JS_HAS_SHARP_VARS */
6571 * The node for (e) has e as its kid, enabling users who want to nest
6572 * assignment expressions in conditions to avoid the error correction
6573 * done by Condition (from x = y to x == y) by double-parenthesizing.
6575 oldflags
= cg
->flags
;
6576 cg
->flags
&= ~TCF_IN_FOR_INIT
;
6577 if (!js_EmitTree(cx
, cg
, pn
->pn_kid
))
6579 cg
->flags
|= oldflags
& TCF_IN_FOR_INIT
;
6585 * Cope with a left-over function definition that was replaced by a use
6586 * of a later function definition of the same name. See FunctionDef and
6587 * MakeDefIntoUse in jsparse.cpp.
6589 if (pn
->pn_op
== JSOP_NOP
)
6591 if (!EmitNameOp(cx
, cg
, pn
, JS_FALSE
))
6595 #if JS_HAS_XML_SUPPORT
6600 case TOK_XMLCOMMENT
:
6603 ok
= EmitAtomOp(cx
, pn
, PN_OP(pn
), cg
);
6607 ok
= EmitNumberOp(cx
, pn
->pn_dval
, cg
);
6612 * If the regexp's script is one-shot, we can avoid the extra
6613 * fork-on-exec costs of JSOP_REGEXP by selecting JSOP_OBJECT.
6614 * Otherwise, to avoid incorrect proto, parent, and lastIndex
6615 * sharing among threads and sequentially across re-execution,
6616 * select JSOP_REGEXP.
6618 JS_ASSERT(pn
->pn_op
== JSOP_REGEXP
);
6619 if (cg
->flags
& TCF_COMPILE_N_GO
) {
6620 ok
= EmitObjectOp(cx
, pn
->pn_objbox
, JSOP_OBJECT
, cg
);
6622 ok
= EmitIndexOp(cx
, JSOP_REGEXP
,
6623 cg
->regexpList
.index(pn
->pn_objbox
),
6628 #if JS_HAS_XML_SUPPORT
6632 if (js_Emit1(cx
, cg
, PN_OP(pn
)) < 0)
6636 #if JS_HAS_DEBUGGER_KEYWORD
6638 if (js_Emit1(cx
, cg
, JSOP_DEBUGGER
) < 0)
6641 #endif /* JS_HAS_DEBUGGER_KEYWORD */
6643 #if JS_HAS_XML_SUPPORT
6646 if (pn
->pn_op
== JSOP_XMLOBJECT
) {
6647 ok
= EmitObjectOp(cx
, pn
->pn_objbox
, PN_OP(pn
), cg
);
6651 JS_ASSERT(PN_TYPE(pn
) == TOK_XMLLIST
|| pn
->pn_count
!= 0);
6652 switch (pn
->pn_head
? PN_TYPE(pn
->pn_head
) : TOK_XMLLIST
) {
6660 if (js_Emit1(cx
, cg
, JSOP_STARTXML
) < 0)
6664 for (pn2
= pn
->pn_head
; pn2
; pn2
= pn2
->pn_next
) {
6665 if (pn2
->pn_type
== TOK_LC
&&
6666 js_Emit1(cx
, cg
, JSOP_STARTXMLEXPR
) < 0) {
6669 if (!js_EmitTree(cx
, cg
, pn2
))
6671 if (pn2
!= pn
->pn_head
&& js_Emit1(cx
, cg
, JSOP_ADD
) < 0)
6675 if (pn
->pn_xflags
& PNX_XMLROOT
) {
6676 if (pn
->pn_count
== 0) {
6677 JS_ASSERT(pn
->pn_type
== TOK_XMLLIST
);
6678 atom
= cx
->runtime
->atomState
.emptyAtom
;
6679 ale
= cg
->atomList
.add(cg
->compiler
, atom
);
6682 EMIT_INDEX_OP(JSOP_STRING
, ALE_INDEX(ale
));
6684 if (js_Emit1(cx
, cg
, PN_OP(pn
)) < 0)
6689 JS_ASSERT(pn
->pn_count
!= 0);
6694 if (pn
->pn_op
== JSOP_XMLOBJECT
) {
6695 ok
= EmitObjectOp(cx
, pn
->pn_objbox
, PN_OP(pn
), cg
);
6705 if (js_Emit1(cx
, cg
, JSOP_STARTXML
) < 0)
6708 ale
= cg
->atomList
.add(cg
->compiler
,
6709 (pn
->pn_type
== TOK_XMLETAGO
)
6710 ? cx
->runtime
->atomState
.etagoAtom
6711 : cx
->runtime
->atomState
.stagoAtom
);
6714 EMIT_INDEX_OP(JSOP_STRING
, ALE_INDEX(ale
));
6716 JS_ASSERT(pn
->pn_count
!= 0);
6718 if (pn2
->pn_type
== TOK_LC
&& js_Emit1(cx
, cg
, JSOP_STARTXMLEXPR
) < 0)
6720 if (!js_EmitTree(cx
, cg
, pn2
))
6722 if (js_Emit1(cx
, cg
, JSOP_ADD
) < 0)
6725 for (pn2
= pn2
->pn_next
, i
= 0; pn2
; pn2
= pn2
->pn_next
, i
++) {
6726 if (pn2
->pn_type
== TOK_LC
&&
6727 js_Emit1(cx
, cg
, JSOP_STARTXMLEXPR
) < 0) {
6730 if (!js_EmitTree(cx
, cg
, pn2
))
6732 if ((i
& 1) && pn2
->pn_type
== TOK_LC
) {
6733 if (js_Emit1(cx
, cg
, JSOP_TOATTRVAL
) < 0)
6736 if (js_Emit1(cx
, cg
,
6737 (i
& 1) ? JSOP_ADDATTRVAL
: JSOP_ADDATTRNAME
) < 0) {
6742 ale
= cg
->atomList
.add(cg
->compiler
,
6743 (pn
->pn_type
== TOK_XMLPTAGC
)
6744 ? cx
->runtime
->atomState
.ptagcAtom
6745 : cx
->runtime
->atomState
.tagcAtom
);
6748 EMIT_INDEX_OP(JSOP_STRING
, ALE_INDEX(ale
));
6749 if (js_Emit1(cx
, cg
, JSOP_ADD
) < 0)
6752 if ((pn
->pn_xflags
& PNX_XMLROOT
) && js_Emit1(cx
, cg
, PN_OP(pn
)) < 0)
6758 if (pn
->pn_arity
== PN_LIST
) {
6759 JS_ASSERT(pn
->pn_count
!= 0);
6760 for (pn2
= pn
->pn_head
; pn2
; pn2
= pn2
->pn_next
) {
6761 if (pn2
->pn_type
== TOK_LC
&&
6762 js_Emit1(cx
, cg
, JSOP_STARTXMLEXPR
) < 0) {
6765 if (!js_EmitTree(cx
, cg
, pn2
))
6767 if (pn2
!= pn
->pn_head
&& js_Emit1(cx
, cg
, JSOP_ADD
) < 0)
6771 JS_ASSERT(pn
->pn_arity
== PN_NULLARY
);
6772 ok
= (pn
->pn_op
== JSOP_OBJECT
)
6773 ? EmitObjectOp(cx
, pn
->pn_objbox
, PN_OP(pn
), cg
)
6774 : EmitAtomOp(cx
, pn
, PN_OP(pn
), cg
);
6779 ale
= cg
->atomList
.add(cg
->compiler
, pn
->pn_atom2
);
6782 if (!EmitIndexOp(cx
, JSOP_QNAMEPART
, ALE_INDEX(ale
), cg
))
6784 if (!EmitAtomOp(cx
, pn
, JSOP_XMLPI
, cg
))
6787 #endif /* JS_HAS_XML_SUPPORT */
    if (ok && --cg->emitLevel == 0) {
        if (cg->spanDeps)
            ok = OptimizeSpanDeps(cx, cg);
        if (!UpdateLineNumberNotes(cx, cg, pn->pn_pos.end.lineno))
            return JS_FALSE;
    }

    return ok;
}
/*
 * We should try to get rid of offsetBias (always 0 or 1, where 1 is
 * JSOP_{NOP,POP}_LENGTH), which is used only by SRC_FOR and SRC_DECL.
 */
JS_FRIEND_DATA(JSSrcNoteSpec) js_SrcNoteSpec[] = {
    {"if-else",         2,      0,      1},
    {"continue",        0,      0,      0},
    {"pcdelta",         1,      0,      1},
    {"assignop",        0,      0,      0},
    {"hidden",          0,      0,      0},
    {"pcbase",          1,      0,     -1},
    {"labelbrace",      1,      0,      0},
    {"endbrace",        0,      0,      0},
    {"break2label",     1,      0,      0},
    {"cont2label",      1,      0,      0},
    {"switch",          2,      0,      1},
    {"funcdef",         1,      0,      0},
    {"extended",       -1,      0,      0},
    {"newline",         0,      0,      0},
    {"setline",         1,      0,      0},
    {"xdelta",          0,      0,      0},
};
static intN
AllocSrcNote(JSContext *cx, JSCodeGenerator *cg)
{
    intN index;
    JSArenaPool *pool;
    size_t size;

    index = CG_NOTE_COUNT(cg);
    if (((uintN)index & CG_NOTE_MASK(cg)) == 0) {
        pool = cg->notePool;
        size = SRCNOTE_SIZE(CG_NOTE_MASK(cg) + 1);
        if (!CG_NOTES(cg)) {
            /* Allocate the first note array lazily; leave noteMask alone. */
            JS_ARENA_ALLOCATE_CAST(CG_NOTES(cg), jssrcnote *, pool, size);
        } else {
            /* Grow by doubling note array size; update noteMask on success. */
            JS_ARENA_GROW_CAST(CG_NOTES(cg), jssrcnote *, pool, size, size);
            if (CG_NOTES(cg))
                CG_NOTE_MASK(cg) = (CG_NOTE_MASK(cg) << 1) | 1;
        }
        if (!CG_NOTES(cg)) {
            js_ReportOutOfScriptQuota(cx);
            return -1;
        }
    }

    CG_NOTE_COUNT(cg) = index + 1;
    return index;
}
intN
js_NewSrcNote(JSContext *cx, JSCodeGenerator *cg, JSSrcNoteType type)
{
    intN index, n;
    jssrcnote *sn;
    ptrdiff_t offset, delta, xdelta;

    /*
     * Claim a note slot in CG_NOTES(cg) by growing it if necessary and then
     * incrementing CG_NOTE_COUNT(cg).
     */
    index = AllocSrcNote(cx, cg);
    if (index < 0)
        return -1;
    sn = &CG_NOTES(cg)[index];

    /*
     * Compute delta from the last annotated bytecode's offset. If it's too
     * big to fit in sn, allocate one or more xdelta notes and reset sn.
     */
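    /*
     * Worked example (limits assumed purely for illustration): if
     * SN_DELTA_LIMIT were 16 and SN_XDELTA_MASK were 63, a 200-byte gap since
     * the last annotated bytecode would be encoded as three SRC_XDELTA notes
     * absorbing 63 bytes each, leaving a delta of 11 to be stored in the real
     * note by SN_MAKE_NOTE below.
     */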
    offset = CG_OFFSET(cg);
    delta = offset - CG_LAST_NOTE_OFFSET(cg);
    CG_LAST_NOTE_OFFSET(cg) = offset;
    if (delta >= SN_DELTA_LIMIT) {
        do {
            xdelta = JS_MIN(delta, SN_XDELTA_MASK);
            SN_MAKE_XDELTA(sn, xdelta);
            delta -= xdelta;
            index = AllocSrcNote(cx, cg);
            if (index < 0)
                return -1;
            sn = &CG_NOTES(cg)[index];
        } while (delta >= SN_DELTA_LIMIT);
    }

    /*
     * Initialize type and delta, then allocate the minimum number of notes
     * needed for type's arity. Usually, we won't need more, but if an offset
     * does take two bytes, js_SetSrcNoteOffset will grow CG_NOTES(cg).
     */
    SN_MAKE_NOTE(sn, type, delta);
    for (n = (intN)js_SrcNoteSpec[type].arity; n > 0; n--) {
        if (js_NewSrcNote(cx, cg, SRC_NULL) < 0)
            return -1;
    }
    return index;
}
intN
js_NewSrcNote2(JSContext *cx, JSCodeGenerator *cg, JSSrcNoteType type,
               ptrdiff_t offset)
{
    intN index;

    index = js_NewSrcNote(cx, cg, type);
    if (index >= 0) {
        if (!js_SetSrcNoteOffset(cx, cg, index, 0, offset))
            return -1;
    }
    return index;
}
intN
js_NewSrcNote3(JSContext *cx, JSCodeGenerator *cg, JSSrcNoteType type,
               ptrdiff_t offset1, ptrdiff_t offset2)
{
    intN index;

    index = js_NewSrcNote(cx, cg, type);
    if (index >= 0) {
        if (!js_SetSrcNoteOffset(cx, cg, index, 0, offset1))
            return -1;
        if (!js_SetSrcNoteOffset(cx, cg, index, 1, offset2))
            return -1;
    }
    return index;
}
static JSBool
GrowSrcNotes(JSContext *cx, JSCodeGenerator *cg)
{
    JSArenaPool *pool;
    size_t size;

    /* Grow by doubling note array size; update noteMask on success. */
    pool = cg->notePool;
    size = SRCNOTE_SIZE(CG_NOTE_MASK(cg) + 1);
    JS_ARENA_GROW_CAST(CG_NOTES(cg), jssrcnote *, pool, size, size);
    if (!CG_NOTES(cg)) {
        js_ReportOutOfScriptQuota(cx);
        return JS_FALSE;
    }
    CG_NOTE_MASK(cg) = (CG_NOTE_MASK(cg) << 1) | 1;
    return JS_TRUE;
}
JSBool
js_AddToSrcNoteDelta(JSContext *cx, JSCodeGenerator *cg, jssrcnote *sn,
                     ptrdiff_t delta)
{
    ptrdiff_t base, limit, newdelta, diff;
    intN index;

    /*
     * Called only from OptimizeSpanDeps and js_FinishTakingSrcNotes to add to
     * main script note deltas, and only by a small positive amount.
     */
    JS_ASSERT(cg->current == &cg->main);
    JS_ASSERT((unsigned) delta < (unsigned) SN_XDELTA_LIMIT);

    base = SN_DELTA(sn);
    limit = SN_IS_XDELTA(sn) ? SN_XDELTA_LIMIT : SN_DELTA_LIMIT;
    newdelta = base + delta;
    if (newdelta < limit) {
        SN_SET_DELTA(sn, newdelta);
    } else {
        index = sn - cg->main.notes;
        if ((cg->main.noteCount & cg->main.noteMask) == 0) {
            if (!GrowSrcNotes(cx, cg))
                return JS_FALSE;
            sn = cg->main.notes + index;
        }
        diff = cg->main.noteCount - index;
        cg->main.noteCount++;
        memmove(sn + 1, sn, SRCNOTE_SIZE(diff));
        SN_MAKE_XDELTA(sn, delta);
    }
    return JS_TRUE;
}
JS_FRIEND_API(uintN)
js_SrcNoteLength(jssrcnote *sn)
{
    uintN arity;
    jssrcnote *base;

    arity = (intN)js_SrcNoteSpec[SN_TYPE(sn)].arity;
    for (base = sn++; arity; sn++, arity--) {
        if (*sn & SN_3BYTE_OFFSET_FLAG)
            sn += 2;
    }
    return sn - base;
}
JS_FRIEND_API(ptrdiff_t)
js_GetSrcNoteOffset(jssrcnote *sn, uintN which)
{
    /* Find the offset numbered which (i.e., skip exactly which offsets). */
    JS_ASSERT(SN_TYPE(sn) != SRC_XDELTA);
    JS_ASSERT((intN) which < js_SrcNoteSpec[SN_TYPE(sn)].arity);
    for (sn++; which; sn++, which--) {
        if (*sn & SN_3BYTE_OFFSET_FLAG)
            sn += 2;
    }
    if (*sn & SN_3BYTE_OFFSET_FLAG) {
        return (ptrdiff_t)(((uint32)(sn[0] & SN_3BYTE_OFFSET_MASK) << 16)
                           | (sn[1] << 8)
                           | sn[2]);
    }
    return (ptrdiff_t)*sn;
}

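/*
 * Illustration of the 3-byte form read above, assuming SN_3BYTE_OFFSET_FLAG
 * is the high bit of a note byte and SN_3BYTE_OFFSET_MASK covers the rest:
 * an offset such as 0x12345 is stored as the three note bytes
 * {SN_3BYTE_OFFSET_FLAG | 0x01, 0x23, 0x45} and reassembled by shifting the
 * masked first byte left 16, the second byte left 8, and or'ing in the third.
 */
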
JSBool
js_SetSrcNoteOffset(JSContext *cx, JSCodeGenerator *cg, uintN index,
                    uintN which, ptrdiff_t offset)
{
    jssrcnote *sn;
    ptrdiff_t diff;

    if ((jsuword)offset >= (jsuword)((ptrdiff_t)SN_3BYTE_OFFSET_FLAG << 16)) {
        ReportStatementTooLarge(cx, cg);
        return JS_FALSE;
    }

    /* Find the offset numbered which (i.e., skip exactly which offsets). */
    sn = &CG_NOTES(cg)[index];
    JS_ASSERT(SN_TYPE(sn) != SRC_XDELTA);
    JS_ASSERT((intN) which < js_SrcNoteSpec[SN_TYPE(sn)].arity);
    for (sn++; which; sn++, which--) {
        if (*sn & SN_3BYTE_OFFSET_FLAG)
            sn += 2;
    }

    /* See if the new offset requires three bytes. */
    if (offset > (ptrdiff_t)SN_3BYTE_OFFSET_MASK) {
        /* Maybe this offset was already set to a three-byte value. */
        if (!(*sn & SN_3BYTE_OFFSET_FLAG)) {
            /* Losing, need to insert another two bytes for this offset. */
            index = sn - CG_NOTES(cg);

            /*
             * Simultaneously test to see if the source note array must grow to
             * accommodate either the first or second byte of additional storage
             * required by this 3-byte offset.
             */
            if (((CG_NOTE_COUNT(cg) + 1) & CG_NOTE_MASK(cg)) <= 1) {
                if (!GrowSrcNotes(cx, cg))
                    return JS_FALSE;
                sn = CG_NOTES(cg) + index;
            }
            CG_NOTE_COUNT(cg) += 2;

            diff = CG_NOTE_COUNT(cg) - (index + 3);
            JS_ASSERT(diff >= 0);
            if (diff > 0)
                memmove(sn + 3, sn + 1, SRCNOTE_SIZE(diff));
        }
        *sn++ = (jssrcnote)(SN_3BYTE_OFFSET_FLAG | (offset >> 16));
        *sn++ = (jssrcnote)(offset >> 8);
    }
    *sn = (jssrcnote)offset;
    return JS_TRUE;
}

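/*
 * Note on the growth path above: widening an already-written 1-byte offset to
 * the 3-byte form inserts two extra note bytes and may reallocate the array
 * via GrowSrcNotes, so every later note in CG_NOTES(cg) can move. Callers
 * should therefore work with note indexes, as this function does, rather than
 * holding raw jssrcnote pointers across the call.
 */
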
#ifdef DEBUG_notme
#define DEBUG_srcnotesize
#endif

#ifdef DEBUG_srcnotesize
#define NBINS 10
static uint32 hist[NBINS];

void DumpSrcNoteSizeHist()
{
    FILE *fp;
    int i, n;

    fp = fopen("/tmp/srcnotes.hist", "w");
    if (!fp)
        return;
    setvbuf(fp, NULL, _IONBF, 0);
    fprintf(fp, "SrcNote size histogram:\n");
    for (i = 0; i < NBINS; i++) {
        fprintf(fp, "%4u %4u ", JS_BIT(i), hist[i]);
        for (n = (int) JS_HOWMANY(hist[i], 10); n > 0; --n)
            fputc('*', fp);
        fputc('\n', fp);
    }
    fputc('\n', fp);
    fclose(fp);
}
#endif

/*
 * Fill in the storage at notes with prolog and main srcnotes; the space at
 * notes was allocated using the CG_COUNT_FINAL_SRCNOTES macro from jsemit.h.
 * SO DON'T CHANGE THIS FUNCTION WITHOUT AT LEAST CHECKING WHETHER jsemit.h's
 * CG_COUNT_FINAL_SRCNOTES MACRO NEEDS CORRESPONDING CHANGES!
 */
JSBool
js_FinishTakingSrcNotes(JSContext *cx, JSCodeGenerator *cg, jssrcnote *notes)
{
    uintN prologCount, mainCount, totalCount;
    ptrdiff_t offset, delta;
    jssrcnote *sn;

    JS_ASSERT(cg->current == &cg->main);

    prologCount = cg->prolog.noteCount;
    if (prologCount && cg->prolog.currentLine != cg->firstLine) {
        CG_SWITCH_TO_PROLOG(cg);
        if (js_NewSrcNote2(cx, cg, SRC_SETLINE, (ptrdiff_t)cg->firstLine) < 0)
            return JS_FALSE;
        prologCount = cg->prolog.noteCount;
        CG_SWITCH_TO_MAIN(cg);
    } else {
        /*
         * Either no prolog srcnotes, or no line number change over prolog.
         * We don't need a SRC_SETLINE, but we may need to adjust the offset
         * of the first main note, by adding to its delta and possibly even
         * prepending SRC_XDELTA notes to it to account for prolog bytecodes
         * that came at and after the last annotated bytecode.
         */
        offset = CG_PROLOG_OFFSET(cg) - cg->prolog.lastNoteOffset;
        JS_ASSERT(offset >= 0);
        if (offset > 0 && cg->main.noteCount != 0) {
            /* NB: Use as much of the first main note's delta as we can. */
            sn = cg->main.notes;
            delta = SN_IS_XDELTA(sn)
                    ? SN_XDELTA_MASK - (*sn & SN_XDELTA_MASK)
                    : SN_DELTA_MASK - (*sn & SN_DELTA_MASK);
            if (offset < delta)
                delta = offset;
            for (;;) {
                if (!js_AddToSrcNoteDelta(cx, cg, sn, delta))
                    return JS_FALSE;
                offset -= delta;
                if (offset == 0)
                    break;
                delta = JS_MIN(offset, SN_XDELTA_MASK);
                sn = cg->main.notes;
            }
        }
    }

    mainCount = cg->main.noteCount;
    totalCount = prologCount + mainCount;
    if (prologCount)
        memcpy(notes, cg->prolog.notes, SRCNOTE_SIZE(prologCount));
    memcpy(notes + prologCount, cg->main.notes, SRCNOTE_SIZE(mainCount));
    SN_MAKE_TERMINATOR(&notes[totalCount]);

#ifdef DEBUG_notme
  { int bin = JS_CeilingLog2(totalCount);
    if (bin >= NBINS)
        bin = NBINS - 1;
    ++hist[bin];
  }
#endif
    return JS_TRUE;
}

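/*
 * Sizing note for the function above: the destination must hold every prolog
 * note, every main note, and one terminator slot written by
 * SN_MAKE_TERMINATOR, plus whatever extra notes this function itself adds
 * (the SRC_SETLINE prolog note or SRC_XDELTA padding). CG_COUNT_FINAL_SRCNOTES
 * in jsemit.h is expected to account for all of these.
 */
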
static JSBool
NewTryNote(JSContext *cx, JSCodeGenerator *cg, JSTryNoteKind kind,
           uintN stackDepth, size_t start, size_t end)
{
    JSTryNode *tryNode;

    JS_ASSERT((uintN)(uint16)stackDepth == stackDepth);
    JS_ASSERT(start <= end);
    JS_ASSERT((size_t)(uint32)start == start);
    JS_ASSERT((size_t)(uint32)end == end);

    JS_ARENA_ALLOCATE_TYPE(tryNode, JSTryNode, &cx->tempPool);
    if (!tryNode) {
        js_ReportOutOfScriptQuota(cx);
        return JS_FALSE;
    }

    tryNode->note.kind = kind;
    tryNode->note.stackDepth = (uint16)stackDepth;
    tryNode->note.start = (uint32)start;
    tryNode->note.length = (uint32)(end - start);
    tryNode->prev = cg->lastTryNode;
    cg->lastTryNode = tryNode;
    cg->ntrynotes++;
    return JS_TRUE;
}

void
js_FinishTakingTryNotes(JSCodeGenerator *cg, JSTryNoteArray *array)
{
    JSTryNode *tryNode;
    JSTryNote *tn;

    JS_ASSERT(array->length > 0 && array->length == cg->ntrynotes);
    tn = array->vector + array->length;
    tryNode = cg->lastTryNode;
    do {
        *--tn = tryNode->note;
    } while ((tryNode = tryNode->prev) != NULL);
    JS_ASSERT(tn == array->vector);
}

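/*
 * The try notes collected by NewTryNote form a singly linked list threaded
 * through JSTryNode::prev, newest first, so the loop above fills
 * array->vector from the end toward the front and the finished array ends up
 * in the order the try notes were created.
 */
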
/*
 * Find the index of the given object for the code generator.
 *
 * Since the emitter refers to each parsed object only once, for the index we
 * use the number of already indexed objects. We also add the object to a list
 * to convert the list to a fixed-size array when we complete code generation,
 * see JSCGObjectList::finish below.
 *
 * Most of the objects go to JSCodeGenerator.objectList, but for regexps we use
 * a separate JSCodeGenerator.regexpList. In this way the emitted index can be
 * directly used to store and fetch a reference to a cloned RegExp object that
 * shares the same JSRegExp private data created for the object literal in
 * objbox. We need a cloned object to hold lastIndex and other direct properties
 * that should not be shared among threads sharing a precompiled function or
 * script.
 *
 * If the code being compiled is function code, allocate a reserved slot in
 * the cloned function object that shares its precompiled script with other
 * cloned function objects and with the compiler-created clone-parent. There
 * are nregexps = JS_SCRIPT_REGEXPS(script)->length such reserved slots in each
 * function object cloned from fun->object. NB: during compilation, a funobj
 * slots element must never be allocated, because js_AllocSlot could hand out
 * one of the slots that should be given to a regexp clone.
 *
 * If the code being compiled is global code, the cloned regexps are stored in
 * fp->vars slots after cg->ngvars, and to protect regexp slots from GC we set
 * fp->nvars to ngvars + nregexps.
 *
 * The slots initially contain undefined or null. We populate them lazily when
 * JSOP_REGEXP is executed for the first time.
 *
 * Why clone regexp objects? ECMA specifies that when a regular expression
 * literal is scanned, a RegExp object is created. In the spec, compilation
 * and execution happen indivisibly, but in this implementation and many of
 * its embeddings, code is precompiled early and re-executed in multiple
 * threads, or using multiple global objects, or both, for efficiency.
 *
 * In such cases, naively following ECMA leads to wrongful sharing of RegExp
 * objects, which makes for collisions on the lastIndex property (especially
 * for global regexps) and on any ad-hoc properties. Also, __proto__ and
 * __parent__ refer to the pre-compilation prototype and global objects, a
 * pigeon-hole problem for instanceof tests.
 */
uintN
JSCGObjectList::index(JSObjectBox *objbox)
{
    JS_ASSERT(!objbox->emitLink);
    objbox->emitLink = lastbox;
    lastbox = objbox;
    return length++;
}

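/*
 * index() above pushes objbox onto a list linked through emitLink, newest
 * first, and returns the running length as the object's index. finish() below
 * walks that reverse-ordered list and fills array->vector from the back, so
 * the object that received index k ends up at array->vector[k].
 */
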
void
JSCGObjectList::finish(JSObjectArray *array)
{
    JSObject **cursor;
    JSObjectBox *objbox;

    JS_ASSERT(length <= INDEX_LIMIT);
    JS_ASSERT(length == array->length);

    cursor = array->vector + array->length;
    objbox = lastbox;
    do {
        --cursor;
        JS_ASSERT(!*cursor);
        *cursor = objbox->object;
    } while ((objbox = objbox->emitLink) != NULL);
    JS_ASSERT(cursor == array->vector);
}