1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=8 sw=4 et tw=99:
4 * ***** BEGIN LICENSE BLOCK *****
5 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
7 * The contents of this file are subject to the Mozilla Public License Version
8 * 1.1 (the "License"); you may not use this file except in compliance with
9 * the License. You may obtain a copy of the License at
10 * http://www.mozilla.org/MPL/
12 * Software distributed under the License is distributed on an "AS IS" basis,
13 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 * for the specific language governing rights and limitations under the
17 * The Original Code is Mozilla Communicator client code, released
20 * The Initial Developer of the Original Code is
21 * Netscape Communications Corporation.
22 * Portions created by the Initial Developer are Copyright (C) 1998
23 * the Initial Developer. All Rights Reserved.
27 * Alternatively, the contents of this file may be used under the terms of
28 * either of the GNU General Public License Version 2 or later (the "GPL"),
29 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
30 * in which case the provisions of the GPL or the LGPL are applicable instead
31 * of those above. If you wish to allow use of your version of this file only
32 * under the terms of either the GPL or the LGPL, and not to allow others to
33 * use your version of this file under the terms of the MPL, indicate your
34 * decision by deleting the provisions above and replace them with the notice
35 * and other provisions required by the GPL or the LGPL. If you do not delete
36 * the provisions above, a recipient may use your version of this file under
37 * the terms of any one of the MPL, the GPL or the LGPL.
39 * ***** END LICENSE BLOCK ***** */
42 * JS bytecode generation.
51 #include "jsarena.h" /* Added by JSIFY */
52 #include "jsutil.h" /* Added by JSIFY */
59 #include "jsversion.h"
69 #include "jsautooplen.h"
70 #include "jsstaticcheck.h"
72 /* Allocation chunk counts, must be powers of two in general. */
73 #define BYTECODE_CHUNK 256 /* code allocation increment */
74 #define SRCNOTE_CHUNK 64 /* initial srcnote allocation increment */
75 #define TRYNOTE_CHUNK 64 /* trynote allocation increment */
77 /* Macros to compute byte sizes from typed element counts. */
78 #define BYTECODE_SIZE(n) ((n) * sizeof(jsbytecode))
79 #define SRCNOTE_SIZE(n) ((n) * sizeof(jssrcnote))
80 #define TRYNOTE_SIZE(n) ((n) * sizeof(JSTryNote))
83 NewTryNote(JSContext
*cx
, JSCodeGenerator
*cg
, JSTryNoteKind kind
,
84 uintN stackDepth
, size_t start
, size_t end
);
86 JSCodeGenerator::JSCodeGenerator(JSCompiler
*jsc
,
87 JSArenaPool
*cpool
, JSArenaPool
*npool
,
90 codePool(cpool
), notePool(npool
),
91 codeMark(JS_ARENA_MARK(cpool
)), noteMark(JS_ARENA_MARK(npool
)),
92 stackDepth(0), maxStackDepth(0),
93 ntrynotes(0), lastTryNode(NULL
),
94 spanDeps(NULL
), jumpTargets(NULL
), jtFreeList(NULL
),
95 numSpanDeps(0), numJumpTargets(0), spanDepTodo(0),
99 flags
= TCF_COMPILING
;
100 memset(&prolog
, 0, sizeof prolog
);
101 memset(&main
, 0, sizeof main
);
103 firstLine
= prolog
.currentLine
= main
.currentLine
= lineno
;
104 prolog
.noteMask
= main
.noteMask
= SRCNOTE_CHUNK
- 1;
105 memset(&upvarMap
, 0, sizeof upvarMap
);
108 JSCodeGenerator::~JSCodeGenerator()
110 JS_ARENA_RELEASE(codePool
, codeMark
);
111 JS_ARENA_RELEASE(notePool
, noteMark
);
113 /* NB: non-null only after OOM. */
115 compiler
->context
->free(spanDeps
);
118 compiler
->context
->free(upvarMap
.vector
);
122 EmitCheck(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp op
, ptrdiff_t delta
)
124 jsbytecode
*base
, *limit
, *next
;
125 ptrdiff_t offset
, length
;
130 limit
= CG_LIMIT(cg
);
131 offset
= next
- base
;
132 if (next
+ delta
> limit
) {
133 length
= offset
+ delta
;
134 length
= (length
<= BYTECODE_CHUNK
)
136 : JS_BIT(JS_CeilingLog2(length
));
137 incr
= BYTECODE_SIZE(length
);
139 JS_ARENA_ALLOCATE_CAST(base
, jsbytecode
*, cg
->codePool
, incr
);
141 size
= BYTECODE_SIZE(limit
- base
);
143 JS_ARENA_GROW_CAST(base
, jsbytecode
*, cg
->codePool
, size
, incr
);
146 js_ReportOutOfScriptQuota(cx
);
150 CG_LIMIT(cg
) = base
+ length
;
151 CG_NEXT(cg
) = base
+ offset
;
157 UpdateDepth(JSContext
*cx
, JSCodeGenerator
*cg
, ptrdiff_t target
)
161 const JSCodeSpec
*cs
;
162 uintN extra
, depth
, nuses
;
165 pc
= CG_CODE(cg
, target
);
167 cs
= &js_CodeSpec
[op
];
169 extern uint8 js_opcode2extra
[];
170 extra
= js_opcode2extra
[op
];
174 if ((cs
->format
& JOF_TMPSLOT_MASK
) || extra
) {
175 depth
= (uintN
) cg
->stackDepth
+
176 ((cs
->format
& JOF_TMPSLOT_MASK
) >> JOF_TMPSLOT_SHIFT
) +
178 if (depth
> cg
->maxStackDepth
)
179 cg
->maxStackDepth
= depth
;
182 nuses
= js_GetStackUses(cs
, op
, pc
);
183 cg
->stackDepth
-= nuses
;
184 JS_ASSERT(cg
->stackDepth
>= 0);
185 if (cg
->stackDepth
< 0) {
189 JS_snprintf(numBuf
, sizeof numBuf
, "%d", target
);
190 ts
= &cg
->compiler
->tokenStream
;
191 JS_ReportErrorFlagsAndNumber(cx
, JSREPORT_WARNING
,
192 js_GetErrorMessage
, NULL
,
193 JSMSG_STACK_UNDERFLOW
,
194 ts
->filename
? ts
->filename
: "stdin",
201 /* We just executed IndexParsedObject */
202 JS_ASSERT(op
== JSOP_ENTERBLOCK
);
203 JS_ASSERT(nuses
== 0);
204 blockObj
= cg
->objectList
.lastbox
->object
;
205 JS_ASSERT(STOBJ_GET_CLASS(blockObj
) == &js_BlockClass
);
206 JS_ASSERT(JSVAL_IS_VOID(blockObj
->fslots
[JSSLOT_BLOCK_DEPTH
]));
208 OBJ_SET_BLOCK_DEPTH(cx
, blockObj
, cg
->stackDepth
);
209 ndefs
= OBJ_BLOCK_COUNT(cx
, blockObj
);
211 cg
->stackDepth
+= ndefs
;
212 if ((uintN
)cg
->stackDepth
> cg
->maxStackDepth
)
213 cg
->maxStackDepth
= cg
->stackDepth
;
217 js_Emit1(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp op
)
219 ptrdiff_t offset
= EmitCheck(cx
, cg
, op
, 1);
222 *CG_NEXT(cg
)++ = (jsbytecode
)op
;
223 UpdateDepth(cx
, cg
, offset
);
229 js_Emit2(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp op
, jsbytecode op1
)
231 ptrdiff_t offset
= EmitCheck(cx
, cg
, op
, 2);
234 jsbytecode
*next
= CG_NEXT(cg
);
235 next
[0] = (jsbytecode
)op
;
237 CG_NEXT(cg
) = next
+ 2;
238 UpdateDepth(cx
, cg
, offset
);
244 js_Emit3(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp op
, jsbytecode op1
,
247 ptrdiff_t offset
= EmitCheck(cx
, cg
, op
, 3);
250 jsbytecode
*next
= CG_NEXT(cg
);
251 next
[0] = (jsbytecode
)op
;
254 CG_NEXT(cg
) = next
+ 3;
255 UpdateDepth(cx
, cg
, offset
);
261 js_EmitN(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp op
, size_t extra
)
263 ptrdiff_t length
= 1 + (ptrdiff_t)extra
;
264 ptrdiff_t offset
= EmitCheck(cx
, cg
, op
, length
);
267 jsbytecode
*next
= CG_NEXT(cg
);
268 *next
= (jsbytecode
)op
;
269 memset(next
+ 1, 0, BYTECODE_SIZE(extra
));
270 CG_NEXT(cg
) = next
+ length
;
273 * Don't UpdateDepth if op's use-count comes from the immediate
274 * operand yet to be stored in the extra bytes after op.
276 if (js_CodeSpec
[op
].nuses
>= 0)
277 UpdateDepth(cx
, cg
, offset
);
282 /* XXX too many "... statement" L10N gaffes below -- fix via js.msg! */
283 const char js_with_statement_str
[] = "with statement";
284 const char js_finally_block_str
[] = "finally block";
285 const char js_script_str
[] = "script";
287 static const char *statementName
[] = {
288 "label statement", /* LABEL */
289 "if statement", /* IF */
290 "else statement", /* ELSE */
291 "destructuring body", /* BODY */
292 "switch statement", /* SWITCH */
294 js_with_statement_str
, /* WITH */
295 "catch block", /* CATCH */
296 "try block", /* TRY */
297 js_finally_block_str
, /* FINALLY */
298 js_finally_block_str
, /* SUBROUTINE */
299 "do loop", /* DO_LOOP */
300 "for loop", /* FOR_LOOP */
301 "for/in loop", /* FOR_IN_LOOP */
302 "while loop", /* WHILE_LOOP */
305 JS_STATIC_ASSERT(JS_ARRAY_LENGTH(statementName
) == STMT_LIMIT
);
308 StatementName(JSCodeGenerator
*cg
)
311 return js_script_str
;
312 return statementName
[cg
->topStmt
->type
];
316 ReportStatementTooLarge(JSContext
*cx
, JSCodeGenerator
*cg
)
318 JS_ReportErrorNumber(cx
, js_GetErrorMessage
, NULL
, JSMSG_NEED_DIET
,
323 Span-dependent instructions in JS bytecode consist of the jump (JOF_JUMP)
324 and switch (JOF_LOOKUPSWITCH, JOF_TABLESWITCH) format opcodes, subdivided
325 into unconditional (gotos and gosubs), and conditional jumps or branches
326 (which pop a value, test it, and jump depending on its value). Most jumps
327 have just one immediate operand, a signed offset from the jump opcode's pc
328 to the target bytecode. The lookup and table switch opcodes may contain
331 Mozilla bug #80981 (http://bugzilla.mozilla.org/show_bug.cgi?id=80981) was
332 fixed by adding extended "X" counterparts to the opcodes/formats (NB: X is
333 suffixed to prefer JSOP_ORX thereby avoiding a JSOP_XOR name collision for
334 the extended form of the JSOP_OR branch opcode). The unextended or short
335 formats have 16-bit signed immediate offset operands, the extended or long
336 formats have 32-bit signed immediates. The span-dependency problem consists
337 of selecting as few long instructions as possible, or about as few -- since
338 jumps can span other jumps, extending one jump may cause another to need to
341 Most JS scripts are short, so need no extended jumps. We optimize for this
342 case by generating short jumps until we know a long jump is needed. After
343 that point, we keep generating short jumps, but each jump's 16-bit immediate
344 offset operand is actually an unsigned index into cg->spanDeps, an array of
345 JSSpanDep structs. Each struct tells the top offset in the script of the
346 opcode, the "before" offset of the jump (which will be the same as top for
347 simplex jumps, but which will index further into the bytecode array for a
348 non-initial jump offset in a lookup or table switch), the after "offset"
349 adjusted during span-dependent instruction selection (initially the same
350 value as the "before" offset), and the jump target (more below).
352 Since we generate cg->spanDeps lazily, from within js_SetJumpOffset, we must
353 ensure that all bytecode generated so far can be inspected to discover where
354 the jump offset immediate operands lie within CG_CODE(cg). But the bonus is
355 that we generate span-dependency records sorted by their offsets, so we can
356 binary-search when trying to find a JSSpanDep for a given bytecode offset,
357 or the nearest JSSpanDep at or above a given pc.
359 To avoid limiting scripts to 64K jumps, if the cg->spanDeps index overflows
360 65534, we store SPANDEP_INDEX_HUGE in the jump's immediate operand. This
361 tells us that we need to binary-search for the cg->spanDeps entry by the
362 jump opcode's bytecode offset (sd->before).
364 Jump targets need to be maintained in a data structure that lets us look
365 up an already-known target by its address (jumps may have a common target),
366 and that also lets us update the addresses (script-relative, a.k.a. absolute
367 offsets) of targets that come after a jump target (for when a jump below
368 that target needs to be extended). We use an AVL tree, implemented using
369 recursion, but with some tricky optimizations to its height-balancing code
370 (see http://www.cmcrossroads.com/bradapp/ftp/src/libs/C++/AvlTrees.html).
372 A final wrinkle: backpatch chains are linked by jump-to-jump offsets with
373 positive sign, even though they link "backward" (i.e., toward lower bytecode
374 address). We don't want to waste space and search time in the AVL tree for
375 such temporary backpatch deltas, so we use a single-bit wildcard scheme to
376 tag true JSJumpTarget pointers and encode untagged, signed (positive) deltas
377 in JSSpanDep.target pointers, depending on whether the JSSpanDep has a known
378 target, or is still awaiting backpatching.
380 Note that backpatch chains would present a problem for BuildSpanDepTable,
381 which inspects bytecode to build cg->spanDeps on demand, when the first
382 short jump offset overflows. To solve this temporary problem, we emit a
383 proxy bytecode (JSOP_BACKPATCH; JSOP_BACKPATCH_POP for branch ops) whose
384 nuses/ndefs counts help keep the stack balanced, but whose opcode format
385 distinguishes its backpatch delta immediate operand from a normal jump
389 BalanceJumpTargets(JSJumpTarget
**jtp
)
391 JSJumpTarget
*jt
, *jt2
, *root
;
392 int dir
, otherDir
, heightChanged
;
396 JS_ASSERT(jt
->balance
!= 0);
398 if (jt
->balance
< -1) {
400 doubleRotate
= (jt
->kids
[JT_LEFT
]->balance
> 0);
401 } else if (jt
->balance
> 1) {
403 doubleRotate
= (jt
->kids
[JT_RIGHT
]->balance
< 0);
408 otherDir
= JT_OTHER_DIR(dir
);
410 jt2
= jt
->kids
[otherDir
];
411 *jtp
= root
= jt2
->kids
[dir
];
413 jt
->kids
[otherDir
] = root
->kids
[dir
];
414 root
->kids
[dir
] = jt
;
416 jt2
->kids
[dir
] = root
->kids
[otherDir
];
417 root
->kids
[otherDir
] = jt2
;
420 root
->kids
[JT_LEFT
]->balance
= -JS_MAX(root
->balance
, 0);
421 root
->kids
[JT_RIGHT
]->balance
= -JS_MIN(root
->balance
, 0);
424 *jtp
= root
= jt
->kids
[otherDir
];
425 jt
->kids
[otherDir
] = root
->kids
[dir
];
426 root
->kids
[dir
] = jt
;
428 heightChanged
= (root
->balance
!= 0);
429 jt
->balance
= -((dir
== JT_LEFT
) ? --root
->balance
: ++root
->balance
);
432 return heightChanged
;
435 typedef struct AddJumpTargetArgs
{
443 AddJumpTarget(AddJumpTargetArgs
*args
, JSJumpTarget
**jtp
)
450 JSCodeGenerator
*cg
= args
->cg
;
454 cg
->jtFreeList
= jt
->kids
[JT_LEFT
];
456 JS_ARENA_ALLOCATE_CAST(jt
, JSJumpTarget
*, &args
->cx
->tempPool
,
459 js_ReportOutOfScriptQuota(args
->cx
);
463 jt
->offset
= args
->offset
;
465 jt
->kids
[JT_LEFT
] = jt
->kids
[JT_RIGHT
] = NULL
;
466 cg
->numJumpTargets
++;
472 if (jt
->offset
== args
->offset
) {
477 if (args
->offset
< jt
->offset
)
478 balanceDelta
= -AddJumpTarget(args
, &jt
->kids
[JT_LEFT
]);
480 balanceDelta
= AddJumpTarget(args
, &jt
->kids
[JT_RIGHT
]);
484 jt
->balance
+= balanceDelta
;
485 return (balanceDelta
&& jt
->balance
)
486 ? 1 - BalanceJumpTargets(jtp
)
491 static int AVLCheck(JSJumpTarget
*jt
)
496 JS_ASSERT(-1 <= jt
->balance
&& jt
->balance
<= 1);
497 lh
= AVLCheck(jt
->kids
[JT_LEFT
]);
498 rh
= AVLCheck(jt
->kids
[JT_RIGHT
]);
499 JS_ASSERT(jt
->balance
== rh
- lh
);
500 return 1 + JS_MAX(lh
, rh
);
505 SetSpanDepTarget(JSContext
*cx
, JSCodeGenerator
*cg
, JSSpanDep
*sd
,
508 AddJumpTargetArgs args
;
510 if (off
< JUMPX_OFFSET_MIN
|| JUMPX_OFFSET_MAX
< off
) {
511 ReportStatementTooLarge(cx
, cg
);
517 args
.offset
= sd
->top
+ off
;
519 AddJumpTarget(&args
, &cg
->jumpTargets
);
524 AVLCheck(cg
->jumpTargets
);
527 SD_SET_TARGET(sd
, args
.node
);
531 #define SPANDEPS_MIN 256
532 #define SPANDEPS_SIZE(n) ((n) * sizeof(JSSpanDep))
533 #define SPANDEPS_SIZE_MIN SPANDEPS_SIZE(SPANDEPS_MIN)
536 AddSpanDep(JSContext
*cx
, JSCodeGenerator
*cg
, jsbytecode
*pc
, jsbytecode
*pc2
,
540 JSSpanDep
*sdbase
, *sd
;
543 index
= cg
->numSpanDeps
;
544 if (index
+ 1 == 0) {
545 ReportStatementTooLarge(cx
, cg
);
549 if ((index
& (index
- 1)) == 0 &&
550 (!(sdbase
= cg
->spanDeps
) || index
>= SPANDEPS_MIN
)) {
551 size
= sdbase
? SPANDEPS_SIZE(index
) : SPANDEPS_SIZE_MIN
/ 2;
552 sdbase
= (JSSpanDep
*) cx
->realloc(sdbase
, size
+ size
);
555 cg
->spanDeps
= sdbase
;
558 cg
->numSpanDeps
= index
+ 1;
559 sd
= cg
->spanDeps
+ index
;
560 sd
->top
= pc
- CG_BASE(cg
);
561 sd
->offset
= sd
->before
= pc2
- CG_BASE(cg
);
563 if (js_CodeSpec
[*pc
].format
& JOF_BACKPATCH
) {
564 /* Jump offset will be backpatched if off is a non-zero "bpdelta". */
566 JS_ASSERT(off
>= 1 + JUMP_OFFSET_LEN
);
567 if (off
> BPDELTA_MAX
) {
568 ReportStatementTooLarge(cx
, cg
);
572 SD_SET_BPDELTA(sd
, off
);
573 } else if (off
== 0) {
574 /* Jump offset will be patched directly, without backpatch chaining. */
575 SD_SET_TARGET(sd
, 0);
577 /* The jump offset in off is non-zero, therefore it's already known. */
578 if (!SetSpanDepTarget(cx
, cg
, sd
, off
))
582 if (index
> SPANDEP_INDEX_MAX
)
583 index
= SPANDEP_INDEX_HUGE
;
584 SET_SPANDEP_INDEX(pc2
, index
);
589 AddSwitchSpanDeps(JSContext
*cx
, JSCodeGenerator
*cg
, jsbytecode
*pc
)
595 uintN njumps
, indexlen
;
598 JS_ASSERT(op
== JSOP_TABLESWITCH
|| op
== JSOP_LOOKUPSWITCH
);
600 off
= GET_JUMP_OFFSET(pc2
);
601 if (!AddSpanDep(cx
, cg
, pc
, pc2
, off
))
603 pc2
+= JUMP_OFFSET_LEN
;
604 if (op
== JSOP_TABLESWITCH
) {
605 low
= GET_JUMP_OFFSET(pc2
);
606 pc2
+= JUMP_OFFSET_LEN
;
607 high
= GET_JUMP_OFFSET(pc2
);
608 pc2
+= JUMP_OFFSET_LEN
;
609 njumps
= (uintN
) (high
- low
+ 1);
612 njumps
= GET_UINT16(pc2
);
614 indexlen
= INDEX_LEN
;
619 off
= GET_JUMP_OFFSET(pc2
);
620 if (!AddSpanDep(cx
, cg
, pc
, pc2
, off
))
622 pc2
+= JUMP_OFFSET_LEN
;
628 BuildSpanDepTable(JSContext
*cx
, JSCodeGenerator
*cg
)
630 jsbytecode
*pc
, *end
;
632 const JSCodeSpec
*cs
;
635 pc
= CG_BASE(cg
) + cg
->spanDepTodo
;
640 cs
= &js_CodeSpec
[op
];
642 switch (JOF_TYPE(cs
->format
)) {
643 case JOF_TABLESWITCH
:
644 case JOF_LOOKUPSWITCH
:
645 pc
= AddSwitchSpanDeps(cx
, cg
, pc
);
651 off
= GET_JUMP_OFFSET(pc
);
652 if (!AddSpanDep(cx
, cg
, pc
, pc
, off
))
665 GetSpanDep(JSCodeGenerator
*cg
, jsbytecode
*pc
)
672 index
= GET_SPANDEP_INDEX(pc
);
673 if (index
!= SPANDEP_INDEX_HUGE
)
674 return cg
->spanDeps
+ index
;
676 offset
= pc
- CG_BASE(cg
);
678 hi
= cg
->numSpanDeps
- 1;
681 sd
= cg
->spanDeps
+ mid
;
682 if (sd
->before
== offset
)
684 if (sd
->before
< offset
)
695 SetBackPatchDelta(JSContext
*cx
, JSCodeGenerator
*cg
, jsbytecode
*pc
,
700 JS_ASSERT(delta
>= 1 + JUMP_OFFSET_LEN
);
701 if (!cg
->spanDeps
&& delta
< JUMP_OFFSET_MAX
) {
702 SET_JUMP_OFFSET(pc
, delta
);
706 if (delta
> BPDELTA_MAX
) {
707 ReportStatementTooLarge(cx
, cg
);
711 if (!cg
->spanDeps
&& !BuildSpanDepTable(cx
, cg
))
714 sd
= GetSpanDep(cg
, pc
);
715 JS_ASSERT(SD_GET_BPDELTA(sd
) == 0);
716 SD_SET_BPDELTA(sd
, delta
);
721 UpdateJumpTargets(JSJumpTarget
*jt
, ptrdiff_t pivot
, ptrdiff_t delta
)
723 if (jt
->offset
> pivot
) {
725 if (jt
->kids
[JT_LEFT
])
726 UpdateJumpTargets(jt
->kids
[JT_LEFT
], pivot
, delta
);
728 if (jt
->kids
[JT_RIGHT
])
729 UpdateJumpTargets(jt
->kids
[JT_RIGHT
], pivot
, delta
);
733 FindNearestSpanDep(JSCodeGenerator
*cg
, ptrdiff_t offset
, int lo
,
737 JSSpanDep
*sdbase
, *sd
;
739 num
= cg
->numSpanDeps
;
742 sdbase
= cg
->spanDeps
;
746 if (sd
->before
== offset
)
748 if (sd
->before
< offset
)
756 JS_ASSERT(sd
->before
>= offset
&& (lo
== 0 || sd
[-1].before
< offset
));
761 FreeJumpTargets(JSCodeGenerator
*cg
, JSJumpTarget
*jt
)
763 if (jt
->kids
[JT_LEFT
])
764 FreeJumpTargets(cg
, jt
->kids
[JT_LEFT
]);
765 if (jt
->kids
[JT_RIGHT
])
766 FreeJumpTargets(cg
, jt
->kids
[JT_RIGHT
]);
767 jt
->kids
[JT_LEFT
] = cg
->jtFreeList
;
772 OptimizeSpanDeps(JSContext
*cx
, JSCodeGenerator
*cg
)
774 jsbytecode
*pc
, *oldpc
, *base
, *limit
, *next
;
775 JSSpanDep
*sd
, *sd2
, *sdbase
, *sdlimit
, *sdtop
, guard
;
776 ptrdiff_t offset
, growth
, delta
, top
, pivot
, span
, length
, target
;
781 jssrcnote
*sn
, *snlimit
;
783 uintN i
, n
, noteIndex
;
790 sdbase
= cg
->spanDeps
;
791 sdlimit
= sdbase
+ cg
->numSpanDeps
;
792 offset
= CG_OFFSET(cg
);
807 for (sd
= sdbase
; sd
< sdlimit
; sd
++) {
808 JS_ASSERT(JT_HAS_TAG(sd
->target
));
811 if (sd
->top
!= top
) {
814 JS_ASSERT(top
== sd
->before
);
818 type
= JOF_OPTYPE(op
);
819 if (JOF_TYPE_IS_EXTENDED_JUMP(type
)) {
821 * We already extended all the jump offset operands for
822 * the opcode at sd->top. Jumps and branches have only
823 * one jump offset operand, but switches have many, all
824 * of which are adjacent in cg->spanDeps.
829 JS_ASSERT(type
== JOF_JUMP
||
830 type
== JOF_TABLESWITCH
||
831 type
== JOF_LOOKUPSWITCH
);
834 if (!JOF_TYPE_IS_EXTENDED_JUMP(type
)) {
835 span
= SD_SPAN(sd
, pivot
);
836 if (span
< JUMP_OFFSET_MIN
|| JUMP_OFFSET_MAX
< span
) {
837 ptrdiff_t deltaFromTop
= 0;
842 case JSOP_GOTO
: op
= JSOP_GOTOX
; break;
843 case JSOP_IFEQ
: op
= JSOP_IFEQX
; break;
844 case JSOP_IFNE
: op
= JSOP_IFNEX
; break;
845 case JSOP_OR
: op
= JSOP_ORX
; break;
846 case JSOP_AND
: op
= JSOP_ANDX
; break;
847 case JSOP_GOSUB
: op
= JSOP_GOSUBX
; break;
848 case JSOP_CASE
: op
= JSOP_CASEX
; break;
849 case JSOP_DEFAULT
: op
= JSOP_DEFAULTX
; break;
850 case JSOP_TABLESWITCH
: op
= JSOP_TABLESWITCHX
; break;
851 case JSOP_LOOKUPSWITCH
: op
= JSOP_LOOKUPSWITCHX
; break;
853 ReportStatementTooLarge(cx
, cg
);
856 *pc
= (jsbytecode
) op
;
858 for (sd2
= sdtop
; sd2
< sdlimit
&& sd2
->top
== top
; sd2
++) {
861 * sd2->offset already includes delta as it stood
862 * before we entered this loop, but it must also
863 * include the delta relative to top due to all the
864 * extended jump offset immediates for the opcode
865 * starting at top, which we extend in this loop.
867 * If there is only one extended jump offset, then
868 * sd2->offset won't change and this for loop will
871 sd2
->offset
+= deltaFromTop
;
872 deltaFromTop
+= JUMPX_OFFSET_LEN
- JUMP_OFFSET_LEN
;
875 * sd2 comes after sd, and won't be revisited by
876 * the outer for loop, so we have to increase its
877 * offset by delta, not merely by deltaFromTop.
879 sd2
->offset
+= delta
;
882 delta
+= JUMPX_OFFSET_LEN
- JUMP_OFFSET_LEN
;
883 UpdateJumpTargets(cg
->jumpTargets
, sd2
->offset
,
884 JUMPX_OFFSET_LEN
- JUMP_OFFSET_LEN
);
896 JSTokenStream
*ts
= &cg
->compiler
->tokenStream
;
898 printf("%s:%u: %u/%u jumps extended in %d passes (%d=%d+%d)\n",
899 ts
->filename
? ts
->filename
: "stdin", cg
->firstLine
,
900 growth
/ (JUMPX_OFFSET_LEN
- JUMP_OFFSET_LEN
), cg
->numSpanDeps
,
901 passes
, offset
+ growth
, offset
, growth
);
905 * Ensure that we have room for the extended jumps, but don't round up
906 * to a power of two -- we're done generating code, so we cut to fit.
908 limit
= CG_LIMIT(cg
);
909 length
= offset
+ growth
;
910 next
= base
+ length
;
912 JS_ASSERT(length
> BYTECODE_CHUNK
);
913 size
= BYTECODE_SIZE(limit
- base
);
914 incr
= BYTECODE_SIZE(length
) - size
;
915 JS_ARENA_GROW_CAST(base
, jsbytecode
*, cg
->codePool
, size
, incr
);
917 js_ReportOutOfScriptQuota(cx
);
921 CG_LIMIT(cg
) = next
= base
+ length
;
926 * Set up a fake span dependency record to guard the end of the code
927 * being generated. This guard record is returned as a fencepost by
928 * FindNearestSpanDep if there is no real spandep at or above a given
929 * unextended code offset.
932 guard
.offset
= offset
+ growth
;
933 guard
.before
= offset
;
938 * Now work backwards through the span dependencies, copying chunks of
939 * bytecode between each extended jump toward the end of the grown code
940 * space, and restoring immediate offset operands for all jump bytecodes.
941 * The first chunk of bytecodes, starting at base and ending at the first
942 * extended jump offset (NB: this chunk includes the operation bytecode
943 * just before that immediate jump offset), doesn't need to be copied.
945 JS_ASSERT(sd
== sdlimit
);
947 while (--sd
>= sdbase
) {
948 if (sd
->top
!= top
) {
950 op
= (JSOp
) base
[top
];
951 type
= JOF_OPTYPE(op
);
953 for (sd2
= sd
- 1; sd2
>= sdbase
&& sd2
->top
== top
; sd2
--)
957 JS_ASSERT(top
== sd2
->before
);
960 oldpc
= base
+ sd
->before
;
961 span
= SD_SPAN(sd
, pivot
);
964 * If this jump didn't need to be extended, restore its span immediate
965 * offset operand now, overwriting the index of sd within cg->spanDeps
966 * that was stored temporarily after *pc when BuildSpanDepTable ran.
968 * Note that span might fit in 16 bits even for an extended jump op,
969 * if the op has multiple span operands, not all of which overflowed
970 * (e.g. JSOP_LOOKUPSWITCH or JSOP_TABLESWITCH where some cases are in
971 * range for a short jump, but others are not).
973 if (!JOF_TYPE_IS_EXTENDED_JUMP(type
)) {
974 JS_ASSERT(JUMP_OFFSET_MIN
<= span
&& span
<= JUMP_OFFSET_MAX
);
975 SET_JUMP_OFFSET(oldpc
, span
);
980 * Set up parameters needed to copy the next run of bytecode starting
981 * at offset (which is a cursor into the unextended, original bytecode
982 * vector), down to sd->before (a cursor of the same scale as offset,
983 * it's the index of the original jump pc). Reuse delta to count the
984 * nominal number of bytes to copy.
986 pc
= base
+ sd
->offset
;
987 delta
= offset
- sd
->before
;
988 JS_ASSERT(delta
>= 1 + JUMP_OFFSET_LEN
);
991 * Don't bother copying the jump offset we're about to reset, but do
992 * copy the bytecode at oldpc (which comes just before its immediate
993 * jump offset operand), on the next iteration through the loop, by
994 * including it in offset's new value.
996 offset
= sd
->before
+ 1;
997 size
= BYTECODE_SIZE(delta
- (1 + JUMP_OFFSET_LEN
));
999 memmove(pc
+ 1 + JUMPX_OFFSET_LEN
,
1000 oldpc
+ 1 + JUMP_OFFSET_LEN
,
1004 SET_JUMPX_OFFSET(pc
, span
);
1009 * Fix source note deltas. Don't hardwire the delta fixup adjustment,
1010 * even though currently it must be JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN
1011 * at each sd that moved. The future may bring different offset sizes
1012 * for span-dependent instruction operands. However, we fix only main
1013 * notes here, not prolog notes -- we know that prolog opcodes are not
1014 * span-dependent, and aren't likely ever to be.
1016 offset
= growth
= 0;
1018 for (sn
= cg
->main
.notes
, snlimit
= sn
+ cg
->main
.noteCount
;
1022 * Recall that the offset of a given note includes its delta, and
1023 * tells the offset of the annotated bytecode from the main entry
1024 * point of the script.
1026 offset
+= SN_DELTA(sn
);
1027 while (sd
< sdlimit
&& sd
->before
< offset
) {
1029 * To compute the delta to add to sn, we need to look at the
1030 * spandep after sd, whose offset - (before + growth) tells by
1031 * how many bytes sd's instruction grew.
1036 delta
= sd2
->offset
- (sd2
->before
+ growth
);
1038 JS_ASSERT(delta
== JUMPX_OFFSET_LEN
- JUMP_OFFSET_LEN
);
1039 sn
= js_AddToSrcNoteDelta(cx
, cg
, sn
, delta
);
1042 snlimit
= cg
->main
.notes
+ cg
->main
.noteCount
;
1049 * If sn has span-dependent offset operands, check whether each
1050 * covers further span-dependencies, and increase those operands
1051 * accordingly. Some source notes measure offset not from the
1052 * annotated pc, but from that pc plus some small bias. NB: we
1053 * assume that spec->offsetBias can't itself span span-dependent
1056 spec
= &js_SrcNoteSpec
[SN_TYPE(sn
)];
1057 if (spec
->isSpanDep
) {
1058 pivot
= offset
+ spec
->offsetBias
;
1060 for (i
= 0; i
< n
; i
++) {
1061 span
= js_GetSrcNoteOffset(sn
, i
);
1064 target
= pivot
+ span
* spec
->isSpanDep
;
1065 sd2
= FindNearestSpanDep(cg
, target
,
1072 * Increase target by sd2's before-vs-after offset delta,
1073 * which is absolute (i.e., relative to start of script,
1074 * as is target). Recompute the span by subtracting its
1075 * adjusted pivot from target.
1077 target
+= sd2
->offset
- sd2
->before
;
1078 span
= target
- (pivot
+ growth
);
1079 span
*= spec
->isSpanDep
;
1080 noteIndex
= sn
- cg
->main
.notes
;
1081 if (!js_SetSrcNoteOffset(cx
, cg
, noteIndex
, i
, span
))
1083 sn
= cg
->main
.notes
+ noteIndex
;
1084 snlimit
= cg
->main
.notes
+ cg
->main
.noteCount
;
1088 cg
->main
.lastNoteOffset
+= growth
;
1091 * Fix try/catch notes (O(numTryNotes * log2(numSpanDeps)), but it's
1092 * not clear how we can beat that).
1094 for (tryNode
= cg
->lastTryNode
; tryNode
; tryNode
= tryNode
->prev
) {
1096 * First, look for the nearest span dependency at/above tn->start.
1097 * There may not be any such spandep, in which case the guard will
1100 offset
= tryNode
->note
.start
;
1101 sd
= FindNearestSpanDep(cg
, offset
, 0, &guard
);
1102 delta
= sd
->offset
- sd
->before
;
1103 tryNode
->note
.start
= offset
+ delta
;
1106 * Next, find the nearest spandep at/above tn->start + tn->length.
1107 * Use its delta minus tn->start's delta to increase tn->length.
1109 length
= tryNode
->note
.length
;
1110 sd2
= FindNearestSpanDep(cg
, offset
+ length
, sd
- sdbase
, &guard
);
1112 tryNode
->note
.length
=
1113 length
+ sd2
->offset
- sd2
->before
- delta
;
1118 #ifdef DEBUG_brendan
1122 for (sd
= sdbase
; sd
< sdlimit
; sd
++) {
1123 offset
= sd
->offset
;
1125 /* NB: sd->top cursors into the original, unextended bytecode vector. */
1126 if (sd
->top
!= top
) {
1127 JS_ASSERT(top
== -1 ||
1128 !JOF_TYPE_IS_EXTENDED_JUMP(type
) ||
1132 JS_ASSERT(top
== sd
->before
);
1133 op
= (JSOp
) base
[offset
];
1134 type
= JOF_OPTYPE(op
);
1135 JS_ASSERT(type
== JOF_JUMP
||
1136 type
== JOF_JUMPX
||
1137 type
== JOF_TABLESWITCH
||
1138 type
== JOF_TABLESWITCHX
||
1139 type
== JOF_LOOKUPSWITCH
||
1140 type
== JOF_LOOKUPSWITCHX
);
1145 if (JOF_TYPE_IS_EXTENDED_JUMP(type
)) {
1146 span
= GET_JUMPX_OFFSET(pc
);
1147 if (span
< JUMP_OFFSET_MIN
|| JUMP_OFFSET_MAX
< span
) {
1150 JS_ASSERT(type
== JOF_TABLESWITCHX
||
1151 type
== JOF_LOOKUPSWITCHX
);
1154 span
= GET_JUMP_OFFSET(pc
);
1156 JS_ASSERT(SD_SPAN(sd
, pivot
) == span
);
1158 JS_ASSERT(!JOF_TYPE_IS_EXTENDED_JUMP(type
) || bigspans
!= 0);
1163 * Reset so we optimize at most once -- cg may be used for further code
1164 * generation of successive, independent, top-level statements. No jump
1165 * can span top-level statements, because JS lacks goto.
1167 size
= SPANDEPS_SIZE(JS_BIT(JS_CeilingLog2(cg
->numSpanDeps
)));
1168 cx
->free(cg
->spanDeps
);
1169 cg
->spanDeps
= NULL
;
1170 FreeJumpTargets(cg
, cg
->jumpTargets
);
1171 cg
->jumpTargets
= NULL
;
1172 cg
->numSpanDeps
= cg
->numJumpTargets
= 0;
1173 cg
->spanDepTodo
= CG_OFFSET(cg
);
1178 EmitJump(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp op
, ptrdiff_t off
)
1184 extend
= off
< JUMP_OFFSET_MIN
|| JUMP_OFFSET_MAX
< off
;
1185 if (extend
&& !cg
->spanDeps
&& !BuildSpanDepTable(cx
, cg
))
1188 jmp
= js_Emit3(cx
, cg
, op
, JUMP_OFFSET_HI(off
), JUMP_OFFSET_LO(off
));
1189 if (jmp
>= 0 && (extend
|| cg
->spanDeps
)) {
1190 pc
= CG_CODE(cg
, jmp
);
1191 if (!AddSpanDep(cx
, cg
, pc
, pc
, off
))
1198 GetJumpOffset(JSCodeGenerator
*cg
, jsbytecode
*pc
)
1205 return GET_JUMP_OFFSET(pc
);
1207 sd
= GetSpanDep(cg
, pc
);
1209 if (!JT_HAS_TAG(jt
))
1210 return JT_TO_BPDELTA(jt
);
1213 while (--sd
>= cg
->spanDeps
&& sd
->top
== top
)
1216 return JT_CLR_TAG(jt
)->offset
- sd
->offset
;
1220 js_SetJumpOffset(JSContext
*cx
, JSCodeGenerator
*cg
, jsbytecode
*pc
,
1223 if (!cg
->spanDeps
) {
1224 if (JUMP_OFFSET_MIN
<= off
&& off
<= JUMP_OFFSET_MAX
) {
1225 SET_JUMP_OFFSET(pc
, off
);
1229 if (!BuildSpanDepTable(cx
, cg
))
1233 return SetSpanDepTarget(cx
, cg
, GetSpanDep(cg
, pc
), off
);
1237 JSTreeContext::inStatement(JSStmtType type
)
1239 for (JSStmtInfo
*stmt
= topStmt
; stmt
; stmt
= stmt
->down
) {
1240 if (stmt
->type
== type
)
1247 JSTreeContext::ensureSharpSlots()
1249 #if JS_HAS_SHARP_VARS
1250 JS_STATIC_ASSERT(SHARP_NSLOTS
== 2);
1252 if (sharpSlotBase
>= 0) {
1253 JS_ASSERT(flags
& TCF_HAS_SHARPS
);
1257 JS_ASSERT(!(flags
& TCF_HAS_SHARPS
));
1258 if (flags
& TCF_IN_FUNCTION
) {
1259 JSContext
*cx
= compiler
->context
;
1260 JSAtom
*sharpArrayAtom
= js_Atomize(cx
, "#array", 6, 0);
1261 JSAtom
*sharpDepthAtom
= js_Atomize(cx
, "#depth", 6, 0);
1262 if (!sharpArrayAtom
|| !sharpDepthAtom
)
1265 sharpSlotBase
= fun
->u
.i
.nvars
;
1266 if (!js_AddLocal(cx
, fun
, sharpArrayAtom
, JSLOCAL_VAR
))
1268 if (!js_AddLocal(cx
, fun
, sharpDepthAtom
, JSLOCAL_VAR
))
1272 * JSCompiler::compileScript will rebase immediate operands indexing
1273 * the sharp slots to come at the end of the global script's |nfixed|
1274 * slots storage, after gvars and regexps.
1278 flags
|= TCF_HAS_SHARPS
;
1284 js_PushStatement(JSTreeContext
*tc
, JSStmtInfo
*stmt
, JSStmtType type
,
1289 stmt
->blockid
= tc
->blockid();
1290 SET_STATEMENT_TOP(stmt
, top
);
1292 JS_ASSERT(!stmt
->blockObj
);
1293 stmt
->down
= tc
->topStmt
;
1295 if (STMT_LINKS_SCOPE(stmt
)) {
1296 stmt
->downScope
= tc
->topScopeStmt
;
1297 tc
->topScopeStmt
= stmt
;
1299 stmt
->downScope
= NULL
;
1304 js_PushBlockScope(JSTreeContext
*tc
, JSStmtInfo
*stmt
, JSObject
*blockObj
,
1307 js_PushStatement(tc
, stmt
, STMT_BLOCK
, top
);
1308 stmt
->flags
|= SIF_SCOPE
;
1309 STOBJ_SET_PARENT(blockObj
, tc
->blockChain
);
1310 stmt
->downScope
= tc
->topScopeStmt
;
1311 tc
->topScopeStmt
= stmt
;
1312 tc
->blockChain
= blockObj
;
1313 stmt
->blockObj
= blockObj
;
1317 * Emit a backpatch op with offset pointing to the previous jump of this type,
1318 * so that we can walk back up the chain fixing up the op and jump offset.
1321 EmitBackPatchOp(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp op
, ptrdiff_t *lastp
)
1323 ptrdiff_t offset
, delta
;
1325 offset
= CG_OFFSET(cg
);
1326 delta
= offset
- *lastp
;
1328 JS_ASSERT(delta
> 0);
1329 return EmitJump(cx
, cg
, op
, delta
);
1333 * Macro to emit a bytecode followed by a uint16 immediate operand stored in
1334 * big-endian order, used for arg and var numbers as well as for atomIndexes.
1335 * NB: We use cx and cg from our caller's lexical environment, and return
1338 #define EMIT_UINT16_IMM_OP(op, i) \
1340 if (js_Emit3(cx, cg, op, UINT16_HI(i), UINT16_LO(i)) < 0) \
1344 #define EMIT_UINT16PAIR_IMM_OP(op, i, j) \
1346 ptrdiff_t off_ = js_EmitN(cx, cg, op, 2 * UINT16_LEN); \
1349 jsbytecode *pc_ = CG_CODE(cg, off_); \
1350 SET_UINT16(pc_, i); \
1351 pc_ += UINT16_LEN; \
1352 SET_UINT16(pc_, j); \
1356 FlushPops(JSContext
*cx
, JSCodeGenerator
*cg
, intN
*npops
)
1358 JS_ASSERT(*npops
!= 0);
1359 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0)
1361 EMIT_UINT16_IMM_OP(JSOP_POPN
, *npops
);
1367 * Emit additional bytecode(s) for non-local jumps.
1370 EmitNonLocalJumpFixup(JSContext
*cx
, JSCodeGenerator
*cg
, JSStmtInfo
*toStmt
)
1376 * The non-local jump fixup we emit will unbalance cg->stackDepth, because
1377 * the fixup replicates balanced code such as JSOP_LEAVEWITH emitted at the
1378 * end of a with statement, so we save cg->stackDepth here and restore it
1379 * just before a successful return.
1381 depth
= cg
->stackDepth
;
1384 #define FLUSH_POPS() if (npops && !FlushPops(cx, cg, &npops)) return JS_FALSE
1386 for (stmt
= cg
->topStmt
; stmt
!= toStmt
; stmt
= stmt
->down
) {
1387 switch (stmt
->type
) {
1390 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0)
1392 if (EmitBackPatchOp(cx
, cg
, JSOP_BACKPATCH
, &GOSUBS(*stmt
)) < 0)
1397 /* There's a With object on the stack that we need to pop. */
1399 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0)
1401 if (js_Emit1(cx
, cg
, JSOP_LEAVEWITH
) < 0)
1405 case STMT_FOR_IN_LOOP
:
1407 * The iterator and the object being iterated need to be popped.
1410 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0)
1412 if (js_Emit1(cx
, cg
, JSOP_ENDITER
) < 0)
1416 case STMT_SUBROUTINE
:
1418 * There's a [exception or hole, retsub pc-index] pair on the
1419 * stack that we need to pop.
1427 if (stmt
->flags
& SIF_SCOPE
) {
1430 /* There is a Block object with locals on the stack to pop. */
1432 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0)
1434 i
= OBJ_BLOCK_COUNT(cx
, stmt
->blockObj
);
1435 EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK
, i
);
1440 cg
->stackDepth
= depth
;
1447 EmitGoto(JSContext
*cx
, JSCodeGenerator
*cg
, JSStmtInfo
*toStmt
,
1448 ptrdiff_t *lastp
, JSAtomListElement
*label
, JSSrcNoteType noteType
)
1452 if (!EmitNonLocalJumpFixup(cx
, cg
, toStmt
))
1456 index
= js_NewSrcNote2(cx
, cg
, noteType
, (ptrdiff_t) ALE_INDEX(label
));
1457 else if (noteType
!= SRC_NULL
)
1458 index
= js_NewSrcNote(cx
, cg
, noteType
);
1464 return EmitBackPatchOp(cx
, cg
, JSOP_BACKPATCH
, lastp
);
1468 BackPatch(JSContext
*cx
, JSCodeGenerator
*cg
, ptrdiff_t last
,
1469 jsbytecode
*target
, jsbytecode op
)
1471 jsbytecode
*pc
, *stop
;
1472 ptrdiff_t delta
, span
;
1474 pc
= CG_CODE(cg
, last
);
1475 stop
= CG_CODE(cg
, -1);
1476 while (pc
!= stop
) {
1477 delta
= GetJumpOffset(cg
, pc
);
1479 CHECK_AND_SET_JUMP_OFFSET(cx
, cg
, pc
, span
);
1482 * Set *pc after jump offset in case bpdelta didn't overflow, but span
1483 * does (if so, CHECK_AND_SET_JUMP_OFFSET might call BuildSpanDepTable
1484 * and need to see the JSOP_BACKPATCH* op at *pc).
1493 js_PopStatement(JSTreeContext
*tc
)
1498 tc
->topStmt
= stmt
->down
;
1499 if (STMT_LINKS_SCOPE(stmt
)) {
1500 tc
->topScopeStmt
= stmt
->downScope
;
1501 if (stmt
->flags
& SIF_SCOPE
) {
1502 tc
->blockChain
= STOBJ_GET_PARENT(stmt
->blockObj
);
1503 JS_SCOPE_DEPTH_METERING(--tc
->scopeDepth
);
1509 js_PopStatementCG(JSContext
*cx
, JSCodeGenerator
*cg
)
1514 if (!STMT_IS_TRYING(stmt
) &&
1515 (!BackPatch(cx
, cg
, stmt
->breaks
, CG_NEXT(cg
), JSOP_GOTO
) ||
1516 !BackPatch(cx
, cg
, stmt
->continues
, CG_CODE(cg
, stmt
->update
),
1520 js_PopStatement(cg
);
1525 js_DefineCompileTimeConstant(JSContext
*cx
, JSCodeGenerator
*cg
, JSAtom
*atom
,
1532 JSAtomListElement
*ale
;
1534 /* XXX just do numbers for now */
1535 if (pn
->pn_type
== TOK_NUMBER
) {
1537 if (JSDOUBLE_IS_INT(dval
, ival
) && INT_FITS_IN_JSVAL(ival
)) {
1538 v
= INT_TO_JSVAL(ival
);
1541 * We atomize double to root a jsdouble instance that we wrap as
1542 * jsval and store in cg->constList. This works because atoms are
1543 * protected from GC during compilation.
1545 valueAtom
= js_AtomizeDouble(cx
, dval
);
1548 v
= ATOM_KEY(valueAtom
);
1550 ale
= cg
->constList
.add(cg
->compiler
, atom
);
1553 ALE_SET_VALUE(ale
, v
);
1559 js_LexicalLookup(JSTreeContext
*tc
, JSAtom
*atom
, jsint
*slotp
, JSStmtInfo
*stmt
)
1563 JSScopeProperty
*sprop
;
1566 stmt
= tc
->topScopeStmt
;
1567 for (; stmt
; stmt
= stmt
->downScope
) {
1568 if (stmt
->type
== STMT_WITH
)
1571 /* Skip "maybe scope" statements that don't contain let bindings. */
1572 if (!(stmt
->flags
& SIF_SCOPE
))
1575 obj
= stmt
->blockObj
;
1576 JS_ASSERT(obj
->getClass() == &js_BlockClass
);
1577 scope
= OBJ_SCOPE(obj
);
1578 sprop
= scope
->lookup(ATOM_TO_JSID(atom
));
1580 JS_ASSERT(sprop
->flags
& SPROP_HAS_SHORTID
);
1583 JS_ASSERT(JSVAL_IS_INT(obj
->fslots
[JSSLOT_BLOCK_DEPTH
]));
1584 *slotp
= JSVAL_TO_INT(obj
->fslots
[JSSLOT_BLOCK_DEPTH
]) +
1597 * Check if the attributes describe a property holding a compile-time constant
1598 * or a permanent, read-only property without a getter.
1600 #define IS_CONSTANT_PROPERTY(attrs) \
1601 (((attrs) & (JSPROP_READONLY | JSPROP_PERMANENT | JSPROP_GETTER)) == \
1602 (JSPROP_READONLY | JSPROP_PERMANENT))
1605 * The function sets vp to JSVAL_HOLE when the atom does not corresponds to a
1606 * name defining a constant.
1609 LookupCompileTimeConstant(JSContext
*cx
, JSCodeGenerator
*cg
, JSAtom
*atom
,
1614 JSAtomListElement
*ale
;
1615 JSObject
*obj
, *objbox
;
1620 * Chase down the cg stack, but only until we reach the outermost cg.
1621 * This enables propagating consts from top-level into switch cases in a
1622 * function compiled along with the top-level script.
1626 if (cg
->flags
& (TCF_IN_FUNCTION
| TCF_COMPILE_N_GO
)) {
1627 /* XXX this will need revising if 'const' becomes block-scoped. */
1628 stmt
= js_LexicalLookup(cg
, atom
, NULL
);
1632 ale
= cg
->constList
.lookup(atom
);
1634 JS_ASSERT(ALE_VALUE(ale
) != JSVAL_HOLE
);
1635 *vp
= ALE_VALUE(ale
);
1640 * Try looking in the variable object for a direct property that
1641 * is readonly and permanent. We know such a property can't be
1642 * shadowed by another property on obj's prototype chain, or a
1643 * with object or catch variable; nor can prop's value be changed,
1644 * nor can prop be deleted.
1646 if (cg
->flags
& TCF_IN_FUNCTION
) {
1647 if (js_LookupLocal(cx
, cg
->fun
, atom
, NULL
) != JSLOCAL_NONE
)
1650 JS_ASSERT(cg
->flags
& TCF_COMPILE_N_GO
);
1651 obj
= cg
->scopeChain
;
1652 ok
= obj
->lookupProperty(cx
, ATOM_TO_JSID(atom
), &objbox
, &prop
);
1655 if (objbox
== obj
) {
1657 * We're compiling code that will be executed immediately,
1658 * not re-executed against a different scope chain and/or
1659 * variable object. Therefore we can get constant values
1660 * from our variable object here.
1662 ok
= obj
->getAttributes(cx
, ATOM_TO_JSID(atom
), prop
, &attrs
);
1663 if (ok
&& IS_CONSTANT_PROPERTY(attrs
)) {
1664 ok
= obj
->getProperty(cx
, ATOM_TO_JSID(atom
), vp
);
1665 JS_ASSERT_IF(ok
, *vp
!= JSVAL_HOLE
);
1669 objbox
->dropProperty(cx
, prop
);
1676 } while ((cg
= (JSCodeGenerator
*) cg
->parent
) != NULL
);
1681 * Return JSOP_NOP to indicate that index fits 2 bytes and no index segment
1682 * reset instruction is necessary, JSOP_FALSE to indicate an error or either
1683 * JSOP_RESETBASE0 or JSOP_RESETBASE1 to indicate the reset bytecode to issue
1684 * after the main bytecode sequence.
1687 EmitBigIndexPrefix(JSContext
*cx
, JSCodeGenerator
*cg
, uintN index
)
1692 * We have max 3 bytes for indexes and check for INDEX_LIMIT overflow only
1695 JS_STATIC_ASSERT(INDEX_LIMIT
<= JS_BIT(24));
1696 JS_STATIC_ASSERT(INDEX_LIMIT
>=
1697 (JSOP_INDEXBASE3
- JSOP_INDEXBASE1
+ 2) << 16);
1699 if (index
< JS_BIT(16))
1701 indexBase
= index
>> 16;
1702 if (indexBase
<= JSOP_INDEXBASE3
- JSOP_INDEXBASE1
+ 1) {
1703 if (js_Emit1(cx
, cg
, (JSOp
)(JSOP_INDEXBASE1
+ indexBase
- 1)) < 0)
1705 return JSOP_RESETBASE0
;
1708 if (index
>= INDEX_LIMIT
) {
1709 JS_ReportErrorNumber(cx
, js_GetErrorMessage
, NULL
,
1710 JSMSG_TOO_MANY_LITERALS
);
1714 if (js_Emit2(cx
, cg
, JSOP_INDEXBASE
, (JSOp
)indexBase
) < 0)
1716 return JSOP_RESETBASE
;
1720 * Emit a bytecode and its 2-byte constant index immediate operand. If the
1721 * index requires more than 2 bytes, emit a prefix op whose 8-bit immediate
1722 * operand effectively extends the 16-bit immediate of the prefixed opcode,
1723 * by changing index "segment" (see jsinterp.c). We optimize segments 1-3
1724 * with single-byte JSOP_INDEXBASE[123] codes.
1726 * Such prefixing currently requires a suffix to restore the "zero segment"
1727 * register setting, but this could be optimized further.
1730 EmitIndexOp(JSContext
*cx
, JSOp op
, uintN index
, JSCodeGenerator
*cg
)
1734 bigSuffix
= EmitBigIndexPrefix(cx
, cg
, index
);
1735 if (bigSuffix
== JSOP_FALSE
)
1737 EMIT_UINT16_IMM_OP(op
, index
);
1738 return bigSuffix
== JSOP_NOP
|| js_Emit1(cx
, cg
, bigSuffix
) >= 0;
1742 * Slight sugar for EmitIndexOp, again accessing cx and cg from the macro
1743 * caller's lexical environment, and embedding a false return on error.
1745 #define EMIT_INDEX_OP(op, index) \
1747 if (!EmitIndexOp(cx, op, index, cg)) \
1752 EmitAtomOp(JSContext
*cx
, JSParseNode
*pn
, JSOp op
, JSCodeGenerator
*cg
)
1754 JSAtomListElement
*ale
;
1756 JS_ASSERT(JOF_OPTYPE(op
) == JOF_ATOM
);
1757 if (op
== JSOP_GETPROP
&&
1758 pn
->pn_atom
== cx
->runtime
->atomState
.lengthAtom
) {
1759 return js_Emit1(cx
, cg
, JSOP_LENGTH
) >= 0;
1761 ale
= cg
->atomList
.add(cg
->compiler
, pn
->pn_atom
);
1764 return EmitIndexOp(cx
, op
, ALE_INDEX(ale
), cg
);
1768 EmitObjectOp(JSContext
*cx
, JSObjectBox
*objbox
, JSOp op
,
1769 JSCodeGenerator
*cg
)
1771 JS_ASSERT(JOF_OPTYPE(op
) == JOF_OBJECT
);
1772 return EmitIndexOp(cx
, op
, cg
->objectList
.index(objbox
), cg
);
1776 * What good are ARGNO_LEN and SLOTNO_LEN, you ask? The answer is that, apart
1777 * from EmitSlotIndexOp, they abstract out the detail that both are 2, and in
1778 * other parts of the code there's no necessary relationship between the two.
1779 * The abstraction cracks here in order to share EmitSlotIndexOp code among
1780 * the JSOP_DEFLOCALFUN and JSOP_GET{ARG,VAR,LOCAL}PROP cases.
1782 JS_STATIC_ASSERT(ARGNO_LEN
== 2);
1783 JS_STATIC_ASSERT(SLOTNO_LEN
== 2);
1786 EmitSlotIndexOp(JSContext
*cx
, JSOp op
, uintN slot
, uintN index
,
1787 JSCodeGenerator
*cg
)
1793 JS_ASSERT(JOF_OPTYPE(op
) == JOF_SLOTATOM
||
1794 JOF_OPTYPE(op
) == JOF_SLOTOBJECT
);
1795 bigSuffix
= EmitBigIndexPrefix(cx
, cg
, index
);
1796 if (bigSuffix
== JSOP_FALSE
)
1799 /* Emit [op, slot, index]. */
1800 off
= js_EmitN(cx
, cg
, op
, 2 + INDEX_LEN
);
1803 pc
= CG_CODE(cg
, off
);
1804 SET_UINT16(pc
, slot
);
1806 SET_INDEX(pc
, index
);
1807 return bigSuffix
== JSOP_NOP
|| js_Emit1(cx
, cg
, bigSuffix
) >= 0;
1811 * Adjust the slot for a block local to account for the number of variables
1812 * that share the same index space with locals. Due to the incremental code
1813 * generation for top-level script, we do the adjustment via code patching in
1814 * JSCompiler::compileScript; see comments there.
1816 * The function returns -1 on failures.
1819 AdjustBlockSlot(JSContext
*cx
, JSCodeGenerator
*cg
, jsint slot
)
1821 JS_ASSERT((jsuint
) slot
< cg
->maxStackDepth
);
1822 if (cg
->flags
& TCF_IN_FUNCTION
) {
1823 slot
+= cg
->fun
->u
.i
.nvars
;
1824 if ((uintN
) slot
>= SLOTNO_LIMIT
) {
1825 js_ReportCompileErrorNumber(cx
, CG_TS(cg
), NULL
,
1827 JSMSG_TOO_MANY_LOCALS
);
1835 EmitEnterBlock(JSContext
*cx
, JSParseNode
*pn
, JSCodeGenerator
*cg
)
1837 JS_ASSERT(PN_TYPE(pn
) == TOK_LEXICALSCOPE
);
1838 if (!EmitObjectOp(cx
, pn
->pn_objbox
, JSOP_ENTERBLOCK
, cg
))
1841 JSObject
*blockObj
= pn
->pn_objbox
->object
;
1842 jsint depth
= AdjustBlockSlot(cx
, cg
, OBJ_BLOCK_DEPTH(cx
, blockObj
));
1846 for (uintN slot
= JSSLOT_FREE(&js_BlockClass
),
1847 limit
= slot
+ OBJ_BLOCK_COUNT(cx
, blockObj
);
1848 slot
< limit
; slot
++) {
1849 jsval v
= STOBJ_GET_SLOT(blockObj
, slot
);
1851 /* Beware the empty destructuring dummy. */
1852 if (JSVAL_IS_VOID(v
)) {
1853 JS_ASSERT(slot
+ 1 <= limit
);
1857 JSDefinition
*dn
= (JSDefinition
*) JSVAL_TO_PRIVATE(v
);
1858 JS_ASSERT(dn
->pn_defn
);
1859 JS_ASSERT(uintN(dn
->frameSlot() + depth
) < JS_BIT(16));
1860 dn
->pn_cookie
+= depth
;
1862 for (JSParseNode
*pnu
= dn
->dn_uses
; pnu
; pnu
= pnu
->pn_link
) {
1863 JS_ASSERT(pnu
->pn_lexdef
== dn
);
1864 JS_ASSERT(!(pnu
->pn_dflags
& PND_BOUND
));
1865 JS_ASSERT(pnu
->pn_cookie
== FREE_UPVAR_COOKIE
);
1870 OBJ_SCOPE(blockObj
)->freeslot
= JSSLOT_FREE(&js_BlockClass
);
1871 return js_GrowSlots(cx
, blockObj
, JSSLOT_FREE(&js_BlockClass
));
1875 * When eval is called from a function, the eval code or function code it
1876 * compiles may reference upvars that live in the eval-calling function. The
1877 * eval-invoked compiler does not have explicit definitions for these upvars
1878 * and we do not attempt to create them a-priori (by inspecting the function's
1879 * args and vars) -- we could, but we'd take an avoidable penalty for each
1880 * function local not referenced by any upvar. Instead, we map such upvars
1881 * lazily, growing upvarMap.vector by powers of two.
1883 * This function knows that it is called with pn pointing to a PN_NAME-arity
1884 * node, and cg->compiler->callerFrame having a non-null fun member, and the
1885 * static level of cg at least one greater than the eval-calling function's
1889 MakeUpvarForEval(JSParseNode
*pn
, JSCodeGenerator
*cg
)
1891 JSContext
*cx
= cg
->compiler
->context
;
1892 JSFunction
*fun
= cg
->compiler
->callerFrame
->fun
;
1893 uintN upvarLevel
= fun
->u
.i
.script
->staticLevel
;
1895 JSFunctionBox
*funbox
= cg
->funbox
;
1898 * Treat top-level function definitions as escaping (i.e., as funargs),
1899 * required since we compile each such top level function or statement
1900 * and throw away the AST, so we can't yet see all funarg uses of this
1901 * function being compiled (cg->funbox->object). See bug 493177.
1903 if (funbox
->level
== fun
->u
.i
.script
->staticLevel
+ 1U &&
1904 !(((JSFunction
*) funbox
->object
)->flags
& JSFUN_LAMBDA
)) {
1905 JS_ASSERT_IF(cx
->options
& JSOPTION_ANONFUNFIX
,
1906 ((JSFunction
*) funbox
->object
)->atom
);
1910 while (funbox
->level
>= upvarLevel
) {
1911 if (funbox
->node
->pn_dflags
& PND_FUNARG
)
1913 funbox
= funbox
->parent
;
1919 JSAtom
*atom
= pn
->pn_atom
;
1922 JSLocalKind localKind
= js_LookupLocal(cx
, fun
, atom
, &index
);
1923 if (localKind
== JSLOCAL_NONE
)
1926 JS_ASSERT(cg
->staticLevel
> upvarLevel
);
1927 if (cg
->staticLevel
>= JS_DISPLAY_SIZE
|| upvarLevel
>= JS_DISPLAY_SIZE
)
1930 JSAtomListElement
*ale
= cg
->upvarList
.lookup(atom
);
1932 if ((cg
->flags
& TCF_IN_FUNCTION
) &&
1933 !js_AddLocal(cx
, cg
->fun
, atom
, JSLOCAL_UPVAR
)) {
1937 ale
= cg
->upvarList
.add(cg
->compiler
, atom
);
1940 JS_ASSERT(ALE_INDEX(ale
) == cg
->upvarList
.count
- 1);
1942 uint32
*vector
= cg
->upvarMap
.vector
;
1943 uint32 length
= cg
->upvarMap
.length
;
1945 JS_ASSERT(ALE_INDEX(ale
) <= length
);
1946 if (ALE_INDEX(ale
) == length
) {
1947 length
= 2 * JS_MAX(2, length
);
1948 vector
= (uint32
*) cx
->realloc(vector
, length
* sizeof *vector
);
1951 cg
->upvarMap
.vector
= vector
;
1952 cg
->upvarMap
.length
= length
;
1955 if (localKind
!= JSLOCAL_ARG
)
1956 index
+= fun
->nargs
;
1957 JS_ASSERT(index
< JS_BIT(16));
1959 uintN skip
= cg
->staticLevel
- upvarLevel
;
1960 vector
[ALE_INDEX(ale
)] = MAKE_UPVAR_COOKIE(skip
, index
);
1963 pn
->pn_op
= JSOP_GETUPVAR
;
1964 pn
->pn_cookie
= MAKE_UPVAR_COOKIE(cg
->staticLevel
, ALE_INDEX(ale
));
1965 pn
->pn_dflags
|= PND_BOUND
;
1970 * BindNameToSlot attempts to optimize name gets and sets to stack slot loads
1971 * and stores, given the compile-time information in cg and a TOK_NAME node pn.
1972 * It returns false on error, true on success.
1974 * The caller can inspect pn->pn_cookie for FREE_UPVAR_COOKIE to tell whether
1975 * optimization occurred, in which case BindNameToSlot also updated pn->pn_op.
1976 * If pn->pn_cookie is still FREE_UPVAR_COOKIE on return, pn->pn_op still may
1977 * have been optimized, e.g., from JSOP_NAME to JSOP_CALLEE. Whether or not
1978 * pn->pn_op was modified, if this function finds an argument or local variable
1979 * name, PND_CONST will be set in pn_dflags for read-only properties after a
1980 * successful return.
1982 * NB: if you add more opcodes specialized from JSOP_NAME, etc., don't forget
1983 * to update the TOK_FOR (for-in) and TOK_ASSIGN (op=, e.g. +=) special cases
1987 BindNameToSlot(JSContext
*cx
, JSCodeGenerator
*cg
, JSParseNode
*pn
)
1993 JSDefinition::Kind dn_kind
;
1994 JSAtomListElement
*ale
;
1997 JS_ASSERT(pn
->pn_type
== TOK_NAME
);
1999 /* Idempotency tests come first, since we may be called more than once. */
2000 if (pn
->pn_dflags
& PND_BOUND
)
2003 /* No cookie initialized for these two, they're pre-bound by definition. */
2004 JS_ASSERT(pn
->pn_op
!= JSOP_ARGUMENTS
&& pn
->pn_op
!= JSOP_CALLEE
);
2007 * The parser linked all uses (including forward references) to their
2008 * definitions, unless a with statement or direct eval intervened.
2011 JS_ASSERT(pn
->pn_cookie
== FREE_UPVAR_COOKIE
);
2013 JS_ASSERT(dn
->pn_defn
);
2014 pn
->pn_dflags
|= (dn
->pn_dflags
& PND_CONST
);
2018 dn
= (JSDefinition
*) pn
;
2025 JS_ASSERT(JOF_OPTYPE(op
) == JOF_ATOM
);
2027 cookie
= dn
->pn_cookie
;
2028 dn_kind
= dn
->kind();
2031 * Turn attempts to mutate const-declared bindings into get ops (for
2032 * pre-increment and pre-decrement ops, our caller will have to emit
2033 * JSOP_POS, JSOP_ONE, and JSOP_ADD as well).
2035 * Turn JSOP_DELNAME into JSOP_FALSE if dn is known, as all declared
2036 * bindings visible to the compiler are permanent in JS unless the
2037 * declaration originates in eval code. We detect eval code by testing
2038 * cg->compiler->callerFrame, which is set only by eval or a debugger
2041 * Note that this callerFrame non-null test must be qualified by testing
2042 * !cg->funbox to exclude function code nested in eval code, which is not
2043 * subject to the deletable binding exception.
2050 if (dn_kind
!= JSDefinition::UNKNOWN
) {
2051 if (cg
->compiler
->callerFrame
&& !cg
->funbox
)
2052 JS_ASSERT(cg
->flags
& TCF_COMPILE_N_GO
);
2054 pn
->pn_op
= JSOP_FALSE
;
2055 pn
->pn_dflags
|= PND_BOUND
;
2061 pn
->pn_op
= op
= JSOP_NAME
;
2064 if (cookie
== FREE_UPVAR_COOKIE
) {
2065 JSStackFrame
*caller
= cg
->compiler
->callerFrame
;
2067 JS_ASSERT(cg
->flags
& TCF_COMPILE_N_GO
);
2070 * Don't generate upvars on the left side of a for loop. See
2073 if (cg
->flags
& TCF_IN_FOR_INIT
)
2076 JS_ASSERT(caller
->script
);
2081 * Make sure the variable object used by the compiler to initialize
2082 * parent links matches the caller's varobj. Compile-n-go compiler-
2083 * created function objects have the top-level cg's scopeChain set
2084 * as their parent by JSCompiler::newFunction.
2086 JSObject
*scopeobj
= (cg
->flags
& TCF_IN_FUNCTION
)
2087 ? STOBJ_GET_PARENT(FUN_OBJECT(cg
->fun
))
2089 if (scopeobj
!= caller
->varobj
)
2093 * We are compiling eval or debug script inside a function frame
2094 * and the scope chain matches the function's variable object.
2095 * Optimize access to function's arguments and variable and the
2098 if (op
!= JSOP_NAME
)
2101 return MakeUpvarForEval(pn
, cg
);
2106 if (dn
->pn_dflags
& PND_GVAR
) {
2108 * If this is a global reference from within a function, leave pn_op as
2109 * JSOP_NAME, etc. We could emit JSOP_*GVAR ops within function code if
2110 * only we could depend on the global frame's slots being valid for all
2111 * calls to the function.
2113 if (cg
->flags
& TCF_IN_FUNCTION
)
2117 * We are optimizing global variables and there may be no pre-existing
2118 * global property named atom when this global script runs. If atom was
2119 * declared via const or var, optimize pn to access fp->vars using the
2120 * appropriate JSOP_*GVAR op.
2122 * FIXME: should be able to optimize global function access too.
2124 JS_ASSERT(dn_kind
== JSDefinition::VAR
|| dn_kind
== JSDefinition::CONST
);
2127 case JSOP_NAME
: op
= JSOP_GETGVAR
; break;
2128 case JSOP_SETNAME
: op
= JSOP_SETGVAR
; break;
2129 case JSOP_SETCONST
: /* NB: no change */ break;
2130 case JSOP_INCNAME
: op
= JSOP_INCGVAR
; break;
2131 case JSOP_NAMEINC
: op
= JSOP_GVARINC
; break;
2132 case JSOP_DECNAME
: op
= JSOP_DECGVAR
; break;
2133 case JSOP_NAMEDEC
: op
= JSOP_GVARDEC
; break;
2134 case JSOP_FORNAME
: /* NB: no change */ break;
2135 case JSOP_DELNAME
: /* NB: no change */ break;
2136 default: JS_NOT_REACHED("gvar");
2139 pn
->pn_cookie
= cookie
;
2140 pn
->pn_dflags
|= PND_BOUND
;
2144 uintN level
= UPVAR_FRAME_SKIP(cookie
);
2145 JS_ASSERT(cg
->staticLevel
>= level
);
2148 * A JSDefinition witnessed as a declaration by the parser cannot be an
2149 * upvar, unless it is the degenerate kind of upvar selected above (in the
2150 * code before the PND_GVAR test) for the special case of compile-and-go
2151 * code generated from eval called from a function, where the eval code
2152 * uses local vars defined in the function. We detect this upvar-for-eval
2153 * case by checking dn's op.
2155 if (PN_OP(dn
) == JSOP_GETUPVAR
) {
2156 JS_ASSERT(cg
->staticLevel
>= level
);
2157 if (op
!= JSOP_NAME
)
2161 JSStackFrame
*caller
= cg
->compiler
->callerFrame
;
2164 JS_ASSERT(caller
->script
);
2166 JSTreeContext
*tc
= cg
;
2167 while (tc
->staticLevel
!= level
)
2169 JS_ASSERT(tc
->flags
& TCF_COMPILING
);
2171 JSCodeGenerator
*evalcg
= (JSCodeGenerator
*) tc
;
2172 JS_ASSERT(evalcg
->flags
& TCF_COMPILE_N_GO
);
2173 JS_ASSERT(caller
->fun
&& caller
->varobj
== evalcg
->scopeChain
);
2176 * Don't generate upvars on the left side of a for loop. See
2177 * bug 470758 and bug 520513.
2179 if (evalcg
->flags
& TCF_IN_FOR_INIT
)
2182 if (cg
->staticLevel
== level
) {
2183 pn
->pn_op
= JSOP_GETUPVAR
;
2184 pn
->pn_cookie
= cookie
;
2185 pn
->pn_dflags
|= PND_BOUND
;
2189 return MakeUpvarForEval(pn
, cg
);
2192 uintN skip
= cg
->staticLevel
- level
;
2194 JS_ASSERT(cg
->flags
& TCF_IN_FUNCTION
);
2195 JS_ASSERT_IF(UPVAR_FRAME_SLOT(cookie
) != CALLEE_UPVAR_SLOT
,
2196 cg
->lexdeps
.lookup(atom
));
2197 JS_ASSERT(JOF_OPTYPE(op
) == JOF_ATOM
);
2198 JS_ASSERT(cg
->fun
->u
.i
.skipmin
<= skip
);
2201 * If op is a mutating opcode, this upvar's static level is too big to
2202 * index into the display, or the function is heavyweight, we fall back
2205 if (op
!= JSOP_NAME
)
2207 if (level
>= JS_DISPLAY_SIZE
)
2209 if (cg
->flags
& TCF_FUN_HEAVYWEIGHT
)
2212 if (FUN_FLAT_CLOSURE(cg
->fun
)) {
2216 * The function we're compiling may not be heavyweight, but if it
2217 * escapes as a funarg, we can't use JSOP_GETUPVAR/JSOP_CALLUPVAR.
2218 * JSCompiler::analyzeFunctions has arranged for this function's
2219 * enclosing functions to be heavyweight, so we can safely stick
2220 * with JSOP_NAME/JSOP_CALLNAME.
2222 if (cg
->funbox
->node
->pn_dflags
& PND_FUNARG
)
2226 * Generator functions may be resumed from any call stack, which
2227 * defeats the display optimization to static link searching used
2228 * by JSOP_{GET,CALL}UPVAR.
2230 if (cg
->flags
& TCF_FUN_IS_GENERATOR
)
2236 ale
= cg
->upvarList
.lookup(atom
);
2238 index
= ALE_INDEX(ale
);
2240 if (!js_AddLocal(cx
, cg
->fun
, atom
, JSLOCAL_UPVAR
))
2243 ale
= cg
->upvarList
.add(cg
->compiler
, atom
);
2246 index
= ALE_INDEX(ale
);
2247 JS_ASSERT(index
== cg
->upvarList
.count
- 1);
2249 uint32
*vector
= cg
->upvarMap
.vector
;
2251 uint32 length
= cg
->lexdeps
.count
;
2253 vector
= (uint32
*) js_calloc(length
* sizeof *vector
);
2255 JS_ReportOutOfMemory(cx
);
2258 cg
->upvarMap
.vector
= vector
;
2259 cg
->upvarMap
.length
= length
;
2262 uintN slot
= UPVAR_FRAME_SLOT(cookie
);
2263 if (slot
!= CALLEE_UPVAR_SLOT
&& dn_kind
!= JSDefinition::ARG
) {
2264 JSTreeContext
*tc
= cg
;
2267 } while (tc
->staticLevel
!= level
);
2268 if (tc
->flags
& TCF_IN_FUNCTION
)
2269 slot
+= tc
->fun
->nargs
;
2272 vector
[index
] = MAKE_UPVAR_COOKIE(skip
, slot
);
2276 pn
->pn_cookie
= index
;
2277 pn
->pn_dflags
|= PND_BOUND
;
2282 * We are compiling a function body and may be able to optimize name
2283 * to stack slot. Look for an argument or variable in the function and
2284 * rewrite pn_op and update pn accordingly.
2287 case JSDefinition::UNKNOWN
:
2290 case JSDefinition::LET
:
2292 case JSOP_NAME
: op
= JSOP_GETLOCAL
; break;
2293 case JSOP_SETNAME
: op
= JSOP_SETLOCAL
; break;
2294 case JSOP_INCNAME
: op
= JSOP_INCLOCAL
; break;
2295 case JSOP_NAMEINC
: op
= JSOP_LOCALINC
; break;
2296 case JSOP_DECNAME
: op
= JSOP_DECLOCAL
; break;
2297 case JSOP_NAMEDEC
: op
= JSOP_LOCALDEC
; break;
2298 case JSOP_FORNAME
: op
= JSOP_FORLOCAL
; break;
2299 default: JS_NOT_REACHED("let");
2303 case JSDefinition::ARG
:
2305 case JSOP_NAME
: op
= JSOP_GETARG
; break;
2306 case JSOP_SETNAME
: op
= JSOP_SETARG
; break;
2307 case JSOP_INCNAME
: op
= JSOP_INCARG
; break;
2308 case JSOP_NAMEINC
: op
= JSOP_ARGINC
; break;
2309 case JSOP_DECNAME
: op
= JSOP_DECARG
; break;
2310 case JSOP_NAMEDEC
: op
= JSOP_ARGDEC
; break;
2311 case JSOP_FORNAME
: op
= JSOP_FORARG
; break;
2312 default: JS_NOT_REACHED("arg");
2314 JS_ASSERT(!pn
->isConst());
2317 case JSDefinition::VAR
:
2318 if (PN_OP(dn
) == JSOP_CALLEE
) {
2319 JS_ASSERT(op
!= JSOP_CALLEE
);
2320 JS_ASSERT((cg
->fun
->flags
& JSFUN_LAMBDA
) && atom
== cg
->fun
->atom
);
2323 * Leave pn->pn_op == JSOP_NAME if cg->fun is heavyweight, as we
2324 * cannot be sure cg->fun is not something of the form:
2326 * var ff = (function f(s) { eval(s); return f; });
2328 * where a caller invokes ff("var f = 42"). The result returned for
2329 * such an invocation must be 42, since the callee name is
2330 * lexically bound in an outer declarative environment from the
2331 * function's activation. See jsfun.cpp:call_resolve.
2333 JS_ASSERT(op
!= JSOP_DELNAME
);
2334 if (!(cg
->flags
& TCF_FUN_HEAVYWEIGHT
)) {
2336 pn
->pn_dflags
|= PND_CONST
;
2340 pn
->pn_dflags
|= PND_BOUND
;
2346 JS_ASSERT_IF(dn_kind
!= JSDefinition::FUNCTION
,
2347 dn_kind
== JSDefinition::VAR
||
2348 dn_kind
== JSDefinition::CONST
);
2350 case JSOP_NAME
: op
= JSOP_GETLOCAL
; break;
2351 case JSOP_SETNAME
: op
= JSOP_SETLOCAL
; break;
2352 case JSOP_SETCONST
: op
= JSOP_SETLOCAL
; break;
2353 case JSOP_INCNAME
: op
= JSOP_INCLOCAL
; break;
2354 case JSOP_NAMEINC
: op
= JSOP_LOCALINC
; break;
2355 case JSOP_DECNAME
: op
= JSOP_DECLOCAL
; break;
2356 case JSOP_NAMEDEC
: op
= JSOP_LOCALDEC
; break;
2357 case JSOP_FORNAME
: op
= JSOP_FORLOCAL
; break;
2358 default: JS_NOT_REACHED("local");
2360 JS_ASSERT_IF(dn_kind
== JSDefinition::CONST
, pn
->pn_dflags
& PND_CONST
);
2364 JS_ASSERT(op
!= PN_OP(pn
));
2366 pn
->pn_cookie
= UPVAR_FRAME_SLOT(cookie
);
2367 pn
->pn_dflags
|= PND_BOUND
;
2372 * If pn contains a useful expression, return true with *answer set to true.
2373 * If pn contains a useless expression, return true with *answer set to false.
2374 * Return false on error.
2376 * The caller should initialize *answer to false and invoke this function on
2377 * an expression statement or similar subtree to decide whether the tree could
2378 * produce code that has any side effects. For an expression statement, we
2379 * define useless code as code with no side effects, because the main effect,
2380 * the value left on the stack after the code executes, will be discarded by a
2384 CheckSideEffects(JSContext
*cx
, JSCodeGenerator
*cg
, JSParseNode
*pn
,
2394 switch (pn
->pn_arity
) {
2397 * A named function, contrary to ES3, is no longer useful, because we
2398 * bind its name lexically (using JSOP_CALLEE) instead of creating an
2399 * Object instance and binding a readonly, permanent property in it
2400 * (the object and binding can be detected and hijacked or captured).
2401 * This is a bug fix to ES3; it is fixed in ES3.1 drafts.
2407 if (pn
->pn_op
== JSOP_NOP
||
2408 pn
->pn_op
== JSOP_OR
|| pn
->pn_op
== JSOP_AND
||
2409 pn
->pn_op
== JSOP_STRICTEQ
|| pn
->pn_op
== JSOP_STRICTNE
) {
2411 * Non-operators along with ||, &&, ===, and !== never invoke
2412 * toString or valueOf.
2414 for (pn2
= pn
->pn_head
; pn2
; pn2
= pn2
->pn_next
)
2415 ok
&= CheckSideEffects(cx
, cg
, pn2
, answer
);
2418 * All invocation operations (construct: TOK_NEW, call: TOK_LP)
2419 * are presumed to be useful, because they may have side effects
2420 * even if their main effect (their return value) is discarded.
2422 * TOK_LB binary trees of 3 or more nodes are flattened into lists
2423 * to avoid too much recursion. All such lists must be presumed
2424 * to be useful because each index operation could invoke a getter
2425 * (the JSOP_ARGUMENTS special case below, in the PN_BINARY case,
2426 * does not apply here: arguments[i][j] might invoke a getter).
2428 * Likewise, array and object initialisers may call prototype
2429 * setters (the __defineSetter__ built-in, and writable __proto__
2430 * on Array.prototype create this hazard). Initialiser list nodes
2431 * have JSOP_NEWINIT in their pn_op.
2438 ok
= CheckSideEffects(cx
, cg
, pn
->pn_kid1
, answer
) &&
2439 CheckSideEffects(cx
, cg
, pn
->pn_kid2
, answer
) &&
2440 CheckSideEffects(cx
, cg
, pn
->pn_kid3
, answer
);
2444 if (pn
->pn_type
== TOK_ASSIGN
) {
2446 * Assignment is presumed to be useful, even if the next operation
2447 * is another assignment overwriting this one's ostensible effect,
2448 * because the left operand may be a property with a setter that
2451 * The only exception is assignment of a useless value to a const
2452 * declared in the function currently being compiled.
2455 if (pn2
->pn_type
!= TOK_NAME
) {
2458 if (!BindNameToSlot(cx
, cg
, pn2
))
2460 if (!CheckSideEffects(cx
, cg
, pn
->pn_right
, answer
))
2462 if (!*answer
&& (pn
->pn_op
!= JSOP_NOP
|| !pn2
->isConst()))
2466 if (pn
->pn_op
== JSOP_OR
|| pn
->pn_op
== JSOP_AND
||
2467 pn
->pn_op
== JSOP_STRICTEQ
|| pn
->pn_op
== JSOP_STRICTNE
) {
2469 * ||, &&, ===, and !== do not convert their operands via
2470 * toString or valueOf method calls.
2472 ok
= CheckSideEffects(cx
, cg
, pn
->pn_left
, answer
) &&
2473 CheckSideEffects(cx
, cg
, pn
->pn_right
, answer
);
2476 * We can't easily prove that neither operand ever denotes an
2477 * object with a toString or valueOf method.
2485 switch (pn
->pn_type
) {
2488 switch (pn2
->pn_type
) {
2490 if (!BindNameToSlot(cx
, cg
, pn2
))
2492 if (pn2
->isConst()) {
2498 #if JS_HAS_XML_SUPPORT
2503 /* All these delete addressing modes have effects too. */
2507 ok
= CheckSideEffects(cx
, cg
, pn2
, answer
);
2513 if (pn
->pn_op
== JSOP_NOT
) {
2514 /* ! does not convert its operand via toString or valueOf. */
2515 ok
= CheckSideEffects(cx
, cg
, pn
->pn_kid
, answer
);
2522 * All of TOK_INC, TOK_DEC, TOK_THROW, TOK_YIELD, and TOK_DEFSHARP
2523 * have direct effects. Of the remaining unary-arity node types,
2524 * we can't easily prove that the operand never denotes an object
2525 * with a toString or valueOf method.
2534 * Take care to avoid trying to bind a label name (labels, both for
2535 * statements and property values in object initialisers, have pn_op
2536 * defaulted to JSOP_NOP).
2538 if (pn
->pn_type
== TOK_NAME
&& pn
->pn_op
!= JSOP_NOP
) {
2539 if (!BindNameToSlot(cx
, cg
, pn
))
2541 if (pn
->pn_op
!= JSOP_ARGUMENTS
&& pn
->pn_op
!= JSOP_CALLEE
&&
2542 pn
->pn_cookie
== FREE_UPVAR_COOKIE
) {
2544 * Not an argument or local variable use, and not a use of a
2545 * unshadowed named function expression's given name, so this
2546 * expression could invoke a getter that has side effects.
2551 pn2
= pn
->maybeExpr();
2552 if (pn
->pn_type
== TOK_DOT
) {
2553 if (pn2
->pn_type
== TOK_NAME
&& !BindNameToSlot(cx
, cg
, pn2
))
2555 if (!(pn2
->pn_op
== JSOP_ARGUMENTS
&&
2556 pn
->pn_atom
== cx
->runtime
->atomState
.lengthAtom
)) {
2558 * Any dotted property reference could call a getter, except
2559 * for arguments.length where arguments is unambiguous.
2564 ok
= CheckSideEffects(cx
, cg
, pn2
, answer
);
2568 ok
= CheckSideEffects(cx
, cg
, pn
->pn_tree
, answer
);
2572 if (pn
->pn_type
== TOK_DEBUGGER
)
2580 EmitNameOp(JSContext
*cx
, JSCodeGenerator
*cg
, JSParseNode
*pn
,
2585 if (!BindNameToSlot(cx
, cg
, pn
))
2601 op
= JSOP_CALLLOCAL
;
2604 op
= JSOP_CALLUPVAR
;
2607 op
= JSOP_CALLDSLOT
;
2610 JS_ASSERT(op
== JSOP_ARGUMENTS
|| op
== JSOP_CALLEE
);
2615 if (op
== JSOP_ARGUMENTS
|| op
== JSOP_CALLEE
) {
2616 if (js_Emit1(cx
, cg
, op
) < 0)
2618 if (callContext
&& js_Emit1(cx
, cg
, JSOP_NULL
) < 0)
2621 if (pn
->pn_cookie
!= FREE_UPVAR_COOKIE
) {
2622 EMIT_UINT16_IMM_OP(op
, pn
->pn_cookie
);
2624 if (!EmitAtomOp(cx
, pn
, op
, cg
))
2632 #if JS_HAS_XML_SUPPORT
2634 EmitXMLName(JSContext
*cx
, JSParseNode
*pn
, JSOp op
, JSCodeGenerator
*cg
)
2639 JS_ASSERT(pn
->pn_type
== TOK_UNARYOP
);
2640 JS_ASSERT(pn
->pn_op
== JSOP_XMLNAME
);
2641 JS_ASSERT(op
== JSOP_XMLNAME
|| op
== JSOP_CALLXMLNAME
);
2644 oldflags
= cg
->flags
;
2645 cg
->flags
&= ~TCF_IN_FOR_INIT
;
2646 if (!js_EmitTree(cx
, cg
, pn2
))
2648 cg
->flags
|= oldflags
& TCF_IN_FOR_INIT
;
2649 if (js_NewSrcNote2(cx
, cg
, SRC_PCBASE
,
2650 CG_OFFSET(cg
) - pn2
->pn_offset
) < 0) {
2654 return js_Emit1(cx
, cg
, op
) >= 0;
2659 EmitSpecialPropOp(JSContext
*cx
, JSParseNode
*pn
, JSOp op
, JSCodeGenerator
*cg
)
2662 * Special case for obj.__proto__, obj.__parent__, obj.__count__ to
2663 * deoptimize away from fast paths in the interpreter and trace recorder,
2664 * which skip dense array instances by going up to Array.prototype before
2665 * looking up the property name.
2667 JSAtomListElement
*ale
= cg
->atomList
.add(cg
->compiler
, pn
->pn_atom
);
2670 if (!EmitIndexOp(cx
, JSOP_QNAMEPART
, ALE_INDEX(ale
), cg
))
2672 if (js_Emit1(cx
, cg
, op
) < 0)
2678 EmitPropOp(JSContext
*cx
, JSParseNode
*pn
, JSOp op
, JSCodeGenerator
*cg
,
2681 JSParseNode
*pn2
, *pndot
, *pnup
, *pndown
;
2684 JS_ASSERT(pn
->pn_arity
== PN_NAME
);
2685 pn2
= pn
->maybeExpr();
2687 /* Special case deoptimization on __proto__, __count__ and __parent__. */
2688 if ((op
== JSOP_GETPROP
|| op
== JSOP_CALLPROP
) &&
2689 (pn
->pn_atom
== cx
->runtime
->atomState
.protoAtom
||
2690 pn
->pn_atom
== cx
->runtime
->atomState
.parentAtom
||
2691 pn
->pn_atom
== cx
->runtime
->atomState
.countAtom
)) {
2692 if (pn2
&& !js_EmitTree(cx
, cg
, pn2
))
2694 return EmitSpecialPropOp(cx
, pn
, callContext
? JSOP_CALLELEM
: JSOP_GETELEM
, cg
);
2698 JS_ASSERT(pn
->pn_type
== TOK_DOT
);
2699 JS_ASSERT(op
== JSOP_GETPROP
);
2701 } else if (op
== JSOP_GETPROP
&& pn
->pn_type
== TOK_DOT
) {
2702 if (pn2
->pn_op
== JSOP_THIS
) {
2703 if (pn
->pn_atom
!= cx
->runtime
->atomState
.lengthAtom
) {
2704 /* Fast path for gets of |this.foo|. */
2705 return EmitAtomOp(cx
, pn
, JSOP_GETTHISPROP
, cg
);
2707 } else if (pn2
->pn_type
== TOK_NAME
) {
2710 * - arguments.length into JSOP_ARGCNT
2711 * - argname.prop into JSOP_GETARGPROP
2712 * - localname.prop into JSOP_GETLOCALPROP
2713 * but don't do this if the property is 'length' -- prefer to emit
2714 * JSOP_GETARG, etc., and then JSOP_LENGTH.
2716 if (!BindNameToSlot(cx
, cg
, pn2
))
2718 if (pn
->pn_atom
== cx
->runtime
->atomState
.lengthAtom
) {
2719 if (pn2
->pn_op
== JSOP_ARGUMENTS
)
2720 return js_Emit1(cx
, cg
, JSOP_ARGCNT
) >= 0;
2722 switch (pn2
->pn_op
) {
2724 op
= JSOP_GETARGPROP
;
2727 op
= JSOP_GETLOCALPROP
;
2729 JSAtomListElement
*ale
;
2732 ale
= cg
->atomList
.add(cg
->compiler
, pn
->pn_atom
);
2735 atomIndex
= ALE_INDEX(ale
);
2736 return EmitSlotIndexOp(cx
, op
, pn2
->pn_cookie
, atomIndex
, cg
);
2746 * If the object operand is also a dotted property reference, reverse the
2747 * list linked via pn_expr temporarily so we can iterate over it from the
2748 * bottom up (reversing again as we go), to avoid excessive recursion.
2750 if (pn2
->pn_type
== TOK_DOT
) {
2753 top
= CG_OFFSET(cg
);
2755 /* Reverse pndot->pn_expr to point up, not down. */
2756 pndot
->pn_offset
= top
;
2757 JS_ASSERT(!pndot
->pn_used
);
2758 pndown
= pndot
->pn_expr
;
2759 pndot
->pn_expr
= pnup
;
2760 if (pndown
->pn_type
!= TOK_DOT
)
2766 /* pndown is a primary expression, not a dotted property reference. */
2767 if (!js_EmitTree(cx
, cg
, pndown
))
2771 /* Walk back up the list, emitting annotated name ops. */
2772 if (js_NewSrcNote2(cx
, cg
, SRC_PCBASE
,
2773 CG_OFFSET(cg
) - pndown
->pn_offset
) < 0) {
2778 * Special case deoptimization on __proto__, __count__ and
2779 * __parent__, as above.
2781 if (pndot
->pn_arity
== PN_NAME
&&
2782 (pndot
->pn_atom
== cx
->runtime
->atomState
.protoAtom
||
2783 pndot
->pn_atom
== cx
->runtime
->atomState
.parentAtom
||
2784 pndot
->pn_atom
== cx
->runtime
->atomState
.countAtom
)) {
2785 if (!EmitSpecialPropOp(cx
, pndot
, JSOP_GETELEM
, cg
))
2787 } else if (!EmitAtomOp(cx
, pndot
, PN_OP(pndot
), cg
)) {
2791 /* Reverse the pn_expr link again. */
2792 pnup
= pndot
->pn_expr
;
2793 pndot
->pn_expr
= pndown
;
2795 } while ((pndot
= pnup
) != NULL
);
2797 if (!js_EmitTree(cx
, cg
, pn2
))
2801 if (js_NewSrcNote2(cx
, cg
, SRC_PCBASE
,
2802 CG_OFFSET(cg
) - pn2
->pn_offset
) < 0) {
2806 return EmitAtomOp(cx
, pn
, op
, cg
);
2810 EmitElemOp(JSContext
*cx
, JSParseNode
*pn
, JSOp op
, JSCodeGenerator
*cg
)
2813 JSParseNode
*left
, *right
, *next
, ltmp
, rtmp
;
2816 top
= CG_OFFSET(cg
);
2817 if (pn
->pn_arity
== PN_LIST
) {
2818 /* Left-associative operator chain to avoid too much recursion. */
2819 JS_ASSERT(pn
->pn_op
== JSOP_GETELEM
);
2820 JS_ASSERT(pn
->pn_count
>= 3);
2823 next
= left
->pn_next
;
2824 JS_ASSERT(next
!= right
);
2827 * Try to optimize arguments[0][j]... into JSOP_ARGSUB<0> followed by
2828 * one or more index expression and JSOP_GETELEM op pairs.
2830 if (left
->pn_type
== TOK_NAME
&& next
->pn_type
== TOK_NUMBER
) {
2831 if (!BindNameToSlot(cx
, cg
, left
))
2833 if (left
->pn_op
== JSOP_ARGUMENTS
&&
2834 JSDOUBLE_IS_INT(next
->pn_dval
, slot
) &&
2835 (jsuint
)slot
< JS_BIT(16)) {
2837 * arguments[i]() requires arguments object as "this".
2838 * Check that we never generates list for that usage.
2840 JS_ASSERT(op
!= JSOP_CALLELEM
|| next
->pn_next
);
2841 left
->pn_offset
= next
->pn_offset
= top
;
2842 EMIT_UINT16_IMM_OP(JSOP_ARGSUB
, (jsatomid
)slot
);
2844 next
= left
->pn_next
;
2849 * Check whether we generated JSOP_ARGSUB, just above, and have only
2850 * one more index expression to emit. Given arguments[0][j], we must
2851 * skip the while loop altogether, falling through to emit code for j
2852 * (in the subtree referenced by right), followed by the annotated op,
2853 * at the bottom of this function.
2855 JS_ASSERT(next
!= right
|| pn
->pn_count
== 3);
2856 if (left
== pn
->pn_head
) {
2857 if (!js_EmitTree(cx
, cg
, left
))
2860 while (next
!= right
) {
2861 if (!js_EmitTree(cx
, cg
, next
))
2863 if (js_NewSrcNote2(cx
, cg
, SRC_PCBASE
, CG_OFFSET(cg
) - top
) < 0)
2865 if (js_Emit1(cx
, cg
, JSOP_GETELEM
) < 0)
2867 next
= next
->pn_next
;
2870 if (pn
->pn_arity
== PN_NAME
) {
2872 * Set left and right so pn appears to be a TOK_LB node, instead
2873 * of a TOK_DOT node. See the TOK_FOR/IN case in js_EmitTree, and
2874 * EmitDestructuringOps nearer below. In the destructuring case,
2875 * the base expression (pn_expr) of the name may be null, which
2876 * means we have to emit a JSOP_BINDNAME.
2878 left
= pn
->maybeExpr();
2881 left
->pn_type
= TOK_STRING
;
2882 left
->pn_op
= JSOP_BINDNAME
;
2883 left
->pn_arity
= PN_NULLARY
;
2884 left
->pn_pos
= pn
->pn_pos
;
2885 left
->pn_atom
= pn
->pn_atom
;
2888 right
->pn_type
= TOK_STRING
;
2889 JS_ASSERT(ATOM_IS_STRING(pn
->pn_atom
));
2890 right
->pn_op
= js_IsIdentifier(ATOM_TO_STRING(pn
->pn_atom
))
2893 right
->pn_arity
= PN_NULLARY
;
2894 right
->pn_pos
= pn
->pn_pos
;
2895 right
->pn_atom
= pn
->pn_atom
;
2897 JS_ASSERT(pn
->pn_arity
== PN_BINARY
);
2899 right
= pn
->pn_right
;
2902 /* Try to optimize arguments[0] (e.g.) into JSOP_ARGSUB<0>. */
2903 if (op
== JSOP_GETELEM
&&
2904 left
->pn_type
== TOK_NAME
&&
2905 right
->pn_type
== TOK_NUMBER
) {
2906 if (!BindNameToSlot(cx
, cg
, left
))
2908 if (left
->pn_op
== JSOP_ARGUMENTS
&&
2909 JSDOUBLE_IS_INT(right
->pn_dval
, slot
) &&
2910 (jsuint
)slot
< JS_BIT(16)) {
2911 left
->pn_offset
= right
->pn_offset
= top
;
2912 EMIT_UINT16_IMM_OP(JSOP_ARGSUB
, (jsatomid
)slot
);
2917 if (!js_EmitTree(cx
, cg
, left
))
2921 /* The right side of the descendant operator is implicitly quoted. */
2922 JS_ASSERT(op
!= JSOP_DESCENDANTS
|| right
->pn_type
!= TOK_STRING
||
2923 right
->pn_op
== JSOP_QNAMEPART
);
2924 if (!js_EmitTree(cx
, cg
, right
))
2926 if (js_NewSrcNote2(cx
, cg
, SRC_PCBASE
, CG_OFFSET(cg
) - top
) < 0)
2928 return js_Emit1(cx
, cg
, op
) >= 0;
2932 EmitNumberOp(JSContext
*cx
, jsdouble dval
, JSCodeGenerator
*cg
)
2939 JSAtomListElement
*ale
;
2941 if (JSDOUBLE_IS_INT(dval
, ival
) && INT_FITS_IN_JSVAL(ival
)) {
2943 return js_Emit1(cx
, cg
, JSOP_ZERO
) >= 0;
2945 return js_Emit1(cx
, cg
, JSOP_ONE
) >= 0;
2946 if ((jsint
)(int8
)ival
== ival
)
2947 return js_Emit2(cx
, cg
, JSOP_INT8
, (jsbytecode
)(int8
)ival
) >= 0;
2950 if (u
< JS_BIT(16)) {
2951 EMIT_UINT16_IMM_OP(JSOP_UINT16
, u
);
2952 } else if (u
< JS_BIT(24)) {
2953 off
= js_EmitN(cx
, cg
, JSOP_UINT24
, 3);
2956 pc
= CG_CODE(cg
, off
);
2959 off
= js_EmitN(cx
, cg
, JSOP_INT32
, 4);
2962 pc
= CG_CODE(cg
, off
);
2963 SET_INT32(pc
, ival
);
2968 atom
= js_AtomizeDouble(cx
, dval
);
2972 ale
= cg
->atomList
.add(cg
->compiler
, atom
);
2975 return EmitIndexOp(cx
, JSOP_DOUBLE
, ALE_INDEX(ale
), cg
);
2979 EmitSwitch(JSContext
*cx
, JSCodeGenerator
*cg
, JSParseNode
*pn
,
2980 JSStmtInfo
*stmtInfo
)
2983 JSBool ok
, hasDefault
, constPropagated
;
2984 ptrdiff_t top
, off
, defaultOffset
;
2985 JSParseNode
*pn2
, *pn3
, *pn4
;
2986 uint32 caseCount
, tableLength
;
2987 JSParseNode
**table
;
2992 JSAtomListElement
*ale
;
2994 size_t switchSize
, tableSize
;
2995 jsbytecode
*pc
, *savepc
;
2996 #if JS_HAS_BLOCK_SCOPE
3000 /* Try for most optimal, fall back if not dense ints, and per ECMAv2. */
3001 switchOp
= JSOP_TABLESWITCH
;
3003 hasDefault
= constPropagated
= JS_FALSE
;
3007 * If the switch contains let variables scoped by its body, model the
3008 * resulting block on the stack first, before emitting the discriminant's
3009 * bytecode (in case the discriminant contains a stack-model dependency
3010 * such as a let expression).
3013 #if JS_HAS_BLOCK_SCOPE
3014 if (pn2
->pn_type
== TOK_LEXICALSCOPE
) {
3016 * Push the body's block scope before discriminant code-gen for proper
3017 * static block scope linkage in case the discriminant contains a let
3018 * expression. The block's locals must lie under the discriminant on
3019 * the stack so that case-dispatch bytecodes can find the discriminant
3022 count
= OBJ_BLOCK_COUNT(cx
, pn2
->pn_objbox
->object
);
3023 js_PushBlockScope(cg
, stmtInfo
, pn2
->pn_objbox
->object
, -1);
3024 stmtInfo
->type
= STMT_SWITCH
;
3026 /* Emit JSOP_ENTERBLOCK before code to evaluate the discriminant. */
3027 if (!EmitEnterBlock(cx
, pn2
, cg
))
3031 * Pop the switch's statement info around discriminant code-gen. Note
3032 * how this leaves cg->blockChain referencing the switch's
3033 * block scope object, which is necessary for correct block parenting
3034 * in the case where the discriminant contains a let expression.
3036 cg
->topStmt
= stmtInfo
->down
;
3037 cg
->topScopeStmt
= stmtInfo
->downScope
;
3047 * Emit code for the discriminant first (or nearly first, in the case of a
3048 * switch whose body is a block scope).
3050 if (!js_EmitTree(cx
, cg
, pn
->pn_left
))
3053 /* Switch bytecodes run from here till end of final case. */
3054 top
= CG_OFFSET(cg
);
3055 #if !JS_HAS_BLOCK_SCOPE
3056 js_PushStatement(cg
, stmtInfo
, STMT_SWITCH
, top
);
3058 if (pn2
->pn_type
== TOK_LC
) {
3059 js_PushStatement(cg
, stmtInfo
, STMT_SWITCH
, top
);
3061 /* Re-push the switch's statement info record. */
3062 cg
->topStmt
= cg
->topScopeStmt
= stmtInfo
;
3064 /* Set the statement info record's idea of top. */
3065 stmtInfo
->update
= top
;
3067 /* Advance pn2 to refer to the switch case list. */
3072 caseCount
= pn2
->pn_count
;
3076 if (caseCount
== 0 ||
3078 (hasDefault
= (pn2
->pn_head
->pn_type
== TOK_DEFAULT
)))) {
3083 #define INTMAP_LENGTH 256
3084 jsbitmap intmap_space
[INTMAP_LENGTH
];
3085 jsbitmap
*intmap
= NULL
;
3086 int32 intmap_bitlen
= 0;
3088 low
= JSVAL_INT_MAX
;
3089 high
= JSVAL_INT_MIN
;
3091 for (pn3
= pn2
->pn_head
; pn3
; pn3
= pn3
->pn_next
) {
3092 if (pn3
->pn_type
== TOK_DEFAULT
) {
3093 hasDefault
= JS_TRUE
;
3094 caseCount
--; /* one of the "cases" was the default */
3098 JS_ASSERT(pn3
->pn_type
== TOK_CASE
);
3099 if (switchOp
== JSOP_CONDSWITCH
)
3103 while (pn4
->pn_type
== TOK_RP
)
3105 switch (pn4
->pn_type
) {
3108 if (JSDOUBLE_IS_INT(d
, i
) && INT_FITS_IN_JSVAL(i
)) {
3109 pn3
->pn_val
= INT_TO_JSVAL(i
);
3111 atom
= js_AtomizeDouble(cx
, d
);
3116 pn3
->pn_val
= ATOM_KEY(atom
);
3120 pn3
->pn_val
= ATOM_KEY(pn4
->pn_atom
);
3123 if (!pn4
->maybeExpr()) {
3124 ok
= LookupCompileTimeConstant(cx
, cg
, pn4
->pn_atom
, &v
);
3127 if (v
!= JSVAL_HOLE
) {
3128 if (!JSVAL_IS_PRIMITIVE(v
)) {
3130 * XXX JSOP_LOOKUPSWITCH does not support const-
3131 * propagated object values, see bug 407186.
3133 switchOp
= JSOP_CONDSWITCH
;
3137 constPropagated
= JS_TRUE
;
3143 if (pn4
->pn_op
== JSOP_TRUE
) {
3144 pn3
->pn_val
= JSVAL_TRUE
;
3147 if (pn4
->pn_op
== JSOP_FALSE
) {
3148 pn3
->pn_val
= JSVAL_FALSE
;
3153 switchOp
= JSOP_CONDSWITCH
;
3157 JS_ASSERT(JSVAL_IS_PRIMITIVE(pn3
->pn_val
));
3159 if (switchOp
!= JSOP_TABLESWITCH
)
3161 if (!JSVAL_IS_INT(pn3
->pn_val
)) {
3162 switchOp
= JSOP_LOOKUPSWITCH
;
3165 i
= JSVAL_TO_INT(pn3
->pn_val
);
3166 if ((jsuint
)(i
+ (jsint
)JS_BIT(15)) >= (jsuint
)JS_BIT(16)) {
3167 switchOp
= JSOP_LOOKUPSWITCH
;
3176 * Check for duplicates, which require a JSOP_LOOKUPSWITCH.
3177 * We bias i by 65536 if it's negative, and hope that's a rare
3178 * case (because it requires a malloc'd bitmap).
3182 if (i
>= intmap_bitlen
) {
3184 i
< (INTMAP_LENGTH
<< JS_BITS_PER_WORD_LOG2
)) {
3185 intmap
= intmap_space
;
3186 intmap_bitlen
= INTMAP_LENGTH
<< JS_BITS_PER_WORD_LOG2
;
3188 /* Just grab 8K for the worst-case bitmap. */
3189 intmap_bitlen
= JS_BIT(16);
3190 intmap
= (jsbitmap
*)
3191 cx
->malloc((JS_BIT(16) >> JS_BITS_PER_WORD_LOG2
)
3192 * sizeof(jsbitmap
));
3194 JS_ReportOutOfMemory(cx
);
3198 memset(intmap
, 0, intmap_bitlen
>> JS_BITS_PER_BYTE_LOG2
);
3200 if (JS_TEST_BIT(intmap
, i
)) {
3201 switchOp
= JSOP_LOOKUPSWITCH
;
3204 JS_SET_BIT(intmap
, i
);
3208 if (intmap
&& intmap
!= intmap_space
)
3214 * Compute table length and select lookup instead if overlarge or
3215 * more than half-sparse.
3217 if (switchOp
== JSOP_TABLESWITCH
) {
3218 tableLength
= (uint32
)(high
- low
+ 1);
3219 if (tableLength
>= JS_BIT(16) || tableLength
> 2 * caseCount
)
3220 switchOp
= JSOP_LOOKUPSWITCH
;
3221 } else if (switchOp
== JSOP_LOOKUPSWITCH
) {
3223 * Lookup switch supports only atom indexes below 64K limit.
3224 * Conservatively estimate the maximum possible index during
3225 * switch generation and use conditional switch if it exceeds
3228 if (caseCount
+ cg
->atomList
.count
> JS_BIT(16))
3229 switchOp
= JSOP_CONDSWITCH
;
3234 * Emit a note with two offsets: first tells total switch code length,
3235 * second tells offset to first JSOP_CASE if condswitch.
3237 noteIndex
= js_NewSrcNote3(cx
, cg
, SRC_SWITCH
, 0, 0);
3241 if (switchOp
== JSOP_CONDSWITCH
) {
3243 * 0 bytes of immediate for unoptimized ECMAv2 switch.
3246 } else if (switchOp
== JSOP_TABLESWITCH
) {
3248 * 3 offsets (len, low, high) before the table, 1 per entry.
3250 switchSize
= (size_t)(JUMP_OFFSET_LEN
* (3 + tableLength
));
3253 * JSOP_LOOKUPSWITCH:
3254 * 1 offset (len) and 1 atom index (npairs) before the table,
3255 * 1 atom index and 1 jump offset per entry.
3257 switchSize
= (size_t)(JUMP_OFFSET_LEN
+ INDEX_LEN
+
3258 (INDEX_LEN
+ JUMP_OFFSET_LEN
) * caseCount
);
3262 * Emit switchOp followed by switchSize bytes of jump or lookup table.
3264 * If switchOp is JSOP_LOOKUPSWITCH or JSOP_TABLESWITCH, it is crucial
3265 * to emit the immediate operand(s) by which bytecode readers such as
3266 * BuildSpanDepTable discover the length of the switch opcode *before*
3267 * calling js_SetJumpOffset (which may call BuildSpanDepTable). It's
3268 * also important to zero all unknown jump offset immediate operands,
3269 * so they can be converted to span dependencies with null targets to
3270 * be computed later (js_EmitN zeros switchSize bytes after switchOp).
3272 if (js_EmitN(cx
, cg
, switchOp
, switchSize
) < 0)
3276 if (switchOp
== JSOP_CONDSWITCH
) {
3277 intN caseNoteIndex
= -1;
3278 JSBool beforeCases
= JS_TRUE
;
3280 /* Emit code for evaluating cases and jumping to case statements. */
3281 for (pn3
= pn2
->pn_head
; pn3
; pn3
= pn3
->pn_next
) {
3283 if (pn4
&& !js_EmitTree(cx
, cg
, pn4
))
3285 if (caseNoteIndex
>= 0) {
3286 /* off is the previous JSOP_CASE's bytecode offset. */
3287 if (!js_SetSrcNoteOffset(cx
, cg
, (uintN
)caseNoteIndex
, 0,
3288 CG_OFFSET(cg
) - off
)) {
3293 JS_ASSERT(pn3
->pn_type
== TOK_DEFAULT
);
3296 caseNoteIndex
= js_NewSrcNote2(cx
, cg
, SRC_PCDELTA
, 0);
3297 if (caseNoteIndex
< 0)
3299 off
= EmitJump(cx
, cg
, JSOP_CASE
, 0);
3302 pn3
->pn_offset
= off
;
3304 uintN noteCount
, noteCountDelta
;
3306 /* Switch note's second offset is to first JSOP_CASE. */
3307 noteCount
= CG_NOTE_COUNT(cg
);
3308 if (!js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 1,
3312 noteCountDelta
= CG_NOTE_COUNT(cg
) - noteCount
;
3313 if (noteCountDelta
!= 0)
3314 caseNoteIndex
+= noteCountDelta
;
3315 beforeCases
= JS_FALSE
;
3320 * If we didn't have an explicit default (which could fall in between
3321 * cases, preventing us from fusing this js_SetSrcNoteOffset with the
3322 * call in the loop above), link the last case to the implicit default
3323 * for the decompiler.
3326 caseNoteIndex
>= 0 &&
3327 !js_SetSrcNoteOffset(cx
, cg
, (uintN
)caseNoteIndex
, 0,
3328 CG_OFFSET(cg
) - off
)) {
3332 /* Emit default even if no explicit default statement. */
3333 defaultOffset
= EmitJump(cx
, cg
, JSOP_DEFAULT
, 0);
3334 if (defaultOffset
< 0)
3337 pc
= CG_CODE(cg
, top
+ JUMP_OFFSET_LEN
);
3339 if (switchOp
== JSOP_TABLESWITCH
) {
3340 /* Fill in switch bounds, which we know fit in 16-bit offsets. */
3341 SET_JUMP_OFFSET(pc
, low
);
3342 pc
+= JUMP_OFFSET_LEN
;
3343 SET_JUMP_OFFSET(pc
, high
);
3344 pc
+= JUMP_OFFSET_LEN
;
3347 * Use malloc to avoid arena bloat for programs with many switches.
3348 * We free table if non-null at label out, so all control flow must
3349 * exit this function through goto out or goto bad.
3351 if (tableLength
!= 0) {
3352 tableSize
= (size_t)tableLength
* sizeof *table
;
3353 table
= (JSParseNode
**) cx
->malloc(tableSize
);
3356 memset(table
, 0, tableSize
);
3357 for (pn3
= pn2
->pn_head
; pn3
; pn3
= pn3
->pn_next
) {
3358 if (pn3
->pn_type
== TOK_DEFAULT
)
3360 i
= JSVAL_TO_INT(pn3
->pn_val
);
3362 JS_ASSERT((uint32
)i
< tableLength
);
3367 JS_ASSERT(switchOp
== JSOP_LOOKUPSWITCH
);
3369 /* Fill in the number of cases. */
3370 SET_INDEX(pc
, caseCount
);
3375 * After this point, all control flow involving JSOP_TABLESWITCH
3376 * must set ok and goto out to exit this function. To keep things
3377 * simple, all switchOp cases exit that way.
3379 MUST_FLOW_THROUGH("out");
3382 * We have already generated at least one big jump so we must
3383 * explicitly add span dependencies for the switch jumps. When
3384 * called below, js_SetJumpOffset can only do it when patching
3385 * the first big jump or when cg->spanDeps is null.
3387 if (!AddSwitchSpanDeps(cx
, cg
, CG_CODE(cg
, top
)))
3391 if (constPropagated
) {
3393 * Skip switchOp, as we are not setting jump offsets in the two
3394 * for loops below. We'll restore CG_NEXT(cg) from savepc after,
3395 * unless there was an error.
3397 savepc
= CG_NEXT(cg
);
3398 CG_NEXT(cg
) = pc
+ 1;
3399 if (switchOp
== JSOP_TABLESWITCH
) {
3400 for (i
= 0; i
< (jsint
)tableLength
; i
++) {
3403 (pn4
= pn3
->pn_left
) != NULL
&&
3404 pn4
->pn_type
== TOK_NAME
) {
3405 /* Note a propagated constant with the const's name. */
3406 JS_ASSERT(!pn4
->maybeExpr());
3407 ale
= cg
->atomList
.add(cg
->compiler
, pn4
->pn_atom
);
3411 if (js_NewSrcNote2(cx
, cg
, SRC_LABEL
, (ptrdiff_t)
3412 ALE_INDEX(ale
)) < 0) {
3416 pc
+= JUMP_OFFSET_LEN
;
3419 for (pn3
= pn2
->pn_head
; pn3
; pn3
= pn3
->pn_next
) {
3421 if (pn4
&& pn4
->pn_type
== TOK_NAME
) {
3422 /* Note a propagated constant with the const's name. */
3423 JS_ASSERT(!pn4
->maybeExpr());
3424 ale
= cg
->atomList
.add(cg
->compiler
, pn4
->pn_atom
);
3428 if (js_NewSrcNote2(cx
, cg
, SRC_LABEL
, (ptrdiff_t)
3429 ALE_INDEX(ale
)) < 0) {
3433 pc
+= INDEX_LEN
+ JUMP_OFFSET_LEN
;
3436 CG_NEXT(cg
) = savepc
;
3440 /* Emit code for each case's statements, copying pn_offset up to pn3. */
3441 for (pn3
= pn2
->pn_head
; pn3
; pn3
= pn3
->pn_next
) {
3442 if (switchOp
== JSOP_CONDSWITCH
&& pn3
->pn_type
!= TOK_DEFAULT
)
3443 CHECK_AND_SET_JUMP_OFFSET_AT_CUSTOM(cx
, cg
, pn3
->pn_offset
, goto bad
);
3444 pn4
= pn3
->pn_right
;
3445 ok
= js_EmitTree(cx
, cg
, pn4
);
3448 pn3
->pn_offset
= pn4
->pn_offset
;
3449 if (pn3
->pn_type
== TOK_DEFAULT
)
3450 off
= pn3
->pn_offset
- top
;
3454 /* If no default case, offset for default is to end of switch. */
3455 off
= CG_OFFSET(cg
) - top
;
3458 /* We better have set "off" by now. */
3459 JS_ASSERT(off
!= -1);
3461 /* Set the default offset (to end of switch if no default). */
3462 if (switchOp
== JSOP_CONDSWITCH
) {
3464 JS_ASSERT(defaultOffset
!= -1);
3465 ok
= js_SetJumpOffset(cx
, cg
, CG_CODE(cg
, defaultOffset
),
3466 off
- (defaultOffset
- top
));
3470 pc
= CG_CODE(cg
, top
);
3471 ok
= js_SetJumpOffset(cx
, cg
, pc
, off
);
3474 pc
+= JUMP_OFFSET_LEN
;
3477 /* Set the SRC_SWITCH note's offset operand to tell end of switch. */
3478 off
= CG_OFFSET(cg
) - top
;
3479 ok
= js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 0, off
);
3483 if (switchOp
== JSOP_TABLESWITCH
) {
3484 /* Skip over the already-initialized switch bounds. */
3485 pc
+= 2 * JUMP_OFFSET_LEN
;
3487 /* Fill in the jump table, if there is one. */
3488 for (i
= 0; i
< (jsint
)tableLength
; i
++) {
3490 off
= pn3
? pn3
->pn_offset
- top
: 0;
3491 ok
= js_SetJumpOffset(cx
, cg
, pc
, off
);
3494 pc
+= JUMP_OFFSET_LEN
;
3496 } else if (switchOp
== JSOP_LOOKUPSWITCH
) {
3497 /* Skip over the already-initialized number of cases. */
3500 for (pn3
= pn2
->pn_head
; pn3
; pn3
= pn3
->pn_next
) {
3501 if (pn3
->pn_type
== TOK_DEFAULT
)
3503 if (!js_AtomizePrimitiveValue(cx
, pn3
->pn_val
, &atom
))
3505 ale
= cg
->atomList
.add(cg
->compiler
, atom
);
3508 SET_INDEX(pc
, ALE_INDEX(ale
));
3511 off
= pn3
->pn_offset
- top
;
3512 ok
= js_SetJumpOffset(cx
, cg
, pc
, off
);
3515 pc
+= JUMP_OFFSET_LEN
;
3523 ok
= js_PopStatementCG(cx
, cg
);
3525 #if JS_HAS_BLOCK_SCOPE
3526 if (ok
&& pn
->pn_right
->pn_type
== TOK_LEXICALSCOPE
)
3527 EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK
, count
);
3538 js_EmitFunctionScript(JSContext
*cx
, JSCodeGenerator
*cg
, JSParseNode
*body
)
3540 if (cg
->flags
& TCF_FUN_IS_GENERATOR
) {
3541 /* JSOP_GENERATOR must be the first instruction. */
3542 CG_SWITCH_TO_PROLOG(cg
);
3543 JS_ASSERT(CG_NEXT(cg
) == CG_BASE(cg
));
3544 if (js_Emit1(cx
, cg
, JSOP_GENERATOR
) < 0)
3546 CG_SWITCH_TO_MAIN(cg
);
3549 * Emit a trace hint opcode only if not in a generator, since generators
3550 * are not yet traced and both want to be the first instruction.
3552 if (js_Emit1(cx
, cg
, JSOP_TRACE
) < 0)
3556 return js_EmitTree(cx
, cg
, body
) &&
3557 js_Emit1(cx
, cg
, JSOP_STOP
) >= 0 &&
3558 js_NewScriptFromCG(cx
, cg
);
3561 /* A macro for inlining at the top of js_EmitTree (whence it came). */
3562 #define UPDATE_LINE_NUMBER_NOTES(cx, cg, line) \
3564 uintN line_ = (line); \
3565 uintN delta_ = line_ - CG_CURRENT_LINE(cg); \
3566 if (delta_ != 0) { \
3568 * Encode any change in the current source line number by using \
3569 * either several SRC_NEWLINE notes or just one SRC_SETLINE note, \
3570 * whichever consumes less space. \
3572 * NB: We handle backward line number deltas (possible with for \
3573 * loops where the update part is emitted after the body, but its \
3574 * line number is <= any line number in the body) here by letting \
3575 * unsigned delta_ wrap to a very large number, which triggers a \
3578 CG_CURRENT_LINE(cg) = line_; \
3579 if (delta_ >= (uintN)(2 + ((line_ > SN_3BYTE_OFFSET_MASK)<<1))) { \
3580 if (js_NewSrcNote2(cx, cg, SRC_SETLINE, (ptrdiff_t)line_) < 0)\
3584 if (js_NewSrcNote(cx, cg, SRC_NEWLINE) < 0) \
3586 } while (--delta_ != 0); \
3591 /* A function, so that we avoid macro-bloating all the other callsites. */
3593 UpdateLineNumberNotes(JSContext
*cx
, JSCodeGenerator
*cg
, uintN line
)
3595 UPDATE_LINE_NUMBER_NOTES(cx
, cg
, line
);
3600 MaybeEmitVarDecl(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp prologOp
,
3601 JSParseNode
*pn
, jsatomid
*result
)
3604 JSAtomListElement
*ale
;
3606 if (pn
->pn_cookie
!= FREE_UPVAR_COOKIE
) {
3607 atomIndex
= (jsatomid
) UPVAR_FRAME_SLOT(pn
->pn_cookie
);
3609 ale
= cg
->atomList
.add(cg
->compiler
, pn
->pn_atom
);
3612 atomIndex
= ALE_INDEX(ale
);
3615 if (JOF_OPTYPE(pn
->pn_op
) == JOF_ATOM
&&
3616 (!(cg
->flags
& TCF_IN_FUNCTION
) || (cg
->flags
& TCF_FUN_HEAVYWEIGHT
))) {
3617 CG_SWITCH_TO_PROLOG(cg
);
3618 if (!UpdateLineNumberNotes(cx
, cg
, pn
->pn_pos
.begin
.lineno
))
3620 EMIT_INDEX_OP(prologOp
, atomIndex
);
3621 CG_SWITCH_TO_MAIN(cg
);
3625 *result
= atomIndex
;
3629 #if JS_HAS_DESTRUCTURING
3632 (*DestructuringDeclEmitter
)(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp prologOp
,
3636 EmitDestructuringDecl(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp prologOp
,
3639 JS_ASSERT(pn
->pn_type
== TOK_NAME
);
3640 if (!BindNameToSlot(cx
, cg
, pn
))
3643 JS_ASSERT(PN_OP(pn
) != JSOP_ARGUMENTS
&& PN_OP(pn
) != JSOP_CALLEE
);
3644 return MaybeEmitVarDecl(cx
, cg
, prologOp
, pn
, NULL
);
3648 EmitDestructuringDecls(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp prologOp
,
3651 JSParseNode
*pn2
, *pn3
;
3652 DestructuringDeclEmitter emitter
;
3654 if (pn
->pn_type
== TOK_RB
) {
3655 for (pn2
= pn
->pn_head
; pn2
; pn2
= pn2
->pn_next
) {
3656 if (pn2
->pn_type
== TOK_COMMA
)
3658 emitter
= (pn2
->pn_type
== TOK_NAME
)
3659 ? EmitDestructuringDecl
3660 : EmitDestructuringDecls
;
3661 if (!emitter(cx
, cg
, prologOp
, pn2
))
3665 JS_ASSERT(pn
->pn_type
== TOK_RC
);
3666 for (pn2
= pn
->pn_head
; pn2
; pn2
= pn2
->pn_next
) {
3667 pn3
= pn2
->pn_right
;
3668 emitter
= (pn3
->pn_type
== TOK_NAME
)
3669 ? EmitDestructuringDecl
3670 : EmitDestructuringDecls
;
3671 if (!emitter(cx
, cg
, prologOp
, pn3
))
3679 EmitDestructuringOpsHelper(JSContext
*cx
, JSCodeGenerator
*cg
, JSParseNode
*pn
);
3682 EmitDestructuringLHS(JSContext
*cx
, JSCodeGenerator
*cg
, JSParseNode
*pn
)
3687 * Now emit the lvalue opcode sequence. If the lvalue is a nested
3688 * destructuring initialiser-form, call ourselves to handle it, then
3689 * pop the matched value. Otherwise emit an lvalue bytecode sequence
3690 * ending with a JSOP_ENUMELEM or equivalent op.
3692 if (pn
->pn_type
== TOK_RB
|| pn
->pn_type
== TOK_RC
) {
3693 if (!EmitDestructuringOpsHelper(cx
, cg
, pn
))
3695 if (js_Emit1(cx
, cg
, JSOP_POP
) < 0)
3698 if (pn
->pn_type
== TOK_NAME
) {
3699 if (!BindNameToSlot(cx
, cg
, pn
))
3701 if (pn
->isConst() && !pn
->isInitialized())
3702 return js_Emit1(cx
, cg
, JSOP_POP
) >= 0;
3705 switch (pn
->pn_op
) {
3708 * NB: pn is a PN_NAME node, not a PN_BINARY. Nevertheless,
3709 * we want to emit JSOP_ENUMELEM, which has format JOF_ELEM.
3710 * So here and for JSOP_ENUMCONSTELEM, we use EmitElemOp.
3712 if (!EmitElemOp(cx
, pn
, JSOP_ENUMELEM
, cg
))
3717 if (!EmitElemOp(cx
, pn
, JSOP_ENUMCONSTELEM
, cg
))
3722 slot
= (jsuint
) pn
->pn_cookie
;
3723 EMIT_UINT16_IMM_OP(JSOP_SETLOCALPOP
, slot
);
3728 slot
= (jsuint
) pn
->pn_cookie
;
3729 EMIT_UINT16_IMM_OP(PN_OP(pn
), slot
);
3730 if (js_Emit1(cx
, cg
, JSOP_POP
) < 0)
3738 top
= CG_OFFSET(cg
);
3739 if (!js_EmitTree(cx
, cg
, pn
))
3741 if (js_NewSrcNote2(cx
, cg
, SRC_PCBASE
, CG_OFFSET(cg
) - top
) < 0)
3743 if (js_Emit1(cx
, cg
, JSOP_ENUMELEM
) < 0)
3757 * Recursive helper for EmitDestructuringOps.
3759 * Given a value to destructure on the stack, walk over an object or array
3760 * initialiser at pn, emitting bytecodes to match property values and store
3761 * them in the lvalues identified by the matched property names.
3764 EmitDestructuringOpsHelper(JSContext
*cx
, JSCodeGenerator
*cg
, JSParseNode
*pn
)
3767 JSParseNode
*pn2
, *pn3
;
3771 intN stackDepth
= cg
->stackDepth
;
3772 JS_ASSERT(stackDepth
!= 0);
3773 JS_ASSERT(pn
->pn_arity
== PN_LIST
);
3774 JS_ASSERT(pn
->pn_type
== TOK_RB
|| pn
->pn_type
== TOK_RC
);
3777 if (pn
->pn_count
== 0) {
3778 /* Emit a DUP;POP sequence for the decompiler. */
3779 return js_Emit1(cx
, cg
, JSOP_DUP
) >= 0 &&
3780 js_Emit1(cx
, cg
, JSOP_POP
) >= 0;
3784 for (pn2
= pn
->pn_head
; pn2
; pn2
= pn2
->pn_next
) {
3786 * Duplicate the value being destructured to use as a reference base.
3787 * If dup is not the first one, annotate it for the decompiler.
3789 if (pn2
!= pn
->pn_head
&& js_NewSrcNote(cx
, cg
, SRC_CONTINUE
) < 0)
3791 if (js_Emit1(cx
, cg
, JSOP_DUP
) < 0)
3795 * Now push the property name currently being matched, which is either
3796 * the array initialiser's current index, or the current property name
3797 * "label" on the left of a colon in the object initialiser. Set pn3
3798 * to the lvalue node, which is in the value-initializing position.
3801 if (pn
->pn_type
== TOK_RB
) {
3802 if (!EmitNumberOp(cx
, index
, cg
))
3806 JS_ASSERT(pn
->pn_type
== TOK_RC
);
3807 JS_ASSERT(pn2
->pn_type
== TOK_COLON
);
3809 if (pn3
->pn_type
== TOK_NUMBER
) {
3811 * If we are emitting an object destructuring initialiser,
3812 * annotate the index op with SRC_INITPROP so we know we are
3813 * not decompiling an array initialiser.
3815 if (js_NewSrcNote(cx
, cg
, SRC_INITPROP
) < 0)
3817 if (!EmitNumberOp(cx
, pn3
->pn_dval
, cg
))
3820 JS_ASSERT(pn3
->pn_type
== TOK_STRING
||
3821 pn3
->pn_type
== TOK_NAME
);
3822 if (!EmitAtomOp(cx
, pn3
, JSOP_GETPROP
, cg
))
3824 doElemOp
= JS_FALSE
;
3826 pn3
= pn2
->pn_right
;
3831 * Ok, get the value of the matching property name. This leaves
3832 * that value on top of the value being destructured, so the stack
3833 * is one deeper than when we started.
3835 if (js_Emit1(cx
, cg
, JSOP_GETELEM
) < 0)
3837 JS_ASSERT(cg
->stackDepth
== stackDepth
+ 1);
3840 /* Nullary comma node makes a hole in the array destructurer. */
3841 if (pn3
->pn_type
== TOK_COMMA
&& pn3
->pn_arity
== PN_NULLARY
) {
3842 JS_ASSERT(pn
->pn_type
== TOK_RB
);
3843 JS_ASSERT(pn2
== pn3
);
3844 if (js_Emit1(cx
, cg
, JSOP_POP
) < 0)
3847 if (!EmitDestructuringLHS(cx
, cg
, pn3
))
3851 JS_ASSERT(cg
->stackDepth
== stackDepth
);
3859 OpToDeclType(JSOp op
)
3863 return SRC_DECL_LET
;
3865 return SRC_DECL_CONST
;
3867 return SRC_DECL_VAR
;
3869 return SRC_DECL_NONE
;
/*
 * Emit code for a destructuring pattern: write a SRC_DESTRUCT source note
 * annotated with the declaration type (via OpToDeclType(prologOp)), then
 * delegate the actual assignments and stack shuffling to the recursive
 * EmitDestructuringOpsHelper.
 */
3874 EmitDestructuringOps(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp prologOp
,
3878 * If we're called from a variable declaration, help the decompiler by
3879 * annotating the first JSOP_DUP that EmitDestructuringOpsHelper emits.
3880 * If the destructuring initialiser is empty, our helper will emit a
3881 * JSOP_DUP followed by a JSOP_POP for the decompiler.
/* js_NewSrcNote2 returns a negative index on failure; the error-return
 * statement has been elided in this extract. */
3883 if (js_NewSrcNote2(cx
, cg
, SRC_DESTRUCT
, OpToDeclType(prologOp
)) < 0)
3887 * Call our recursive helper to emit the destructuring assignments and
3888 * related stack manipulations.
3890 return EmitDestructuringOpsHelper(cx
, cg
, pn
);
/*
 * Emit an optimized "group assignment" for [a, b] = [x, y]: push every rhs
 * element onto the stack, then store each stack slot into the matching lhs
 * element, and finally pop the pushed slots (JSOP_POPN) restoring the
 * original stack depth. Only reached via MaybeEmitGroupAssignment, which
 * guarantees a non-holey rhs and lhs->pn_count <= rhs->pn_count.
 */
3894 EmitGroupAssignment(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp prologOp
,
3895 JSParseNode
*lhs
, JSParseNode
*rhs
)
3897 jsuint depth
, limit
, i
, nslots
;
/* Remember the entry stack depth; `limit` grows as rhs values are pushed. */
3900 depth
= limit
= (uintN
) cg
->stackDepth
;
/* Pass 1: evaluate and push every element of the rhs array initialiser. */
3901 for (pn
= rhs
->pn_head
; pn
; pn
= pn
->pn_next
) {
/* Stack slots are addressed with 16-bit immediates; refuse overflow. */
3902 if (limit
== JS_BIT(16)) {
3903 js_ReportCompileErrorNumber(cx
, CG_TS(cg
), rhs
, JSREPORT_ERROR
,
3904 JSMSG_ARRAY_INIT_TOO_BIG
);
3908 /* MaybeEmitGroupAssignment won't call us if rhs is holey. */
3909 JS_ASSERT(!(pn
->pn_type
== TOK_COMMA
&& pn
->pn_arity
== PN_NULLARY
));
3910 if (!js_EmitTree(cx
, cg
, pn
))
/* Annotate the group assignment for the decompiler. */
3915 if (js_NewSrcNote2(cx
, cg
, SRC_GROUPASSIGN
, OpToDeclType(prologOp
)) < 0)
/* Pass 2: assign each pushed slot to the corresponding lhs element.
 * NOTE(review): the initialization of `i` before this loop has been elided
 * in this extract (original lines 3916-3918 missing). */
3919 for (pn
= lhs
->pn_head
; pn
; pn
= pn
->pn_next
, ++i
) {
3920 /* MaybeEmitGroupAssignment requires lhs->pn_count <= rhs->pn_count. */
3921 JS_ASSERT(i
< limit
);
3922 jsint slot
= AdjustBlockSlot(cx
, cg
, i
);
3925 EMIT_UINT16_IMM_OP(JSOP_GETLOCAL
, slot
);
/* A nullary TOK_COMMA is an elision hole in the lhs: discard the value. */
3927 if (pn
->pn_type
== TOK_COMMA
&& pn
->pn_arity
== PN_NULLARY
) {
3928 if (js_Emit1(cx
, cg
, JSOP_POP
) < 0)
3931 if (!EmitDestructuringLHS(cx
, cg
, pn
))
/* Pop all pushed rhs slots at once and restore the tracked depth. */
3936 nslots
= limit
- depth
;
3937 EMIT_UINT16_IMM_OP(JSOP_POPN
, nslots
);
3938 cg
->stackDepth
= (uintN
) depth
;
/*
 * Decide whether the assignment `pn` qualifies for the group-assignment fast
 * path and, if so, emit it via EmitGroupAssignment. The conditions checked
 * below: both sides are array initialisers (TOK_RB), the rhs has no holes
 * (PNX_HOLEY clear), and the lhs has no more elements than the rhs.
 */
3943 * Helper called with pop out param initialized to a JSOP_POP* opcode. If we
3944 * can emit a group assignment sequence, which results in 0 stack depth delta,
3945 * we set *pop to JSOP_NOP so callers can veto emitting pn followed by a pop.
3948 MaybeEmitGroupAssignment(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp prologOp
,
3949 JSParseNode
*pn
, JSOp
*pop
)
3951 JSParseNode
*lhs
, *rhs
;
3953 JS_ASSERT(pn
->pn_type
== TOK_ASSIGN
);
3954 JS_ASSERT(*pop
== JSOP_POP
|| *pop
== JSOP_POPV
);
/* NOTE(review): the statements initializing lhs/rhs (presumably from
 * pn->pn_left / pn->pn_right) have been elided in this extract -- the
 * embedded original line numbers jump 3954 -> 3957. */
3957 if (lhs
->pn_type
== TOK_RB
&& rhs
->pn_type
== TOK_RB
&&
3958 !(rhs
->pn_xflags
& PNX_HOLEY
) &&
3959 lhs
->pn_count
<= rhs
->pn_count
) {
3960 if (!EmitGroupAssignment(cx
, cg
, prologOp
, lhs
, rhs
))
3967 #endif /* JS_HAS_DESTRUCTURING */
/*
 * Emit bytecode for a var/const/let declaration list `pn`. A 'let'
 * declaration is recognized by pn->pn_op == JSOP_NOP (pseudo-prolog op);
 * PNX_FORINVAR marks a declaration hoisted out of a for-in head. When
 * emitting the head of a let block/expression (inLetHead), *headNoteIndex
 * receives the index of a SRC_DECL note on the closing pop, or -1.
 * NOTE(review): this extract has many interior lines elided (the embedded
 * original line numbers are discontinuous), so braces, labels and several
 * statements -- including the declarations of op/atomIndex and the `stmt`
 * initialization -- are missing from view.
 */
3970 EmitVariables(JSContext
*cx
, JSCodeGenerator
*cg
, JSParseNode
*pn
,
3971 JSBool inLetHead
, ptrdiff_t *headNoteIndex
)
3973 bool let
, forInVar
, first
;
3974 #if JS_HAS_BLOCK_SCOPE
3975 bool forInLet
, popScope
;
3976 JSStmtInfo
*stmt
, *scopeStmt
;
3978 ptrdiff_t off
, noteIndex
, tmp
;
3979 JSParseNode
*pn2
, *pn3
, *next
;
3984 /* Default in case of JS_HAS_BLOCK_SCOPE early return, below. */
3985 *headNoteIndex
= -1;
3988 * Let blocks and expressions have a parenthesized head in which the new
3989 * scope is not yet open. Initializer evaluation uses the parent node's
3990 * lexical scope. If popScope is true below, then we hide the top lexical
3991 * block from any calls to BindNameToSlot hiding in pn2->pn_expr so that
3992 * it won't find any names in the new let block.
3994 * The same goes for let declarations in the head of any kind of for loop.
3995 * Unlike a let declaration 'let x = i' within a block, where x is hoisted
3996 * to the start of the block, a 'for (let x = i...) ...' loop evaluates i
3997 * in the containing scope, and puts x in the loop body's scope.
/* 'let' uses the pseudo-prolog JSOP_NOP as its list op. */
3999 let
= (pn
->pn_op
== JSOP_NOP
);
4000 forInVar
= (pn
->pn_xflags
& PNX_FORINVAR
) != 0;
4001 #if JS_HAS_BLOCK_SCOPE
4002 forInLet
= let
&& forInVar
;
4003 popScope
= (inLetHead
|| (let
&& (cg
->flags
& TCF_IN_FOR_INIT
)));
4006 scopeStmt
= cg
->topScopeStmt
;
4009 else stmt
= scopeStmt
= NULL
; /* quell GCC overwarning */
4011 JS_ASSERT(!popScope
|| let
);
4014 off
= noteIndex
= -1;
/* Iterate the declaration list; `first` marks the head element. */
4015 for (pn2
= pn
->pn_head
; ; pn2
= next
) {
4016 first
= pn2
== pn
->pn_head
;
4017 next
= pn2
->pn_next
;
/* Non-TOK_NAME elements: destructuring patterns or assignments. */
4019 if (pn2
->pn_type
!= TOK_NAME
) {
4020 #if JS_HAS_DESTRUCTURING
4021 if (pn2
->pn_type
== TOK_RB
|| pn2
->pn_type
== TOK_RC
) {
4023 * Emit variable binding ops, but not destructuring ops.
4024 * The parser (see Variables, jsparse.c) has ensured that
4025 * our caller will be the TOK_FOR/TOK_IN case in js_EmitTree,
4026 * and that case will emit the destructuring code only after
4027 * emitting an enumerating opcode and a branch that tests
4028 * whether the enumeration ended.
4030 JS_ASSERT(forInVar
);
4031 JS_ASSERT(pn
->pn_count
== 1);
4032 if (!EmitDestructuringDecls(cx
, cg
, PN_OP(pn
), pn2
))
4039 * A destructuring initialiser assignment preceded by var will
4040 * never occur to the left of 'in' in a for-in loop. As with 'for
4041 * (var x = i in o)...', this will cause the entire 'var [a, b] =
4042 * i' to be hoisted out of the loop.
4044 JS_ASSERT(pn2
->pn_type
== TOK_ASSIGN
);
4045 JS_ASSERT(!forInVar
);
4048 * To allow the front end to rewrite var f = x; as f = x; when a
4049 * function f(){} precedes the var, detect simple name assignment
4050 * here and initialize the name.
4052 #if !JS_HAS_DESTRUCTURING
4053 JS_ASSERT(pn2
->pn_left
->pn_type
== TOK_NAME
);
4055 if (pn2
->pn_left
->pn_type
== TOK_NAME
)
4058 pn3
= pn2
->pn_right
;
4063 #if JS_HAS_DESTRUCTURING
4064 if (pn
->pn_count
== 1) {
4066 * If this is the only destructuring assignment in the list,
4067 * try to optimize to a group assignment. If we're in a let
4068 * head, pass JSOP_POP rather than the pseudo-prolog JSOP_NOP
4069 * in pn->pn_op, to suppress a second (and misplaced) 'let'.
4071 JS_ASSERT(noteIndex
< 0 && !pn2
->pn_next
);
4073 if (!MaybeEmitGroupAssignment(cx
, cg
,
4074 inLetHead
? JSOP_POP
: PN_OP(pn
),
/* MaybeEmitGroupAssignment sets op to JSOP_NOP when it took the
 * group-assignment fast path; record that in pn_xflags. */
4078 if (op
== JSOP_NOP
) {
4079 pn
->pn_xflags
= (pn
->pn_xflags
& ~PNX_POPVAR
) | PNX_GROUPINIT
;
4085 if (!EmitDestructuringDecls(cx
, cg
, PN_OP(pn
), pn3
))
4088 if (!js_EmitTree(cx
, cg
, pn2
->pn_right
))
4092 * Veto pn->pn_op if inLetHead to avoid emitting a SRC_DESTRUCT
4093 * that's redundant with respect to the SRC_DECL/SRC_DECL_LET that
4094 * we will emit at the bottom of this function.
4096 if (!EmitDestructuringOps(cx
, cg
,
4097 inLetHead
? JSOP_POP
: PN_OP(pn
),
4106 * Load initializer early to share code above that jumps to do_name.
4107 * NB: if this var redeclares an existing binding, then pn2 is linked
4108 * on its definition's use-chain and pn_expr has been overlayed with
4111 pn3
= pn2
->maybeExpr();
4114 if (!BindNameToSlot(cx
, cg
, pn2
))
4118 if (op
== JSOP_ARGUMENTS
) {
4119 /* JSOP_ARGUMENTS => no initializer */
4120 JS_ASSERT(!pn3
&& !let
);
4123 atomIndex
= 0; /* quell GCC overwarning */
4126 JS_ASSERT(op
!= JSOP_CALLEE
);
4127 JS_ASSERT(pn2
->pn_cookie
!= FREE_UPVAR_COOKIE
|| !let
);
4128 if (!MaybeEmitVarDecl(cx
, cg
, PN_OP(pn
), pn2
, &atomIndex
))
4132 JS_ASSERT(!forInVar
);
4133 if (op
== JSOP_SETNAME
) {
4135 EMIT_INDEX_OP(JSOP_BINDNAME
, atomIndex
);
4137 if (pn
->pn_op
== JSOP_DEFCONST
&&
4138 !js_DefineCompileTimeConstant(cx
, cg
, pn2
->pn_atom
, pn3
)) {
4142 #if JS_HAS_BLOCK_SCOPE
4143 /* Evaluate expr in the outer lexical scope if requested. */
4145 cg
->topStmt
= stmt
->down
;
4146 cg
->topScopeStmt
= scopeStmt
->downScope
;
/* Initializers are not in a for-init context even if the decl is. */
4150 oldflags
= cg
->flags
;
4151 cg
->flags
&= ~TCF_IN_FOR_INIT
;
4152 if (!js_EmitTree(cx
, cg
, pn3
))
4154 cg
->flags
|= oldflags
& TCF_IN_FOR_INIT
;
4156 #if JS_HAS_BLOCK_SCOPE
/* Restore the scope chain hidden above for popScope. */
4159 cg
->topScopeStmt
= scopeStmt
;
4166 * The parser rewrites 'for (var x = i in o)' to hoist 'var x = i' --
4167 * likewise 'for (let x = i in o)' becomes 'i; for (let x in o)' using
4168 * a TOK_SEQ node to make the two statements appear as one. Therefore
4169 * if this declaration is part of a for-in loop head, we do not need to
4170 * emit op or any source note. Our caller, the TOK_FOR/TOK_IN case in
4171 * js_EmitTree, will annotate appropriately.
4173 JS_ASSERT_IF(pn2
->pn_defn
, pn3
== pn2
->pn_expr
);
4175 JS_ASSERT(pn
->pn_count
== 1);
/* Annotate the declaration kind for the decompiler. */
4182 js_NewSrcNote2(cx
, cg
, SRC_DECL
,
4183 (pn
->pn_op
== JSOP_DEFCONST
)
4185 : (pn
->pn_op
== JSOP_DEFVAR
)
4187 : SRC_DECL_LET
) < 0) {
4190 if (op
== JSOP_ARGUMENTS
) {
4191 if (js_Emit1(cx
, cg
, op
) < 0)
4193 } else if (pn2
->pn_cookie
!= FREE_UPVAR_COOKIE
) {
4194 EMIT_UINT16_IMM_OP(op
, atomIndex
);
4196 EMIT_INDEX_OP(op
, atomIndex
);
4199 #if JS_HAS_DESTRUCTURING
/* Link consecutive pops with SRC_PCDELTA notes so the decompiler can
 * walk the declaration list. */
4202 tmp
= CG_OFFSET(cg
);
4203 if (noteIndex
>= 0) {
4204 if (!js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 0, tmp
-off
))
4210 noteIndex
= js_NewSrcNote2(cx
, cg
, SRC_PCDELTA
, 0);
4211 if (noteIndex
< 0 || js_Emit1(cx
, cg
, JSOP_POP
) < 0)
4215 /* If this is a let head, emit and return a srcnote on the pop. */
4217 *headNoteIndex
= js_NewSrcNote(cx
, cg
, SRC_DECL
);
4218 if (*headNoteIndex
< 0)
4220 if (!(pn
->pn_xflags
& PNX_POPVAR
))
4221 return js_Emit1(cx
, cg
, JSOP_NOP
) >= 0;
/* Emit the final pop only when PNX_POPVAR requests it. */
4224 return !(pn
->pn_xflags
& PNX_POPVAR
) || js_Emit1(cx
, cg
, JSOP_POP
) >= 0;
/*
 * Debug-only helper (compiled only for the DEBUG_brendan / DEBUG_mrbkap
 * builds): walk the code generator's source notes, accumulating each note's
 * pc delta, and report whether a "gettable" note lands exactly on the
 * current bytecode offset.
 */
4227 #if defined DEBUG_brendan || defined DEBUG_mrbkap
4229 GettableNoteForNextOp(JSCodeGenerator
*cg
)
4231 ptrdiff_t offset
, target
;
4232 jssrcnote
*sn
, *end
;
/* The offset the next emitted op will occupy. */
4235 target
= CG_OFFSET(cg
);
4236 for (sn
= CG_NOTES(cg
), end
= sn
+ CG_NOTE_COUNT(cg
); sn
< end
;
4238 if (offset
== target
&& SN_IS_GETTABLE(sn
))
/* Note offsets are deltas; accumulate to get absolute positions. */
4240 offset
+= SN_DELTA(sn
);
/*
 * Emit a SRC_FUNCDEF source note (carrying the function's object index) on a
 * JSOP_NOP so the decompiler can render a top-level function definition in
 * place. Returns false on OOM from either the note or the op.
 */
4246 /* Top-level named functions need a nop for decompilation. */
4248 EmitFunctionDefNop(JSContext
*cx
, JSCodeGenerator
*cg
, uintN index
)
4250 return js_NewSrcNote2(cx
, cg
, SRC_FUNCDEF
, (ptrdiff_t)index
) >= 0 &&
4251 js_Emit1(cx
, cg
, JSOP_NOP
) >= 0;
/*
 * Emit JSOP_NEWINIT with the prototype key (Array/Object) as its immediate,
 * starting an object or array initialiser. Under JS_HAS_SHARP_VARS, also
 * emit the sharp-variable bookkeeping ops: JSOP_SHARPINIT for a non-empty
 * initialiser in a script using sharps, and JSOP_DEFSHARP when this literal
 * is itself a #n= definition (sharpnum >= 0).
 */
4255 EmitNewInit(JSContext
*cx
, JSCodeGenerator
*cg
, JSProtoKey key
, JSParseNode
*pn
, int sharpnum
)
4257 if (js_Emit2(cx
, cg
, JSOP_NEWINIT
, (jsbytecode
) key
) < 0)
4259 #if JS_HAS_SHARP_VARS
4260 if (cg
->hasSharps()) {
4261 if (pn
->pn_count
!= 0)
4262 EMIT_UINT16_IMM_OP(JSOP_SHARPINIT
, cg
->sharpSlotBase
);
/* NOTE(review): presumably guarded by sharpnum >= 0 -- the condition line
 * (original 4263) has been elided in this extract. */
4264 EMIT_UINT16PAIR_IMM_OP(JSOP_DEFSHARP
, cg
->sharpSlotBase
, sharpnum
);
4266 JS_ASSERT(sharpnum
< 0);
/*
 * Close an object/array initialiser with JSOP_ENDINIT. Under
 * JS_HAS_SHARP_VARS, first emit JSOP_SHARPINIT for non-empty initialisers
 * in scripts that use sharp variables, matching the one EmitNewInit emitted.
 */
4273 EmitEndInit(JSContext
*cx
, JSCodeGenerator
*cg
, uint32 count
)
4275 #if JS_HAS_SHARP_VARS
4276 /* Emit an op for sharp array cleanup and decompilation. */
4277 if (cg
->hasSharps() && count
!= 0)
4278 EMIT_UINT16_IMM_OP(JSOP_SHARPINIT
, cg
->sharpSlotBase
);
4280 return js_Emit1(cx
, cg
, JSOP_ENDINIT
) >= 0;
/*
 * SRC_FOR notes bias their offsets by 1, the length of the JSOP_NOP or
 * JSOP_POP emitted at the head of a for loop (see the "offsetBias 1
 * (JSOP_{NOP,POP}_LENGTH)" comment in the TOK_FOR case below); these
 * compile-time asserts pin both opcodes at one byte so that bias stays valid.
 */
4283 /* See the SRC_FOR source note offsetBias comments later in this file. */
4284 JS_STATIC_ASSERT(JSOP_NOP_LENGTH
== 1);
4285 JS_STATIC_ASSERT(JSOP_POP_LENGTH
== 1);
4288 js_EmitTree(JSContext
*cx
, JSCodeGenerator
*cg
, JSParseNode
*pn
)
4290 JSBool ok
, useful
, wantval
;
4291 JSStmtInfo
*stmt
, stmtInfo
;
4292 ptrdiff_t top
, off
, tmp
, beq
, jmp
;
4293 JSParseNode
*pn2
, *pn3
;
4295 JSAtomListElement
*ale
;
4298 ptrdiff_t noteIndex
;
4299 JSSrcNoteType noteType
;
4304 #if JS_HAS_SHARP_VARS
4308 JS_CHECK_RECURSION(cx
, return JS_FALSE
);
4312 pn
->pn_offset
= top
= CG_OFFSET(cg
);
4314 /* Emit notes to tell the current bytecode's source line number. */
4315 UPDATE_LINE_NUMBER_NOTES(cx
, cg
, pn
->pn_pos
.begin
.lineno
);
4317 switch (pn
->pn_type
) {
4323 #if JS_HAS_XML_SUPPORT
4324 if (pn
->pn_arity
== PN_NULLARY
) {
4325 if (js_Emit1(cx
, cg
, JSOP_GETFUNNS
) < 0)
4331 fun
= (JSFunction
*) pn
->pn_funbox
->object
;
4332 JS_ASSERT(FUN_INTERPRETED(fun
));
4333 if (fun
->u
.i
.script
) {
4335 * This second pass is needed to emit JSOP_NOP with a source note
4336 * for the already-emitted function definition prolog opcode. See
4337 * comments in the TOK_LC case.
4339 JS_ASSERT(pn
->pn_op
== JSOP_NOP
);
4340 JS_ASSERT(cg
->flags
& TCF_IN_FUNCTION
);
4341 if (!EmitFunctionDefNop(cx
, cg
, pn
->pn_index
))
4346 JS_ASSERT_IF(cx
->options
& JSOPTION_ANONFUNFIX
,
4348 (!pn
->pn_used
&& !pn
->isTopLevel()) ||
4349 (fun
->flags
& JSFUN_LAMBDA
));
4351 JS_ASSERT_IF(pn
->pn_funbox
->tcflags
& TCF_FUN_HEAVYWEIGHT
,
4352 FUN_KIND(fun
) == JSFUN_INTERPRETED
);
4354 /* Generate code for the function's body. */
4355 void *cg2mark
= JS_ARENA_MARK(cg
->codePool
);
4357 JS_ARENA_ALLOCATE_TYPE(cg2space
, JSCodeGenerator
, cg
->codePool
);
4359 js_ReportOutOfScriptQuota(cx
);
4362 JSCodeGenerator
*cg2
=
4363 new (cg2space
) JSCodeGenerator(cg
->compiler
,
4364 cg
->codePool
, cg
->notePool
,
4365 pn
->pn_pos
.begin
.lineno
);
4366 cg2
->flags
= pn
->pn_funbox
->tcflags
| TCF_IN_FUNCTION
;
4367 #if JS_HAS_SHARP_VARS
4368 if (cg2
->flags
& TCF_HAS_SHARPS
) {
4369 cg2
->sharpSlotBase
= fun
->sharpSlotBase(cx
);
4370 if (cg2
->sharpSlotBase
< 0)
4375 cg2
->funbox
= pn
->pn_funbox
;
4379 * jsparse.cpp:SetStaticLevel limited static nesting depth to fit in 16
4380 * bits and to reserve the all-ones value, thereby reserving the magic
4381 * FREE_UPVAR_COOKIE value. Note the cg2->staticLevel assignment below.
4383 JS_ASSERT(cg
->staticLevel
< JS_BITMASK(16) - 1);
4384 cg2
->staticLevel
= cg
->staticLevel
+ 1;
4386 /* We measured the max scope depth when we parsed the function. */
4387 JS_SCOPE_DEPTH_METERING(cg2
->maxScopeDepth
= uint16(-1));
4388 if (!js_EmitFunctionScript(cx
, cg2
, pn
->pn_body
))
4391 cg2
->~JSCodeGenerator();
4392 JS_ARENA_RELEASE(cg
->codePool
, cg2mark
);
4397 /* Make the function object a literal in the outer script's pool. */
4398 index
= cg
->objectList
.index(pn
->pn_funbox
);
4400 /* Emit a bytecode pointing to the closure object in its immediate. */
4402 if (op
!= JSOP_NOP
) {
4403 if ((pn
->pn_funbox
->tcflags
& TCF_GENEXP_LAMBDA
) &&
4404 js_NewSrcNote(cx
, cg
, SRC_GENEXP
) < 0) {
4407 EMIT_INDEX_OP(op
, index
);
4412 * For a script we emit the code as we parse. Thus the bytecode for
4413 * top-level functions should go in the prolog to predefine their
4414 * names in the variable object before the already-generated main code
4415 * is executed. This extra work for top-level scripts is not necessary
4416 * when we emit the code for a function. It is fully parsed prior to
4417 * invocation of the emitter and calls to js_EmitTree for function
4418 * definitions can be scheduled before generating the rest of code.
4420 if (!(cg
->flags
& TCF_IN_FUNCTION
)) {
4421 JS_ASSERT(!cg
->topStmt
);
4422 CG_SWITCH_TO_PROLOG(cg
);
4423 op
= FUN_FLAT_CLOSURE(fun
) ? JSOP_DEFFUN_FC
: JSOP_DEFFUN
;
4424 EMIT_INDEX_OP(op
, index
);
4425 CG_SWITCH_TO_MAIN(cg
);
4427 /* Emit NOP for the decompiler. */
4428 if (!EmitFunctionDefNop(cx
, cg
, index
))
4432 JSLocalKind localKind
=
4434 js_LookupLocal(cx
, cg
->fun
, fun
->atom
, &slot
);
4435 JS_ASSERT(localKind
== JSLOCAL_VAR
|| localKind
== JSLOCAL_CONST
);
4436 JS_ASSERT(index
< JS_BIT(20));
4437 pn
->pn_index
= index
;
4438 op
= FUN_FLAT_CLOSURE(fun
) ? JSOP_DEFLOCALFUN_FC
: JSOP_DEFLOCALFUN
;
4439 if (!EmitSlotIndexOp(cx
, op
, slot
, index
, cg
))
4446 ok
= js_EmitTree(cx
, cg
, pn
->last());
4450 JS_ASSERT(cg
->lexdeps
.count
== 0);
4451 JS_ASSERT(pn
->pn_names
.count
!= 0);
4452 cg
->lexdeps
= pn
->pn_names
;
4453 ok
= js_EmitTree(cx
, cg
, pn
->pn_tree
);
4457 /* Initialize so we can detect else-if chains and avoid recursion. */
4458 stmtInfo
.type
= STMT_IF
;
4463 /* Emit code for the condition before pushing stmtInfo. */
4464 if (!js_EmitTree(cx
, cg
, pn
->pn_kid1
))
4466 top
= CG_OFFSET(cg
);
4467 if (stmtInfo
.type
== STMT_IF
) {
4468 js_PushStatement(cg
, &stmtInfo
, STMT_IF
, top
);
4471 * We came here from the goto further below that detects else-if
4472 * chains, so we must mutate stmtInfo back into a STMT_IF record.
4473 * Also (see below for why) we need a note offset for SRC_IF_ELSE
4474 * to help the decompiler. Actually, we need two offsets, one for
4475 * decompiling any else clause and the second for decompiling an
4476 * else-if chain without bracing, overindenting, or incorrectly
4477 * scoping let declarations.
4479 JS_ASSERT(stmtInfo
.type
== STMT_ELSE
);
4480 stmtInfo
.type
= STMT_IF
;
4481 stmtInfo
.update
= top
;
4482 if (!js_SetSrcNoteOffset(cx
, cg
, noteIndex
, 0, jmp
- beq
))
4484 if (!js_SetSrcNoteOffset(cx
, cg
, noteIndex
, 1, top
- jmp
))
4488 /* Emit an annotated branch-if-false around the then part. */
4490 noteIndex
= js_NewSrcNote(cx
, cg
, pn3
? SRC_IF_ELSE
: SRC_IF
);
4493 beq
= EmitJump(cx
, cg
, JSOP_IFEQ
, 0);
4497 /* Emit code for the then and optional else parts. */
4498 if (!js_EmitTree(cx
, cg
, pn
->pn_kid2
))
4501 /* Modify stmtInfo so we know we're in the else part. */
4502 stmtInfo
.type
= STMT_ELSE
;
4505 * Emit a JSOP_BACKPATCH op to jump from the end of our then part
4506 * around the else part. The js_PopStatementCG call at the bottom
4507 * of this switch case will fix up the backpatch chain linked from
4510 jmp
= EmitGoto(cx
, cg
, &stmtInfo
, &stmtInfo
.breaks
, NULL
, SRC_NULL
);
4514 /* Ensure the branch-if-false comes here, then emit the else. */
4515 CHECK_AND_SET_JUMP_OFFSET_AT(cx
, cg
, beq
);
4516 if (pn3
->pn_type
== TOK_IF
) {
4521 if (!js_EmitTree(cx
, cg
, pn3
))
4525 * Annotate SRC_IF_ELSE with the offset from branch to jump, for
4526 * the decompiler's benefit. We can't just "back up" from the pc
4527 * of the else clause, because we don't know whether an extended
4528 * jump was required to leap from the end of the then clause over
4531 if (!js_SetSrcNoteOffset(cx
, cg
, noteIndex
, 0, jmp
- beq
))
4534 /* No else part, fixup the branch-if-false to come here. */
4535 CHECK_AND_SET_JUMP_OFFSET_AT(cx
, cg
, beq
);
4537 ok
= js_PopStatementCG(cx
, cg
);
4541 /* Out of line to avoid bloating js_EmitTree's stack frame size. */
4542 ok
= EmitSwitch(cx
, cg
, pn
, &stmtInfo
);
4547 * Minimize bytecodes issued for one or more iterations by jumping to
4548 * the condition below the body and closing the loop if the condition
4549 * is true with a backward branch. For iteration count i:
4551 * i test at the top test at the bottom
4552 * = =============== ==================
4553 * 0 ifeq-pass goto; ifne-fail
4554 * 1 ifeq-fail; goto; ifne-pass goto; ifne-pass; ifne-fail
4555 * 2 2*(ifeq-fail; goto); ifeq-pass goto; 2*ifne-pass; ifne-fail
4557 * N N*(ifeq-fail; goto); ifeq-pass goto; N*ifne-pass; ifne-fail
4559 * SpiderMonkey, pre-mozilla.org, emitted while parsing and so used
4560 * test at the top. When JSParseNode trees were added during the ES3
4561 * work (1998-9), the code generation scheme was not optimized, and
4562 * the decompiler continued to take advantage of the branch and jump
4563 * that bracketed the body. But given the SRC_WHILE note, it is easy
4564 * to support the more efficient scheme.
4566 js_PushStatement(cg
, &stmtInfo
, STMT_WHILE_LOOP
, top
);
4567 noteIndex
= js_NewSrcNote(cx
, cg
, SRC_WHILE
);
4570 jmp
= EmitJump(cx
, cg
, JSOP_GOTO
, 0);
4573 top
= js_Emit1(cx
, cg
, JSOP_TRACE
);
4576 if (!js_EmitTree(cx
, cg
, pn
->pn_right
))
4578 CHECK_AND_SET_JUMP_OFFSET_AT(cx
, cg
, jmp
);
4579 if (!js_EmitTree(cx
, cg
, pn
->pn_left
))
4581 beq
= EmitJump(cx
, cg
, JSOP_IFNE
, top
- CG_OFFSET(cg
));
4584 if (!js_SetSrcNoteOffset(cx
, cg
, noteIndex
, 0, beq
- jmp
))
4586 ok
= js_PopStatementCG(cx
, cg
);
4590 /* Emit an annotated nop so we know to decompile a 'do' keyword. */
4591 noteIndex
= js_NewSrcNote(cx
, cg
, SRC_WHILE
);
4592 if (noteIndex
< 0 || js_Emit1(cx
, cg
, JSOP_NOP
) < 0)
4595 /* Compile the loop body. */
4596 top
= js_Emit1(cx
, cg
, JSOP_TRACE
);
4599 js_PushStatement(cg
, &stmtInfo
, STMT_DO_LOOP
, top
);
4600 if (!js_EmitTree(cx
, cg
, pn
->pn_left
))
4603 /* Set loop and enclosing label update offsets, for continue. */
4606 stmt
->update
= CG_OFFSET(cg
);
4607 } while ((stmt
= stmt
->down
) != NULL
&& stmt
->type
== STMT_LABEL
);
4609 /* Compile the loop condition, now that continues know where to go. */
4610 if (!js_EmitTree(cx
, cg
, pn
->pn_right
))
4614 * Since we use JSOP_IFNE for other purposes as well as for do-while
4615 * loops, we must store 1 + (beq - top) in the SRC_WHILE note offset,
4616 * and the decompiler must get that delta and decompile recursively.
4618 beq
= EmitJump(cx
, cg
, JSOP_IFNE
, top
- CG_OFFSET(cg
));
4621 if (!js_SetSrcNoteOffset(cx
, cg
, noteIndex
, 0, 1 + (beq
- top
)))
4623 ok
= js_PopStatementCG(cx
, cg
);
4627 beq
= 0; /* suppress gcc warnings */
4630 js_PushStatement(cg
, &stmtInfo
, STMT_FOR_LOOP
, top
);
4632 if (pn2
->pn_type
== TOK_IN
) {
4633 /* Set stmtInfo type for later testing. */
4634 stmtInfo
.type
= STMT_FOR_IN_LOOP
;
4637 * If the left part is 'var x', emit code to define x if necessary
4638 * using a prolog opcode, but do not emit a pop. If the left part
4639 * is 'var x = i', emit prolog code to define x if necessary; then
4640 * emit code to evaluate i, assign the result to x, and pop the
4641 * result off the stack.
4643 * All the logic to do this is implemented in the outer switch's
4644 * TOK_VAR case, conditioned on pn_xflags flags set by the parser.
4646 * In the 'for (var x = i in o) ...' case, the js_EmitTree(...pn3)
4647 * called here will generate the proper note for the assignment
4648 * op that sets x = i, hoisting the initialized var declaration
4649 * out of the loop: 'var x = i; for (x in o) ...'.
4651 * In the 'for (var x in o) ...' case, nothing but the prolog op
4652 * (if needed) should be generated here, we must emit the note
4653 * just before the JSOP_FOR* opcode in the switch on pn3->pn_type
4654 * a bit below, so nothing is hoisted: 'for (var x in o) ...'.
4656 * A 'for (let x = i in o)' loop must not be hoisted, since in
4657 * this form the let variable is scoped by the loop body (but not
4658 * the head). The initializer expression i must be evaluated for
4659 * any side effects. So we hoist only i in the let case.
4662 type
= PN_TYPE(pn3
);
4663 cg
->flags
|= TCF_IN_FOR_INIT
;
4664 if (TOKEN_TYPE_IS_DECL(type
) && !js_EmitTree(cx
, cg
, pn3
))
4666 cg
->flags
&= ~TCF_IN_FOR_INIT
;
4668 /* Compile the object expression to the right of 'in'. */
4669 if (!js_EmitTree(cx
, cg
, pn2
->pn_right
))
4673 * Emit a bytecode to convert top of stack value to the iterator
4674 * object depending on the loop variant (for-in, for-each-in, or
4675 * destructuring for-in).
4677 JS_ASSERT(pn
->pn_op
== JSOP_ITER
);
4678 if (js_Emit2(cx
, cg
, JSOP_ITER
, (uint8
) pn
->pn_iflags
) < 0)
4681 /* Annotate so the decompiler can find the loop-closing jump. */
4682 noteIndex
= js_NewSrcNote(cx
, cg
, SRC_FOR_IN
);
4687 * Jump down to the loop condition to minimize overhead assuming at
4688 * least one iteration, as the other loop forms do.
4690 jmp
= EmitJump(cx
, cg
, JSOP_GOTO
, 0);
4694 top
= CG_OFFSET(cg
);
4695 SET_STATEMENT_TOP(&stmtInfo
, top
);
4696 if (js_Emit1(cx
, cg
, JSOP_TRACE
) < 0)
4700 intN loopDepth
= cg
->stackDepth
;
4704 * Compile a JSOP_FOR* bytecode based on the left hand side.
4706 * Initialize op to JSOP_SETNAME in case of |for ([a, b] in o)...|
4707 * or similar, to signify assignment, rather than declaration, to
4708 * the decompiler. EmitDestructuringOps takes a prolog bytecode
4709 * parameter and emits the appropriate source note, defaulting to
4710 * assignment, so JSOP_SETNAME is not critical here; many similar
4711 * ops could be used -- just not JSOP_NOP (which means 'let').
4715 #if JS_HAS_BLOCK_SCOPE
4719 JS_ASSERT(pn3
->pn_arity
== PN_LIST
&& pn3
->pn_count
== 1);
4721 #if JS_HAS_DESTRUCTURING
4722 if (pn3
->pn_type
== TOK_ASSIGN
) {
4724 JS_ASSERT(pn3
->pn_type
== TOK_RB
|| pn3
->pn_type
== TOK_RC
);
4726 if (pn3
->pn_type
== TOK_RB
|| pn3
->pn_type
== TOK_RC
) {
4727 op
= PN_OP(pn2
->pn_left
);
4728 goto destructuring_for
;
4731 JS_ASSERT(pn3
->pn_type
== TOK_NAME
);
4737 * Always annotate JSOP_FORLOCAL if given input of the form
4738 * 'for (let x in * o)' -- the decompiler must not hoist the
4739 * 'let x' out of the loop head, or x will be bound in the
4740 * wrong scope. Likewise, but in this case only for the sake
4741 * of higher decompilation fidelity only, do not hoist 'var x'
4742 * when given 'for (var x in o)'.
4745 #if JS_HAS_BLOCK_SCOPE
4748 (type
== TOK_VAR
&& !pn3
->maybeExpr())) &&
4749 js_NewSrcNote2(cx
, cg
, SRC_DECL
,
4752 : SRC_DECL_LET
) < 0) {
4755 if (pn3
->pn_cookie
!= FREE_UPVAR_COOKIE
) {
4758 case JSOP_GETARG
: /* FALL THROUGH */
4759 case JSOP_SETARG
: op
= JSOP_FORARG
; break;
4760 case JSOP_GETGVAR
: /* FALL THROUGH */
4761 case JSOP_SETGVAR
: op
= JSOP_FORNAME
; break;
4762 case JSOP_GETLOCAL
: /* FALL THROUGH */
4763 case JSOP_SETLOCAL
: op
= JSOP_FORLOCAL
; break;
4764 default: JS_ASSERT(0);
4767 pn3
->pn_op
= JSOP_FORNAME
;
4768 if (!BindNameToSlot(cx
, cg
, pn3
))
4772 if (pn3
->isConst()) {
4773 js_ReportCompileErrorNumber(cx
, CG_TS(cg
), pn3
, JSREPORT_ERROR
,
4774 JSMSG_BAD_FOR_LEFTSIDE
);
4777 if (pn3
->pn_cookie
!= FREE_UPVAR_COOKIE
) {
4778 atomIndex
= (jsatomid
) pn3
->pn_cookie
;
4779 EMIT_UINT16_IMM_OP(op
, atomIndex
);
4781 if (!EmitAtomOp(cx
, pn3
, op
, cg
))
4788 * 'for (o.p in q)' can use JSOP_FORPROP only if evaluating 'o'
4789 * has no side effects.
4792 if (!CheckSideEffects(cx
, cg
, pn3
->expr(), &useful
))
4795 if (!EmitPropOp(cx
, pn3
, JSOP_FORPROP
, cg
, JS_FALSE
))
4801 #if JS_HAS_DESTRUCTURING
4805 if (js_Emit1(cx
, cg
, JSOP_FORELEM
) < 0)
4807 JS_ASSERT(cg
->stackDepth
>= 3);
4809 #if JS_HAS_DESTRUCTURING
4810 if (pn3
->pn_type
== TOK_RB
|| pn3
->pn_type
== TOK_RC
) {
4811 if (!EmitDestructuringOps(cx
, cg
, op
, pn3
))
4813 if (js_Emit1(cx
, cg
, JSOP_POP
) < 0)
4817 if (pn3
->pn_type
== TOK_LP
) {
4818 JS_ASSERT(pn3
->pn_op
== JSOP_SETCALL
);
4819 if (!js_EmitTree(cx
, cg
, pn3
))
4821 if (js_Emit1(cx
, cg
, JSOP_ENUMELEM
) < 0)
4824 #if JS_HAS_XML_SUPPORT
4825 if (pn3
->pn_type
== TOK_UNARYOP
) {
4826 JS_ASSERT(pn3
->pn_op
== JSOP_BINDXMLNAME
);
4827 if (!js_EmitTree(cx
, cg
, pn3
))
4829 if (js_Emit1(cx
, cg
, JSOP_ENUMELEM
) < 0)
4833 if (!EmitElemOp(cx
, pn3
, JSOP_ENUMELEM
, cg
))
4838 /* The stack should be balanced around the JSOP_FOR* opcode sequence. */
4839 JS_ASSERT(cg
->stackDepth
== loopDepth
);
4841 /* Set the first srcnote offset so we can find the start of the loop body. */
4842 if (!js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 0, CG_OFFSET(cg
) - jmp
))
4845 /* Emit code for the loop body. */
4846 if (!js_EmitTree(cx
, cg
, pn
->pn_right
))
4849 /* Set loop and enclosing "update" offsets, for continue. */
4852 stmt
->update
= CG_OFFSET(cg
);
4853 } while ((stmt
= stmt
->down
) != NULL
&& stmt
->type
== STMT_LABEL
);
4856 * Fixup the goto that starts the loop to jump down to JSOP_NEXTITER.
4858 CHECK_AND_SET_JUMP_OFFSET_AT(cx
, cg
, jmp
);
4859 if (js_Emit1(cx
, cg
, JSOP_NEXTITER
) < 0)
4861 beq
= EmitJump(cx
, cg
, JSOP_IFNE
, top
- CG_OFFSET(cg
));
4865 /* Set the second srcnote offset so we can find the closing jump. */
4866 if (!js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 1, beq
- jmp
))
4869 /* C-style for (init; cond; update) ... loop. */
4873 /* No initializer: emit an annotated nop for the decompiler. */
4876 cg
->flags
|= TCF_IN_FOR_INIT
;
4877 #if JS_HAS_DESTRUCTURING
4878 if (pn3
->pn_type
== TOK_ASSIGN
&&
4879 !MaybeEmitGroupAssignment(cx
, cg
, op
, pn3
, &op
)) {
4883 if (op
== JSOP_POP
) {
4884 if (!js_EmitTree(cx
, cg
, pn3
))
4886 if (TOKEN_TYPE_IS_DECL(pn3
->pn_type
)) {
4888 * Check whether a destructuring-initialized var decl
4889 * was optimized to a group assignment. If so, we do
4890 * not need to emit a pop below, so switch to a nop,
4891 * just for the decompiler.
4893 JS_ASSERT(pn3
->pn_arity
== PN_LIST
);
4894 if (pn3
->pn_xflags
& PNX_GROUPINIT
)
4898 cg
->flags
&= ~TCF_IN_FOR_INIT
;
4902 * NB: the SRC_FOR note has offsetBias 1 (JSOP_{NOP,POP}_LENGTH).
4903 * Use tmp to hold the biased srcnote "top" offset, which differs
4904 * from the top local variable by the length of the JSOP_GOTO{,X}
4905 * emitted in between tmp and top if this loop has a condition.
4907 noteIndex
= js_NewSrcNote(cx
, cg
, SRC_FOR
);
4908 if (noteIndex
< 0 || js_Emit1(cx
, cg
, op
) < 0)
4910 tmp
= CG_OFFSET(cg
);
4913 /* Goto the loop condition, which branches back to iterate. */
4914 jmp
= EmitJump(cx
, cg
, JSOP_GOTO
, 0);
4919 top
= CG_OFFSET(cg
);
4920 SET_STATEMENT_TOP(&stmtInfo
, top
);
4922 /* Emit code for the loop body. */
4923 if (js_Emit1(cx
, cg
, JSOP_TRACE
) < 0)
4925 if (!js_EmitTree(cx
, cg
, pn
->pn_right
))
4928 /* Set the second note offset so we can find the update part. */
4929 JS_ASSERT(noteIndex
!= -1);
4930 if (!js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 1,
4931 CG_OFFSET(cg
) - tmp
)) {
4935 /* Set loop and enclosing "update" offsets, for continue. */
4938 stmt
->update
= CG_OFFSET(cg
);
4939 } while ((stmt
= stmt
->down
) != NULL
&& stmt
->type
== STMT_LABEL
);
4941 /* Check for update code to do before the condition (if any). */
4945 #if JS_HAS_DESTRUCTURING
4946 if (pn3
->pn_type
== TOK_ASSIGN
&&
4947 !MaybeEmitGroupAssignment(cx
, cg
, op
, pn3
, &op
)) {
4951 if (op
== JSOP_POP
&& !js_EmitTree(cx
, cg
, pn3
))
4954 /* Always emit the POP or NOP, to help the decompiler. */
4955 if (js_Emit1(cx
, cg
, op
) < 0)
4958 /* Restore the absolute line number for source note readers. */
4959 off
= (ptrdiff_t) pn
->pn_pos
.end
.lineno
;
4960 if (CG_CURRENT_LINE(cg
) != (uintN
) off
) {
4961 if (js_NewSrcNote2(cx
, cg
, SRC_SETLINE
, off
) < 0)
4963 CG_CURRENT_LINE(cg
) = (uintN
) off
;
4967 /* Set the first note offset so we can find the loop condition. */
4968 if (!js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 0,
4969 CG_OFFSET(cg
) - tmp
)) {
4974 /* Fix up the goto from top to target the loop condition. */
4975 JS_ASSERT(jmp
>= 0);
4976 CHECK_AND_SET_JUMP_OFFSET_AT(cx
, cg
, jmp
);
4978 if (!js_EmitTree(cx
, cg
, pn2
->pn_kid2
))
4982 /* The third note offset helps us find the loop-closing jump. */
4983 if (!js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 2,
4984 CG_OFFSET(cg
) - tmp
)) {
4989 beq
= EmitJump(cx
, cg
, JSOP_IFNE
, top
- CG_OFFSET(cg
));
4993 /* No loop condition -- emit the loop-closing jump. */
4994 jmp
= EmitJump(cx
, cg
, JSOP_GOTO
, top
- CG_OFFSET(cg
));
5000 /* Now fixup all breaks and continues (before for/in's JSOP_ENDITER). */
5001 if (!js_PopStatementCG(cx
, cg
))
5004 if (pn2
->pn_type
== TOK_IN
) {
5006 * JSOP_ENDITER must have a slot to save an exception thrown from
5007 * the body of for-in loop when closing the iterator object, and
5008 * fortunately it does: the slot that was set by JSOP_NEXTITER to
5009 * the return value of iterator.next().
5011 JS_ASSERT(js_CodeSpec
[JSOP_ENDITER
].nuses
== 2);
5012 if (!NewTryNote(cx
, cg
, JSTRY_ITER
, cg
->stackDepth
, top
, CG_OFFSET(cg
)) ||
5013 js_Emit1(cx
, cg
, JSOP_ENDITER
) < 0) {
5023 ale
= cg
->atomList
.add(cg
->compiler
, atom
);
5026 while (stmt
->type
!= STMT_LABEL
|| stmt
->label
!= atom
)
5028 noteType
= SRC_BREAK2LABEL
;
5031 while (!STMT_IS_LOOP(stmt
) && stmt
->type
!= STMT_SWITCH
)
5033 noteType
= (stmt
->type
== STMT_SWITCH
) ? SRC_NULL
: SRC_BREAK
;
5036 if (EmitGoto(cx
, cg
, stmt
, &stmt
->breaks
, ale
, noteType
) < 0)
5044 /* Find the loop statement enclosed by the matching label. */
5045 JSStmtInfo
*loop
= NULL
;
5046 ale
= cg
->atomList
.add(cg
->compiler
, atom
);
5049 while (stmt
->type
!= STMT_LABEL
|| stmt
->label
!= atom
) {
5050 if (STMT_IS_LOOP(stmt
))
5055 noteType
= SRC_CONT2LABEL
;
5058 while (!STMT_IS_LOOP(stmt
))
5060 noteType
= SRC_CONTINUE
;
5063 if (EmitGoto(cx
, cg
, stmt
, &stmt
->continues
, ale
, noteType
) < 0)
5068 if (!js_EmitTree(cx
, cg
, pn
->pn_left
))
5070 js_PushStatement(cg
, &stmtInfo
, STMT_WITH
, CG_OFFSET(cg
));
5071 if (js_Emit1(cx
, cg
, JSOP_ENTERWITH
) < 0)
5073 if (!js_EmitTree(cx
, cg
, pn
->pn_right
))
5075 if (js_Emit1(cx
, cg
, JSOP_LEAVEWITH
) < 0)
5077 ok
= js_PopStatementCG(cx
, cg
);
5082 ptrdiff_t tryStart
, tryEnd
, catchJump
, finallyStart
;
5084 JSParseNode
*lastCatch
;
5089 * Push stmtInfo to track jumps-over-catches and gosubs-to-finally
5092 * When a finally block is active (STMT_FINALLY in our tree context),
5093 * non-local jumps (including jumps-over-catches) result in a GOSUB
5094 * being written into the bytecode stream and fixed-up later (c.f.
5095 * EmitBackPatchOp and BackPatch).
5097 js_PushStatement(cg
, &stmtInfo
,
5098 pn
->pn_kid3
? STMT_FINALLY
: STMT_TRY
,
5102 * Since an exception can be thrown at any place inside the try block,
5103 * we need to restore the stack and the scope chain before we transfer
5104 * the control to the exception handler.
5106 * For that we store in a try note associated with the catch or
5107 * finally block the stack depth upon the try entry. The interpreter
5108 * uses this depth to properly unwind the stack and the scope chain.
5110 depth
= cg
->stackDepth
;
5112 /* Mark try location for decompilation, then emit try block. */
5113 if (js_Emit1(cx
, cg
, JSOP_TRY
) < 0)
5115 tryStart
= CG_OFFSET(cg
);
5116 if (!js_EmitTree(cx
, cg
, pn
->pn_kid1
))
5118 JS_ASSERT(depth
== cg
->stackDepth
);
5120 /* GOSUB to finally, if present. */
5122 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0)
5124 jmp
= EmitBackPatchOp(cx
, cg
, JSOP_BACKPATCH
, &GOSUBS(stmtInfo
));
5129 /* Emit (hidden) jump over catch and/or finally. */
5130 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0)
5132 jmp
= EmitBackPatchOp(cx
, cg
, JSOP_BACKPATCH
, &catchJump
);
5136 tryEnd
= CG_OFFSET(cg
);
5138 /* If this try has a catch block, emit it. */
5142 jsint count
= 0; /* previous catch block's population */
5145 * The emitted code for a catch block looks like:
5147 * [throwing] only if 2nd+ catch block
5148 * [leaveblock] only if 2nd+ catch block
5149 * enterblock with SRC_CATCH
5151 * [dup] only if catchguard
5152 * setlocalpop <slot> or destructuring code
5153 * [< catchguard code >] if there's a catchguard
5154 * [ifeq <offset to next catch block>] " "
5155 * [pop] only if catchguard
5156 * < catch block contents >
5158 * goto <end of catch blocks> non-local; finally applies
5160 * If there's no catch block without a catchguard, the last
5161 * <offset to next catch block> points to rethrow code. This
5162 * code will [gosub] to the finally code if appropriate, and is
5163 * also used for the catch-all trynote for capturing exceptions
5164 * thrown from catch{} blocks.
5166 for (pn3
= pn2
->pn_head
; pn3
; pn3
= pn3
->pn_next
) {
5167 ptrdiff_t guardJump
, catchNote
;
5169 JS_ASSERT(cg
->stackDepth
== depth
);
5170 guardJump
= GUARDJUMP(stmtInfo
);
5171 if (guardJump
!= -1) {
5172 /* Fix up and clean up previous catch block. */
5173 CHECK_AND_SET_JUMP_OFFSET_AT(cx
, cg
, guardJump
);
5176 * Account for JSOP_ENTERBLOCK (whose block object count
5177 * is saved below) and pushed exception object that we
5178 * still have after the jumping from the previous guard.
5180 cg
->stackDepth
= depth
+ count
+ 1;
5183 * Move exception back to cx->exception to prepare for
5184 * the next catch. We hide [throwing] from the decompiler
5185 * since it compensates for the hidden JSOP_DUP at the
5186 * start of the previous guarded catch.
5188 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0 ||
5189 js_Emit1(cx
, cg
, JSOP_THROWING
) < 0) {
5192 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0)
5194 EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK
, count
);
5195 JS_ASSERT(cg
->stackDepth
== depth
);
5199 * Annotate the JSOP_ENTERBLOCK that's about to be generated
5200 * by the call to js_EmitTree immediately below. Save this
5201 * source note's index in stmtInfo for use by the TOK_CATCH:
5202 * case, where the length of the catch guard is set as the
5205 catchNote
= js_NewSrcNote2(cx
, cg
, SRC_CATCH
, 0);
5208 CATCHNOTE(stmtInfo
) = catchNote
;
5211 * Emit the lexical scope and catch body. Save the catch's
5212 * block object population via count, for use when targeting
5213 * guardJump at the next catch (the guard mismatch case).
5215 JS_ASSERT(pn3
->pn_type
== TOK_LEXICALSCOPE
);
5216 count
= OBJ_BLOCK_COUNT(cx
, pn3
->pn_objbox
->object
);
5217 if (!js_EmitTree(cx
, cg
, pn3
))
5220 /* gosub <finally>, if required */
5222 jmp
= EmitBackPatchOp(cx
, cg
, JSOP_BACKPATCH
,
5226 JS_ASSERT(cg
->stackDepth
== depth
);
5230 * Jump over the remaining catch blocks. This will get fixed
5231 * up to jump to after catch/finally.
5233 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0)
5235 jmp
= EmitBackPatchOp(cx
, cg
, JSOP_BACKPATCH
, &catchJump
);
5240 * Save a pointer to the last catch node to handle try-finally
5241 * and try-catch(guard)-finally special cases.
5243 lastCatch
= pn3
->expr();
5248 * Last catch guard jumps to the rethrow code sequence if none of the
5249 * guards match. Target guardJump at the beginning of the rethrow
5250 * sequence, just in case a guard expression throws and leaves the
5253 if (lastCatch
&& lastCatch
->pn_kid2
) {
5254 CHECK_AND_SET_JUMP_OFFSET_AT(cx
, cg
, GUARDJUMP(stmtInfo
));
5256 /* Sync the stack to take into account pushed exception. */
5257 JS_ASSERT(cg
->stackDepth
== depth
);
5258 cg
->stackDepth
= depth
+ 1;
5261 * Rethrow the exception, delegating executing of finally if any
5262 * to the exception handler.
5264 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0 ||
5265 js_Emit1(cx
, cg
, JSOP_THROW
) < 0) {
5270 JS_ASSERT(cg
->stackDepth
== depth
);
5272 /* Emit finally handler if any. */
5273 finallyStart
= 0; /* to quell GCC uninitialized warnings */
5276 * Fix up the gosubs that might have been emitted before non-local
5277 * jumps to the finally code.
5279 if (!BackPatch(cx
, cg
, GOSUBS(stmtInfo
), CG_NEXT(cg
), JSOP_GOSUB
))
5282 finallyStart
= CG_OFFSET(cg
);
5284 /* Indicate that we're emitting a subroutine body. */
5285 stmtInfo
.type
= STMT_SUBROUTINE
;
5286 if (!UpdateLineNumberNotes(cx
, cg
, pn
->pn_kid3
->pn_pos
.begin
.lineno
))
5288 if (js_Emit1(cx
, cg
, JSOP_FINALLY
) < 0 ||
5289 !js_EmitTree(cx
, cg
, pn
->pn_kid3
) ||
5290 js_Emit1(cx
, cg
, JSOP_RETSUB
) < 0) {
5293 JS_ASSERT(cg
->stackDepth
== depth
);
5295 if (!js_PopStatementCG(cx
, cg
))
5298 if (js_NewSrcNote(cx
, cg
, SRC_ENDBRACE
) < 0 ||
5299 js_Emit1(cx
, cg
, JSOP_NOP
) < 0) {
5303 /* Fix up the end-of-try/catch jumps to come here. */
5304 if (!BackPatch(cx
, cg
, catchJump
, CG_NEXT(cg
), JSOP_GOTO
))
5308 * Add the try note last, to let post-order give us the right ordering
5309 * (first to last for a given nesting level, inner to outer by level).
5312 !NewTryNote(cx
, cg
, JSTRY_CATCH
, depth
, tryStart
, tryEnd
)) {
5317 * If we've got a finally, mark try+catch region with additional
5318 * trynote to catch exceptions (re)thrown from a catch block or
5319 * for the try{}finally{} case.
5322 !NewTryNote(cx
, cg
, JSTRY_FINALLY
, depth
, tryStart
, finallyStart
)) {
5330 ptrdiff_t catchStart
, guardJump
;
5334 * Morph STMT_BLOCK to STMT_CATCH, note the block entry code offset,
5335 * and save the block object atom.
5338 JS_ASSERT(stmt
->type
== STMT_BLOCK
&& (stmt
->flags
& SIF_SCOPE
));
5339 stmt
->type
= STMT_CATCH
;
5340 catchStart
= stmt
->update
;
5341 blockObj
= stmt
->blockObj
;
5343 /* Go up one statement info record to the TRY or FINALLY record. */
5345 JS_ASSERT(stmt
->type
== STMT_TRY
|| stmt
->type
== STMT_FINALLY
);
5347 /* Pick up the pending exception and bind it to the catch variable. */
5348 if (js_Emit1(cx
, cg
, JSOP_EXCEPTION
) < 0)
5352 * Dup the exception object if there is a guard for rethrowing to use
5353 * it later when rethrowing or in other catches.
5355 if (pn
->pn_kid2
&& js_Emit1(cx
, cg
, JSOP_DUP
) < 0)
5359 switch (pn2
->pn_type
) {
5360 #if JS_HAS_DESTRUCTURING
5363 if (!EmitDestructuringOps(cx
, cg
, JSOP_NOP
, pn2
))
5365 if (js_Emit1(cx
, cg
, JSOP_POP
) < 0)
5371 /* Inline and specialize BindNameToSlot for pn2. */
5372 JS_ASSERT(pn2
->pn_cookie
!= FREE_UPVAR_COOKIE
);
5373 EMIT_UINT16_IMM_OP(JSOP_SETLOCALPOP
, pn2
->pn_cookie
);
5380 /* Emit the guard expression, if there is one. */
5382 if (!js_EmitTree(cx
, cg
, pn
->pn_kid2
))
5384 if (!js_SetSrcNoteOffset(cx
, cg
, CATCHNOTE(*stmt
), 0,
5385 CG_OFFSET(cg
) - catchStart
)) {
5388 /* ifeq <next block> */
5389 guardJump
= EmitJump(cx
, cg
, JSOP_IFEQ
, 0);
5392 GUARDJUMP(*stmt
) = guardJump
;
5394 /* Pop duplicated exception object as we no longer need it. */
5395 if (js_Emit1(cx
, cg
, JSOP_POP
) < 0)
5399 /* Emit the catch body. */
5400 if (!js_EmitTree(cx
, cg
, pn
->pn_kid3
))
5404 * Annotate the JSOP_LEAVEBLOCK that will be emitted as we unwind via
5405 * our TOK_LEXICALSCOPE parent, so the decompiler knows to pop.
5407 off
= cg
->stackDepth
;
5408 if (js_NewSrcNote2(cx
, cg
, SRC_CATCH
, off
) < 0)
5414 if (!EmitVariables(cx
, cg
, pn
, JS_FALSE
, ¬eIndex
))
5419 /* Push a return value */
5422 if (!js_EmitTree(cx
, cg
, pn2
))
5425 if (js_Emit1(cx
, cg
, JSOP_PUSH
) < 0)
5430 * EmitNonLocalJumpFixup may add fixup bytecode to close open try
5431 * blocks having finally clauses and to exit intermingled let blocks.
5432 * We can't simply transfer control flow to our caller in that case,
5433 * because we must gosub to those finally clauses from inner to outer,
5434 * with the correct stack pointer (i.e., after popping any with,
5435 * for/in, etc., slots nested inside the finally's try).
5437 * In this case we mutate JSOP_RETURN into JSOP_SETRVAL and add an
5438 * extra JSOP_RETRVAL after the fixups.
5440 top
= CG_OFFSET(cg
);
5441 if (js_Emit1(cx
, cg
, JSOP_RETURN
) < 0)
5443 if (!EmitNonLocalJumpFixup(cx
, cg
, NULL
))
5445 if (top
+ JSOP_RETURN_LENGTH
!= CG_OFFSET(cg
)) {
5446 CG_BASE(cg
)[top
] = JSOP_SETRVAL
;
5447 if (js_Emit1(cx
, cg
, JSOP_RETRVAL
) < 0)
5452 #if JS_HAS_GENERATORS
5454 if (!(cg
->flags
& TCF_IN_FUNCTION
)) {
5455 js_ReportCompileErrorNumber(cx
, CG_TS(cg
), pn
, JSREPORT_ERROR
,
5456 JSMSG_BAD_RETURN_OR_YIELD
,
5461 if (!js_EmitTree(cx
, cg
, pn
->pn_kid
))
5464 if (js_Emit1(cx
, cg
, JSOP_PUSH
) < 0)
5467 if (pn
->pn_hidden
&& js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0)
5469 if (js_Emit1(cx
, cg
, JSOP_YIELD
) < 0)
5476 #if JS_HAS_XML_SUPPORT
5477 if (pn
->pn_arity
== PN_UNARY
) {
5478 if (!js_EmitTree(cx
, cg
, pn
->pn_kid
))
5480 if (js_Emit1(cx
, cg
, PN_OP(pn
)) < 0)
5486 JS_ASSERT(pn
->pn_arity
== PN_LIST
);
5489 tmp
= CG_OFFSET(cg
);
5490 if (pn
->pn_xflags
& PNX_NEEDBRACES
) {
5491 noteIndex
= js_NewSrcNote2(cx
, cg
, SRC_BRACE
, 0);
5492 if (noteIndex
< 0 || js_Emit1(cx
, cg
, JSOP_NOP
) < 0)
5496 js_PushStatement(cg
, &stmtInfo
, STMT_BLOCK
, top
);
5498 JSParseNode
*pnchild
= pn
->pn_head
;
5499 if (pn
->pn_xflags
& PNX_FUNCDEFS
) {
5501 * This block contains top-level function definitions. To ensure
5502 * that we emit the bytecode defining them before the rest of code
5503 * in the block we use a separate pass over functions. During the
5504 * main pass later the emitter will add JSOP_NOP with source notes
5505 * for the function to preserve the original functions position
5508 * Currently this is used only for functions, as compile-as-we go
5509 * mode for scripts does not allow separate emitter passes.
5511 JS_ASSERT(cg
->flags
& TCF_IN_FUNCTION
);
5512 if (pn
->pn_xflags
& PNX_DESTRUCT
) {
5514 * Assign the destructuring arguments before defining any
5515 * functions, see bug 419662.
5517 JS_ASSERT(pnchild
->pn_type
== TOK_SEMI
);
5518 JS_ASSERT(pnchild
->pn_kid
->pn_type
== TOK_COMMA
);
5519 if (!js_EmitTree(cx
, cg
, pnchild
))
5521 pnchild
= pnchild
->pn_next
;
5524 for (pn2
= pnchild
; pn2
; pn2
= pn2
->pn_next
) {
5525 if (pn2
->pn_type
== TOK_FUNCTION
) {
5526 if (pn2
->pn_op
== JSOP_NOP
) {
5527 if (!js_EmitTree(cx
, cg
, pn2
))
5531 * JSOP_DEFFUN in a top-level block with function
5532 * definitions appears, for example, when "if (true)"
5533 * is optimized away from "if (true) function x() {}".
5536 JS_ASSERT(pn2
->pn_op
== JSOP_DEFFUN
);
5541 for (pn2
= pnchild
; pn2
; pn2
= pn2
->pn_next
) {
5542 if (!js_EmitTree(cx
, cg
, pn2
))
5546 if (noteIndex
>= 0 &&
5547 !js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 0,
5548 CG_OFFSET(cg
) - tmp
)) {
5552 ok
= js_PopStatementCG(cx
, cg
);
5557 JS_ASSERT(pn
->pn_arity
== PN_LIST
);
5558 js_PushStatement(cg
, &stmtInfo
, STMT_SEQ
, top
);
5559 for (pn2
= pn
->pn_head
; pn2
; pn2
= pn2
->pn_next
) {
5560 if (!js_EmitTree(cx
, cg
, pn2
))
5563 ok
= js_PopStatementCG(cx
, cg
);
5570 * Top-level or called-from-a-native JS_Execute/EvaluateScript,
5571 * debugger, and eval frames may need the value of the ultimate
5572 * expression statement as the script's result, despite the fact
5573 * that it appears useless to the compiler.
5575 * API users may also set the JSOPTION_NO_SCRIPT_RVAL option when
5576 * calling JS_Compile* to suppress JSOP_POPV.
5578 useful
= wantval
= !(cg
->flags
& (TCF_IN_FUNCTION
| TCF_NO_SCRIPT_RVAL
));
5580 if (!CheckSideEffects(cx
, cg
, pn2
, &useful
))
5585 * Don't eliminate apparently useless expressions if they are
5586 * labeled expression statements. The tc->topStmt->update test
5587 * catches the case where we are nesting in js_EmitTree for a
5588 * labeled compound statement.
5592 cg
->topStmt
->type
!= STMT_LABEL
||
5593 cg
->topStmt
->update
< CG_OFFSET(cg
))) {
5594 CG_CURRENT_LINE(cg
) = pn2
->pn_pos
.begin
.lineno
;
5595 if (!js_ReportCompileErrorNumber(cx
, CG_TS(cg
), pn2
,
5598 JSMSG_USELESS_EXPR
)) {
5602 op
= wantval
? JSOP_POPV
: JSOP_POP
;
5603 #if JS_HAS_DESTRUCTURING
5605 pn2
->pn_type
== TOK_ASSIGN
&&
5606 !MaybeEmitGroupAssignment(cx
, cg
, op
, pn2
, &op
)) {
5610 if (op
!= JSOP_NOP
) {
5611 if (!js_EmitTree(cx
, cg
, pn2
))
5613 if (js_Emit1(cx
, cg
, op
) < 0)
5621 /* Emit an annotated nop so we know to decompile a label. */
5623 ale
= cg
->atomList
.add(cg
->compiler
, atom
);
5627 noteType
= (pn2
->pn_type
== TOK_LC
||
5628 (pn2
->pn_type
== TOK_LEXICALSCOPE
&&
5629 pn2
->expr()->pn_type
== TOK_LC
))
5632 noteIndex
= js_NewSrcNote2(cx
, cg
, noteType
,
5633 (ptrdiff_t) ALE_INDEX(ale
));
5634 if (noteIndex
< 0 ||
5635 js_Emit1(cx
, cg
, JSOP_NOP
) < 0) {
5639 /* Emit code for the labeled statement. */
5640 js_PushStatement(cg
, &stmtInfo
, STMT_LABEL
, CG_OFFSET(cg
));
5641 stmtInfo
.label
= atom
;
5642 if (!js_EmitTree(cx
, cg
, pn2
))
5644 if (!js_PopStatementCG(cx
, cg
))
5647 /* If the statement was compound, emit a note for the end brace. */
5648 if (noteType
== SRC_LABELBRACE
) {
5649 if (js_NewSrcNote(cx
, cg
, SRC_ENDBRACE
) < 0 ||
5650 js_Emit1(cx
, cg
, JSOP_NOP
) < 0) {
5658 * Emit SRC_PCDELTA notes on each JSOP_POP between comma operands.
5659 * These notes help the decompiler bracket the bytecodes generated
5660 * from each sub-expression that follows a comma.
5662 off
= noteIndex
= -1;
5663 for (pn2
= pn
->pn_head
; ; pn2
= pn2
->pn_next
) {
5664 if (!js_EmitTree(cx
, cg
, pn2
))
5666 tmp
= CG_OFFSET(cg
);
5667 if (noteIndex
>= 0) {
5668 if (!js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 0, tmp
-off
))
5674 noteIndex
= js_NewSrcNote2(cx
, cg
, SRC_PCDELTA
, 0);
5675 if (noteIndex
< 0 ||
5676 js_Emit1(cx
, cg
, JSOP_POP
) < 0) {
5684 * Check left operand type and generate specialized code for it.
5685 * Specialize to avoid ECMA "reference type" values on the operand
5686 * stack, which impose pervasive runtime "GetValue" costs.
5689 atomIndex
= (jsatomid
) -1; /* quell GCC overwarning */
5690 switch (pn2
->pn_type
) {
5692 if (!BindNameToSlot(cx
, cg
, pn2
))
5694 if (pn2
->pn_cookie
!= FREE_UPVAR_COOKIE
) {
5695 atomIndex
= (jsatomid
) pn2
->pn_cookie
;
5697 ale
= cg
->atomList
.add(cg
->compiler
, pn2
->pn_atom
);
5700 atomIndex
= ALE_INDEX(ale
);
5701 if (!pn2
->isConst())
5702 EMIT_INDEX_OP(JSOP_BINDNAME
, atomIndex
);
5706 if (!js_EmitTree(cx
, cg
, pn2
->expr()))
5708 ale
= cg
->atomList
.add(cg
->compiler
, pn2
->pn_atom
);
5711 atomIndex
= ALE_INDEX(ale
);
5714 JS_ASSERT(pn2
->pn_arity
== PN_BINARY
);
5715 if (!js_EmitTree(cx
, cg
, pn2
->pn_left
))
5717 if (!js_EmitTree(cx
, cg
, pn2
->pn_right
))
5720 #if JS_HAS_DESTRUCTURING
5726 if (!js_EmitTree(cx
, cg
, pn2
))
5729 #if JS_HAS_XML_SUPPORT
5731 JS_ASSERT(pn2
->pn_op
== JSOP_SETXMLNAME
);
5732 if (!js_EmitTree(cx
, cg
, pn2
->pn_kid
))
5734 if (js_Emit1(cx
, cg
, JSOP_BINDXMLNAME
) < 0)
5743 #if JS_HAS_GETTER_SETTER
5744 if (op
== JSOP_GETTER
|| op
== JSOP_SETTER
) {
5745 if (pn2
->pn_type
== TOK_NAME
&& PN_OP(pn2
) != JSOP_SETNAME
) {
5747 * x getter = y where x is a local or let variable is not
5750 js_ReportCompileErrorNumber(cx
,
5752 pn2
, JSREPORT_ERROR
,
5753 JSMSG_BAD_GETTER_OR_SETTER
,
5760 /* We'll emit these prefix bytecodes after emitting the r.h.s. */
5763 /* If += or similar, dup the left operand and get its value. */
5764 if (op
!= JSOP_NOP
) {
5765 switch (pn2
->pn_type
) {
5767 if (pn2
->isConst()) {
5768 if (PN_OP(pn2
) == JSOP_CALLEE
) {
5769 if (js_Emit1(cx
, cg
, JSOP_CALLEE
) < 0)
5772 EMIT_INDEX_OP(PN_OP(pn2
), atomIndex
);
5774 } else if (PN_OP(pn2
) == JSOP_SETNAME
) {
5775 if (js_Emit1(cx
, cg
, JSOP_DUP
) < 0)
5777 EMIT_INDEX_OP(JSOP_GETXPROP
, atomIndex
);
5779 EMIT_UINT16_IMM_OP((PN_OP(pn2
) == JSOP_SETGVAR
)
5781 : (PN_OP(pn2
) == JSOP_GETUPVAR
)
5783 : (PN_OP(pn2
) == JSOP_SETARG
)
5790 if (js_Emit1(cx
, cg
, JSOP_DUP
) < 0)
5792 if (pn2
->pn_atom
== cx
->runtime
->atomState
.lengthAtom
) {
5793 if (js_Emit1(cx
, cg
, JSOP_LENGTH
) < 0)
5795 } else if (pn2
->pn_atom
== cx
->runtime
->atomState
.protoAtom
) {
5796 if (!EmitIndexOp(cx
, JSOP_QNAMEPART
, atomIndex
, cg
))
5798 if (js_Emit1(cx
, cg
, JSOP_GETELEM
) < 0)
5801 EMIT_INDEX_OP(JSOP_GETPROP
, atomIndex
);
5806 #if JS_HAS_XML_SUPPORT
5809 if (js_Emit1(cx
, cg
, JSOP_DUP2
) < 0)
5811 if (js_Emit1(cx
, cg
, JSOP_GETELEM
) < 0)
5818 /* Now emit the right operand (it may affect the namespace). */
5819 if (!js_EmitTree(cx
, cg
, pn
->pn_right
))
5822 /* If += etc., emit the binary operator with a decompiler note. */
5823 if (op
!= JSOP_NOP
) {
5825 * Take care to avoid SRC_ASSIGNOP if the left-hand side is a const
5826 * declared in the current compilation unit, as in this case (just
5827 * a bit further below) we will avoid emitting the assignment op.
5829 if (pn2
->pn_type
!= TOK_NAME
|| !pn2
->isConst()) {
5830 if (js_NewSrcNote(cx
, cg
, SRC_ASSIGNOP
) < 0)
5833 if (js_Emit1(cx
, cg
, op
) < 0)
5837 /* Left parts such as a.b.c and a[b].c need a decompiler note. */
5838 if (pn2
->pn_type
!= TOK_NAME
&&
5839 #if JS_HAS_DESTRUCTURING
5840 pn2
->pn_type
!= TOK_RB
&&
5841 pn2
->pn_type
!= TOK_RC
&&
5843 js_NewSrcNote2(cx
, cg
, SRC_PCBASE
, CG_OFFSET(cg
) - top
) < 0) {
5847 /* Finally, emit the specialized assignment bytecode. */
5848 switch (pn2
->pn_type
) {
5854 EMIT_INDEX_OP(PN_OP(pn2
), atomIndex
);
5858 if (js_Emit1(cx
, cg
, JSOP_SETELEM
) < 0)
5861 #if JS_HAS_DESTRUCTURING
5864 if (!EmitDestructuringOps(cx
, cg
, JSOP_SETNAME
, pn2
))
5868 #if JS_HAS_XML_SUPPORT
5870 if (js_Emit1(cx
, cg
, JSOP_SETXMLNAME
) < 0)
5880 /* Emit the condition, then branch if false to the else part. */
5881 if (!js_EmitTree(cx
, cg
, pn
->pn_kid1
))
5883 noteIndex
= js_NewSrcNote(cx
, cg
, SRC_COND
);
5886 beq
= EmitJump(cx
, cg
, JSOP_IFEQ
, 0);
5887 if (beq
< 0 || !js_EmitTree(cx
, cg
, pn
->pn_kid2
))
5890 /* Jump around else, fixup the branch, emit else, fixup jump. */
5891 jmp
= EmitJump(cx
, cg
, JSOP_GOTO
, 0);
5894 CHECK_AND_SET_JUMP_OFFSET_AT(cx
, cg
, beq
);
5897 * Because each branch pushes a single value, but our stack budgeting
5898 * analysis ignores branches, we now have to adjust cg->stackDepth to
5899 * ignore the value pushed by the first branch. Execution will follow
5900 * only one path, so we must decrement cg->stackDepth.
5902 * Failing to do this will foil code, such as the try/catch/finally
5903 * exception handling code generator, that samples cg->stackDepth for
5904 * use at runtime (JSOP_SETSP), or in let expression and block code
5905 * generation, which must use the stack depth to compute local stack
5906 * indexes correctly.
5908 JS_ASSERT(cg
->stackDepth
> 0);
5910 if (!js_EmitTree(cx
, cg
, pn
->pn_kid3
))
5912 CHECK_AND_SET_JUMP_OFFSET_AT(cx
, cg
, jmp
);
5913 if (!js_SetSrcNoteOffset(cx
, cg
, noteIndex
, 0, jmp
- beq
))
5920 * JSOP_OR converts the operand on the stack to boolean, and if true,
5921 * leaves the original operand value on the stack and jumps; otherwise
5922 * it pops and falls into the next bytecode, which evaluates the right
5923 * operand. The jump goes around the right operand evaluation.
5925 * JSOP_AND converts the operand on the stack to boolean, and if false,
5926 * leaves the original operand value on the stack and jumps; otherwise
5927 * it pops and falls into the right operand's bytecode.
5929 if (pn
->pn_arity
== PN_BINARY
) {
5930 if (!js_EmitTree(cx
, cg
, pn
->pn_left
))
5932 top
= EmitJump(cx
, cg
, JSOP_BACKPATCH_POP
, 0);
5935 if (!js_EmitTree(cx
, cg
, pn
->pn_right
))
5937 off
= CG_OFFSET(cg
);
5938 pc
= CG_CODE(cg
, top
);
5939 CHECK_AND_SET_JUMP_OFFSET(cx
, cg
, pc
, off
- top
);
5942 JS_ASSERT(pn
->pn_arity
== PN_LIST
);
5943 JS_ASSERT(pn
->pn_head
->pn_next
->pn_next
);
5945 /* Left-associative operator chain: avoid too much recursion. */
5947 if (!js_EmitTree(cx
, cg
, pn2
))
5949 top
= EmitJump(cx
, cg
, JSOP_BACKPATCH_POP
, 0);
5953 /* Emit nodes between the head and the tail. */
5955 while ((pn2
= pn2
->pn_next
)->pn_next
) {
5956 if (!js_EmitTree(cx
, cg
, pn2
))
5958 off
= EmitJump(cx
, cg
, JSOP_BACKPATCH_POP
, 0);
5961 if (!SetBackPatchDelta(cx
, cg
, CG_CODE(cg
, jmp
), off
- jmp
))
5966 if (!js_EmitTree(cx
, cg
, pn2
))
5970 off
= CG_OFFSET(cg
);
5972 pc
= CG_CODE(cg
, top
);
5973 tmp
= GetJumpOffset(cg
, pc
);
5974 CHECK_AND_SET_JUMP_OFFSET(cx
, cg
, pc
, off
- top
);
5977 } while ((pn2
= pn2
->pn_next
)->pn_next
);
5982 /* For TCF_IN_FUNCTION test, see TOK_RB concerning JSOP_NEWARRAY. */
5983 if (pn
->pn_arity
== PN_LIST
&& pn
->pn_count
< JS_BIT(16) &&
5984 (cg
->flags
& TCF_IN_FUNCTION
)) {
5985 /* Emit up to the first string literal conventionally. */
5986 for (pn2
= pn
->pn_head
; pn2
; pn2
= pn2
->pn_next
) {
5987 if (pn2
->pn_type
== TOK_STRING
)
5989 if (!js_EmitTree(cx
, cg
, pn2
))
5991 if (pn2
!= pn
->pn_head
&& js_Emit1(cx
, cg
, JSOP_ADD
) < 0)
5995 /* Emit remainder as a single JSOP_CONCATN. */
5996 for (index
= 0; pn2
; pn2
= pn2
->pn_next
, index
++) {
5997 if (!js_EmitTree(cx
, cg
, pn2
))
6002 EMIT_UINT16_IMM_OP(JSOP_CONCATN
, index
);
6004 /* If we had a prefix, we need to be added to it now. */
6005 if (pn
->pn_head
->pn_type
!= TOK_STRING
&&
6006 js_Emit1(cx
, cg
, JSOP_ADD
) < 0) {
6018 case TOK_INSTANCEOF
:
6023 if (pn
->pn_arity
== PN_LIST
) {
6024 /* Left-associative operator chain: avoid too much recursion. */
6026 if (!js_EmitTree(cx
, cg
, pn2
))
6029 while ((pn2
= pn2
->pn_next
) != NULL
) {
6030 if (!js_EmitTree(cx
, cg
, pn2
))
6032 if (js_Emit1(cx
, cg
, op
) < 0)
6036 #if JS_HAS_XML_SUPPORT
6040 if (pn
->pn_arity
== PN_NAME
) {
6041 if (!js_EmitTree(cx
, cg
, pn
->expr()))
6043 if (!EmitAtomOp(cx
, pn
, PN_OP(pn
), cg
))
6049 * Binary :: has a right operand that brackets arbitrary code,
6050 * possibly including a let (a = b) ... expression. We must clear
6051 * TCF_IN_FOR_INIT to avoid mis-compiling such beasts.
6053 oldflags
= cg
->flags
;
6054 cg
->flags
&= ~TCF_IN_FOR_INIT
;
6057 /* Binary operators that evaluate both operands unconditionally. */
6058 if (!js_EmitTree(cx
, cg
, pn
->pn_left
))
6060 if (!js_EmitTree(cx
, cg
, pn
->pn_right
))
6062 #if JS_HAS_XML_SUPPORT
6063 cg
->flags
|= oldflags
& TCF_IN_FOR_INIT
;
6065 if (js_Emit1(cx
, cg
, PN_OP(pn
)) < 0)
6071 #if JS_HAS_XML_SUPPORT
6074 JS_ASSERT(pn
->pn_arity
== PN_UNARY
);
6081 /* Unary op, including unary +/-. */
6083 #if JS_HAS_XML_SUPPORT
6084 if (op
== JSOP_XMLNAME
) {
6085 if (!EmitXMLName(cx
, pn
, op
, cg
))
6092 /* See js_FoldConstants for why this assertion holds true. */
6093 JS_ASSERT_IF(op
== JSOP_TYPEOF
, pn2
->pn_type
== TOK_NAME
);
6095 oldflags
= cg
->flags
;
6096 cg
->flags
&= ~TCF_IN_FOR_INIT
;
6097 if (!js_EmitTree(cx
, cg
, pn2
))
6099 cg
->flags
|= oldflags
& TCF_IN_FOR_INIT
;
6100 if (js_Emit1(cx
, cg
, op
) < 0)
6107 /* Emit lvalue-specialized code for ++/-- operators. */
6109 JS_ASSERT(pn2
->pn_type
!= TOK_RP
);
6111 switch (pn2
->pn_type
) {
6113 JS_ASSERT(pn2
->pn_type
== TOK_NAME
);
6115 if (!BindNameToSlot(cx
, cg
, pn2
))
6118 if (op
== JSOP_CALLEE
) {
6119 if (js_Emit1(cx
, cg
, op
) < 0)
6121 } else if (pn2
->pn_cookie
!= FREE_UPVAR_COOKIE
) {
6122 atomIndex
= (jsatomid
) pn2
->pn_cookie
;
6123 EMIT_UINT16_IMM_OP(op
, atomIndex
);
6125 JS_ASSERT(JOF_OPTYPE(op
) == JOF_ATOM
);
6126 if (!EmitAtomOp(cx
, pn2
, op
, cg
))
6130 if (pn2
->isConst()) {
6131 if (js_Emit1(cx
, cg
, JSOP_POS
) < 0)
6134 if (!(js_CodeSpec
[op
].format
& JOF_POST
)) {
6135 if (js_Emit1(cx
, cg
, JSOP_ONE
) < 0)
6137 op
= (js_CodeSpec
[op
].format
& JOF_INC
) ? JSOP_ADD
: JSOP_SUB
;
6138 if (js_Emit1(cx
, cg
, op
) < 0)
6144 if (!EmitPropOp(cx
, pn2
, op
, cg
, JS_FALSE
))
6148 if (!EmitElemOp(cx
, pn2
, op
, cg
))
6152 if (!js_EmitTree(cx
, cg
, pn2
))
6154 if (js_NewSrcNote2(cx
, cg
, SRC_PCBASE
,
6155 CG_OFFSET(cg
) - pn2
->pn_offset
) < 0) {
6158 if (js_Emit1(cx
, cg
, op
) < 0)
6161 #if JS_HAS_XML_SUPPORT
6163 JS_ASSERT(pn2
->pn_op
== JSOP_SETXMLNAME
);
6164 if (!js_EmitTree(cx
, cg
, pn2
->pn_kid
))
6166 if (js_Emit1(cx
, cg
, JSOP_BINDXMLNAME
) < 0)
6168 if (js_Emit1(cx
, cg
, op
) < 0)
6177 * Under ECMA 3, deleting a non-reference returns true -- but alas we
6178 * must evaluate the operand if it appears it might have side effects.
6181 switch (pn2
->pn_type
) {
6183 if (!BindNameToSlot(cx
, cg
, pn2
))
6186 if (op
== JSOP_FALSE
) {
6187 if (js_Emit1(cx
, cg
, op
) < 0)
6190 if (!EmitAtomOp(cx
, pn2
, op
, cg
))
6195 if (!EmitPropOp(cx
, pn2
, JSOP_DELPROP
, cg
, JS_FALSE
))
6198 #if JS_HAS_XML_SUPPORT
6200 if (!EmitElemOp(cx
, pn2
, JSOP_DELDESC
, cg
))
6205 if (!EmitElemOp(cx
, pn2
, JSOP_DELELEM
, cg
))
6210 * If useless, just emit JSOP_TRUE; otherwise convert delete foo()
6211 * to foo(), true (a comma expression, requiring SRC_PCDELTA).
6214 if (!CheckSideEffects(cx
, cg
, pn2
, &useful
))
6217 off
= noteIndex
= -1;
6219 if (pn2
->pn_op
== JSOP_SETCALL
)
6220 pn2
->pn_op
= JSOP_CALL
;
6221 if (!js_EmitTree(cx
, cg
, pn2
))
6223 off
= CG_OFFSET(cg
);
6224 noteIndex
= js_NewSrcNote2(cx
, cg
, SRC_PCDELTA
, 0);
6225 if (noteIndex
< 0 || js_Emit1(cx
, cg
, JSOP_POP
) < 0)
6228 if (js_Emit1(cx
, cg
, JSOP_TRUE
) < 0)
6230 if (noteIndex
>= 0) {
6231 tmp
= CG_OFFSET(cg
);
6232 if (!js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 0, tmp
-off
))
6238 #if JS_HAS_XML_SUPPORT
6240 if (!js_EmitTree(cx
, cg
, pn
->pn_left
))
6242 jmp
= js_Emit3(cx
, cg
, JSOP_FILTER
, 0, 0);
6245 top
= js_Emit1(cx
, cg
, JSOP_TRACE
);
6248 if (!js_EmitTree(cx
, cg
, pn
->pn_right
))
6250 CHECK_AND_SET_JUMP_OFFSET_AT(cx
, cg
, jmp
);
6251 if (EmitJump(cx
, cg
, JSOP_ENDFILTER
, top
- CG_OFFSET(cg
)) < 0)
6258 * Pop a stack operand, convert it to object, get a property named by
6259 * this bytecode's immediate-indexed atom operand, and push its value
6260 * (not a reference to it).
6262 ok
= EmitPropOp(cx
, pn
, PN_OP(pn
), cg
, JS_FALSE
);
6266 #if JS_HAS_XML_SUPPORT
6270 * Pop two operands, convert the left one to object and the right one
6271 * to property name (atom or tagged int), get the named property, and
6272 * push its value. Set the "obj" register to the result of ToObject
6273 * on the left operand.
6275 ok
= EmitElemOp(cx
, pn
, PN_OP(pn
), cg
);
6281 bool callop
= (PN_TYPE(pn
) == TOK_LP
);
6285 * Emit callable invocation or operator new (constructor call) code.
6286 * First, emit code for the left operand to evaluate the callable or
6287 * constructable object expression.
6289 * For operator new applied to other expressions than E4X ones, we emit
6290 * JSOP_GETPROP instead of JSOP_CALLPROP, etc. This is necessary to
6291 * interpose the lambda-initialized method read barrier -- see the code
6292 * in jsops.cpp for JSOP_LAMBDA followed by JSOP_{SET,INIT}PROP.
6294 * Then (or in a call case that has no explicit reference-base object)
6295 * we emit JSOP_NULL as a placeholder local GC root to hold the |this|
6296 * parameter: in the operator new case, the newborn instance; in the
6297 * base-less call case, a cookie meaning "use the global object as the
6298 * |this| value" (or in ES5 strict mode, "use undefined", so we should
6299 * use JSOP_PUSH instead of JSOP_NULL -- see bug 514570).
6302 switch (pn2
->pn_type
) {
6304 if (!EmitNameOp(cx
, cg
, pn2
, callop
))
6308 if (!EmitPropOp(cx
, pn2
, PN_OP(pn2
), cg
, callop
))
6312 JS_ASSERT(pn2
->pn_op
== JSOP_GETELEM
);
6313 if (!EmitElemOp(cx
, pn2
, callop
? JSOP_CALLELEM
: JSOP_GETELEM
, cg
))
6317 #if JS_HAS_XML_SUPPORT
6318 if (pn2
->pn_op
== JSOP_XMLNAME
) {
6319 if (!EmitXMLName(cx
, pn2
, JSOP_CALLXMLNAME
, cg
))
6321 callop
= true; /* suppress JSOP_NULL after */
6328 * Push null as a placeholder for the global object, per ECMA-262
6331 if (!js_EmitTree(cx
, cg
, pn2
))
6333 callop
= false; /* trigger JSOP_NULL after */
6336 if (!callop
&& js_Emit1(cx
, cg
, JSOP_NULL
) < 0)
6339 /* Remember start of callable-object bytecode for decompilation hint. */
6343 * Emit code for each argument in order, then emit the JSOP_*CALL or
6344 * JSOP_NEW bytecode with a two-byte immediate telling how many args
6345 * were pushed on the operand stack.
6347 oldflags
= cg
->flags
;
6348 cg
->flags
&= ~TCF_IN_FOR_INIT
;
6349 for (pn3
= pn2
->pn_next
; pn3
; pn3
= pn3
->pn_next
) {
6350 if (!js_EmitTree(cx
, cg
, pn3
))
6353 cg
->flags
|= oldflags
& TCF_IN_FOR_INIT
;
6354 if (js_NewSrcNote2(cx
, cg
, SRC_PCBASE
, CG_OFFSET(cg
) - off
) < 0)
6357 argc
= pn
->pn_count
- 1;
6358 if (js_Emit3(cx
, cg
, PN_OP(pn
), ARGC_HI(argc
), ARGC_LO(argc
)) < 0)
6360 if (PN_OP(pn
) == JSOP_CALL
) {
6361 /* Add a trace hint opcode for recursion. */
6362 if (js_Emit1(cx
, cg
, JSOP_TRACE
) < 0)
6365 if (PN_OP(pn
) == JSOP_EVAL
)
6366 EMIT_UINT16_IMM_OP(JSOP_LINENO
, pn
->pn_pos
.begin
.lineno
);
6370 case TOK_LEXICALSCOPE
:
6372 JSObjectBox
*objbox
;
6375 objbox
= pn
->pn_objbox
;
6376 js_PushBlockScope(cg
, &stmtInfo
, objbox
->object
, CG_OFFSET(cg
));
6379 * If this lexical scope is not for a catch block, let block or let
6380 * expression, or any kind of for loop (where the scope starts in the
6381 * head after the first part if for (;;), else in the body if for-in);
6382 * and if our container is top-level but not a function body, or else
6383 * a block statement; then emit a SRC_BRACE note. All other container
6384 * statements get braces by default from the decompiler.
6387 type
= PN_TYPE(pn
->expr());
6388 if (type
!= TOK_CATCH
&& type
!= TOK_LET
&& type
!= TOK_FOR
&&
6389 (!(stmt
= stmtInfo
.down
)
6390 ? !(cg
->flags
& TCF_IN_FUNCTION
)
6391 : stmt
->type
== STMT_BLOCK
)) {
6392 #if defined DEBUG_brendan || defined DEBUG_mrbkap
6393 /* There must be no source note already output for the next op. */
6394 JS_ASSERT(CG_NOTE_COUNT(cg
) == 0 ||
6395 CG_LAST_NOTE_OFFSET(cg
) != CG_OFFSET(cg
) ||
6396 !GettableNoteForNextOp(cg
));
6398 noteIndex
= js_NewSrcNote2(cx
, cg
, SRC_BRACE
, 0);
6403 JS_ASSERT(CG_OFFSET(cg
) == top
);
6404 if (!EmitEnterBlock(cx
, pn
, cg
))
6407 if (!js_EmitTree(cx
, cg
, pn
->pn_expr
))
6411 if (op
== JSOP_LEAVEBLOCKEXPR
) {
6412 if (js_NewSrcNote2(cx
, cg
, SRC_PCBASE
, CG_OFFSET(cg
) - top
) < 0)
6415 if (noteIndex
>= 0 &&
6416 !js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 0,
6417 CG_OFFSET(cg
) - top
)) {
6422 /* Emit the JSOP_LEAVEBLOCK or JSOP_LEAVEBLOCKEXPR opcode. */
6423 count
= OBJ_BLOCK_COUNT(cx
, objbox
->object
);
6424 EMIT_UINT16_IMM_OP(op
, count
);
6426 ok
= js_PopStatementCG(cx
, cg
);
6430 #if JS_HAS_BLOCK_SCOPE
6432 /* Let statements have their variable declarations on the left. */
6433 if (pn
->pn_arity
== PN_BINARY
) {
6440 /* Non-null pn2 means that pn is the variable list from a let head. */
6441 JS_ASSERT(pn
->pn_arity
== PN_LIST
);
6442 if (!EmitVariables(cx
, cg
, pn
, pn2
!= NULL
, ¬eIndex
))
6445 /* Thus non-null pn2 is the body of the let block or expression. */
6446 tmp
= CG_OFFSET(cg
);
6447 if (pn2
&& !js_EmitTree(cx
, cg
, pn2
))
6450 if (noteIndex
>= 0 &&
6451 !js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 0,
6452 CG_OFFSET(cg
) - tmp
)) {
6456 #endif /* JS_HAS_BLOCK_SCOPE */
6458 #if JS_HAS_GENERATORS
6459 case TOK_ARRAYPUSH
: {
6463 * The array object's stack index is in cg->arrayCompDepth. See below
6464 * under the array initialiser code generator for array comprehension
6467 if (!js_EmitTree(cx
, cg
, pn
->pn_kid
))
6469 slot
= AdjustBlockSlot(cx
, cg
, cg
->arrayCompDepth
);
6472 EMIT_UINT16_IMM_OP(PN_OP(pn
), slot
);
6478 #if JS_HAS_GENERATORS
6482 * Emit code for [a, b, c] that is equivalent to constructing a new
6483 * array and in source order evaluating each element value and adding
6484 * it to the array, without invoking latent setters. We use the
6485 * JSOP_NEWINIT and JSOP_INITELEM bytecodes to ignore setters and to
6486 * avoid dup'ing and popping the array as each element is added, as
6487 * JSOP_SETELEM/JSOP_SETPROP would do.
6489 * If no sharp variable is defined, the initializer is not for an array
6490 * comprehension, the initializer is not overlarge, and the initializer
6491 * is not in global code (whose stack growth cannot be precisely modeled
6492 * due to the need to reserve space for global variables and regular
6493 * expressions), use JSOP_NEWARRAY to minimize opcodes and to create the
6494 * array using a fast, all-at-once process rather than a slow, element-
6495 * by-element process.
6497 #if JS_HAS_SHARP_VARS
6502 op
= (JS_LIKELY(pn
->pn_count
< JS_BIT(16)) && (cg
->flags
& TCF_IN_FUNCTION
))
6506 #if JS_HAS_GENERATORS
6507 if (pn
->pn_type
== TOK_ARRAYCOMP
)
6510 #if JS_HAS_SHARP_VARS
6511 JS_ASSERT_IF(sharpnum
>= 0, cg
->hasSharps());
6512 if (cg
->hasSharps())
6516 if (op
== JSOP_NEWINIT
&& !EmitNewInit(cx
, cg
, JSProto_Array
, pn
, sharpnum
))
6519 #if JS_HAS_GENERATORS
6520 if (pn
->pn_type
== TOK_ARRAYCOMP
) {
6524 * Pass the new array's stack index to the TOK_ARRAYPUSH case via
6525 * cg->arrayCompDepth, then simply traverse the TOK_FOR node and
6526 * its kids under pn2 to generate this comprehension.
6528 JS_ASSERT(cg
->stackDepth
> 0);
6529 saveDepth
= cg
->arrayCompDepth
;
6530 cg
->arrayCompDepth
= (uint32
) (cg
->stackDepth
- 1);
6531 if (!js_EmitTree(cx
, cg
, pn
->pn_head
))
6533 cg
->arrayCompDepth
= saveDepth
;
6535 /* Emit the usual op needed for decompilation. */
6536 if (!EmitEndInit(cx
, cg
, 1))
6540 #endif /* JS_HAS_GENERATORS */
6543 for (atomIndex
= 0; pn2
; atomIndex
++, pn2
= pn2
->pn_next
) {
6544 if (op
== JSOP_NEWINIT
&& !EmitNumberOp(cx
, atomIndex
, cg
))
6546 if (pn2
->pn_type
== TOK_COMMA
&& pn2
->pn_arity
== PN_NULLARY
) {
6547 if (js_Emit1(cx
, cg
, JSOP_HOLE
) < 0)
6550 if (!js_EmitTree(cx
, cg
, pn2
))
6553 if (op
== JSOP_NEWINIT
&& js_Emit1(cx
, cg
, JSOP_INITELEM
) < 0)
6556 JS_ASSERT(atomIndex
== pn
->pn_count
);
6558 if (pn
->pn_xflags
& PNX_ENDCOMMA
) {
6559 /* Emit a source note so we know to decompile an extra comma. */
6560 if (js_NewSrcNote(cx
, cg
, SRC_CONTINUE
) < 0)
6564 if (op
== JSOP_NEWINIT
) {
6566 * Emit an op to finish the array and, secondarily, to aid in sharp
6567 * array cleanup (if JS_HAS_SHARP_VARS) and decompilation.
6569 if (!EmitEndInit(cx
, cg
, atomIndex
))
6574 JS_ASSERT(atomIndex
< JS_BIT(16));
6575 EMIT_UINT16_IMM_OP(JSOP_NEWARRAY
, atomIndex
);
6579 #if JS_HAS_SHARP_VARS
6583 #if JS_HAS_DESTRUCTURING_SHORTHAND
6584 if (pn
->pn_xflags
& PNX_DESTRUCT
) {
6585 js_ReportCompileErrorNumber(cx
, CG_TS(cg
), pn
, JSREPORT_ERROR
,
6586 JSMSG_BAD_OBJECT_INIT
);
6591 * Emit code for {p:a, '%q':b, 2:c} that is equivalent to constructing
6592 * a new object and in source order evaluating each property value and
6593 * adding the property to the object, without invoking latent setters.
6594 * We use the JSOP_NEWINIT and JSOP_INITELEM/JSOP_INITPROP bytecodes to
6595 * ignore setters and to avoid dup'ing and popping the object as each
6596 * property is added, as JSOP_SETELEM/JSOP_SETPROP would do.
6598 if (!EmitNewInit(cx
, cg
, JSProto_Object
, pn
, sharpnum
))
6601 for (pn2
= pn
->pn_head
; pn2
; pn2
= pn2
->pn_next
) {
6602 /* Emit an index for t[2] for later consumption by JSOP_INITELEM. */
6604 if (pn3
->pn_type
== TOK_NUMBER
) {
6605 if (!EmitNumberOp(cx
, pn3
->pn_dval
, cg
))
6609 /* Emit code for the property initializer. */
6610 if (!js_EmitTree(cx
, cg
, pn2
->pn_right
))
6613 #if JS_HAS_GETTER_SETTER
6615 if (op
== JSOP_GETTER
|| op
== JSOP_SETTER
) {
6616 if (js_Emit1(cx
, cg
, op
) < 0)
6620 /* Annotate JSOP_INITELEM so we decompile 2:c and not just c. */
6621 if (pn3
->pn_type
== TOK_NUMBER
) {
6622 if (js_NewSrcNote(cx
, cg
, SRC_INITPROP
) < 0)
6624 if (js_Emit1(cx
, cg
, JSOP_INITELEM
) < 0)
6627 JS_ASSERT(pn3
->pn_type
== TOK_NAME
||
6628 pn3
->pn_type
== TOK_STRING
);
6629 ale
= cg
->atomList
.add(cg
->compiler
, pn3
->pn_atom
);
6633 JSOp initOp
= (PN_OP(pn2
->pn_right
) == JSOP_LAMBDA
&&
6634 !(pn2
->pn_right
->pn_funbox
->tcflags
6635 & (TCF_FUN_USES_ARGUMENTS
| TCF_FUN_USES_OWN_NAME
))
6636 #if JS_HAS_GETTER_SETTER
6637 && op
!= JSOP_GETTER
&& op
!= JSOP_SETTER
6642 EMIT_INDEX_OP(initOp
, ALE_INDEX(ale
));
6646 if (!EmitEndInit(cx
, cg
, pn
->pn_count
))
6650 #if JS_HAS_SHARP_VARS
6652 JS_ASSERT(cg
->hasSharps());
6653 sharpnum
= pn
->pn_num
;
6655 if (pn
->pn_type
== TOK_RB
)
6657 # if JS_HAS_GENERATORS
6658 if (pn
->pn_type
== TOK_ARRAYCOMP
)
6661 if (pn
->pn_type
== TOK_RC
)
6662 goto do_emit_object
;
6664 if (!js_EmitTree(cx
, cg
, pn
))
6666 EMIT_UINT16PAIR_IMM_OP(JSOP_DEFSHARP
, cg
->sharpSlotBase
, (jsatomid
) sharpnum
);
6670 JS_ASSERT(cg
->hasSharps());
6671 EMIT_UINT16PAIR_IMM_OP(JSOP_USESHARP
, cg
->sharpSlotBase
, (jsatomid
) pn
->pn_num
);
6673 #endif /* JS_HAS_SHARP_VARS */
6677 * Cope with a left-over function definition that was replaced by a use
6678 * of a later function definition of the same name. See FunctionDef and
6679 * MakeDefIntoUse in jsparse.cpp.
6681 if (pn
->pn_op
== JSOP_NOP
)
6683 if (!EmitNameOp(cx
, cg
, pn
, JS_FALSE
))
6687 #if JS_HAS_XML_SUPPORT
6692 case TOK_XMLCOMMENT
:
6695 ok
= EmitAtomOp(cx
, pn
, PN_OP(pn
), cg
);
6699 ok
= EmitNumberOp(cx
, pn
->pn_dval
, cg
);
6704 * If the regexp's script is one-shot, we can avoid the extra
6705 * fork-on-exec costs of JSOP_REGEXP by selecting JSOP_OBJECT.
6706 * Otherwise, to avoid incorrect proto, parent, and lastIndex
6707 * sharing among threads and sequentially across re-execution,
6708 * select JSOP_REGEXP.
6710 JS_ASSERT(pn
->pn_op
== JSOP_REGEXP
);
6711 if (cg
->flags
& TCF_COMPILE_N_GO
) {
6712 ok
= EmitObjectOp(cx
, pn
->pn_objbox
, JSOP_OBJECT
, cg
);
6714 ok
= EmitIndexOp(cx
, JSOP_REGEXP
,
6715 cg
->regexpList
.index(pn
->pn_objbox
),
6720 #if JS_HAS_XML_SUPPORT
6724 if (js_Emit1(cx
, cg
, PN_OP(pn
)) < 0)
6728 #if JS_HAS_DEBUGGER_KEYWORD
6730 if (js_Emit1(cx
, cg
, JSOP_DEBUGGER
) < 0)
6733 #endif /* JS_HAS_DEBUGGER_KEYWORD */
6735 #if JS_HAS_XML_SUPPORT
6738 if (pn
->pn_op
== JSOP_XMLOBJECT
) {
6739 ok
= EmitObjectOp(cx
, pn
->pn_objbox
, PN_OP(pn
), cg
);
6743 JS_ASSERT(PN_TYPE(pn
) == TOK_XMLLIST
|| pn
->pn_count
!= 0);
6744 switch (pn
->pn_head
? PN_TYPE(pn
->pn_head
) : TOK_XMLLIST
) {
6752 if (js_Emit1(cx
, cg
, JSOP_STARTXML
) < 0)
6756 for (pn2
= pn
->pn_head
; pn2
; pn2
= pn2
->pn_next
) {
6757 if (pn2
->pn_type
== TOK_LC
&&
6758 js_Emit1(cx
, cg
, JSOP_STARTXMLEXPR
) < 0) {
6761 if (!js_EmitTree(cx
, cg
, pn2
))
6763 if (pn2
!= pn
->pn_head
&& js_Emit1(cx
, cg
, JSOP_ADD
) < 0)
6767 if (pn
->pn_xflags
& PNX_XMLROOT
) {
6768 if (pn
->pn_count
== 0) {
6769 JS_ASSERT(pn
->pn_type
== TOK_XMLLIST
);
6770 atom
= cx
->runtime
->atomState
.emptyAtom
;
6771 ale
= cg
->atomList
.add(cg
->compiler
, atom
);
6774 EMIT_INDEX_OP(JSOP_STRING
, ALE_INDEX(ale
));
6776 if (js_Emit1(cx
, cg
, PN_OP(pn
)) < 0)
6781 JS_ASSERT(pn
->pn_count
!= 0);
6786 if (pn
->pn_op
== JSOP_XMLOBJECT
) {
6787 ok
= EmitObjectOp(cx
, pn
->pn_objbox
, PN_OP(pn
), cg
);
6797 if (js_Emit1(cx
, cg
, JSOP_STARTXML
) < 0)
6800 ale
= cg
->atomList
.add(cg
->compiler
,
6801 (pn
->pn_type
== TOK_XMLETAGO
)
6802 ? cx
->runtime
->atomState
.etagoAtom
6803 : cx
->runtime
->atomState
.stagoAtom
);
6806 EMIT_INDEX_OP(JSOP_STRING
, ALE_INDEX(ale
));
6808 JS_ASSERT(pn
->pn_count
!= 0);
6810 if (pn2
->pn_type
== TOK_LC
&& js_Emit1(cx
, cg
, JSOP_STARTXMLEXPR
) < 0)
6812 if (!js_EmitTree(cx
, cg
, pn2
))
6814 if (js_Emit1(cx
, cg
, JSOP_ADD
) < 0)
6817 for (pn2
= pn2
->pn_next
, i
= 0; pn2
; pn2
= pn2
->pn_next
, i
++) {
6818 if (pn2
->pn_type
== TOK_LC
&&
6819 js_Emit1(cx
, cg
, JSOP_STARTXMLEXPR
) < 0) {
6822 if (!js_EmitTree(cx
, cg
, pn2
))
6824 if ((i
& 1) && pn2
->pn_type
== TOK_LC
) {
6825 if (js_Emit1(cx
, cg
, JSOP_TOATTRVAL
) < 0)
6828 if (js_Emit1(cx
, cg
,
6829 (i
& 1) ? JSOP_ADDATTRVAL
: JSOP_ADDATTRNAME
) < 0) {
6834 ale
= cg
->atomList
.add(cg
->compiler
,
6835 (pn
->pn_type
== TOK_XMLPTAGC
)
6836 ? cx
->runtime
->atomState
.ptagcAtom
6837 : cx
->runtime
->atomState
.tagcAtom
);
6840 EMIT_INDEX_OP(JSOP_STRING
, ALE_INDEX(ale
));
6841 if (js_Emit1(cx
, cg
, JSOP_ADD
) < 0)
6844 if ((pn
->pn_xflags
& PNX_XMLROOT
) && js_Emit1(cx
, cg
, PN_OP(pn
)) < 0)
6850 if (pn
->pn_arity
== PN_LIST
) {
6851 JS_ASSERT(pn
->pn_count
!= 0);
6852 for (pn2
= pn
->pn_head
; pn2
; pn2
= pn2
->pn_next
) {
6853 if (pn2
->pn_type
== TOK_LC
&&
6854 js_Emit1(cx
, cg
, JSOP_STARTXMLEXPR
) < 0) {
6857 if (!js_EmitTree(cx
, cg
, pn2
))
6859 if (pn2
!= pn
->pn_head
&& js_Emit1(cx
, cg
, JSOP_ADD
) < 0)
6863 JS_ASSERT(pn
->pn_arity
== PN_NULLARY
);
6864 ok
= (pn
->pn_op
== JSOP_OBJECT
)
6865 ? EmitObjectOp(cx
, pn
->pn_objbox
, PN_OP(pn
), cg
)
6866 : EmitAtomOp(cx
, pn
, PN_OP(pn
), cg
);
6871 ale
= cg
->atomList
.add(cg
->compiler
, pn
->pn_atom2
);
6874 if (!EmitIndexOp(cx
, JSOP_QNAMEPART
, ALE_INDEX(ale
), cg
))
6876 if (!EmitAtomOp(cx
, pn
, JSOP_XMLPI
, cg
))
6879 #endif /* JS_HAS_XML_SUPPORT */
6885 if (ok
&& --cg
->emitLevel
== 0) {
6887 ok
= OptimizeSpanDeps(cx
, cg
);
6888 if (!UpdateLineNumberNotes(cx
, cg
, pn
->pn_pos
.end
.lineno
))
6896 * We should try to get rid of offsetBias (always 0 or 1, where 1 is
6897 * JSOP_{NOP,POP}_LENGTH), which is used only by SRC_FOR and SRC_DECL.
/*
 * Source-note specification table, indexed by JSSrcNoteType: each row gives
 * the note's printable name, its operand arity (consumed by js_SrcNoteLength,
 * js_GetSrcNoteOffset and js_SetSrcNoteOffset), its offsetBias (see the
 * comment above about SRC_FOR/SRC_DECL), and a fourth field — presumably a
 * span-dependence flag; verify against JSSrcNoteSpec in jsemit.h.
 * NOTE(review): this extraction appears to have dropped several rows and the
 * closing "};" of the initializer; the row order must match the JSSrcNoteType
 * enum in jsemit.h — confirm against the upstream file before relying on it.
 */
6899 JS_FRIEND_DATA(JSSrcNoteSpec
) js_SrcNoteSpec
[] = {
6902 {"if-else", 2, 0, 1},
6905 {"continue", 0, 0, 0},
6907 {"pcdelta", 1, 0, 1},
6908 {"assignop", 0, 0, 0},
6911 {"hidden", 0, 0, 0},
6912 {"pcbase", 1, 0, -1},
6914 {"labelbrace", 1, 0, 0},
6915 {"endbrace", 0, 0, 0},
6916 {"break2label", 1, 0, 0},
6917 {"cont2label", 1, 0, 0},
6918 {"switch", 2, 0, 1},
6919 {"funcdef", 1, 0, 0},
6921 {"extended", -1, 0, 0},
6922 {"newline", 0, 0, 0},
6923 {"setline", 1, 0, 0},
6924 {"xdelta", 0, 0, 0},
6928 AllocSrcNote(JSContext
*cx
, JSCodeGenerator
*cg
)
6934 index
= CG_NOTE_COUNT(cg
);
6935 if (((uintN
)index
& CG_NOTE_MASK(cg
)) == 0) {
6936 pool
= cg
->notePool
;
6937 size
= SRCNOTE_SIZE(CG_NOTE_MASK(cg
) + 1);
6938 if (!CG_NOTES(cg
)) {
6939 /* Allocate the first note array lazily; leave noteMask alone. */
6940 JS_ARENA_ALLOCATE_CAST(CG_NOTES(cg
), jssrcnote
*, pool
, size
);
6942 /* Grow by doubling note array size; update noteMask on success. */
6943 JS_ARENA_GROW_CAST(CG_NOTES(cg
), jssrcnote
*, pool
, size
, size
);
6945 CG_NOTE_MASK(cg
) = (CG_NOTE_MASK(cg
) << 1) | 1;
6947 if (!CG_NOTES(cg
)) {
6948 js_ReportOutOfScriptQuota(cx
);
6953 CG_NOTE_COUNT(cg
) = index
+ 1;
6958 js_NewSrcNote(JSContext
*cx
, JSCodeGenerator
*cg
, JSSrcNoteType type
)
6962 ptrdiff_t offset
, delta
, xdelta
;
6965 * Claim a note slot in CG_NOTES(cg) by growing it if necessary and then
6966 * incrementing CG_NOTE_COUNT(cg).
6968 index
= AllocSrcNote(cx
, cg
);
6971 sn
= &CG_NOTES(cg
)[index
];
6974 * Compute delta from the last annotated bytecode's offset. If it's too
6975 * big to fit in sn, allocate one or more xdelta notes and reset sn.
6977 offset
= CG_OFFSET(cg
);
6978 delta
= offset
- CG_LAST_NOTE_OFFSET(cg
);
6979 CG_LAST_NOTE_OFFSET(cg
) = offset
;
6980 if (delta
>= SN_DELTA_LIMIT
) {
6982 xdelta
= JS_MIN(delta
, SN_XDELTA_MASK
);
6983 SN_MAKE_XDELTA(sn
, xdelta
);
6985 index
= AllocSrcNote(cx
, cg
);
6988 sn
= &CG_NOTES(cg
)[index
];
6989 } while (delta
>= SN_DELTA_LIMIT
);
6993 * Initialize type and delta, then allocate the minimum number of notes
6994 * needed for type's arity. Usually, we won't need more, but if an offset
6995 * does take two bytes, js_SetSrcNoteOffset will grow CG_NOTES(cg).
6997 SN_MAKE_NOTE(sn
, type
, delta
);
6998 for (n
= (intN
)js_SrcNoteSpec
[type
].arity
; n
> 0; n
--) {
6999 if (js_NewSrcNote(cx
, cg
, SRC_NULL
) < 0)
7006 js_NewSrcNote2(JSContext
*cx
, JSCodeGenerator
*cg
, JSSrcNoteType type
,
7011 index
= js_NewSrcNote(cx
, cg
, type
);
7013 if (!js_SetSrcNoteOffset(cx
, cg
, index
, 0, offset
))
7020 js_NewSrcNote3(JSContext
*cx
, JSCodeGenerator
*cg
, JSSrcNoteType type
,
7021 ptrdiff_t offset1
, ptrdiff_t offset2
)
7025 index
= js_NewSrcNote(cx
, cg
, type
);
7027 if (!js_SetSrcNoteOffset(cx
, cg
, index
, 0, offset1
))
7029 if (!js_SetSrcNoteOffset(cx
, cg
, index
, 1, offset2
))
7036 GrowSrcNotes(JSContext
*cx
, JSCodeGenerator
*cg
)
7041 /* Grow by doubling note array size; update noteMask on success. */
7042 pool
= cg
->notePool
;
7043 size
= SRCNOTE_SIZE(CG_NOTE_MASK(cg
) + 1);
7044 JS_ARENA_GROW_CAST(CG_NOTES(cg
), jssrcnote
*, pool
, size
, size
);
7045 if (!CG_NOTES(cg
)) {
7046 js_ReportOutOfScriptQuota(cx
);
7049 CG_NOTE_MASK(cg
) = (CG_NOTE_MASK(cg
) << 1) | 1;
7054 js_AddToSrcNoteDelta(JSContext
*cx
, JSCodeGenerator
*cg
, jssrcnote
*sn
,
7057 ptrdiff_t base
, limit
, newdelta
, diff
;
7061 * Called only from OptimizeSpanDeps and js_FinishTakingSrcNotes to add to
7062 * main script note deltas, and only by a small positive amount.
7064 JS_ASSERT(cg
->current
== &cg
->main
);
7065 JS_ASSERT((unsigned) delta
< (unsigned) SN_XDELTA_LIMIT
);
7067 base
= SN_DELTA(sn
);
7068 limit
= SN_IS_XDELTA(sn
) ? SN_XDELTA_LIMIT
: SN_DELTA_LIMIT
;
7069 newdelta
= base
+ delta
;
7070 if (newdelta
< limit
) {
7071 SN_SET_DELTA(sn
, newdelta
);
7073 index
= sn
- cg
->main
.notes
;
7074 if ((cg
->main
.noteCount
& cg
->main
.noteMask
) == 0) {
7075 if (!GrowSrcNotes(cx
, cg
))
7077 sn
= cg
->main
.notes
+ index
;
7079 diff
= cg
->main
.noteCount
- index
;
7080 cg
->main
.noteCount
++;
7081 memmove(sn
+ 1, sn
, SRCNOTE_SIZE(diff
));
7082 SN_MAKE_XDELTA(sn
, delta
);
7088 JS_FRIEND_API(uintN
)
7089 js_SrcNoteLength(jssrcnote
*sn
)
7094 arity
= (intN
)js_SrcNoteSpec
[SN_TYPE(sn
)].arity
;
7095 for (base
= sn
++; arity
; sn
++, arity
--) {
7096 if (*sn
& SN_3BYTE_OFFSET_FLAG
)
7102 JS_FRIEND_API(ptrdiff_t)
7103 js_GetSrcNoteOffset(jssrcnote
*sn
, uintN which
)
7105 /* Find the offset numbered which (i.e., skip exactly which offsets). */
7106 JS_ASSERT(SN_TYPE(sn
) != SRC_XDELTA
);
7107 JS_ASSERT((intN
) which
< js_SrcNoteSpec
[SN_TYPE(sn
)].arity
);
7108 for (sn
++; which
; sn
++, which
--) {
7109 if (*sn
& SN_3BYTE_OFFSET_FLAG
)
7112 if (*sn
& SN_3BYTE_OFFSET_FLAG
) {
7113 return (ptrdiff_t)(((uint32
)(sn
[0] & SN_3BYTE_OFFSET_MASK
) << 16)
7117 return (ptrdiff_t)*sn
;
7121 js_SetSrcNoteOffset(JSContext
*cx
, JSCodeGenerator
*cg
, uintN index
,
7122 uintN which
, ptrdiff_t offset
)
7127 if ((jsuword
)offset
>= (jsuword
)((ptrdiff_t)SN_3BYTE_OFFSET_FLAG
<< 16)) {
7128 ReportStatementTooLarge(cx
, cg
);
7132 /* Find the offset numbered which (i.e., skip exactly which offsets). */
7133 sn
= &CG_NOTES(cg
)[index
];
7134 JS_ASSERT(SN_TYPE(sn
) != SRC_XDELTA
);
7135 JS_ASSERT((intN
) which
< js_SrcNoteSpec
[SN_TYPE(sn
)].arity
);
7136 for (sn
++; which
; sn
++, which
--) {
7137 if (*sn
& SN_3BYTE_OFFSET_FLAG
)
7141 /* See if the new offset requires three bytes. */
7142 if (offset
> (ptrdiff_t)SN_3BYTE_OFFSET_MASK
) {
7143 /* Maybe this offset was already set to a three-byte value. */
7144 if (!(*sn
& SN_3BYTE_OFFSET_FLAG
)) {
7145 /* Losing, need to insert another two bytes for this offset. */
7146 index
= sn
- CG_NOTES(cg
);
7149 * Simultaneously test to see if the source note array must grow to
7150 * accommodate either the first or second byte of additional storage
7151 * required by this 3-byte offset.
7153 if (((CG_NOTE_COUNT(cg
) + 1) & CG_NOTE_MASK(cg
)) <= 1) {
7154 if (!GrowSrcNotes(cx
, cg
))
7156 sn
= CG_NOTES(cg
) + index
;
7158 CG_NOTE_COUNT(cg
) += 2;
7160 diff
= CG_NOTE_COUNT(cg
) - (index
+ 3);
7161 JS_ASSERT(diff
>= 0);
7163 memmove(sn
+ 3, sn
+ 1, SRCNOTE_SIZE(diff
));
7165 *sn
++ = (jssrcnote
)(SN_3BYTE_OFFSET_FLAG
| (offset
>> 16));
7166 *sn
++ = (jssrcnote
)(offset
>> 8);
7168 *sn
= (jssrcnote
)offset
;
/*
 * NOTE(review): in upstream jsemit.cpp this #define is normally wrapped in a
 * comment so the histogram code below is compiled out; the comment markers
 * appear to have been lost in extraction.  Verify against the upstream file
 * before enabling.
 */
7173 #define DEBUG_srcnotesize
7176 #ifdef DEBUG_srcnotesize
/* Per-power-of-two-size counts of emitted source notes (NBINS is defined
   nearby in the upstream file; its definition was dropped here). */
7178 static uint32 hist
[NBINS
];
/* Write the source-note size histogram to /tmp/srcnotes.hist, one row per
   size bin, with one '*' per ten notes counted in that bin. */
7180 void DumpSrcNoteSizeHist()
7186 fp
= fopen("/tmp/srcnotes.hist", "w");
/* Unbuffered so the file is complete even if the process dies mid-dump. */
7189 setvbuf(fp
, NULL
, _IONBF
, 0);
7191 fprintf(fp
, "SrcNote size histogram:\n");
7192 for (i
= 0; i
< NBINS
; i
++) {
7193 fprintf(fp
, "%4u %4u ", JS_BIT(i
), hist
[i
]);
7194 for (n
= (int) JS_HOWMANY(hist
[i
], 10); n
> 0; --n
)
7203 * Fill in the storage at notes with prolog and main srcnotes; the space at
7204 * notes was allocated using the CG_COUNT_FINAL_SRCNOTES macro from jsemit.h.
7205 * SO DON'T CHANGE THIS FUNCTION WITHOUT AT LEAST CHECKING WHETHER jsemit.h's
7206 * CG_COUNT_FINAL_SRCNOTES MACRO NEEDS CORRESPONDING CHANGES!
7209 js_FinishTakingSrcNotes(JSContext
*cx
, JSCodeGenerator
*cg
, jssrcnote
*notes
)
7211 uintN prologCount
, mainCount
, totalCount
;
7212 ptrdiff_t offset
, delta
;
7215 JS_ASSERT(cg
->current
== &cg
->main
);
7217 prologCount
= cg
->prolog
.noteCount
;
7218 if (prologCount
&& cg
->prolog
.currentLine
!= cg
->firstLine
) {
7219 CG_SWITCH_TO_PROLOG(cg
);
7220 if (js_NewSrcNote2(cx
, cg
, SRC_SETLINE
, (ptrdiff_t)cg
->firstLine
) < 0)
7222 prologCount
= cg
->prolog
.noteCount
;
7223 CG_SWITCH_TO_MAIN(cg
);
7226 * Either no prolog srcnotes, or no line number change over prolog.
7227 * We don't need a SRC_SETLINE, but we may need to adjust the offset
7228 * of the first main note, by adding to its delta and possibly even
7229 * prepending SRC_XDELTA notes to it to account for prolog bytecodes
7230 * that came at and after the last annotated bytecode.
7232 offset
= CG_PROLOG_OFFSET(cg
) - cg
->prolog
.lastNoteOffset
;
7233 JS_ASSERT(offset
>= 0);
7234 if (offset
> 0 && cg
->main
.noteCount
!= 0) {
7235 /* NB: Use as much of the first main note's delta as we can. */
7236 sn
= cg
->main
.notes
;
7237 delta
= SN_IS_XDELTA(sn
)
7238 ? SN_XDELTA_MASK
- (*sn
& SN_XDELTA_MASK
)
7239 : SN_DELTA_MASK
- (*sn
& SN_DELTA_MASK
);
7243 if (!js_AddToSrcNoteDelta(cx
, cg
, sn
, delta
))
7248 delta
= JS_MIN(offset
, SN_XDELTA_MASK
);
7249 sn
= cg
->main
.notes
;
7254 mainCount
= cg
->main
.noteCount
;
7255 totalCount
= prologCount
+ mainCount
;
7257 memcpy(notes
, cg
->prolog
.notes
, SRCNOTE_SIZE(prologCount
));
7258 memcpy(notes
+ prologCount
, cg
->main
.notes
, SRCNOTE_SIZE(mainCount
));
7259 SN_MAKE_TERMINATOR(¬es
[totalCount
]);
7262 { int bin
= JS_CeilingLog2(totalCount
);
7272 NewTryNote(JSContext
*cx
, JSCodeGenerator
*cg
, JSTryNoteKind kind
,
7273 uintN stackDepth
, size_t start
, size_t end
)
7277 JS_ASSERT((uintN
)(uint16
)stackDepth
== stackDepth
);
7278 JS_ASSERT(start
<= end
);
7279 JS_ASSERT((size_t)(uint32
)start
== start
);
7280 JS_ASSERT((size_t)(uint32
)end
== end
);
7282 JS_ARENA_ALLOCATE_TYPE(tryNode
, JSTryNode
, &cx
->tempPool
);
7284 js_ReportOutOfScriptQuota(cx
);
7288 tryNode
->note
.kind
= kind
;
7289 tryNode
->note
.stackDepth
= (uint16
)stackDepth
;
7290 tryNode
->note
.start
= (uint32
)start
;
7291 tryNode
->note
.length
= (uint32
)(end
- start
);
7292 tryNode
->prev
= cg
->lastTryNode
;
7293 cg
->lastTryNode
= tryNode
;
7299 js_FinishTakingTryNotes(JSCodeGenerator
*cg
, JSTryNoteArray
*array
)
7304 JS_ASSERT(array
->length
> 0 && array
->length
== cg
->ntrynotes
);
7305 tn
= array
->vector
+ array
->length
;
7306 tryNode
= cg
->lastTryNode
;
7308 *--tn
= tryNode
->note
;
7309 } while ((tryNode
= tryNode
->prev
) != NULL
);
7310 JS_ASSERT(tn
== array
->vector
);
7314 * Find the index of the given object for code generator.
7316 * Since the emitter refers to each parsed object only once, for the index we
7317 use the number of already indexed objects. We also add the object to a list
7318 * to convert the list to a fixed-size array when we complete code generation,
7319 * see JSCGObjectList::finish below.
7321 * Most of the objects go to JSCodeGenerator.objectList but for regexp we use a
7322 * separated JSCodeGenerator.regexpList. In this way the emitted index can be
7323 * directly used to store and fetch a reference to a cloned RegExp object that
7324 * shares the same JSRegExp private data created for the object literal in
7325 * objbox. We need a cloned object to hold lastIndex and other direct properties
7326 * that should not be shared among threads sharing a precompiled function or
7329 * If the code being compiled is function code, allocate a reserved slot in
7330 * the cloned function object that shares its precompiled script with other
7331 * cloned function objects and with the compiler-created clone-parent. There
7332 * are nregexps = script->regexps()->length such reserved slots in each
7333 * function object cloned from fun->object. NB: during compilation, a funobj
7334 * slots element must never be allocated, because js_AllocSlot could hand out
7335 * one of the slots that should be given to a regexp clone.
7337 * If the code being compiled is global code, the cloned regexp are stored in
7338 * fp->vars slot after cg->ngvars and to protect regexp slots from GC we set
7339 * fp->nvars to ngvars + nregexps.
7341 * The slots initially contain undefined or null. We populate them lazily when
7342 * JSOP_REGEXP is executed for the first time.
7344 * Why clone regexp objects? ECMA specifies that when a regular expression
7345 * literal is scanned, a RegExp object is created. In the spec, compilation
7346 * and execution happen indivisibly, but in this implementation and many of
7347 * its embeddings, code is precompiled early and re-executed in multiple
7348 * threads, or using multiple global objects, or both, for efficiency.
7350 * In such cases, naively following ECMA leads to wrongful sharing of RegExp
7351 * objects, which makes for collisions on the lastIndex property (especially
7352 * for global regexps) and on any ad-hoc properties. Also, __proto__ and
7353 * __parent__ refer to the pre-compilation prototype and global objects, a
7354 * pigeon-hole problem for instanceof tests.
7357 JSCGObjectList::index(JSObjectBox
*objbox
)
7359 JS_ASSERT(!objbox
->emitLink
);
7360 objbox
->emitLink
= lastbox
;
7366 JSCGObjectList::finish(JSObjectArray
*array
)
7369 JSObjectBox
*objbox
;
7371 JS_ASSERT(length
<= INDEX_LIMIT
);
7372 JS_ASSERT(length
== array
->length
);
7374 cursor
= array
->vector
+ array
->length
;
7378 JS_ASSERT(!*cursor
);
7379 *cursor
= objbox
->object
;
7380 } while ((objbox
= objbox
->emitLink
) != NULL
);
7381 JS_ASSERT(cursor
== array
->vector
);