1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=8 sw=4 et tw=99:
4 * ***** BEGIN LICENSE BLOCK *****
5 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
7 * The contents of this file are subject to the Mozilla Public License Version
8 * 1.1 (the "License"); you may not use this file except in compliance with
9 * the License. You may obtain a copy of the License at
10 * http://www.mozilla.org/MPL/
12 * Software distributed under the License is distributed on an "AS IS" basis,
13 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 * for the specific language governing rights and limitations under the
17 * The Original Code is Mozilla Communicator client code, released
20 * The Initial Developer of the Original Code is
21 * Netscape Communications Corporation.
22 * Portions created by the Initial Developer are Copyright (C) 1998
23 * the Initial Developer. All Rights Reserved.
27 * Alternatively, the contents of this file may be used under the terms of
28 * either of the GNU General Public License Version 2 or later (the "GPL"),
29 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
30 * in which case the provisions of the GPL or the LGPL are applicable instead
31 * of those above. If you wish to allow use of your version of this file only
32 * under the terms of either the GPL or the LGPL, and not to allow others to
33 * use your version of this file under the terms of the MPL, indicate your
34 * decision by deleting the provisions above and replace them with the notice
35 * and other provisions required by the GPL or the LGPL. If you do not delete
36 * the provisions above, a recipient may use your version of this file under
37 * the terms of any one of the MPL, the GPL or the LGPL.
39 * ***** END LICENSE BLOCK ***** */
42 * JS bytecode generation.
51 #include "jsarena.h" /* Added by JSIFY */
52 #include "jsutil.h" /* Added by JSIFY */
59 #include "jsversion.h"
69 #include "jsautooplen.h"
70 #include "jsstaticcheck.h"
72 /* Allocation chunk counts, must be powers of two in general. */
73 #define BYTECODE_CHUNK 256 /* code allocation increment */
74 #define SRCNOTE_CHUNK 64 /* initial srcnote allocation increment */
75 #define TRYNOTE_CHUNK 64 /* trynote allocation increment */
77 /* Macros to compute byte sizes from typed element counts. */
78 #define BYTECODE_SIZE(n) ((n) * sizeof(jsbytecode))
79 #define SRCNOTE_SIZE(n) ((n) * sizeof(jssrcnote))
80 #define TRYNOTE_SIZE(n) ((n) * sizeof(JSTryNote))
85 NewTryNote(JSContext
*cx
, JSCodeGenerator
*cg
, JSTryNoteKind kind
,
86 uintN stackDepth
, size_t start
, size_t end
);
88 JSCodeGenerator::JSCodeGenerator(Parser
*parser
,
89 JSArenaPool
*cpool
, JSArenaPool
*npool
,
91 : JSTreeContext(parser
),
92 codePool(cpool
), notePool(npool
),
93 codeMark(cpool
->getMark()), noteMark(npool
->getMark()),
94 stackDepth(0), maxStackDepth(0),
95 ntrynotes(0), lastTryNode(NULL
),
96 spanDeps(NULL
), jumpTargets(NULL
), jtFreeList(NULL
),
97 numSpanDeps(0), numJumpTargets(0), spanDepTodo(0),
100 constMap(parser
->context
)
102 flags
= TCF_COMPILING
;
103 memset(&prolog
, 0, sizeof prolog
);
104 memset(&main
, 0, sizeof main
);
106 firstLine
= prolog
.currentLine
= main
.currentLine
= lineno
;
107 prolog
.noteMask
= main
.noteMask
= SRCNOTE_CHUNK
- 1;
108 memset(&upvarMap
, 0, sizeof upvarMap
);
/* Post-construction initialization: set up the constant map; returns false on failure. */
111 bool JSCodeGenerator::init()
113 return constMap
.init();
/*
 * Destructor: roll both arena pools back to the marks captured at
 * construction, then free the two heap allocations this object may own.
 */
116 JSCodeGenerator::~JSCodeGenerator()
/* Return bytecode and srcnote arena space acquired since construction. */
118 codePool
->release(codeMark
);
119 notePool
->release(noteMark
);
121 /* NB: non-null only after OOM. */
123 parser
->context
->free(spanDeps
);
/* upvarMap.vector may be null; assuming context->free is null-safe -- TODO confirm. */
126 parser
->context
->free(upvarMap
.vector
);
130 EmitCheck(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp op
, ptrdiff_t delta
)
132 jsbytecode
*base
, *limit
, *next
;
133 ptrdiff_t offset
, length
;
138 limit
= CG_LIMIT(cg
);
139 offset
= next
- base
;
140 if (next
+ delta
> limit
) {
141 length
= offset
+ delta
;
142 length
= (length
<= BYTECODE_CHUNK
)
144 : JS_BIT(JS_CeilingLog2(length
));
145 incr
= BYTECODE_SIZE(length
);
147 cg
->codePool
->allocateCast
<jsbytecode
*>(base
, incr
);
149 size
= BYTECODE_SIZE(limit
- base
);
151 cg
->codePool
->growCast
<jsbytecode
*>(base
, size
, incr
);
154 js_ReportOutOfScriptQuota(cx
);
158 CG_LIMIT(cg
) = base
+ length
;
159 CG_NEXT(cg
) = base
+ offset
;
165 UpdateDepth(JSContext
*cx
, JSCodeGenerator
*cg
, ptrdiff_t target
)
169 const JSCodeSpec
*cs
;
170 uintN extra
, depth
, nuses
;
173 pc
= CG_CODE(cg
, target
);
175 cs
= &js_CodeSpec
[op
];
177 extern uint8 js_opcode2extra
[];
178 extra
= js_opcode2extra
[op
];
182 if ((cs
->format
& JOF_TMPSLOT_MASK
) || extra
) {
183 depth
= (uintN
) cg
->stackDepth
+
184 ((cs
->format
& JOF_TMPSLOT_MASK
) >> JOF_TMPSLOT_SHIFT
) +
186 if (depth
> cg
->maxStackDepth
)
187 cg
->maxStackDepth
= depth
;
190 nuses
= js_GetStackUses(cs
, op
, pc
);
191 cg
->stackDepth
-= nuses
;
192 JS_ASSERT(cg
->stackDepth
>= 0);
193 if (cg
->stackDepth
< 0) {
197 JS_snprintf(numBuf
, sizeof numBuf
, "%d", target
);
198 ts
= &cg
->parser
->tokenStream
;
199 JS_ReportErrorFlagsAndNumber(cx
, JSREPORT_WARNING
,
200 js_GetErrorMessage
, NULL
,
201 JSMSG_STACK_UNDERFLOW
,
202 ts
->getFilename() ? ts
->getFilename() : "stdin",
209 /* We just executed IndexParsedObject */
210 JS_ASSERT(op
== JSOP_ENTERBLOCK
);
211 JS_ASSERT(nuses
== 0);
212 blockObj
= cg
->objectList
.lastbox
->object
;
213 JS_ASSERT(blockObj
->getClass() == &js_BlockClass
);
214 JS_ASSERT(JSVAL_IS_VOID(blockObj
->fslots
[JSSLOT_BLOCK_DEPTH
]));
216 OBJ_SET_BLOCK_DEPTH(cx
, blockObj
, cg
->stackDepth
);
217 ndefs
= OBJ_BLOCK_COUNT(cx
, blockObj
);
219 cg
->stackDepth
+= ndefs
;
220 if ((uintN
)cg
->stackDepth
> cg
->maxStackDepth
)
221 cg
->maxStackDepth
= cg
->stackDepth
;
/*
 * Emit a one-byte opcode: reserve space via EmitCheck, store op at the code
 * cursor, and update the modeled operand-stack depth.  Returns the opcode's
 * bytecode offset.
 * NOTE(review): the success guard between EmitCheck and the store (original
 * lines 228-229) was dropped by extraction -- confirm against upstream.
 */
225 js_Emit1(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp op
)
227 ptrdiff_t offset
= EmitCheck(cx
, cg
, op
, 1);
230 *CG_NEXT(cg
)++ = (jsbytecode
)op
;
231 UpdateDepth(cx
, cg
, offset
);
/*
 * Emit an opcode followed by one immediate byte (op1).  Reserves two bytes,
 * writes op, advances the code cursor, and updates modeled stack depth.
 * NOTE(review): the line storing op1 into next[1] (original line 244) was
 * dropped by extraction -- confirm against upstream.
 */
237 js_Emit2(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp op
, jsbytecode op1
)
239 ptrdiff_t offset
= EmitCheck(cx
, cg
, op
, 2);
242 jsbytecode
*next
= CG_NEXT(cg
);
243 next
[0] = (jsbytecode
)op
;
245 CG_NEXT(cg
) = next
+ 2;
246 UpdateDepth(cx
, cg
, offset
);
/*
 * Emit an opcode followed by two immediate bytes (e.g. a 16-bit jump offset
 * split hi/lo).  Reserves three bytes, writes op, advances the cursor, and
 * updates modeled stack depth.
 * NOTE(review): the lines storing the two immediates into next[1]/next[2]
 * (original lines 260-261) were dropped by extraction -- confirm upstream.
 */
252 js_Emit3(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp op
, jsbytecode op1
,
255 ptrdiff_t offset
= EmitCheck(cx
, cg
, op
, 3);
258 jsbytecode
*next
= CG_NEXT(cg
);
259 next
[0] = (jsbytecode
)op
;
262 CG_NEXT(cg
) = next
+ 3;
263 UpdateDepth(cx
, cg
, offset
);
/*
 * Emit an opcode followed by `extra` immediate bytes, zero-filling the
 * immediates for the caller to patch in later.  Returns the opcode's offset.
 */
269 js_EmitN(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp op
, size_t extra
)
271 ptrdiff_t length
= 1 + (ptrdiff_t)extra
;
272 ptrdiff_t offset
= EmitCheck(cx
, cg
, op
, length
);
275 jsbytecode
*next
= CG_NEXT(cg
);
276 *next
= (jsbytecode
)op
;
/* Zero the operand bytes so a partially-patched immediate is deterministic. */
277 memset(next
+ 1, 0, BYTECODE_SIZE(extra
));
278 CG_NEXT(cg
) = next
+ length
;
281 * Don't UpdateDepth if op's use-count comes from the immediate
282 * operand yet to be stored in the extra bytes after op.
/* nuses < 0 marks variadic-use ops per js_CodeSpec; skip depth modeling then. */
284 if (js_CodeSpec
[op
].nuses
>= 0)
285 UpdateDepth(cx
, cg
, offset
);
290 /* XXX too many "... statement" L10N gaffes below -- fix via js.msg! */
291 const char js_with_statement_str
[] = "with statement";
292 const char js_finally_block_str
[] = "finally block";
293 const char js_script_str
[] = "script";
295 static const char *statementName
[] = {
296 "label statement", /* LABEL */
297 "if statement", /* IF */
298 "else statement", /* ELSE */
299 "destructuring body", /* BODY */
300 "switch statement", /* SWITCH */
302 js_with_statement_str
, /* WITH */
303 "catch block", /* CATCH */
304 "try block", /* TRY */
305 js_finally_block_str
, /* FINALLY */
306 js_finally_block_str
, /* SUBROUTINE */
307 "do loop", /* DO_LOOP */
308 "for loop", /* FOR_LOOP */
309 "for/in loop", /* FOR_IN_LOOP */
310 "while loop", /* WHILE_LOOP */
313 JS_STATIC_ASSERT(JS_ARRAY_LENGTH(statementName
) == STMT_LIMIT
);
/*
 * Name the innermost enclosing statement for diagnostics; when no statement
 * is open, report as "script".
 * NOTE(review): the guard before the first return (original lines 317-318,
 * presumably "if (!cg->topStmt)") was dropped by extraction -- confirm.
 */
316 StatementName(JSCodeGenerator
*cg
)
319 return js_script_str
;
320 return statementName
[cg
->topStmt
->type
];
/*
 * Report JSMSG_NEED_DIET: the current statement compiled to more bytecode
 * than the jump-offset machinery can address.
 * NOTE(review): the trailing argument(s) of this call (original line 327,
 * presumably StatementName(cg)) were lost in extraction -- confirm upstream.
 */
324 ReportStatementTooLarge(JSContext
*cx
, JSCodeGenerator
*cg
)
326 JS_ReportErrorNumber(cx
, js_GetErrorMessage
, NULL
, JSMSG_NEED_DIET
,
331 Span-dependent instructions in JS bytecode consist of the jump (JOF_JUMP)
332 and switch (JOF_LOOKUPSWITCH, JOF_TABLESWITCH) format opcodes, subdivided
333 into unconditional (gotos and gosubs), and conditional jumps or branches
334 (which pop a value, test it, and jump depending on its value). Most jumps
335 have just one immediate operand, a signed offset from the jump opcode's pc
336 to the target bytecode. The lookup and table switch opcodes may contain
339 Mozilla bug #80981 (http://bugzilla.mozilla.org/show_bug.cgi?id=80981) was
340 fixed by adding extended "X" counterparts to the opcodes/formats (NB: X is
341 suffixed to prefer JSOP_ORX thereby avoiding a JSOP_XOR name collision for
342 the extended form of the JSOP_OR branch opcode). The unextended or short
343 formats have 16-bit signed immediate offset operands, the extended or long
344 formats have 32-bit signed immediates. The span-dependency problem consists
345 of selecting as few long instructions as possible, or about as few -- since
346 jumps can span other jumps, extending one jump may cause another to need to
349 Most JS scripts are short, so need no extended jumps. We optimize for this
350 case by generating short jumps until we know a long jump is needed. After
351 that point, we keep generating short jumps, but each jump's 16-bit immediate
352 offset operand is actually an unsigned index into cg->spanDeps, an array of
353 JSSpanDep structs. Each struct tells the top offset in the script of the
354 opcode, the "before" offset of the jump (which will be the same as top for
355 simplex jumps, but which will index further into the bytecode array for a
356 non-initial jump offset in a lookup or table switch), the after "offset"
357 adjusted during span-dependent instruction selection (initially the same
358 value as the "before" offset), and the jump target (more below).
360 Since we generate cg->spanDeps lazily, from within js_SetJumpOffset, we must
361 ensure that all bytecode generated so far can be inspected to discover where
362 the jump offset immediate operands lie within CG_CODE(cg). But the bonus is
363 that we generate span-dependency records sorted by their offsets, so we can
364 binary-search when trying to find a JSSpanDep for a given bytecode offset,
365 or the nearest JSSpanDep at or above a given pc.
367 To avoid limiting scripts to 64K jumps, if the cg->spanDeps index overflows
368 65534, we store SPANDEP_INDEX_HUGE in the jump's immediate operand. This
369 tells us that we need to binary-search for the cg->spanDeps entry by the
370 jump opcode's bytecode offset (sd->before).
372 Jump targets need to be maintained in a data structure that lets us look
373 up an already-known target by its address (jumps may have a common target),
374 and that also lets us update the addresses (script-relative, a.k.a. absolute
375 offsets) of targets that come after a jump target (for when a jump below
376 that target needs to be extended). We use an AVL tree, implemented using
377 recursion, but with some tricky optimizations to its height-balancing code
378 (see http://www.cmcrossroads.com/bradapp/ftp/src/libs/C++/AvlTrees.html).
380 A final wrinkle: backpatch chains are linked by jump-to-jump offsets with
381 positive sign, even though they link "backward" (i.e., toward lower bytecode
382 address). We don't want to waste space and search time in the AVL tree for
383 such temporary backpatch deltas, so we use a single-bit wildcard scheme to
384 tag true JSJumpTarget pointers and encode untagged, signed (positive) deltas
385 in JSSpanDep.target pointers, depending on whether the JSSpanDep has a known
386 target, or is still awaiting backpatching.
388 Note that backpatch chains would present a problem for BuildSpanDepTable,
389 which inspects bytecode to build cg->spanDeps on demand, when the first
390 short jump offset overflows. To solve this temporary problem, we emit a
391 proxy bytecode (JSOP_BACKPATCH; JSOP_BACKPATCH_POP for branch ops) whose
392 nuses/ndefs counts help keep the stack balanced, but whose opcode format
393 distinguishes its backpatch delta immediate operand from a normal jump
397 BalanceJumpTargets(JSJumpTarget
**jtp
)
399 JSJumpTarget
*jt
, *jt2
, *root
;
400 int dir
, otherDir
, heightChanged
;
404 JS_ASSERT(jt
->balance
!= 0);
406 if (jt
->balance
< -1) {
408 doubleRotate
= (jt
->kids
[JT_LEFT
]->balance
> 0);
409 } else if (jt
->balance
> 1) {
411 doubleRotate
= (jt
->kids
[JT_RIGHT
]->balance
< 0);
416 otherDir
= JT_OTHER_DIR(dir
);
418 jt2
= jt
->kids
[otherDir
];
419 *jtp
= root
= jt2
->kids
[dir
];
421 jt
->kids
[otherDir
] = root
->kids
[dir
];
422 root
->kids
[dir
] = jt
;
424 jt2
->kids
[dir
] = root
->kids
[otherDir
];
425 root
->kids
[otherDir
] = jt2
;
428 root
->kids
[JT_LEFT
]->balance
= -JS_MAX(root
->balance
, 0);
429 root
->kids
[JT_RIGHT
]->balance
= -JS_MIN(root
->balance
, 0);
432 *jtp
= root
= jt
->kids
[otherDir
];
433 jt
->kids
[otherDir
] = root
->kids
[dir
];
434 root
->kids
[dir
] = jt
;
436 heightChanged
= (root
->balance
!= 0);
437 jt
->balance
= -((dir
== JT_LEFT
) ? --root
->balance
: ++root
->balance
);
440 return heightChanged
;
443 typedef struct AddJumpTargetArgs
{
451 AddJumpTarget(AddJumpTargetArgs
*args
, JSJumpTarget
**jtp
)
458 JSCodeGenerator
*cg
= args
->cg
;
462 cg
->jtFreeList
= jt
->kids
[JT_LEFT
];
464 args
->cx
->tempPool
.allocateCast
<JSJumpTarget
*>(jt
, sizeof *jt
);
466 js_ReportOutOfScriptQuota(args
->cx
);
470 jt
->offset
= args
->offset
;
472 jt
->kids
[JT_LEFT
] = jt
->kids
[JT_RIGHT
] = NULL
;
473 cg
->numJumpTargets
++;
479 if (jt
->offset
== args
->offset
) {
484 if (args
->offset
< jt
->offset
)
485 balanceDelta
= -AddJumpTarget(args
, &jt
->kids
[JT_LEFT
]);
487 balanceDelta
= AddJumpTarget(args
, &jt
->kids
[JT_RIGHT
]);
491 jt
->balance
+= balanceDelta
;
492 return (balanceDelta
&& jt
->balance
)
493 ? 1 - BalanceJumpTargets(jtp
)
498 static int AVLCheck(JSJumpTarget
*jt
)
503 JS_ASSERT(-1 <= jt
->balance
&& jt
->balance
<= 1);
504 lh
= AVLCheck(jt
->kids
[JT_LEFT
]);
505 rh
= AVLCheck(jt
->kids
[JT_RIGHT
]);
506 JS_ASSERT(jt
->balance
== rh
- lh
);
507 return 1 + JS_MAX(lh
, rh
);
512 SetSpanDepTarget(JSContext
*cx
, JSCodeGenerator
*cg
, JSSpanDep
*sd
,
515 AddJumpTargetArgs args
;
517 if (off
< JUMPX_OFFSET_MIN
|| JUMPX_OFFSET_MAX
< off
) {
518 ReportStatementTooLarge(cx
, cg
);
524 args
.offset
= sd
->top
+ off
;
526 AddJumpTarget(&args
, &cg
->jumpTargets
);
531 AVLCheck(cg
->jumpTargets
);
534 SD_SET_TARGET(sd
, args
.node
);
538 #define SPANDEPS_MIN 256
539 #define SPANDEPS_SIZE(n) ((n) * sizeof(JSSpanDep))
540 #define SPANDEPS_SIZE_MIN SPANDEPS_SIZE(SPANDEPS_MIN)
543 AddSpanDep(JSContext
*cx
, JSCodeGenerator
*cg
, jsbytecode
*pc
, jsbytecode
*pc2
,
547 JSSpanDep
*sdbase
, *sd
;
550 index
= cg
->numSpanDeps
;
551 if (index
+ 1 == 0) {
552 ReportStatementTooLarge(cx
, cg
);
556 if ((index
& (index
- 1)) == 0 &&
557 (!(sdbase
= cg
->spanDeps
) || index
>= SPANDEPS_MIN
)) {
558 size
= sdbase
? SPANDEPS_SIZE(index
) : SPANDEPS_SIZE_MIN
/ 2;
559 sdbase
= (JSSpanDep
*) cx
->realloc(sdbase
, size
+ size
);
562 cg
->spanDeps
= sdbase
;
565 cg
->numSpanDeps
= index
+ 1;
566 sd
= cg
->spanDeps
+ index
;
567 sd
->top
= pc
- CG_BASE(cg
);
568 sd
->offset
= sd
->before
= pc2
- CG_BASE(cg
);
570 if (js_CodeSpec
[*pc
].format
& JOF_BACKPATCH
) {
571 /* Jump offset will be backpatched if off is a non-zero "bpdelta". */
573 JS_ASSERT(off
>= 1 + JUMP_OFFSET_LEN
);
574 if (off
> BPDELTA_MAX
) {
575 ReportStatementTooLarge(cx
, cg
);
579 SD_SET_BPDELTA(sd
, off
);
580 } else if (off
== 0) {
581 /* Jump offset will be patched directly, without backpatch chaining. */
582 SD_SET_TARGET(sd
, 0);
584 /* The jump offset in off is non-zero, therefore it's already known. */
585 if (!SetSpanDepTarget(cx
, cg
, sd
, off
))
589 if (index
> SPANDEP_INDEX_MAX
)
590 index
= SPANDEP_INDEX_HUGE
;
591 SET_SPANDEP_INDEX(pc2
, index
);
596 AddSwitchSpanDeps(JSContext
*cx
, JSCodeGenerator
*cg
, jsbytecode
*pc
)
602 uintN njumps
, indexlen
;
605 JS_ASSERT(op
== JSOP_TABLESWITCH
|| op
== JSOP_LOOKUPSWITCH
);
607 off
= GET_JUMP_OFFSET(pc2
);
608 if (!AddSpanDep(cx
, cg
, pc
, pc2
, off
))
610 pc2
+= JUMP_OFFSET_LEN
;
611 if (op
== JSOP_TABLESWITCH
) {
612 low
= GET_JUMP_OFFSET(pc2
);
613 pc2
+= JUMP_OFFSET_LEN
;
614 high
= GET_JUMP_OFFSET(pc2
);
615 pc2
+= JUMP_OFFSET_LEN
;
616 njumps
= (uintN
) (high
- low
+ 1);
619 njumps
= GET_UINT16(pc2
);
621 indexlen
= INDEX_LEN
;
626 off
= GET_JUMP_OFFSET(pc2
);
627 if (!AddSpanDep(cx
, cg
, pc
, pc2
, off
))
629 pc2
+= JUMP_OFFSET_LEN
;
635 BuildSpanDepTable(JSContext
*cx
, JSCodeGenerator
*cg
)
637 jsbytecode
*pc
, *end
;
639 const JSCodeSpec
*cs
;
642 pc
= CG_BASE(cg
) + cg
->spanDepTodo
;
647 cs
= &js_CodeSpec
[op
];
649 switch (JOF_TYPE(cs
->format
)) {
650 case JOF_TABLESWITCH
:
651 case JOF_LOOKUPSWITCH
:
652 pc
= AddSwitchSpanDeps(cx
, cg
, pc
);
658 off
= GET_JUMP_OFFSET(pc
);
659 if (!AddSpanDep(cx
, cg
, pc
, pc
, off
))
672 GetSpanDep(JSCodeGenerator
*cg
, jsbytecode
*pc
)
679 index
= GET_SPANDEP_INDEX(pc
);
680 if (index
!= SPANDEP_INDEX_HUGE
)
681 return cg
->spanDeps
+ index
;
683 offset
= pc
- CG_BASE(cg
);
685 hi
= cg
->numSpanDeps
- 1;
688 sd
= cg
->spanDeps
+ mid
;
689 if (sd
->before
== offset
)
691 if (sd
->before
< offset
)
702 SetBackPatchDelta(JSContext
*cx
, JSCodeGenerator
*cg
, jsbytecode
*pc
,
707 JS_ASSERT(delta
>= 1 + JUMP_OFFSET_LEN
);
708 if (!cg
->spanDeps
&& delta
< JUMP_OFFSET_MAX
) {
709 SET_JUMP_OFFSET(pc
, delta
);
713 if (delta
> BPDELTA_MAX
) {
714 ReportStatementTooLarge(cx
, cg
);
718 if (!cg
->spanDeps
&& !BuildSpanDepTable(cx
, cg
))
721 sd
= GetSpanDep(cg
, pc
);
722 JS_ASSERT(SD_GET_BPDELTA(sd
) == 0);
723 SD_SET_BPDELTA(sd
, delta
);
728 UpdateJumpTargets(JSJumpTarget
*jt
, ptrdiff_t pivot
, ptrdiff_t delta
)
730 if (jt
->offset
> pivot
) {
732 if (jt
->kids
[JT_LEFT
])
733 UpdateJumpTargets(jt
->kids
[JT_LEFT
], pivot
, delta
);
735 if (jt
->kids
[JT_RIGHT
])
736 UpdateJumpTargets(jt
->kids
[JT_RIGHT
], pivot
, delta
);
740 FindNearestSpanDep(JSCodeGenerator
*cg
, ptrdiff_t offset
, int lo
,
744 JSSpanDep
*sdbase
, *sd
;
746 num
= cg
->numSpanDeps
;
749 sdbase
= cg
->spanDeps
;
753 if (sd
->before
== offset
)
755 if (sd
->before
< offset
)
763 JS_ASSERT(sd
->before
>= offset
&& (lo
== 0 || sd
[-1].before
< offset
));
768 FreeJumpTargets(JSCodeGenerator
*cg
, JSJumpTarget
*jt
)
770 if (jt
->kids
[JT_LEFT
])
771 FreeJumpTargets(cg
, jt
->kids
[JT_LEFT
]);
772 if (jt
->kids
[JT_RIGHT
])
773 FreeJumpTargets(cg
, jt
->kids
[JT_RIGHT
]);
774 jt
->kids
[JT_LEFT
] = cg
->jtFreeList
;
779 OptimizeSpanDeps(JSContext
*cx
, JSCodeGenerator
*cg
)
781 jsbytecode
*pc
, *oldpc
, *base
, *limit
, *next
;
782 JSSpanDep
*sd
, *sd2
, *sdbase
, *sdlimit
, *sdtop
, guard
;
783 ptrdiff_t offset
, growth
, delta
, top
, pivot
, span
, length
, target
;
788 jssrcnote
*sn
, *snlimit
;
790 uintN i
, n
, noteIndex
;
797 sdbase
= cg
->spanDeps
;
798 sdlimit
= sdbase
+ cg
->numSpanDeps
;
799 offset
= CG_OFFSET(cg
);
814 for (sd
= sdbase
; sd
< sdlimit
; sd
++) {
815 JS_ASSERT(JT_HAS_TAG(sd
->target
));
818 if (sd
->top
!= top
) {
821 JS_ASSERT(top
== sd
->before
);
825 type
= JOF_OPTYPE(op
);
826 if (JOF_TYPE_IS_EXTENDED_JUMP(type
)) {
828 * We already extended all the jump offset operands for
829 * the opcode at sd->top. Jumps and branches have only
830 * one jump offset operand, but switches have many, all
831 * of which are adjacent in cg->spanDeps.
836 JS_ASSERT(type
== JOF_JUMP
||
837 type
== JOF_TABLESWITCH
||
838 type
== JOF_LOOKUPSWITCH
);
841 if (!JOF_TYPE_IS_EXTENDED_JUMP(type
)) {
842 span
= SD_SPAN(sd
, pivot
);
843 if (span
< JUMP_OFFSET_MIN
|| JUMP_OFFSET_MAX
< span
) {
844 ptrdiff_t deltaFromTop
= 0;
849 case JSOP_GOTO
: op
= JSOP_GOTOX
; break;
850 case JSOP_IFEQ
: op
= JSOP_IFEQX
; break;
851 case JSOP_IFNE
: op
= JSOP_IFNEX
; break;
852 case JSOP_OR
: op
= JSOP_ORX
; break;
853 case JSOP_AND
: op
= JSOP_ANDX
; break;
854 case JSOP_GOSUB
: op
= JSOP_GOSUBX
; break;
855 case JSOP_CASE
: op
= JSOP_CASEX
; break;
856 case JSOP_DEFAULT
: op
= JSOP_DEFAULTX
; break;
857 case JSOP_TABLESWITCH
: op
= JSOP_TABLESWITCHX
; break;
858 case JSOP_LOOKUPSWITCH
: op
= JSOP_LOOKUPSWITCHX
; break;
860 ReportStatementTooLarge(cx
, cg
);
863 *pc
= (jsbytecode
) op
;
865 for (sd2
= sdtop
; sd2
< sdlimit
&& sd2
->top
== top
; sd2
++) {
868 * sd2->offset already includes delta as it stood
869 * before we entered this loop, but it must also
870 * include the delta relative to top due to all the
871 * extended jump offset immediates for the opcode
872 * starting at top, which we extend in this loop.
874 * If there is only one extended jump offset, then
875 * sd2->offset won't change and this for loop will
878 sd2
->offset
+= deltaFromTop
;
879 deltaFromTop
+= JUMPX_OFFSET_LEN
- JUMP_OFFSET_LEN
;
882 * sd2 comes after sd, and won't be revisited by
883 * the outer for loop, so we have to increase its
884 * offset by delta, not merely by deltaFromTop.
886 sd2
->offset
+= delta
;
889 delta
+= JUMPX_OFFSET_LEN
- JUMP_OFFSET_LEN
;
890 UpdateJumpTargets(cg
->jumpTargets
, sd2
->offset
,
891 JUMPX_OFFSET_LEN
- JUMP_OFFSET_LEN
);
903 TokenStream
*ts
= &cg
->parser
->tokenStream
;
905 printf("%s:%u: %u/%u jumps extended in %d passes (%d=%d+%d)\n",
906 ts
->filename
? ts
->filename
: "stdin", cg
->firstLine
,
907 growth
/ (JUMPX_OFFSET_LEN
- JUMP_OFFSET_LEN
), cg
->numSpanDeps
,
908 passes
, offset
+ growth
, offset
, growth
);
912 * Ensure that we have room for the extended jumps, but don't round up
913 * to a power of two -- we're done generating code, so we cut to fit.
915 limit
= CG_LIMIT(cg
);
916 length
= offset
+ growth
;
917 next
= base
+ length
;
919 JS_ASSERT(length
> BYTECODE_CHUNK
);
920 size
= BYTECODE_SIZE(limit
- base
);
921 incr
= BYTECODE_SIZE(length
) - size
;
922 cg
->codePool
->growCast
<jsbytecode
*>(base
, size
, incr
);
924 js_ReportOutOfScriptQuota(cx
);
928 CG_LIMIT(cg
) = next
= base
+ length
;
933 * Set up a fake span dependency record to guard the end of the code
934 * being generated. This guard record is returned as a fencepost by
935 * FindNearestSpanDep if there is no real spandep at or above a given
936 * unextended code offset.
939 guard
.offset
= offset
+ growth
;
940 guard
.before
= offset
;
945 * Now work backwards through the span dependencies, copying chunks of
946 * bytecode between each extended jump toward the end of the grown code
947 * space, and restoring immediate offset operands for all jump bytecodes.
948 * The first chunk of bytecodes, starting at base and ending at the first
949 * extended jump offset (NB: this chunk includes the operation bytecode
950 * just before that immediate jump offset), doesn't need to be copied.
952 JS_ASSERT(sd
== sdlimit
);
954 while (--sd
>= sdbase
) {
955 if (sd
->top
!= top
) {
957 op
= (JSOp
) base
[top
];
958 type
= JOF_OPTYPE(op
);
960 for (sd2
= sd
- 1; sd2
>= sdbase
&& sd2
->top
== top
; sd2
--)
964 JS_ASSERT(top
== sd2
->before
);
967 oldpc
= base
+ sd
->before
;
968 span
= SD_SPAN(sd
, pivot
);
971 * If this jump didn't need to be extended, restore its span immediate
972 * offset operand now, overwriting the index of sd within cg->spanDeps
973 * that was stored temporarily after *pc when BuildSpanDepTable ran.
975 * Note that span might fit in 16 bits even for an extended jump op,
976 * if the op has multiple span operands, not all of which overflowed
977 * (e.g. JSOP_LOOKUPSWITCH or JSOP_TABLESWITCH where some cases are in
978 * range for a short jump, but others are not).
980 if (!JOF_TYPE_IS_EXTENDED_JUMP(type
)) {
981 JS_ASSERT(JUMP_OFFSET_MIN
<= span
&& span
<= JUMP_OFFSET_MAX
);
982 SET_JUMP_OFFSET(oldpc
, span
);
987 * Set up parameters needed to copy the next run of bytecode starting
988 * at offset (which is a cursor into the unextended, original bytecode
989 * vector), down to sd->before (a cursor of the same scale as offset,
990 * it's the index of the original jump pc). Reuse delta to count the
991 * nominal number of bytes to copy.
993 pc
= base
+ sd
->offset
;
994 delta
= offset
- sd
->before
;
995 JS_ASSERT(delta
>= 1 + JUMP_OFFSET_LEN
);
998 * Don't bother copying the jump offset we're about to reset, but do
999 * copy the bytecode at oldpc (which comes just before its immediate
1000 * jump offset operand), on the next iteration through the loop, by
1001 * including it in offset's new value.
1003 offset
= sd
->before
+ 1;
1004 size
= BYTECODE_SIZE(delta
- (1 + JUMP_OFFSET_LEN
));
1006 memmove(pc
+ 1 + JUMPX_OFFSET_LEN
,
1007 oldpc
+ 1 + JUMP_OFFSET_LEN
,
1011 SET_JUMPX_OFFSET(pc
, span
);
1016 * Fix source note deltas. Don't hardwire the delta fixup adjustment,
1017 * even though currently it must be JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN
1018 * at each sd that moved. The future may bring different offset sizes
1019 * for span-dependent instruction operands. However, we fix only main
1020 * notes here, not prolog notes -- we know that prolog opcodes are not
1021 * span-dependent, and aren't likely ever to be.
1023 offset
= growth
= 0;
1025 for (sn
= cg
->main
.notes
, snlimit
= sn
+ cg
->main
.noteCount
;
1029 * Recall that the offset of a given note includes its delta, and
1030 * tells the offset of the annotated bytecode from the main entry
1031 * point of the script.
1033 offset
+= SN_DELTA(sn
);
1034 while (sd
< sdlimit
&& sd
->before
< offset
) {
1036 * To compute the delta to add to sn, we need to look at the
1037 * spandep after sd, whose offset - (before + growth) tells by
1038 * how many bytes sd's instruction grew.
1043 delta
= sd2
->offset
- (sd2
->before
+ growth
);
1045 JS_ASSERT(delta
== JUMPX_OFFSET_LEN
- JUMP_OFFSET_LEN
);
1046 sn
= js_AddToSrcNoteDelta(cx
, cg
, sn
, delta
);
1049 snlimit
= cg
->main
.notes
+ cg
->main
.noteCount
;
1056 * If sn has span-dependent offset operands, check whether each
1057 * covers further span-dependencies, and increase those operands
1058 * accordingly. Some source notes measure offset not from the
1059 * annotated pc, but from that pc plus some small bias. NB: we
1060 * assume that spec->offsetBias can't itself span span-dependent
1063 spec
= &js_SrcNoteSpec
[SN_TYPE(sn
)];
1064 if (spec
->isSpanDep
) {
1065 pivot
= offset
+ spec
->offsetBias
;
1067 for (i
= 0; i
< n
; i
++) {
1068 span
= js_GetSrcNoteOffset(sn
, i
);
1071 target
= pivot
+ span
* spec
->isSpanDep
;
1072 sd2
= FindNearestSpanDep(cg
, target
,
1079 * Increase target by sd2's before-vs-after offset delta,
1080 * which is absolute (i.e., relative to start of script,
1081 * as is target). Recompute the span by subtracting its
1082 * adjusted pivot from target.
1084 target
+= sd2
->offset
- sd2
->before
;
1085 span
= target
- (pivot
+ growth
);
1086 span
*= spec
->isSpanDep
;
1087 noteIndex
= sn
- cg
->main
.notes
;
1088 if (!js_SetSrcNoteOffset(cx
, cg
, noteIndex
, i
, span
))
1090 sn
= cg
->main
.notes
+ noteIndex
;
1091 snlimit
= cg
->main
.notes
+ cg
->main
.noteCount
;
1095 cg
->main
.lastNoteOffset
+= growth
;
1098 * Fix try/catch notes (O(numTryNotes * log2(numSpanDeps)), but it's
1099 * not clear how we can beat that).
1101 for (tryNode
= cg
->lastTryNode
; tryNode
; tryNode
= tryNode
->prev
) {
1103 * First, look for the nearest span dependency at/above tn->start.
1104 * There may not be any such spandep, in which case the guard will
1107 offset
= tryNode
->note
.start
;
1108 sd
= FindNearestSpanDep(cg
, offset
, 0, &guard
);
1109 delta
= sd
->offset
- sd
->before
;
1110 tryNode
->note
.start
= offset
+ delta
;
1113 * Next, find the nearest spandep at/above tn->start + tn->length.
1114 * Use its delta minus tn->start's delta to increase tn->length.
1116 length
= tryNode
->note
.length
;
1117 sd2
= FindNearestSpanDep(cg
, offset
+ length
, sd
- sdbase
, &guard
);
1119 tryNode
->note
.length
=
1120 length
+ sd2
->offset
- sd2
->before
- delta
;
1125 #ifdef DEBUG_brendan
1129 for (sd
= sdbase
; sd
< sdlimit
; sd
++) {
1130 offset
= sd
->offset
;
1132 /* NB: sd->top cursors into the original, unextended bytecode vector. */
1133 if (sd
->top
!= top
) {
1134 JS_ASSERT(top
== -1 ||
1135 !JOF_TYPE_IS_EXTENDED_JUMP(type
) ||
1139 JS_ASSERT(top
== sd
->before
);
1140 op
= (JSOp
) base
[offset
];
1141 type
= JOF_OPTYPE(op
);
1142 JS_ASSERT(type
== JOF_JUMP
||
1143 type
== JOF_JUMPX
||
1144 type
== JOF_TABLESWITCH
||
1145 type
== JOF_TABLESWITCHX
||
1146 type
== JOF_LOOKUPSWITCH
||
1147 type
== JOF_LOOKUPSWITCHX
);
1152 if (JOF_TYPE_IS_EXTENDED_JUMP(type
)) {
1153 span
= GET_JUMPX_OFFSET(pc
);
1154 if (span
< JUMP_OFFSET_MIN
|| JUMP_OFFSET_MAX
< span
) {
1157 JS_ASSERT(type
== JOF_TABLESWITCHX
||
1158 type
== JOF_LOOKUPSWITCHX
);
1161 span
= GET_JUMP_OFFSET(pc
);
1163 JS_ASSERT(SD_SPAN(sd
, pivot
) == span
);
1165 JS_ASSERT(!JOF_TYPE_IS_EXTENDED_JUMP(type
) || bigspans
!= 0);
1170 * Reset so we optimize at most once -- cg may be used for further code
1171 * generation of successive, independent, top-level statements. No jump
1172 * can span top-level statements, because JS lacks goto.
1174 size
= SPANDEPS_SIZE(JS_BIT(JS_CeilingLog2(cg
->numSpanDeps
)));
1175 cx
->free(cg
->spanDeps
);
1176 cg
->spanDeps
= NULL
;
1177 FreeJumpTargets(cg
, cg
->jumpTargets
);
1178 cg
->jumpTargets
= NULL
;
1179 cg
->numSpanDeps
= cg
->numJumpTargets
= 0;
1180 cg
->spanDepTodo
= CG_OFFSET(cg
);
1185 EmitJump(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp op
, ptrdiff_t off
)
1191 extend
= off
< JUMP_OFFSET_MIN
|| JUMP_OFFSET_MAX
< off
;
1192 if (extend
&& !cg
->spanDeps
&& !BuildSpanDepTable(cx
, cg
))
1195 jmp
= js_Emit3(cx
, cg
, op
, JUMP_OFFSET_HI(off
), JUMP_OFFSET_LO(off
));
1196 if (jmp
>= 0 && (extend
|| cg
->spanDeps
)) {
1197 pc
= CG_CODE(cg
, jmp
);
1198 if (!AddSpanDep(cx
, cg
, pc
, pc
, off
))
/*
 * Recover the signed jump offset for the jump bytecode at pc.  While no
 * span-dependency table exists, the 16-bit immediate is the offset itself;
 * otherwise the immediate locates a JSSpanDep whose target field holds
 * either an untagged backpatch delta or a tagged JSJumpTarget pointer
 * (see the span-dependency essay comment earlier in this file).
 */
1205 GetJumpOffset(JSCodeGenerator
*cg
, jsbytecode
*pc
)
1212 return GET_JUMP_OFFSET(pc
);
1214 sd
= GetSpanDep(cg
, pc
);
/* Untagged target => still on a backpatch chain; return the raw delta. */
1216 if (!JT_HAS_TAG(jt
))
1217 return JT_TO_BPDELTA(jt
);
/* Rewind to the first spandep of this opcode (switches record several). */
1220 while (--sd
>= cg
->spanDeps
&& sd
->top
== top
)
1223 return JT_CLR_TAG(jt
)->offset
- sd
->offset
;
/*
 * Store jump offset `off` into the immediate operand at pc.  While no
 * span-dependency table exists, a fitting 16-bit offset is written directly;
 * the first offset that does not fit forces BuildSpanDepTable, after which
 * all offsets are routed through SetSpanDepTarget.
 */
1227 js_SetJumpOffset(JSContext
*cx
, JSCodeGenerator
*cg
, jsbytecode
*pc
,
1230 if (!cg
->spanDeps
) {
/* Fast path: offset fits in the unextended 16-bit immediate. */
1231 if (JUMP_OFFSET_MIN
<= off
&& off
<= JUMP_OFFSET_MAX
) {
1232 SET_JUMP_OFFSET(pc
, off
);
/* Overflow: lazily build the span-dependency table, then use it below. */
1236 if (!BuildSpanDepTable(cx
, cg
))
1240 return SetSpanDepTarget(cx
, cg
, GetSpanDep(cg
, pc
), off
);
/*
 * Return whether a statement of the given type encloses the current point,
 * walking from the innermost entry (topStmt) outward via the down links.
 */
1244 JSTreeContext::inStatement(JSStmtType type
)
1246 for (JSStmtInfo
*stmt
= topStmt
; stmt
; stmt
= stmt
->down
) {
1247 if (stmt
->type
== type
)
/*
 * Reserve the two local slots used by sharp-variable (#n=/#n#) support.
 * Idempotent: a non-negative sharpSlotBase means the slots already exist.
 * The slot names "#array" and "#depth" contain '#', which is not legal in
 * identifiers, so they cannot collide with user-declared locals.
 */
1254 JSTreeContext::ensureSharpSlots()
1256 #if JS_HAS_SHARP_VARS
/* This function hands out exactly two slots; keep the constant honest. */
1257 JS_STATIC_ASSERT(SHARP_NSLOTS
== 2);
/* Already allocated by an earlier call; just sanity-check the flag. */
1259 if (sharpSlotBase
>= 0) {
1260 JS_ASSERT(flags
& TCF_HAS_SHARPS
);
1264 JS_ASSERT(!(flags
& TCF_HAS_SHARPS
));
1266 JSContext
*cx
= parser
->context
;
1267 JSAtom
*sharpArrayAtom
= js_Atomize(cx
, "#array", 6, 0);
1268 JSAtom
*sharpDepthAtom
= js_Atomize(cx
, "#depth", 6, 0);
1269 if (!sharpArrayAtom
|| !sharpDepthAtom
)
/* Function case: the sharp slots go after the function's existing vars. */
1272 sharpSlotBase
= fun
->u
.i
.nvars
;
1273 if (!js_AddLocal(cx
, fun
, sharpArrayAtom
, JSLOCAL_VAR
))
1275 if (!js_AddLocal(cx
, fun
, sharpDepthAtom
, JSLOCAL_VAR
))
1279 * Compiler::compileScript will rebase immediate operands indexing
1280 * the sharp slots to come at the end of the global script's |nfixed|
1281 * slots storage, after gvars and regexps.
1285 flags
|= TCF_HAS_SHARPS
;
/*
 * Walk up `skip` levels of enclosing tree contexts via the parent links,
 * checking whether any context crossed has TCF_FUN_IS_GENERATOR set.
 */
1291 JSTreeContext::skipSpansGenerator(unsigned skip
)
1293 JSTreeContext
*tc
= this;
1294 for (unsigned i
= 0; i
< skip
; ++i
, tc
= tc
->parent
) {
1297 if (tc
->flags
& TCF_FUN_IS_GENERATOR
)
/*
 * Push stmt onto tc's enclosing-statement stack with the given type and top
 * offset.  Statement types that link a scope (per STMT_LINKS_SCOPE) are also
 * threaded onto the separate topScopeStmt chain; others get a null downScope.
 */
1304 js_PushStatement(JSTreeContext
*tc
, JSStmtInfo
*stmt
, JSStmtType type
,
1309 stmt
->blockid
= tc
->blockid();
1310 SET_STATEMENT_TOP(stmt
, top
);
/* Callers must not pre-set a block object; js_PushBlockScope sets it. */
1312 JS_ASSERT(!stmt
->blockObj
);
1313 stmt
->down
= tc
->topStmt
;
1315 if (STMT_LINKS_SCOPE(stmt
)) {
1316 stmt
->downScope
= tc
->topScopeStmt
;
1317 tc
->topScopeStmt
= stmt
;
1319 stmt
->downScope
= NULL
;
/*
 * Push a STMT_BLOCK entry for a lexical block scope: mark it SIF_SCOPE,
 * splice blockObj into the static block chain via its parent link, and hook
 * the statement onto the scope-statement stack.
 */
1324 js_PushBlockScope(JSTreeContext
*tc
, JSStmtInfo
*stmt
, JSObject
*blockObj
,
1327 js_PushStatement(tc
, stmt
, STMT_BLOCK
, top
);
1328 stmt
->flags
|= SIF_SCOPE
;
/* Chain the new block object to the previously innermost block. */
1329 blockObj
->setParent(tc
->blockChain
);
1330 stmt
->downScope
= tc
->topScopeStmt
;
1331 tc
->topScopeStmt
= stmt
;
1332 tc
->blockChain
= blockObj
;
1333 stmt
->blockObj
= blockObj
;
1337 * Emit a backpatch op with offset pointing to the previous jump of this type,
1338 * so that we can walk back up the chain fixing up the op and jump offset.
1341 EmitBackPatchOp(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp op
, ptrdiff_t *lastp
)
1343 ptrdiff_t offset
, delta
;
1345 offset
= CG_OFFSET(cg
);
1346 delta
= offset
- *lastp
;
1348 JS_ASSERT(delta
> 0);
1349 return EmitJump(cx
, cg
, op
, delta
);
1353 * Macro to emit a bytecode followed by a uint16 immediate operand stored in
1354 * big-endian order, used for arg and var numbers as well as for atomIndexes.
1355 * NB: We use cx and cg from our caller's lexical environment, and return
1358 #define EMIT_UINT16_IMM_OP(op, i) \
1360 if (js_Emit3(cx, cg, op, UINT16_HI(i), UINT16_LO(i)) < 0) \
1364 #define EMIT_UINT16PAIR_IMM_OP(op, i, j) \
1366 ptrdiff_t off_ = js_EmitN(cx, cg, op, 2 * UINT16_LEN); \
1369 jsbytecode *pc_ = CG_CODE(cg, off_); \
1370 SET_UINT16(pc_, i); \
1371 pc_ += UINT16_LEN; \
1372 SET_UINT16(pc_, j); \
1376 FlushPops(JSContext
*cx
, JSCodeGenerator
*cg
, intN
*npops
)
1378 JS_ASSERT(*npops
!= 0);
1379 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0)
1381 EMIT_UINT16_IMM_OP(JSOP_POPN
, *npops
);
1387 * Emit additional bytecode(s) for non-local jumps.
1390 EmitNonLocalJumpFixup(JSContext
*cx
, JSCodeGenerator
*cg
, JSStmtInfo
*toStmt
)
1396 * The non-local jump fixup we emit will unbalance cg->stackDepth, because
1397 * the fixup replicates balanced code such as JSOP_LEAVEWITH emitted at the
1398 * end of a with statement, so we save cg->stackDepth here and restore it
1399 * just before a successful return.
1401 depth
= cg
->stackDepth
;
1404 #define FLUSH_POPS() if (npops && !FlushPops(cx, cg, &npops)) return JS_FALSE
1406 for (stmt
= cg
->topStmt
; stmt
!= toStmt
; stmt
= stmt
->down
) {
1407 switch (stmt
->type
) {
1410 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0)
1412 if (EmitBackPatchOp(cx
, cg
, JSOP_BACKPATCH
, &GOSUBS(*stmt
)) < 0)
1417 /* There's a With object on the stack that we need to pop. */
1419 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0)
1421 if (js_Emit1(cx
, cg
, JSOP_LEAVEWITH
) < 0)
1425 case STMT_FOR_IN_LOOP
:
1427 * The iterator and the object being iterated need to be popped.
1430 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0)
1432 if (js_Emit1(cx
, cg
, JSOP_ENDITER
) < 0)
1436 case STMT_SUBROUTINE
:
1438 * There's a [exception or hole, retsub pc-index] pair on the
1439 * stack that we need to pop.
1447 if (stmt
->flags
& SIF_SCOPE
) {
1450 /* There is a Block object with locals on the stack to pop. */
1452 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0)
1454 i
= OBJ_BLOCK_COUNT(cx
, stmt
->blockObj
);
1455 EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK
, i
);
1460 cg
->stackDepth
= depth
;
1467 EmitGoto(JSContext
*cx
, JSCodeGenerator
*cg
, JSStmtInfo
*toStmt
,
1468 ptrdiff_t *lastp
, JSAtomListElement
*label
, JSSrcNoteType noteType
)
1472 if (!EmitNonLocalJumpFixup(cx
, cg
, toStmt
))
1476 index
= js_NewSrcNote2(cx
, cg
, noteType
, (ptrdiff_t) ALE_INDEX(label
));
1477 else if (noteType
!= SRC_NULL
)
1478 index
= js_NewSrcNote(cx
, cg
, noteType
);
1484 return EmitBackPatchOp(cx
, cg
, JSOP_BACKPATCH
, lastp
);
1488 BackPatch(JSContext
*cx
, JSCodeGenerator
*cg
, ptrdiff_t last
,
1489 jsbytecode
*target
, jsbytecode op
)
1491 jsbytecode
*pc
, *stop
;
1492 ptrdiff_t delta
, span
;
1494 pc
= CG_CODE(cg
, last
);
1495 stop
= CG_CODE(cg
, -1);
1496 while (pc
!= stop
) {
1497 delta
= GetJumpOffset(cg
, pc
);
1499 CHECK_AND_SET_JUMP_OFFSET(cx
, cg
, pc
, span
);
1502 * Set *pc after jump offset in case bpdelta didn't overflow, but span
1503 * does (if so, CHECK_AND_SET_JUMP_OFFSET might call BuildSpanDepTable
1504 * and need to see the JSOP_BACKPATCH* op at *pc).
1513 js_PopStatement(JSTreeContext
*tc
)
1518 tc
->topStmt
= stmt
->down
;
1519 if (STMT_LINKS_SCOPE(stmt
)) {
1520 tc
->topScopeStmt
= stmt
->downScope
;
1521 if (stmt
->flags
& SIF_SCOPE
) {
1522 tc
->blockChain
= stmt
->blockObj
->getParent();
1523 JS_SCOPE_DEPTH_METERING(--tc
->scopeDepth
);
1529 js_PopStatementCG(JSContext
*cx
, JSCodeGenerator
*cg
)
1534 if (!STMT_IS_TRYING(stmt
) &&
1535 (!BackPatch(cx
, cg
, stmt
->breaks
, CG_NEXT(cg
), JSOP_GOTO
) ||
1536 !BackPatch(cx
, cg
, stmt
->continues
, CG_CODE(cg
, stmt
->update
),
1540 js_PopStatement(cg
);
1545 js_DefineCompileTimeConstant(JSContext
*cx
, JSCodeGenerator
*cg
, JSAtom
*atom
,
1553 /* XXX just do numbers for now */
1554 if (pn
->pn_type
== TOK_NUMBER
) {
1556 if (JSDOUBLE_IS_INT(dval
, ival
) && INT_FITS_IN_JSVAL(ival
)) {
1557 v
= INT_TO_JSVAL(ival
);
1560 * We atomize double to root a jsdouble instance that we wrap as
1561 * jsval and store in cg->constList. This works because atoms are
1562 * protected from GC during compilation.
1564 valueAtom
= js_AtomizeDouble(cx
, dval
);
1567 v
= ATOM_KEY(valueAtom
);
1569 if (!cg
->constMap
.put(atom
, v
))
1576 js_LexicalLookup(JSTreeContext
*tc
, JSAtom
*atom
, jsint
*slotp
, JSStmtInfo
*stmt
)
1580 JSScopeProperty
*sprop
;
1583 stmt
= tc
->topScopeStmt
;
1584 for (; stmt
; stmt
= stmt
->downScope
) {
1585 if (stmt
->type
== STMT_WITH
)
1588 /* Skip "maybe scope" statements that don't contain let bindings. */
1589 if (!(stmt
->flags
& SIF_SCOPE
))
1592 obj
= stmt
->blockObj
;
1593 JS_ASSERT(obj
->getClass() == &js_BlockClass
);
1594 scope
= obj
->scope();
1595 sprop
= scope
->lookup(ATOM_TO_JSID(atom
));
1597 JS_ASSERT(sprop
->hasShortID());
1600 JS_ASSERT(JSVAL_IS_INT(obj
->fslots
[JSSLOT_BLOCK_DEPTH
]));
1601 *slotp
= JSVAL_TO_INT(obj
->fslots
[JSSLOT_BLOCK_DEPTH
]) +
1614 * Check if the attributes describe a property holding a compile-time constant
1615 * or a permanent, read-only property without a getter.
1617 #define IS_CONSTANT_PROPERTY(attrs) \
1618 (((attrs) & (JSPROP_READONLY | JSPROP_PERMANENT | JSPROP_GETTER)) == \
1619 (JSPROP_READONLY | JSPROP_PERMANENT))
1622 * The function sets vp to JSVAL_HOLE when the atom does not corresponds to a
1623 * name defining a constant.
1626 LookupCompileTimeConstant(JSContext
*cx
, JSCodeGenerator
*cg
, JSAtom
*atom
,
1631 JSObject
*obj
, *objbox
;
1636 * Chase down the cg stack, but only until we reach the outermost cg.
1637 * This enables propagating consts from top-level into switch cases in a
1638 * function compiled along with the top-level script.
1642 if (cg
->inFunction() && cg
->compileAndGo()) {
1643 /* XXX this will need revising if 'const' becomes block-scoped. */
1644 stmt
= js_LexicalLookup(cg
, atom
, NULL
);
1648 if (JSCodeGenerator::ConstMap::Ptr p
= cg
->constMap
.lookup(atom
)) {
1649 JS_ASSERT(p
->value
!= JSVAL_HOLE
);
1655 * Try looking in the variable object for a direct property that
1656 * is readonly and permanent. We know such a property can't be
1657 * shadowed by another property on obj's prototype chain, or a
1658 * with object or catch variable; nor can prop's value be changed,
1659 * nor can prop be deleted.
1661 if (cg
->inFunction()) {
1662 if (js_LookupLocal(cx
, cg
->fun
, atom
, NULL
) != JSLOCAL_NONE
)
1665 JS_ASSERT(cg
->compileAndGo());
1666 obj
= cg
->scopeChain
;
1667 ok
= obj
->lookupProperty(cx
, ATOM_TO_JSID(atom
), &objbox
, &prop
);
1670 if (objbox
== obj
) {
1672 * We're compiling code that will be executed immediately,
1673 * not re-executed against a different scope chain and/or
1674 * variable object. Therefore we can get constant values
1675 * from our variable object here.
1677 ok
= obj
->getAttributes(cx
, ATOM_TO_JSID(atom
), prop
, &attrs
);
1678 if (ok
&& IS_CONSTANT_PROPERTY(attrs
)) {
1679 ok
= obj
->getProperty(cx
, ATOM_TO_JSID(atom
), vp
);
1680 JS_ASSERT_IF(ok
, *vp
!= JSVAL_HOLE
);
1684 objbox
->dropProperty(cx
, prop
);
1691 } while ((cg
= (JSCodeGenerator
*) cg
->parent
) != NULL
);
1696 * Return JSOP_NOP to indicate that index fits 2 bytes and no index segment
1697 * reset instruction is necessary, JSOP_FALSE to indicate an error or either
1698 * JSOP_RESETBASE0 or JSOP_RESETBASE1 to indicate the reset bytecode to issue
1699 * after the main bytecode sequence.
1702 EmitBigIndexPrefix(JSContext
*cx
, JSCodeGenerator
*cg
, uintN index
)
1707 * We have max 3 bytes for indexes and check for INDEX_LIMIT overflow only
1710 JS_STATIC_ASSERT(INDEX_LIMIT
<= JS_BIT(24));
1711 JS_STATIC_ASSERT(INDEX_LIMIT
>=
1712 (JSOP_INDEXBASE3
- JSOP_INDEXBASE1
+ 2) << 16);
1714 if (index
< JS_BIT(16))
1716 indexBase
= index
>> 16;
1717 if (indexBase
<= JSOP_INDEXBASE3
- JSOP_INDEXBASE1
+ 1) {
1718 if (js_Emit1(cx
, cg
, (JSOp
)(JSOP_INDEXBASE1
+ indexBase
- 1)) < 0)
1720 return JSOP_RESETBASE0
;
1723 if (index
>= INDEX_LIMIT
) {
1724 JS_ReportErrorNumber(cx
, js_GetErrorMessage
, NULL
,
1725 JSMSG_TOO_MANY_LITERALS
);
1729 if (js_Emit2(cx
, cg
, JSOP_INDEXBASE
, (JSOp
)indexBase
) < 0)
1731 return JSOP_RESETBASE
;
1735 * Emit a bytecode and its 2-byte constant index immediate operand. If the
1736 * index requires more than 2 bytes, emit a prefix op whose 8-bit immediate
1737 * operand effectively extends the 16-bit immediate of the prefixed opcode,
1738 * by changing index "segment" (see jsinterp.c). We optimize segments 1-3
1739 * with single-byte JSOP_INDEXBASE[123] codes.
1741 * Such prefixing currently requires a suffix to restore the "zero segment"
1742 * register setting, but this could be optimized further.
1745 EmitIndexOp(JSContext
*cx
, JSOp op
, uintN index
, JSCodeGenerator
*cg
)
1749 bigSuffix
= EmitBigIndexPrefix(cx
, cg
, index
);
1750 if (bigSuffix
== JSOP_FALSE
)
1752 EMIT_UINT16_IMM_OP(op
, index
);
1753 return bigSuffix
== JSOP_NOP
|| js_Emit1(cx
, cg
, bigSuffix
) >= 0;
1757 * Slight sugar for EmitIndexOp, again accessing cx and cg from the macro
1758 * caller's lexical environment, and embedding a false return on error.
1760 #define EMIT_INDEX_OP(op, index) \
1762 if (!EmitIndexOp(cx, op, index, cg)) \
1767 EmitAtomOp(JSContext
*cx
, JSParseNode
*pn
, JSOp op
, JSCodeGenerator
*cg
)
1769 JSAtomListElement
*ale
;
1771 JS_ASSERT(JOF_OPTYPE(op
) == JOF_ATOM
);
1772 if (op
== JSOP_GETPROP
&&
1773 pn
->pn_atom
== cx
->runtime
->atomState
.lengthAtom
) {
1774 return js_Emit1(cx
, cg
, JSOP_LENGTH
) >= 0;
1776 ale
= cg
->atomList
.add(cg
->parser
, pn
->pn_atom
);
1779 return EmitIndexOp(cx
, op
, ALE_INDEX(ale
), cg
);
1783 EmitObjectOp(JSContext
*cx
, JSObjectBox
*objbox
, JSOp op
,
1784 JSCodeGenerator
*cg
)
1786 JS_ASSERT(JOF_OPTYPE(op
) == JOF_OBJECT
);
1787 return EmitIndexOp(cx
, op
, cg
->objectList
.index(objbox
), cg
);
1791 * What good are ARGNO_LEN and SLOTNO_LEN, you ask? The answer is that, apart
1792 * from EmitSlotIndexOp, they abstract out the detail that both are 2, and in
1793 * other parts of the code there's no necessary relationship between the two.
1794 * The abstraction cracks here in order to share EmitSlotIndexOp code among
1795 * the JSOP_DEFLOCALFUN and JSOP_GET{ARG,VAR,LOCAL}PROP cases.
1797 JS_STATIC_ASSERT(ARGNO_LEN
== 2);
1798 JS_STATIC_ASSERT(SLOTNO_LEN
== 2);
1801 EmitSlotIndexOp(JSContext
*cx
, JSOp op
, uintN slot
, uintN index
,
1802 JSCodeGenerator
*cg
)
1808 JS_ASSERT(JOF_OPTYPE(op
) == JOF_SLOTATOM
||
1809 JOF_OPTYPE(op
) == JOF_SLOTOBJECT
);
1810 bigSuffix
= EmitBigIndexPrefix(cx
, cg
, index
);
1811 if (bigSuffix
== JSOP_FALSE
)
1814 /* Emit [op, slot, index]. */
1815 off
= js_EmitN(cx
, cg
, op
, 2 + INDEX_LEN
);
1818 pc
= CG_CODE(cg
, off
);
1819 SET_UINT16(pc
, slot
);
1821 SET_INDEX(pc
, index
);
1822 return bigSuffix
== JSOP_NOP
|| js_Emit1(cx
, cg
, bigSuffix
) >= 0;
1826 * Adjust the slot for a block local to account for the number of variables
1827 * that share the same index space with locals. Due to the incremental code
1828 * generation for top-level script, we do the adjustment via code patching in
1829 * Compiler::compileScript; see comments there.
1831 * The function returns -1 on failures.
1834 AdjustBlockSlot(JSContext
*cx
, JSCodeGenerator
*cg
, jsint slot
)
1836 JS_ASSERT((jsuint
) slot
< cg
->maxStackDepth
);
1837 if (cg
->inFunction()) {
1838 slot
+= cg
->fun
->u
.i
.nvars
;
1839 if ((uintN
) slot
>= SLOTNO_LIMIT
) {
1840 ReportCompileErrorNumber(cx
, CG_TS(cg
), NULL
, JSREPORT_ERROR
, JSMSG_TOO_MANY_LOCALS
);
1848 EmitEnterBlock(JSContext
*cx
, JSParseNode
*pn
, JSCodeGenerator
*cg
)
1850 JS_ASSERT(PN_TYPE(pn
) == TOK_LEXICALSCOPE
);
1851 if (!EmitObjectOp(cx
, pn
->pn_objbox
, JSOP_ENTERBLOCK
, cg
))
1854 JSObject
*blockObj
= pn
->pn_objbox
->object
;
1855 jsint depth
= AdjustBlockSlot(cx
, cg
, OBJ_BLOCK_DEPTH(cx
, blockObj
));
1859 for (uintN slot
= JSSLOT_FREE(&js_BlockClass
),
1860 limit
= slot
+ OBJ_BLOCK_COUNT(cx
, blockObj
);
1861 slot
< limit
; slot
++) {
1862 jsval v
= blockObj
->getSlot(slot
);
1864 /* Beware the empty destructuring dummy. */
1865 if (JSVAL_IS_VOID(v
)) {
1866 JS_ASSERT(slot
+ 1 <= limit
);
1870 JSDefinition
*dn
= (JSDefinition
*) JSVAL_TO_PRIVATE(v
);
1871 JS_ASSERT(dn
->pn_defn
);
1872 JS_ASSERT(uintN(dn
->frameSlot() + depth
) < JS_BIT(16));
1873 dn
->pn_cookie
+= depth
;
1875 for (JSParseNode
*pnu
= dn
->dn_uses
; pnu
; pnu
= pnu
->pn_link
) {
1876 JS_ASSERT(pnu
->pn_lexdef
== dn
);
1877 JS_ASSERT(!(pnu
->pn_dflags
& PND_BOUND
));
1878 JS_ASSERT(pnu
->pn_cookie
== FREE_UPVAR_COOKIE
);
1883 blockObj
->scope()->freeslot
= JSSLOT_FREE(&js_BlockClass
);
1884 return blockObj
->growSlots(cx
, JSSLOT_FREE(&js_BlockClass
));
1888 * When eval is called from a function, the eval code or function code it
1889 * compiles may reference upvars that live in the eval-calling function. The
1890 * eval-invoked compiler does not have explicit definitions for these upvars
1891 * and we do not attempt to create them a-priori (by inspecting the function's
1892 * args and vars) -- we could, but we'd take an avoidable penalty for each
1893 * function local not referenced by any upvar. Instead, we map such upvars
1894 * lazily, growing upvarMap.vector by powers of two.
1896 * This function knows that it is called with pn pointing to a PN_NAME-arity
1897 * node, and cg->parser->callerFrame having a non-null fun member, and the
1898 * static level of cg at least one greater than the eval-calling function's
1902 MakeUpvarForEval(JSParseNode
*pn
, JSCodeGenerator
*cg
)
1904 JSContext
*cx
= cg
->parser
->context
;
1905 JSFunction
*fun
= cg
->parser
->callerFrame
->fun
;
1906 uintN upvarLevel
= fun
->u
.i
.script
->staticLevel
;
1908 JSFunctionBox
*funbox
= cg
->funbox
;
1911 * Treat top-level function definitions as escaping (i.e., as funargs),
1912 * required since we compile each such top level function or statement
1913 * and throw away the AST, so we can't yet see all funarg uses of this
1914 * function being compiled (cg->funbox->object). See bug 493177.
1916 if (funbox
->level
== fun
->u
.i
.script
->staticLevel
+ 1U &&
1917 !(((JSFunction
*) funbox
->object
)->flags
& JSFUN_LAMBDA
)) {
1918 JS_ASSERT_IF(cx
->options
& JSOPTION_ANONFUNFIX
,
1919 ((JSFunction
*) funbox
->object
)->atom
);
1923 while (funbox
->level
>= upvarLevel
) {
1924 if (funbox
->node
->pn_dflags
& PND_FUNARG
)
1926 funbox
= funbox
->parent
;
1932 JSAtom
*atom
= pn
->pn_atom
;
1935 JSLocalKind localKind
= js_LookupLocal(cx
, fun
, atom
, &index
);
1936 if (localKind
== JSLOCAL_NONE
)
1939 JS_ASSERT(cg
->staticLevel
> upvarLevel
);
1940 if (cg
->staticLevel
>= JS_DISPLAY_SIZE
|| upvarLevel
>= JS_DISPLAY_SIZE
)
1943 JSAtomListElement
*ale
= cg
->upvarList
.lookup(atom
);
1945 if (cg
->inFunction() &&
1946 !js_AddLocal(cx
, cg
->fun
, atom
, JSLOCAL_UPVAR
)) {
1950 ale
= cg
->upvarList
.add(cg
->parser
, atom
);
1953 JS_ASSERT(ALE_INDEX(ale
) == cg
->upvarList
.count
- 1);
1955 uint32
*vector
= cg
->upvarMap
.vector
;
1956 uint32 length
= cg
->upvarMap
.length
;
1958 JS_ASSERT(ALE_INDEX(ale
) <= length
);
1959 if (ALE_INDEX(ale
) == length
) {
1960 length
= 2 * JS_MAX(2, length
);
1961 vector
= (uint32
*) cx
->realloc(vector
, length
* sizeof *vector
);
1964 cg
->upvarMap
.vector
= vector
;
1965 cg
->upvarMap
.length
= length
;
1968 if (localKind
!= JSLOCAL_ARG
)
1969 index
+= fun
->nargs
;
1970 JS_ASSERT(index
< JS_BIT(16));
1972 uintN skip
= cg
->staticLevel
- upvarLevel
;
1973 vector
[ALE_INDEX(ale
)] = MAKE_UPVAR_COOKIE(skip
, index
);
1976 pn
->pn_op
= JSOP_GETUPVAR
;
1977 pn
->pn_cookie
= MAKE_UPVAR_COOKIE(cg
->staticLevel
, ALE_INDEX(ale
));
1978 pn
->pn_dflags
|= PND_BOUND
;
1983 * BindNameToSlot attempts to optimize name gets and sets to stack slot loads
1984 * and stores, given the compile-time information in cg and a TOK_NAME node pn.
1985 * It returns false on error, true on success.
1987 * The caller can inspect pn->pn_cookie for FREE_UPVAR_COOKIE to tell whether
1988 * optimization occurred, in which case BindNameToSlot also updated pn->pn_op.
1989 * If pn->pn_cookie is still FREE_UPVAR_COOKIE on return, pn->pn_op still may
1990 * have been optimized, e.g., from JSOP_NAME to JSOP_CALLEE. Whether or not
1991 * pn->pn_op was modified, if this function finds an argument or local variable
1992 * name, PND_CONST will be set in pn_dflags for read-only properties after a
1993 * successful return.
1995 * NB: if you add more opcodes specialized from JSOP_NAME, etc., don't forget
1996 * to update the TOK_FOR (for-in) and TOK_ASSIGN (op=, e.g. +=) special cases
2000 BindNameToSlot(JSContext
*cx
, JSCodeGenerator
*cg
, JSParseNode
*pn
)
2006 JSDefinition::Kind dn_kind
;
2007 JSAtomListElement
*ale
;
2010 JS_ASSERT(pn
->pn_type
== TOK_NAME
);
2012 /* Idempotency tests come first, since we may be called more than once. */
2013 if (pn
->pn_dflags
& PND_BOUND
)
2016 /* No cookie initialized for these two, they're pre-bound by definition. */
2017 JS_ASSERT(pn
->pn_op
!= JSOP_ARGUMENTS
&& pn
->pn_op
!= JSOP_CALLEE
);
2020 * The parser linked all uses (including forward references) to their
2021 * definitions, unless a with statement or direct eval intervened.
2024 JS_ASSERT(pn
->pn_cookie
== FREE_UPVAR_COOKIE
);
2026 JS_ASSERT(dn
->pn_defn
);
2027 if (pn
->isDeoptimized())
2029 pn
->pn_dflags
|= (dn
->pn_dflags
& PND_CONST
);
2033 dn
= (JSDefinition
*) pn
;
2040 JS_ASSERT(JOF_OPTYPE(op
) == JOF_ATOM
);
2042 cookie
= dn
->pn_cookie
;
2043 dn_kind
= dn
->kind();
2046 * Turn attempts to mutate const-declared bindings into get ops (for
2047 * pre-increment and pre-decrement ops, our caller will have to emit
2048 * JSOP_POS, JSOP_ONE, and JSOP_ADD as well).
2050 * Turn JSOP_DELNAME into JSOP_FALSE if dn is known, as all declared
2051 * bindings visible to the compiler are permanent in JS unless the
2052 * declaration originates in eval code. We detect eval code by testing
2053 * cg->parser->callerFrame, which is set only by eval or a debugger
2056 * Note that this callerFrame non-null test must be qualified by testing
2057 * !cg->funbox to exclude function code nested in eval code, which is not
2058 * subject to the deletable binding exception.
2065 if (dn_kind
!= JSDefinition::UNKNOWN
) {
2066 if (cg
->parser
->callerFrame
&& !cg
->funbox
)
2067 JS_ASSERT(cg
->compileAndGo());
2069 pn
->pn_op
= JSOP_FALSE
;
2070 pn
->pn_dflags
|= PND_BOUND
;
2076 pn
->pn_op
= op
= JSOP_NAME
;
2079 if (cookie
== FREE_UPVAR_COOKIE
) {
2080 JSStackFrame
*caller
= cg
->parser
->callerFrame
;
2082 JS_ASSERT(cg
->compileAndGo());
2085 * Don't generate upvars on the left side of a for loop. See
2088 if (cg
->flags
& TCF_IN_FOR_INIT
)
2091 JS_ASSERT(caller
->script
);
2096 * Make sure the variable object used by the compiler to initialize
2097 * parent links matches the caller's varobj. Compile-n-go compiler-
2098 * created function objects have the top-level cg's scopeChain set
2099 * as their parent by Parser::newFunction.
2101 JSObject
*scopeobj
= cg
->inFunction()
2102 ? FUN_OBJECT(cg
->fun
)->getParent()
2104 if (scopeobj
!= cg
->parser
->callerVarObj
)
2108 * We are compiling eval or debug script inside a function frame
2109 * and the scope chain matches the function's variable object.
2110 * Optimize access to function's arguments and variable and the
2113 if (op
!= JSOP_NAME
)
2117 * Generator functions may be resumed from any call stack, which
2118 * defeats the display optimization to static link searching used
2119 * by JSOP_{GET,CALL}UPVAR.
2121 JSFunction
*fun
= cg
->parser
->callerFrame
->fun
;
2122 JS_ASSERT(cg
->staticLevel
>= fun
->u
.i
.script
->staticLevel
);
2123 unsigned skip
= cg
->staticLevel
- fun
->u
.i
.script
->staticLevel
;
2124 if (cg
->skipSpansGenerator(skip
))
2127 return MakeUpvarForEval(pn
, cg
);
2132 if (dn
->pn_dflags
& PND_GVAR
) {
2134 * If this is a global reference from within a function, leave pn_op as
2135 * JSOP_NAME, etc. We could emit JSOP_*GVAR ops within function code if
2136 * only we could depend on the global frame's slots being valid for all
2137 * calls to the function, and if we could equate the atom index in the
2138 * function's atom map for every global name with its frame slot.
2140 if (cg
->inFunction())
2144 * We are optimizing global variables and there may be no pre-existing
2145 * global property named atom when this global script runs. If atom was
2146 * declared via const or var, optimize pn to access fp->vars using the
2147 * appropriate JSOP_*GVAR op.
2149 * FIXME: should be able to optimize global function access too.
2151 JS_ASSERT(dn_kind
== JSDefinition::VAR
|| dn_kind
== JSDefinition::CONST
);
2154 case JSOP_NAME
: op
= JSOP_GETGVAR
; break;
2155 case JSOP_SETNAME
: op
= JSOP_SETGVAR
; break;
2156 case JSOP_SETCONST
: /* NB: no change */ break;
2157 case JSOP_INCNAME
: op
= JSOP_INCGVAR
; break;
2158 case JSOP_NAMEINC
: op
= JSOP_GVARINC
; break;
2159 case JSOP_DECNAME
: op
= JSOP_DECGVAR
; break;
2160 case JSOP_NAMEDEC
: op
= JSOP_GVARDEC
; break;
2161 case JSOP_FORNAME
: /* NB: no change */ break;
2162 case JSOP_DELNAME
: /* NB: no change */ break;
2163 default: JS_NOT_REACHED("gvar");
2166 pn
->pn_cookie
= cookie
;
2167 pn
->pn_dflags
|= PND_BOUND
;
2171 uintN level
= UPVAR_FRAME_SKIP(cookie
);
2172 JS_ASSERT(cg
->staticLevel
>= level
);
2175 * A JSDefinition witnessed as a declaration by the parser cannot be an
2176 * upvar, unless it is the degenerate kind of upvar selected above (in the
2177 * code before the PND_GVAR test) for the special case of compile-and-go
2178 * code generated from eval called from a function, where the eval code
2179 * uses local vars defined in the function. We detect this upvar-for-eval
2180 * case by checking dn's op.
2182 if (PN_OP(dn
) == JSOP_GETUPVAR
) {
2183 JS_ASSERT(cg
->staticLevel
>= level
);
2184 if (op
!= JSOP_NAME
)
2188 JSStackFrame
*caller
= cg
->parser
->callerFrame
;
2191 JS_ASSERT(caller
->script
);
2193 JSTreeContext
*tc
= cg
;
2194 while (tc
->staticLevel
!= level
)
2196 JS_ASSERT(tc
->compiling());
2198 JSCodeGenerator
*evalcg
= (JSCodeGenerator
*) tc
;
2199 JS_ASSERT(evalcg
->compileAndGo());
2200 JS_ASSERT(caller
->fun
&& cg
->parser
->callerVarObj
== evalcg
->scopeChain
);
2203 * Don't generate upvars on the left side of a for loop. See
2204 * bug 470758 and bug 520513.
2206 if (evalcg
->flags
& TCF_IN_FOR_INIT
)
2209 if (cg
->staticLevel
== level
) {
2210 pn
->pn_op
= JSOP_GETUPVAR
;
2211 pn
->pn_cookie
= cookie
;
2212 pn
->pn_dflags
|= PND_BOUND
;
2216 return MakeUpvarForEval(pn
, cg
);
2219 uintN skip
= cg
->staticLevel
- level
;
2221 JS_ASSERT(cg
->inFunction());
2222 JS_ASSERT_IF(UPVAR_FRAME_SLOT(cookie
) != CALLEE_UPVAR_SLOT
,
2223 cg
->lexdeps
.lookup(atom
));
2224 JS_ASSERT(JOF_OPTYPE(op
) == JOF_ATOM
);
2225 JS_ASSERT(cg
->fun
->u
.i
.skipmin
<= skip
);
2228 * If op is a mutating opcode, this upvar's static level is too big to
2229 * index into the display, or the function is heavyweight, we fall back
2232 if (op
!= JSOP_NAME
)
2234 if (level
>= JS_DISPLAY_SIZE
)
2236 if (cg
->flags
& TCF_FUN_HEAVYWEIGHT
)
2239 if (FUN_FLAT_CLOSURE(cg
->fun
)) {
2243 * The function we're compiling may not be heavyweight, but if it
2244 * escapes as a funarg, we can't use JSOP_GETUPVAR/JSOP_CALLUPVAR.
2245 * Parser::analyzeFunctions has arranged for this function's
2246 * enclosing functions to be heavyweight, so we can safely stick
2247 * with JSOP_NAME/JSOP_CALLNAME.
2249 if (cg
->funbox
->node
->pn_dflags
& PND_FUNARG
)
2253 * Generator functions may be resumed from any call stack, which
2254 * defeats the display optimization to static link searching used
2255 * by JSOP_{GET,CALL}UPVAR.
2257 if (cg
->skipSpansGenerator(skip
))
2263 ale
= cg
->upvarList
.lookup(atom
);
2265 index
= ALE_INDEX(ale
);
2267 if (!js_AddLocal(cx
, cg
->fun
, atom
, JSLOCAL_UPVAR
))
2270 ale
= cg
->upvarList
.add(cg
->parser
, atom
);
2273 index
= ALE_INDEX(ale
);
2274 JS_ASSERT(index
== cg
->upvarList
.count
- 1);
2276 uint32
*vector
= cg
->upvarMap
.vector
;
2278 uint32 length
= cg
->lexdeps
.count
;
2280 vector
= (uint32
*) js_calloc(length
* sizeof *vector
);
2282 JS_ReportOutOfMemory(cx
);
2285 cg
->upvarMap
.vector
= vector
;
2286 cg
->upvarMap
.length
= length
;
2289 uintN slot
= UPVAR_FRAME_SLOT(cookie
);
2290 if (slot
!= CALLEE_UPVAR_SLOT
&& dn_kind
!= JSDefinition::ARG
) {
2291 JSTreeContext
*tc
= cg
;
2294 } while (tc
->staticLevel
!= level
);
2295 if (tc
->inFunction())
2296 slot
+= tc
->fun
->nargs
;
2299 vector
[index
] = MAKE_UPVAR_COOKIE(skip
, slot
);
2303 pn
->pn_cookie
= index
;
2304 pn
->pn_dflags
|= PND_BOUND
;
2309 * We are compiling a function body and may be able to optimize name
2310 * to stack slot. Look for an argument or variable in the function and
2311 * rewrite pn_op and update pn accordingly.
2314 case JSDefinition::UNKNOWN
:
2317 case JSDefinition::LET
:
2319 case JSOP_NAME
: op
= JSOP_GETLOCAL
; break;
2320 case JSOP_SETNAME
: op
= JSOP_SETLOCAL
; break;
2321 case JSOP_INCNAME
: op
= JSOP_INCLOCAL
; break;
2322 case JSOP_NAMEINC
: op
= JSOP_LOCALINC
; break;
2323 case JSOP_DECNAME
: op
= JSOP_DECLOCAL
; break;
2324 case JSOP_NAMEDEC
: op
= JSOP_LOCALDEC
; break;
2325 case JSOP_FORNAME
: op
= JSOP_FORLOCAL
; break;
2326 default: JS_NOT_REACHED("let");
2330 case JSDefinition::ARG
:
2332 case JSOP_NAME
: op
= JSOP_GETARG
; break;
2333 case JSOP_SETNAME
: op
= JSOP_SETARG
; break;
2334 case JSOP_INCNAME
: op
= JSOP_INCARG
; break;
2335 case JSOP_NAMEINC
: op
= JSOP_ARGINC
; break;
2336 case JSOP_DECNAME
: op
= JSOP_DECARG
; break;
2337 case JSOP_NAMEDEC
: op
= JSOP_ARGDEC
; break;
2338 case JSOP_FORNAME
: op
= JSOP_FORARG
; break;
2339 default: JS_NOT_REACHED("arg");
2341 JS_ASSERT(!pn
->isConst());
2344 case JSDefinition::VAR
:
2345 if (PN_OP(dn
) == JSOP_CALLEE
) {
2346 JS_ASSERT(op
!= JSOP_CALLEE
);
2347 JS_ASSERT((cg
->fun
->flags
& JSFUN_LAMBDA
) && atom
== cg
->fun
->atom
);
2350 * Leave pn->pn_op == JSOP_NAME if cg->fun is heavyweight, as we
2351 * cannot be sure cg->fun is not something of the form:
2353 * var ff = (function f(s) { eval(s); return f; });
2355 * where a caller invokes ff("var f = 42"). The result returned for
2356 * such an invocation must be 42, since the callee name is
2357 * lexically bound in an outer declarative environment from the
2358 * function's activation. See jsfun.cpp:call_resolve.
2360 JS_ASSERT(op
!= JSOP_DELNAME
);
2361 if (!(cg
->flags
& TCF_FUN_HEAVYWEIGHT
)) {
2363 pn
->pn_dflags
|= PND_CONST
;
2367 pn
->pn_dflags
|= PND_BOUND
;
2373 JS_ASSERT_IF(dn_kind
!= JSDefinition::FUNCTION
,
2374 dn_kind
== JSDefinition::VAR
||
2375 dn_kind
== JSDefinition::CONST
);
2377 case JSOP_NAME
: op
= JSOP_GETLOCAL
; break;
2378 case JSOP_SETNAME
: op
= JSOP_SETLOCAL
; break;
2379 case JSOP_SETCONST
: op
= JSOP_SETLOCAL
; break;
2380 case JSOP_INCNAME
: op
= JSOP_INCLOCAL
; break;
2381 case JSOP_NAMEINC
: op
= JSOP_LOCALINC
; break;
2382 case JSOP_DECNAME
: op
= JSOP_DECLOCAL
; break;
2383 case JSOP_NAMEDEC
: op
= JSOP_LOCALDEC
; break;
2384 case JSOP_FORNAME
: op
= JSOP_FORLOCAL
; break;
2385 default: JS_NOT_REACHED("local");
2387 JS_ASSERT_IF(dn_kind
== JSDefinition::CONST
, pn
->pn_dflags
& PND_CONST
);
2391 JS_ASSERT(op
!= PN_OP(pn
));
2393 pn
->pn_cookie
= UPVAR_FRAME_SLOT(cookie
);
2394 pn
->pn_dflags
|= PND_BOUND
;
2399 * If pn contains a useful expression, return true with *answer set to true.
2400 * If pn contains a useless expression, return true with *answer set to false.
2401 * Return false on error.
2403 * The caller should initialize *answer to false and invoke this function on
2404 * an expression statement or similar subtree to decide whether the tree could
2405 * produce code that has any side effects. For an expression statement, we
2406 * define useless code as code with no side effects, because the main effect,
2407 * the value left on the stack after the code executes, will be discarded by a
2411 CheckSideEffects(JSContext
*cx
, JSCodeGenerator
*cg
, JSParseNode
*pn
,
2421 switch (pn
->pn_arity
) {
2424 * A named function, contrary to ES3, is no longer useful, because we
2425 * bind its name lexically (using JSOP_CALLEE) instead of creating an
2426 * Object instance and binding a readonly, permanent property in it
2427 * (the object and binding can be detected and hijacked or captured).
2428 * This is a bug fix to ES3; it is fixed in ES3.1 drafts.
2434 if (pn
->pn_op
== JSOP_NOP
||
2435 pn
->pn_op
== JSOP_OR
|| pn
->pn_op
== JSOP_AND
||
2436 pn
->pn_op
== JSOP_STRICTEQ
|| pn
->pn_op
== JSOP_STRICTNE
) {
2438 * Non-operators along with ||, &&, ===, and !== never invoke
2439 * toString or valueOf.
2441 for (pn2
= pn
->pn_head
; pn2
; pn2
= pn2
->pn_next
)
2442 ok
&= CheckSideEffects(cx
, cg
, pn2
, answer
);
2445 * All invocation operations (construct: TOK_NEW, call: TOK_LP)
2446 * are presumed to be useful, because they may have side effects
2447 * even if their main effect (their return value) is discarded.
2449 * TOK_LB binary trees of 3 or more nodes are flattened into lists
2450 * to avoid too much recursion. All such lists must be presumed
2451 * to be useful because each index operation could invoke a getter
2452 * (the JSOP_ARGUMENTS special case below, in the PN_BINARY case,
2453 * does not apply here: arguments[i][j] might invoke a getter).
2455 * Likewise, array and object initialisers may call prototype
2456 * setters (the __defineSetter__ built-in, and writable __proto__
2457 * on Array.prototype create this hazard). Initialiser list nodes
2458 * have JSOP_NEWINIT in their pn_op.
2465 ok
= CheckSideEffects(cx
, cg
, pn
->pn_kid1
, answer
) &&
2466 CheckSideEffects(cx
, cg
, pn
->pn_kid2
, answer
) &&
2467 CheckSideEffects(cx
, cg
, pn
->pn_kid3
, answer
);
2471 if (pn
->pn_type
== TOK_ASSIGN
) {
2473 * Assignment is presumed to be useful, even if the next operation
2474 * is another assignment overwriting this one's ostensible effect,
2475 * because the left operand may be a property with a setter that
2478 * The only exception is assignment of a useless value to a const
2479 * declared in the function currently being compiled.
2482 if (pn2
->pn_type
!= TOK_NAME
) {
2485 if (!BindNameToSlot(cx
, cg
, pn2
))
2487 if (!CheckSideEffects(cx
, cg
, pn
->pn_right
, answer
))
2489 if (!*answer
&& (pn
->pn_op
!= JSOP_NOP
|| !pn2
->isConst()))
2493 if (pn
->pn_op
== JSOP_OR
|| pn
->pn_op
== JSOP_AND
||
2494 pn
->pn_op
== JSOP_STRICTEQ
|| pn
->pn_op
== JSOP_STRICTNE
) {
2496 * ||, &&, ===, and !== do not convert their operands via
2497 * toString or valueOf method calls.
2499 ok
= CheckSideEffects(cx
, cg
, pn
->pn_left
, answer
) &&
2500 CheckSideEffects(cx
, cg
, pn
->pn_right
, answer
);
2503 * We can't easily prove that neither operand ever denotes an
2504 * object with a toString or valueOf method.
2512 switch (pn
->pn_type
) {
2515 switch (pn2
->pn_type
) {
2517 if (!BindNameToSlot(cx
, cg
, pn2
))
2519 if (pn2
->isConst()) {
2525 #if JS_HAS_XML_SUPPORT
2530 /* All these delete addressing modes have effects too. */
2534 ok
= CheckSideEffects(cx
, cg
, pn2
, answer
);
2540 if (pn
->pn_op
== JSOP_NOT
) {
2541 /* ! does not convert its operand via toString or valueOf. */
2542 ok
= CheckSideEffects(cx
, cg
, pn
->pn_kid
, answer
);
2549 * All of TOK_INC, TOK_DEC, TOK_THROW, TOK_YIELD, and TOK_DEFSHARP
2550 * have direct effects. Of the remaining unary-arity node types,
2551 * we can't easily prove that the operand never denotes an object
2552 * with a toString or valueOf method.
2561 * Take care to avoid trying to bind a label name (labels, both for
2562 * statements and property values in object initialisers, have pn_op
2563 * defaulted to JSOP_NOP).
2565 if (pn
->pn_type
== TOK_NAME
&& pn
->pn_op
!= JSOP_NOP
) {
2566 if (!BindNameToSlot(cx
, cg
, pn
))
2568 if (pn
->pn_op
!= JSOP_ARGUMENTS
&& pn
->pn_op
!= JSOP_CALLEE
&&
2569 pn
->pn_cookie
== FREE_UPVAR_COOKIE
) {
2571 * Not an argument or local variable use, and not a use of a
2572 * unshadowed named function expression's given name, so this
2573 * expression could invoke a getter that has side effects.
2578 pn2
= pn
->maybeExpr();
2579 if (pn
->pn_type
== TOK_DOT
) {
2580 if (pn2
->pn_type
== TOK_NAME
&& !BindNameToSlot(cx
, cg
, pn2
))
2582 if (!(pn2
->pn_op
== JSOP_ARGUMENTS
&&
2583 pn
->pn_atom
== cx
->runtime
->atomState
.lengthAtom
)) {
2585 * Any dotted property reference could call a getter, except
2586 * for arguments.length where arguments is unambiguous.
2591 ok
= CheckSideEffects(cx
, cg
, pn2
, answer
);
2595 ok
= CheckSideEffects(cx
, cg
, pn
->pn_tree
, answer
);
2599 if (pn
->pn_type
== TOK_DEBUGGER
)
2607 EmitNameOp(JSContext
*cx
, JSCodeGenerator
*cg
, JSParseNode
*pn
,
2612 if (!BindNameToSlot(cx
, cg
, pn
))
2622 JS_ASSERT(!cg
->funbox
);
2629 op
= JSOP_CALLLOCAL
;
2632 op
= JSOP_CALLUPVAR
;
2635 op
= JSOP_CALLDSLOT
;
2638 JS_ASSERT(op
== JSOP_ARGUMENTS
|| op
== JSOP_CALLEE
);
2643 if (op
== JSOP_ARGUMENTS
|| op
== JSOP_CALLEE
) {
2644 if (js_Emit1(cx
, cg
, op
) < 0)
2646 if (callContext
&& js_Emit1(cx
, cg
, JSOP_NULL
) < 0)
2649 if (pn
->pn_cookie
!= FREE_UPVAR_COOKIE
) {
2650 EMIT_UINT16_IMM_OP(op
, pn
->pn_cookie
);
2652 if (!EmitAtomOp(cx
, pn
, op
, cg
))
2660 #if JS_HAS_XML_SUPPORT
2662 EmitXMLName(JSContext
*cx
, JSParseNode
*pn
, JSOp op
, JSCodeGenerator
*cg
)
2667 JS_ASSERT(pn
->pn_type
== TOK_UNARYOP
);
2668 JS_ASSERT(pn
->pn_op
== JSOP_XMLNAME
);
2669 JS_ASSERT(op
== JSOP_XMLNAME
|| op
== JSOP_CALLXMLNAME
);
2672 oldflags
= cg
->flags
;
2673 cg
->flags
&= ~TCF_IN_FOR_INIT
;
2674 if (!js_EmitTree(cx
, cg
, pn2
))
2676 cg
->flags
|= oldflags
& TCF_IN_FOR_INIT
;
2677 if (js_NewSrcNote2(cx
, cg
, SRC_PCBASE
,
2678 CG_OFFSET(cg
) - pn2
->pn_offset
) < 0) {
2682 return js_Emit1(cx
, cg
, op
) >= 0;
2687 EmitSpecialPropOp(JSContext
*cx
, JSParseNode
*pn
, JSOp op
, JSCodeGenerator
*cg
)
2690 * Special case for obj.__proto__ to deoptimize away from fast paths in the
2691 * interpreter and trace recorder, which skip dense array instances by
2692 * going up to Array.prototype before looking up the property name.
2694 JSAtomListElement
*ale
= cg
->atomList
.add(cg
->parser
, pn
->pn_atom
);
2697 if (!EmitIndexOp(cx
, JSOP_QNAMEPART
, ALE_INDEX(ale
), cg
))
2699 if (js_Emit1(cx
, cg
, op
) < 0)
2705 EmitPropOp(JSContext
*cx
, JSParseNode
*pn
, JSOp op
, JSCodeGenerator
*cg
,
2708 JSParseNode
*pn2
, *pndot
, *pnup
, *pndown
;
2711 JS_ASSERT(pn
->pn_arity
== PN_NAME
);
2712 pn2
= pn
->maybeExpr();
2714 /* Special case deoptimization for __proto__. */
2715 if ((op
== JSOP_GETPROP
|| op
== JSOP_CALLPROP
) &&
2716 pn
->pn_atom
== cx
->runtime
->atomState
.protoAtom
) {
2717 if (pn2
&& !js_EmitTree(cx
, cg
, pn2
))
2719 return EmitSpecialPropOp(cx
, pn
, callContext
? JSOP_CALLELEM
: JSOP_GETELEM
, cg
);
2723 JS_ASSERT(pn
->pn_type
== TOK_DOT
);
2724 JS_ASSERT(op
== JSOP_GETPROP
);
2726 } else if (op
== JSOP_GETPROP
&& pn
->pn_type
== TOK_DOT
) {
2727 if (pn2
->pn_op
== JSOP_THIS
) {
2728 if (pn
->pn_atom
!= cx
->runtime
->atomState
.lengthAtom
) {
2729 /* Fast path for gets of |this.foo|. */
2730 return EmitAtomOp(cx
, pn
, JSOP_GETTHISPROP
, cg
);
2732 } else if (pn2
->pn_type
== TOK_NAME
) {
2735 * - arguments.length into JSOP_ARGCNT
2736 * - argname.prop into JSOP_GETARGPROP
2737 * - localname.prop into JSOP_GETLOCALPROP
2738 * but don't do this if the property is 'length' -- prefer to emit
2739 * JSOP_GETARG, etc., and then JSOP_LENGTH.
2741 if (!BindNameToSlot(cx
, cg
, pn2
))
2743 if (pn
->pn_atom
== cx
->runtime
->atomState
.lengthAtom
) {
2744 if (pn2
->pn_op
== JSOP_ARGUMENTS
)
2745 return js_Emit1(cx
, cg
, JSOP_ARGCNT
) >= 0;
2747 switch (pn2
->pn_op
) {
2749 op
= JSOP_GETARGPROP
;
2752 op
= JSOP_GETLOCALPROP
;
2754 JSAtomListElement
*ale
;
2757 ale
= cg
->atomList
.add(cg
->parser
, pn
->pn_atom
);
2760 atomIndex
= ALE_INDEX(ale
);
2761 return EmitSlotIndexOp(cx
, op
, pn2
->pn_cookie
, atomIndex
, cg
);
2771 * If the object operand is also a dotted property reference, reverse the
2772 * list linked via pn_expr temporarily so we can iterate over it from the
2773 * bottom up (reversing again as we go), to avoid excessive recursion.
2775 if (pn2
->pn_type
== TOK_DOT
) {
2778 top
= CG_OFFSET(cg
);
2780 /* Reverse pndot->pn_expr to point up, not down. */
2781 pndot
->pn_offset
= top
;
2782 JS_ASSERT(!pndot
->pn_used
);
2783 pndown
= pndot
->pn_expr
;
2784 pndot
->pn_expr
= pnup
;
2785 if (pndown
->pn_type
!= TOK_DOT
)
2791 /* pndown is a primary expression, not a dotted property reference. */
2792 if (!js_EmitTree(cx
, cg
, pndown
))
2796 /* Walk back up the list, emitting annotated name ops. */
2797 if (js_NewSrcNote2(cx
, cg
, SRC_PCBASE
,
2798 CG_OFFSET(cg
) - pndown
->pn_offset
) < 0) {
2802 /* Special case deoptimization on __proto__, as above. */
2803 if (pndot
->pn_arity
== PN_NAME
&& pndot
->pn_atom
== cx
->runtime
->atomState
.protoAtom
) {
2804 if (!EmitSpecialPropOp(cx
, pndot
, JSOP_GETELEM
, cg
))
2806 } else if (!EmitAtomOp(cx
, pndot
, PN_OP(pndot
), cg
)) {
2810 /* Reverse the pn_expr link again. */
2811 pnup
= pndot
->pn_expr
;
2812 pndot
->pn_expr
= pndown
;
2814 } while ((pndot
= pnup
) != NULL
);
2816 if (!js_EmitTree(cx
, cg
, pn2
))
2820 if (js_NewSrcNote2(cx
, cg
, SRC_PCBASE
,
2821 CG_OFFSET(cg
) - pn2
->pn_offset
) < 0) {
2825 return EmitAtomOp(cx
, pn
, op
, cg
);
2829 EmitElemOp(JSContext
*cx
, JSParseNode
*pn
, JSOp op
, JSCodeGenerator
*cg
)
2832 JSParseNode
*left
, *right
, *next
, ltmp
, rtmp
;
2835 top
= CG_OFFSET(cg
);
2836 if (pn
->pn_arity
== PN_LIST
) {
2837 /* Left-associative operator chain to avoid too much recursion. */
2838 JS_ASSERT(pn
->pn_op
== JSOP_GETELEM
);
2839 JS_ASSERT(pn
->pn_count
>= 3);
2842 next
= left
->pn_next
;
2843 JS_ASSERT(next
!= right
);
2846 * Try to optimize arguments[0][j]... into JSOP_ARGSUB<0> followed by
2847 * one or more index expression and JSOP_GETELEM op pairs.
2849 if (left
->pn_type
== TOK_NAME
&& next
->pn_type
== TOK_NUMBER
) {
2850 if (!BindNameToSlot(cx
, cg
, left
))
2852 if (left
->pn_op
== JSOP_ARGUMENTS
&&
2853 JSDOUBLE_IS_INT(next
->pn_dval
, slot
) &&
2854 (jsuint
)slot
< JS_BIT(16)) {
2856 * arguments[i]() requires arguments object as "this".
2857 * Check that we never generates list for that usage.
2859 JS_ASSERT(op
!= JSOP_CALLELEM
|| next
->pn_next
);
2860 left
->pn_offset
= next
->pn_offset
= top
;
2861 EMIT_UINT16_IMM_OP(JSOP_ARGSUB
, (jsatomid
)slot
);
2863 next
= left
->pn_next
;
2868 * Check whether we generated JSOP_ARGSUB, just above, and have only
2869 * one more index expression to emit. Given arguments[0][j], we must
2870 * skip the while loop altogether, falling through to emit code for j
2871 * (in the subtree referenced by right), followed by the annotated op,
2872 * at the bottom of this function.
2874 JS_ASSERT(next
!= right
|| pn
->pn_count
== 3);
2875 if (left
== pn
->pn_head
) {
2876 if (!js_EmitTree(cx
, cg
, left
))
2879 while (next
!= right
) {
2880 if (!js_EmitTree(cx
, cg
, next
))
2882 if (js_NewSrcNote2(cx
, cg
, SRC_PCBASE
, CG_OFFSET(cg
) - top
) < 0)
2884 if (js_Emit1(cx
, cg
, JSOP_GETELEM
) < 0)
2886 next
= next
->pn_next
;
2889 if (pn
->pn_arity
== PN_NAME
) {
2891 * Set left and right so pn appears to be a TOK_LB node, instead
2892 * of a TOK_DOT node. See the TOK_FOR/IN case in js_EmitTree, and
2893 * EmitDestructuringOps nearer below. In the destructuring case,
2894 * the base expression (pn_expr) of the name may be null, which
2895 * means we have to emit a JSOP_BINDNAME.
2897 left
= pn
->maybeExpr();
2900 left
->pn_type
= TOK_STRING
;
2901 left
->pn_op
= JSOP_BINDNAME
;
2902 left
->pn_arity
= PN_NULLARY
;
2903 left
->pn_pos
= pn
->pn_pos
;
2904 left
->pn_atom
= pn
->pn_atom
;
2907 right
->pn_type
= TOK_STRING
;
2908 JS_ASSERT(ATOM_IS_STRING(pn
->pn_atom
));
2909 right
->pn_op
= js_IsIdentifier(ATOM_TO_STRING(pn
->pn_atom
))
2912 right
->pn_arity
= PN_NULLARY
;
2913 right
->pn_pos
= pn
->pn_pos
;
2914 right
->pn_atom
= pn
->pn_atom
;
2916 JS_ASSERT(pn
->pn_arity
== PN_BINARY
);
2918 right
= pn
->pn_right
;
2921 /* Try to optimize arguments[0] (e.g.) into JSOP_ARGSUB<0>. */
2922 if (op
== JSOP_GETELEM
&&
2923 left
->pn_type
== TOK_NAME
&&
2924 right
->pn_type
== TOK_NUMBER
) {
2925 if (!BindNameToSlot(cx
, cg
, left
))
2927 if (left
->pn_op
== JSOP_ARGUMENTS
&&
2928 JSDOUBLE_IS_INT(right
->pn_dval
, slot
) &&
2929 (jsuint
)slot
< JS_BIT(16)) {
2930 left
->pn_offset
= right
->pn_offset
= top
;
2931 EMIT_UINT16_IMM_OP(JSOP_ARGSUB
, (jsatomid
)slot
);
2936 if (!js_EmitTree(cx
, cg
, left
))
2940 /* The right side of the descendant operator is implicitly quoted. */
2941 JS_ASSERT(op
!= JSOP_DESCENDANTS
|| right
->pn_type
!= TOK_STRING
||
2942 right
->pn_op
== JSOP_QNAMEPART
);
2943 if (!js_EmitTree(cx
, cg
, right
))
2945 if (js_NewSrcNote2(cx
, cg
, SRC_PCBASE
, CG_OFFSET(cg
) - top
) < 0)
2947 return js_Emit1(cx
, cg
, op
) >= 0;
2951 EmitNumberOp(JSContext
*cx
, jsdouble dval
, JSCodeGenerator
*cg
)
2958 JSAtomListElement
*ale
;
2960 if (JSDOUBLE_IS_INT(dval
, ival
) && INT_FITS_IN_JSVAL(ival
)) {
2962 return js_Emit1(cx
, cg
, JSOP_ZERO
) >= 0;
2964 return js_Emit1(cx
, cg
, JSOP_ONE
) >= 0;
2965 if ((jsint
)(int8
)ival
== ival
)
2966 return js_Emit2(cx
, cg
, JSOP_INT8
, (jsbytecode
)(int8
)ival
) >= 0;
2969 if (u
< JS_BIT(16)) {
2970 EMIT_UINT16_IMM_OP(JSOP_UINT16
, u
);
2971 } else if (u
< JS_BIT(24)) {
2972 off
= js_EmitN(cx
, cg
, JSOP_UINT24
, 3);
2975 pc
= CG_CODE(cg
, off
);
2978 off
= js_EmitN(cx
, cg
, JSOP_INT32
, 4);
2981 pc
= CG_CODE(cg
, off
);
2982 SET_INT32(pc
, ival
);
2987 atom
= js_AtomizeDouble(cx
, dval
);
2991 ale
= cg
->atomList
.add(cg
->parser
, atom
);
2994 return EmitIndexOp(cx
, JSOP_DOUBLE
, ALE_INDEX(ale
), cg
);
2998 EmitSwitch(JSContext
*cx
, JSCodeGenerator
*cg
, JSParseNode
*pn
,
2999 JSStmtInfo
*stmtInfo
)
3002 JSBool ok
, hasDefault
, constPropagated
;
3003 ptrdiff_t top
, off
, defaultOffset
;
3004 JSParseNode
*pn2
, *pn3
, *pn4
;
3005 uint32 caseCount
, tableLength
;
3006 JSParseNode
**table
;
3011 JSAtomListElement
*ale
;
3013 size_t switchSize
, tableSize
;
3014 jsbytecode
*pc
, *savepc
;
3015 #if JS_HAS_BLOCK_SCOPE
3019 /* Try for most optimal, fall back if not dense ints, and per ECMAv2. */
3020 switchOp
= JSOP_TABLESWITCH
;
3022 hasDefault
= constPropagated
= JS_FALSE
;
3026 * If the switch contains let variables scoped by its body, model the
3027 * resulting block on the stack first, before emitting the discriminant's
3028 * bytecode (in case the discriminant contains a stack-model dependency
3029 * such as a let expression).
3032 #if JS_HAS_BLOCK_SCOPE
3033 if (pn2
->pn_type
== TOK_LEXICALSCOPE
) {
3035 * Push the body's block scope before discriminant code-gen for proper
3036 * static block scope linkage in case the discriminant contains a let
3037 * expression. The block's locals must lie under the discriminant on
3038 * the stack so that case-dispatch bytecodes can find the discriminant
3041 count
= OBJ_BLOCK_COUNT(cx
, pn2
->pn_objbox
->object
);
3042 js_PushBlockScope(cg
, stmtInfo
, pn2
->pn_objbox
->object
, -1);
3043 stmtInfo
->type
= STMT_SWITCH
;
3045 /* Emit JSOP_ENTERBLOCK before code to evaluate the discriminant. */
3046 if (!EmitEnterBlock(cx
, pn2
, cg
))
3050 * Pop the switch's statement info around discriminant code-gen. Note
3051 * how this leaves cg->blockChain referencing the switch's
3052 * block scope object, which is necessary for correct block parenting
3053 * in the case where the discriminant contains a let expression.
3055 cg
->topStmt
= stmtInfo
->down
;
3056 cg
->topScopeStmt
= stmtInfo
->downScope
;
3066 * Emit code for the discriminant first (or nearly first, in the case of a
3067 * switch whose body is a block scope).
3069 if (!js_EmitTree(cx
, cg
, pn
->pn_left
))
3072 /* Switch bytecodes run from here till end of final case. */
3073 top
= CG_OFFSET(cg
);
3074 #if !JS_HAS_BLOCK_SCOPE
3075 js_PushStatement(cg
, stmtInfo
, STMT_SWITCH
, top
);
3077 if (pn2
->pn_type
== TOK_LC
) {
3078 js_PushStatement(cg
, stmtInfo
, STMT_SWITCH
, top
);
3080 /* Re-push the switch's statement info record. */
3081 cg
->topStmt
= cg
->topScopeStmt
= stmtInfo
;
3083 /* Set the statement info record's idea of top. */
3084 stmtInfo
->update
= top
;
3086 /* Advance pn2 to refer to the switch case list. */
3091 caseCount
= pn2
->pn_count
;
3095 if (caseCount
== 0 ||
3097 (hasDefault
= (pn2
->pn_head
->pn_type
== TOK_DEFAULT
)))) {
3102 #define INTMAP_LENGTH 256
3103 jsbitmap intmap_space
[INTMAP_LENGTH
];
3104 jsbitmap
*intmap
= NULL
;
3105 int32 intmap_bitlen
= 0;
3107 low
= JSVAL_INT_MAX
;
3108 high
= JSVAL_INT_MIN
;
3110 for (pn3
= pn2
->pn_head
; pn3
; pn3
= pn3
->pn_next
) {
3111 if (pn3
->pn_type
== TOK_DEFAULT
) {
3112 hasDefault
= JS_TRUE
;
3113 caseCount
--; /* one of the "cases" was the default */
3117 JS_ASSERT(pn3
->pn_type
== TOK_CASE
);
3118 if (switchOp
== JSOP_CONDSWITCH
)
3122 while (pn4
->pn_type
== TOK_RP
)
3124 switch (pn4
->pn_type
) {
3127 if (JSDOUBLE_IS_INT(d
, i
) && INT_FITS_IN_JSVAL(i
)) {
3128 pn3
->pn_val
= INT_TO_JSVAL(i
);
3130 atom
= js_AtomizeDouble(cx
, d
);
3135 pn3
->pn_val
= ATOM_KEY(atom
);
3139 pn3
->pn_val
= ATOM_KEY(pn4
->pn_atom
);
3142 if (!pn4
->maybeExpr()) {
3143 ok
= LookupCompileTimeConstant(cx
, cg
, pn4
->pn_atom
, &v
);
3146 if (v
!= JSVAL_HOLE
) {
3147 if (!JSVAL_IS_PRIMITIVE(v
)) {
3149 * XXX JSOP_LOOKUPSWITCH does not support const-
3150 * propagated object values, see bug 407186.
3152 switchOp
= JSOP_CONDSWITCH
;
3156 constPropagated
= JS_TRUE
;
3162 if (pn4
->pn_op
== JSOP_TRUE
) {
3163 pn3
->pn_val
= JSVAL_TRUE
;
3166 if (pn4
->pn_op
== JSOP_FALSE
) {
3167 pn3
->pn_val
= JSVAL_FALSE
;
3170 if (pn4
->pn_op
== JSOP_NULL
) {
3171 pn3
->pn_val
= JSVAL_NULL
;
3176 switchOp
= JSOP_CONDSWITCH
;
3180 JS_ASSERT(JSVAL_IS_PRIMITIVE(pn3
->pn_val
));
3182 if (switchOp
!= JSOP_TABLESWITCH
)
3184 if (!JSVAL_IS_INT(pn3
->pn_val
)) {
3185 switchOp
= JSOP_LOOKUPSWITCH
;
3188 i
= JSVAL_TO_INT(pn3
->pn_val
);
3189 if ((jsuint
)(i
+ (jsint
)JS_BIT(15)) >= (jsuint
)JS_BIT(16)) {
3190 switchOp
= JSOP_LOOKUPSWITCH
;
3199 * Check for duplicates, which require a JSOP_LOOKUPSWITCH.
3200 * We bias i by 65536 if it's negative, and hope that's a rare
3201 * case (because it requires a malloc'd bitmap).
3205 if (i
>= intmap_bitlen
) {
3207 i
< (INTMAP_LENGTH
<< JS_BITS_PER_WORD_LOG2
)) {
3208 intmap
= intmap_space
;
3209 intmap_bitlen
= INTMAP_LENGTH
<< JS_BITS_PER_WORD_LOG2
;
3211 /* Just grab 8K for the worst-case bitmap. */
3212 intmap_bitlen
= JS_BIT(16);
3213 intmap
= (jsbitmap
*)
3214 cx
->malloc((JS_BIT(16) >> JS_BITS_PER_WORD_LOG2
)
3215 * sizeof(jsbitmap
));
3217 JS_ReportOutOfMemory(cx
);
3221 memset(intmap
, 0, intmap_bitlen
>> JS_BITS_PER_BYTE_LOG2
);
3223 if (JS_TEST_BIT(intmap
, i
)) {
3224 switchOp
= JSOP_LOOKUPSWITCH
;
3227 JS_SET_BIT(intmap
, i
);
3231 if (intmap
&& intmap
!= intmap_space
)
3237 * Compute table length and select lookup instead if overlarge or
3238 * more than half-sparse.
3240 if (switchOp
== JSOP_TABLESWITCH
) {
3241 tableLength
= (uint32
)(high
- low
+ 1);
3242 if (tableLength
>= JS_BIT(16) || tableLength
> 2 * caseCount
)
3243 switchOp
= JSOP_LOOKUPSWITCH
;
3244 } else if (switchOp
== JSOP_LOOKUPSWITCH
) {
3246 * Lookup switch supports only atom indexes below 64K limit.
3247 * Conservatively estimate the maximum possible index during
3248 * switch generation and use conditional switch if it exceeds
3251 if (caseCount
+ cg
->atomList
.count
> JS_BIT(16))
3252 switchOp
= JSOP_CONDSWITCH
;
3257 * Emit a note with two offsets: first tells total switch code length,
3258 * second tells offset to first JSOP_CASE if condswitch.
3260 noteIndex
= js_NewSrcNote3(cx
, cg
, SRC_SWITCH
, 0, 0);
3264 if (switchOp
== JSOP_CONDSWITCH
) {
3266 * 0 bytes of immediate for unoptimized ECMAv2 switch.
3269 } else if (switchOp
== JSOP_TABLESWITCH
) {
3271 * 3 offsets (len, low, high) before the table, 1 per entry.
3273 switchSize
= (size_t)(JUMP_OFFSET_LEN
* (3 + tableLength
));
3276 * JSOP_LOOKUPSWITCH:
3277 * 1 offset (len) and 1 atom index (npairs) before the table,
3278 * 1 atom index and 1 jump offset per entry.
3280 switchSize
= (size_t)(JUMP_OFFSET_LEN
+ INDEX_LEN
+
3281 (INDEX_LEN
+ JUMP_OFFSET_LEN
) * caseCount
);
3285 * Emit switchOp followed by switchSize bytes of jump or lookup table.
3287 * If switchOp is JSOP_LOOKUPSWITCH or JSOP_TABLESWITCH, it is crucial
3288 * to emit the immediate operand(s) by which bytecode readers such as
3289 * BuildSpanDepTable discover the length of the switch opcode *before*
3290 * calling js_SetJumpOffset (which may call BuildSpanDepTable). It's
3291 * also important to zero all unknown jump offset immediate operands,
3292 * so they can be converted to span dependencies with null targets to
3293 * be computed later (js_EmitN zeros switchSize bytes after switchOp).
3295 if (js_EmitN(cx
, cg
, switchOp
, switchSize
) < 0)
3299 if (switchOp
== JSOP_CONDSWITCH
) {
3300 intN caseNoteIndex
= -1;
3301 JSBool beforeCases
= JS_TRUE
;
3303 /* Emit code for evaluating cases and jumping to case statements. */
3304 for (pn3
= pn2
->pn_head
; pn3
; pn3
= pn3
->pn_next
) {
3306 if (pn4
&& !js_EmitTree(cx
, cg
, pn4
))
3308 if (caseNoteIndex
>= 0) {
3309 /* off is the previous JSOP_CASE's bytecode offset. */
3310 if (!js_SetSrcNoteOffset(cx
, cg
, (uintN
)caseNoteIndex
, 0,
3311 CG_OFFSET(cg
) - off
)) {
3316 JS_ASSERT(pn3
->pn_type
== TOK_DEFAULT
);
3319 caseNoteIndex
= js_NewSrcNote2(cx
, cg
, SRC_PCDELTA
, 0);
3320 if (caseNoteIndex
< 0)
3322 off
= EmitJump(cx
, cg
, JSOP_CASE
, 0);
3325 pn3
->pn_offset
= off
;
3327 uintN noteCount
, noteCountDelta
;
3329 /* Switch note's second offset is to first JSOP_CASE. */
3330 noteCount
= CG_NOTE_COUNT(cg
);
3331 if (!js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 1,
3335 noteCountDelta
= CG_NOTE_COUNT(cg
) - noteCount
;
3336 if (noteCountDelta
!= 0)
3337 caseNoteIndex
+= noteCountDelta
;
3338 beforeCases
= JS_FALSE
;
3343 * If we didn't have an explicit default (which could fall in between
3344 * cases, preventing us from fusing this js_SetSrcNoteOffset with the
3345 * call in the loop above), link the last case to the implicit default
3346 * for the decompiler.
3349 caseNoteIndex
>= 0 &&
3350 !js_SetSrcNoteOffset(cx
, cg
, (uintN
)caseNoteIndex
, 0,
3351 CG_OFFSET(cg
) - off
)) {
3355 /* Emit default even if no explicit default statement. */
3356 defaultOffset
= EmitJump(cx
, cg
, JSOP_DEFAULT
, 0);
3357 if (defaultOffset
< 0)
3360 pc
= CG_CODE(cg
, top
+ JUMP_OFFSET_LEN
);
3362 if (switchOp
== JSOP_TABLESWITCH
) {
3363 /* Fill in switch bounds, which we know fit in 16-bit offsets. */
3364 SET_JUMP_OFFSET(pc
, low
);
3365 pc
+= JUMP_OFFSET_LEN
;
3366 SET_JUMP_OFFSET(pc
, high
);
3367 pc
+= JUMP_OFFSET_LEN
;
3370 * Use malloc to avoid arena bloat for programs with many switches.
3371 * We free table if non-null at label out, so all control flow must
3372 * exit this function through goto out or goto bad.
3374 if (tableLength
!= 0) {
3375 tableSize
= (size_t)tableLength
* sizeof *table
;
3376 table
= (JSParseNode
**) cx
->malloc(tableSize
);
3379 memset(table
, 0, tableSize
);
3380 for (pn3
= pn2
->pn_head
; pn3
; pn3
= pn3
->pn_next
) {
3381 if (pn3
->pn_type
== TOK_DEFAULT
)
3383 i
= JSVAL_TO_INT(pn3
->pn_val
);
3385 JS_ASSERT((uint32
)i
< tableLength
);
3390 JS_ASSERT(switchOp
== JSOP_LOOKUPSWITCH
);
3392 /* Fill in the number of cases. */
3393 SET_INDEX(pc
, caseCount
);
3398 * After this point, all control flow involving JSOP_TABLESWITCH
3399 * must set ok and goto out to exit this function. To keep things
3400 * simple, all switchOp cases exit that way.
3402 MUST_FLOW_THROUGH("out");
3405 * We have already generated at least one big jump so we must
3406 * explicitly add span dependencies for the switch jumps. When
3407 * called below, js_SetJumpOffset can only do it when patching
3408 * the first big jump or when cg->spanDeps is null.
3410 if (!AddSwitchSpanDeps(cx
, cg
, CG_CODE(cg
, top
)))
3414 if (constPropagated
) {
3416 * Skip switchOp, as we are not setting jump offsets in the two
3417 * for loops below. We'll restore CG_NEXT(cg) from savepc after,
3418 * unless there was an error.
3420 savepc
= CG_NEXT(cg
);
3421 CG_NEXT(cg
) = pc
+ 1;
3422 if (switchOp
== JSOP_TABLESWITCH
) {
3423 for (i
= 0; i
< (jsint
)tableLength
; i
++) {
3426 (pn4
= pn3
->pn_left
) != NULL
&&
3427 pn4
->pn_type
== TOK_NAME
) {
3428 /* Note a propagated constant with the const's name. */
3429 JS_ASSERT(!pn4
->maybeExpr());
3430 ale
= cg
->atomList
.add(cg
->parser
, pn4
->pn_atom
);
3434 if (js_NewSrcNote2(cx
, cg
, SRC_LABEL
, (ptrdiff_t)
3435 ALE_INDEX(ale
)) < 0) {
3439 pc
+= JUMP_OFFSET_LEN
;
3442 for (pn3
= pn2
->pn_head
; pn3
; pn3
= pn3
->pn_next
) {
3444 if (pn4
&& pn4
->pn_type
== TOK_NAME
) {
3445 /* Note a propagated constant with the const's name. */
3446 JS_ASSERT(!pn4
->maybeExpr());
3447 ale
= cg
->atomList
.add(cg
->parser
, pn4
->pn_atom
);
3451 if (js_NewSrcNote2(cx
, cg
, SRC_LABEL
, (ptrdiff_t)
3452 ALE_INDEX(ale
)) < 0) {
3456 pc
+= INDEX_LEN
+ JUMP_OFFSET_LEN
;
3459 CG_NEXT(cg
) = savepc
;
3463 /* Emit code for each case's statements, copying pn_offset up to pn3. */
3464 for (pn3
= pn2
->pn_head
; pn3
; pn3
= pn3
->pn_next
) {
3465 if (switchOp
== JSOP_CONDSWITCH
&& pn3
->pn_type
!= TOK_DEFAULT
)
3466 CHECK_AND_SET_JUMP_OFFSET_AT_CUSTOM(cx
, cg
, pn3
->pn_offset
, goto bad
);
3467 pn4
= pn3
->pn_right
;
3468 ok
= js_EmitTree(cx
, cg
, pn4
);
3471 pn3
->pn_offset
= pn4
->pn_offset
;
3472 if (pn3
->pn_type
== TOK_DEFAULT
)
3473 off
= pn3
->pn_offset
- top
;
3477 /* If no default case, offset for default is to end of switch. */
3478 off
= CG_OFFSET(cg
) - top
;
3481 /* We better have set "off" by now. */
3482 JS_ASSERT(off
!= -1);
3484 /* Set the default offset (to end of switch if no default). */
3485 if (switchOp
== JSOP_CONDSWITCH
) {
3487 JS_ASSERT(defaultOffset
!= -1);
3488 ok
= js_SetJumpOffset(cx
, cg
, CG_CODE(cg
, defaultOffset
),
3489 off
- (defaultOffset
- top
));
3493 pc
= CG_CODE(cg
, top
);
3494 ok
= js_SetJumpOffset(cx
, cg
, pc
, off
);
3497 pc
+= JUMP_OFFSET_LEN
;
3500 /* Set the SRC_SWITCH note's offset operand to tell end of switch. */
3501 off
= CG_OFFSET(cg
) - top
;
3502 ok
= js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 0, off
);
3506 if (switchOp
== JSOP_TABLESWITCH
) {
3507 /* Skip over the already-initialized switch bounds. */
3508 pc
+= 2 * JUMP_OFFSET_LEN
;
3510 /* Fill in the jump table, if there is one. */
3511 for (i
= 0; i
< (jsint
)tableLength
; i
++) {
3513 off
= pn3
? pn3
->pn_offset
- top
: 0;
3514 ok
= js_SetJumpOffset(cx
, cg
, pc
, off
);
3517 pc
+= JUMP_OFFSET_LEN
;
3519 } else if (switchOp
== JSOP_LOOKUPSWITCH
) {
3520 /* Skip over the already-initialized number of cases. */
3523 for (pn3
= pn2
->pn_head
; pn3
; pn3
= pn3
->pn_next
) {
3524 if (pn3
->pn_type
== TOK_DEFAULT
)
3526 if (!js_AtomizePrimitiveValue(cx
, pn3
->pn_val
, &atom
))
3528 ale
= cg
->atomList
.add(cg
->parser
, atom
);
3531 SET_INDEX(pc
, ALE_INDEX(ale
));
3534 off
= pn3
->pn_offset
- top
;
3535 ok
= js_SetJumpOffset(cx
, cg
, pc
, off
);
3538 pc
+= JUMP_OFFSET_LEN
;
3546 ok
= js_PopStatementCG(cx
, cg
);
3548 #if JS_HAS_BLOCK_SCOPE
3549 if (ok
&& pn
->pn_right
->pn_type
== TOK_LEXICALSCOPE
)
3550 EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK
, count
);
3561 js_EmitFunctionScript(JSContext
*cx
, JSCodeGenerator
*cg
, JSParseNode
*body
)
3563 if (cg
->flags
& TCF_FUN_IS_GENERATOR
) {
3564 /* JSOP_GENERATOR must be the first instruction. */
3565 CG_SWITCH_TO_PROLOG(cg
);
3566 JS_ASSERT(CG_NEXT(cg
) == CG_BASE(cg
));
3567 if (js_Emit1(cx
, cg
, JSOP_GENERATOR
) < 0)
3569 CG_SWITCH_TO_MAIN(cg
);
3572 * Emit a trace hint opcode only if not in a generator, since generators
3573 * are not yet traced and both want to be the first instruction.
3575 if (js_Emit1(cx
, cg
, JSOP_TRACE
) < 0)
3579 if (cg
->flags
& TCF_FUN_UNBRAND_THIS
) {
3580 if (js_Emit1(cx
, cg
, JSOP_UNBRANDTHIS
) < 0)
3584 return js_EmitTree(cx
, cg
, body
) &&
3585 js_Emit1(cx
, cg
, JSOP_STOP
) >= 0 &&
3586 js_NewScriptFromCG(cx
, cg
);
3589 /* A macro for inlining at the top of js_EmitTree (whence it came). */
3590 #define UPDATE_LINE_NUMBER_NOTES(cx, cg, line) \
3592 uintN line_ = (line); \
3593 uintN delta_ = line_ - CG_CURRENT_LINE(cg); \
3594 if (delta_ != 0) { \
3596 * Encode any change in the current source line number by using \
3597 * either several SRC_NEWLINE notes or just one SRC_SETLINE note, \
3598 * whichever consumes less space. \
3600 * NB: We handle backward line number deltas (possible with for \
3601 * loops where the update part is emitted after the body, but its \
3602 * line number is <= any line number in the body) here by letting \
3603 * unsigned delta_ wrap to a very large number, which triggers a \
3606 CG_CURRENT_LINE(cg) = line_; \
3607 if (delta_ >= (uintN)(2 + ((line_ > SN_3BYTE_OFFSET_MASK)<<1))) { \
3608 if (js_NewSrcNote2(cx, cg, SRC_SETLINE, (ptrdiff_t)line_) < 0)\
3612 if (js_NewSrcNote(cx, cg, SRC_NEWLINE) < 0) \
3614 } while (--delta_ != 0); \
3619 /* A function, so that we avoid macro-bloating all the other callsites. */
3621 UpdateLineNumberNotes(JSContext
*cx
, JSCodeGenerator
*cg
, uintN line
)
3623 UPDATE_LINE_NUMBER_NOTES(cx
, cg
, line
);
3628 MaybeEmitVarDecl(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp prologOp
,
3629 JSParseNode
*pn
, jsatomid
*result
)
3632 JSAtomListElement
*ale
;
3634 if (pn
->pn_cookie
!= FREE_UPVAR_COOKIE
) {
3635 atomIndex
= (jsatomid
) UPVAR_FRAME_SLOT(pn
->pn_cookie
);
3637 ale
= cg
->atomList
.add(cg
->parser
, pn
->pn_atom
);
3640 atomIndex
= ALE_INDEX(ale
);
3643 if (JOF_OPTYPE(pn
->pn_op
) == JOF_ATOM
&&
3644 (!cg
->inFunction() || (cg
->flags
& TCF_FUN_HEAVYWEIGHT
))) {
3645 CG_SWITCH_TO_PROLOG(cg
);
3646 if (!UpdateLineNumberNotes(cx
, cg
, pn
->pn_pos
.begin
.lineno
))
3648 EMIT_INDEX_OP(prologOp
, atomIndex
);
3649 CG_SWITCH_TO_MAIN(cg
);
3653 *result
= atomIndex
;
3657 #if JS_HAS_DESTRUCTURING
3660 (*DestructuringDeclEmitter
)(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp prologOp
,
3664 EmitDestructuringDecl(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp prologOp
,
3667 JS_ASSERT(pn
->pn_type
== TOK_NAME
);
3668 if (!BindNameToSlot(cx
, cg
, pn
))
3671 JS_ASSERT(PN_OP(pn
) != JSOP_ARGUMENTS
&& PN_OP(pn
) != JSOP_CALLEE
);
3672 return MaybeEmitVarDecl(cx
, cg
, prologOp
, pn
, NULL
);
3676 EmitDestructuringDecls(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp prologOp
,
3679 JSParseNode
*pn2
, *pn3
;
3680 DestructuringDeclEmitter emitter
;
3682 if (pn
->pn_type
== TOK_RB
) {
3683 for (pn2
= pn
->pn_head
; pn2
; pn2
= pn2
->pn_next
) {
3684 if (pn2
->pn_type
== TOK_COMMA
)
3686 emitter
= (pn2
->pn_type
== TOK_NAME
)
3687 ? EmitDestructuringDecl
3688 : EmitDestructuringDecls
;
3689 if (!emitter(cx
, cg
, prologOp
, pn2
))
3693 JS_ASSERT(pn
->pn_type
== TOK_RC
);
3694 for (pn2
= pn
->pn_head
; pn2
; pn2
= pn2
->pn_next
) {
3695 pn3
= pn2
->pn_right
;
3696 emitter
= (pn3
->pn_type
== TOK_NAME
)
3697 ? EmitDestructuringDecl
3698 : EmitDestructuringDecls
;
3699 if (!emitter(cx
, cg
, prologOp
, pn3
))
3707 EmitDestructuringOpsHelper(JSContext
*cx
, JSCodeGenerator
*cg
, JSParseNode
*pn
);
3710 EmitDestructuringLHS(JSContext
*cx
, JSCodeGenerator
*cg
, JSParseNode
*pn
)
3715 * Now emit the lvalue opcode sequence. If the lvalue is a nested
3716 * destructuring initialiser-form, call ourselves to handle it, then
3717 * pop the matched value. Otherwise emit an lvalue bytecode sequence
3718 * ending with a JSOP_ENUMELEM or equivalent op.
3720 if (pn
->pn_type
== TOK_RB
|| pn
->pn_type
== TOK_RC
) {
3721 if (!EmitDestructuringOpsHelper(cx
, cg
, pn
))
3723 if (js_Emit1(cx
, cg
, JSOP_POP
) < 0)
3726 if (pn
->pn_type
== TOK_NAME
) {
3727 if (!BindNameToSlot(cx
, cg
, pn
))
3729 if (pn
->isConst() && !pn
->isInitialized())
3730 return js_Emit1(cx
, cg
, JSOP_POP
) >= 0;
3733 switch (pn
->pn_op
) {
3736 * NB: pn is a PN_NAME node, not a PN_BINARY. Nevertheless,
3737 * we want to emit JSOP_ENUMELEM, which has format JOF_ELEM.
3738 * So here and for JSOP_ENUMCONSTELEM, we use EmitElemOp.
3740 if (!EmitElemOp(cx
, pn
, JSOP_ENUMELEM
, cg
))
3745 if (!EmitElemOp(cx
, pn
, JSOP_ENUMCONSTELEM
, cg
))
3750 slot
= (jsuint
) pn
->pn_cookie
;
3751 EMIT_UINT16_IMM_OP(JSOP_SETLOCALPOP
, slot
);
3756 slot
= (jsuint
) pn
->pn_cookie
;
3757 EMIT_UINT16_IMM_OP(PN_OP(pn
), slot
);
3758 if (js_Emit1(cx
, cg
, JSOP_POP
) < 0)
3766 top
= CG_OFFSET(cg
);
3767 if (!js_EmitTree(cx
, cg
, pn
))
3769 if (js_NewSrcNote2(cx
, cg
, SRC_PCBASE
, CG_OFFSET(cg
) - top
) < 0)
3771 if (js_Emit1(cx
, cg
, JSOP_ENUMELEM
) < 0)
3785 * Recursive helper for EmitDestructuringOps.
3787 * Given a value to destructure on the stack, walk over an object or array
3788 * initialiser at pn, emitting bytecodes to match property values and store
3789 * them in the lvalues identified by the matched property names.
3792 EmitDestructuringOpsHelper(JSContext
*cx
, JSCodeGenerator
*cg
, JSParseNode
*pn
)
3795 JSParseNode
*pn2
, *pn3
;
3799 intN stackDepth
= cg
->stackDepth
;
3800 JS_ASSERT(stackDepth
!= 0);
3801 JS_ASSERT(pn
->pn_arity
== PN_LIST
);
3802 JS_ASSERT(pn
->pn_type
== TOK_RB
|| pn
->pn_type
== TOK_RC
);
3805 if (pn
->pn_count
== 0) {
3806 /* Emit a DUP;POP sequence for the decompiler. */
3807 return js_Emit1(cx
, cg
, JSOP_DUP
) >= 0 &&
3808 js_Emit1(cx
, cg
, JSOP_POP
) >= 0;
3812 for (pn2
= pn
->pn_head
; pn2
; pn2
= pn2
->pn_next
) {
3814 * Duplicate the value being destructured to use as a reference base.
3815 * If dup is not the first one, annotate it for the decompiler.
3817 if (pn2
!= pn
->pn_head
&& js_NewSrcNote(cx
, cg
, SRC_CONTINUE
) < 0)
3819 if (js_Emit1(cx
, cg
, JSOP_DUP
) < 0)
3823 * Now push the property name currently being matched, which is either
3824 * the array initialiser's current index, or the current property name
3825 * "label" on the left of a colon in the object initialiser. Set pn3
3826 * to the lvalue node, which is in the value-initializing position.
3829 if (pn
->pn_type
== TOK_RB
) {
3830 if (!EmitNumberOp(cx
, index
, cg
))
3834 JS_ASSERT(pn
->pn_type
== TOK_RC
);
3835 JS_ASSERT(pn2
->pn_type
== TOK_COLON
);
3837 if (pn3
->pn_type
== TOK_NUMBER
) {
3839 * If we are emitting an object destructuring initialiser,
3840 * annotate the index op with SRC_INITPROP so we know we are
3841 * not decompiling an array initialiser.
3843 if (js_NewSrcNote(cx
, cg
, SRC_INITPROP
) < 0)
3845 if (!EmitNumberOp(cx
, pn3
->pn_dval
, cg
))
3848 JS_ASSERT(pn3
->pn_type
== TOK_STRING
||
3849 pn3
->pn_type
== TOK_NAME
);
3850 if (!EmitAtomOp(cx
, pn3
, JSOP_GETPROP
, cg
))
3852 doElemOp
= JS_FALSE
;
3854 pn3
= pn2
->pn_right
;
3859 * Ok, get the value of the matching property name. This leaves
3860 * that value on top of the value being destructured, so the stack
3861 * is one deeper than when we started.
3863 if (js_Emit1(cx
, cg
, JSOP_GETELEM
) < 0)
3865 JS_ASSERT(cg
->stackDepth
== stackDepth
+ 1);
3868 /* Nullary comma node makes a hole in the array destructurer. */
3869 if (pn3
->pn_type
== TOK_COMMA
&& pn3
->pn_arity
== PN_NULLARY
) {
3870 JS_ASSERT(pn
->pn_type
== TOK_RB
);
3871 JS_ASSERT(pn2
== pn3
);
3872 if (js_Emit1(cx
, cg
, JSOP_POP
) < 0)
3875 if (!EmitDestructuringLHS(cx
, cg
, pn3
))
3879 JS_ASSERT(cg
->stackDepth
== stackDepth
);
3887 OpToDeclType(JSOp op
)
3891 return SRC_DECL_LET
;
3893 return SRC_DECL_CONST
;
3895 return SRC_DECL_VAR
;
3897 return SRC_DECL_NONE
;
3902 EmitDestructuringOps(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp prologOp
,
3906 * If we're called from a variable declaration, help the decompiler by
3907 * annotating the first JSOP_DUP that EmitDestructuringOpsHelper emits.
3908 * If the destructuring initialiser is empty, our helper will emit a
3909 * JSOP_DUP followed by a JSOP_POP for the decompiler.
3911 if (js_NewSrcNote2(cx
, cg
, SRC_DESTRUCT
, OpToDeclType(prologOp
)) < 0)
3915 * Call our recursive helper to emit the destructuring assignments and
3916 * related stack manipulations.
3918 return EmitDestructuringOpsHelper(cx
, cg
, pn
);
3922 EmitGroupAssignment(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp prologOp
,
3923 JSParseNode
*lhs
, JSParseNode
*rhs
)
3925 jsuint depth
, limit
, i
, nslots
;
3928 depth
= limit
= (uintN
) cg
->stackDepth
;
3929 for (pn
= rhs
->pn_head
; pn
; pn
= pn
->pn_next
) {
3930 if (limit
== JS_BIT(16)) {
3931 ReportCompileErrorNumber(cx
, CG_TS(cg
), rhs
, JSREPORT_ERROR
, JSMSG_ARRAY_INIT_TOO_BIG
);
3935 /* MaybeEmitGroupAssignment won't call us if rhs is holey. */
3936 JS_ASSERT(!(pn
->pn_type
== TOK_COMMA
&& pn
->pn_arity
== PN_NULLARY
));
3937 if (!js_EmitTree(cx
, cg
, pn
))
3942 if (js_NewSrcNote2(cx
, cg
, SRC_GROUPASSIGN
, OpToDeclType(prologOp
)) < 0)
3946 for (pn
= lhs
->pn_head
; pn
; pn
= pn
->pn_next
, ++i
) {
3947 /* MaybeEmitGroupAssignment requires lhs->pn_count <= rhs->pn_count. */
3948 JS_ASSERT(i
< limit
);
3949 jsint slot
= AdjustBlockSlot(cx
, cg
, i
);
3952 EMIT_UINT16_IMM_OP(JSOP_GETLOCAL
, slot
);
3954 if (pn
->pn_type
== TOK_COMMA
&& pn
->pn_arity
== PN_NULLARY
) {
3955 if (js_Emit1(cx
, cg
, JSOP_POP
) < 0)
3958 if (!EmitDestructuringLHS(cx
, cg
, pn
))
3963 nslots
= limit
- depth
;
3964 EMIT_UINT16_IMM_OP(JSOP_POPN
, nslots
);
3965 cg
->stackDepth
= (uintN
) depth
;
3970 * Helper called with pop out param initialized to a JSOP_POP* opcode. If we
3971 * can emit a group assignment sequence, which results in 0 stack depth delta,
3972 * we set *pop to JSOP_NOP so callers can veto emitting pn followed by a pop.
3975 MaybeEmitGroupAssignment(JSContext
*cx
, JSCodeGenerator
*cg
, JSOp prologOp
,
3976 JSParseNode
*pn
, JSOp
*pop
)
3978 JSParseNode
*lhs
, *rhs
;
3980 JS_ASSERT(pn
->pn_type
== TOK_ASSIGN
);
3981 JS_ASSERT(*pop
== JSOP_POP
|| *pop
== JSOP_POPV
);
3984 if (lhs
->pn_type
== TOK_RB
&& rhs
->pn_type
== TOK_RB
&&
3985 !(rhs
->pn_xflags
& PNX_HOLEY
) &&
3986 lhs
->pn_count
<= rhs
->pn_count
) {
3987 if (!EmitGroupAssignment(cx
, cg
, prologOp
, lhs
, rhs
))
3994 #endif /* JS_HAS_DESTRUCTURING */
/*
 * Emit bytecode for a var/const/let declaration list 'pn'.  inLetHead is
 * true when this list is the parenthesized head of a let block/expression;
 * on that path a SRC_DECL srcnote index is stored through *headNoteIndex
 * (defaulted to -1 below for the early-return case).
 *
 * NOTE(review): the text of this function is garbled -- physical lines were
 * split mid-expression and a number of original lines are missing entirely
 * (the embedded original line numbers jump, e.g. 4006 -> 4011, 4033 -> 4036).
 * Code is left byte-identical; only comments were added.  Recover the full
 * text from version control before making code changes.
 */
3997 EmitVariables(JSContext
*cx
, JSCodeGenerator
*cg
, JSParseNode
*pn
,
3998 JSBool inLetHead
, ptrdiff_t *headNoteIndex
)
4000 bool let
, forInVar
, first
;
4001 #if JS_HAS_BLOCK_SCOPE
4002 bool forInLet
, popScope
;
4003 JSStmtInfo
*stmt
, *scopeStmt
;
4005 ptrdiff_t off
, noteIndex
, tmp
;
4006 JSParseNode
*pn2
, *pn3
, *next
;
4011 /* Default in case of JS_HAS_BLOCK_SCOPE early return, below. */
4012 *headNoteIndex
= -1;
4015 * Let blocks and expressions have a parenthesized head in which the new
4016 * scope is not yet open. Initializer evaluation uses the parent node's
4017 * lexical scope. If popScope is true below, then we hide the top lexical
4018 * block from any calls to BindNameToSlot hiding in pn2->pn_expr so that
4019 * it won't find any names in the new let block.
4021 * The same goes for let declarations in the head of any kind of for loop.
4022 * Unlike a let declaration 'let x = i' within a block, where x is hoisted
4023 * to the start of the block, a 'for (let x = i...) ...' loop evaluates i
4024 * in the containing scope, and puts x in the loop body's scope.
4026 let
= (pn
->pn_op
== JSOP_NOP
);
4027 forInVar
= (pn
->pn_xflags
& PNX_FORINVAR
) != 0;
4028 #if JS_HAS_BLOCK_SCOPE
4029 forInLet
= let
&& forInVar
;
4030 popScope
= (inLetHead
|| (let
&& (cg
->flags
& TCF_IN_FOR_INIT
)));
4033 scopeStmt
= cg
->topScopeStmt
;
4036 else stmt
= scopeStmt
= NULL
; /* quell GCC overwarning */
4038 JS_ASSERT(!popScope
|| let
);
/* Walk each declarator pn2 in the list; 'first' marks the list head and
   'next' is captured up front.  (Loop exit lines are missing from this
   extraction -- TODO confirm against VCS.) */
4041 off
= noteIndex
= -1;
4042 for (pn2
= pn
->pn_head
; ; pn2
= next
) {
4043 first
= pn2
== pn
->pn_head
;
4044 next
= pn2
->pn_next
;
4046 if (pn2
->pn_type
!= TOK_NAME
) {
4047 #if JS_HAS_DESTRUCTURING
4048 if (pn2
->pn_type
== TOK_RB
|| pn2
->pn_type
== TOK_RC
) {
4050 * Emit variable binding ops, but not destructuring ops.
4051 * The parser (see Variables, jsparse.c) has ensured that
4052 * our caller will be the TOK_FOR/TOK_IN case in js_EmitTree,
4053 * and that case will emit the destructuring code only after
4054 * emitting an enumerating opcode and a branch that tests
4055 * whether the enumeration ended.
4057 JS_ASSERT(forInVar
);
4058 JS_ASSERT(pn
->pn_count
== 1);
4059 if (!EmitDestructuringDecls(cx
, cg
, PN_OP(pn
), pn2
))
4066 * A destructuring initialiser assignment preceded by var will
4067 * never occur to the left of 'in' in a for-in loop. As with 'for
4068 * (var x = i in o)...', this will cause the entire 'var [a, b] =
4069 * i' to be hoisted out of the loop.
4071 JS_ASSERT(pn2
->pn_type
== TOK_ASSIGN
);
4072 JS_ASSERT(!forInVar
);
4075 * To allow the front end to rewrite var f = x; as f = x; when a
4076 * function f(){} precedes the var, detect simple name assignment
4077 * here and initialize the name.
4079 #if !JS_HAS_DESTRUCTURING
4080 JS_ASSERT(pn2
->pn_left
->pn_type
== TOK_NAME
);
4082 if (pn2
->pn_left
->pn_type
== TOK_NAME
)
4085 pn3
= pn2
->pn_right
;
4090 #if JS_HAS_DESTRUCTURING
4091 if (pn
->pn_count
== 1) {
4093 * If this is the only destructuring assignment in the list,
4094 * try to optimize to a group assignment. If we're in a let
4095 * head, pass JSOP_POP rather than the pseudo-prolog JSOP_NOP
4096 * in pn->pn_op, to suppress a second (and misplaced) 'let'.
4098 JS_ASSERT(noteIndex
< 0 && !pn2
->pn_next
);
4100 if (!MaybeEmitGroupAssignment(cx
, cg
,
4101 inLetHead
? JSOP_POP
: PN_OP(pn
),
4105 if (op
== JSOP_NOP
) {
4106 pn
->pn_xflags
= (pn
->pn_xflags
& ~PNX_POPVAR
) | PNX_GROUPINIT
;
4112 if (!EmitDestructuringDecls(cx
, cg
, PN_OP(pn
), pn3
))
4115 if (!js_EmitTree(cx
, cg
, pn2
->pn_right
))
4119 * Veto pn->pn_op if inLetHead to avoid emitting a SRC_DESTRUCT
4120 * that's redundant with respect to the SRC_DECL/SRC_DECL_LET that
4121 * we will emit at the bottom of this function.
4123 if (!EmitDestructuringOps(cx
, cg
,
4124 inLetHead
? JSOP_POP
: PN_OP(pn
),
4133 * Load initializer early to share code above that jumps to do_name.
4134 * NB: if this var redeclares an existing binding, then pn2 is linked
4135 * on its definition's use-chain and pn_expr has been overlayed with
4138 pn3
= pn2
->maybeExpr();
4141 if (!BindNameToSlot(cx
, cg
, pn2
))
4145 if (op
== JSOP_ARGUMENTS
) {
4146 /* JSOP_ARGUMENTS => no initializer */
4147 JS_ASSERT(!pn3
&& !let
);
4150 atomIndex
= 0; /* quell GCC overwarning */
4153 JS_ASSERT(op
!= JSOP_CALLEE
);
4154 JS_ASSERT(pn2
->pn_cookie
!= FREE_UPVAR_COOKIE
|| !let
);
4155 if (!MaybeEmitVarDecl(cx
, cg
, PN_OP(pn
), pn2
, &atomIndex
))
4159 JS_ASSERT(!forInVar
);
4160 if (op
== JSOP_SETNAME
) {
4162 EMIT_INDEX_OP(JSOP_BINDNAME
, atomIndex
);
4164 if (pn
->pn_op
== JSOP_DEFCONST
&&
4165 !js_DefineCompileTimeConstant(cx
, cg
, pn2
->pn_atom
, pn3
)) {
4169 #if JS_HAS_BLOCK_SCOPE
4170 /* Evaluate expr in the outer lexical scope if requested. */
4172 cg
->topStmt
= stmt
->down
;
4173 cg
->topScopeStmt
= scopeStmt
->downScope
;
4177 oldflags
= cg
->flags
;
4178 cg
->flags
&= ~TCF_IN_FOR_INIT
;
4179 if (!js_EmitTree(cx
, cg
, pn3
))
4181 cg
->flags
|= oldflags
& TCF_IN_FOR_INIT
;
4183 #if JS_HAS_BLOCK_SCOPE
4186 cg
->topScopeStmt
= scopeStmt
;
4193 * The parser rewrites 'for (var x = i in o)' to hoist 'var x = i' --
4194 * likewise 'for (let x = i in o)' becomes 'i; for (let x in o)' using
4195 * a TOK_SEQ node to make the two statements appear as one. Therefore
4196 * if this declaration is part of a for-in loop head, we do not need to
4197 * emit op or any source note. Our caller, the TOK_FOR/TOK_IN case in
4198 * js_EmitTree, will annotate appropriately.
4200 JS_ASSERT_IF(pn2
->pn_defn
, pn3
== pn2
->pn_expr
);
4202 JS_ASSERT(pn
->pn_count
== 1);
4209 js_NewSrcNote2(cx
, cg
, SRC_DECL
,
4210 (pn
->pn_op
== JSOP_DEFCONST
)
4212 : (pn
->pn_op
== JSOP_DEFVAR
)
4214 : SRC_DECL_LET
) < 0) {
4217 if (op
== JSOP_ARGUMENTS
) {
4218 if (js_Emit1(cx
, cg
, op
) < 0)
4220 } else if (pn2
->pn_cookie
!= FREE_UPVAR_COOKIE
) {
4221 EMIT_UINT16_IMM_OP(op
, atomIndex
);
4223 EMIT_INDEX_OP(op
, atomIndex
);
4226 #if JS_HAS_DESTRUCTURING
4229 tmp
= CG_OFFSET(cg
);
4230 if (noteIndex
>= 0) {
4231 if (!js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 0, tmp
-off
))
4237 noteIndex
= js_NewSrcNote2(cx
, cg
, SRC_PCDELTA
, 0);
4238 if (noteIndex
< 0 || js_Emit1(cx
, cg
, JSOP_POP
) < 0)
4242 /* If this is a let head, emit and return a srcnote on the pop. */
4244 *headNoteIndex
= js_NewSrcNote(cx
, cg
, SRC_DECL
);
4245 if (*headNoteIndex
< 0)
4247 if (!(pn
->pn_xflags
& PNX_POPVAR
))
4248 return js_Emit1(cx
, cg
, JSOP_NOP
) >= 0;
/* Non-let-head tail: pop the last value only when PNX_POPVAR is set. */
4251 return !(pn
->pn_xflags
& PNX_POPVAR
) || js_Emit1(cx
, cg
, JSOP_POP
) >= 0;
/*
 * Debug-only helper (DEBUG_brendan / DEBUG_mrbkap builds): walk the code
 * generator's source notes, accumulating each note's SN_DELTA into 'offset',
 * looking for a gettable note whose accumulated offset equals the next op's
 * offset (CG_OFFSET(cg)).
 *
 * NOTE(review): text is garbled -- lines are split mid-expression and some
 * original lines (loop increment/body braces, the return statements) are
 * missing from this extraction.  Code left byte-identical; only comments
 * added.  Recover the full text from VCS before editing.
 */
4254 #if defined DEBUG_brendan || defined DEBUG_mrbkap
4256 GettableNoteForNextOp(JSCodeGenerator
*cg
)
4258 ptrdiff_t offset
, target
;
4259 jssrcnote
*sn
, *end
;
/* target is the bytecode offset of the op about to be emitted. */
4262 target
= CG_OFFSET(cg
);
4263 for (sn
= CG_NOTES(cg
), end
= sn
+ CG_NOTE_COUNT(cg
); sn
< end
;
4265 if (offset
== target
&& SN_IS_GETTABLE(sn
))
4267 offset
+= SN_DELTA(sn
);
4273 /* Top-level named functions need a nop for decompilation. */
4275 EmitFunctionDefNop(JSContext
*cx
, JSCodeGenerator
*cg
, uintN index
)
4277 return js_NewSrcNote2(cx
, cg
, SRC_FUNCDEF
, (ptrdiff_t)index
) >= 0 &&
4278 js_Emit1(cx
, cg
, JSOP_NOP
) >= 0;
4282 EmitNewInit(JSContext
*cx
, JSCodeGenerator
*cg
, JSProtoKey key
, JSParseNode
*pn
, int sharpnum
)
4284 if (js_Emit2(cx
, cg
, JSOP_NEWINIT
, (jsbytecode
) key
) < 0)
4286 #if JS_HAS_SHARP_VARS
4287 if (cg
->hasSharps()) {
4288 if (pn
->pn_count
!= 0)
4289 EMIT_UINT16_IMM_OP(JSOP_SHARPINIT
, cg
->sharpSlotBase
);
4291 EMIT_UINT16PAIR_IMM_OP(JSOP_DEFSHARP
, cg
->sharpSlotBase
, sharpnum
);
4293 JS_ASSERT(sharpnum
< 0);
4300 EmitEndInit(JSContext
*cx
, JSCodeGenerator
*cg
, uint32 count
)
4302 #if JS_HAS_SHARP_VARS
4303 /* Emit an op for sharp array cleanup and decompilation. */
4304 if (cg
->hasSharps() && count
!= 0)
4305 EMIT_UINT16_IMM_OP(JSOP_SHARPINIT
, cg
->sharpSlotBase
);
4307 return js_Emit1(cx
, cg
, JSOP_ENDINIT
) >= 0;
4310 /* See the SRC_FOR source note offsetBias comments later in this file. */
/*
 * Both opcodes must be one byte long: the SRC_FOR note's offsetBias of 1
 * (see "the SRC_FOR note has offsetBias 1 (JSOP_{NOP,POP}_LENGTH)" in the
 * TOK_FOR case below) depends on JSOP_NOP/JSOP_POP each occupying 1 byte.
 */
4311 JS_STATIC_ASSERT(JSOP_NOP_LENGTH
== 1);
4312 JS_STATIC_ASSERT(JSOP_POP_LENGTH
== 1);
4315 js_EmitTree(JSContext
*cx
, JSCodeGenerator
*cg
, JSParseNode
*pn
)
4317 JSBool ok
, useful
, wantval
;
4318 JSStmtInfo
*stmt
, stmtInfo
;
4319 ptrdiff_t top
, off
, tmp
, beq
, jmp
;
4320 JSParseNode
*pn2
, *pn3
;
4322 JSAtomListElement
*ale
;
4325 ptrdiff_t noteIndex
;
4326 JSSrcNoteType noteType
;
4331 #if JS_HAS_SHARP_VARS
4335 JS_CHECK_RECURSION(cx
, return JS_FALSE
);
4339 pn
->pn_offset
= top
= CG_OFFSET(cg
);
4341 /* Emit notes to tell the current bytecode's source line number. */
4342 UPDATE_LINE_NUMBER_NOTES(cx
, cg
, pn
->pn_pos
.begin
.lineno
);
4344 switch (pn
->pn_type
) {
4350 #if JS_HAS_XML_SUPPORT
4351 if (pn
->pn_arity
== PN_NULLARY
) {
4352 if (js_Emit1(cx
, cg
, JSOP_GETFUNNS
) < 0)
4358 fun
= (JSFunction
*) pn
->pn_funbox
->object
;
4359 JS_ASSERT(FUN_INTERPRETED(fun
));
4360 if (fun
->u
.i
.script
) {
4362 * This second pass is needed to emit JSOP_NOP with a source note
4363 * for the already-emitted function definition prolog opcode. See
4364 * comments in the TOK_LC case.
4366 JS_ASSERT(pn
->pn_op
== JSOP_NOP
);
4367 JS_ASSERT(cg
->inFunction());
4368 if (!EmitFunctionDefNop(cx
, cg
, pn
->pn_index
))
4373 JS_ASSERT_IF(cx
->options
& JSOPTION_ANONFUNFIX
,
4375 (!pn
->pn_used
&& !pn
->isTopLevel()) ||
4376 (fun
->flags
& JSFUN_LAMBDA
));
4378 JS_ASSERT_IF(pn
->pn_funbox
->tcflags
& TCF_FUN_HEAVYWEIGHT
,
4379 FUN_KIND(fun
) == JSFUN_INTERPRETED
);
4381 /* Generate code for the function's body. */
4382 void *cg2mark
= cg
->codePool
->getMark();
4383 JSCodeGenerator
*cg2space
;
4384 cg
->codePool
->allocateType
<JSCodeGenerator
>(cg2space
);
4386 js_ReportOutOfScriptQuota(cx
);
4389 JSCodeGenerator
*cg2
=
4390 new (cg2space
) JSCodeGenerator(cg
->parser
,
4391 cg
->codePool
, cg
->notePool
,
4392 pn
->pn_pos
.begin
.lineno
);
4397 cg2
->flags
= pn
->pn_funbox
->tcflags
| TCF_IN_FUNCTION
;
4398 #if JS_HAS_SHARP_VARS
4399 if (cg2
->flags
& TCF_HAS_SHARPS
) {
4400 cg2
->sharpSlotBase
= fun
->sharpSlotBase(cx
);
4401 if (cg2
->sharpSlotBase
< 0)
4406 cg2
->funbox
= pn
->pn_funbox
;
4410 * jsparse.cpp:SetStaticLevel limited static nesting depth to fit in 16
4411 * bits and to reserve the all-ones value, thereby reserving the magic
4412 * FREE_UPVAR_COOKIE value. Note the cg2->staticLevel assignment below.
4414 JS_ASSERT(cg
->staticLevel
< JS_BITMASK(16) - 1);
4415 cg2
->staticLevel
= cg
->staticLevel
+ 1;
4417 /* We measured the max scope depth when we parsed the function. */
4418 JS_SCOPE_DEPTH_METERING(cg2
->maxScopeDepth
= uint16(-1));
4419 if (!js_EmitFunctionScript(cx
, cg2
, pn
->pn_body
))
4422 cg2
->~JSCodeGenerator();
4423 cg
->codePool
->release(cg2mark
);
4428 /* Make the function object a literal in the outer script's pool. */
4429 index
= cg
->objectList
.index(pn
->pn_funbox
);
4431 /* Emit a bytecode pointing to the closure object in its immediate. */
4433 if (op
!= JSOP_NOP
) {
4434 if ((pn
->pn_funbox
->tcflags
& TCF_GENEXP_LAMBDA
) &&
4435 js_NewSrcNote(cx
, cg
, SRC_GENEXP
) < 0) {
4438 EMIT_INDEX_OP(op
, index
);
4443 * For a script we emit the code as we parse. Thus the bytecode for
4444 * top-level functions should go in the prolog to predefine their
4445 * names in the variable object before the already-generated main code
4446 * is executed. This extra work for top-level scripts is not necessary
4447 * when we emit the code for a function. It is fully parsed prior to
4448 * invocation of the emitter and calls to js_EmitTree for function
4449 * definitions can be scheduled before generating the rest of code.
4451 if (!cg
->inFunction()) {
4452 JS_ASSERT(!cg
->topStmt
);
4453 CG_SWITCH_TO_PROLOG(cg
);
4454 op
= FUN_FLAT_CLOSURE(fun
) ? JSOP_DEFFUN_FC
: JSOP_DEFFUN
;
4455 EMIT_INDEX_OP(op
, index
);
4456 CG_SWITCH_TO_MAIN(cg
);
4458 /* Emit NOP for the decompiler. */
4459 if (!EmitFunctionDefNop(cx
, cg
, index
))
4463 JSLocalKind localKind
=
4465 js_LookupLocal(cx
, cg
->fun
, fun
->atom
, &slot
);
4466 JS_ASSERT(localKind
== JSLOCAL_VAR
|| localKind
== JSLOCAL_CONST
);
4467 JS_ASSERT(index
< JS_BIT(20));
4468 pn
->pn_index
= index
;
4469 op
= FUN_FLAT_CLOSURE(fun
) ? JSOP_DEFLOCALFUN_FC
: JSOP_DEFLOCALFUN
;
4470 if (!EmitSlotIndexOp(cx
, op
, slot
, index
, cg
))
4477 ok
= js_EmitTree(cx
, cg
, pn
->last());
4481 JS_ASSERT(cg
->lexdeps
.count
== 0);
4482 JS_ASSERT(pn
->pn_names
.count
!= 0);
4483 cg
->lexdeps
= pn
->pn_names
;
4484 ok
= js_EmitTree(cx
, cg
, pn
->pn_tree
);
4488 /* Initialize so we can detect else-if chains and avoid recursion. */
4489 stmtInfo
.type
= STMT_IF
;
4494 /* Emit code for the condition before pushing stmtInfo. */
4495 if (!js_EmitTree(cx
, cg
, pn
->pn_kid1
))
4497 top
= CG_OFFSET(cg
);
4498 if (stmtInfo
.type
== STMT_IF
) {
4499 js_PushStatement(cg
, &stmtInfo
, STMT_IF
, top
);
4502 * We came here from the goto further below that detects else-if
4503 * chains, so we must mutate stmtInfo back into a STMT_IF record.
4504 * Also (see below for why) we need a note offset for SRC_IF_ELSE
4505 * to help the decompiler. Actually, we need two offsets, one for
4506 * decompiling any else clause and the second for decompiling an
4507 * else-if chain without bracing, overindenting, or incorrectly
4508 * scoping let declarations.
4510 JS_ASSERT(stmtInfo
.type
== STMT_ELSE
);
4511 stmtInfo
.type
= STMT_IF
;
4512 stmtInfo
.update
= top
;
4513 if (!js_SetSrcNoteOffset(cx
, cg
, noteIndex
, 0, jmp
- beq
))
4515 if (!js_SetSrcNoteOffset(cx
, cg
, noteIndex
, 1, top
- beq
))
4519 /* Emit an annotated branch-if-false around the then part. */
4521 noteIndex
= js_NewSrcNote(cx
, cg
, pn3
? SRC_IF_ELSE
: SRC_IF
);
4524 beq
= EmitJump(cx
, cg
, JSOP_IFEQ
, 0);
4528 /* Emit code for the then and optional else parts. */
4529 if (!js_EmitTree(cx
, cg
, pn
->pn_kid2
))
4532 /* Modify stmtInfo so we know we're in the else part. */
4533 stmtInfo
.type
= STMT_ELSE
;
4536 * Emit a JSOP_BACKPATCH op to jump from the end of our then part
4537 * around the else part. The js_PopStatementCG call at the bottom
4538 * of this switch case will fix up the backpatch chain linked from
4541 jmp
= EmitGoto(cx
, cg
, &stmtInfo
, &stmtInfo
.breaks
, NULL
, SRC_NULL
);
4545 /* Ensure the branch-if-false comes here, then emit the else. */
4546 CHECK_AND_SET_JUMP_OFFSET_AT(cx
, cg
, beq
);
4547 if (pn3
->pn_type
== TOK_IF
) {
4552 if (!js_EmitTree(cx
, cg
, pn3
))
4556 * Annotate SRC_IF_ELSE with the offset from branch to jump, for
4557 * the decompiler's benefit. We can't just "back up" from the pc
4558 * of the else clause, because we don't know whether an extended
4559 * jump was required to leap from the end of the then clause over
4562 if (!js_SetSrcNoteOffset(cx
, cg
, noteIndex
, 0, jmp
- beq
))
4565 /* No else part, fixup the branch-if-false to come here. */
4566 CHECK_AND_SET_JUMP_OFFSET_AT(cx
, cg
, beq
);
4568 ok
= js_PopStatementCG(cx
, cg
);
4572 /* Out of line to avoid bloating js_EmitTree's stack frame size. */
4573 ok
= EmitSwitch(cx
, cg
, pn
, &stmtInfo
);
4578 * Minimize bytecodes issued for one or more iterations by jumping to
4579 * the condition below the body and closing the loop if the condition
4580 * is true with a backward branch. For iteration count i:
4582 * i test at the top test at the bottom
4583 * = =============== ==================
4584 * 0 ifeq-pass goto; ifne-fail
4585 * 1 ifeq-fail; goto; ifne-pass goto; ifne-pass; ifne-fail
4586 * 2 2*(ifeq-fail; goto); ifeq-pass goto; 2*ifne-pass; ifne-fail
4588 * N N*(ifeq-fail; goto); ifeq-pass goto; N*ifne-pass; ifne-fail
4590 * SpiderMonkey, pre-mozilla.org, emitted while parsing and so used
4591 * test at the top. When JSParseNode trees were added during the ES3
4592 * work (1998-9), the code generation scheme was not optimized, and
4593 * the decompiler continued to take advantage of the branch and jump
4594 * that bracketed the body. But given the SRC_WHILE note, it is easy
4595 * to support the more efficient scheme.
4597 js_PushStatement(cg
, &stmtInfo
, STMT_WHILE_LOOP
, top
);
4598 noteIndex
= js_NewSrcNote(cx
, cg
, SRC_WHILE
);
4601 jmp
= EmitJump(cx
, cg
, JSOP_GOTO
, 0);
4604 top
= js_Emit1(cx
, cg
, JSOP_TRACE
);
4607 if (!js_EmitTree(cx
, cg
, pn
->pn_right
))
4609 CHECK_AND_SET_JUMP_OFFSET_AT(cx
, cg
, jmp
);
4610 if (!js_EmitTree(cx
, cg
, pn
->pn_left
))
4612 beq
= EmitJump(cx
, cg
, JSOP_IFNE
, top
- CG_OFFSET(cg
));
4615 if (!js_SetSrcNoteOffset(cx
, cg
, noteIndex
, 0, beq
- jmp
))
4617 ok
= js_PopStatementCG(cx
, cg
);
4621 /* Emit an annotated nop so we know to decompile a 'do' keyword. */
4622 noteIndex
= js_NewSrcNote(cx
, cg
, SRC_WHILE
);
4623 if (noteIndex
< 0 || js_Emit1(cx
, cg
, JSOP_NOP
) < 0)
4626 /* Compile the loop body. */
4627 top
= js_Emit1(cx
, cg
, JSOP_TRACE
);
4630 js_PushStatement(cg
, &stmtInfo
, STMT_DO_LOOP
, top
);
4631 if (!js_EmitTree(cx
, cg
, pn
->pn_left
))
4634 /* Set loop and enclosing label update offsets, for continue. */
4637 stmt
->update
= CG_OFFSET(cg
);
4638 } while ((stmt
= stmt
->down
) != NULL
&& stmt
->type
== STMT_LABEL
);
4640 /* Compile the loop condition, now that continues know where to go. */
4641 if (!js_EmitTree(cx
, cg
, pn
->pn_right
))
4645 * Since we use JSOP_IFNE for other purposes as well as for do-while
4646 * loops, we must store 1 + (beq - top) in the SRC_WHILE note offset,
4647 * and the decompiler must get that delta and decompile recursively.
4649 beq
= EmitJump(cx
, cg
, JSOP_IFNE
, top
- CG_OFFSET(cg
));
4652 if (!js_SetSrcNoteOffset(cx
, cg
, noteIndex
, 0, 1 + (beq
- top
)))
4654 ok
= js_PopStatementCG(cx
, cg
);
4658 beq
= 0; /* suppress gcc warnings */
4661 js_PushStatement(cg
, &stmtInfo
, STMT_FOR_LOOP
, top
);
4663 if (pn2
->pn_type
== TOK_IN
) {
4664 /* Set stmtInfo type for later testing. */
4665 stmtInfo
.type
= STMT_FOR_IN_LOOP
;
4668 * If the left part is 'var x', emit code to define x if necessary
4669 * using a prolog opcode, but do not emit a pop. If the left part
4670 * is 'var x = i', emit prolog code to define x if necessary; then
4671 * emit code to evaluate i, assign the result to x, and pop the
4672 * result off the stack.
4674 * All the logic to do this is implemented in the outer switch's
4675 * TOK_VAR case, conditioned on pn_xflags flags set by the parser.
4677 * In the 'for (var x = i in o) ...' case, the js_EmitTree(...pn3)
4678 * called here will generate the proper note for the assignment
4679 * op that sets x = i, hoisting the initialized var declaration
4680 * out of the loop: 'var x = i; for (x in o) ...'.
4682 * In the 'for (var x in o) ...' case, nothing but the prolog op
4683 * (if needed) should be generated here, we must emit the note
4684 * just before the JSOP_FOR* opcode in the switch on pn3->pn_type
4685 * a bit below, so nothing is hoisted: 'for (var x in o) ...'.
4687 * A 'for (let x = i in o)' loop must not be hoisted, since in
4688 * this form the let variable is scoped by the loop body (but not
4689 * the head). The initializer expression i must be evaluated for
4690 * any side effects. So we hoist only i in the let case.
4693 type
= PN_TYPE(pn3
);
4694 cg
->flags
|= TCF_IN_FOR_INIT
;
4695 if (TokenKindIsDecl(type
) && !js_EmitTree(cx
, cg
, pn3
))
4697 cg
->flags
&= ~TCF_IN_FOR_INIT
;
4699 /* Compile the object expression to the right of 'in'. */
4700 if (!js_EmitTree(cx
, cg
, pn2
->pn_right
))
4704 * Emit a bytecode to convert top of stack value to the iterator
4705 * object depending on the loop variant (for-in, for-each-in, or
4706 * destructuring for-in).
4708 JS_ASSERT(pn
->pn_op
== JSOP_ITER
);
4709 if (js_Emit2(cx
, cg
, JSOP_ITER
, (uint8
) pn
->pn_iflags
) < 0)
4712 /* Annotate so the decompiler can find the loop-closing jump. */
4713 noteIndex
= js_NewSrcNote(cx
, cg
, SRC_FOR_IN
);
4718 * Jump down to the loop condition to minimize overhead assuming at
4719 * least one iteration, as the other loop forms do.
4721 jmp
= EmitJump(cx
, cg
, JSOP_GOTO
, 0);
4725 top
= CG_OFFSET(cg
);
4726 SET_STATEMENT_TOP(&stmtInfo
, top
);
4727 if (js_Emit1(cx
, cg
, JSOP_TRACE
) < 0)
4731 intN loopDepth
= cg
->stackDepth
;
4735 * Compile a JSOP_FOR* bytecode based on the left hand side.
4737 * Initialize op to JSOP_SETNAME in case of |for ([a, b] in o)...|
4738 * or similar, to signify assignment, rather than declaration, to
4739 * the decompiler. EmitDestructuringOps takes a prolog bytecode
4740 * parameter and emits the appropriate source note, defaulting to
4741 * assignment, so JSOP_SETNAME is not critical here; many similar
4742 * ops could be used -- just not JSOP_NOP (which means 'let').
4746 #if JS_HAS_BLOCK_SCOPE
4750 JS_ASSERT(pn3
->pn_arity
== PN_LIST
&& pn3
->pn_count
== 1);
4752 #if JS_HAS_DESTRUCTURING
4753 if (pn3
->pn_type
== TOK_ASSIGN
) {
4755 JS_ASSERT(pn3
->pn_type
== TOK_RB
|| pn3
->pn_type
== TOK_RC
);
4757 if (pn3
->pn_type
== TOK_RB
|| pn3
->pn_type
== TOK_RC
) {
4758 op
= PN_OP(pn2
->pn_left
);
4759 goto destructuring_for
;
4762 JS_ASSERT(pn3
->pn_type
== TOK_NAME
);
4768 * Always annotate JSOP_FORLOCAL if given input of the form
4769 * 'for (let x in * o)' -- the decompiler must not hoist the
4770 * 'let x' out of the loop head, or x will be bound in the
4771 * wrong scope. Likewise, but in this case only for the sake
4772 * of higher decompilation fidelity only, do not hoist 'var x'
4773 * when given 'for (var x in o)'.
4776 #if JS_HAS_BLOCK_SCOPE
4779 (type
== TOK_VAR
&& !pn3
->maybeExpr())) &&
4780 js_NewSrcNote2(cx
, cg
, SRC_DECL
,
4783 : SRC_DECL_LET
) < 0) {
4786 if (pn3
->pn_cookie
!= FREE_UPVAR_COOKIE
) {
4789 case JSOP_GETARG
: /* FALL THROUGH */
4790 case JSOP_SETARG
: op
= JSOP_FORARG
; break;
4791 case JSOP_GETGVAR
: /* FALL THROUGH */
4792 case JSOP_SETGVAR
: op
= JSOP_FORNAME
; break;
4793 case JSOP_GETLOCAL
: /* FALL THROUGH */
4794 case JSOP_SETLOCAL
: op
= JSOP_FORLOCAL
; break;
4795 default: JS_ASSERT(0);
4798 pn3
->pn_op
= JSOP_FORNAME
;
4799 if (!BindNameToSlot(cx
, cg
, pn3
))
4803 if (pn3
->isConst()) {
4804 ReportCompileErrorNumber(cx
, CG_TS(cg
), pn3
, JSREPORT_ERROR
,
4805 JSMSG_BAD_FOR_LEFTSIDE
);
4808 if (pn3
->pn_cookie
!= FREE_UPVAR_COOKIE
) {
4809 atomIndex
= (jsatomid
) pn3
->pn_cookie
;
4810 EMIT_UINT16_IMM_OP(op
, atomIndex
);
4812 if (!EmitAtomOp(cx
, pn3
, op
, cg
))
4819 * 'for (o.p in q)' can use JSOP_FORPROP only if evaluating 'o'
4820 * has no side effects.
4823 if (!CheckSideEffects(cx
, cg
, pn3
->expr(), &useful
))
4826 if (!EmitPropOp(cx
, pn3
, JSOP_FORPROP
, cg
, JS_FALSE
))
4832 #if JS_HAS_DESTRUCTURING
4836 if (js_Emit1(cx
, cg
, JSOP_FORELEM
) < 0)
4838 JS_ASSERT(cg
->stackDepth
>= 2);
4840 #if JS_HAS_DESTRUCTURING
4841 if (pn3
->pn_type
== TOK_RB
|| pn3
->pn_type
== TOK_RC
) {
4842 if (!EmitDestructuringOps(cx
, cg
, op
, pn3
))
4844 if (js_Emit1(cx
, cg
, JSOP_POP
) < 0)
4848 if (pn3
->pn_type
== TOK_LP
) {
4849 JS_ASSERT(pn3
->pn_op
== JSOP_SETCALL
);
4850 if (!js_EmitTree(cx
, cg
, pn3
))
4852 if (js_Emit1(cx
, cg
, JSOP_ENUMELEM
) < 0)
4855 #if JS_HAS_XML_SUPPORT
4856 if (pn3
->pn_type
== TOK_UNARYOP
) {
4857 JS_ASSERT(pn3
->pn_op
== JSOP_BINDXMLNAME
);
4858 if (!js_EmitTree(cx
, cg
, pn3
))
4860 if (js_Emit1(cx
, cg
, JSOP_ENUMELEM
) < 0)
4864 if (!EmitElemOp(cx
, pn3
, JSOP_ENUMELEM
, cg
))
4869 /* The stack should be balanced around the JSOP_FOR* opcode sequence. */
4870 JS_ASSERT(cg
->stackDepth
== loopDepth
);
4872 /* Set the first srcnote offset so we can find the start of the loop body. */
4873 if (!js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 0, CG_OFFSET(cg
) - jmp
))
4876 /* Emit code for the loop body. */
4877 if (!js_EmitTree(cx
, cg
, pn
->pn_right
))
4880 /* Set loop and enclosing "update" offsets, for continue. */
4883 stmt
->update
= CG_OFFSET(cg
);
4884 } while ((stmt
= stmt
->down
) != NULL
&& stmt
->type
== STMT_LABEL
);
4887 * Fixup the goto that starts the loop to jump down to JSOP_MOREITER.
4889 CHECK_AND_SET_JUMP_OFFSET_AT(cx
, cg
, jmp
);
4890 if (js_Emit1(cx
, cg
, JSOP_MOREITER
) < 0)
4892 beq
= EmitJump(cx
, cg
, JSOP_IFNE
, top
- CG_OFFSET(cg
));
4896 /* Set the second srcnote offset so we can find the closing jump. */
4897 if (!js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 1, beq
- jmp
))
4900 /* C-style for (init; cond; update) ... loop. */
4904 /* No initializer: emit an annotated nop for the decompiler. */
4907 cg
->flags
|= TCF_IN_FOR_INIT
;
4908 #if JS_HAS_DESTRUCTURING
4909 if (pn3
->pn_type
== TOK_ASSIGN
&&
4910 !MaybeEmitGroupAssignment(cx
, cg
, op
, pn3
, &op
)) {
4914 if (op
== JSOP_POP
) {
4915 if (!js_EmitTree(cx
, cg
, pn3
))
4917 if (TokenKindIsDecl(PN_TYPE(pn3
))) {
4919 * Check whether a destructuring-initialized var decl
4920 * was optimized to a group assignment. If so, we do
4921 * not need to emit a pop below, so switch to a nop,
4922 * just for the decompiler.
4924 JS_ASSERT(pn3
->pn_arity
== PN_LIST
);
4925 if (pn3
->pn_xflags
& PNX_GROUPINIT
)
4929 cg
->flags
&= ~TCF_IN_FOR_INIT
;
4933 * NB: the SRC_FOR note has offsetBias 1 (JSOP_{NOP,POP}_LENGTH).
4934 * Use tmp to hold the biased srcnote "top" offset, which differs
4935 * from the top local variable by the length of the JSOP_GOTO{,X}
4936 * emitted in between tmp and top if this loop has a condition.
4938 noteIndex
= js_NewSrcNote(cx
, cg
, SRC_FOR
);
4939 if (noteIndex
< 0 || js_Emit1(cx
, cg
, op
) < 0)
4941 tmp
= CG_OFFSET(cg
);
4944 /* Goto the loop condition, which branches back to iterate. */
4945 jmp
= EmitJump(cx
, cg
, JSOP_GOTO
, 0);
4950 top
= CG_OFFSET(cg
);
4951 SET_STATEMENT_TOP(&stmtInfo
, top
);
4953 /* Emit code for the loop body. */
4954 if (js_Emit1(cx
, cg
, JSOP_TRACE
) < 0)
4956 if (!js_EmitTree(cx
, cg
, pn
->pn_right
))
4959 /* Set the second note offset so we can find the update part. */
4960 JS_ASSERT(noteIndex
!= -1);
4961 if (!js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 1,
4962 CG_OFFSET(cg
) - tmp
)) {
4966 /* Set loop and enclosing "update" offsets, for continue. */
4969 stmt
->update
= CG_OFFSET(cg
);
4970 } while ((stmt
= stmt
->down
) != NULL
&& stmt
->type
== STMT_LABEL
);
4972 /* Check for update code to do before the condition (if any). */
4976 #if JS_HAS_DESTRUCTURING
4977 if (pn3
->pn_type
== TOK_ASSIGN
&&
4978 !MaybeEmitGroupAssignment(cx
, cg
, op
, pn3
, &op
)) {
4982 if (op
== JSOP_POP
&& !js_EmitTree(cx
, cg
, pn3
))
4985 /* Always emit the POP or NOP, to help the decompiler. */
4986 if (js_Emit1(cx
, cg
, op
) < 0)
4989 /* Restore the absolute line number for source note readers. */
4990 off
= (ptrdiff_t) pn
->pn_pos
.end
.lineno
;
4991 if (CG_CURRENT_LINE(cg
) != (uintN
) off
) {
4992 if (js_NewSrcNote2(cx
, cg
, SRC_SETLINE
, off
) < 0)
4994 CG_CURRENT_LINE(cg
) = (uintN
) off
;
4998 /* Set the first note offset so we can find the loop condition. */
4999 if (!js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 0,
5000 CG_OFFSET(cg
) - tmp
)) {
5005 /* Fix up the goto from top to target the loop condition. */
5006 JS_ASSERT(jmp
>= 0);
5007 CHECK_AND_SET_JUMP_OFFSET_AT(cx
, cg
, jmp
);
5009 if (!js_EmitTree(cx
, cg
, pn2
->pn_kid2
))
5013 /* The third note offset helps us find the loop-closing jump. */
5014 if (!js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 2,
5015 CG_OFFSET(cg
) - tmp
)) {
5020 beq
= EmitJump(cx
, cg
, JSOP_IFNE
, top
- CG_OFFSET(cg
));
5024 /* No loop condition -- emit the loop-closing jump. */
5025 jmp
= EmitJump(cx
, cg
, JSOP_GOTO
, top
- CG_OFFSET(cg
));
5031 /* Now fixup all breaks and continues (before for/in's JSOP_ENDITER). */
5032 if (!js_PopStatementCG(cx
, cg
))
5035 if (pn2
->pn_type
== TOK_IN
) {
5036 if (!NewTryNote(cx
, cg
, JSTRY_ITER
, cg
->stackDepth
, top
, CG_OFFSET(cg
)) ||
5037 js_Emit1(cx
, cg
, JSOP_ENDITER
) < 0) {
5047 ale
= cg
->atomList
.add(cg
->parser
, atom
);
5050 while (stmt
->type
!= STMT_LABEL
|| stmt
->label
!= atom
)
5052 noteType
= SRC_BREAK2LABEL
;
5055 while (!STMT_IS_LOOP(stmt
) && stmt
->type
!= STMT_SWITCH
)
5057 noteType
= (stmt
->type
== STMT_SWITCH
) ? SRC_NULL
: SRC_BREAK
;
5060 if (EmitGoto(cx
, cg
, stmt
, &stmt
->breaks
, ale
, noteType
) < 0)
5068 /* Find the loop statement enclosed by the matching label. */
5069 JSStmtInfo
*loop
= NULL
;
5070 ale
= cg
->atomList
.add(cg
->parser
, atom
);
5073 while (stmt
->type
!= STMT_LABEL
|| stmt
->label
!= atom
) {
5074 if (STMT_IS_LOOP(stmt
))
5079 noteType
= SRC_CONT2LABEL
;
5082 while (!STMT_IS_LOOP(stmt
))
5084 noteType
= SRC_CONTINUE
;
5087 if (EmitGoto(cx
, cg
, stmt
, &stmt
->continues
, ale
, noteType
) < 0)
5092 if (!js_EmitTree(cx
, cg
, pn
->pn_left
))
5094 js_PushStatement(cg
, &stmtInfo
, STMT_WITH
, CG_OFFSET(cg
));
5095 if (js_Emit1(cx
, cg
, JSOP_ENTERWITH
) < 0)
5097 if (!js_EmitTree(cx
, cg
, pn
->pn_right
))
5099 if (js_Emit1(cx
, cg
, JSOP_LEAVEWITH
) < 0)
5101 ok
= js_PopStatementCG(cx
, cg
);
5106 ptrdiff_t tryStart
, tryEnd
, catchJump
, finallyStart
;
5108 JSParseNode
*lastCatch
;
5113 * Push stmtInfo to track jumps-over-catches and gosubs-to-finally
5116 * When a finally block is active (STMT_FINALLY in our tree context),
5117 * non-local jumps (including jumps-over-catches) result in a GOSUB
5118 * being written into the bytecode stream and fixed-up later (c.f.
5119 * EmitBackPatchOp and BackPatch).
5121 js_PushStatement(cg
, &stmtInfo
,
5122 pn
->pn_kid3
? STMT_FINALLY
: STMT_TRY
,
5126 * Since an exception can be thrown at any place inside the try block,
5127 * we need to restore the stack and the scope chain before we transfer
5128 * the control to the exception handler.
5130 * For that we store in a try note associated with the catch or
5131 * finally block the stack depth upon the try entry. The interpreter
5132 * uses this depth to properly unwind the stack and the scope chain.
5134 depth
= cg
->stackDepth
;
5136 /* Mark try location for decompilation, then emit try block. */
5137 if (js_Emit1(cx
, cg
, JSOP_TRY
) < 0)
5139 tryStart
= CG_OFFSET(cg
);
5140 if (!js_EmitTree(cx
, cg
, pn
->pn_kid1
))
5142 JS_ASSERT(depth
== cg
->stackDepth
);
5144 /* GOSUB to finally, if present. */
5146 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0)
5148 jmp
= EmitBackPatchOp(cx
, cg
, JSOP_BACKPATCH
, &GOSUBS(stmtInfo
));
5153 /* Emit (hidden) jump over catch and/or finally. */
5154 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0)
5156 jmp
= EmitBackPatchOp(cx
, cg
, JSOP_BACKPATCH
, &catchJump
);
5160 tryEnd
= CG_OFFSET(cg
);
5162 /* If this try has a catch block, emit it. */
5166 jsint count
= 0; /* previous catch block's population */
5169 * The emitted code for a catch block looks like:
5171 * [throwing] only if 2nd+ catch block
5172 * [leaveblock] only if 2nd+ catch block
5173 * enterblock with SRC_CATCH
5175 * [dup] only if catchguard
5176 * setlocalpop <slot> or destructuring code
5177 * [< catchguard code >] if there's a catchguard
5178 * [ifeq <offset to next catch block>] " "
5179 * [pop] only if catchguard
5180 * < catch block contents >
5182 * goto <end of catch blocks> non-local; finally applies
5184 * If there's no catch block without a catchguard, the last
5185 * <offset to next catch block> points to rethrow code. This
5186 * code will [gosub] to the finally code if appropriate, and is
5187 * also used for the catch-all trynote for capturing exceptions
5188 * thrown from catch{} blocks.
5190 for (pn3
= pn2
->pn_head
; pn3
; pn3
= pn3
->pn_next
) {
5191 ptrdiff_t guardJump
, catchNote
;
5193 JS_ASSERT(cg
->stackDepth
== depth
);
5194 guardJump
= GUARDJUMP(stmtInfo
);
5195 if (guardJump
!= -1) {
5196 /* Fix up and clean up previous catch block. */
5197 CHECK_AND_SET_JUMP_OFFSET_AT(cx
, cg
, guardJump
);
5200 * Account for JSOP_ENTERBLOCK (whose block object count
5201 * is saved below) and pushed exception object that we
5202 * still have after the jumping from the previous guard.
5204 cg
->stackDepth
= depth
+ count
+ 1;
5207 * Move exception back to cx->exception to prepare for
5208 * the next catch. We hide [throwing] from the decompiler
5209 * since it compensates for the hidden JSOP_DUP at the
5210 * start of the previous guarded catch.
5212 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0 ||
5213 js_Emit1(cx
, cg
, JSOP_THROWING
) < 0) {
5216 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0)
5218 EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK
, count
);
5219 JS_ASSERT(cg
->stackDepth
== depth
);
5223 * Annotate the JSOP_ENTERBLOCK that's about to be generated
5224 * by the call to js_EmitTree immediately below. Save this
5225 * source note's index in stmtInfo for use by the TOK_CATCH:
5226 * case, where the length of the catch guard is set as the
5229 catchNote
= js_NewSrcNote2(cx
, cg
, SRC_CATCH
, 0);
5232 CATCHNOTE(stmtInfo
) = catchNote
;
5235 * Emit the lexical scope and catch body. Save the catch's
5236 * block object population via count, for use when targeting
5237 * guardJump at the next catch (the guard mismatch case).
5239 JS_ASSERT(pn3
->pn_type
== TOK_LEXICALSCOPE
);
5240 count
= OBJ_BLOCK_COUNT(cx
, pn3
->pn_objbox
->object
);
5241 if (!js_EmitTree(cx
, cg
, pn3
))
5244 /* gosub <finally>, if required */
5246 jmp
= EmitBackPatchOp(cx
, cg
, JSOP_BACKPATCH
,
5250 JS_ASSERT(cg
->stackDepth
== depth
);
5254 * Jump over the remaining catch blocks. This will get fixed
5255 * up to jump to after catch/finally.
5257 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0)
5259 jmp
= EmitBackPatchOp(cx
, cg
, JSOP_BACKPATCH
, &catchJump
);
5264 * Save a pointer to the last catch node to handle try-finally
5265 * and try-catch(guard)-finally special cases.
5267 lastCatch
= pn3
->expr();
5272 * Last catch guard jumps to the rethrow code sequence if none of the
5273 * guards match. Target guardJump at the beginning of the rethrow
5274 * sequence, just in case a guard expression throws and leaves the
5277 if (lastCatch
&& lastCatch
->pn_kid2
) {
5278 CHECK_AND_SET_JUMP_OFFSET_AT(cx
, cg
, GUARDJUMP(stmtInfo
));
5280 /* Sync the stack to take into account pushed exception. */
5281 JS_ASSERT(cg
->stackDepth
== depth
);
5282 cg
->stackDepth
= depth
+ 1;
5285 * Rethrow the exception, delegating executing of finally if any
5286 * to the exception handler.
5288 if (js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0 ||
5289 js_Emit1(cx
, cg
, JSOP_THROW
) < 0) {
5294 JS_ASSERT(cg
->stackDepth
== depth
);
5296 /* Emit finally handler if any. */
5297 finallyStart
= 0; /* to quell GCC uninitialized warnings */
5300 * Fix up the gosubs that might have been emitted before non-local
5301 * jumps to the finally code.
5303 if (!BackPatch(cx
, cg
, GOSUBS(stmtInfo
), CG_NEXT(cg
), JSOP_GOSUB
))
5306 finallyStart
= CG_OFFSET(cg
);
5308 /* Indicate that we're emitting a subroutine body. */
5309 stmtInfo
.type
= STMT_SUBROUTINE
;
5310 if (!UpdateLineNumberNotes(cx
, cg
, pn
->pn_kid3
->pn_pos
.begin
.lineno
))
5312 if (js_Emit1(cx
, cg
, JSOP_FINALLY
) < 0 ||
5313 !js_EmitTree(cx
, cg
, pn
->pn_kid3
) ||
5314 js_Emit1(cx
, cg
, JSOP_RETSUB
) < 0) {
5317 JS_ASSERT(cg
->stackDepth
== depth
);
5319 if (!js_PopStatementCG(cx
, cg
))
5322 if (js_NewSrcNote(cx
, cg
, SRC_ENDBRACE
) < 0 ||
5323 js_Emit1(cx
, cg
, JSOP_NOP
) < 0) {
5327 /* Fix up the end-of-try/catch jumps to come here. */
5328 if (!BackPatch(cx
, cg
, catchJump
, CG_NEXT(cg
), JSOP_GOTO
))
5332 * Add the try note last, to let post-order give us the right ordering
5333 * (first to last for a given nesting level, inner to outer by level).
5336 !NewTryNote(cx
, cg
, JSTRY_CATCH
, depth
, tryStart
, tryEnd
)) {
5341 * If we've got a finally, mark try+catch region with additional
5342 * trynote to catch exceptions (re)thrown from a catch block or
5343 * for the try{}finally{} case.
5346 !NewTryNote(cx
, cg
, JSTRY_FINALLY
, depth
, tryStart
, finallyStart
)) {
5354 ptrdiff_t catchStart
, guardJump
;
5358 * Morph STMT_BLOCK to STMT_CATCH, note the block entry code offset,
5359 * and save the block object atom.
5362 JS_ASSERT(stmt
->type
== STMT_BLOCK
&& (stmt
->flags
& SIF_SCOPE
));
5363 stmt
->type
= STMT_CATCH
;
5364 catchStart
= stmt
->update
;
5365 blockObj
= stmt
->blockObj
;
5367 /* Go up one statement info record to the TRY or FINALLY record. */
5369 JS_ASSERT(stmt
->type
== STMT_TRY
|| stmt
->type
== STMT_FINALLY
);
5371 /* Pick up the pending exception and bind it to the catch variable. */
5372 if (js_Emit1(cx
, cg
, JSOP_EXCEPTION
) < 0)
5376 * Dup the exception object if there is a guard for rethrowing to use
5377 * it later when rethrowing or in other catches.
5379 if (pn
->pn_kid2
&& js_Emit1(cx
, cg
, JSOP_DUP
) < 0)
5383 switch (pn2
->pn_type
) {
5384 #if JS_HAS_DESTRUCTURING
5387 if (!EmitDestructuringOps(cx
, cg
, JSOP_NOP
, pn2
))
5389 if (js_Emit1(cx
, cg
, JSOP_POP
) < 0)
5395 /* Inline and specialize BindNameToSlot for pn2. */
5396 JS_ASSERT(pn2
->pn_cookie
!= FREE_UPVAR_COOKIE
);
5397 EMIT_UINT16_IMM_OP(JSOP_SETLOCALPOP
, pn2
->pn_cookie
);
5404 /* Emit the guard expression, if there is one. */
5406 if (!js_EmitTree(cx
, cg
, pn
->pn_kid2
))
5408 if (!js_SetSrcNoteOffset(cx
, cg
, CATCHNOTE(*stmt
), 0,
5409 CG_OFFSET(cg
) - catchStart
)) {
5412 /* ifeq <next block> */
5413 guardJump
= EmitJump(cx
, cg
, JSOP_IFEQ
, 0);
5416 GUARDJUMP(*stmt
) = guardJump
;
5418 /* Pop duplicated exception object as we no longer need it. */
5419 if (js_Emit1(cx
, cg
, JSOP_POP
) < 0)
5423 /* Emit the catch body. */
5424 if (!js_EmitTree(cx
, cg
, pn
->pn_kid3
))
5428 * Annotate the JSOP_LEAVEBLOCK that will be emitted as we unwind via
5429 * our TOK_LEXICALSCOPE parent, so the decompiler knows to pop.
5431 off
= cg
->stackDepth
;
5432 if (js_NewSrcNote2(cx
, cg
, SRC_CATCH
, off
) < 0)
5438 if (!EmitVariables(cx
, cg
, pn
, JS_FALSE
, ¬eIndex
))
5443 /* Push a return value */
5446 if (!js_EmitTree(cx
, cg
, pn2
))
5449 if (js_Emit1(cx
, cg
, JSOP_PUSH
) < 0)
5454 * EmitNonLocalJumpFixup may add fixup bytecode to close open try
5455 * blocks having finally clauses and to exit intermingled let blocks.
5456 * We can't simply transfer control flow to our caller in that case,
5457 * because we must gosub to those finally clauses from inner to outer,
5458 * with the correct stack pointer (i.e., after popping any with,
5459 * for/in, etc., slots nested inside the finally's try).
5461 * In this case we mutate JSOP_RETURN into JSOP_SETRVAL and add an
5462 * extra JSOP_RETRVAL after the fixups.
5464 top
= CG_OFFSET(cg
);
5465 if (js_Emit1(cx
, cg
, JSOP_RETURN
) < 0)
5467 if (!EmitNonLocalJumpFixup(cx
, cg
, NULL
))
5469 if (top
+ JSOP_RETURN_LENGTH
!= CG_OFFSET(cg
)) {
5470 CG_BASE(cg
)[top
] = JSOP_SETRVAL
;
5471 if (js_Emit1(cx
, cg
, JSOP_RETRVAL
) < 0)
5476 #if JS_HAS_GENERATORS
5478 if (!cg
->inFunction()) {
5479 ReportCompileErrorNumber(cx
, CG_TS(cg
), pn
, JSREPORT_ERROR
,
5480 JSMSG_BAD_RETURN_OR_YIELD
,
5485 if (!js_EmitTree(cx
, cg
, pn
->pn_kid
))
5488 if (js_Emit1(cx
, cg
, JSOP_PUSH
) < 0)
5491 if (pn
->pn_hidden
&& js_NewSrcNote(cx
, cg
, SRC_HIDDEN
) < 0)
5493 if (js_Emit1(cx
, cg
, JSOP_YIELD
) < 0)
5500 #if JS_HAS_XML_SUPPORT
5501 if (pn
->pn_arity
== PN_UNARY
) {
5502 if (!js_EmitTree(cx
, cg
, pn
->pn_kid
))
5504 if (js_Emit1(cx
, cg
, PN_OP(pn
)) < 0)
5510 JS_ASSERT(pn
->pn_arity
== PN_LIST
);
5513 tmp
= CG_OFFSET(cg
);
5514 if (pn
->pn_xflags
& PNX_NEEDBRACES
) {
5515 noteIndex
= js_NewSrcNote2(cx
, cg
, SRC_BRACE
, 0);
5516 if (noteIndex
< 0 || js_Emit1(cx
, cg
, JSOP_NOP
) < 0)
5520 js_PushStatement(cg
, &stmtInfo
, STMT_BLOCK
, top
);
5522 JSParseNode
*pnchild
= pn
->pn_head
;
5523 if (pn
->pn_xflags
& PNX_FUNCDEFS
) {
5525 * This block contains top-level function definitions. To ensure
5526 * that we emit the bytecode defining them before the rest of code
5527 * in the block we use a separate pass over functions. During the
5528 * main pass later the emitter will add JSOP_NOP with source notes
5529 * for the function to preserve the original functions position
5532 * Currently this is used only for functions, as compile-as-we go
5533 * mode for scripts does not allow separate emitter passes.
5535 JS_ASSERT(cg
->inFunction());
5536 if (pn
->pn_xflags
& PNX_DESTRUCT
) {
5538 * Assign the destructuring arguments before defining any
5539 * functions, see bug 419662.
5541 JS_ASSERT(pnchild
->pn_type
== TOK_SEMI
);
5542 JS_ASSERT(pnchild
->pn_kid
->pn_type
== TOK_COMMA
);
5543 if (!js_EmitTree(cx
, cg
, pnchild
))
5545 pnchild
= pnchild
->pn_next
;
5548 for (pn2
= pnchild
; pn2
; pn2
= pn2
->pn_next
) {
5549 if (pn2
->pn_type
== TOK_FUNCTION
) {
5550 if (pn2
->pn_op
== JSOP_NOP
) {
5551 if (!js_EmitTree(cx
, cg
, pn2
))
5555 * JSOP_DEFFUN in a top-level block with function
5556 * definitions appears, for example, when "if (true)"
5557 * is optimized away from "if (true) function x() {}".
5560 JS_ASSERT(pn2
->pn_op
== JSOP_DEFFUN
);
5565 for (pn2
= pnchild
; pn2
; pn2
= pn2
->pn_next
) {
5566 if (!js_EmitTree(cx
, cg
, pn2
))
5570 if (noteIndex
>= 0 &&
5571 !js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 0,
5572 CG_OFFSET(cg
) - tmp
)) {
5576 ok
= js_PopStatementCG(cx
, cg
);
5581 JS_ASSERT(pn
->pn_arity
== PN_LIST
);
5582 js_PushStatement(cg
, &stmtInfo
, STMT_SEQ
, top
);
5583 for (pn2
= pn
->pn_head
; pn2
; pn2
= pn2
->pn_next
) {
5584 if (!js_EmitTree(cx
, cg
, pn2
))
5587 ok
= js_PopStatementCG(cx
, cg
);
5594 * Top-level or called-from-a-native JS_Execute/EvaluateScript,
5595 * debugger, and eval frames may need the value of the ultimate
5596 * expression statement as the script's result, despite the fact
5597 * that it appears useless to the compiler.
5599 * API users may also set the JSOPTION_NO_SCRIPT_RVAL option when
5600 * calling JS_Compile* to suppress JSOP_POPV.
5602 useful
= wantval
= !(cg
->flags
& (TCF_IN_FUNCTION
| TCF_NO_SCRIPT_RVAL
));
5604 if (!CheckSideEffects(cx
, cg
, pn2
, &useful
))
5609 * Don't eliminate apparently useless expressions if they are
5610 * labeled expression statements. The tc->topStmt->update test
5611 * catches the case where we are nesting in js_EmitTree for a
5612 * labeled compound statement.
5616 cg
->topStmt
->type
!= STMT_LABEL
||
5617 cg
->topStmt
->update
< CG_OFFSET(cg
))) {
5618 CG_CURRENT_LINE(cg
) = pn2
->pn_pos
.begin
.lineno
;
5619 if (!ReportCompileErrorNumber(cx
, CG_TS(cg
), pn2
,
5620 JSREPORT_WARNING
| JSREPORT_STRICT
,
5621 JSMSG_USELESS_EXPR
)) {
5625 op
= wantval
? JSOP_POPV
: JSOP_POP
;
5626 #if JS_HAS_DESTRUCTURING
5628 pn2
->pn_type
== TOK_ASSIGN
&&
5629 !MaybeEmitGroupAssignment(cx
, cg
, op
, pn2
, &op
)) {
5633 if (op
!= JSOP_NOP
) {
5635 * Specialize JSOP_SETPROP to JSOP_SETMETHOD to defer or
5636 * avoid null closure cloning. Do this only for assignment
5637 * statements that are not completion values wanted by a
5638 * script evaluator, to ensure that the joined function
5639 * can't escape directly.
5642 PN_TYPE(pn2
) == TOK_ASSIGN
&&
5643 PN_OP(pn2
) == JSOP_NOP
&&
5644 PN_OP(pn2
->pn_left
) == JSOP_SETPROP
&&
5645 PN_OP(pn2
->pn_right
) == JSOP_LAMBDA
&&
5646 pn2
->pn_right
->pn_funbox
->joinable()) {
5647 pn2
->pn_left
->pn_op
= JSOP_SETMETHOD
;
5649 if (!js_EmitTree(cx
, cg
, pn2
))
5651 if (js_Emit1(cx
, cg
, op
) < 0)
5659 /* Emit an annotated nop so we know to decompile a label. */
5661 ale
= cg
->atomList
.add(cg
->parser
, atom
);
5665 noteType
= (pn2
->pn_type
== TOK_LC
||
5666 (pn2
->pn_type
== TOK_LEXICALSCOPE
&&
5667 pn2
->expr()->pn_type
== TOK_LC
))
5670 noteIndex
= js_NewSrcNote2(cx
, cg
, noteType
,
5671 (ptrdiff_t) ALE_INDEX(ale
));
5672 if (noteIndex
< 0 ||
5673 js_Emit1(cx
, cg
, JSOP_NOP
) < 0) {
5677 /* Emit code for the labeled statement. */
5678 js_PushStatement(cg
, &stmtInfo
, STMT_LABEL
, CG_OFFSET(cg
));
5679 stmtInfo
.label
= atom
;
5680 if (!js_EmitTree(cx
, cg
, pn2
))
5682 if (!js_PopStatementCG(cx
, cg
))
5685 /* If the statement was compound, emit a note for the end brace. */
5686 if (noteType
== SRC_LABELBRACE
) {
5687 if (js_NewSrcNote(cx
, cg
, SRC_ENDBRACE
) < 0 ||
5688 js_Emit1(cx
, cg
, JSOP_NOP
) < 0) {
5696 * Emit SRC_PCDELTA notes on each JSOP_POP between comma operands.
5697 * These notes help the decompiler bracket the bytecodes generated
5698 * from each sub-expression that follows a comma.
5700 off
= noteIndex
= -1;
5701 for (pn2
= pn
->pn_head
; ; pn2
= pn2
->pn_next
) {
5702 if (!js_EmitTree(cx
, cg
, pn2
))
5704 tmp
= CG_OFFSET(cg
);
5705 if (noteIndex
>= 0) {
5706 if (!js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 0, tmp
-off
))
5712 noteIndex
= js_NewSrcNote2(cx
, cg
, SRC_PCDELTA
, 0);
5713 if (noteIndex
< 0 ||
5714 js_Emit1(cx
, cg
, JSOP_POP
) < 0) {
5722 * Check left operand type and generate specialized code for it.
5723 * Specialize to avoid ECMA "reference type" values on the operand
5724 * stack, which impose pervasive runtime "GetValue" costs.
5727 atomIndex
= (jsatomid
) -1; /* quell GCC overwarning */
5728 switch (PN_TYPE(pn2
)) {
5730 if (!BindNameToSlot(cx
, cg
, pn2
))
5732 if (pn2
->pn_cookie
!= FREE_UPVAR_COOKIE
) {
5733 atomIndex
= (jsatomid
) pn2
->pn_cookie
;
5735 ale
= cg
->atomList
.add(cg
->parser
, pn2
->pn_atom
);
5738 atomIndex
= ALE_INDEX(ale
);
5739 if (!pn2
->isConst())
5740 EMIT_INDEX_OP(JSOP_BINDNAME
, atomIndex
);
5744 if (!js_EmitTree(cx
, cg
, pn2
->expr()))
5746 ale
= cg
->atomList
.add(cg
->parser
, pn2
->pn_atom
);
5749 atomIndex
= ALE_INDEX(ale
);
5752 JS_ASSERT(pn2
->pn_arity
== PN_BINARY
);
5753 if (!js_EmitTree(cx
, cg
, pn2
->pn_left
))
5755 if (!js_EmitTree(cx
, cg
, pn2
->pn_right
))
5758 #if JS_HAS_DESTRUCTURING
5764 if (!js_EmitTree(cx
, cg
, pn2
))
5767 #if JS_HAS_XML_SUPPORT
5769 JS_ASSERT(pn2
->pn_op
== JSOP_SETXMLNAME
);
5770 if (!js_EmitTree(cx
, cg
, pn2
->pn_kid
))
5772 if (js_Emit1(cx
, cg
, JSOP_BINDXMLNAME
) < 0)
5781 if (op
!= JSOP_NOP
) {
5782 switch (pn2
->pn_type
) {
5784 if (pn2
->isConst()) {
5785 if (PN_OP(pn2
) == JSOP_CALLEE
) {
5786 if (js_Emit1(cx
, cg
, JSOP_CALLEE
) < 0)
5789 EMIT_INDEX_OP(PN_OP(pn2
), atomIndex
);
5791 } else if (PN_OP(pn2
) == JSOP_SETNAME
) {
5792 if (js_Emit1(cx
, cg
, JSOP_DUP
) < 0)
5794 EMIT_INDEX_OP(JSOP_GETXPROP
, atomIndex
);
5796 JS_ASSERT(PN_OP(pn2
) != JSOP_GETUPVAR
);
5797 EMIT_UINT16_IMM_OP((PN_OP(pn2
) == JSOP_SETGVAR
)
5799 : (PN_OP(pn2
) == JSOP_SETARG
)
5806 if (js_Emit1(cx
, cg
, JSOP_DUP
) < 0)
5808 if (pn2
->pn_atom
== cx
->runtime
->atomState
.lengthAtom
) {
5809 if (js_Emit1(cx
, cg
, JSOP_LENGTH
) < 0)
5811 } else if (pn2
->pn_atom
== cx
->runtime
->atomState
.protoAtom
) {
5812 if (!EmitIndexOp(cx
, JSOP_QNAMEPART
, atomIndex
, cg
))
5814 if (js_Emit1(cx
, cg
, JSOP_GETELEM
) < 0)
5817 EMIT_INDEX_OP(JSOP_GETPROP
, atomIndex
);
5822 #if JS_HAS_XML_SUPPORT
5825 if (js_Emit1(cx
, cg
, JSOP_DUP2
) < 0)
5827 if (js_Emit1(cx
, cg
, JSOP_GETELEM
) < 0)
5834 /* Now emit the right operand (it may affect the namespace). */
5835 if (!js_EmitTree(cx
, cg
, pn
->pn_right
))
5838 /* If += etc., emit the binary operator with a decompiler note. */
5839 if (op
!= JSOP_NOP
) {
5841 * Take care to avoid SRC_ASSIGNOP if the left-hand side is a const
5842 * declared in the current compilation unit, as in this case (just
5843 * a bit further below) we will avoid emitting the assignment op.
5845 if (pn2
->pn_type
!= TOK_NAME
|| !pn2
->isConst()) {
5846 if (js_NewSrcNote(cx
, cg
, SRC_ASSIGNOP
) < 0)
5849 if (js_Emit1(cx
, cg
, op
) < 0)
5853 /* Left parts such as a.b.c and a[b].c need a decompiler note. */
5854 if (pn2
->pn_type
!= TOK_NAME
&&
5855 #if JS_HAS_DESTRUCTURING
5856 pn2
->pn_type
!= TOK_RB
&&
5857 pn2
->pn_type
!= TOK_RC
&&
5859 js_NewSrcNote2(cx
, cg
, SRC_PCBASE
, CG_OFFSET(cg
) - top
) < 0) {
5863 /* Finally, emit the specialized assignment bytecode. */
5864 switch (pn2
->pn_type
) {
5870 EMIT_INDEX_OP(PN_OP(pn2
), atomIndex
);
5874 if (js_Emit1(cx
, cg
, JSOP_SETELEM
) < 0)
5877 #if JS_HAS_DESTRUCTURING
5880 if (!EmitDestructuringOps(cx
, cg
, JSOP_SETNAME
, pn2
))
5884 #if JS_HAS_XML_SUPPORT
5886 if (js_Emit1(cx
, cg
, JSOP_SETXMLNAME
) < 0)
5896 /* Emit the condition, then branch if false to the else part. */
5897 if (!js_EmitTree(cx
, cg
, pn
->pn_kid1
))
5899 noteIndex
= js_NewSrcNote(cx
, cg
, SRC_COND
);
5902 beq
= EmitJump(cx
, cg
, JSOP_IFEQ
, 0);
5903 if (beq
< 0 || !js_EmitTree(cx
, cg
, pn
->pn_kid2
))
5906 /* Jump around else, fixup the branch, emit else, fixup jump. */
5907 jmp
= EmitJump(cx
, cg
, JSOP_GOTO
, 0);
5910 CHECK_AND_SET_JUMP_OFFSET_AT(cx
, cg
, beq
);
5913 * Because each branch pushes a single value, but our stack budgeting
5914 * analysis ignores branches, we now have to adjust cg->stackDepth to
5915 * ignore the value pushed by the first branch. Execution will follow
5916 * only one path, so we must decrement cg->stackDepth.
5918 * Failing to do this will foil code, such as the try/catch/finally
5919 * exception handling code generator, that samples cg->stackDepth for
5920 * use at runtime (JSOP_SETSP), or in let expression and block code
5921 * generation, which must use the stack depth to compute local stack
5922 * indexes correctly.
5924 JS_ASSERT(cg
->stackDepth
> 0);
5926 if (!js_EmitTree(cx
, cg
, pn
->pn_kid3
))
5928 CHECK_AND_SET_JUMP_OFFSET_AT(cx
, cg
, jmp
);
5929 if (!js_SetSrcNoteOffset(cx
, cg
, noteIndex
, 0, jmp
- beq
))
5936 * JSOP_OR converts the operand on the stack to boolean, and if true,
5937 * leaves the original operand value on the stack and jumps; otherwise
5938 * it pops and falls into the next bytecode, which evaluates the right
5939 * operand. The jump goes around the right operand evaluation.
5941 * JSOP_AND converts the operand on the stack to boolean, and if false,
5942 * leaves the original operand value on the stack and jumps; otherwise
5943 * it pops and falls into the right operand's bytecode.
5945 if (pn
->pn_arity
== PN_BINARY
) {
5946 if (!js_EmitTree(cx
, cg
, pn
->pn_left
))
5948 top
= EmitJump(cx
, cg
, JSOP_BACKPATCH_POP
, 0);
5951 if (!js_EmitTree(cx
, cg
, pn
->pn_right
))
5953 off
= CG_OFFSET(cg
);
5954 pc
= CG_CODE(cg
, top
);
5955 CHECK_AND_SET_JUMP_OFFSET(cx
, cg
, pc
, off
- top
);
5958 JS_ASSERT(pn
->pn_arity
== PN_LIST
);
5959 JS_ASSERT(pn
->pn_head
->pn_next
->pn_next
);
5961 /* Left-associative operator chain: avoid too much recursion. */
5963 if (!js_EmitTree(cx
, cg
, pn2
))
5965 top
= EmitJump(cx
, cg
, JSOP_BACKPATCH_POP
, 0);
5969 /* Emit nodes between the head and the tail. */
5971 while ((pn2
= pn2
->pn_next
)->pn_next
) {
5972 if (!js_EmitTree(cx
, cg
, pn2
))
5974 off
= EmitJump(cx
, cg
, JSOP_BACKPATCH_POP
, 0);
5977 if (!SetBackPatchDelta(cx
, cg
, CG_CODE(cg
, jmp
), off
- jmp
))
5982 if (!js_EmitTree(cx
, cg
, pn2
))
5986 off
= CG_OFFSET(cg
);
5988 pc
= CG_CODE(cg
, top
);
5989 tmp
= GetJumpOffset(cg
, pc
);
5990 CHECK_AND_SET_JUMP_OFFSET(cx
, cg
, pc
, off
- top
);
5993 } while ((pn2
= pn2
->pn_next
)->pn_next
);
5998 /* For TCF_IN_FUNCTION test, see TOK_RB concerning JSOP_NEWARRAY. */
5999 if (pn
->pn_arity
== PN_LIST
&& pn
->pn_count
< JS_BIT(16) &&
6001 /* Emit up to the first string literal conventionally. */
6002 for (pn2
= pn
->pn_head
; pn2
; pn2
= pn2
->pn_next
) {
6003 if (pn2
->pn_type
== TOK_STRING
)
6005 if (!js_EmitTree(cx
, cg
, pn2
))
6007 if (pn2
!= pn
->pn_head
&& js_Emit1(cx
, cg
, JSOP_ADD
) < 0)
6015 * Having seen a string literal, we know statically that the rest
6016 * of the additions are string concatenation, so we emit them as a
6017 * single concatn. First, do string conversion on the result of the
6018 * preceding zero or more additions so that any side effects of
6019 * string conversion occur before the next operand begins.
6021 if (pn2
== pn
->pn_head
) {
6024 if (!js_Emit1(cx
, cg
, JSOP_OBJTOSTR
))
6029 for (; pn2
; pn2
= pn2
->pn_next
, index
++) {
6030 if (!js_EmitTree(cx
, cg
, pn2
))
6032 if (!pn2
->isLiteral() && js_Emit1(cx
, cg
, JSOP_OBJTOSTR
) < 0)
6036 EMIT_UINT16_IMM_OP(JSOP_CONCATN
, index
);
6045 case TOK_INSTANCEOF
:
6050 if (pn
->pn_arity
== PN_LIST
) {
6051 /* Left-associative operator chain: avoid too much recursion. */
6053 if (!js_EmitTree(cx
, cg
, pn2
))
6056 while ((pn2
= pn2
->pn_next
) != NULL
) {
6057 if (!js_EmitTree(cx
, cg
, pn2
))
6059 if (js_Emit1(cx
, cg
, op
) < 0)
6063 #if JS_HAS_XML_SUPPORT
6067 if (pn
->pn_arity
== PN_NAME
) {
6068 if (!js_EmitTree(cx
, cg
, pn
->expr()))
6070 if (!EmitAtomOp(cx
, pn
, PN_OP(pn
), cg
))
6076 * Binary :: has a right operand that brackets arbitrary code,
6077 * possibly including a let (a = b) ... expression. We must clear
6078 * TCF_IN_FOR_INIT to avoid mis-compiling such beasts.
6080 oldflags
= cg
->flags
;
6081 cg
->flags
&= ~TCF_IN_FOR_INIT
;
6084 /* Binary operators that evaluate both operands unconditionally. */
6085 if (!js_EmitTree(cx
, cg
, pn
->pn_left
))
6087 if (!js_EmitTree(cx
, cg
, pn
->pn_right
))
6089 #if JS_HAS_XML_SUPPORT
6090 cg
->flags
|= oldflags
& TCF_IN_FOR_INIT
;
6092 if (js_Emit1(cx
, cg
, PN_OP(pn
)) < 0)
6098 #if JS_HAS_XML_SUPPORT
6101 JS_ASSERT(pn
->pn_arity
== PN_UNARY
);
6108 /* Unary op, including unary +/-. */
6110 #if JS_HAS_XML_SUPPORT
6111 if (op
== JSOP_XMLNAME
) {
6112 if (!EmitXMLName(cx
, pn
, op
, cg
))
6119 if (op
== JSOP_TYPEOF
&& pn2
->pn_type
!= TOK_NAME
)
6120 op
= JSOP_TYPEOFEXPR
;
6122 oldflags
= cg
->flags
;
6123 cg
->flags
&= ~TCF_IN_FOR_INIT
;
6124 if (!js_EmitTree(cx
, cg
, pn2
))
6126 cg
->flags
|= oldflags
& TCF_IN_FOR_INIT
;
6127 if (js_Emit1(cx
, cg
, op
) < 0)
6134 /* Emit lvalue-specialized code for ++/-- operators. */
6136 JS_ASSERT(pn2
->pn_type
!= TOK_RP
);
6138 switch (pn2
->pn_type
) {
6140 JS_ASSERT(pn2
->pn_type
== TOK_NAME
);
6142 if (!BindNameToSlot(cx
, cg
, pn2
))
6145 if (op
== JSOP_CALLEE
) {
6146 if (js_Emit1(cx
, cg
, op
) < 0)
6148 } else if (pn2
->pn_cookie
!= FREE_UPVAR_COOKIE
) {
6149 atomIndex
= (jsatomid
) pn2
->pn_cookie
;
6150 EMIT_UINT16_IMM_OP(op
, atomIndex
);
6152 JS_ASSERT(JOF_OPTYPE(op
) == JOF_ATOM
);
6153 if (!EmitAtomOp(cx
, pn2
, op
, cg
))
6157 if (pn2
->isConst()) {
6158 if (js_Emit1(cx
, cg
, JSOP_POS
) < 0)
6161 if (!(js_CodeSpec
[op
].format
& JOF_POST
)) {
6162 if (js_Emit1(cx
, cg
, JSOP_ONE
) < 0)
6164 op
= (js_CodeSpec
[op
].format
& JOF_INC
) ? JSOP_ADD
: JSOP_SUB
;
6165 if (js_Emit1(cx
, cg
, op
) < 0)
6171 if (!EmitPropOp(cx
, pn2
, op
, cg
, JS_FALSE
))
6175 if (!EmitElemOp(cx
, pn2
, op
, cg
))
6179 if (!js_EmitTree(cx
, cg
, pn2
))
6181 if (js_NewSrcNote2(cx
, cg
, SRC_PCBASE
,
6182 CG_OFFSET(cg
) - pn2
->pn_offset
) < 0) {
6185 if (js_Emit1(cx
, cg
, op
) < 0)
6188 #if JS_HAS_XML_SUPPORT
6190 JS_ASSERT(pn2
->pn_op
== JSOP_SETXMLNAME
);
6191 if (!js_EmitTree(cx
, cg
, pn2
->pn_kid
))
6193 if (js_Emit1(cx
, cg
, JSOP_BINDXMLNAME
) < 0)
6195 if (js_Emit1(cx
, cg
, op
) < 0)
6204 * Under ECMA 3, deleting a non-reference returns true -- but alas we
6205 * must evaluate the operand if it appears it might have side effects.
6208 switch (pn2
->pn_type
) {
6210 if (!BindNameToSlot(cx
, cg
, pn2
))
6213 if (op
== JSOP_FALSE
) {
6214 if (js_Emit1(cx
, cg
, op
) < 0)
6217 if (!EmitAtomOp(cx
, pn2
, op
, cg
))
6222 if (!EmitPropOp(cx
, pn2
, JSOP_DELPROP
, cg
, JS_FALSE
))
6225 #if JS_HAS_XML_SUPPORT
6227 if (!EmitElemOp(cx
, pn2
, JSOP_DELDESC
, cg
))
6232 if (!EmitElemOp(cx
, pn2
, JSOP_DELELEM
, cg
))
6237 * If useless, just emit JSOP_TRUE; otherwise convert delete foo()
6238 * to foo(), true (a comma expression, requiring SRC_PCDELTA).
6241 if (!CheckSideEffects(cx
, cg
, pn2
, &useful
))
6244 off
= noteIndex
= -1;
6246 if (pn2
->pn_op
== JSOP_SETCALL
)
6247 pn2
->pn_op
= JSOP_CALL
;
6248 if (!js_EmitTree(cx
, cg
, pn2
))
6250 off
= CG_OFFSET(cg
);
6251 noteIndex
= js_NewSrcNote2(cx
, cg
, SRC_PCDELTA
, 0);
6252 if (noteIndex
< 0 || js_Emit1(cx
, cg
, JSOP_POP
) < 0)
6255 if (js_Emit1(cx
, cg
, JSOP_TRUE
) < 0)
6257 if (noteIndex
>= 0) {
6258 tmp
= CG_OFFSET(cg
);
6259 if (!js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 0, tmp
-off
))
6265 #if JS_HAS_XML_SUPPORT
6267 if (!js_EmitTree(cx
, cg
, pn
->pn_left
))
6269 jmp
= EmitJump(cx
, cg
, JSOP_FILTER
, 0);
6272 top
= js_Emit1(cx
, cg
, JSOP_TRACE
);
6275 if (!js_EmitTree(cx
, cg
, pn
->pn_right
))
6277 CHECK_AND_SET_JUMP_OFFSET_AT(cx
, cg
, jmp
);
6278 if (EmitJump(cx
, cg
, JSOP_ENDFILTER
, top
- CG_OFFSET(cg
)) < 0)
6285 * Pop a stack operand, convert it to object, get a property named by
6286 * this bytecode's immediate-indexed atom operand, and push its value
6287 * (not a reference to it).
6289 ok
= EmitPropOp(cx
, pn
, PN_OP(pn
), cg
, JS_FALSE
);
6293 #if JS_HAS_XML_SUPPORT
6297 * Pop two operands, convert the left one to object and the right one
6298 * to property name (atom or tagged int), get the named property, and
6299 * push its value. Set the "obj" register to the result of ToObject
6300 * on the left operand.
6302 ok
= EmitElemOp(cx
, pn
, PN_OP(pn
), cg
);
6308 bool callop
= (PN_TYPE(pn
) == TOK_LP
);
6311 * Emit callable invocation or operator new (constructor call) code.
6312 * First, emit code for the left operand to evaluate the callable or
6313 * constructable object expression.
6315 * For operator new applied to other expressions than E4X ones, we emit
6316 * JSOP_GETPROP instead of JSOP_CALLPROP, etc. This is necessary to
6317 * interpose the lambda-initialized method read barrier -- see the code
6318 * in jsops.cpp for JSOP_LAMBDA followed by JSOP_{SET,INIT}PROP.
6320 * Then (or in a call case that has no explicit reference-base object)
6321 * we emit JSOP_NULL as a placeholder local GC root to hold the |this|
6322 * parameter: in the operator new case, the newborn instance; in the
6323 * base-less call case, a cookie meaning "use the global object as the
6324 * |this| value" (or in ES5 strict mode, "use undefined", so we should
6325 * use JSOP_PUSH instead of JSOP_NULL -- see bug 514570).
6328 switch (pn2
->pn_type
) {
6330 if (!EmitNameOp(cx
, cg
, pn2
, callop
))
6334 if (!EmitPropOp(cx
, pn2
, PN_OP(pn2
), cg
, callop
))
6338 JS_ASSERT(pn2
->pn_op
== JSOP_GETELEM
);
6339 if (!EmitElemOp(cx
, pn2
, callop
? JSOP_CALLELEM
: JSOP_GETELEM
, cg
))
6343 #if JS_HAS_XML_SUPPORT
6344 if (pn2
->pn_op
== JSOP_XMLNAME
) {
6345 if (!EmitXMLName(cx
, pn2
, JSOP_CALLXMLNAME
, cg
))
6347 callop
= true; /* suppress JSOP_NULL after */
6354 * Push null as a placeholder for the global object, per ECMA-262
6357 if (!js_EmitTree(cx
, cg
, pn2
))
6359 callop
= false; /* trigger JSOP_NULL after */
6362 if (!callop
&& js_Emit1(cx
, cg
, JSOP_NULL
) < 0)
6365 /* Remember start of callable-object bytecode for decompilation hint. */
6369 * Emit code for each argument in order, then emit the JSOP_*CALL or
6370 * JSOP_NEW bytecode with a two-byte immediate telling how many args
6371 * were pushed on the operand stack.
6373 uintN oldflags
= cg
->flags
;
6374 cg
->flags
&= ~TCF_IN_FOR_INIT
;
6375 for (pn3
= pn2
->pn_next
; pn3
; pn3
= pn3
->pn_next
) {
6376 if (!js_EmitTree(cx
, cg
, pn3
))
6379 cg
->flags
|= oldflags
& TCF_IN_FOR_INIT
;
6380 if (js_NewSrcNote2(cx
, cg
, SRC_PCBASE
, CG_OFFSET(cg
) - off
) < 0)
6383 argc
= pn
->pn_count
- 1;
6384 if (js_Emit3(cx
, cg
, PN_OP(pn
), ARGC_HI(argc
), ARGC_LO(argc
)) < 0)
6386 if (PN_OP(pn
) == JSOP_CALL
) {
6387 /* Add a trace hint opcode for recursion. */
6388 if (js_Emit1(cx
, cg
, JSOP_TRACE
) < 0)
6391 if (PN_OP(pn
) == JSOP_EVAL
)
6392 EMIT_UINT16_IMM_OP(JSOP_LINENO
, pn
->pn_pos
.begin
.lineno
);
6396 case TOK_LEXICALSCOPE
:
6398 JSObjectBox
*objbox
;
6401 objbox
= pn
->pn_objbox
;
6402 js_PushBlockScope(cg
, &stmtInfo
, objbox
->object
, CG_OFFSET(cg
));
6405 * If this lexical scope is not for a catch block, let block or let
6406 * expression, or any kind of for loop (where the scope starts in the
6407 * head after the first part if for (;;), else in the body if for-in);
6408 * and if our container is top-level but not a function body, or else
6409 * a block statement; then emit a SRC_BRACE note. All other container
6410 * statements get braces by default from the decompiler.
6413 type
= PN_TYPE(pn
->expr());
6414 if (type
!= TOK_CATCH
&& type
!= TOK_LET
&& type
!= TOK_FOR
&&
6415 (!(stmt
= stmtInfo
.down
)
6417 : stmt
->type
== STMT_BLOCK
)) {
6418 #if defined DEBUG_brendan || defined DEBUG_mrbkap
6419 /* There must be no source note already output for the next op. */
6420 JS_ASSERT(CG_NOTE_COUNT(cg
) == 0 ||
6421 CG_LAST_NOTE_OFFSET(cg
) != CG_OFFSET(cg
) ||
6422 !GettableNoteForNextOp(cg
));
6424 noteIndex
= js_NewSrcNote2(cx
, cg
, SRC_BRACE
, 0);
6429 JS_ASSERT(CG_OFFSET(cg
) == top
);
6430 if (!EmitEnterBlock(cx
, pn
, cg
))
6433 if (!js_EmitTree(cx
, cg
, pn
->pn_expr
))
6437 if (op
== JSOP_LEAVEBLOCKEXPR
) {
6438 if (js_NewSrcNote2(cx
, cg
, SRC_PCBASE
, CG_OFFSET(cg
) - top
) < 0)
6441 if (noteIndex
>= 0 &&
6442 !js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 0,
6443 CG_OFFSET(cg
) - top
)) {
6448 /* Emit the JSOP_LEAVEBLOCK or JSOP_LEAVEBLOCKEXPR opcode. */
6449 count
= OBJ_BLOCK_COUNT(cx
, objbox
->object
);
6450 EMIT_UINT16_IMM_OP(op
, count
);
6452 ok
= js_PopStatementCG(cx
, cg
);
6456 #if JS_HAS_BLOCK_SCOPE
6458 /* Let statements have their variable declarations on the left. */
6459 if (pn
->pn_arity
== PN_BINARY
) {
6466 /* Non-null pn2 means that pn is the variable list from a let head. */
6467 JS_ASSERT(pn
->pn_arity
== PN_LIST
);
6468 if (!EmitVariables(cx
, cg
, pn
, pn2
!= NULL
, ¬eIndex
))
6471 /* Thus non-null pn2 is the body of the let block or expression. */
6472 tmp
= CG_OFFSET(cg
);
6473 if (pn2
&& !js_EmitTree(cx
, cg
, pn2
))
6476 if (noteIndex
>= 0 &&
6477 !js_SetSrcNoteOffset(cx
, cg
, (uintN
)noteIndex
, 0,
6478 CG_OFFSET(cg
) - tmp
)) {
6482 #endif /* JS_HAS_BLOCK_SCOPE */
6484 #if JS_HAS_GENERATORS
6485 case TOK_ARRAYPUSH
: {
6489 * The array object's stack index is in cg->arrayCompDepth. See below
6490 * under the array initialiser code generator for array comprehension
6493 if (!js_EmitTree(cx
, cg
, pn
->pn_kid
))
6495 slot
= AdjustBlockSlot(cx
, cg
, cg
->arrayCompDepth
);
6498 EMIT_UINT16_IMM_OP(PN_OP(pn
), slot
);
6504 #if JS_HAS_GENERATORS
6508 * Emit code for [a, b, c] that is equivalent to constructing a new
6509 * array and in source order evaluating each element value and adding
6510 * it to the array, without invoking latent setters. We use the
6511 * JSOP_NEWINIT and JSOP_INITELEM bytecodes to ignore setters and to
6512 * avoid dup'ing and popping the array as each element is added, as
6513 * JSOP_SETELEM/JSOP_SETPROP would do.
6515 * If no sharp variable is defined, the initializer is not for an array
6516 * comprehension, the initializer is not overlarge, and the initializer
6517 * is not in global code (whose stack growth cannot be precisely modeled
6518 * due to the need to reserve space for global variables and regular
6519 * expressions), use JSOP_NEWARRAY to minimize opcodes and to create the
6520 * array using a fast, all-at-once process rather than a slow, element-
6521 * by-element process.
6523 #if JS_HAS_SHARP_VARS
6528 op
= (JS_LIKELY(pn
->pn_count
< JS_BIT(16)) && cg
->inFunction())
6532 #if JS_HAS_GENERATORS
6533 if (pn
->pn_type
== TOK_ARRAYCOMP
)
6536 #if JS_HAS_SHARP_VARS
6537 JS_ASSERT_IF(sharpnum
>= 0, cg
->hasSharps());
6538 if (cg
->hasSharps())
6542 if (op
== JSOP_NEWINIT
&& !EmitNewInit(cx
, cg
, JSProto_Array
, pn
, sharpnum
))
6545 #if JS_HAS_GENERATORS
6546 if (pn
->pn_type
== TOK_ARRAYCOMP
) {
6550 * Pass the new array's stack index to the TOK_ARRAYPUSH case via
6551 * cg->arrayCompDepth, then simply traverse the TOK_FOR node and
6552 * its kids under pn2 to generate this comprehension.
6554 JS_ASSERT(cg
->stackDepth
> 0);
6555 saveDepth
= cg
->arrayCompDepth
;
6556 cg
->arrayCompDepth
= (uint32
) (cg
->stackDepth
- 1);
6557 if (!js_EmitTree(cx
, cg
, pn
->pn_head
))
6559 cg
->arrayCompDepth
= saveDepth
;
6561 /* Emit the usual op needed for decompilation. */
6562 if (!EmitEndInit(cx
, cg
, 1))
6566 #endif /* JS_HAS_GENERATORS */
6569 for (atomIndex
= 0; pn2
; atomIndex
++, pn2
= pn2
->pn_next
) {
6570 if (op
== JSOP_NEWINIT
&& !EmitNumberOp(cx
, atomIndex
, cg
))
6572 if (pn2
->pn_type
== TOK_COMMA
&& pn2
->pn_arity
== PN_NULLARY
) {
6573 if (js_Emit1(cx
, cg
, JSOP_HOLE
) < 0)
6576 if (!js_EmitTree(cx
, cg
, pn2
))
6579 if (op
== JSOP_NEWINIT
&& js_Emit1(cx
, cg
, JSOP_INITELEM
) < 0)
6582 JS_ASSERT(atomIndex
== pn
->pn_count
);
6584 if (pn
->pn_xflags
& PNX_ENDCOMMA
) {
6585 /* Emit a source note so we know to decompile an extra comma. */
6586 if (js_NewSrcNote(cx
, cg
, SRC_CONTINUE
) < 0)
6590 if (op
== JSOP_NEWINIT
) {
6592 * Emit an op to finish the array and, secondarily, to aid in sharp
6593 * array cleanup (if JS_HAS_SHARP_VARS) and decompilation.
6595 if (!EmitEndInit(cx
, cg
, atomIndex
))
6600 JS_ASSERT(atomIndex
< JS_BIT(16));
6601 EMIT_UINT16_IMM_OP(JSOP_NEWARRAY
, atomIndex
);
6605 #if JS_HAS_SHARP_VARS
6609 #if JS_HAS_DESTRUCTURING_SHORTHAND
6610 if (pn
->pn_xflags
& PNX_DESTRUCT
) {
6611 ReportCompileErrorNumber(cx
, CG_TS(cg
), pn
, JSREPORT_ERROR
, JSMSG_BAD_OBJECT_INIT
);
6616 * Emit code for {p:a, '%q':b, 2:c} that is equivalent to constructing
6617 * a new object and in source order evaluating each property value and
6618 * adding the property to the object, without invoking latent setters.
6619 * We use the JSOP_NEWINIT and JSOP_INITELEM/JSOP_INITPROP bytecodes to
6620 * ignore setters and to avoid dup'ing and popping the object as each
6621 * property is added, as JSOP_SETELEM/JSOP_SETPROP would do.
6623 if (!EmitNewInit(cx
, cg
, JSProto_Object
, pn
, sharpnum
))
6626 uintN methodInits
= 0, slowMethodInits
= 0;
6627 for (pn2
= pn
->pn_head
; pn2
; pn2
= pn2
->pn_next
) {
6628 /* Emit an index for t[2] for later consumption by JSOP_INITELEM. */
6630 if (pn3
->pn_type
== TOK_NUMBER
) {
6631 if (!EmitNumberOp(cx
, pn3
->pn_dval
, cg
))
6635 /* Emit code for the property initializer. */
6636 if (!js_EmitTree(cx
, cg
, pn2
->pn_right
))
6640 if (op
== JSOP_GETTER
|| op
== JSOP_SETTER
) {
6641 if (js_Emit1(cx
, cg
, op
) < 0)
6645 /* Annotate JSOP_INITELEM so we decompile 2:c and not just c. */
6646 if (pn3
->pn_type
== TOK_NUMBER
) {
6647 if (js_NewSrcNote(cx
, cg
, SRC_INITPROP
) < 0)
6649 if (js_Emit1(cx
, cg
, JSOP_INITELEM
) < 0)
6652 JS_ASSERT(pn3
->pn_type
== TOK_NAME
||
6653 pn3
->pn_type
== TOK_STRING
);
6654 ale
= cg
->atomList
.add(cg
->parser
, pn3
->pn_atom
);
6658 /* Check whether we can optimize to JSOP_INITMETHOD. */
6659 JSParseNode
*init
= pn2
->pn_right
;
6660 bool lambda
= PN_OP(init
) == JSOP_LAMBDA
;
6663 if (op
== JSOP_INITPROP
&& lambda
&& init
->pn_funbox
->joinable())
6665 op
= JSOP_INITMETHOD
;
6666 pn2
->pn_op
= uint8(op
);
6673 EMIT_INDEX_OP(op
, ALE_INDEX(ale
));
6677 if (cg
->funbox
&& cg
->funbox
->shouldUnbrand(methodInits
, slowMethodInits
)) {
6678 if (js_Emit1(cx
, cg
, JSOP_UNBRAND
) < 0)
6681 if (!EmitEndInit(cx
, cg
, pn
->pn_count
))
6686 #if JS_HAS_SHARP_VARS
6688 JS_ASSERT(cg
->hasSharps());
6689 sharpnum
= pn
->pn_num
;
6691 if (pn
->pn_type
== TOK_RB
)
6693 # if JS_HAS_GENERATORS
6694 if (pn
->pn_type
== TOK_ARRAYCOMP
)
6697 if (pn
->pn_type
== TOK_RC
)
6698 goto do_emit_object
;
6700 if (!js_EmitTree(cx
, cg
, pn
))
6702 EMIT_UINT16PAIR_IMM_OP(JSOP_DEFSHARP
, cg
->sharpSlotBase
, (jsatomid
) sharpnum
);
6706 JS_ASSERT(cg
->hasSharps());
6707 EMIT_UINT16PAIR_IMM_OP(JSOP_USESHARP
, cg
->sharpSlotBase
, (jsatomid
) pn
->pn_num
);
6709 #endif /* JS_HAS_SHARP_VARS */
6713 * Cope with a left-over function definition that was replaced by a use
6714 * of a later function definition of the same name. See FunctionDef and
6715 * MakeDefIntoUse in jsparse.cpp.
6717 if (pn
->pn_op
== JSOP_NOP
)
6719 if (!EmitNameOp(cx
, cg
, pn
, JS_FALSE
))
6723 #if JS_HAS_XML_SUPPORT
6728 case TOK_XMLCOMMENT
:
6731 ok
= EmitAtomOp(cx
, pn
, PN_OP(pn
), cg
);
6735 ok
= EmitNumberOp(cx
, pn
->pn_dval
, cg
);
6740 * If the regexp's script is one-shot and the regexp is not used in a
6741 * loop, we can avoid the extra fork-on-exec costs of JSOP_REGEXP by
6742 * selecting JSOP_OBJECT. Otherwise, to avoid incorrect proto, parent,
6743 * and lastIndex sharing, select JSOP_REGEXP.
6745 JS_ASSERT(pn
->pn_op
== JSOP_REGEXP
);
6746 bool singleton
= !cg
->fun
&& cg
->compileAndGo();
6748 for (JSStmtInfo
*stmt
= cg
->topStmt
; stmt
; stmt
= stmt
->down
) {
6749 if (STMT_IS_LOOP(stmt
)) {
6756 ok
= EmitObjectOp(cx
, pn
->pn_objbox
, JSOP_OBJECT
, cg
);
6758 ok
= EmitIndexOp(cx
, JSOP_REGEXP
,
6759 cg
->regexpList
.index(pn
->pn_objbox
),
6765 #if JS_HAS_XML_SUPPORT
6769 if (js_Emit1(cx
, cg
, PN_OP(pn
)) < 0)
6773 #if JS_HAS_DEBUGGER_KEYWORD
6775 if (js_Emit1(cx
, cg
, JSOP_DEBUGGER
) < 0)
6778 #endif /* JS_HAS_DEBUGGER_KEYWORD */
6780 #if JS_HAS_XML_SUPPORT
6783 if (pn
->pn_op
== JSOP_XMLOBJECT
) {
6784 ok
= EmitObjectOp(cx
, pn
->pn_objbox
, PN_OP(pn
), cg
);
6788 JS_ASSERT(PN_TYPE(pn
) == TOK_XMLLIST
|| pn
->pn_count
!= 0);
6789 switch (pn
->pn_head
? PN_TYPE(pn
->pn_head
) : TOK_XMLLIST
) {
6797 if (js_Emit1(cx
, cg
, JSOP_STARTXML
) < 0)
6801 for (pn2
= pn
->pn_head
; pn2
; pn2
= pn2
->pn_next
) {
6802 if (pn2
->pn_type
== TOK_LC
&&
6803 js_Emit1(cx
, cg
, JSOP_STARTXMLEXPR
) < 0) {
6806 if (!js_EmitTree(cx
, cg
, pn2
))
6808 if (pn2
!= pn
->pn_head
&& js_Emit1(cx
, cg
, JSOP_ADD
) < 0)
6812 if (pn
->pn_xflags
& PNX_XMLROOT
) {
6813 if (pn
->pn_count
== 0) {
6814 JS_ASSERT(pn
->pn_type
== TOK_XMLLIST
);
6815 atom
= cx
->runtime
->atomState
.emptyAtom
;
6816 ale
= cg
->atomList
.add(cg
->parser
, atom
);
6819 EMIT_INDEX_OP(JSOP_STRING
, ALE_INDEX(ale
));
6821 if (js_Emit1(cx
, cg
, PN_OP(pn
)) < 0)
6826 JS_ASSERT(pn
->pn_count
!= 0);
6831 if (pn
->pn_op
== JSOP_XMLOBJECT
) {
6832 ok
= EmitObjectOp(cx
, pn
->pn_objbox
, PN_OP(pn
), cg
);
6842 if (js_Emit1(cx
, cg
, JSOP_STARTXML
) < 0)
6845 ale
= cg
->atomList
.add(cg
->parser
,
6846 (pn
->pn_type
== TOK_XMLETAGO
)
6847 ? cx
->runtime
->atomState
.etagoAtom
6848 : cx
->runtime
->atomState
.stagoAtom
);
6851 EMIT_INDEX_OP(JSOP_STRING
, ALE_INDEX(ale
));
6853 JS_ASSERT(pn
->pn_count
!= 0);
6855 if (pn2
->pn_type
== TOK_LC
&& js_Emit1(cx
, cg
, JSOP_STARTXMLEXPR
) < 0)
6857 if (!js_EmitTree(cx
, cg
, pn2
))
6859 if (js_Emit1(cx
, cg
, JSOP_ADD
) < 0)
6862 for (pn2
= pn2
->pn_next
, i
= 0; pn2
; pn2
= pn2
->pn_next
, i
++) {
6863 if (pn2
->pn_type
== TOK_LC
&&
6864 js_Emit1(cx
, cg
, JSOP_STARTXMLEXPR
) < 0) {
6867 if (!js_EmitTree(cx
, cg
, pn2
))
6869 if ((i
& 1) && pn2
->pn_type
== TOK_LC
) {
6870 if (js_Emit1(cx
, cg
, JSOP_TOATTRVAL
) < 0)
6873 if (js_Emit1(cx
, cg
,
6874 (i
& 1) ? JSOP_ADDATTRVAL
: JSOP_ADDATTRNAME
) < 0) {
6879 ale
= cg
->atomList
.add(cg
->parser
,
6880 (pn
->pn_type
== TOK_XMLPTAGC
)
6881 ? cx
->runtime
->atomState
.ptagcAtom
6882 : cx
->runtime
->atomState
.tagcAtom
);
6885 EMIT_INDEX_OP(JSOP_STRING
, ALE_INDEX(ale
));
6886 if (js_Emit1(cx
, cg
, JSOP_ADD
) < 0)
6889 if ((pn
->pn_xflags
& PNX_XMLROOT
) && js_Emit1(cx
, cg
, PN_OP(pn
)) < 0)
6895 if (pn
->pn_arity
== PN_LIST
) {
6896 JS_ASSERT(pn
->pn_count
!= 0);
6897 for (pn2
= pn
->pn_head
; pn2
; pn2
= pn2
->pn_next
) {
6898 if (pn2
->pn_type
== TOK_LC
&&
6899 js_Emit1(cx
, cg
, JSOP_STARTXMLEXPR
) < 0) {
6902 if (!js_EmitTree(cx
, cg
, pn2
))
6904 if (pn2
!= pn
->pn_head
&& js_Emit1(cx
, cg
, JSOP_ADD
) < 0)
6908 JS_ASSERT(pn
->pn_arity
== PN_NULLARY
);
6909 ok
= (pn
->pn_op
== JSOP_OBJECT
)
6910 ? EmitObjectOp(cx
, pn
->pn_objbox
, PN_OP(pn
), cg
)
6911 : EmitAtomOp(cx
, pn
, PN_OP(pn
), cg
);
6916 ale
= cg
->atomList
.add(cg
->parser
, pn
->pn_atom2
);
6919 if (!EmitIndexOp(cx
, JSOP_QNAMEPART
, ALE_INDEX(ale
), cg
))
6921 if (!EmitAtomOp(cx
, pn
, JSOP_XMLPI
, cg
))
6924 #endif /* JS_HAS_XML_SUPPORT */
6930 if (ok
&& --cg
->emitLevel
== 0) {
6932 ok
= OptimizeSpanDeps(cx
, cg
);
6933 if (!UpdateLineNumberNotes(cx
, cg
, pn
->pn_pos
.end
.lineno
))
6941 * We should try to get rid of offsetBias (always 0 or 1, where 1 is
6942 * JSOP_{NOP,POP}_LENGTH), which is used only by SRC_FOR and SRC_DECL.
6944 JS_FRIEND_DATA(JSSrcNoteSpec
) js_SrcNoteSpec
[] = {
6947 {"if-else", 2, 0, 1},
6950 {"continue", 0, 0, 0},
6952 {"pcdelta", 1, 0, 1},
6953 {"assignop", 0, 0, 0},
6956 {"hidden", 0, 0, 0},
6957 {"pcbase", 1, 0, -1},
6959 {"labelbrace", 1, 0, 0},
6960 {"endbrace", 0, 0, 0},
6961 {"break2label", 1, 0, 0},
6962 {"cont2label", 1, 0, 0},
6963 {"switch", 2, 0, 1},
6964 {"funcdef", 1, 0, 0},
6966 {"extended", -1, 0, 0},
6967 {"newline", 0, 0, 0},
6968 {"setline", 1, 0, 0},
6969 {"xdelta", 0, 0, 0},
6973 AllocSrcNote(JSContext
*cx
, JSCodeGenerator
*cg
)
6979 index
= CG_NOTE_COUNT(cg
);
6980 if (((uintN
)index
& CG_NOTE_MASK(cg
)) == 0) {
6981 pool
= cg
->notePool
;
6982 size
= SRCNOTE_SIZE(CG_NOTE_MASK(cg
) + 1);
6983 if (!CG_NOTES(cg
)) {
6984 /* Allocate the first note array lazily; leave noteMask alone. */
6985 pool
->allocateCast
<jssrcnote
*>(CG_NOTES(cg
), size
);
6987 /* Grow by doubling note array size; update noteMask on success. */
6988 pool
->growCast
<jssrcnote
*>(CG_NOTES(cg
), size
, size
);
6990 CG_NOTE_MASK(cg
) = (CG_NOTE_MASK(cg
) << 1) | 1;
6992 if (!CG_NOTES(cg
)) {
6993 js_ReportOutOfScriptQuota(cx
);
6998 CG_NOTE_COUNT(cg
) = index
+ 1;
7003 js_NewSrcNote(JSContext
*cx
, JSCodeGenerator
*cg
, JSSrcNoteType type
)
7007 ptrdiff_t offset
, delta
, xdelta
;
7010 * Claim a note slot in CG_NOTES(cg) by growing it if necessary and then
7011 * incrementing CG_NOTE_COUNT(cg).
7013 index
= AllocSrcNote(cx
, cg
);
7016 sn
= &CG_NOTES(cg
)[index
];
7019 * Compute delta from the last annotated bytecode's offset. If it's too
7020 * big to fit in sn, allocate one or more xdelta notes and reset sn.
7022 offset
= CG_OFFSET(cg
);
7023 delta
= offset
- CG_LAST_NOTE_OFFSET(cg
);
7024 CG_LAST_NOTE_OFFSET(cg
) = offset
;
7025 if (delta
>= SN_DELTA_LIMIT
) {
7027 xdelta
= JS_MIN(delta
, SN_XDELTA_MASK
);
7028 SN_MAKE_XDELTA(sn
, xdelta
);
7030 index
= AllocSrcNote(cx
, cg
);
7033 sn
= &CG_NOTES(cg
)[index
];
7034 } while (delta
>= SN_DELTA_LIMIT
);
7038 * Initialize type and delta, then allocate the minimum number of notes
7039 * needed for type's arity. Usually, we won't need more, but if an offset
7040 * does take two bytes, js_SetSrcNoteOffset will grow CG_NOTES(cg).
7042 SN_MAKE_NOTE(sn
, type
, delta
);
7043 for (n
= (intN
)js_SrcNoteSpec
[type
].arity
; n
> 0; n
--) {
7044 if (js_NewSrcNote(cx
, cg
, SRC_NULL
) < 0)
7051 js_NewSrcNote2(JSContext
*cx
, JSCodeGenerator
*cg
, JSSrcNoteType type
,
7056 index
= js_NewSrcNote(cx
, cg
, type
);
7058 if (!js_SetSrcNoteOffset(cx
, cg
, index
, 0, offset
))
7065 js_NewSrcNote3(JSContext
*cx
, JSCodeGenerator
*cg
, JSSrcNoteType type
,
7066 ptrdiff_t offset1
, ptrdiff_t offset2
)
7070 index
= js_NewSrcNote(cx
, cg
, type
);
7072 if (!js_SetSrcNoteOffset(cx
, cg
, index
, 0, offset1
))
7074 if (!js_SetSrcNoteOffset(cx
, cg
, index
, 1, offset2
))
7081 GrowSrcNotes(JSContext
*cx
, JSCodeGenerator
*cg
)
7086 /* Grow by doubling note array size; update noteMask on success. */
7087 pool
= cg
->notePool
;
7088 size
= SRCNOTE_SIZE(CG_NOTE_MASK(cg
) + 1);
7089 pool
->growCast
<jssrcnote
*>(CG_NOTES(cg
), size
, size
);
7090 if (!CG_NOTES(cg
)) {
7091 js_ReportOutOfScriptQuota(cx
);
7094 CG_NOTE_MASK(cg
) = (CG_NOTE_MASK(cg
) << 1) | 1;
7099 js_AddToSrcNoteDelta(JSContext
*cx
, JSCodeGenerator
*cg
, jssrcnote
*sn
,
7102 ptrdiff_t base
, limit
, newdelta
, diff
;
7106 * Called only from OptimizeSpanDeps and js_FinishTakingSrcNotes to add to
7107 * main script note deltas, and only by a small positive amount.
7109 JS_ASSERT(cg
->current
== &cg
->main
);
7110 JS_ASSERT((unsigned) delta
< (unsigned) SN_XDELTA_LIMIT
);
7112 base
= SN_DELTA(sn
);
7113 limit
= SN_IS_XDELTA(sn
) ? SN_XDELTA_LIMIT
: SN_DELTA_LIMIT
;
7114 newdelta
= base
+ delta
;
7115 if (newdelta
< limit
) {
7116 SN_SET_DELTA(sn
, newdelta
);
7118 index
= sn
- cg
->main
.notes
;
7119 if ((cg
->main
.noteCount
& cg
->main
.noteMask
) == 0) {
7120 if (!GrowSrcNotes(cx
, cg
))
7122 sn
= cg
->main
.notes
+ index
;
7124 diff
= cg
->main
.noteCount
- index
;
7125 cg
->main
.noteCount
++;
7126 memmove(sn
+ 1, sn
, SRCNOTE_SIZE(diff
));
7127 SN_MAKE_XDELTA(sn
, delta
);
7133 JS_FRIEND_API(uintN
)
7134 js_SrcNoteLength(jssrcnote
*sn
)
7139 arity
= (intN
)js_SrcNoteSpec
[SN_TYPE(sn
)].arity
;
7140 for (base
= sn
++; arity
; sn
++, arity
--) {
7141 if (*sn
& SN_3BYTE_OFFSET_FLAG
)
7147 JS_FRIEND_API(ptrdiff_t)
7148 js_GetSrcNoteOffset(jssrcnote
*sn
, uintN which
)
7150 /* Find the offset numbered which (i.e., skip exactly which offsets). */
7151 JS_ASSERT(SN_TYPE(sn
) != SRC_XDELTA
);
7152 JS_ASSERT((intN
) which
< js_SrcNoteSpec
[SN_TYPE(sn
)].arity
);
7153 for (sn
++; which
; sn
++, which
--) {
7154 if (*sn
& SN_3BYTE_OFFSET_FLAG
)
7157 if (*sn
& SN_3BYTE_OFFSET_FLAG
) {
7158 return (ptrdiff_t)(((uint32
)(sn
[0] & SN_3BYTE_OFFSET_MASK
) << 16)
7162 return (ptrdiff_t)*sn
;
7166 js_SetSrcNoteOffset(JSContext
*cx
, JSCodeGenerator
*cg
, uintN index
,
7167 uintN which
, ptrdiff_t offset
)
7172 if ((jsuword
)offset
>= (jsuword
)((ptrdiff_t)SN_3BYTE_OFFSET_FLAG
<< 16)) {
7173 ReportStatementTooLarge(cx
, cg
);
7177 /* Find the offset numbered which (i.e., skip exactly which offsets). */
7178 sn
= &CG_NOTES(cg
)[index
];
7179 JS_ASSERT(SN_TYPE(sn
) != SRC_XDELTA
);
7180 JS_ASSERT((intN
) which
< js_SrcNoteSpec
[SN_TYPE(sn
)].arity
);
7181 for (sn
++; which
; sn
++, which
--) {
7182 if (*sn
& SN_3BYTE_OFFSET_FLAG
)
7186 /* See if the new offset requires three bytes. */
7187 if (offset
> (ptrdiff_t)SN_3BYTE_OFFSET_MASK
) {
7188 /* Maybe this offset was already set to a three-byte value. */
7189 if (!(*sn
& SN_3BYTE_OFFSET_FLAG
)) {
7190 /* Losing, need to insert another two bytes for this offset. */
7191 index
= sn
- CG_NOTES(cg
);
7194 * Simultaneously test to see if the source note array must grow to
7195 * accommodate either the first or second byte of additional storage
7196 * required by this 3-byte offset.
7198 if (((CG_NOTE_COUNT(cg
) + 1) & CG_NOTE_MASK(cg
)) <= 1) {
7199 if (!GrowSrcNotes(cx
, cg
))
7201 sn
= CG_NOTES(cg
) + index
;
7203 CG_NOTE_COUNT(cg
) += 2;
7205 diff
= CG_NOTE_COUNT(cg
) - (index
+ 3);
7206 JS_ASSERT(diff
>= 0);
7208 memmove(sn
+ 3, sn
+ 1, SRCNOTE_SIZE(diff
));
7210 *sn
++ = (jssrcnote
)(SN_3BYTE_OFFSET_FLAG
| (offset
>> 16));
7211 *sn
++ = (jssrcnote
)(offset
>> 8);
7213 *sn
= (jssrcnote
)offset
;
7218 #define DEBUG_srcnotesize
7221 #ifdef DEBUG_srcnotesize
7223 static uint32 hist
[NBINS
];
7225 void DumpSrcNoteSizeHist()
7231 fp
= fopen("/tmp/srcnotes.hist", "w");
7234 setvbuf(fp
, NULL
, _IONBF
, 0);
7236 fprintf(fp
, "SrcNote size histogram:\n");
7237 for (i
= 0; i
< NBINS
; i
++) {
7238 fprintf(fp
, "%4u %4u ", JS_BIT(i
), hist
[i
]);
7239 for (n
= (int) JS_HOWMANY(hist
[i
], 10); n
> 0; --n
)
7248 * Fill in the storage at notes with prolog and main srcnotes; the space at
7249 * notes was allocated using the CG_COUNT_FINAL_SRCNOTES macro from jsemit.h.
7250 * SO DON'T CHANGE THIS FUNCTION WITHOUT AT LEAST CHECKING WHETHER jsemit.h's
7251 * CG_COUNT_FINAL_SRCNOTES MACRO NEEDS CORRESPONDING CHANGES!
7254 js_FinishTakingSrcNotes(JSContext
*cx
, JSCodeGenerator
*cg
, jssrcnote
*notes
)
7256 uintN prologCount
, mainCount
, totalCount
;
7257 ptrdiff_t offset
, delta
;
7260 JS_ASSERT(cg
->current
== &cg
->main
);
7262 prologCount
= cg
->prolog
.noteCount
;
7263 if (prologCount
&& cg
->prolog
.currentLine
!= cg
->firstLine
) {
7264 CG_SWITCH_TO_PROLOG(cg
);
7265 if (js_NewSrcNote2(cx
, cg
, SRC_SETLINE
, (ptrdiff_t)cg
->firstLine
) < 0)
7267 prologCount
= cg
->prolog
.noteCount
;
7268 CG_SWITCH_TO_MAIN(cg
);
7271 * Either no prolog srcnotes, or no line number change over prolog.
7272 * We don't need a SRC_SETLINE, but we may need to adjust the offset
7273 * of the first main note, by adding to its delta and possibly even
7274 * prepending SRC_XDELTA notes to it to account for prolog bytecodes
7275 * that came at and after the last annotated bytecode.
7277 offset
= CG_PROLOG_OFFSET(cg
) - cg
->prolog
.lastNoteOffset
;
7278 JS_ASSERT(offset
>= 0);
7279 if (offset
> 0 && cg
->main
.noteCount
!= 0) {
7280 /* NB: Use as much of the first main note's delta as we can. */
7281 sn
= cg
->main
.notes
;
7282 delta
= SN_IS_XDELTA(sn
)
7283 ? SN_XDELTA_MASK
- (*sn
& SN_XDELTA_MASK
)
7284 : SN_DELTA_MASK
- (*sn
& SN_DELTA_MASK
);
7288 if (!js_AddToSrcNoteDelta(cx
, cg
, sn
, delta
))
7293 delta
= JS_MIN(offset
, SN_XDELTA_MASK
);
7294 sn
= cg
->main
.notes
;
7299 mainCount
= cg
->main
.noteCount
;
7300 totalCount
= prologCount
+ mainCount
;
7302 memcpy(notes
, cg
->prolog
.notes
, SRCNOTE_SIZE(prologCount
));
7303 memcpy(notes
+ prologCount
, cg
->main
.notes
, SRCNOTE_SIZE(mainCount
));
7304 SN_MAKE_TERMINATOR(¬es
[totalCount
]);
7307 { int bin
= JS_CeilingLog2(totalCount
);
7317 NewTryNote(JSContext
*cx
, JSCodeGenerator
*cg
, JSTryNoteKind kind
,
7318 uintN stackDepth
, size_t start
, size_t end
)
7322 JS_ASSERT((uintN
)(uint16
)stackDepth
== stackDepth
);
7323 JS_ASSERT(start
<= end
);
7324 JS_ASSERT((size_t)(uint32
)start
== start
);
7325 JS_ASSERT((size_t)(uint32
)end
== end
);
7327 cx
->tempPool
.allocateType
<JSTryNode
>(tryNode
);
7329 js_ReportOutOfScriptQuota(cx
);
7333 tryNode
->note
.kind
= kind
;
7334 tryNode
->note
.stackDepth
= (uint16
)stackDepth
;
7335 tryNode
->note
.start
= (uint32
)start
;
7336 tryNode
->note
.length
= (uint32
)(end
- start
);
7337 tryNode
->prev
= cg
->lastTryNode
;
7338 cg
->lastTryNode
= tryNode
;
7344 js_FinishTakingTryNotes(JSCodeGenerator
*cg
, JSTryNoteArray
*array
)
7349 JS_ASSERT(array
->length
> 0 && array
->length
== cg
->ntrynotes
);
7350 tn
= array
->vector
+ array
->length
;
7351 tryNode
= cg
->lastTryNode
;
7353 *--tn
= tryNode
->note
;
7354 } while ((tryNode
= tryNode
->prev
) != NULL
);
7355 JS_ASSERT(tn
== array
->vector
);
7359 * Find the index of the given object for code generator.
7361 * Since the emitter refers to each parsed object only once, for the index we
7362 * use the number of already indexes objects. We also add the object to a list
7363 * to convert the list to a fixed-size array when we complete code generation,
7364 * see JSCGObjectList::finish below.
7366 * Most of the objects go to JSCodeGenerator.objectList but for regexp we use a
7367 * separated JSCodeGenerator.regexpList. In this way the emitted index can be
7368 * directly used to store and fetch a reference to a cloned RegExp object that
7369 * shares the same JSRegExp private data created for the object literal in
7370 * objbox. We need a cloned object to hold lastIndex and other direct properties
7371 * that should not be shared among threads sharing a precompiled function or
7374 * If the code being compiled is function code, allocate a reserved slot in
7375 * the cloned function object that shares its precompiled script with other
7376 * cloned function objects and with the compiler-created clone-parent. There
7377 * are nregexps = script->regexps()->length such reserved slots in each
7378 * function object cloned from fun->object. NB: during compilation, a funobj
7379 * slots element must never be allocated, because js_AllocSlot could hand out
7380 * one of the slots that should be given to a regexp clone.
7382 * If the code being compiled is global code, the cloned regexp are stored in
7383 * fp->vars slot after cg->ngvars and to protect regexp slots from GC we set
7384 * fp->nvars to ngvars + nregexps.
7386 * The slots initially contain undefined or null. We populate them lazily when
7387 * JSOP_REGEXP is executed for the first time.
7389 * Why clone regexp objects? ECMA specifies that when a regular expression
7390 * literal is scanned, a RegExp object is created. In the spec, compilation
7391 * and execution happen indivisibly, but in this implementation and many of
7392 * its embeddings, code is precompiled early and re-executed in multiple
7393 * threads, or using multiple global objects, or both, for efficiency.
7395 * In such cases, naively following ECMA leads to wrongful sharing of RegExp
7396 * objects, which makes for collisions on the lastIndex property (especially
7397 * for global regexps) and on any ad-hoc properties. Also, __proto__ refers to
7398 * the pre-compilation prototype, a pigeon-hole problem for instanceof tests.
7401 JSCGObjectList::index(JSObjectBox
*objbox
)
7403 JS_ASSERT(!objbox
->emitLink
);
7404 objbox
->emitLink
= lastbox
;
7410 JSCGObjectList::finish(JSObjectArray
*array
)
7413 JSObjectBox
*objbox
;
7415 JS_ASSERT(length
<= INDEX_LIMIT
);
7416 JS_ASSERT(length
== array
->length
);
7418 cursor
= array
->vector
+ array
->length
;
7422 JS_ASSERT(!*cursor
);
7423 *cursor
= objbox
->object
;
7424 } while ((objbox
= objbox
->emitLink
) != NULL
);
7425 JS_ASSERT(cursor
== array
->vector
);