Bug 551763: Fix deletion of arguments ident. (r=Waldo)
[mozilla-central.git] / js / src / jsemit.cpp
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sw=4 et tw=99:
 *
 * ***** BEGIN LICENSE BLOCK *****
 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
 *
 * The contents of this file are subject to the Mozilla Public License Version
 * 1.1 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * http://www.mozilla.org/MPL/
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
 *
 * The Original Code is Mozilla Communicator client code, released
 * March 31, 1998.
 *
 * The Initial Developer of the Original Code is
 * Netscape Communications Corporation.
 * Portions created by the Initial Developer are Copyright (C) 1998
 * the Initial Developer. All Rights Reserved.
 *
 * Contributor(s):
 *
 * Alternatively, the contents of this file may be used under the terms of
 * either of the GNU General Public License Version 2 or later (the "GPL"),
 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
 * in which case the provisions of the GPL or the LGPL are applicable instead
 * of those above. If you wish to allow use of your version of this file only
 * under the terms of either the GPL or the LGPL, and not to allow others to
 * use your version of this file under the terms of the MPL, indicate your
 * decision by deleting the provisions above and replace them with the notice
 * and other provisions required by the GPL or the LGPL. If you do not delete
 * the provisions above, a recipient may use your version of this file under
 * the terms of any one of the MPL, the GPL or the LGPL.
 *
 * ***** END LICENSE BLOCK ***** */
/*
 * JS bytecode generation.
 */
#ifdef HAVE_MEMORY_H
#include <memory.h>
#endif
#include <new>
#include <string.h>
#include "jstypes.h"
#include "jsstdint.h"
#include "jsarena.h" /* Added by JSIFY */
#include "jsutil.h" /* Added by JSIFY */
#include "jsbit.h"
#include "jsprf.h"
#include "jsapi.h"
#include "jsatom.h"
#include "jsbool.h"
#include "jscntxt.h"
#include "jsversion.h"
#include "jsemit.h"
#include "jsfun.h"
#include "jsnum.h"
#include "jsopcode.h"
#include "jsparse.h"
#include "jsregexp.h"
#include "jsscan.h"
#include "jsscope.h"
#include "jsscript.h"
#include "jsautooplen.h"
#include "jsstaticcheck.h"
/* Allocation chunk counts, must be powers of two in general. */
#define BYTECODE_CHUNK  256     /* code allocation increment */
#define SRCNOTE_CHUNK   64      /* initial srcnote allocation increment */
#define TRYNOTE_CHUNK   64      /* trynote allocation increment */

/* Macros to compute byte sizes from typed element counts. */
#define BYTECODE_SIZE(n)        ((n) * sizeof(jsbytecode))
#define SRCNOTE_SIZE(n)         ((n) * sizeof(jssrcnote))
#define TRYNOTE_SIZE(n)         ((n) * sizeof(JSTryNote))

using namespace js;

static JSBool
NewTryNote(JSContext *cx, JSCodeGenerator *cg, JSTryNoteKind kind,
           uintN stackDepth, size_t start, size_t end);
JSCodeGenerator::JSCodeGenerator(Parser *parser,
                                 JSArenaPool *cpool, JSArenaPool *npool,
                                 uintN lineno)
  : JSTreeContext(parser),
    codePool(cpool), notePool(npool),
    codeMark(JS_ARENA_MARK(cpool)), noteMark(JS_ARENA_MARK(npool)),
    stackDepth(0), maxStackDepth(0),
    ntrynotes(0), lastTryNode(NULL),
    spanDeps(NULL), jumpTargets(NULL), jtFreeList(NULL),
    numSpanDeps(0), numJumpTargets(0), spanDepTodo(0),
    arrayCompDepth(0),
    emitLevel(0),
    constMap(parser->context)
{
    flags = TCF_COMPILING;
    memset(&prolog, 0, sizeof prolog);
    memset(&main, 0, sizeof main);
    current = &main;
    firstLine = prolog.currentLine = main.currentLine = lineno;
    prolog.noteMask = main.noteMask = SRCNOTE_CHUNK - 1;
    memset(&upvarMap, 0, sizeof upvarMap);
}

bool JSCodeGenerator::init()
{
    return constMap.init();
}

JSCodeGenerator::~JSCodeGenerator()
{
    JS_ARENA_RELEASE(codePool, codeMark);
    JS_ARENA_RELEASE(notePool, noteMark);

    /* NB: non-null only after OOM. */
    if (spanDeps)
        parser->context->free(spanDeps);

    if (upvarMap.vector)
        parser->context->free(upvarMap.vector);
}
static ptrdiff_t
EmitCheck(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t delta)
{
    jsbytecode *base, *limit, *next;
    ptrdiff_t offset, length;
    size_t incr, size;

    base = CG_BASE(cg);
    next = CG_NEXT(cg);
    limit = CG_LIMIT(cg);
    offset = next - base;
    if (next + delta > limit) {
        length = offset + delta;
        length = (length <= BYTECODE_CHUNK)
                 ? BYTECODE_CHUNK
                 : JS_BIT(JS_CeilingLog2(length));
        incr = BYTECODE_SIZE(length);
        if (!base) {
            JS_ARENA_ALLOCATE_CAST(base, jsbytecode *, cg->codePool, incr);
        } else {
            size = BYTECODE_SIZE(limit - base);
            incr -= size;
            JS_ARENA_GROW_CAST(base, jsbytecode *, cg->codePool, size, incr);
        }
        if (!base) {
            js_ReportOutOfScriptQuota(cx);
            return -1;
        }
        CG_BASE(cg) = base;
        CG_LIMIT(cg) = base + length;
        CG_NEXT(cg) = base + offset;
    }
    return offset;
}
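/*
 * A worked example of the growth policy above: with BYTECODE_CHUNK == 256,
 * a request that brings the needed length to 300 bytecodes rounds up to
 * JS_BIT(JS_CeilingLog2(300)) == 512 slots, so the arena is regrown only
 * O(log n) times over n emitted bytecodes.
 */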
static void
UpdateDepth(JSContext *cx, JSCodeGenerator *cg, ptrdiff_t target)
{
    jsbytecode *pc;
    JSOp op;
    const JSCodeSpec *cs;
    uintN extra, depth, nuses;
    intN ndefs;

    pc = CG_CODE(cg, target);
    op = (JSOp) *pc;
    cs = &js_CodeSpec[op];
#ifdef JS_TRACER
    extern uint8 js_opcode2extra[];
    extra = js_opcode2extra[op];
#else
    extra = 0;
#endif
    if ((cs->format & JOF_TMPSLOT_MASK) || extra) {
        depth = (uintN) cg->stackDepth +
                ((cs->format & JOF_TMPSLOT_MASK) >> JOF_TMPSLOT_SHIFT) +
                extra;
        if (depth > cg->maxStackDepth)
            cg->maxStackDepth = depth;
    }

    nuses = js_GetStackUses(cs, op, pc);
    cg->stackDepth -= nuses;
    JS_ASSERT(cg->stackDepth >= 0);
    if (cg->stackDepth < 0) {
        char numBuf[12];
        TokenStream *ts;

        JS_snprintf(numBuf, sizeof numBuf, "%d", target);
        ts = &cg->parser->tokenStream;
        JS_ReportErrorFlagsAndNumber(cx, JSREPORT_WARNING,
                                     js_GetErrorMessage, NULL,
                                     JSMSG_STACK_UNDERFLOW,
                                     ts->getFilename() ? ts->getFilename() : "stdin",
                                     numBuf);
    }
    ndefs = cs->ndefs;
    if (ndefs < 0) {
        JSObject *blockObj;

        /* We just executed IndexParsedObject */
        JS_ASSERT(op == JSOP_ENTERBLOCK);
        JS_ASSERT(nuses == 0);
        blockObj = cg->objectList.lastbox->object;
        JS_ASSERT(blockObj->getClass() == &js_BlockClass);
        JS_ASSERT(JSVAL_IS_VOID(blockObj->fslots[JSSLOT_BLOCK_DEPTH]));

        OBJ_SET_BLOCK_DEPTH(cx, blockObj, cg->stackDepth);
        ndefs = OBJ_BLOCK_COUNT(cx, blockObj);
    }
    cg->stackDepth += ndefs;
    if ((uintN)cg->stackDepth > cg->maxStackDepth)
        cg->maxStackDepth = cg->stackDepth;
}

ptrdiff_t
js_Emit1(JSContext *cx, JSCodeGenerator *cg, JSOp op)
{
    ptrdiff_t offset = EmitCheck(cx, cg, op, 1);

    if (offset >= 0) {
        *CG_NEXT(cg)++ = (jsbytecode)op;
        UpdateDepth(cx, cg, offset);
    }
    return offset;
}

ptrdiff_t
js_Emit2(JSContext *cx, JSCodeGenerator *cg, JSOp op, jsbytecode op1)
{
    ptrdiff_t offset = EmitCheck(cx, cg, op, 2);

    if (offset >= 0) {
        jsbytecode *next = CG_NEXT(cg);
        next[0] = (jsbytecode)op;
        next[1] = op1;
        CG_NEXT(cg) = next + 2;
        UpdateDepth(cx, cg, offset);
    }
    return offset;
}

ptrdiff_t
js_Emit3(JSContext *cx, JSCodeGenerator *cg, JSOp op, jsbytecode op1,
         jsbytecode op2)
{
    ptrdiff_t offset = EmitCheck(cx, cg, op, 3);

    if (offset >= 0) {
        jsbytecode *next = CG_NEXT(cg);
        next[0] = (jsbytecode)op;
        next[1] = op1;
        next[2] = op2;
        CG_NEXT(cg) = next + 3;
        UpdateDepth(cx, cg, offset);
    }
    return offset;
}

ptrdiff_t
js_EmitN(JSContext *cx, JSCodeGenerator *cg, JSOp op, size_t extra)
{
    ptrdiff_t length = 1 + (ptrdiff_t)extra;
    ptrdiff_t offset = EmitCheck(cx, cg, op, length);

    if (offset >= 0) {
        jsbytecode *next = CG_NEXT(cg);
        *next = (jsbytecode)op;
        memset(next + 1, 0, BYTECODE_SIZE(extra));
        CG_NEXT(cg) = next + length;

        /*
         * Don't UpdateDepth if op's use-count comes from the immediate
         * operand yet to be stored in the extra bytes after op.
         */
        if (js_CodeSpec[op].nuses >= 0)
            UpdateDepth(cx, cg, offset);
    }
    return offset;
}
/* XXX too many "... statement" L10N gaffes below -- fix via js.msg! */
const char js_with_statement_str[] = "with statement";
const char js_finally_block_str[]  = "finally block";
const char js_script_str[]         = "script";

static const char *statementName[] = {
    "label statement",       /* LABEL */
    "if statement",          /* IF */
    "else statement",        /* ELSE */
    "destructuring body",    /* BODY */
    "switch statement",      /* SWITCH */
    "block",                 /* BLOCK */
    js_with_statement_str,   /* WITH */
    "catch block",           /* CATCH */
    "try block",             /* TRY */
    js_finally_block_str,    /* FINALLY */
    js_finally_block_str,    /* SUBROUTINE */
    "do loop",               /* DO_LOOP */
    "for loop",              /* FOR_LOOP */
    "for/in loop",           /* FOR_IN_LOOP */
    "while loop",            /* WHILE_LOOP */
};

JS_STATIC_ASSERT(JS_ARRAY_LENGTH(statementName) == STMT_LIMIT);

static const char *
StatementName(JSCodeGenerator *cg)
{
    if (!cg->topStmt)
        return js_script_str;
    return statementName[cg->topStmt->type];
}

static void
ReportStatementTooLarge(JSContext *cx, JSCodeGenerator *cg)
{
    JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_NEED_DIET,
                         StatementName(cg));
}
/**
  Span-dependent instructions in JS bytecode consist of the jump (JOF_JUMP)
  and switch (JOF_LOOKUPSWITCH, JOF_TABLESWITCH) format opcodes, subdivided
  into unconditional (gotos and gosubs), and conditional jumps or branches
  (which pop a value, test it, and jump depending on its value). Most jumps
  have just one immediate operand, a signed offset from the jump opcode's pc
  to the target bytecode. The lookup and table switch opcodes may contain
  many jump offsets.

  Mozilla bug #80981 (http://bugzilla.mozilla.org/show_bug.cgi?id=80981) was
  fixed by adding extended "X" counterparts to the opcodes/formats (NB: X is
  suffixed to prefer JSOP_ORX thereby avoiding a JSOP_XOR name collision for
  the extended form of the JSOP_OR branch opcode). The unextended or short
  formats have 16-bit signed immediate offset operands, the extended or long
  formats have 32-bit signed immediates. The span-dependency problem consists
  of selecting as few long instructions as possible, or about as few -- since
  jumps can span other jumps, extending one jump may cause another to need to
  be extended.

  Most JS scripts are short, so need no extended jumps. We optimize for this
  case by generating short jumps until we know a long jump is needed. After
  that point, we keep generating short jumps, but each jump's 16-bit immediate
  offset operand is actually an unsigned index into cg->spanDeps, an array of
  JSSpanDep structs. Each struct tells the top offset in the script of the
  opcode, the "before" offset of the jump (which will be the same as top for
  simplex jumps, but which will index further into the bytecode array for a
  non-initial jump offset in a lookup or table switch), the after "offset"
  adjusted during span-dependent instruction selection (initially the same
  value as the "before" offset), and the jump target (more below).

  Since we generate cg->spanDeps lazily, from within js_SetJumpOffset, we must
  ensure that all bytecode generated so far can be inspected to discover where
  the jump offset immediate operands lie within CG_CODE(cg). But the bonus is
  that we generate span-dependency records sorted by their offsets, so we can
  binary-search when trying to find a JSSpanDep for a given bytecode offset,
  or the nearest JSSpanDep at or above a given pc.

  To avoid limiting scripts to 64K jumps, if the cg->spanDeps index overflows
  65534, we store SPANDEP_INDEX_HUGE in the jump's immediate operand. This
  tells us that we need to binary-search for the cg->spanDeps entry by the
  jump opcode's bytecode offset (sd->before).

  Jump targets need to be maintained in a data structure that lets us look
  up an already-known target by its address (jumps may have a common target),
  and that also lets us update the addresses (script-relative, a.k.a. absolute
  offsets) of targets that come after a jump target (for when a jump below
  that target needs to be extended). We use an AVL tree, implemented using
  recursion, but with some tricky optimizations to its height-balancing code
  (see http://www.cmcrossroads.com/bradapp/ftp/src/libs/C++/AvlTrees.html).

  A final wrinkle: backpatch chains are linked by jump-to-jump offsets with
  positive sign, even though they link "backward" (i.e., toward lower bytecode
  address). We don't want to waste space and search time in the AVL tree for
  such temporary backpatch deltas, so we use a single-bit wildcard scheme to
  tag true JSJumpTarget pointers and encode untagged, signed (positive) deltas
  in JSSpanDep.target pointers, depending on whether the JSSpanDep has a known
  target, or is still awaiting backpatching.

  Note that backpatch chains would present a problem for BuildSpanDepTable,
  which inspects bytecode to build cg->spanDeps on demand, when the first
  short jump offset overflows. To solve this temporary problem, we emit a
  proxy bytecode (JSOP_BACKPATCH; JSOP_BACKPATCH_POP for branch ops) whose
  nuses/ndefs counts help keep the stack balanced, but whose opcode format
  distinguishes its backpatch delta immediate operand from a normal jump
  offset.
 */
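/*
 * A concrete sketch of the two encodings described above (the real accessors
 * are the GET/SET_JUMP[X]_OFFSET macros in jsopcode.h; the byte listing here
 * is illustrative):
 *
 *   JSOP_GOTO  <hi> <lo>              ; short form, 16-bit signed span
 *   JSOP_GOTOX <b3> <b2> <b1> <b0>    ; extended form, 32-bit signed span
 *
 * While cg->spanDeps is non-null, the two bytes after a short jump hold not
 * a span but an unsigned index into cg->spanDeps (or SPANDEP_INDEX_HUGE when
 * the index exceeds 65534), and the true target lives in the JSSpanDep.
 */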
static int
BalanceJumpTargets(JSJumpTarget **jtp)
{
    JSJumpTarget *jt, *jt2, *root;
    int dir, otherDir, heightChanged;
    JSBool doubleRotate;

    jt = *jtp;
    JS_ASSERT(jt->balance != 0);

    if (jt->balance < -1) {
        dir = JT_RIGHT;
        doubleRotate = (jt->kids[JT_LEFT]->balance > 0);
    } else if (jt->balance > 1) {
        dir = JT_LEFT;
        doubleRotate = (jt->kids[JT_RIGHT]->balance < 0);
    } else {
        return 0;
    }

    otherDir = JT_OTHER_DIR(dir);
    if (doubleRotate) {
        jt2 = jt->kids[otherDir];
        *jtp = root = jt2->kids[dir];

        jt->kids[otherDir] = root->kids[dir];
        root->kids[dir] = jt;

        jt2->kids[dir] = root->kids[otherDir];
        root->kids[otherDir] = jt2;

        heightChanged = 1;
        root->kids[JT_LEFT]->balance = -JS_MAX(root->balance, 0);
        root->kids[JT_RIGHT]->balance = -JS_MIN(root->balance, 0);
        root->balance = 0;
    } else {
        *jtp = root = jt->kids[otherDir];
        jt->kids[otherDir] = root->kids[dir];
        root->kids[dir] = jt;

        heightChanged = (root->balance != 0);
        jt->balance = -((dir == JT_LEFT) ? --root->balance : ++root->balance);
    }

    return heightChanged;
}
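/*
 * Single-rotation example for the code above: inserting offsets 10, 20, 30
 * in order leaves node 10 with balance +2 (balance is rh - lh, see AVLCheck
 * below), so dir == JT_LEFT and the else branch rotates 20 to the root:
 *
 *     10                  20
 *       \                /  \
 *        20      =>    10    30
 *          \
 *           30
 *
 * with both kids' balances restored to 0. (A sketch; insertion itself
 * happens in AddJumpTarget below.)
 */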
typedef struct AddJumpTargetArgs {
    JSContext           *cx;
    JSCodeGenerator     *cg;
    ptrdiff_t           offset;
    JSJumpTarget        *node;
} AddJumpTargetArgs;

static int
AddJumpTarget(AddJumpTargetArgs *args, JSJumpTarget **jtp)
{
    JSJumpTarget *jt;
    int balanceDelta;

    jt = *jtp;
    if (!jt) {
        JSCodeGenerator *cg = args->cg;

        jt = cg->jtFreeList;
        if (jt) {
            cg->jtFreeList = jt->kids[JT_LEFT];
        } else {
            JS_ARENA_ALLOCATE_CAST(jt, JSJumpTarget *, &args->cx->tempPool,
                                   sizeof *jt);
            if (!jt) {
                js_ReportOutOfScriptQuota(args->cx);
                return 0;
            }
        }
        jt->offset = args->offset;
        jt->balance = 0;
        jt->kids[JT_LEFT] = jt->kids[JT_RIGHT] = NULL;
        cg->numJumpTargets++;
        args->node = jt;
        *jtp = jt;
        return 1;
    }

    if (jt->offset == args->offset) {
        args->node = jt;
        return 0;
    }

    if (args->offset < jt->offset)
        balanceDelta = -AddJumpTarget(args, &jt->kids[JT_LEFT]);
    else
        balanceDelta = AddJumpTarget(args, &jt->kids[JT_RIGHT]);
    if (!args->node)
        return 0;

    jt->balance += balanceDelta;
    return (balanceDelta && jt->balance)
           ? 1 - BalanceJumpTargets(jtp)
           : 0;
}

#ifdef DEBUG_brendan
static int AVLCheck(JSJumpTarget *jt)
{
    int lh, rh;

    if (!jt) return 0;
    JS_ASSERT(-1 <= jt->balance && jt->balance <= 1);
    lh = AVLCheck(jt->kids[JT_LEFT]);
    rh = AVLCheck(jt->kids[JT_RIGHT]);
    JS_ASSERT(jt->balance == rh - lh);
    return 1 + JS_MAX(lh, rh);
}
#endif
static JSBool
SetSpanDepTarget(JSContext *cx, JSCodeGenerator *cg, JSSpanDep *sd,
                 ptrdiff_t off)
{
    AddJumpTargetArgs args;

    if (off < JUMPX_OFFSET_MIN || JUMPX_OFFSET_MAX < off) {
        ReportStatementTooLarge(cx, cg);
        return JS_FALSE;
    }

    args.cx = cx;
    args.cg = cg;
    args.offset = sd->top + off;
    args.node = NULL;
    AddJumpTarget(&args, &cg->jumpTargets);
    if (!args.node)
        return JS_FALSE;

#ifdef DEBUG_brendan
    AVLCheck(cg->jumpTargets);
#endif

    SD_SET_TARGET(sd, args.node);
    return JS_TRUE;
}

#define SPANDEPS_MIN            256
#define SPANDEPS_SIZE(n)        ((n) * sizeof(JSSpanDep))
#define SPANDEPS_SIZE_MIN       SPANDEPS_SIZE(SPANDEPS_MIN)

static JSBool
AddSpanDep(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc, jsbytecode *pc2,
           ptrdiff_t off)
{
    uintN index;
    JSSpanDep *sdbase, *sd;
    size_t size;

    index = cg->numSpanDeps;
    if (index + 1 == 0) {
        ReportStatementTooLarge(cx, cg);
        return JS_FALSE;
    }

    if ((index & (index - 1)) == 0 &&
        (!(sdbase = cg->spanDeps) || index >= SPANDEPS_MIN)) {
        size = sdbase ? SPANDEPS_SIZE(index) : SPANDEPS_SIZE_MIN / 2;
        sdbase = (JSSpanDep *) cx->realloc(sdbase, size + size);
        if (!sdbase)
            return JS_FALSE;
        cg->spanDeps = sdbase;
    }

    cg->numSpanDeps = index + 1;
    sd = cg->spanDeps + index;
    sd->top = pc - CG_BASE(cg);
    sd->offset = sd->before = pc2 - CG_BASE(cg);

    if (js_CodeSpec[*pc].format & JOF_BACKPATCH) {
        /* Jump offset will be backpatched if off is a non-zero "bpdelta". */
        if (off != 0) {
            JS_ASSERT(off >= 1 + JUMP_OFFSET_LEN);
            if (off > BPDELTA_MAX) {
                ReportStatementTooLarge(cx, cg);
                return JS_FALSE;
            }
        }
        SD_SET_BPDELTA(sd, off);
    } else if (off == 0) {
        /* Jump offset will be patched directly, without backpatch chaining. */
        SD_SET_TARGET(sd, 0);
    } else {
        /* The jump offset in off is non-zero, therefore it's already known. */
        if (!SetSpanDepTarget(cx, cg, sd, off))
            return JS_FALSE;
    }

    if (index > SPANDEP_INDEX_MAX)
        index = SPANDEP_INDEX_HUGE;
    SET_SPANDEP_INDEX(pc2, index);
    return JS_TRUE;
}
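/*
 * Note on the realloc policy above: (index & (index - 1)) == 0 holds only at
 * zero and powers of two, so the vector is allocated with SPANDEPS_MIN (256)
 * entries up front and then doubled when recording spandep number 256, 512,
 * 1024, ..., amortizing reallocation cost over many AddSpanDep calls.
 */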
static jsbytecode *
AddSwitchSpanDeps(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc)
{
    JSOp op;
    jsbytecode *pc2;
    ptrdiff_t off;
    jsint low, high;
    uintN njumps, indexlen;

    op = (JSOp) *pc;
    JS_ASSERT(op == JSOP_TABLESWITCH || op == JSOP_LOOKUPSWITCH);
    pc2 = pc;
    off = GET_JUMP_OFFSET(pc2);
    if (!AddSpanDep(cx, cg, pc, pc2, off))
        return NULL;
    pc2 += JUMP_OFFSET_LEN;
    if (op == JSOP_TABLESWITCH) {
        low = GET_JUMP_OFFSET(pc2);
        pc2 += JUMP_OFFSET_LEN;
        high = GET_JUMP_OFFSET(pc2);
        pc2 += JUMP_OFFSET_LEN;
        njumps = (uintN) (high - low + 1);
        indexlen = 0;
    } else {
        njumps = GET_UINT16(pc2);
        pc2 += UINT16_LEN;
        indexlen = INDEX_LEN;
    }
    while (njumps) {
        --njumps;
        pc2 += indexlen;
        off = GET_JUMP_OFFSET(pc2);
        if (!AddSpanDep(cx, cg, pc, pc2, off))
            return NULL;
        pc2 += JUMP_OFFSET_LEN;
    }
    return 1 + pc2;
}

static JSBool
BuildSpanDepTable(JSContext *cx, JSCodeGenerator *cg)
{
    jsbytecode *pc, *end;
    JSOp op;
    const JSCodeSpec *cs;
    ptrdiff_t off;

    pc = CG_BASE(cg) + cg->spanDepTodo;
    end = CG_NEXT(cg);
    while (pc != end) {
        JS_ASSERT(pc < end);
        op = (JSOp)*pc;
        cs = &js_CodeSpec[op];

        switch (JOF_TYPE(cs->format)) {
          case JOF_TABLESWITCH:
          case JOF_LOOKUPSWITCH:
            pc = AddSwitchSpanDeps(cx, cg, pc);
            if (!pc)
                return JS_FALSE;
            break;

          case JOF_JUMP:
            off = GET_JUMP_OFFSET(pc);
            if (!AddSpanDep(cx, cg, pc, pc, off))
                return JS_FALSE;
            /* FALL THROUGH */
          default:
            pc += cs->length;
            break;
        }
    }

    return JS_TRUE;
}
static JSSpanDep *
GetSpanDep(JSCodeGenerator *cg, jsbytecode *pc)
{
    uintN index;
    ptrdiff_t offset;
    int lo, hi, mid;
    JSSpanDep *sd;

    index = GET_SPANDEP_INDEX(pc);
    if (index != SPANDEP_INDEX_HUGE)
        return cg->spanDeps + index;

    offset = pc - CG_BASE(cg);
    lo = 0;
    hi = cg->numSpanDeps - 1;
    while (lo <= hi) {
        mid = (lo + hi) / 2;
        sd = cg->spanDeps + mid;
        if (sd->before == offset)
            return sd;
        if (sd->before < offset)
            lo = mid + 1;
        else
            hi = mid - 1;
    }

    JS_ASSERT(0);
    return NULL;
}

static JSBool
SetBackPatchDelta(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc,
                  ptrdiff_t delta)
{
    JSSpanDep *sd;

    JS_ASSERT(delta >= 1 + JUMP_OFFSET_LEN);
    if (!cg->spanDeps && delta < JUMP_OFFSET_MAX) {
        SET_JUMP_OFFSET(pc, delta);
        return JS_TRUE;
    }

    if (delta > BPDELTA_MAX) {
        ReportStatementTooLarge(cx, cg);
        return JS_FALSE;
    }

    if (!cg->spanDeps && !BuildSpanDepTable(cx, cg))
        return JS_FALSE;

    sd = GetSpanDep(cg, pc);
    JS_ASSERT(SD_GET_BPDELTA(sd) == 0);
    SD_SET_BPDELTA(sd, delta);
    return JS_TRUE;
}
static void
UpdateJumpTargets(JSJumpTarget *jt, ptrdiff_t pivot, ptrdiff_t delta)
{
    if (jt->offset > pivot) {
        jt->offset += delta;
        if (jt->kids[JT_LEFT])
            UpdateJumpTargets(jt->kids[JT_LEFT], pivot, delta);
    }
    if (jt->kids[JT_RIGHT])
        UpdateJumpTargets(jt->kids[JT_RIGHT], pivot, delta);
}

static JSSpanDep *
FindNearestSpanDep(JSCodeGenerator *cg, ptrdiff_t offset, int lo,
                   JSSpanDep *guard)
{
    int num, hi, mid;
    JSSpanDep *sdbase, *sd;

    num = cg->numSpanDeps;
    JS_ASSERT(num > 0);
    hi = num - 1;
    sdbase = cg->spanDeps;
    while (lo <= hi) {
        mid = (lo + hi) / 2;
        sd = sdbase + mid;
        if (sd->before == offset)
            return sd;
        if (sd->before < offset)
            lo = mid + 1;
        else
            hi = mid - 1;
    }
    if (lo == num)
        return guard;
    sd = sdbase + lo;
    JS_ASSERT(sd->before >= offset && (lo == 0 || sd[-1].before < offset));
    return sd;
}
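/*
 * Fencepost example: with spandeps whose before offsets are {10, 20, 30},
 * a query for offset 25 returns the spandep at 30 (the nearest at or above),
 * while a query for offset 40 runs lo off the end and returns the caller's
 * guard record.
 */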
static void
FreeJumpTargets(JSCodeGenerator *cg, JSJumpTarget *jt)
{
    if (jt->kids[JT_LEFT])
        FreeJumpTargets(cg, jt->kids[JT_LEFT]);
    if (jt->kids[JT_RIGHT])
        FreeJumpTargets(cg, jt->kids[JT_RIGHT]);
    jt->kids[JT_LEFT] = cg->jtFreeList;
    cg->jtFreeList = jt;
}
static JSBool
OptimizeSpanDeps(JSContext *cx, JSCodeGenerator *cg)
{
    jsbytecode *pc, *oldpc, *base, *limit, *next;
    JSSpanDep *sd, *sd2, *sdbase, *sdlimit, *sdtop, guard;
    ptrdiff_t offset, growth, delta, top, pivot, span, length, target;
    JSBool done;
    JSOp op;
    uint32 type;
    size_t size, incr;
    jssrcnote *sn, *snlimit;
    JSSrcNoteSpec *spec;
    uintN i, n, noteIndex;
    JSTryNode *tryNode;
#ifdef DEBUG_brendan
    int passes = 0;
#endif

    base = CG_BASE(cg);
    sdbase = cg->spanDeps;
    sdlimit = sdbase + cg->numSpanDeps;
    offset = CG_OFFSET(cg);
    growth = 0;

    do {
        done = JS_TRUE;
        delta = 0;
        top = pivot = -1;
        sdtop = NULL;
        pc = NULL;
        op = JSOP_NOP;
        type = 0;
#ifdef DEBUG_brendan
        passes++;
#endif

        for (sd = sdbase; sd < sdlimit; sd++) {
            JS_ASSERT(JT_HAS_TAG(sd->target));
            sd->offset += delta;

            if (sd->top != top) {
                sdtop = sd;
                top = sd->top;
                JS_ASSERT(top == sd->before);
                pivot = sd->offset;
                pc = base + top;
                op = (JSOp) *pc;
                type = JOF_OPTYPE(op);
                if (JOF_TYPE_IS_EXTENDED_JUMP(type)) {
                    /*
                     * We already extended all the jump offset operands for
                     * the opcode at sd->top. Jumps and branches have only
                     * one jump offset operand, but switches have many, all
                     * of which are adjacent in cg->spanDeps.
                     */
                    continue;
                }

                JS_ASSERT(type == JOF_JUMP ||
                          type == JOF_TABLESWITCH ||
                          type == JOF_LOOKUPSWITCH);
            }

            if (!JOF_TYPE_IS_EXTENDED_JUMP(type)) {
                span = SD_SPAN(sd, pivot);
                if (span < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < span) {
                    ptrdiff_t deltaFromTop = 0;

                    done = JS_FALSE;

                    switch (op) {
                      case JSOP_GOTO:         op = JSOP_GOTOX; break;
                      case JSOP_IFEQ:         op = JSOP_IFEQX; break;
                      case JSOP_IFNE:         op = JSOP_IFNEX; break;
                      case JSOP_OR:           op = JSOP_ORX; break;
                      case JSOP_AND:          op = JSOP_ANDX; break;
                      case JSOP_GOSUB:        op = JSOP_GOSUBX; break;
                      case JSOP_CASE:         op = JSOP_CASEX; break;
                      case JSOP_DEFAULT:      op = JSOP_DEFAULTX; break;
                      case JSOP_TABLESWITCH:  op = JSOP_TABLESWITCHX; break;
                      case JSOP_LOOKUPSWITCH: op = JSOP_LOOKUPSWITCHX; break;
                      default:
                        ReportStatementTooLarge(cx, cg);
                        return JS_FALSE;
                    }
                    *pc = (jsbytecode) op;

                    for (sd2 = sdtop; sd2 < sdlimit && sd2->top == top; sd2++) {
                        if (sd2 <= sd) {
                            /*
                             * sd2->offset already includes delta as it stood
                             * before we entered this loop, but it must also
                             * include the delta relative to top due to all the
                             * extended jump offset immediates for the opcode
                             * starting at top, which we extend in this loop.
                             *
                             * If there is only one extended jump offset, then
                             * sd2->offset won't change and this for loop will
                             * iterate once only.
                             */
                            sd2->offset += deltaFromTop;
                            deltaFromTop += JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN;
                        } else {
                            /*
                             * sd2 comes after sd, and won't be revisited by
                             * the outer for loop, so we have to increase its
                             * offset by delta, not merely by deltaFromTop.
                             */
                            sd2->offset += delta;
                        }

                        delta += JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN;
                        UpdateJumpTargets(cg->jumpTargets, sd2->offset,
                                          JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN);
                    }
                    sd = sd2 - 1;
                }
            }
        }

        growth += delta;
    } while (!done);

    if (growth) {
#ifdef DEBUG_brendan
        TokenStream *ts = &cg->parser->tokenStream;

        printf("%s:%u: %u/%u jumps extended in %d passes (%d=%d+%d)\n",
               ts->filename ? ts->filename : "stdin", cg->firstLine,
               growth / (JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN), cg->numSpanDeps,
               passes, offset + growth, offset, growth);
#endif

        /*
         * Ensure that we have room for the extended jumps, but don't round up
         * to a power of two -- we're done generating code, so we cut to fit.
         */
        limit = CG_LIMIT(cg);
        length = offset + growth;
        next = base + length;
        if (next > limit) {
            JS_ASSERT(length > BYTECODE_CHUNK);
            size = BYTECODE_SIZE(limit - base);
            incr = BYTECODE_SIZE(length) - size;
            JS_ARENA_GROW_CAST(base, jsbytecode *, cg->codePool, size, incr);
            if (!base) {
                js_ReportOutOfScriptQuota(cx);
                return JS_FALSE;
            }
            CG_BASE(cg) = base;
            CG_LIMIT(cg) = next = base + length;
        }
        CG_NEXT(cg) = next;

        /*
         * Set up a fake span dependency record to guard the end of the code
         * being generated. This guard record is returned as a fencepost by
         * FindNearestSpanDep if there is no real spandep at or above a given
         * unextended code offset.
         */
        guard.top = -1;
        guard.offset = offset + growth;
        guard.before = offset;
        guard.target = NULL;
    }

    /*
     * Now work backwards through the span dependencies, copying chunks of
     * bytecode between each extended jump toward the end of the grown code
     * space, and restoring immediate offset operands for all jump bytecodes.
     * The first chunk of bytecodes, starting at base and ending at the first
     * extended jump offset (NB: this chunk includes the operation bytecode
     * just before that immediate jump offset), doesn't need to be copied.
     */
    JS_ASSERT(sd == sdlimit);
    top = -1;
    while (--sd >= sdbase) {
        if (sd->top != top) {
            top = sd->top;
            op = (JSOp) base[top];
            type = JOF_OPTYPE(op);

            for (sd2 = sd - 1; sd2 >= sdbase && sd2->top == top; sd2--)
                continue;
            sd2++;
            pivot = sd2->offset;
            JS_ASSERT(top == sd2->before);
        }

        oldpc = base + sd->before;
        span = SD_SPAN(sd, pivot);

        /*
         * If this jump didn't need to be extended, restore its span immediate
         * offset operand now, overwriting the index of sd within cg->spanDeps
         * that was stored temporarily after *pc when BuildSpanDepTable ran.
         *
         * Note that span might fit in 16 bits even for an extended jump op,
         * if the op has multiple span operands, not all of which overflowed
         * (e.g. JSOP_LOOKUPSWITCH or JSOP_TABLESWITCH where some cases are in
         * range for a short jump, but others are not).
         */
        if (!JOF_TYPE_IS_EXTENDED_JUMP(type)) {
            JS_ASSERT(JUMP_OFFSET_MIN <= span && span <= JUMP_OFFSET_MAX);
            SET_JUMP_OFFSET(oldpc, span);
            continue;
        }

        /*
         * Set up parameters needed to copy the next run of bytecode starting
         * at offset (which is a cursor into the unextended, original bytecode
         * vector), down to sd->before (a cursor of the same scale as offset,
         * it's the index of the original jump pc). Reuse delta to count the
         * nominal number of bytes to copy.
         */
        pc = base + sd->offset;
        delta = offset - sd->before;
        JS_ASSERT(delta >= 1 + JUMP_OFFSET_LEN);

        /*
         * Don't bother copying the jump offset we're about to reset, but do
         * copy the bytecode at oldpc (which comes just before its immediate
         * jump offset operand), on the next iteration through the loop, by
         * including it in offset's new value.
         */
        offset = sd->before + 1;
        size = BYTECODE_SIZE(delta - (1 + JUMP_OFFSET_LEN));
        if (size) {
            memmove(pc + 1 + JUMPX_OFFSET_LEN,
                    oldpc + 1 + JUMP_OFFSET_LEN,
                    size);
        }

        SET_JUMPX_OFFSET(pc, span);
    }

    if (growth) {
        /*
         * Fix source note deltas. Don't hardwire the delta fixup adjustment,
         * even though currently it must be JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN
         * at each sd that moved. The future may bring different offset sizes
         * for span-dependent instruction operands. However, we fix only main
         * notes here, not prolog notes -- we know that prolog opcodes are not
         * span-dependent, and aren't likely ever to be.
         */
        offset = growth = 0;
        sd = sdbase;
        for (sn = cg->main.notes, snlimit = sn + cg->main.noteCount;
             sn < snlimit;
             sn = SN_NEXT(sn)) {
            /*
             * Recall that the offset of a given note includes its delta, and
             * tells the offset of the annotated bytecode from the main entry
             * point of the script.
             */
            offset += SN_DELTA(sn);
            while (sd < sdlimit && sd->before < offset) {
                /*
                 * To compute the delta to add to sn, we need to look at the
                 * spandep after sd, whose offset - (before + growth) tells by
                 * how many bytes sd's instruction grew.
                 */
                sd2 = sd + 1;
                if (sd2 == sdlimit)
                    sd2 = &guard;
                delta = sd2->offset - (sd2->before + growth);
                if (delta > 0) {
                    JS_ASSERT(delta == JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN);
                    sn = js_AddToSrcNoteDelta(cx, cg, sn, delta);
                    if (!sn)
                        return JS_FALSE;
                    snlimit = cg->main.notes + cg->main.noteCount;
                    growth += delta;
                }
                sd++;
            }

            /*
             * If sn has span-dependent offset operands, check whether each
             * covers further span-dependencies, and increase those operands
             * accordingly. Some source notes measure offset not from the
             * annotated pc, but from that pc plus some small bias. NB: we
             * assume that spec->offsetBias can't itself span span-dependent
             * instructions!
             */
            spec = &js_SrcNoteSpec[SN_TYPE(sn)];
            if (spec->isSpanDep) {
                pivot = offset + spec->offsetBias;
                n = spec->arity;
                for (i = 0; i < n; i++) {
                    span = js_GetSrcNoteOffset(sn, i);
                    if (span == 0)
                        continue;
                    target = pivot + span * spec->isSpanDep;
                    sd2 = FindNearestSpanDep(cg, target,
                                             (target >= pivot)
                                             ? sd - sdbase
                                             : 0,
                                             &guard);

                    /*
                     * Increase target by sd2's before-vs-after offset delta,
                     * which is absolute (i.e., relative to start of script,
                     * as is target). Recompute the span by subtracting its
                     * adjusted pivot from target.
                     */
                    target += sd2->offset - sd2->before;
                    span = target - (pivot + growth);
                    span *= spec->isSpanDep;
                    noteIndex = sn - cg->main.notes;
                    if (!js_SetSrcNoteOffset(cx, cg, noteIndex, i, span))
                        return JS_FALSE;
                    sn = cg->main.notes + noteIndex;
                    snlimit = cg->main.notes + cg->main.noteCount;
                }
            }
        }
        cg->main.lastNoteOffset += growth;

        /*
         * Fix try/catch notes (O(numTryNotes * log2(numSpanDeps)), but it's
         * not clear how we can beat that).
         */
        for (tryNode = cg->lastTryNode; tryNode; tryNode = tryNode->prev) {
            /*
             * First, look for the nearest span dependency at/above tn->start.
             * There may not be any such spandep, in which case the guard will
             * be returned.
             */
            offset = tryNode->note.start;
            sd = FindNearestSpanDep(cg, offset, 0, &guard);
            delta = sd->offset - sd->before;
            tryNode->note.start = offset + delta;

            /*
             * Next, find the nearest spandep at/above tn->start + tn->length.
             * Use its delta minus tn->start's delta to increase tn->length.
             */
            length = tryNode->note.length;
            sd2 = FindNearestSpanDep(cg, offset + length, sd - sdbase, &guard);
            if (sd2 != sd) {
                tryNode->note.length =
                    length + sd2->offset - sd2->before - delta;
            }
        }
    }

#ifdef DEBUG_brendan
  {
    uintN bigspans = 0;
    top = -1;
    for (sd = sdbase; sd < sdlimit; sd++) {
        offset = sd->offset;

        /* NB: sd->top cursors into the original, unextended bytecode vector. */
        if (sd->top != top) {
            JS_ASSERT(top == -1 ||
                      !JOF_TYPE_IS_EXTENDED_JUMP(type) ||
                      bigspans != 0);
            bigspans = 0;
            top = sd->top;
            JS_ASSERT(top == sd->before);
            op = (JSOp) base[offset];
            type = JOF_OPTYPE(op);
            JS_ASSERT(type == JOF_JUMP ||
                      type == JOF_JUMPX ||
                      type == JOF_TABLESWITCH ||
                      type == JOF_TABLESWITCHX ||
                      type == JOF_LOOKUPSWITCH ||
                      type == JOF_LOOKUPSWITCHX);
            pivot = offset;
        }

        pc = base + offset;
        if (JOF_TYPE_IS_EXTENDED_JUMP(type)) {
            span = GET_JUMPX_OFFSET(pc);
            if (span < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < span) {
                bigspans++;
            } else {
                JS_ASSERT(type == JOF_TABLESWITCHX ||
                          type == JOF_LOOKUPSWITCHX);
            }
        } else {
            span = GET_JUMP_OFFSET(pc);
        }
        JS_ASSERT(SD_SPAN(sd, pivot) == span);
    }
    JS_ASSERT(!JOF_TYPE_IS_EXTENDED_JUMP(type) || bigspans != 0);
  }
#endif

    /*
     * Reset so we optimize at most once -- cg may be used for further code
     * generation of successive, independent, top-level statements. No jump
     * can span top-level statements, because JS lacks goto.
     */
    size = SPANDEPS_SIZE(JS_BIT(JS_CeilingLog2(cg->numSpanDeps)));
    cx->free(cg->spanDeps);
    cg->spanDeps = NULL;
    FreeJumpTargets(cg, cg->jumpTargets);
    cg->jumpTargets = NULL;
    cg->numSpanDeps = cg->numJumpTargets = 0;
    cg->spanDepTodo = CG_OFFSET(cg);
    return JS_TRUE;
}
static ptrdiff_t
EmitJump(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t off)
{
    JSBool extend;
    ptrdiff_t jmp;
    jsbytecode *pc;

    extend = off < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < off;
    if (extend && !cg->spanDeps && !BuildSpanDepTable(cx, cg))
        return -1;

    jmp = js_Emit3(cx, cg, op, JUMP_OFFSET_HI(off), JUMP_OFFSET_LO(off));
    if (jmp >= 0 && (extend || cg->spanDeps)) {
        pc = CG_CODE(cg, jmp);
        if (!AddSpanDep(cx, cg, pc, pc, off))
            return -1;
    }
    return jmp;
}

static ptrdiff_t
GetJumpOffset(JSCodeGenerator *cg, jsbytecode *pc)
{
    JSSpanDep *sd;
    JSJumpTarget *jt;
    ptrdiff_t top;

    if (!cg->spanDeps)
        return GET_JUMP_OFFSET(pc);

    sd = GetSpanDep(cg, pc);
    jt = sd->target;
    if (!JT_HAS_TAG(jt))
        return JT_TO_BPDELTA(jt);

    top = sd->top;
    while (--sd >= cg->spanDeps && sd->top == top)
        continue;
    sd++;
    return JT_CLR_TAG(jt)->offset - sd->offset;
}
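/*
 * The tag test above implements the wildcard scheme from the big comment
 * before BalanceJumpTargets: a JSSpanDep.target carrying the tag bit
 * (JT_HAS_TAG) is a real JSJumpTarget pointer, untagged via JT_CLR_TAG;
 * an untagged value is a positive backpatch delta, decoded via
 * JT_TO_BPDELTA while the chain still awaits patching.
 */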
JSBool
js_SetJumpOffset(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc,
                 ptrdiff_t off)
{
    if (!cg->spanDeps) {
        if (JUMP_OFFSET_MIN <= off && off <= JUMP_OFFSET_MAX) {
            SET_JUMP_OFFSET(pc, off);
            return JS_TRUE;
        }

        if (!BuildSpanDepTable(cx, cg))
            return JS_FALSE;
    }

    return SetSpanDepTarget(cx, cg, GetSpanDep(cg, pc), off);
}

bool
JSTreeContext::inStatement(JSStmtType type)
{
    for (JSStmtInfo *stmt = topStmt; stmt; stmt = stmt->down) {
        if (stmt->type == type)
            return true;
    }
    return false;
}

bool
JSTreeContext::ensureSharpSlots()
{
#if JS_HAS_SHARP_VARS
    JS_STATIC_ASSERT(SHARP_NSLOTS == 2);

    if (sharpSlotBase >= 0) {
        JS_ASSERT(flags & TCF_HAS_SHARPS);
        return true;
    }

    JS_ASSERT(!(flags & TCF_HAS_SHARPS));
    if (inFunction()) {
        JSContext *cx = parser->context;
        JSAtom *sharpArrayAtom = js_Atomize(cx, "#array", 6, 0);
        JSAtom *sharpDepthAtom = js_Atomize(cx, "#depth", 6, 0);
        if (!sharpArrayAtom || !sharpDepthAtom)
            return false;

        sharpSlotBase = fun->u.i.nvars;
        if (!js_AddLocal(cx, fun, sharpArrayAtom, JSLOCAL_VAR))
            return false;
        if (!js_AddLocal(cx, fun, sharpDepthAtom, JSLOCAL_VAR))
            return false;
    } else {
        /*
         * Compiler::compileScript will rebase immediate operands indexing
         * the sharp slots to come at the end of the global script's |nfixed|
         * slots storage, after gvars and regexps.
         */
        sharpSlotBase = 0;
    }
    flags |= TCF_HAS_SHARPS;
#endif
    return true;
}

bool
JSTreeContext::skipSpansGenerator(unsigned skip)
{
    JSTreeContext *tc = this;
    for (unsigned i = 0; i < skip; ++i, tc = tc->parent) {
        if (!tc)
            return false;
        if (tc->flags & TCF_FUN_IS_GENERATOR)
            return true;
    }
    return false;
}

void
js_PushStatement(JSTreeContext *tc, JSStmtInfo *stmt, JSStmtType type,
                 ptrdiff_t top)
{
    stmt->type = type;
    stmt->flags = 0;
    stmt->blockid = tc->blockid();
    SET_STATEMENT_TOP(stmt, top);
    stmt->label = NULL;
    JS_ASSERT(!stmt->blockObj);
    stmt->down = tc->topStmt;
    tc->topStmt = stmt;
    if (STMT_LINKS_SCOPE(stmt)) {
        stmt->downScope = tc->topScopeStmt;
        tc->topScopeStmt = stmt;
    } else {
        stmt->downScope = NULL;
    }
}

void
js_PushBlockScope(JSTreeContext *tc, JSStmtInfo *stmt, JSObject *blockObj,
                  ptrdiff_t top)
{
    js_PushStatement(tc, stmt, STMT_BLOCK, top);
    stmt->flags |= SIF_SCOPE;
    blockObj->setParent(tc->blockChain);
    stmt->downScope = tc->topScopeStmt;
    tc->topScopeStmt = stmt;
    tc->blockChain = blockObj;
    stmt->blockObj = blockObj;
}
/*
 * Emit a backpatch op with offset pointing to the previous jump of this type,
 * so that we can walk back up the chain fixing up the op and jump offset.
 */
static ptrdiff_t
EmitBackPatchOp(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t *lastp)
{
    ptrdiff_t offset, delta;

    offset = CG_OFFSET(cg);
    delta = offset - *lastp;
    *lastp = offset;
    JS_ASSERT(delta > 0);
    return EmitJump(cx, cg, op, delta);
}
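/*
 * Sketch of the chain this builds, assuming the usual -1 start sentinel for
 * *lastp and invented offsets: three breaks from one statement emit
 *
 *   offset 10: JSOP_BACKPATCH <11>   ; 10 - (-1), reaches the sentinel
 *   offset 20: JSOP_BACKPATCH <10>   ; 20 - 10
 *   offset 30: JSOP_BACKPATCH <10>   ; 30 - 20
 *
 * leaving *lastp at 30. BackPatch (below) then walks pc -= delta from the
 * tail, rewriting each op and its offset to jump to the real target.
 */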
/*
 * Macro to emit a bytecode followed by a uint16 immediate operand stored in
 * big-endian order, used for arg and var numbers as well as for atomIndexes.
 * NB: We use cx and cg from our caller's lexical environment, and return
 * false on error.
 */
#define EMIT_UINT16_IMM_OP(op, i)                                             \
    JS_BEGIN_MACRO                                                            \
        if (js_Emit3(cx, cg, op, UINT16_HI(i), UINT16_LO(i)) < 0)             \
            return JS_FALSE;                                                  \
    JS_END_MACRO

#define EMIT_UINT16PAIR_IMM_OP(op, i, j)                                      \
    JS_BEGIN_MACRO                                                            \
        ptrdiff_t off_ = js_EmitN(cx, cg, op, 2 * UINT16_LEN);                \
        if (off_ < 0)                                                         \
            return JS_FALSE;                                                  \
        jsbytecode *pc_ = CG_CODE(cg, off_);                                  \
        SET_UINT16(pc_, i);                                                   \
        pc_ += UINT16_LEN;                                                    \
        SET_UINT16(pc_, j);                                                   \
    JS_END_MACRO
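/*
 * Example of the layout EMIT_UINT16_IMM_OP produces: popping 300 stack items
 * via JSOP_POPN emits three bytes, [JSOP_POPN, 0x01, 0x2C], since
 * UINT16_HI(300) == 0x01 and UINT16_LO(300) == 0x2C.
 */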
static JSBool
FlushPops(JSContext *cx, JSCodeGenerator *cg, intN *npops)
{
    JS_ASSERT(*npops != 0);
    if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
        return JS_FALSE;
    EMIT_UINT16_IMM_OP(JSOP_POPN, *npops);
    *npops = 0;
    return JS_TRUE;
}

/*
 * Emit additional bytecode(s) for non-local jumps.
 */
static JSBool
EmitNonLocalJumpFixup(JSContext *cx, JSCodeGenerator *cg, JSStmtInfo *toStmt)
{
    intN depth, npops;
    JSStmtInfo *stmt;

    /*
     * The non-local jump fixup we emit will unbalance cg->stackDepth, because
     * the fixup replicates balanced code such as JSOP_LEAVEWITH emitted at the
     * end of a with statement, so we save cg->stackDepth here and restore it
     * just before a successful return.
     */
    depth = cg->stackDepth;
    npops = 0;

#define FLUSH_POPS() if (npops && !FlushPops(cx, cg, &npops)) return JS_FALSE

    for (stmt = cg->topStmt; stmt != toStmt; stmt = stmt->down) {
        switch (stmt->type) {
          case STMT_FINALLY:
            FLUSH_POPS();
            if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
                return JS_FALSE;
            if (EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &GOSUBS(*stmt)) < 0)
                return JS_FALSE;
            break;

          case STMT_WITH:
            /* There's a With object on the stack that we need to pop. */
            FLUSH_POPS();
            if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
                return JS_FALSE;
            if (js_Emit1(cx, cg, JSOP_LEAVEWITH) < 0)
                return JS_FALSE;
            break;

          case STMT_FOR_IN_LOOP:
            /*
             * The iterator and the object being iterated need to be popped.
             */
            FLUSH_POPS();
            if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
                return JS_FALSE;
            if (js_Emit1(cx, cg, JSOP_ENDITER) < 0)
                return JS_FALSE;
            break;

          case STMT_SUBROUTINE:
            /*
             * There's a [exception or hole, retsub pc-index] pair on the
             * stack that we need to pop.
             */
            npops += 2;
            break;

          default:;
        }

        if (stmt->flags & SIF_SCOPE) {
            uintN i;

            /* There is a Block object with locals on the stack to pop. */
            FLUSH_POPS();
            if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
                return JS_FALSE;
            i = OBJ_BLOCK_COUNT(cx, stmt->blockObj);
            EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK, i);
        }
    }

    FLUSH_POPS();
    cg->stackDepth = depth;
    return JS_TRUE;

#undef FLUSH_POPS
}
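/*
 * For instance, in
 *
 *   out: for (;;) { for (var p in o) break out; }
 *
 * the labeled break jumps across the inner for/in loop, so the
 * STMT_FOR_IN_LOOP case above emits a hidden JSOP_ENDITER to pop the live
 * iterator before the backpatched goto; a break out of a with statement
 * similarly gets a hidden JSOP_LEAVEWITH.
 */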
static ptrdiff_t
EmitGoto(JSContext *cx, JSCodeGenerator *cg, JSStmtInfo *toStmt,
         ptrdiff_t *lastp, JSAtomListElement *label, JSSrcNoteType noteType)
{
    intN index;

    if (!EmitNonLocalJumpFixup(cx, cg, toStmt))
        return -1;

    if (label)
        index = js_NewSrcNote2(cx, cg, noteType, (ptrdiff_t) ALE_INDEX(label));
    else if (noteType != SRC_NULL)
        index = js_NewSrcNote(cx, cg, noteType);
    else
        index = 0;
    if (index < 0)
        return -1;

    return EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, lastp);
}

static JSBool
BackPatch(JSContext *cx, JSCodeGenerator *cg, ptrdiff_t last,
          jsbytecode *target, jsbytecode op)
{
    jsbytecode *pc, *stop;
    ptrdiff_t delta, span;

    pc = CG_CODE(cg, last);
    stop = CG_CODE(cg, -1);
    while (pc != stop) {
        delta = GetJumpOffset(cg, pc);
        span = target - pc;
        CHECK_AND_SET_JUMP_OFFSET(cx, cg, pc, span);

        /*
         * Set *pc after jump offset in case bpdelta didn't overflow, but span
         * does (if so, CHECK_AND_SET_JUMP_OFFSET might call BuildSpanDepTable
         * and need to see the JSOP_BACKPATCH* op at *pc).
         */
        *pc = op;
        pc -= delta;
    }
    return JS_TRUE;
}

void
js_PopStatement(JSTreeContext *tc)
{
    JSStmtInfo *stmt;

    stmt = tc->topStmt;
    tc->topStmt = stmt->down;
    if (STMT_LINKS_SCOPE(stmt)) {
        tc->topScopeStmt = stmt->downScope;
        if (stmt->flags & SIF_SCOPE) {
            tc->blockChain = stmt->blockObj->getParent();
            JS_SCOPE_DEPTH_METERING(--tc->scopeDepth);
        }
    }
}

JSBool
js_PopStatementCG(JSContext *cx, JSCodeGenerator *cg)
{
    JSStmtInfo *stmt;

    stmt = cg->topStmt;
    if (!STMT_IS_TRYING(stmt) &&
        (!BackPatch(cx, cg, stmt->breaks, CG_NEXT(cg), JSOP_GOTO) ||
         !BackPatch(cx, cg, stmt->continues, CG_CODE(cg, stmt->update),
                    JSOP_GOTO))) {
        return JS_FALSE;
    }
    js_PopStatement(cg);
    return JS_TRUE;
}
JSBool
js_DefineCompileTimeConstant(JSContext *cx, JSCodeGenerator *cg, JSAtom *atom,
                             JSParseNode *pn)
{
    jsdouble dval;
    jsint ival;
    JSAtom *valueAtom;
    jsval v;

    /* XXX just do numbers for now */
    if (pn->pn_type == TOK_NUMBER) {
        dval = pn->pn_dval;
        if (JSDOUBLE_IS_INT(dval, ival) && INT_FITS_IN_JSVAL(ival)) {
            v = INT_TO_JSVAL(ival);
        } else {
            /*
             * We atomize double to root a jsdouble instance that we wrap as
             * jsval and store in cg->constList. This works because atoms are
             * protected from GC during compilation.
             */
            valueAtom = js_AtomizeDouble(cx, dval);
            if (!valueAtom)
                return JS_FALSE;
            v = ATOM_KEY(valueAtom);
        }
        if (!cg->constMap.put(atom, v))
            return JS_FALSE;
    }
    return JS_TRUE;
}

JSStmtInfo *
js_LexicalLookup(JSTreeContext *tc, JSAtom *atom, jsint *slotp, JSStmtInfo *stmt)
{
    JSObject *obj;
    JSScope *scope;
    JSScopeProperty *sprop;

    if (!stmt)
        stmt = tc->topScopeStmt;
    for (; stmt; stmt = stmt->downScope) {
        if (stmt->type == STMT_WITH)
            break;

        /* Skip "maybe scope" statements that don't contain let bindings. */
        if (!(stmt->flags & SIF_SCOPE))
            continue;

        obj = stmt->blockObj;
        JS_ASSERT(obj->getClass() == &js_BlockClass);
        scope = obj->scope();
        sprop = scope->lookup(ATOM_TO_JSID(atom));
        if (sprop) {
            JS_ASSERT(sprop->hasShortID());

            if (slotp) {
                JS_ASSERT(JSVAL_IS_INT(obj->fslots[JSSLOT_BLOCK_DEPTH]));
                *slotp = JSVAL_TO_INT(obj->fslots[JSSLOT_BLOCK_DEPTH]) +
                         sprop->shortid;
            }
            return stmt;
        }
    }

    if (slotp)
        *slotp = -1;
    return stmt;
}
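/*
 * Slot arithmetic example for the lookup above: for a let block whose
 * JSSLOT_BLOCK_DEPTH value is 3, a binding with shortid 1 reports
 * *slotp == 4, i.e. the block's depth in the stack frame plus the binding's
 * index within the block.
 */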
/*
 * Check if the attributes describe a property holding a compile-time constant
 * or a permanent, read-only property without a getter.
 */
#define IS_CONSTANT_PROPERTY(attrs)                                           \
    (((attrs) & (JSPROP_READONLY | JSPROP_PERMANENT | JSPROP_GETTER)) ==      \
     (JSPROP_READONLY | JSPROP_PERMANENT))

/*
 * The function sets vp to JSVAL_HOLE when the atom does not correspond to a
 * name defining a constant.
 */
static JSBool
LookupCompileTimeConstant(JSContext *cx, JSCodeGenerator *cg, JSAtom *atom,
                          jsval *vp)
{
    JSBool ok;
    JSStmtInfo *stmt;
    JSObject *obj, *objbox;
    JSProperty *prop;
    uintN attrs;

    /*
     * Chase down the cg stack, but only until we reach the outermost cg.
     * This enables propagating consts from top-level into switch cases in a
     * function compiled along with the top-level script.
     */
    *vp = JSVAL_HOLE;
    do {
        if (cg->inFunction() || cg->compileAndGo()) {
            /* XXX this will need revising if 'const' becomes block-scoped. */
            stmt = js_LexicalLookup(cg, atom, NULL);
            if (stmt)
                return JS_TRUE;

            if (JSCodeGenerator::ConstMap::Ptr p = cg->constMap.lookup(atom)) {
                JS_ASSERT(p->value != JSVAL_HOLE);
                *vp = p->value;
                return JS_TRUE;
            }

            /*
             * Try looking in the variable object for a direct property that
             * is readonly and permanent. We know such a property can't be
             * shadowed by another property on obj's prototype chain, or a
             * with object or catch variable; nor can prop's value be changed,
             * nor can prop be deleted.
             */
            if (cg->inFunction()) {
                if (js_LookupLocal(cx, cg->fun, atom, NULL) != JSLOCAL_NONE)
                    break;
            } else {
                JS_ASSERT(cg->compileAndGo());
                obj = cg->scopeChain;
                ok = obj->lookupProperty(cx, ATOM_TO_JSID(atom), &objbox, &prop);
                if (!ok)
                    return JS_FALSE;
                if (objbox == obj) {
                    /*
                     * We're compiling code that will be executed immediately,
                     * not re-executed against a different scope chain and/or
                     * variable object. Therefore we can get constant values
                     * from our variable object here.
                     */
                    ok = obj->getAttributes(cx, ATOM_TO_JSID(atom), prop, &attrs);
                    if (ok && IS_CONSTANT_PROPERTY(attrs)) {
                        ok = obj->getProperty(cx, ATOM_TO_JSID(atom), vp);
                        JS_ASSERT_IF(ok, *vp != JSVAL_HOLE);
                    }
                }
                if (prop)
                    objbox->dropProperty(cx, prop);
                if (!ok)
                    return JS_FALSE;
                if (prop)
                    break;
            }
        }
    } while ((cg = (JSCodeGenerator *) cg->parent) != NULL);
    return JS_TRUE;
}
/*
 * Return JSOP_NOP to indicate that index fits 2 bytes and no index segment
 * reset instruction is necessary, JSOP_FALSE to indicate an error or either
 * JSOP_RESETBASE0 or JSOP_RESETBASE to indicate the reset bytecode to issue
 * after the main bytecode sequence.
 */
static JSOp
EmitBigIndexPrefix(JSContext *cx, JSCodeGenerator *cg, uintN index)
{
    uintN indexBase;

    /*
     * We have max 3 bytes for indexes and check for INDEX_LIMIT overflow only
     * for big indexes.
     */
    JS_STATIC_ASSERT(INDEX_LIMIT <= JS_BIT(24));
    JS_STATIC_ASSERT(INDEX_LIMIT >=
                     (JSOP_INDEXBASE3 - JSOP_INDEXBASE1 + 2) << 16);

    if (index < JS_BIT(16))
        return JSOP_NOP;
    indexBase = index >> 16;
    if (indexBase <= JSOP_INDEXBASE3 - JSOP_INDEXBASE1 + 1) {
        if (js_Emit1(cx, cg, (JSOp)(JSOP_INDEXBASE1 + indexBase - 1)) < 0)
            return JSOP_FALSE;
        return JSOP_RESETBASE0;
    }

    if (index >= INDEX_LIMIT) {
        JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL,
                             JSMSG_TOO_MANY_LITERALS);
        return JSOP_FALSE;
    }

    if (js_Emit2(cx, cg, JSOP_INDEXBASE, (JSOp)indexBase) < 0)
        return JSOP_FALSE;
    return JSOP_RESETBASE;
}
/*
 * Emit a bytecode and its 2-byte constant index immediate operand. If the
 * index requires more than 2 bytes, emit a prefix op whose 8-bit immediate
 * operand effectively extends the 16-bit immediate of the prefixed opcode,
 * by changing index "segment" (see jsinterp.c). We optimize segments 1-3
 * with single-byte JSOP_INDEXBASE[123] codes.
 *
 * Such prefixing currently requires a suffix to restore the "zero segment"
 * register setting, but this could be optimized further.
 */
static JSBool
EmitIndexOp(JSContext *cx, JSOp op, uintN index, JSCodeGenerator *cg)
{
    JSOp bigSuffix;

    bigSuffix = EmitBigIndexPrefix(cx, cg, index);
    if (bigSuffix == JSOP_FALSE)
        return JS_FALSE;
    EMIT_UINT16_IMM_OP(op, index);
    return bigSuffix == JSOP_NOP || js_Emit1(cx, cg, bigSuffix) >= 0;
}
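/*
 * Worked example: an atom index of 0x25ABC (segment 2) cannot fit the 16-bit
 * immediate, so EmitBigIndexPrefix emits the one-byte JSOP_INDEXBASE2 prefix
 * (indexBase == 0x25ABC >> 16 == 2), the op itself carries the low 16 bits
 * 0x5ABC, and a trailing JSOP_RESETBASE0 restores segment zero.
 */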
/*
 * Slight sugar for EmitIndexOp, again accessing cx and cg from the macro
 * caller's lexical environment, and embedding a false return on error.
 */
#define EMIT_INDEX_OP(op, index)                                              \
    JS_BEGIN_MACRO                                                            \
        if (!EmitIndexOp(cx, op, index, cg))                                  \
            return JS_FALSE;                                                  \
    JS_END_MACRO

static JSBool
EmitAtomOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
{
    JSAtomListElement *ale;

    JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
    if (op == JSOP_GETPROP &&
        pn->pn_atom == cx->runtime->atomState.lengthAtom) {
        return js_Emit1(cx, cg, JSOP_LENGTH) >= 0;
    }
    ale = cg->atomList.add(cg->parser, pn->pn_atom);
    if (!ale)
        return JS_FALSE;
    return EmitIndexOp(cx, op, ALE_INDEX(ale), cg);
}

static JSBool
EmitObjectOp(JSContext *cx, JSObjectBox *objbox, JSOp op,
             JSCodeGenerator *cg)
{
    JS_ASSERT(JOF_OPTYPE(op) == JOF_OBJECT);
    return EmitIndexOp(cx, op, cg->objectList.index(objbox), cg);
}
/*
 * What good are ARGNO_LEN and SLOTNO_LEN, you ask? The answer is that, apart
 * from EmitSlotIndexOp, they abstract out the detail that both are 2, and in
 * other parts of the code there's no necessary relationship between the two.
 * The abstraction cracks here in order to share EmitSlotIndexOp code among
 * the JSOP_DEFLOCALFUN and JSOP_GET{ARG,VAR,LOCAL}PROP cases.
 */
JS_STATIC_ASSERT(ARGNO_LEN == 2);
JS_STATIC_ASSERT(SLOTNO_LEN == 2);

static JSBool
EmitSlotIndexOp(JSContext *cx, JSOp op, uintN slot, uintN index,
                JSCodeGenerator *cg)
{
    JSOp bigSuffix;
    ptrdiff_t off;
    jsbytecode *pc;

    JS_ASSERT(JOF_OPTYPE(op) == JOF_SLOTATOM ||
              JOF_OPTYPE(op) == JOF_SLOTOBJECT);
    bigSuffix = EmitBigIndexPrefix(cx, cg, index);
    if (bigSuffix == JSOP_FALSE)
        return JS_FALSE;

    /* Emit [op, slot, index]. */
    off = js_EmitN(cx, cg, op, 2 + INDEX_LEN);
    if (off < 0)
        return JS_FALSE;
    pc = CG_CODE(cg, off);
    SET_UINT16(pc, slot);
    pc += 2;
    SET_INDEX(pc, index);
    return bigSuffix == JSOP_NOP || js_Emit1(cx, cg, bigSuffix) >= 0;
}
/*
 * Adjust the slot for a block local to account for the number of variables
 * that share the same index space with locals. Due to the incremental code
 * generation for top-level script, we do the adjustment via code patching in
 * Compiler::compileScript; see comments there.
 *
 * The function returns -1 on failures.
 */
static jsint
AdjustBlockSlot(JSContext *cx, JSCodeGenerator *cg, jsint slot)
{
    JS_ASSERT((jsuint) slot < cg->maxStackDepth);
    if (cg->inFunction()) {
        slot += cg->fun->u.i.nvars;
        if ((uintN) slot >= SLOTNO_LIMIT) {
            ReportCompileErrorNumber(cx, CG_TS(cg), NULL, JSREPORT_ERROR, JSMSG_TOO_MANY_LOCALS);
            slot = -1;
        }
    }
    return slot;
}
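/*
 * Example: inside a function with fun->u.i.nvars == 2, a block local at
 * block depth 0 lands in frame slot 2, since block locals live after the
 * function's vars in the same index space; in global code the slot is left
 * unadjusted for Compiler::compileScript to patch.
 */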
static bool
EmitEnterBlock(JSContext *cx, JSParseNode *pn, JSCodeGenerator *cg)
{
    JS_ASSERT(PN_TYPE(pn) == TOK_LEXICALSCOPE);
    if (!EmitObjectOp(cx, pn->pn_objbox, JSOP_ENTERBLOCK, cg))
        return false;

    JSObject *blockObj = pn->pn_objbox->object;
    jsint depth = AdjustBlockSlot(cx, cg, OBJ_BLOCK_DEPTH(cx, blockObj));
    if (depth < 0)
        return false;

    for (uintN slot = JSSLOT_FREE(&js_BlockClass),
         limit = slot + OBJ_BLOCK_COUNT(cx, blockObj);
         slot < limit; slot++) {
        jsval v = blockObj->getSlot(slot);

        /* Beware the empty destructuring dummy. */
        if (JSVAL_IS_VOID(v)) {
            JS_ASSERT(slot + 1 <= limit);
            continue;
        }

        JSDefinition *dn = (JSDefinition *) JSVAL_TO_PRIVATE(v);
        JS_ASSERT(dn->pn_defn);
        JS_ASSERT(uintN(dn->frameSlot() + depth) < JS_BIT(16));
        dn->pn_cookie += depth;
#ifdef DEBUG
        for (JSParseNode *pnu = dn->dn_uses; pnu; pnu = pnu->pn_link) {
            JS_ASSERT(pnu->pn_lexdef == dn);
            JS_ASSERT(!(pnu->pn_dflags & PND_BOUND));
            JS_ASSERT(pnu->pn_cookie == FREE_UPVAR_COOKIE);
        }
#endif
    }

    blockObj->scope()->freeslot = JSSLOT_FREE(&js_BlockClass);
    return blockObj->growSlots(cx, JSSLOT_FREE(&js_BlockClass));
}
/*
 * When eval is called from a function, the eval code or function code it
 * compiles may reference upvars that live in the eval-calling function. The
 * eval-invoked compiler does not have explicit definitions for these upvars
 * and we do not attempt to create them a-priori (by inspecting the function's
 * args and vars) -- we could, but we'd take an avoidable penalty for each
 * function local not referenced by any upvar. Instead, we map such upvars
 * lazily, growing upvarMap.vector by powers of two.
 *
 * This function knows that it is called with pn pointing to a PN_NAME-arity
 * node, and cg->parser->callerFrame having a non-null fun member, and the
 * static level of cg at least one greater than the eval-calling function's
 * static level.
 */
static bool
MakeUpvarForEval(JSParseNode *pn, JSCodeGenerator *cg)
{
    JSContext *cx = cg->parser->context;
    JSFunction *fun = cg->parser->callerFrame->fun;
    uintN upvarLevel = fun->u.i.script->staticLevel;

    JSFunctionBox *funbox = cg->funbox;
    if (funbox) {
        /*
         * Treat top-level function definitions as escaping (i.e., as funargs),
         * required since we compile each such top level function or statement
         * and throw away the AST, so we can't yet see all funarg uses of this
         * function being compiled (cg->funbox->object). See bug 493177.
         */
        if (funbox->level == fun->u.i.script->staticLevel + 1U &&
            !(((JSFunction *) funbox->object)->flags & JSFUN_LAMBDA)) {
            JS_ASSERT_IF(cx->options & JSOPTION_ANONFUNFIX,
                         ((JSFunction *) funbox->object)->atom);
            return true;
        }

        while (funbox->level >= upvarLevel) {
            if (funbox->node->pn_dflags & PND_FUNARG)
                return true;
            funbox = funbox->parent;
            if (!funbox)
                break;
        }
    }

    JSAtom *atom = pn->pn_atom;

    uintN index;
    JSLocalKind localKind = js_LookupLocal(cx, fun, atom, &index);
    if (localKind == JSLOCAL_NONE)
        return true;

    JS_ASSERT(cg->staticLevel > upvarLevel);
    if (cg->staticLevel >= JS_DISPLAY_SIZE || upvarLevel >= JS_DISPLAY_SIZE)
        return true;

    JSAtomListElement *ale = cg->upvarList.lookup(atom);
    if (!ale) {
        if (cg->inFunction() &&
            !js_AddLocal(cx, cg->fun, atom, JSLOCAL_UPVAR)) {
            return false;
        }

        ale = cg->upvarList.add(cg->parser, atom);
        if (!ale)
            return false;
        JS_ASSERT(ALE_INDEX(ale) == cg->upvarList.count - 1);

        uint32 *vector = cg->upvarMap.vector;
        uint32 length = cg->upvarMap.length;

        JS_ASSERT(ALE_INDEX(ale) <= length);
        if (ALE_INDEX(ale) == length) {
            length = 2 * JS_MAX(2, length);
            vector = (uint32 *) cx->realloc(vector, length * sizeof *vector);
            if (!vector)
                return false;
            cg->upvarMap.vector = vector;
            cg->upvarMap.length = length;
        }

        if (localKind != JSLOCAL_ARG)
            index += fun->nargs;
        JS_ASSERT(index < JS_BIT(16));

        uintN skip = cg->staticLevel - upvarLevel;
        vector[ALE_INDEX(ale)] = MAKE_UPVAR_COOKIE(skip, index);
    }

    pn->pn_op = JSOP_GETUPVAR;
    pn->pn_cookie = MAKE_UPVAR_COOKIE(cg->staticLevel, ALE_INDEX(ale));
    pn->pn_dflags |= PND_BOUND;
    return true;
}
1984 * BindNameToSlot attempts to optimize name gets and sets to stack slot loads
1985 * and stores, given the compile-time information in cg and a TOK_NAME node pn.
1986 * It returns false on error, true on success.
1988 * The caller can inspect pn->pn_cookie: if it is no longer FREE_UPVAR_COOKIE,
1989 * optimization occurred and BindNameToSlot also updated pn->pn_op.
1990 * If pn->pn_cookie is still FREE_UPVAR_COOKIE on return, pn->pn_op still may
1991 * have been optimized, e.g., from JSOP_NAME to JSOP_CALLEE. Whether or not
1992 * pn->pn_op was modified, if this function finds an argument or local variable
1993 * name, PND_CONST will be set in pn_dflags for read-only properties after a
1994 * successful return.
1996 * NB: if you add more opcodes specialized from JSOP_NAME, etc., don't forget
1997 * to update the TOK_FOR (for-in) and TOK_ASSIGN (op=, e.g. +=) special cases
1998 * in js_EmitTree.
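/*
 * Illustrative example (added): in
 *
 *   function f(a) { var b = a; return b; }
 *
 * the use of a is rewritten from JSOP_NAME to JSOP_GETARG and the uses of
 * b to JSOP_GETLOCAL/JSOP_SETLOCAL, with the slot stored in pn_cookie and
 * PND_BOUND set in pn_dflags.
 */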
2000 static JSBool
2001 BindNameToSlot(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
2003 JSDefinition *dn;
2004 JSOp op;
2005 JSAtom *atom;
2006 uint32 cookie;
2007 JSDefinition::Kind dn_kind;
2008 JSAtomListElement *ale;
2009 uintN index;
2011 JS_ASSERT(pn->pn_type == TOK_NAME);
2013 /* Idempotency tests come first, since we may be called more than once. */
2014 if (pn->pn_dflags & PND_BOUND)
2015 return JS_TRUE;
2017 /* No cookie initialized for these two, they're pre-bound by definition. */
2018 JS_ASSERT(pn->pn_op != JSOP_ARGUMENTS && pn->pn_op != JSOP_CALLEE);
2021 * The parser linked all uses (including forward references) to their
2022 * definitions, unless a with statement or direct eval intervened.
2024 if (pn->pn_used) {
2025 JS_ASSERT(pn->pn_cookie == FREE_UPVAR_COOKIE);
2026 dn = pn->pn_lexdef;
2027 JS_ASSERT(dn->pn_defn);
2028 if (pn->isDeoptimized())
2029 return JS_TRUE;
2030 pn->pn_dflags |= (dn->pn_dflags & PND_CONST);
2031 } else {
2032 if (!pn->pn_defn)
2033 return JS_TRUE;
2034 dn = (JSDefinition *) pn;
2037 op = PN_OP(pn);
2038 if (op == JSOP_NOP)
2039 return JS_TRUE;
2041 JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
2042 atom = pn->pn_atom;
2043 cookie = dn->pn_cookie;
2044 dn_kind = dn->kind();
2047 * Turn attempts to mutate const-declared bindings into get ops (for
2048 * pre-increment and pre-decrement ops, our caller will have to emit
2049 * JSOP_POS, JSOP_ONE, and JSOP_ADD as well).
2051 * Turn JSOP_DELNAME into JSOP_FALSE if dn is known, as all declared
2052 * bindings visible to the compiler are permanent in JS unless the
2053 * declaration originates in eval code. We detect eval code by testing
2054 * cg->parser->callerFrame, which is set only by eval or a debugger
2055 * equivalent.
2057 * Note that this callerFrame non-null test must be qualified by testing
2058 * !cg->funbox to exclude function code nested in eval code, which is not
2059 * subject to the deletable binding exception.
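/*
 * Example (added): for global code 'var x; delete x;' the compiler knows x
 * is a permanent binding, so the delete compiles to JSOP_FALSE; inside
 * eval, as in eval("var y; delete y;"), the op stays JSOP_DELNAME because
 * eval-declared bindings are deletable.
 */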
2061 switch (op) {
2062 case JSOP_NAME:
2063 case JSOP_SETCONST:
2064 break;
2065 case JSOP_DELNAME:
2066 if (dn_kind != JSDefinition::UNKNOWN) {
2067 if (cg->parser->callerFrame && !cg->funbox)
2068 JS_ASSERT(cg->compileAndGo());
2069 else
2070 pn->pn_op = JSOP_FALSE;
2071 pn->pn_dflags |= PND_BOUND;
2072 return JS_TRUE;
2074 break;
2075 default:
2076 if (pn->isConst())
2077 pn->pn_op = op = JSOP_NAME;
2080 if (cookie == FREE_UPVAR_COOKIE) {
2081 JSStackFrame *caller = cg->parser->callerFrame;
2082 if (caller) {
2083 JS_ASSERT(cg->compileAndGo());
2086 * Don't generate upvars on the left side of a for loop. See
2087 * bug 470758.
2089 if (cg->flags & TCF_IN_FOR_INIT)
2090 return JS_TRUE;
2092 JS_ASSERT(caller->script);
2093 if (!caller->fun)
2094 return JS_TRUE;
2097 * Make sure the variable object used by the compiler to initialize
2098 * parent links matches the caller's varobj. Compile-n-go compiler-
2099 * created function objects have the top-level cg's scopeChain set
2100 * as their parent by Parser::newFunction.
2102 JSObject *scopeobj = cg->inFunction()
2103 ? FUN_OBJECT(cg->fun)->getParent()
2104 : cg->scopeChain;
2105 if (scopeobj != cg->parser->callerVarObj)
2106 return JS_TRUE;
2109 * We are compiling eval or debug script inside a function frame
2110 * and the scope chain matches the function's variable object.
2111 * Optimize access to the function's arguments and variables and to the
2112 * arguments object.
2114 if (op != JSOP_NAME)
2115 return JS_TRUE;
2118 * Generator functions may be resumed from any call stack, which
2119 * defeats the display optimization to static link searching used
2120 * by JSOP_{GET,CALL}UPVAR.
2122 JSFunction *fun = cg->parser->callerFrame->fun;
2123 JS_ASSERT(cg->staticLevel >= fun->u.i.script->staticLevel);
2124 unsigned skip = cg->staticLevel - fun->u.i.script->staticLevel;
2125 if (cg->skipSpansGenerator(skip))
2126 return JS_TRUE;
2128 return MakeUpvarForEval(pn, cg);
2130 return JS_TRUE;
2133 if (dn->pn_dflags & PND_GVAR) {
2135 * If this is a global reference from within a function, leave pn_op as
2136 * JSOP_NAME, etc. We could emit JSOP_*GVAR ops within function code if
2137 * only we could depend on the global frame's slots being valid for all
2138 * calls to the function, and if we could equate the atom index in the
2139 * function's atom map for every global name with its frame slot.
2141 if (cg->inFunction())
2142 return JS_TRUE;
2145 * We are optimizing global variables and there may be no pre-existing
2146 * global property named atom when this global script runs. If atom was
2147 * declared via const or var, optimize pn to access fp->vars using the
2148 * appropriate JSOP_*GVAR op.
2150 * FIXME: should be able to optimize global function access too.
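/*
 * Example (added): in compile-and-go global code, 'var g; g = 2;' rewrites
 * the assignment from JSOP_SETNAME to JSOP_SETGVAR; the same reference
 * inside a function body is left as JSOP_SETNAME by the early return
 * above.
 */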
2152 JS_ASSERT(dn_kind == JSDefinition::VAR || dn_kind == JSDefinition::CONST);
2154 switch (op) {
2155 case JSOP_NAME: op = JSOP_GETGVAR; break;
2156 case JSOP_SETNAME: op = JSOP_SETGVAR; break;
2157 case JSOP_SETCONST: /* NB: no change */ break;
2158 case JSOP_INCNAME: op = JSOP_INCGVAR; break;
2159 case JSOP_NAMEINC: op = JSOP_GVARINC; break;
2160 case JSOP_DECNAME: op = JSOP_DECGVAR; break;
2161 case JSOP_NAMEDEC: op = JSOP_GVARDEC; break;
2162 case JSOP_FORNAME: /* NB: no change */ break;
2163 case JSOP_DELNAME: /* NB: no change */ break;
2164 default: JS_NOT_REACHED("gvar");
2166 pn->pn_op = op;
2167 pn->pn_cookie = cookie;
2168 pn->pn_dflags |= PND_BOUND;
2169 return JS_TRUE;
2172 uintN level = UPVAR_FRAME_SKIP(cookie);
2173 JS_ASSERT(cg->staticLevel >= level);
2176 * A JSDefinition witnessed as a declaration by the parser cannot be an
2177 * upvar, unless it is the degenerate kind of upvar selected above (in the
2178 * code before the PND_GVAR test) for the special case of compile-and-go
2179 * code generated from eval called from a function, where the eval code
2180 * uses local vars defined in the function. We detect this upvar-for-eval
2181 * case by checking dn's op.
2183 if (PN_OP(dn) == JSOP_GETUPVAR) {
2184 JS_ASSERT(cg->staticLevel >= level);
2185 if (op != JSOP_NAME)
2186 return JS_TRUE;
2188 #ifdef DEBUG
2189 JSStackFrame *caller = cg->parser->callerFrame;
2190 #endif
2191 JS_ASSERT(caller);
2192 JS_ASSERT(caller->script);
2194 JSTreeContext *tc = cg;
2195 while (tc->staticLevel != level)
2196 tc = tc->parent;
2197 JS_ASSERT(tc->compiling());
2199 JSCodeGenerator *evalcg = (JSCodeGenerator *) tc;
2200 JS_ASSERT(evalcg->compileAndGo());
2201 JS_ASSERT(caller->fun && cg->parser->callerVarObj == evalcg->scopeChain);
2204 * Don't generate upvars on the left side of a for loop. See
2205 * bug 470758 and bug 520513.
2207 if (evalcg->flags & TCF_IN_FOR_INIT)
2208 return JS_TRUE;
2210 if (cg->staticLevel == level) {
2211 pn->pn_op = JSOP_GETUPVAR;
2212 pn->pn_cookie = cookie;
2213 pn->pn_dflags |= PND_BOUND;
2214 return JS_TRUE;
2217 return MakeUpvarForEval(pn, cg);
2220 uintN skip = cg->staticLevel - level;
2221 if (skip != 0) {
2222 JS_ASSERT(cg->inFunction());
2223 JS_ASSERT_IF(UPVAR_FRAME_SLOT(cookie) != CALLEE_UPVAR_SLOT,
2224 cg->lexdeps.lookup(atom));
2225 JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
2226 JS_ASSERT(cg->fun->u.i.skipmin <= skip);
2229 * If op is a mutating opcode, this upvar's static level is too big to
2230 * index into the display, or the function is heavyweight, we fall back
2231 * on JSOP_*NAME*.
2233 if (op != JSOP_NAME)
2234 return JS_TRUE;
2235 if (level >= JS_DISPLAY_SIZE)
2236 return JS_TRUE;
2237 if (cg->flags & TCF_FUN_HEAVYWEIGHT)
2238 return JS_TRUE;
2240 if (FUN_FLAT_CLOSURE(cg->fun)) {
2241 op = JSOP_GETDSLOT;
2242 } else {
2244 * The function we're compiling may not be heavyweight, but if it
2245 * escapes as a funarg, we can't use JSOP_GETUPVAR/JSOP_CALLUPVAR.
2246 * Parser::analyzeFunctions has arranged for this function's
2247 * enclosing functions to be heavyweight, so we can safely stick
2248 * with JSOP_NAME/JSOP_CALLNAME.
2250 if (cg->funbox->node->pn_dflags & PND_FUNARG)
2251 return JS_TRUE;
2254 * Generator functions may be resumed from any call stack, which
2255 * defeats the display optimization to static link searching used
2256 * by JSOP_{GET,CALL}UPVAR.
2258 if (cg->skipSpansGenerator(skip))
2259 return JS_TRUE;
2261 op = JSOP_GETUPVAR;
2264 ale = cg->upvarList.lookup(atom);
2265 if (ale) {
2266 index = ALE_INDEX(ale);
2267 } else {
2268 if (!js_AddLocal(cx, cg->fun, atom, JSLOCAL_UPVAR))
2269 return JS_FALSE;
2271 ale = cg->upvarList.add(cg->parser, atom);
2272 if (!ale)
2273 return JS_FALSE;
2274 index = ALE_INDEX(ale);
2275 JS_ASSERT(index == cg->upvarList.count - 1);
2277 uint32 *vector = cg->upvarMap.vector;
2278 if (!vector) {
2279 uint32 length = cg->lexdeps.count;
2281 vector = (uint32 *) js_calloc(length * sizeof *vector);
2282 if (!vector) {
2283 JS_ReportOutOfMemory(cx);
2284 return JS_FALSE;
2286 cg->upvarMap.vector = vector;
2287 cg->upvarMap.length = length;
2290 uintN slot = UPVAR_FRAME_SLOT(cookie);
2291 if (slot != CALLEE_UPVAR_SLOT && dn_kind != JSDefinition::ARG) {
2292 JSTreeContext *tc = cg;
2293 do {
2294 tc = tc->parent;
2295 } while (tc->staticLevel != level);
2296 if (tc->inFunction())
2297 slot += tc->fun->nargs;
2300 vector[index] = MAKE_UPVAR_COOKIE(skip, slot);
2303 pn->pn_op = op;
2304 pn->pn_cookie = index;
2305 pn->pn_dflags |= PND_BOUND;
2306 return JS_TRUE;
2310 * We are compiling a function body and may be able to optimize name
2311 * to stack slot. Look for an argument or variable in the function and
2312 * rewrite pn_op and update pn accordingly.
2314 switch (dn_kind) {
2315 case JSDefinition::UNKNOWN:
2316 return JS_TRUE;
2318 case JSDefinition::LET:
2319 switch (op) {
2320 case JSOP_NAME: op = JSOP_GETLOCAL; break;
2321 case JSOP_SETNAME: op = JSOP_SETLOCAL; break;
2322 case JSOP_INCNAME: op = JSOP_INCLOCAL; break;
2323 case JSOP_NAMEINC: op = JSOP_LOCALINC; break;
2324 case JSOP_DECNAME: op = JSOP_DECLOCAL; break;
2325 case JSOP_NAMEDEC: op = JSOP_LOCALDEC; break;
2326 case JSOP_FORNAME: op = JSOP_FORLOCAL; break;
2327 default: JS_NOT_REACHED("let");
2329 break;
2331 case JSDefinition::ARG:
2332 switch (op) {
2333 case JSOP_NAME: op = JSOP_GETARG; break;
2334 case JSOP_SETNAME: op = JSOP_SETARG; break;
2335 case JSOP_INCNAME: op = JSOP_INCARG; break;
2336 case JSOP_NAMEINC: op = JSOP_ARGINC; break;
2337 case JSOP_DECNAME: op = JSOP_DECARG; break;
2338 case JSOP_NAMEDEC: op = JSOP_ARGDEC; break;
2339 case JSOP_FORNAME: op = JSOP_FORARG; break;
2340 default: JS_NOT_REACHED("arg");
2342 JS_ASSERT(!pn->isConst());
2343 break;
2345 case JSDefinition::VAR:
2346 if (PN_OP(dn) == JSOP_CALLEE) {
2347 JS_ASSERT(op != JSOP_CALLEE);
2348 JS_ASSERT((cg->fun->flags & JSFUN_LAMBDA) && atom == cg->fun->atom);
2351 * Leave pn->pn_op == JSOP_NAME if cg->fun is heavyweight, as we
2352 * cannot be sure cg->fun is not something of the form:
2354 * var ff = (function f(s) { eval(s); return f; });
2356 * where a caller invokes ff("var f = 42"). The result returned for
2357 * such an invocation must be 42, since the callee name is
2358 * lexically bound in an outer declarative environment from the
2359 * function's activation. See jsfun.cpp:call_resolve.
2361 JS_ASSERT(op != JSOP_DELNAME);
2362 if (!(cg->flags & TCF_FUN_HEAVYWEIGHT)) {
2363 op = JSOP_CALLEE;
2364 pn->pn_dflags |= PND_CONST;
2367 pn->pn_op = op;
2368 pn->pn_dflags |= PND_BOUND;
2369 return JS_TRUE;
2371 /* FALL THROUGH */
2373 default:
2374 JS_ASSERT_IF(dn_kind != JSDefinition::FUNCTION,
2375 dn_kind == JSDefinition::VAR ||
2376 dn_kind == JSDefinition::CONST);
2377 switch (op) {
2378 case JSOP_NAME: op = JSOP_GETLOCAL; break;
2379 case JSOP_SETNAME: op = JSOP_SETLOCAL; break;
2380 case JSOP_SETCONST: op = JSOP_SETLOCAL; break;
2381 case JSOP_INCNAME: op = JSOP_INCLOCAL; break;
2382 case JSOP_NAMEINC: op = JSOP_LOCALINC; break;
2383 case JSOP_DECNAME: op = JSOP_DECLOCAL; break;
2384 case JSOP_NAMEDEC: op = JSOP_LOCALDEC; break;
2385 case JSOP_FORNAME: op = JSOP_FORLOCAL; break;
2386 default: JS_NOT_REACHED("local");
2388 JS_ASSERT_IF(dn_kind == JSDefinition::CONST, pn->pn_dflags & PND_CONST);
2389 break;
2392 JS_ASSERT(op != PN_OP(pn));
2393 pn->pn_op = op;
2394 pn->pn_cookie = UPVAR_FRAME_SLOT(cookie);
2395 pn->pn_dflags |= PND_BOUND;
2396 return JS_TRUE;
2400 * If pn contains a useful expression, return true with *answer set to true.
2401 * If pn contains a useless expression, return true with *answer set to false.
2402 * Return false on error.
2404 * The caller should initialize *answer to false and invoke this function on
2405 * an expression statement or similar subtree to decide whether the tree could
2406 * produce code that has any side effects. For an expression statement, we
2407 * define useless code as code with no side effects, because the main effect,
2408 * the value left on the stack after the code executes, will be discarded by a
2409 * pop bytecode.
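/*
 * For instance (added commentary): the statement 'a === b;' recurses into
 * both operands, since === never invokes toString or valueOf, whereas
 * 'a + b;' conservatively sets *answer to true because either operand
 * could be an object whose valueOf has side effects.
 */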
2411 static JSBool
2412 CheckSideEffects(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
2413 JSBool *answer)
2415 JSBool ok;
2416 JSParseNode *pn2;
2418 ok = JS_TRUE;
2419 if (!pn || *answer)
2420 return ok;
2422 switch (pn->pn_arity) {
2423 case PN_FUNC:
2425 * Contrary to ES3, a named function is no longer useful by itself, because
2426 * we bind its name lexically (using JSOP_CALLEE) instead of creating an
2427 * Object instance and binding a readonly, permanent property in it (the
2428 * object and binding can be detected and hijacked or captured). This is a
2429 * bug fix to ES3, adopted in the ES3.1 drafts.
2431 *answer = JS_FALSE;
2432 break;
2434 case PN_LIST:
2435 if (pn->pn_op == JSOP_NOP ||
2436 pn->pn_op == JSOP_OR || pn->pn_op == JSOP_AND ||
2437 pn->pn_op == JSOP_STRICTEQ || pn->pn_op == JSOP_STRICTNE) {
2439 * Non-operators along with ||, &&, ===, and !== never invoke
2440 * toString or valueOf.
2442 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next)
2443 ok &= CheckSideEffects(cx, cg, pn2, answer);
2444 } else {
2446 * All invocation operations (construct: TOK_NEW, call: TOK_LP)
2447 * are presumed to be useful, because they may have side effects
2448 * even if their main effect (their return value) is discarded.
2450 * TOK_LB binary trees of 3 or more nodes are flattened into lists
2451 * to avoid too much recursion. All such lists must be presumed
2452 * to be useful because each index operation could invoke a getter
2453 * (the JSOP_ARGUMENTS special case below, in the PN_BINARY case,
2454 * does not apply here: arguments[i][j] might invoke a getter).
2456 * Likewise, array and object initialisers may call prototype
2457 * setters (the __defineSetter__ built-in, and writable __proto__
2458 * on Array.prototype create this hazard). Initialiser list nodes
2459 * have JSOP_NEWINIT in their pn_op.
2461 *answer = JS_TRUE;
2463 break;
2465 case PN_TERNARY:
2466 ok = CheckSideEffects(cx, cg, pn->pn_kid1, answer) &&
2467 CheckSideEffects(cx, cg, pn->pn_kid2, answer) &&
2468 CheckSideEffects(cx, cg, pn->pn_kid3, answer);
2469 break;
2471 case PN_BINARY:
2472 if (pn->pn_type == TOK_ASSIGN) {
2474 * Assignment is presumed to be useful, even if the next operation
2475 * is another assignment overwriting this one's ostensible effect,
2476 * because the left operand may be a property with a setter that
2477 * has side effects.
2479 * The only exception is assignment of a useless value to a const
2480 * declared in the function currently being compiled.
2482 pn2 = pn->pn_left;
2483 if (pn2->pn_type != TOK_NAME) {
2484 *answer = JS_TRUE;
2485 } else {
2486 if (!BindNameToSlot(cx, cg, pn2))
2487 return JS_FALSE;
2488 if (!CheckSideEffects(cx, cg, pn->pn_right, answer))
2489 return JS_FALSE;
2490 if (!*answer && (pn->pn_op != JSOP_NOP || !pn2->isConst()))
2491 *answer = JS_TRUE;
2493 } else {
2494 if (pn->pn_op == JSOP_OR || pn->pn_op == JSOP_AND ||
2495 pn->pn_op == JSOP_STRICTEQ || pn->pn_op == JSOP_STRICTNE) {
2497 * ||, &&, ===, and !== do not convert their operands via
2498 * toString or valueOf method calls.
2500 ok = CheckSideEffects(cx, cg, pn->pn_left, answer) &&
2501 CheckSideEffects(cx, cg, pn->pn_right, answer);
2502 } else {
2504 * We can't easily prove that neither operand ever denotes an
2505 * object with a toString or valueOf method.
2507 *answer = JS_TRUE;
2510 break;
2512 case PN_UNARY:
2513 switch (pn->pn_type) {
2514 case TOK_DELETE:
2515 pn2 = pn->pn_kid;
2516 switch (pn2->pn_type) {
2517 case TOK_NAME:
2518 if (!BindNameToSlot(cx, cg, pn2))
2519 return JS_FALSE;
2520 if (pn2->isConst()) {
2521 *answer = JS_FALSE;
2522 break;
2524 /* FALL THROUGH */
2525 case TOK_DOT:
2526 #if JS_HAS_XML_SUPPORT
2527 case TOK_DBLDOT:
2528 #endif
2529 case TOK_LP:
2530 case TOK_LB:
2531 /* All these delete addressing modes have effects too. */
2532 *answer = JS_TRUE;
2533 break;
2534 default:
2535 ok = CheckSideEffects(cx, cg, pn2, answer);
2536 break;
2538 break;
2540 case TOK_UNARYOP:
2541 if (pn->pn_op == JSOP_NOT) {
2542 /* ! does not convert its operand via toString or valueOf. */
2543 ok = CheckSideEffects(cx, cg, pn->pn_kid, answer);
2544 break;
2546 /* FALL THROUGH */
2548 default:
2550 * All of TOK_INC, TOK_DEC, TOK_THROW, TOK_YIELD, and TOK_DEFSHARP
2551 * have direct effects. Of the remaining unary-arity node types,
2552 * we can't easily prove that the operand never denotes an object
2553 * with a toString or valueOf method.
2555 *answer = JS_TRUE;
2556 break;
2558 break;
2560 case PN_NAME:
2562 * Take care to avoid trying to bind a label name (labels, both for
2563 * statements and property values in object initialisers, have pn_op
2564 * defaulted to JSOP_NOP).
2566 if (pn->pn_type == TOK_NAME && pn->pn_op != JSOP_NOP) {
2567 if (!BindNameToSlot(cx, cg, pn))
2568 return JS_FALSE;
2569 if (pn->pn_op != JSOP_ARGUMENTS && pn->pn_op != JSOP_CALLEE &&
2570 pn->pn_cookie == FREE_UPVAR_COOKIE) {
2572 * Not an argument or local variable use, and not a use of an
2573 * unshadowed named function expression's given name, so this
2574 * expression could invoke a getter that has side effects.
2576 *answer = JS_TRUE;
2579 pn2 = pn->maybeExpr();
2580 if (pn->pn_type == TOK_DOT) {
2581 if (pn2->pn_type == TOK_NAME && !BindNameToSlot(cx, cg, pn2))
2582 return JS_FALSE;
2583 if (!(pn2->pn_op == JSOP_ARGUMENTS &&
2584 pn->pn_atom == cx->runtime->atomState.lengthAtom)) {
2586 * Any dotted property reference could call a getter, except
2587 * for arguments.length where arguments is unambiguous.
2589 *answer = JS_TRUE;
2592 ok = CheckSideEffects(cx, cg, pn2, answer);
2593 break;
2595 case PN_NAMESET:
2596 ok = CheckSideEffects(cx, cg, pn->pn_tree, answer);
2597 break;
2599 case PN_NULLARY:
2600 if (pn->pn_type == TOK_DEBUGGER)
2601 *answer = JS_TRUE;
2602 break;
2604 return ok;
2607 static JSBool
2608 EmitNameOp(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
2609 JSBool callContext)
2611 JSOp op;
2613 if (!BindNameToSlot(cx, cg, pn))
2614 return JS_FALSE;
2615 op = PN_OP(pn);
2617 if (callContext) {
2618 switch (op) {
2619 case JSOP_NAME:
2620 op = JSOP_CALLNAME;
2621 break;
2622 case JSOP_GETGVAR:
2623 JS_ASSERT(!cg->funbox);
2624 op = JSOP_CALLGVAR;
2625 break;
2626 case JSOP_GETARG:
2627 op = JSOP_CALLARG;
2628 break;
2629 case JSOP_GETLOCAL:
2630 op = JSOP_CALLLOCAL;
2631 break;
2632 case JSOP_GETUPVAR:
2633 op = JSOP_CALLUPVAR;
2634 break;
2635 case JSOP_GETDSLOT:
2636 op = JSOP_CALLDSLOT;
2637 break;
2638 default:
2639 JS_ASSERT(op == JSOP_ARGUMENTS || op == JSOP_CALLEE);
2640 break;
2644 if (op == JSOP_ARGUMENTS || op == JSOP_CALLEE) {
2645 if (js_Emit1(cx, cg, op) < 0)
2646 return JS_FALSE;
2647 if (callContext && js_Emit1(cx, cg, JSOP_NULL) < 0)
2648 return JS_FALSE;
2649 } else {
2650 if (pn->pn_cookie != FREE_UPVAR_COOKIE) {
2651 EMIT_UINT16_IMM_OP(op, pn->pn_cookie);
2652 } else {
2653 if (!EmitAtomOp(cx, pn, op, cg))
2654 return JS_FALSE;
2658 return JS_TRUE;
2661 #if JS_HAS_XML_SUPPORT
2662 static JSBool
2663 EmitXMLName(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
2665 JSParseNode *pn2;
2666 uintN oldflags;
2668 JS_ASSERT(pn->pn_type == TOK_UNARYOP);
2669 JS_ASSERT(pn->pn_op == JSOP_XMLNAME);
2670 JS_ASSERT(op == JSOP_XMLNAME || op == JSOP_CALLXMLNAME);
2672 pn2 = pn->pn_kid;
2673 oldflags = cg->flags;
2674 cg->flags &= ~TCF_IN_FOR_INIT;
2675 if (!js_EmitTree(cx, cg, pn2))
2676 return JS_FALSE;
2677 cg->flags |= oldflags & TCF_IN_FOR_INIT;
2678 if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
2679 CG_OFFSET(cg) - pn2->pn_offset) < 0) {
2680 return JS_FALSE;
2683 return js_Emit1(cx, cg, op) >= 0;
2685 #endif
2687 static JSBool
2688 EmitSpecialPropOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
2691 * Special case for obj.__proto__ to deoptimize away from fast paths in the
2692 * interpreter and trace recorder, which skip dense array instances by
2693 * going up to Array.prototype before looking up the property name.
2695 JSAtomListElement *ale = cg->atomList.add(cg->parser, pn->pn_atom);
2696 if (!ale)
2697 return JS_FALSE;
2698 if (!EmitIndexOp(cx, JSOP_QNAMEPART, ALE_INDEX(ale), cg))
2699 return JS_FALSE;
2700 if (js_Emit1(cx, cg, op) < 0)
2701 return JS_FALSE;
2702 return JS_TRUE;
2705 static JSBool
2706 EmitPropOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg,
2707 JSBool callContext)
2709 JSParseNode *pn2, *pndot, *pnup, *pndown;
2710 ptrdiff_t top;
2712 JS_ASSERT(pn->pn_arity == PN_NAME);
2713 pn2 = pn->maybeExpr();
2715 /* Special case deoptimization for __proto__. */
2716 if ((op == JSOP_GETPROP || op == JSOP_CALLPROP) &&
2717 pn->pn_atom == cx->runtime->atomState.protoAtom) {
2718 if (pn2 && !js_EmitTree(cx, cg, pn2))
2719 return JS_FALSE;
2720 return EmitSpecialPropOp(cx, pn, callContext ? JSOP_CALLELEM : JSOP_GETELEM, cg);
2723 if (callContext) {
2724 JS_ASSERT(pn->pn_type == TOK_DOT);
2725 JS_ASSERT(op == JSOP_GETPROP);
2726 op = JSOP_CALLPROP;
2727 } else if (op == JSOP_GETPROP && pn->pn_type == TOK_DOT) {
2728 if (pn2->pn_op == JSOP_THIS) {
2729 if (pn->pn_atom != cx->runtime->atomState.lengthAtom) {
2730 /* Fast path for gets of |this.foo|. */
2731 return EmitAtomOp(cx, pn, JSOP_GETTHISPROP, cg);
2733 } else if (pn2->pn_type == TOK_NAME) {
2735 * Try to optimize:
2736 * - arguments.length into JSOP_ARGCNT
2737 * - argname.prop into JSOP_GETARGPROP
2738 * - localname.prop into JSOP_GETLOCALPROP
2739 * but don't do this if the property is 'length' -- prefer to emit
2740 * JSOP_GETARG, etc., and then JSOP_LENGTH.
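/*
 * For instance (added): in 'function f(p) { return p.x; }' the get of p.x
 * can emit JSOP_GETARGPROP with p's slot and the atom index of "x", while
 * 'return p.length;' emits JSOP_GETARG followed by JSOP_LENGTH.
 */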
2742 if (!BindNameToSlot(cx, cg, pn2))
2743 return JS_FALSE;
2744 if (pn->pn_atom == cx->runtime->atomState.lengthAtom) {
2745 if (pn2->pn_op == JSOP_ARGUMENTS)
2746 return js_Emit1(cx, cg, JSOP_ARGCNT) >= 0;
2747 } else {
2748 switch (pn2->pn_op) {
2749 case JSOP_GETARG:
2750 op = JSOP_GETARGPROP;
2751 goto do_indexconst;
2752 case JSOP_GETLOCAL:
2753 op = JSOP_GETLOCALPROP;
2754 do_indexconst: {
2755 JSAtomListElement *ale;
2756 jsatomid atomIndex;
2758 ale = cg->atomList.add(cg->parser, pn->pn_atom);
2759 if (!ale)
2760 return JS_FALSE;
2761 atomIndex = ALE_INDEX(ale);
2762 return EmitSlotIndexOp(cx, op, pn2->pn_cookie, atomIndex, cg);
2765 default:;
2772 * If the object operand is also a dotted property reference, reverse the
2773 * list linked via pn_expr temporarily so we can iterate over it from the
2774 * bottom up (reversing again as we go), to avoid excessive recursion.
2776 if (pn2->pn_type == TOK_DOT) {
2777 pndot = pn2;
2778 pnup = NULL;
2779 top = CG_OFFSET(cg);
2780 for (;;) {
2781 /* Reverse pndot->pn_expr to point up, not down. */
2782 pndot->pn_offset = top;
2783 JS_ASSERT(!pndot->pn_used);
2784 pndown = pndot->pn_expr;
2785 pndot->pn_expr = pnup;
2786 if (pndown->pn_type != TOK_DOT)
2787 break;
2788 pnup = pndot;
2789 pndot = pndown;
2792 /* pndown is a primary expression, not a dotted property reference. */
2793 if (!js_EmitTree(cx, cg, pndown))
2794 return JS_FALSE;
2796 do {
2797 /* Walk back up the list, emitting annotated name ops. */
2798 if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
2799 CG_OFFSET(cg) - pndown->pn_offset) < 0) {
2800 return JS_FALSE;
2803 /* Special case deoptimization on __proto__, as above. */
2804 if (pndot->pn_arity == PN_NAME && pndot->pn_atom == cx->runtime->atomState.protoAtom) {
2805 if (!EmitSpecialPropOp(cx, pndot, JSOP_GETELEM, cg))
2806 return JS_FALSE;
2807 } else if (!EmitAtomOp(cx, pndot, PN_OP(pndot), cg)) {
2808 return JS_FALSE;
2811 /* Reverse the pn_expr link again. */
2812 pnup = pndot->pn_expr;
2813 pndot->pn_expr = pndown;
2814 pndown = pndot;
2815 } while ((pndot = pnup) != NULL);
2816 } else {
2817 if (!js_EmitTree(cx, cg, pn2))
2818 return JS_FALSE;
2821 if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
2822 CG_OFFSET(cg) - pn2->pn_offset) < 0) {
2823 return JS_FALSE;
2826 return EmitAtomOp(cx, pn, op, cg);
2829 static JSBool
2830 EmitElemOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
2832 ptrdiff_t top;
2833 JSParseNode *left, *right, *next, ltmp, rtmp;
2834 jsint slot;
2836 top = CG_OFFSET(cg);
2837 if (pn->pn_arity == PN_LIST) {
2838 /* Left-associative operator chain to avoid too much recursion. */
2839 JS_ASSERT(pn->pn_op == JSOP_GETELEM);
2840 JS_ASSERT(pn->pn_count >= 3);
2841 left = pn->pn_head;
2842 right = pn->last();
2843 next = left->pn_next;
2844 JS_ASSERT(next != right);
2847 * Try to optimize arguments[0][j]... into JSOP_ARGSUB<0> followed by
2848 * one or more index expression and JSOP_GETELEM op pairs.
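/*
 * E.g. (added): 'arguments[0][j]' emits JSOP_ARGSUB <0>, then the code
 * for j, then an annotated JSOP_GETELEM.
 */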
2850 if (left->pn_type == TOK_NAME && next->pn_type == TOK_NUMBER) {
2851 if (!BindNameToSlot(cx, cg, left))
2852 return JS_FALSE;
2853 if (left->pn_op == JSOP_ARGUMENTS &&
2854 JSDOUBLE_IS_INT(next->pn_dval, slot) &&
2855 (jsuint)slot < JS_BIT(16)) {
2857 * arguments[i]() requires the arguments object as "this".
2858 * Check that we never generate a list for that usage.
2860 JS_ASSERT(op != JSOP_CALLELEM || next->pn_next);
2861 left->pn_offset = next->pn_offset = top;
2862 EMIT_UINT16_IMM_OP(JSOP_ARGSUB, (jsatomid)slot);
2863 left = next;
2864 next = left->pn_next;
2869 * Check whether we generated JSOP_ARGSUB, just above, and have only
2870 * one more index expression to emit. Given arguments[0][j], we must
2871 * skip the while loop altogether, falling through to emit code for j
2872 * (in the subtree referenced by right), followed by the annotated op,
2873 * at the bottom of this function.
2875 JS_ASSERT(next != right || pn->pn_count == 3);
2876 if (left == pn->pn_head) {
2877 if (!js_EmitTree(cx, cg, left))
2878 return JS_FALSE;
2880 while (next != right) {
2881 if (!js_EmitTree(cx, cg, next))
2882 return JS_FALSE;
2883 if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
2884 return JS_FALSE;
2885 if (js_Emit1(cx, cg, JSOP_GETELEM) < 0)
2886 return JS_FALSE;
2887 next = next->pn_next;
2889 } else {
2890 if (pn->pn_arity == PN_NAME) {
2892 * Set left and right so pn appears to be a TOK_LB node, instead
2893 * of a TOK_DOT node. See the TOK_FOR/IN case in js_EmitTree, and
2894 * EmitDestructuringOps nearer below. In the destructuring case,
2895 * the base expression (pn_expr) of the name may be null, which
2896 * means we have to emit a JSOP_BINDNAME.
2898 left = pn->maybeExpr();
2899 if (!left) {
2900 left = &ltmp;
2901 left->pn_type = TOK_STRING;
2902 left->pn_op = JSOP_BINDNAME;
2903 left->pn_arity = PN_NULLARY;
2904 left->pn_pos = pn->pn_pos;
2905 left->pn_atom = pn->pn_atom;
2907 right = &rtmp;
2908 right->pn_type = TOK_STRING;
2909 JS_ASSERT(ATOM_IS_STRING(pn->pn_atom));
2910 right->pn_op = js_IsIdentifier(ATOM_TO_STRING(pn->pn_atom))
2911 ? JSOP_QNAMEPART
2912 : JSOP_STRING;
2913 right->pn_arity = PN_NULLARY;
2914 right->pn_pos = pn->pn_pos;
2915 right->pn_atom = pn->pn_atom;
2916 } else {
2917 JS_ASSERT(pn->pn_arity == PN_BINARY);
2918 left = pn->pn_left;
2919 right = pn->pn_right;
2922 /* Try to optimize arguments[0] (e.g.) into JSOP_ARGSUB<0>. */
2923 if (op == JSOP_GETELEM &&
2924 left->pn_type == TOK_NAME &&
2925 right->pn_type == TOK_NUMBER) {
2926 if (!BindNameToSlot(cx, cg, left))
2927 return JS_FALSE;
2928 if (left->pn_op == JSOP_ARGUMENTS &&
2929 JSDOUBLE_IS_INT(right->pn_dval, slot) &&
2930 (jsuint)slot < JS_BIT(16)) {
2931 left->pn_offset = right->pn_offset = top;
2932 EMIT_UINT16_IMM_OP(JSOP_ARGSUB, (jsatomid)slot);
2933 return JS_TRUE;
2937 if (!js_EmitTree(cx, cg, left))
2938 return JS_FALSE;
2941 /* The right side of the descendant operator is implicitly quoted. */
2942 JS_ASSERT(op != JSOP_DESCENDANTS || right->pn_type != TOK_STRING ||
2943 right->pn_op == JSOP_QNAMEPART);
2944 if (!js_EmitTree(cx, cg, right))
2945 return JS_FALSE;
2946 if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
2947 return JS_FALSE;
2948 return js_Emit1(cx, cg, op) >= 0;
2951 static JSBool
2952 EmitNumberOp(JSContext *cx, jsdouble dval, JSCodeGenerator *cg)
2954 jsint ival;
2955 uint32 u;
2956 ptrdiff_t off;
2957 jsbytecode *pc;
2958 JSAtom *atom;
2959 JSAtomListElement *ale;
2961 if (JSDOUBLE_IS_INT(dval, ival) && INT_FITS_IN_JSVAL(ival)) {
2962 if (ival == 0)
2963 return js_Emit1(cx, cg, JSOP_ZERO) >= 0;
2964 if (ival == 1)
2965 return js_Emit1(cx, cg, JSOP_ONE) >= 0;
2966 if ((jsint)(int8)ival == ival)
2967 return js_Emit2(cx, cg, JSOP_INT8, (jsbytecode)(int8)ival) >= 0;
2969 u = (uint32)ival;
2970 if (u < JS_BIT(16)) {
2971 EMIT_UINT16_IMM_OP(JSOP_UINT16, u);
2972 } else if (u < JS_BIT(24)) {
2973 off = js_EmitN(cx, cg, JSOP_UINT24, 3);
2974 if (off < 0)
2975 return JS_FALSE;
2976 pc = CG_CODE(cg, off);
2977 SET_UINT24(pc, u);
2978 } else {
2979 off = js_EmitN(cx, cg, JSOP_INT32, 4);
2980 if (off < 0)
2981 return JS_FALSE;
2982 pc = CG_CODE(cg, off);
2983 SET_INT32(pc, ival);
2985 return JS_TRUE;
2988 atom = js_AtomizeDouble(cx, dval);
2989 if (!atom)
2990 return JS_FALSE;
2992 ale = cg->atomList.add(cg->parser, atom);
2993 if (!ale)
2994 return JS_FALSE;
2995 return EmitIndexOp(cx, JSOP_DOUBLE, ALE_INDEX(ale), cg);
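/*
 * Worked example (added): 0 and 1 emit JSOP_ZERO and JSOP_ONE; 7 and -5
 * fit JSOP_INT8; 300 takes JSOP_UINT16; 70000 takes JSOP_UINT24; 1e9
 * needs JSOP_INT32; and a non-integral double such as 3.14 is atomized
 * and emitted via JSOP_DOUBLE.
 */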
2998 static JSBool
2999 EmitSwitch(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
3000 JSStmtInfo *stmtInfo)
3002 JSOp switchOp;
3003 JSBool ok, hasDefault, constPropagated;
3004 ptrdiff_t top, off, defaultOffset;
3005 JSParseNode *pn2, *pn3, *pn4;
3006 uint32 caseCount, tableLength;
3007 JSParseNode **table;
3008 jsdouble d;
3009 jsint i, low, high;
3010 jsval v;
3011 JSAtom *atom;
3012 JSAtomListElement *ale;
3013 intN noteIndex;
3014 size_t switchSize, tableSize;
3015 jsbytecode *pc, *savepc;
3016 #if JS_HAS_BLOCK_SCOPE
3017 jsint count;
3018 #endif
3020 /* Try for the most optimal op; fall back to lookup if the cases are not dense ints, or to condswitch per ECMAv2. */
3021 switchOp = JSOP_TABLESWITCH;
3022 ok = JS_TRUE;
3023 hasDefault = constPropagated = JS_FALSE;
3024 defaultOffset = -1;
3027 * If the switch contains let variables scoped by its body, model the
3028 * resulting block on the stack first, before emitting the discriminant's
3029 * bytecode (in case the discriminant contains a stack-model dependency
3030 * such as a let expression).
3032 pn2 = pn->pn_right;
3033 #if JS_HAS_BLOCK_SCOPE
3034 if (pn2->pn_type == TOK_LEXICALSCOPE) {
3036 * Push the body's block scope before discriminant code-gen for proper
3037 * static block scope linkage in case the discriminant contains a let
3038 * expression. The block's locals must lie under the discriminant on
3039 * the stack so that case-dispatch bytecodes can find the discriminant
3040 * on top of stack.
3042 count = OBJ_BLOCK_COUNT(cx, pn2->pn_objbox->object);
3043 js_PushBlockScope(cg, stmtInfo, pn2->pn_objbox->object, -1);
3044 stmtInfo->type = STMT_SWITCH;
3046 /* Emit JSOP_ENTERBLOCK before code to evaluate the discriminant. */
3047 if (!EmitEnterBlock(cx, pn2, cg))
3048 return JS_FALSE;
3051 * Pop the switch's statement info around discriminant code-gen. Note
3052 * how this leaves cg->blockChain referencing the switch's
3053 * block scope object, which is necessary for correct block parenting
3054 * in the case where the discriminant contains a let expression.
3056 cg->topStmt = stmtInfo->down;
3057 cg->topScopeStmt = stmtInfo->downScope;
3059 #ifdef __GNUC__
3060 else {
3061 count = 0;
3063 #endif
3064 #endif
3067 * Emit code for the discriminant first (or nearly first, in the case of a
3068 * switch whose body is a block scope).
3070 if (!js_EmitTree(cx, cg, pn->pn_left))
3071 return JS_FALSE;
3073 /* Switch bytecodes run from here till end of final case. */
3074 top = CG_OFFSET(cg);
3075 #if !JS_HAS_BLOCK_SCOPE
3076 js_PushStatement(cg, stmtInfo, STMT_SWITCH, top);
3077 #else
3078 if (pn2->pn_type == TOK_LC) {
3079 js_PushStatement(cg, stmtInfo, STMT_SWITCH, top);
3080 } else {
3081 /* Re-push the switch's statement info record. */
3082 cg->topStmt = cg->topScopeStmt = stmtInfo;
3084 /* Set the statement info record's idea of top. */
3085 stmtInfo->update = top;
3087 /* Advance pn2 to refer to the switch case list. */
3088 pn2 = pn2->expr();
3090 #endif
3092 caseCount = pn2->pn_count;
3093 tableLength = 0;
3094 table = NULL;
3096 if (caseCount == 0 ||
3097 (caseCount == 1 &&
3098 (hasDefault = (pn2->pn_head->pn_type == TOK_DEFAULT)))) {
3099 caseCount = 0;
3100 low = 0;
3101 high = -1;
3102 } else {
3103 #define INTMAP_LENGTH 256
3104 jsbitmap intmap_space[INTMAP_LENGTH];
3105 jsbitmap *intmap = NULL;
3106 int32 intmap_bitlen = 0;
3108 low = JSVAL_INT_MAX;
3109 high = JSVAL_INT_MIN;
3111 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3112 if (pn3->pn_type == TOK_DEFAULT) {
3113 hasDefault = JS_TRUE;
3114 caseCount--; /* one of the "cases" was the default */
3115 continue;
3118 JS_ASSERT(pn3->pn_type == TOK_CASE);
3119 if (switchOp == JSOP_CONDSWITCH)
3120 continue;
3122 pn4 = pn3->pn_left;
3123 while (pn4->pn_type == TOK_RP)
3124 pn4 = pn4->pn_kid;
3125 switch (pn4->pn_type) {
3126 case TOK_NUMBER:
3127 d = pn4->pn_dval;
3128 if (JSDOUBLE_IS_INT(d, i) && INT_FITS_IN_JSVAL(i)) {
3129 pn3->pn_val = INT_TO_JSVAL(i);
3130 } else {
3131 atom = js_AtomizeDouble(cx, d);
3132 if (!atom) {
3133 ok = JS_FALSE;
3134 goto release;
3136 pn3->pn_val = ATOM_KEY(atom);
3138 break;
3139 case TOK_STRING:
3140 pn3->pn_val = ATOM_KEY(pn4->pn_atom);
3141 break;
3142 case TOK_NAME:
3143 if (!pn4->maybeExpr()) {
3144 ok = LookupCompileTimeConstant(cx, cg, pn4->pn_atom, &v);
3145 if (!ok)
3146 goto release;
3147 if (v != JSVAL_HOLE) {
3148 if (!JSVAL_IS_PRIMITIVE(v)) {
3150 * XXX JSOP_LOOKUPSWITCH does not support const-
3151 * propagated object values, see bug 407186.
3153 switchOp = JSOP_CONDSWITCH;
3154 continue;
3156 pn3->pn_val = v;
3157 constPropagated = JS_TRUE;
3158 break;
3161 /* FALL THROUGH */
3162 case TOK_PRIMARY:
3163 if (pn4->pn_op == JSOP_TRUE) {
3164 pn3->pn_val = JSVAL_TRUE;
3165 break;
3167 if (pn4->pn_op == JSOP_FALSE) {
3168 pn3->pn_val = JSVAL_FALSE;
3169 break;
3171 if (pn4->pn_op == JSOP_NULL) {
3172 pn3->pn_val = JSVAL_NULL;
3173 break;
3175 /* FALL THROUGH */
3176 default:
3177 switchOp = JSOP_CONDSWITCH;
3178 continue;
3181 JS_ASSERT(JSVAL_IS_PRIMITIVE(pn3->pn_val));
3183 if (switchOp != JSOP_TABLESWITCH)
3184 continue;
3185 if (!JSVAL_IS_INT(pn3->pn_val)) {
3186 switchOp = JSOP_LOOKUPSWITCH;
3187 continue;
3189 i = JSVAL_TO_INT(pn3->pn_val);
3190 if ((jsuint)(i + (jsint)JS_BIT(15)) >= (jsuint)JS_BIT(16)) {
3191 switchOp = JSOP_LOOKUPSWITCH;
3192 continue;
3194 if (i < low)
3195 low = i;
3196 if (high < i)
3197 high = i;
3200 * Check for duplicates, which require a JSOP_LOOKUPSWITCH.
3201 * We bias i by 65536 if it's negative, and hope that's a rare
3202 * case (because it requires a malloc'd bitmap).
3204 if (i < 0)
3205 i += JS_BIT(16);
3206 if (i >= intmap_bitlen) {
3207 if (!intmap &&
3208 i < (INTMAP_LENGTH << JS_BITS_PER_WORD_LOG2)) {
3209 intmap = intmap_space;
3210 intmap_bitlen = INTMAP_LENGTH << JS_BITS_PER_WORD_LOG2;
3211 } else {
3212 /* Just grab 8K for the worst-case bitmap. */
3213 intmap_bitlen = JS_BIT(16);
3214 intmap = (jsbitmap *)
3215 cx->malloc((JS_BIT(16) >> JS_BITS_PER_WORD_LOG2)
3216 * sizeof(jsbitmap));
3217 if (!intmap) {
3218 JS_ReportOutOfMemory(cx);
3219 return JS_FALSE;
3222 memset(intmap, 0, intmap_bitlen >> JS_BITS_PER_BYTE_LOG2);
3224 if (JS_TEST_BIT(intmap, i)) {
3225 switchOp = JSOP_LOOKUPSWITCH;
3226 continue;
3228 JS_SET_BIT(intmap, i);
3231 release:
3232 if (intmap && intmap != intmap_space)
3233 cx->free(intmap);
3234 if (!ok)
3235 return JS_FALSE;
3238 * Compute table length and select lookup instead if overlarge or
3239 * more than half-sparse.
3241 if (switchOp == JSOP_TABLESWITCH) {
3242 tableLength = (uint32)(high - low + 1);
3243 if (tableLength >= JS_BIT(16) || tableLength > 2 * caseCount)
3244 switchOp = JSOP_LOOKUPSWITCH;
3245 } else if (switchOp == JSOP_LOOKUPSWITCH) {
3247 * Lookup switch supports only atom indexes below the 64K limit.
3248 * Conservatively estimate the maximum possible index during
3249 * switch generation and use conditional switch if it exceeds
3250 * the limit.
3252 if (caseCount + cg->atomList.count > JS_BIT(16))
3253 switchOp = JSOP_CONDSWITCH;
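/*
 * Example (added): cases 1, 2, and 4 give tableLength 4, which is neither
 * >= 64K nor > 2 * caseCount, so JSOP_TABLESWITCH stands; cases 0 and
 * 100000 give tableLength 100001, forcing JSOP_LOOKUPSWITCH instead.
 */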
3258 * Emit a note with two offsets: first tells total switch code length,
3259 * second tells offset to first JSOP_CASE if condswitch.
3261 noteIndex = js_NewSrcNote3(cx, cg, SRC_SWITCH, 0, 0);
3262 if (noteIndex < 0)
3263 return JS_FALSE;
3265 if (switchOp == JSOP_CONDSWITCH) {
3267 * 0 bytes of immediate for unoptimized ECMAv2 switch.
3269 switchSize = 0;
3270 } else if (switchOp == JSOP_TABLESWITCH) {
3272 * 3 offsets (len, low, high) before the table, 1 per entry.
3274 switchSize = (size_t)(JUMP_OFFSET_LEN * (3 + tableLength));
3275 } else {
3277 * JSOP_LOOKUPSWITCH:
3278 * 1 offset (len) and 1 atom index (npairs) before the table,
3279 * 1 atom index and 1 jump offset per entry.
3281 switchSize = (size_t)(JUMP_OFFSET_LEN + INDEX_LEN +
3282 (INDEX_LEN + JUMP_OFFSET_LEN) * caseCount);
3286 * Emit switchOp followed by switchSize bytes of jump or lookup table.
3288 * If switchOp is JSOP_LOOKUPSWITCH or JSOP_TABLESWITCH, it is crucial
3289 * to emit the immediate operand(s) by which bytecode readers such as
3290 * BuildSpanDepTable discover the length of the switch opcode *before*
3291 * calling js_SetJumpOffset (which may call BuildSpanDepTable). It's
3292 * also important to zero all unknown jump offset immediate operands,
3293 * so they can be converted to span dependencies with null targets to
3294 * be computed later (js_EmitN zeros switchSize bytes after switchOp).
3296 if (js_EmitN(cx, cg, switchOp, switchSize) < 0)
3297 return JS_FALSE;
3299 off = -1;
3300 if (switchOp == JSOP_CONDSWITCH) {
3301 intN caseNoteIndex = -1;
3302 JSBool beforeCases = JS_TRUE;
3304 /* Emit code for evaluating cases and jumping to case statements. */
3305 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3306 pn4 = pn3->pn_left;
3307 if (pn4 && !js_EmitTree(cx, cg, pn4))
3308 return JS_FALSE;
3309 if (caseNoteIndex >= 0) {
3310 /* off is the previous JSOP_CASE's bytecode offset. */
3311 if (!js_SetSrcNoteOffset(cx, cg, (uintN)caseNoteIndex, 0,
3312 CG_OFFSET(cg) - off)) {
3313 return JS_FALSE;
3316 if (!pn4) {
3317 JS_ASSERT(pn3->pn_type == TOK_DEFAULT);
3318 continue;
3320 caseNoteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
3321 if (caseNoteIndex < 0)
3322 return JS_FALSE;
3323 off = EmitJump(cx, cg, JSOP_CASE, 0);
3324 if (off < 0)
3325 return JS_FALSE;
3326 pn3->pn_offset = off;
3327 if (beforeCases) {
3328 uintN noteCount, noteCountDelta;
3330 /* Switch note's second offset is to first JSOP_CASE. */
3331 noteCount = CG_NOTE_COUNT(cg);
3332 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 1,
3333 off - top)) {
3334 return JS_FALSE;
3336 noteCountDelta = CG_NOTE_COUNT(cg) - noteCount;
3337 if (noteCountDelta != 0)
3338 caseNoteIndex += noteCountDelta;
3339 beforeCases = JS_FALSE;
3344 * If we didn't have an explicit default (which could fall in between
3345 * cases, preventing us from fusing this js_SetSrcNoteOffset with the
3346 * call in the loop above), link the last case to the implicit default
3347 * for the decompiler.
3349 if (!hasDefault &&
3350 caseNoteIndex >= 0 &&
3351 !js_SetSrcNoteOffset(cx, cg, (uintN)caseNoteIndex, 0,
3352 CG_OFFSET(cg) - off)) {
3353 return JS_FALSE;
3356 /* Emit default even if no explicit default statement. */
3357 defaultOffset = EmitJump(cx, cg, JSOP_DEFAULT, 0);
3358 if (defaultOffset < 0)
3359 return JS_FALSE;
3360 } else {
3361 pc = CG_CODE(cg, top + JUMP_OFFSET_LEN);
3363 if (switchOp == JSOP_TABLESWITCH) {
3364 /* Fill in switch bounds, which we know fit in 16-bit offsets. */
3365 SET_JUMP_OFFSET(pc, low);
3366 pc += JUMP_OFFSET_LEN;
3367 SET_JUMP_OFFSET(pc, high);
3368 pc += JUMP_OFFSET_LEN;
3371 * Use malloc to avoid arena bloat for programs with many switches.
3372 * We free table if non-null at label out, so all control flow must
3373 * exit this function through goto out or goto bad.
3375 if (tableLength != 0) {
3376 tableSize = (size_t)tableLength * sizeof *table;
3377 table = (JSParseNode **) cx->malloc(tableSize);
3378 if (!table)
3379 return JS_FALSE;
3380 memset(table, 0, tableSize);
3381 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3382 if (pn3->pn_type == TOK_DEFAULT)
3383 continue;
3384 i = JSVAL_TO_INT(pn3->pn_val);
3385 i -= low;
3386 JS_ASSERT((uint32)i < tableLength);
3387 table[i] = pn3;
3390 } else {
3391 JS_ASSERT(switchOp == JSOP_LOOKUPSWITCH);
3393 /* Fill in the number of cases. */
3394 SET_INDEX(pc, caseCount);
3395 pc += INDEX_LEN;
3399 * After this point, all control flow involving JSOP_TABLESWITCH
3400 * must set ok and goto out to exit this function. To keep things
3401 * simple, all switchOp cases exit that way.
3403 MUST_FLOW_THROUGH("out");
3404 if (cg->spanDeps) {
3406 * We have already generated at least one big jump so we must
3407 * explicitly add span dependencies for the switch jumps. When
3408 * called below, js_SetJumpOffset can only do it when patching
3409 * the first big jump or when cg->spanDeps is null.
3411 if (!AddSwitchSpanDeps(cx, cg, CG_CODE(cg, top)))
3412 goto bad;
3415 if (constPropagated) {
3417 * Skip switchOp, as we are not setting jump offsets in the two
3418 * for loops below. We'll restore CG_NEXT(cg) from savepc afterwards,
3419 * unless there was an error.
3421 savepc = CG_NEXT(cg);
3422 CG_NEXT(cg) = pc + 1;
3423 if (switchOp == JSOP_TABLESWITCH) {
3424 for (i = 0; i < (jsint)tableLength; i++) {
3425 pn3 = table[i];
3426 if (pn3 &&
3427 (pn4 = pn3->pn_left) != NULL &&
3428 pn4->pn_type == TOK_NAME) {
3429 /* Note a propagated constant with the const's name. */
3430 JS_ASSERT(!pn4->maybeExpr());
3431 ale = cg->atomList.add(cg->parser, pn4->pn_atom);
3432 if (!ale)
3433 goto bad;
3434 CG_NEXT(cg) = pc;
3435 if (js_NewSrcNote2(cx, cg, SRC_LABEL, (ptrdiff_t)
3436 ALE_INDEX(ale)) < 0) {
3437 goto bad;
3440 pc += JUMP_OFFSET_LEN;
3442 } else {
3443 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3444 pn4 = pn3->pn_left;
3445 if (pn4 && pn4->pn_type == TOK_NAME) {
3446 /* Note a propagated constant with the const's name. */
3447 JS_ASSERT(!pn4->maybeExpr());
3448 ale = cg->atomList.add(cg->parser, pn4->pn_atom);
3449 if (!ale)
3450 goto bad;
3451 CG_NEXT(cg) = pc;
3452 if (js_NewSrcNote2(cx, cg, SRC_LABEL, (ptrdiff_t)
3453 ALE_INDEX(ale)) < 0) {
3454 goto bad;
3457 pc += INDEX_LEN + JUMP_OFFSET_LEN;
3460 CG_NEXT(cg) = savepc;
3464 /* Emit code for each case's statements, copying pn_offset up to pn3. */
3465 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3466 if (switchOp == JSOP_CONDSWITCH && pn3->pn_type != TOK_DEFAULT)
3467 CHECK_AND_SET_JUMP_OFFSET_AT_CUSTOM(cx, cg, pn3->pn_offset, goto bad);
3468 pn4 = pn3->pn_right;
3469 ok = js_EmitTree(cx, cg, pn4);
3470 if (!ok)
3471 goto out;
3472 pn3->pn_offset = pn4->pn_offset;
3473 if (pn3->pn_type == TOK_DEFAULT)
3474 off = pn3->pn_offset - top;
3477 if (!hasDefault) {
3478 /* If no default case, offset for default is to end of switch. */
3479 off = CG_OFFSET(cg) - top;
3482 /* We had better have set "off" by now. */
3483 JS_ASSERT(off != -1);
3485 /* Set the default offset (to end of switch if no default). */
3486 if (switchOp == JSOP_CONDSWITCH) {
3487 pc = NULL;
3488 JS_ASSERT(defaultOffset != -1);
3489 ok = js_SetJumpOffset(cx, cg, CG_CODE(cg, defaultOffset),
3490 off - (defaultOffset - top));
3491 if (!ok)
3492 goto out;
3493 } else {
3494 pc = CG_CODE(cg, top);
3495 ok = js_SetJumpOffset(cx, cg, pc, off);
3496 if (!ok)
3497 goto out;
3498 pc += JUMP_OFFSET_LEN;
3501 /* Set the SRC_SWITCH note's offset operand to tell end of switch. */
3502 off = CG_OFFSET(cg) - top;
3503 ok = js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, off);
3504 if (!ok)
3505 goto out;
3507 if (switchOp == JSOP_TABLESWITCH) {
3508 /* Skip over the already-initialized switch bounds. */
3509 pc += 2 * JUMP_OFFSET_LEN;
3511 /* Fill in the jump table, if there is one. */
3512 for (i = 0; i < (jsint)tableLength; i++) {
3513 pn3 = table[i];
3514 off = pn3 ? pn3->pn_offset - top : 0;
3515 ok = js_SetJumpOffset(cx, cg, pc, off);
3516 if (!ok)
3517 goto out;
3518 pc += JUMP_OFFSET_LEN;
3520 } else if (switchOp == JSOP_LOOKUPSWITCH) {
3521 /* Skip over the already-initialized number of cases. */
3522 pc += INDEX_LEN;
3524 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3525 if (pn3->pn_type == TOK_DEFAULT)
3526 continue;
3527 if (!js_AtomizePrimitiveValue(cx, pn3->pn_val, &atom))
3528 goto bad;
3529 ale = cg->atomList.add(cg->parser, atom);
3530 if (!ale)
3531 goto bad;
3532 SET_INDEX(pc, ALE_INDEX(ale));
3533 pc += INDEX_LEN;
3535 off = pn3->pn_offset - top;
3536 ok = js_SetJumpOffset(cx, cg, pc, off);
3537 if (!ok)
3538 goto out;
3539 pc += JUMP_OFFSET_LEN;
3543 out:
3544 if (table)
3545 cx->free(table);
3546 if (ok) {
3547 ok = js_PopStatementCG(cx, cg);
3549 #if JS_HAS_BLOCK_SCOPE
3550 if (ok && pn->pn_right->pn_type == TOK_LEXICALSCOPE)
3551 EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK, count);
3552 #endif
3554 return ok;
3556 bad:
3557 ok = JS_FALSE;
3558 goto out;
3561 JSBool
3562 js_EmitFunctionScript(JSContext *cx, JSCodeGenerator *cg, JSParseNode *body)
3564 if (cg->flags & TCF_FUN_IS_GENERATOR) {
3565 /* JSOP_GENERATOR must be the first instruction. */
3566 CG_SWITCH_TO_PROLOG(cg);
3567 JS_ASSERT(CG_NEXT(cg) == CG_BASE(cg));
3568 if (js_Emit1(cx, cg, JSOP_GENERATOR) < 0)
3569 return false;
3570 CG_SWITCH_TO_MAIN(cg);
3571 } else {
3573 * Emit a trace hint opcode only if not in a generator: generators are not
3574 * yet traced, and JSOP_GENERATOR and JSOP_TRACE both want to be first.
3576 if (js_Emit1(cx, cg, JSOP_TRACE) < 0)
3577 return false;
3580 if (cg->flags & TCF_FUN_UNBRAND_THIS) {
3581 if (js_Emit1(cx, cg, JSOP_UNBRANDTHIS) < 0)
3582 return false;
3585 return js_EmitTree(cx, cg, body) &&
3586 js_Emit1(cx, cg, JSOP_STOP) >= 0 &&
3587 js_NewScriptFromCG(cx, cg);
3590 /* A macro for inlining at the top of js_EmitTree (whence it came). */
3591 #define UPDATE_LINE_NUMBER_NOTES(cx, cg, line) \
3592 JS_BEGIN_MACRO \
3593 uintN line_ = (line); \
3594 uintN delta_ = line_ - CG_CURRENT_LINE(cg); \
3595 if (delta_ != 0) { \
3596 /* \
3597 * Encode any change in the current source line number by using \
3598 * either several SRC_NEWLINE notes or just one SRC_SETLINE note, \
3599 * whichever consumes less space. \
3601 * NB: We handle backward line number deltas (possible with for \
3602 * loops where the update part is emitted after the body, but its \
3603 * line number is <= any line number in the body) here by letting \
3604 * unsigned delta_ wrap to a very large number, which triggers a \
3605 * SRC_SETLINE. \
3606 */ \
3607 CG_CURRENT_LINE(cg) = line_; \
3608 if (delta_ >= (uintN)(2 + ((line_ > SN_3BYTE_OFFSET_MASK)<<1))) { \
3609 if (js_NewSrcNote2(cx, cg, SRC_SETLINE, (ptrdiff_t)line_) < 0)\
3610 return JS_FALSE; \
3611 } else { \
3612 do { \
3613 if (js_NewSrcNote(cx, cg, SRC_NEWLINE) < 0) \
3614 return JS_FALSE; \
3615 } while (--delta_ != 0); \
3618 JS_END_MACRO
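/*
 * Example (added): advancing one source line emits a single SRC_NEWLINE
 * note; a delta of two or more (four or more once line_ exceeds
 * SN_3BYTE_OFFSET_MASK) emits one SRC_SETLINE instead, and a backward
 * delta wraps the unsigned subtraction, likewise forcing SRC_SETLINE.
 */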
3620 /* A function, so that we avoid macro-bloating all the other callsites. */
3621 static JSBool
3622 UpdateLineNumberNotes(JSContext *cx, JSCodeGenerator *cg, uintN line)
3624 UPDATE_LINE_NUMBER_NOTES(cx, cg, line);
3625 return JS_TRUE;
3628 static JSBool
3629 MaybeEmitVarDecl(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3630 JSParseNode *pn, jsatomid *result)
3632 jsatomid atomIndex;
3633 JSAtomListElement *ale;
3635 if (pn->pn_cookie != FREE_UPVAR_COOKIE) {
3636 atomIndex = (jsatomid) UPVAR_FRAME_SLOT(pn->pn_cookie);
3637 } else {
3638 ale = cg->atomList.add(cg->parser, pn->pn_atom);
3639 if (!ale)
3640 return JS_FALSE;
3641 atomIndex = ALE_INDEX(ale);
3644 if (JOF_OPTYPE(pn->pn_op) == JOF_ATOM &&
3645 (!cg->inFunction() || (cg->flags & TCF_FUN_HEAVYWEIGHT))) {
3646 CG_SWITCH_TO_PROLOG(cg);
3647 if (!UpdateLineNumberNotes(cx, cg, pn->pn_pos.begin.lineno))
3648 return JS_FALSE;
3649 EMIT_INDEX_OP(prologOp, atomIndex);
3650 CG_SWITCH_TO_MAIN(cg);
3653 if (result)
3654 *result = atomIndex;
3655 return JS_TRUE;
3658 #if JS_HAS_DESTRUCTURING
3660 typedef JSBool
3661 (*DestructuringDeclEmitter)(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3662 JSParseNode *pn);
3664 static JSBool
3665 EmitDestructuringDecl(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3666 JSParseNode *pn)
3668 JS_ASSERT(pn->pn_type == TOK_NAME);
3669 if (!BindNameToSlot(cx, cg, pn))
3670 return JS_FALSE;
3672 JS_ASSERT(PN_OP(pn) != JSOP_ARGUMENTS && PN_OP(pn) != JSOP_CALLEE);
3673 return MaybeEmitVarDecl(cx, cg, prologOp, pn, NULL);
3676 static JSBool
3677 EmitDestructuringDecls(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3678 JSParseNode *pn)
3680 JSParseNode *pn2, *pn3;
3681 DestructuringDeclEmitter emitter;
3683 if (pn->pn_type == TOK_RB) {
3684 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
3685 if (pn2->pn_type == TOK_COMMA)
3686 continue;
3687 emitter = (pn2->pn_type == TOK_NAME)
3688 ? EmitDestructuringDecl
3689 : EmitDestructuringDecls;
3690 if (!emitter(cx, cg, prologOp, pn2))
3691 return JS_FALSE;
3693 } else {
3694 JS_ASSERT(pn->pn_type == TOK_RC);
3695 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
3696 pn3 = pn2->pn_right;
3697 emitter = (pn3->pn_type == TOK_NAME)
3698 ? EmitDestructuringDecl
3699 : EmitDestructuringDecls;
3700 if (!emitter(cx, cg, prologOp, pn3))
3701 return JS_FALSE;
3704 return JS_TRUE;
3707 static JSBool
3708 EmitDestructuringOpsHelper(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn);
3710 static JSBool
3711 EmitDestructuringLHS(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
3713 jsuint slot;
3716 * Now emit the lvalue opcode sequence. If the lvalue is a nested
3717 * destructuring initialiser-form, call ourselves to handle it, then
3718 * pop the matched value. Otherwise emit an lvalue bytecode sequence
3719 * ending with a JSOP_ENUMELEM or equivalent op.
3721 if (pn->pn_type == TOK_RB || pn->pn_type == TOK_RC) {
3722 if (!EmitDestructuringOpsHelper(cx, cg, pn))
3723 return JS_FALSE;
3724 if (js_Emit1(cx, cg, JSOP_POP) < 0)
3725 return JS_FALSE;
3726 } else {
3727 if (pn->pn_type == TOK_NAME) {
3728 if (!BindNameToSlot(cx, cg, pn))
3729 return JS_FALSE;
3730 if (pn->isConst() && !pn->isInitialized())
3731 return js_Emit1(cx, cg, JSOP_POP) >= 0;
3734 switch (pn->pn_op) {
3735 case JSOP_SETNAME:
3737 * NB: pn is a PN_NAME node, not a PN_BINARY. Nevertheless,
3738 * we want to emit JSOP_ENUMELEM, which has format JOF_ELEM.
3739 * So here and for JSOP_ENUMCONSTELEM, we use EmitElemOp.
3741 if (!EmitElemOp(cx, pn, JSOP_ENUMELEM, cg))
3742 return JS_FALSE;
3743 break;
3745 case JSOP_SETCONST:
3746 if (!EmitElemOp(cx, pn, JSOP_ENUMCONSTELEM, cg))
3747 return JS_FALSE;
3748 break;
3750 case JSOP_SETLOCAL:
3751 slot = (jsuint) pn->pn_cookie;
3752 EMIT_UINT16_IMM_OP(JSOP_SETLOCALPOP, slot);
3753 break;
3755 case JSOP_SETARG:
3756 case JSOP_SETGVAR:
3757 slot = (jsuint) pn->pn_cookie;
3758 EMIT_UINT16_IMM_OP(PN_OP(pn), slot);
3759 if (js_Emit1(cx, cg, JSOP_POP) < 0)
3760 return JS_FALSE;
3761 break;
3763 default:
3765 ptrdiff_t top;
3767 top = CG_OFFSET(cg);
3768 if (!js_EmitTree(cx, cg, pn))
3769 return JS_FALSE;
3770 if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
3771 return JS_FALSE;
3772 if (js_Emit1(cx, cg, JSOP_ENUMELEM) < 0)
3773 return JS_FALSE;
3774 break;
3777 case JSOP_ENUMELEM:
3778 JS_ASSERT(0);
3782 return JS_TRUE;
3786 * Recursive helper for EmitDestructuringOps.
3788 * Given a value to destructure on the stack, walk over an object or array
3789 * initialiser at pn, emitting bytecodes to match property values and store
3790 * them in the lvalues identified by the matched property names.
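/*
 * Example (added): for '[a, [b]] = rhs' the helper dups the rhs value,
 * pushes index 0 and JSOP_GETELEM, assigns into a, then dups again for
 * index 1 and recurs on the nested [b] pattern, popping each matched
 * value so the stack depth is unchanged on exit.
 */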
3792 static JSBool
3793 EmitDestructuringOpsHelper(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
3795 jsuint index;
3796 JSParseNode *pn2, *pn3;
3797 JSBool doElemOp;
3799 #ifdef DEBUG
3800 intN stackDepth = cg->stackDepth;
3801 JS_ASSERT(stackDepth != 0);
3802 JS_ASSERT(pn->pn_arity == PN_LIST);
3803 JS_ASSERT(pn->pn_type == TOK_RB || pn->pn_type == TOK_RC);
3804 #endif
3806 if (pn->pn_count == 0) {
3807 /* Emit a DUP;POP sequence for the decompiler. */
3808 return js_Emit1(cx, cg, JSOP_DUP) >= 0 &&
3809 js_Emit1(cx, cg, JSOP_POP) >= 0;
3812 index = 0;
3813 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
3815 * Duplicate the value being destructured to use as a reference base.
3816 * If dup is not the first one, annotate it for the decompiler.
3818 if (pn2 != pn->pn_head && js_NewSrcNote(cx, cg, SRC_CONTINUE) < 0)
3819 return JS_FALSE;
3820 if (js_Emit1(cx, cg, JSOP_DUP) < 0)
3821 return JS_FALSE;
3824 * Now push the property name currently being matched, which is either
3825 * the array initialiser's current index, or the current property name
3826 * "label" on the left of a colon in the object initialiser. Set pn3
3827 * to the lvalue node, which is in the value-initializing position.
3829 doElemOp = JS_TRUE;
3830 if (pn->pn_type == TOK_RB) {
3831 if (!EmitNumberOp(cx, index, cg))
3832 return JS_FALSE;
3833 pn3 = pn2;
3834 } else {
3835 JS_ASSERT(pn->pn_type == TOK_RC);
3836 JS_ASSERT(pn2->pn_type == TOK_COLON);
3837 pn3 = pn2->pn_left;
3838 if (pn3->pn_type == TOK_NUMBER) {
3840 * If we are emitting an object destructuring initialiser,
3841 * annotate the index op with SRC_INITPROP so we know we are
3842 * not decompiling an array initialiser.
3844 if (js_NewSrcNote(cx, cg, SRC_INITPROP) < 0)
3845 return JS_FALSE;
3846 if (!EmitNumberOp(cx, pn3->pn_dval, cg))
3847 return JS_FALSE;
3848 } else {
3849 JS_ASSERT(pn3->pn_type == TOK_STRING ||
3850 pn3->pn_type == TOK_NAME);
3851 if (!EmitAtomOp(cx, pn3, JSOP_GETPROP, cg))
3852 return JS_FALSE;
3853 doElemOp = JS_FALSE;
3855 pn3 = pn2->pn_right;
3858 if (doElemOp) {
3860 * Ok, get the value of the matching property name. This leaves
3861 * that value on top of the value being destructured, so the stack
3862 * is one deeper than when we started.
3864 if (js_Emit1(cx, cg, JSOP_GETELEM) < 0)
3865 return JS_FALSE;
3866 JS_ASSERT(cg->stackDepth == stackDepth + 1);
3869 /* Nullary comma node makes a hole in the array destructurer. */
3870 if (pn3->pn_type == TOK_COMMA && pn3->pn_arity == PN_NULLARY) {
3871 JS_ASSERT(pn->pn_type == TOK_RB);
3872 JS_ASSERT(pn2 == pn3);
3873 if (js_Emit1(cx, cg, JSOP_POP) < 0)
3874 return JS_FALSE;
3875 } else {
3876 if (!EmitDestructuringLHS(cx, cg, pn3))
3877 return JS_FALSE;
3880 JS_ASSERT(cg->stackDepth == stackDepth);
3881 ++index;
3884 return JS_TRUE;
3887 static ptrdiff_t
3888 OpToDeclType(JSOp op)
3890 switch (op) {
3891 case JSOP_NOP:
3892 return SRC_DECL_LET;
3893 case JSOP_DEFCONST:
3894 return SRC_DECL_CONST;
3895 case JSOP_DEFVAR:
3896 return SRC_DECL_VAR;
3897 default:
3898 return SRC_DECL_NONE;
3902 static JSBool
3903 EmitDestructuringOps(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3904 JSParseNode *pn)
3907 * If we're called from a variable declaration, help the decompiler by
3908 * annotating the first JSOP_DUP that EmitDestructuringOpsHelper emits.
3909 * If the destructuring initialiser is empty, our helper will emit a
3910 * JSOP_DUP followed by a JSOP_POP for the decompiler.
3912 if (js_NewSrcNote2(cx, cg, SRC_DESTRUCT, OpToDeclType(prologOp)) < 0)
3913 return JS_FALSE;
3916 * Call our recursive helper to emit the destructuring assignments and
3917 * related stack manipulations.
3919 return EmitDestructuringOpsHelper(cx, cg, pn);
3922 static JSBool
3923 EmitGroupAssignment(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3924 JSParseNode *lhs, JSParseNode *rhs)
3925 {
3926 jsuint depth, limit, i, nslots;
3927 JSParseNode *pn;
3929 depth = limit = (uintN) cg->stackDepth;
3930 for (pn = rhs->pn_head; pn; pn = pn->pn_next) {
3931 if (limit == JS_BIT(16)) {
3932 ReportCompileErrorNumber(cx, CG_TS(cg), rhs, JSREPORT_ERROR, JSMSG_ARRAY_INIT_TOO_BIG);
3933 return JS_FALSE;
3934 }
3936 /* MaybeEmitGroupAssignment won't call us if rhs is holey. */
3937 JS_ASSERT(!(pn->pn_type == TOK_COMMA && pn->pn_arity == PN_NULLARY));
3938 if (!js_EmitTree(cx, cg, pn))
3939 return JS_FALSE;
3940 ++limit;
3941 }
3943 if (js_NewSrcNote2(cx, cg, SRC_GROUPASSIGN, OpToDeclType(prologOp)) < 0)
3944 return JS_FALSE;
3946 i = depth;
3947 for (pn = lhs->pn_head; pn; pn = pn->pn_next, ++i) {
3948 /* MaybeEmitGroupAssignment requires lhs->pn_count <= rhs->pn_count. */
3949 JS_ASSERT(i < limit);
3950 jsint slot = AdjustBlockSlot(cx, cg, i);
3951 if (slot < 0)
3952 return JS_FALSE;
3953 EMIT_UINT16_IMM_OP(JSOP_GETLOCAL, slot);
3955 if (pn->pn_type == TOK_COMMA && pn->pn_arity == PN_NULLARY) {
3956 if (js_Emit1(cx, cg, JSOP_POP) < 0)
3957 return JS_FALSE;
3958 } else {
3959 if (!EmitDestructuringLHS(cx, cg, pn))
3960 return JS_FALSE;
3961 }
3962 }
3964 nslots = limit - depth;
3965 EMIT_UINT16_IMM_OP(JSOP_POPN, nslots);
3966 cg->stackDepth = (uintN) depth;
3967 return JS_TRUE;
3968 }
3970 /*
3971 * Helper called with pop out param initialized to a JSOP_POP* opcode. If we
3972 * can emit a group assignment sequence, which results in 0 stack depth delta,
3973 * we set *pop to JSOP_NOP so callers can veto emitting pn followed by a pop.
3974 */
3975 static JSBool
3976 MaybeEmitGroupAssignment(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3977 JSParseNode *pn, JSOp *pop)
3978 {
3979 JSParseNode *lhs, *rhs;
3981 JS_ASSERT(pn->pn_type == TOK_ASSIGN);
3982 JS_ASSERT(*pop == JSOP_POP || *pop == JSOP_POPV);
3983 lhs = pn->pn_left;
3984 rhs = pn->pn_right;
3985 if (lhs->pn_type == TOK_RB && rhs->pn_type == TOK_RB &&
3986 !(rhs->pn_xflags & PNX_HOLEY) &&
3987 lhs->pn_count <= rhs->pn_count) {
3988 if (!EmitGroupAssignment(cx, cg, prologOp, lhs, rhs))
3989 return JS_FALSE;
3990 *pop = JSOP_NOP;
3991 }
3992 return JS_TRUE;
3993 }
3995 #endif /* JS_HAS_DESTRUCTURING */
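/*
 * Illustrative sketch (editorial, hypothetical example; not from the
 * original source): a same-length, non-holey array pattern such as
 *
 *   [a, b] = [b, a];
 *
 * qualifies for the group-assignment fast path above: the RHS elements
 * are evaluated into stack slots, popped into the LHS targets via
 * JSOP_GETLOCAL/JSOP_POPN, and the net stack delta is zero, so *pop is
 * set to JSOP_NOP and the caller skips its usual trailing pop.
 */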
3997 static JSBool
3998 EmitVariables(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
3999 JSBool inLetHead, ptrdiff_t *headNoteIndex)
4000 {
4001 bool let, forInVar, first;
4002 #if JS_HAS_BLOCK_SCOPE
4003 bool forInLet, popScope;
4004 JSStmtInfo *stmt, *scopeStmt;
4005 #endif
4006 ptrdiff_t off, noteIndex, tmp;
4007 JSParseNode *pn2, *pn3, *next;
4008 JSOp op;
4009 jsatomid atomIndex;
4010 uintN oldflags;
4012 /* Default in case of JS_HAS_BLOCK_SCOPE early return, below. */
4013 *headNoteIndex = -1;
4015 /*
4016 * Let blocks and expressions have a parenthesized head in which the new
4017 * scope is not yet open. Initializer evaluation uses the parent node's
4018 * lexical scope. If popScope is true below, then we hide the top lexical
4019 * block from any calls to BindNameToSlot hiding in pn2->pn_expr so that
4020 * it won't find any names in the new let block.
4021 *
4022 * The same goes for let declarations in the head of any kind of for loop.
4023 * Unlike a let declaration 'let x = i' within a block, where x is hoisted
4024 * to the start of the block, a 'for (let x = i...) ...' loop evaluates i
4025 * in the containing scope, and puts x in the loop body's scope.
4026 */
4027 let = (pn->pn_op == JSOP_NOP);
4028 forInVar = (pn->pn_xflags & PNX_FORINVAR) != 0;
4029 #if JS_HAS_BLOCK_SCOPE
4030 forInLet = let && forInVar;
4031 popScope = (inLetHead || (let && (cg->flags & TCF_IN_FOR_INIT)));
4032 if (popScope) {
4033 stmt = cg->topStmt;
4034 scopeStmt = cg->topScopeStmt;
4035 }
4036 # ifdef __GNUC__
4037 else stmt = scopeStmt = NULL; /* quell GCC overwarning */
4038 # endif
4039 JS_ASSERT(!popScope || let);
4040 #endif
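/*
 * Illustrative sketch (editorial, hypothetical example; not from the
 * original source): in
 *
 *   for (let x = i in o) body;
 *
 * the initializer i is evaluated in the enclosing scope (popScope above
 * temporarily unhooks the innermost lexical block from the statement
 * chain), while x itself is scoped by the loop body.
 */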
4042 off = noteIndex = -1;
4043 for (pn2 = pn->pn_head; ; pn2 = next) {
4044 first = pn2 == pn->pn_head;
4045 next = pn2->pn_next;
4047 if (pn2->pn_type != TOK_NAME) {
4048 #if JS_HAS_DESTRUCTURING
4049 if (pn2->pn_type == TOK_RB || pn2->pn_type == TOK_RC) {
4050 /*
4051 * Emit variable binding ops, but not destructuring ops.
4052 * The parser (see Variables, jsparse.c) has ensured that
4053 * our caller will be the TOK_FOR/TOK_IN case in js_EmitTree,
4054 * and that case will emit the destructuring code only after
4055 * emitting an enumerating opcode and a branch that tests
4056 * whether the enumeration ended.
4057 */
4058 JS_ASSERT(forInVar);
4059 JS_ASSERT(pn->pn_count == 1);
4060 if (!EmitDestructuringDecls(cx, cg, PN_OP(pn), pn2))
4061 return JS_FALSE;
4062 break;
4063 }
4064 #endif
4066 /*
4067 * A destructuring initialiser assignment preceded by var will
4068 * never occur to the left of 'in' in a for-in loop. As with 'for
4069 * (var x = i in o)...', this will cause the entire 'var [a, b] =
4070 * i' to be hoisted out of the loop.
4071 */
4072 JS_ASSERT(pn2->pn_type == TOK_ASSIGN);
4073 JS_ASSERT(!forInVar);
4075 /*
4076 * To allow the front end to rewrite var f = x; as f = x; when a
4077 * function f(){} precedes the var, detect simple name assignment
4078 * here and initialize the name.
4079 */
4080 #if !JS_HAS_DESTRUCTURING
4081 JS_ASSERT(pn2->pn_left->pn_type == TOK_NAME);
4082 #else
4083 if (pn2->pn_left->pn_type == TOK_NAME)
4084 #endif
4085 {
4086 pn3 = pn2->pn_right;
4087 pn2 = pn2->pn_left;
4088 goto do_name;
4089 }
4091 #if JS_HAS_DESTRUCTURING
4092 if (pn->pn_count == 1) {
4093 /*
4094 * If this is the only destructuring assignment in the list,
4095 * try to optimize to a group assignment. If we're in a let
4096 * head, pass JSOP_POP rather than the pseudo-prolog JSOP_NOP
4097 * in pn->pn_op, to suppress a second (and misplaced) 'let'.
4098 */
4099 JS_ASSERT(noteIndex < 0 && !pn2->pn_next);
4100 op = JSOP_POP;
4101 if (!MaybeEmitGroupAssignment(cx, cg,
4102 inLetHead ? JSOP_POP : PN_OP(pn),
4103 pn2, &op)) {
4104 return JS_FALSE;
4105 }
4106 if (op == JSOP_NOP) {
4107 pn->pn_xflags = (pn->pn_xflags & ~PNX_POPVAR) | PNX_GROUPINIT;
4108 break;
4109 }
4110 }
4112 pn3 = pn2->pn_left;
4113 if (!EmitDestructuringDecls(cx, cg, PN_OP(pn), pn3))
4114 return JS_FALSE;
4116 if (!js_EmitTree(cx, cg, pn2->pn_right))
4117 return JS_FALSE;
4119 /*
4120 * Veto pn->pn_op if inLetHead to avoid emitting a SRC_DESTRUCT
4121 * that's redundant with respect to the SRC_DECL/SRC_DECL_LET that
4122 * we will emit at the bottom of this function.
4123 */
4124 if (!EmitDestructuringOps(cx, cg,
4125 inLetHead ? JSOP_POP : PN_OP(pn),
4126 pn3)) {
4127 return JS_FALSE;
4128 }
4129 goto emit_note_pop;
4130 #endif
4131 }
4133 /*
4134 * Load initializer early to share code above that jumps to do_name.
4135 * NB: if this var redeclares an existing binding, then pn2 is linked
4136 * on its definition's use-chain and pn_expr has been overlaid with
4137 * pn_lexdef.
4138 */
4139 pn3 = pn2->maybeExpr();
4141 do_name:
4142 if (!BindNameToSlot(cx, cg, pn2))
4143 return JS_FALSE;
4145 op = PN_OP(pn2);
4146 if (op == JSOP_ARGUMENTS) {
4147 /* JSOP_ARGUMENTS => no initializer */
4148 JS_ASSERT(!pn3 && !let);
4149 pn3 = NULL;
4150 #ifdef __GNUC__
4151 atomIndex = 0; /* quell GCC overwarning */
4152 #endif
4153 } else {
4154 JS_ASSERT(op != JSOP_CALLEE);
4155 JS_ASSERT(pn2->pn_cookie != FREE_UPVAR_COOKIE || !let);
4156 if (!MaybeEmitVarDecl(cx, cg, PN_OP(pn), pn2, &atomIndex))
4157 return JS_FALSE;
4158 }
4159 if (pn3) {
4160 JS_ASSERT(!forInVar);
4161 if (op == JSOP_SETNAME) {
4162 JS_ASSERT(!let);
4163 EMIT_INDEX_OP(JSOP_BINDNAME, atomIndex);
4164 }
4165 if (pn->pn_op == JSOP_DEFCONST &&
4166 !js_DefineCompileTimeConstant(cx, cg, pn2->pn_atom, pn3)) {
4167 return JS_FALSE;
4168 }
4170 #if JS_HAS_BLOCK_SCOPE
4171 /* Evaluate expr in the outer lexical scope if requested. */
4172 if (popScope) {
4173 cg->topStmt = stmt->down;
4174 cg->topScopeStmt = scopeStmt->downScope;
4175 }
4176 #endif
4178 oldflags = cg->flags;
4179 cg->flags &= ~TCF_IN_FOR_INIT;
4180 if (!js_EmitTree(cx, cg, pn3))
4181 return JS_FALSE;
4182 cg->flags |= oldflags & TCF_IN_FOR_INIT;
4184 #if JS_HAS_BLOCK_SCOPE
4185 if (popScope) {
4186 cg->topStmt = stmt;
4187 cg->topScopeStmt = scopeStmt;
4188 }
4189 #endif
4190 }
4193 /*
4194 * The parser rewrites 'for (var x = i in o)' to hoist 'var x = i' --
4195 * likewise 'for (let x = i in o)' becomes 'i; for (let x in o)' using
4196 * a TOK_SEQ node to make the two statements appear as one. Therefore
4197 * if this declaration is part of a for-in loop head, we do not need to
4198 * emit op or any source note. Our caller, the TOK_FOR/TOK_IN case in
4199 * js_EmitTree, will annotate appropriately.
4200 */
4201 JS_ASSERT_IF(pn2->pn_defn, pn3 == pn2->pn_expr);
4202 if (forInVar) {
4203 JS_ASSERT(pn->pn_count == 1);
4204 JS_ASSERT(!pn3);
4205 break;
4206 }
4208 if (first &&
4209 !inLetHead &&
4210 js_NewSrcNote2(cx, cg, SRC_DECL,
4211 (pn->pn_op == JSOP_DEFCONST)
4212 ? SRC_DECL_CONST
4213 : (pn->pn_op == JSOP_DEFVAR)
4214 ? SRC_DECL_VAR
4215 : SRC_DECL_LET) < 0) {
4216 return JS_FALSE;
4217 }
4218 if (op == JSOP_ARGUMENTS) {
4219 if (js_Emit1(cx, cg, op) < 0)
4220 return JS_FALSE;
4221 } else if (pn2->pn_cookie != FREE_UPVAR_COOKIE) {
4222 EMIT_UINT16_IMM_OP(op, atomIndex);
4223 } else {
4224 EMIT_INDEX_OP(op, atomIndex);
4225 }
4227 #if JS_HAS_DESTRUCTURING
4228 emit_note_pop:
4229 #endif
4230 tmp = CG_OFFSET(cg);
4231 if (noteIndex >= 0) {
4232 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, tmp-off))
4233 return JS_FALSE;
4234 }
4235 if (!next)
4236 break;
4237 off = tmp;
4238 noteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
4239 if (noteIndex < 0 || js_Emit1(cx, cg, JSOP_POP) < 0)
4240 return JS_FALSE;
4241 }
4243 /* If this is a let head, emit and return a srcnote on the pop. */
4244 if (inLetHead) {
4245 *headNoteIndex = js_NewSrcNote(cx, cg, SRC_DECL);
4246 if (*headNoteIndex < 0)
4247 return JS_FALSE;
4248 if (!(pn->pn_xflags & PNX_POPVAR))
4249 return js_Emit1(cx, cg, JSOP_NOP) >= 0;
4250 }
4252 return !(pn->pn_xflags & PNX_POPVAR) || js_Emit1(cx, cg, JSOP_POP) >= 0;
4253 }
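/*
 * Illustrative sketch (editorial, hypothetical example; not from the
 * original source): for a multi-declarator list such as
 *
 *   var x = 1, y = 2;
 *
 * EmitVariables separates the initializers with a JSOP_POP annotated
 * by a SRC_PCDELTA note so the decompiler can bracket the bytecode for
 * each declarator; only the final pop (or nop) is left to this return.
 */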
4255 #if defined DEBUG_brendan || defined DEBUG_mrbkap
4256 static JSBool
4257 GettableNoteForNextOp(JSCodeGenerator *cg)
4258 {
4259 ptrdiff_t offset, target;
4260 jssrcnote *sn, *end;
4262 offset = 0;
4263 target = CG_OFFSET(cg);
4264 for (sn = CG_NOTES(cg), end = sn + CG_NOTE_COUNT(cg); sn < end;
4265 sn = SN_NEXT(sn)) {
4266 if (offset == target && SN_IS_GETTABLE(sn))
4267 return JS_TRUE;
4268 offset += SN_DELTA(sn);
4269 }
4270 return JS_FALSE;
4271 }
4272 #endif
4274 /* Top-level named functions need a nop for decompilation. */
4275 static JSBool
4276 EmitFunctionDefNop(JSContext *cx, JSCodeGenerator *cg, uintN index)
4277 {
4278 return js_NewSrcNote2(cx, cg, SRC_FUNCDEF, (ptrdiff_t)index) >= 0 &&
4279 js_Emit1(cx, cg, JSOP_NOP) >= 0;
4280 }
4282 static bool
4283 EmitNewInit(JSContext *cx, JSCodeGenerator *cg, JSProtoKey key, JSParseNode *pn, int sharpnum)
4284 {
4285 if (js_Emit2(cx, cg, JSOP_NEWINIT, (jsbytecode) key) < 0)
4286 return false;
4287 #if JS_HAS_SHARP_VARS
4288 if (cg->hasSharps()) {
4289 if (pn->pn_count != 0)
4290 EMIT_UINT16_IMM_OP(JSOP_SHARPINIT, cg->sharpSlotBase);
4291 if (sharpnum >= 0)
4292 EMIT_UINT16PAIR_IMM_OP(JSOP_DEFSHARP, cg->sharpSlotBase, sharpnum);
4293 } else {
4294 JS_ASSERT(sharpnum < 0);
4295 }
4296 #endif
4297 return true;
4298 }
4300 static bool
4301 EmitEndInit(JSContext *cx, JSCodeGenerator *cg, uint32 count)
4302 {
4303 #if JS_HAS_SHARP_VARS
4304 /* Emit an op for sharp array cleanup and decompilation. */
4305 if (cg->hasSharps() && count != 0)
4306 EMIT_UINT16_IMM_OP(JSOP_SHARPINIT, cg->sharpSlotBase);
4307 #endif
4308 return js_Emit1(cx, cg, JSOP_ENDINIT) >= 0;
4309 }
4311 /* See the SRC_FOR source note offsetBias comments later in this file. */
4312 JS_STATIC_ASSERT(JSOP_NOP_LENGTH == 1);
4313 JS_STATIC_ASSERT(JSOP_POP_LENGTH == 1);
4315 JSBool
4316 js_EmitTree(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
4317 {
4318 JSBool ok, useful, wantval;
4319 JSStmtInfo *stmt, stmtInfo;
4320 ptrdiff_t top, off, tmp, beq, jmp;
4321 JSParseNode *pn2, *pn3;
4322 JSAtom *atom;
4323 JSAtomListElement *ale;
4324 jsatomid atomIndex;
4325 uintN index;
4326 ptrdiff_t noteIndex;
4327 JSSrcNoteType noteType;
4328 jsbytecode *pc;
4329 JSOp op;
4330 TokenKind type;
4331 uint32 argc;
4332 #if JS_HAS_SHARP_VARS
4333 jsint sharpnum;
4334 #endif
4336 JS_CHECK_RECURSION(cx, return JS_FALSE);
4338 ok = JS_TRUE;
4339 cg->emitLevel++;
4340 pn->pn_offset = top = CG_OFFSET(cg);
4342 /* Emit notes to tell the current bytecode's source line number. */
4343 UPDATE_LINE_NUMBER_NOTES(cx, cg, pn->pn_pos.begin.lineno);
4345 switch (pn->pn_type) {
4346 case TOK_FUNCTION:
4347 {
4348 JSFunction *fun;
4349 uintN slot;
4351 #if JS_HAS_XML_SUPPORT
4352 if (pn->pn_arity == PN_NULLARY) {
4353 if (js_Emit1(cx, cg, JSOP_GETFUNNS) < 0)
4354 return JS_FALSE;
4355 break;
4356 }
4357 #endif
4359 fun = (JSFunction *) pn->pn_funbox->object;
4360 JS_ASSERT(FUN_INTERPRETED(fun));
4361 if (fun->u.i.script) {
4362 /*
4363 * This second pass is needed to emit JSOP_NOP with a source note
4364 * for the already-emitted function definition prolog opcode. See
4365 * comments in the TOK_LC case.
4366 */
4367 JS_ASSERT(pn->pn_op == JSOP_NOP);
4368 JS_ASSERT(cg->inFunction());
4369 if (!EmitFunctionDefNop(cx, cg, pn->pn_index))
4370 return JS_FALSE;
4371 break;
4372 }
4374 JS_ASSERT_IF(cx->options & JSOPTION_ANONFUNFIX,
4375 pn->pn_defn ||
4376 (!pn->pn_used && !pn->isTopLevel()) ||
4377 (fun->flags & JSFUN_LAMBDA));
4379 JS_ASSERT_IF(pn->pn_funbox->tcflags & TCF_FUN_HEAVYWEIGHT,
4380 FUN_KIND(fun) == JSFUN_INTERPRETED);
4382 /* Generate code for the function's body. */
4383 void *cg2mark = JS_ARENA_MARK(cg->codePool);
4384 void *cg2space;
4385 JS_ARENA_ALLOCATE_TYPE(cg2space, JSCodeGenerator, cg->codePool);
4386 if (!cg2space) {
4387 js_ReportOutOfScriptQuota(cx);
4388 return JS_FALSE;
4389 }
4390 JSCodeGenerator *cg2 =
4391 new (cg2space) JSCodeGenerator(cg->parser,
4392 cg->codePool, cg->notePool,
4393 pn->pn_pos.begin.lineno);
4395 if (!cg2->init())
4396 return JS_FALSE;
4398 cg2->flags = pn->pn_funbox->tcflags | TCF_IN_FUNCTION;
4399 #if JS_HAS_SHARP_VARS
4400 if (cg2->flags & TCF_HAS_SHARPS) {
4401 cg2->sharpSlotBase = fun->sharpSlotBase(cx);
4402 if (cg2->sharpSlotBase < 0)
4403 return JS_FALSE;
4404 }
4405 #endif
4406 cg2->fun = fun;
4407 cg2->funbox = pn->pn_funbox;
4408 cg2->parent = cg;
4410 /*
4411 * jsparse.cpp:SetStaticLevel limited static nesting depth to fit in 16
4412 * bits and to reserve the all-ones value, thereby reserving the magic
4413 * FREE_UPVAR_COOKIE value. Note the cg2->staticLevel assignment below.
4414 */
4415 JS_ASSERT(cg->staticLevel < JS_BITMASK(16) - 1);
4416 cg2->staticLevel = cg->staticLevel + 1;
4418 /* We measured the max scope depth when we parsed the function. */
4419 JS_SCOPE_DEPTH_METERING(cg2->maxScopeDepth = uint16(-1));
4420 if (!js_EmitFunctionScript(cx, cg2, pn->pn_body))
4421 pn = NULL;
4423 cg2->~JSCodeGenerator();
4424 JS_ARENA_RELEASE(cg->codePool, cg2mark);
4425 cg2 = NULL;
4426 if (!pn)
4427 return JS_FALSE;
4429 /* Make the function object a literal in the outer script's pool. */
4430 index = cg->objectList.index(pn->pn_funbox);
4432 /* Emit a bytecode pointing to the closure object in its immediate. */
4433 op = PN_OP(pn);
4434 if (op != JSOP_NOP) {
4435 if ((pn->pn_funbox->tcflags & TCF_GENEXP_LAMBDA) &&
4436 js_NewSrcNote(cx, cg, SRC_GENEXP) < 0) {
4437 return JS_FALSE;
4438 }
4439 EMIT_INDEX_OP(op, index);
4440 break;
4441 }
4443 /*
4444 * For a script we emit the code as we parse. Thus the bytecode for
4445 * top-level functions should go in the prolog to predefine their
4446 * names in the variable object before the already-generated main code
4447 * is executed. This extra work for top-level scripts is not necessary
4448 * when we emit the code for a function. It is fully parsed prior to
4449 * invocation of the emitter, and calls to js_EmitTree for function
4450 * definitions can be scheduled before generating the rest of the code.
4451 */
4452 if (!cg->inFunction()) {
4453 JS_ASSERT(!cg->topStmt);
4454 CG_SWITCH_TO_PROLOG(cg);
4455 op = FUN_FLAT_CLOSURE(fun) ? JSOP_DEFFUN_FC : JSOP_DEFFUN;
4456 EMIT_INDEX_OP(op, index);
4457 CG_SWITCH_TO_MAIN(cg);
4459 /* Emit NOP for the decompiler. */
4460 if (!EmitFunctionDefNop(cx, cg, index))
4461 return JS_FALSE;
4462 } else {
4463 #ifdef DEBUG
4464 JSLocalKind localKind =
4465 #endif
4466 js_LookupLocal(cx, cg->fun, fun->atom, &slot);
4467 JS_ASSERT(localKind == JSLOCAL_VAR || localKind == JSLOCAL_CONST);
4468 JS_ASSERT(index < JS_BIT(20));
4469 pn->pn_index = index;
4470 op = FUN_FLAT_CLOSURE(fun) ? JSOP_DEFLOCALFUN_FC : JSOP_DEFLOCALFUN;
4471 if (!EmitSlotIndexOp(cx, op, slot, index, cg))
4472 return JS_FALSE;
4473 }
4474 break;
4475 }
4477 case TOK_ARGSBODY:
4478 ok = js_EmitTree(cx, cg, pn->last());
4479 break;
4481 case TOK_UPVARS:
4482 JS_ASSERT(cg->lexdeps.count == 0);
4483 JS_ASSERT(pn->pn_names.count != 0);
4484 cg->lexdeps = pn->pn_names;
4485 ok = js_EmitTree(cx, cg, pn->pn_tree);
4486 break;
4488 case TOK_IF:
4489 /* Initialize so we can detect else-if chains and avoid recursion. */
4490 stmtInfo.type = STMT_IF;
4491 beq = jmp = -1;
4492 noteIndex = -1;
4494 if_again:
4495 /* Emit code for the condition before pushing stmtInfo. */
4496 if (!js_EmitTree(cx, cg, pn->pn_kid1))
4497 return JS_FALSE;
4498 top = CG_OFFSET(cg);
4499 if (stmtInfo.type == STMT_IF) {
4500 js_PushStatement(cg, &stmtInfo, STMT_IF, top);
4501 } else {
4502 /*
4503 * We came here from the goto further below that detects else-if
4504 * chains, so we must mutate stmtInfo back into a STMT_IF record.
4505 * Also (see below for why) we need a note offset for SRC_IF_ELSE
4506 * to help the decompiler. Actually, we need two offsets, one for
4507 * decompiling any else clause and the second for decompiling an
4508 * else-if chain without bracing, overindenting, or incorrectly
4509 * scoping let declarations.
4510 */
4511 JS_ASSERT(stmtInfo.type == STMT_ELSE);
4512 stmtInfo.type = STMT_IF;
4513 stmtInfo.update = top;
4514 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, jmp - beq))
4515 return JS_FALSE;
4516 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 1, top - beq))
4517 return JS_FALSE;
4518 }
4520 /* Emit an annotated branch-if-false around the then part. */
4521 pn3 = pn->pn_kid3;
4522 noteIndex = js_NewSrcNote(cx, cg, pn3 ? SRC_IF_ELSE : SRC_IF);
4523 if (noteIndex < 0)
4524 return JS_FALSE;
4525 beq = EmitJump(cx, cg, JSOP_IFEQ, 0);
4526 if (beq < 0)
4527 return JS_FALSE;
4529 /* Emit code for the then and optional else parts. */
4530 if (!js_EmitTree(cx, cg, pn->pn_kid2))
4531 return JS_FALSE;
4532 if (pn3) {
4533 /* Modify stmtInfo so we know we're in the else part. */
4534 stmtInfo.type = STMT_ELSE;
4536 /*
4537 * Emit a JSOP_BACKPATCH op to jump from the end of our then part
4538 * around the else part. The js_PopStatementCG call at the bottom
4539 * of this switch case will fix up the backpatch chain linked from
4540 * stmtInfo.breaks.
4541 */
4542 jmp = EmitGoto(cx, cg, &stmtInfo, &stmtInfo.breaks, NULL, SRC_NULL);
4543 if (jmp < 0)
4544 return JS_FALSE;
4546 /* Ensure the branch-if-false comes here, then emit the else. */
4547 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, beq);
4548 if (pn3->pn_type == TOK_IF) {
4549 pn = pn3;
4550 goto if_again;
4551 }
4553 if (!js_EmitTree(cx, cg, pn3))
4554 return JS_FALSE;
4556 /*
4557 * Annotate SRC_IF_ELSE with the offset from branch to jump, for
4558 * the decompiler's benefit. We can't just "back up" from the pc
4559 * of the else clause, because we don't know whether an extended
4560 * jump was required to leap from the end of the then clause over
4561 * the else clause.
4562 */
4563 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, jmp - beq))
4564 return JS_FALSE;
4565 } else {
4566 /* No else part, fixup the branch-if-false to come here. */
4567 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, beq);
4568 }
4569 ok = js_PopStatementCG(cx, cg);
4570 break;
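/*
 * Illustrative sketch (editorial, hypothetical example; not from the
 * original source): an else-if chain like
 *
 *   if (a) f(); else if (b) g(); else h();
 *
 * is emitted iteratively via the if_again label above rather than by
 * recursing, reusing one stmtInfo record and fixing up the SRC_IF_ELSE
 * note offsets for each link in the chain.
 */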
4572 case TOK_SWITCH:
4573 /* Out of line to avoid bloating js_EmitTree's stack frame size. */
4574 ok = EmitSwitch(cx, cg, pn, &stmtInfo);
4575 break;
4577 case TOK_WHILE:
4578 /*
4579 * Minimize bytecodes issued for one or more iterations by jumping to
4580 * the condition below the body and closing the loop if the condition
4581 * is true with a backward branch. For iteration count i:
4582 *
4583 * i test at the top test at the bottom
4584 * = =============== ==================
4585 * 0 ifeq-pass goto; ifne-fail
4586 * 1 ifeq-fail; goto; ifne-pass goto; ifne-pass; ifne-fail
4587 * 2 2*(ifeq-fail; goto); ifeq-pass goto; 2*ifne-pass; ifne-fail
4588 * . . .
4589 * N N*(ifeq-fail; goto); ifeq-pass goto; N*ifne-pass; ifne-fail
4590 *
4591 * SpiderMonkey, pre-mozilla.org, emitted while parsing and so used
4592 * test at the top. When JSParseNode trees were added during the ES3
4593 * work (1998-9), the code generation scheme was not optimized, and
4594 * the decompiler continued to take advantage of the branch and jump
4595 * that bracketed the body. But given the SRC_WHILE note, it is easy
4596 * to support the more efficient scheme.
4597 */
4598 js_PushStatement(cg, &stmtInfo, STMT_WHILE_LOOP, top);
4599 noteIndex = js_NewSrcNote(cx, cg, SRC_WHILE);
4600 if (noteIndex < 0)
4601 return JS_FALSE;
4602 jmp = EmitJump(cx, cg, JSOP_GOTO, 0);
4603 if (jmp < 0)
4604 return JS_FALSE;
4605 top = js_Emit1(cx, cg, JSOP_TRACE);
4606 if (top < 0)
4607 return JS_FALSE;
4608 if (!js_EmitTree(cx, cg, pn->pn_right))
4609 return JS_FALSE;
4610 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
4611 if (!js_EmitTree(cx, cg, pn->pn_left))
4612 return JS_FALSE;
4613 beq = EmitJump(cx, cg, JSOP_IFNE, top - CG_OFFSET(cg));
4614 if (beq < 0)
4615 return JS_FALSE;
4616 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, beq - jmp))
4617 return JS_FALSE;
4618 ok = js_PopStatementCG(cx, cg);
4619 break;
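/*
 * Illustrative sketch (editorial; not from the original source) of the
 * layout the code above emits for 'while (c) body':
 *
 *         goto Lcond          (SRC_WHILE-annotated)
 *   Ltop: trace
 *         <body>
 *  Lcond: <c>
 *         ifne Ltop
 */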
4621 case TOK_DO:
4622 /* Emit an annotated nop so we know to decompile a 'do' keyword. */
4623 noteIndex = js_NewSrcNote(cx, cg, SRC_WHILE);
4624 if (noteIndex < 0 || js_Emit1(cx, cg, JSOP_NOP) < 0)
4625 return JS_FALSE;
4627 /* Compile the loop body. */
4628 top = js_Emit1(cx, cg, JSOP_TRACE);
4629 if (top < 0)
4630 return JS_FALSE;
4631 js_PushStatement(cg, &stmtInfo, STMT_DO_LOOP, top);
4632 if (!js_EmitTree(cx, cg, pn->pn_left))
4633 return JS_FALSE;
4635 /* Set loop and enclosing label update offsets, for continue. */
4636 stmt = &stmtInfo;
4637 do {
4638 stmt->update = CG_OFFSET(cg);
4639 } while ((stmt = stmt->down) != NULL && stmt->type == STMT_LABEL);
4641 /* Compile the loop condition, now that continues know where to go. */
4642 if (!js_EmitTree(cx, cg, pn->pn_right))
4643 return JS_FALSE;
4645 /*
4646 * Since we use JSOP_IFNE for other purposes as well as for do-while
4647 * loops, we must store 1 + (beq - top) in the SRC_WHILE note offset,
4648 * and the decompiler must get that delta and decompile recursively.
4649 */
4650 beq = EmitJump(cx, cg, JSOP_IFNE, top - CG_OFFSET(cg));
4651 if (beq < 0)
4652 return JS_FALSE;
4653 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, 1 + (beq - top)))
4654 return JS_FALSE;
4655 ok = js_PopStatementCG(cx, cg);
4656 break;
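/*
 * Illustrative sketch (editorial; not from the original source):
 * 'do body; while (c)' thus becomes
 *
 *         nop                 (SRC_WHILE-annotated, marks the 'do')
 *   Ltop: trace
 *         <body>              (continue lands after the body)
 *         <c>
 *         ifne Ltop           (note offset = 1 + (beq - top))
 */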
4658 case TOK_FOR:
4659 beq = 0; /* suppress gcc warnings */
4660 jmp = -1;
4661 pn2 = pn->pn_left;
4662 js_PushStatement(cg, &stmtInfo, STMT_FOR_LOOP, top);
4664 if (pn2->pn_type == TOK_IN) {
4665 /* Set stmtInfo type for later testing. */
4666 stmtInfo.type = STMT_FOR_IN_LOOP;
4668 /*
4669 * If the left part is 'var x', emit code to define x if necessary
4670 * using a prolog opcode, but do not emit a pop. If the left part
4671 * is 'var x = i', emit prolog code to define x if necessary; then
4672 * emit code to evaluate i, assign the result to x, and pop the
4673 * result off the stack.
4674 *
4675 * All the logic to do this is implemented in the outer switch's
4676 * TOK_VAR case, conditioned on pn_xflags flags set by the parser.
4677 *
4678 * In the 'for (var x = i in o) ...' case, the js_EmitTree(...pn3)
4679 * called here will generate the proper note for the assignment
4680 * op that sets x = i, hoisting the initialized var declaration
4681 * out of the loop: 'var x = i; for (x in o) ...'.
4682 *
4683 * In the 'for (var x in o) ...' case, nothing but the prolog op
4684 * (if needed) should be generated here; we must emit the note
4685 * just before the JSOP_FOR* opcode in the switch on pn3->pn_type
4686 * a bit below, so nothing is hoisted: 'for (var x in o) ...'.
4687 *
4688 * A 'for (let x = i in o)' loop must not be hoisted, since in
4689 * this form the let variable is scoped by the loop body (but not
4690 * the head). The initializer expression i must be evaluated for
4691 * any side effects. So we hoist only i in the let case.
4692 */
4693 pn3 = pn2->pn_left;
4694 type = PN_TYPE(pn3);
4695 cg->flags |= TCF_IN_FOR_INIT;
4696 if (TokenKindIsDecl(type) && !js_EmitTree(cx, cg, pn3))
4697 return JS_FALSE;
4698 cg->flags &= ~TCF_IN_FOR_INIT;
4700 /* Compile the object expression to the right of 'in'. */
4701 if (!js_EmitTree(cx, cg, pn2->pn_right))
4702 return JS_FALSE;
4704 /*
4705 * Emit a bytecode to convert top of stack value to the iterator
4706 * object depending on the loop variant (for-in, for-each-in, or
4707 * destructuring for-in).
4708 */
4709 JS_ASSERT(pn->pn_op == JSOP_ITER);
4710 if (js_Emit2(cx, cg, JSOP_ITER, (uint8) pn->pn_iflags) < 0)
4711 return JS_FALSE;
4713 /* Annotate so the decompiler can find the loop-closing jump. */
4714 noteIndex = js_NewSrcNote(cx, cg, SRC_FOR_IN);
4715 if (noteIndex < 0)
4716 return JS_FALSE;
4718 /*
4719 * Jump down to the loop condition to minimize overhead assuming at
4720 * least one iteration, as the other loop forms do.
4721 */
4722 jmp = EmitJump(cx, cg, JSOP_GOTO, 0);
4723 if (jmp < 0)
4724 return JS_FALSE;
4726 top = CG_OFFSET(cg);
4727 SET_STATEMENT_TOP(&stmtInfo, top);
4728 if (js_Emit1(cx, cg, JSOP_TRACE) < 0)
4729 return JS_FALSE;
4731 #ifdef DEBUG
4732 intN loopDepth = cg->stackDepth;
4733 #endif
4735 /*
4736 * Compile a JSOP_FOR* bytecode based on the left hand side.
4737 *
4738 * Initialize op to JSOP_SETNAME in case of |for ([a, b] in o)...|
4739 * or similar, to signify assignment, rather than declaration, to
4740 * the decompiler. EmitDestructuringOps takes a prolog bytecode
4741 * parameter and emits the appropriate source note, defaulting to
4742 * assignment, so JSOP_SETNAME is not critical here; many similar
4743 * ops could be used -- just not JSOP_NOP (which means 'let').
4744 */
4745 op = JSOP_SETNAME;
4746 switch (type) {
4747 #if JS_HAS_BLOCK_SCOPE
4748 case TOK_LET:
4749 #endif
4750 case TOK_VAR:
4751 JS_ASSERT(pn3->pn_arity == PN_LIST && pn3->pn_count == 1);
4752 pn3 = pn3->pn_head;
4753 #if JS_HAS_DESTRUCTURING
4754 if (pn3->pn_type == TOK_ASSIGN) {
4755 pn3 = pn3->pn_left;
4756 JS_ASSERT(pn3->pn_type == TOK_RB || pn3->pn_type == TOK_RC);
4757 }
4758 if (pn3->pn_type == TOK_RB || pn3->pn_type == TOK_RC) {
4759 op = PN_OP(pn2->pn_left);
4760 goto destructuring_for;
4761 }
4762 #else
4763 JS_ASSERT(pn3->pn_type == TOK_NAME);
4764 #endif
4765 /* FALL THROUGH */
4767 case TOK_NAME:
4768 /*
4769 * Always annotate JSOP_FORLOCAL if given input of the form
4770 * 'for (let x in o)' -- the decompiler must not hoist the
4771 * 'let x' out of the loop head, or x will be bound in the
4772 * wrong scope. Likewise, but only for the sake of higher
4773 * decompilation fidelity, do not hoist 'var x' when given
4774 * 'for (var x in o)'.
4775 */
4776 if ((
4777 #if JS_HAS_BLOCK_SCOPE
4778 type == TOK_LET ||
4779 #endif
4780 (type == TOK_VAR && !pn3->maybeExpr())) &&
4781 js_NewSrcNote2(cx, cg, SRC_DECL,
4782 (type == TOK_VAR)
4783 ? SRC_DECL_VAR
4784 : SRC_DECL_LET) < 0) {
4785 return JS_FALSE;
4786 }
4787 if (pn3->pn_cookie != FREE_UPVAR_COOKIE) {
4788 op = PN_OP(pn3);
4789 switch (op) {
4790 case JSOP_GETARG: /* FALL THROUGH */
4791 case JSOP_SETARG: op = JSOP_FORARG; break;
4792 case JSOP_GETGVAR: /* FALL THROUGH */
4793 case JSOP_SETGVAR: op = JSOP_FORNAME; break;
4794 case JSOP_GETLOCAL: /* FALL THROUGH */
4795 case JSOP_SETLOCAL: op = JSOP_FORLOCAL; break;
4796 default: JS_ASSERT(0);
4797 }
4798 } else {
4799 pn3->pn_op = JSOP_FORNAME;
4800 if (!BindNameToSlot(cx, cg, pn3))
4801 return JS_FALSE;
4802 op = PN_OP(pn3);
4803 }
4804 if (pn3->isConst()) {
4805 ReportCompileErrorNumber(cx, CG_TS(cg), pn3, JSREPORT_ERROR,
4806 JSMSG_BAD_FOR_LEFTSIDE);
4807 return JS_FALSE;
4808 }
4809 if (pn3->pn_cookie != FREE_UPVAR_COOKIE) {
4810 atomIndex = (jsatomid) pn3->pn_cookie;
4811 EMIT_UINT16_IMM_OP(op, atomIndex);
4812 } else {
4813 if (!EmitAtomOp(cx, pn3, op, cg))
4814 return JS_FALSE;
4815 }
4816 break;
4818 case TOK_DOT:
4819 /*
4820 * 'for (o.p in q)' can use JSOP_FORPROP only if evaluating 'o'
4821 * has no side effects.
4822 */
4823 useful = JS_FALSE;
4824 if (!CheckSideEffects(cx, cg, pn3->expr(), &useful))
4825 return JS_FALSE;
4826 if (!useful) {
4827 if (!EmitPropOp(cx, pn3, JSOP_FORPROP, cg, JS_FALSE))
4828 return JS_FALSE;
4829 break;
4830 }
4831 /* FALL THROUGH */
4833 #if JS_HAS_DESTRUCTURING
4834 destructuring_for:
4835 #endif
4836 default:
4837 if (js_Emit1(cx, cg, JSOP_FORELEM) < 0)
4838 return JS_FALSE;
4839 JS_ASSERT(cg->stackDepth >= 3);
4841 #if JS_HAS_DESTRUCTURING
4842 if (pn3->pn_type == TOK_RB || pn3->pn_type == TOK_RC) {
4843 if (!EmitDestructuringOps(cx, cg, op, pn3))
4844 return JS_FALSE;
4845 if (js_Emit1(cx, cg, JSOP_POP) < 0)
4846 return JS_FALSE;
4847 } else
4848 #endif
4849 if (pn3->pn_type == TOK_LP) {
4850 JS_ASSERT(pn3->pn_op == JSOP_SETCALL);
4851 if (!js_EmitTree(cx, cg, pn3))
4852 return JS_FALSE;
4853 if (js_Emit1(cx, cg, JSOP_ENUMELEM) < 0)
4854 return JS_FALSE;
4855 } else
4856 #if JS_HAS_XML_SUPPORT
4857 if (pn3->pn_type == TOK_UNARYOP) {
4858 JS_ASSERT(pn3->pn_op == JSOP_BINDXMLNAME);
4859 if (!js_EmitTree(cx, cg, pn3))
4860 return JS_FALSE;
4861 if (js_Emit1(cx, cg, JSOP_ENUMELEM) < 0)
4862 return JS_FALSE;
4863 } else
4864 #endif
4865 if (!EmitElemOp(cx, pn3, JSOP_ENUMELEM, cg))
4866 return JS_FALSE;
4867 break;
4868 }
4870 /* The stack should be balanced around the JSOP_FOR* opcode sequence. */
4871 JS_ASSERT(cg->stackDepth == loopDepth);
4873 /* Set the first srcnote offset so we can find the start of the loop body. */
4874 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, CG_OFFSET(cg) - jmp))
4875 return JS_FALSE;
4877 /* Emit code for the loop body. */
4878 if (!js_EmitTree(cx, cg, pn->pn_right))
4879 return JS_FALSE;
4881 /* Set loop and enclosing "update" offsets, for continue. */
4882 stmt = &stmtInfo;
4883 do {
4884 stmt->update = CG_OFFSET(cg);
4885 } while ((stmt = stmt->down) != NULL && stmt->type == STMT_LABEL);
4887 /*
4888 * Fix up the goto that starts the loop to jump down to JSOP_NEXTITER.
4889 */
4890 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
4891 if (js_Emit1(cx, cg, JSOP_NEXTITER) < 0)
4892 return JS_FALSE;
4893 beq = EmitJump(cx, cg, JSOP_IFNE, top - CG_OFFSET(cg));
4894 if (beq < 0)
4895 return JS_FALSE;
4897 /* Set the second srcnote offset so we can find the closing jump. */
4898 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 1, beq - jmp))
4899 return JS_FALSE;
4900 } else {
4901 /* C-style for (init; cond; update) ... loop. */
4902 op = JSOP_POP;
4903 pn3 = pn2->pn_kid1;
4904 if (!pn3) {
4905 /* No initializer: emit an annotated nop for the decompiler. */
4906 op = JSOP_NOP;
4907 } else {
4908 cg->flags |= TCF_IN_FOR_INIT;
4909 #if JS_HAS_DESTRUCTURING
4910 if (pn3->pn_type == TOK_ASSIGN &&
4911 !MaybeEmitGroupAssignment(cx, cg, op, pn3, &op)) {
4912 return JS_FALSE;
4913 }
4914 #endif
4915 if (op == JSOP_POP) {
4916 if (!js_EmitTree(cx, cg, pn3))
4917 return JS_FALSE;
4918 if (TokenKindIsDecl(PN_TYPE(pn3))) {
4919 /*
4920 * Check whether a destructuring-initialized var decl
4921 * was optimized to a group assignment. If so, we do
4922 * not need to emit a pop below, so switch to a nop,
4923 * just for the decompiler.
4924 */
4925 JS_ASSERT(pn3->pn_arity == PN_LIST);
4926 if (pn3->pn_xflags & PNX_GROUPINIT)
4927 op = JSOP_NOP;
4928 }
4929 }
4930 cg->flags &= ~TCF_IN_FOR_INIT;
4931 }
4933 /*
4934 * NB: the SRC_FOR note has offsetBias 1 (JSOP_{NOP,POP}_LENGTH).
4935 * Use tmp to hold the biased srcnote "top" offset, which differs
4936 * from the top local variable by the length of the JSOP_GOTO{,X}
4937 * emitted in between tmp and top if this loop has a condition.
4938 */
4939 noteIndex = js_NewSrcNote(cx, cg, SRC_FOR);
4940 if (noteIndex < 0 || js_Emit1(cx, cg, op) < 0)
4941 return JS_FALSE;
4942 tmp = CG_OFFSET(cg);
4944 if (pn2->pn_kid2) {
4945 /* Goto the loop condition, which branches back to iterate. */
4946 jmp = EmitJump(cx, cg, JSOP_GOTO, 0);
4947 if (jmp < 0)
4948 return JS_FALSE;
4949 }
4951 top = CG_OFFSET(cg);
4952 SET_STATEMENT_TOP(&stmtInfo, top);
4954 /* Emit code for the loop body. */
4955 if (js_Emit1(cx, cg, JSOP_TRACE) < 0)
4956 return JS_FALSE;
4957 if (!js_EmitTree(cx, cg, pn->pn_right))
4958 return JS_FALSE;
4960 /* Set the second note offset so we can find the update part. */
4961 JS_ASSERT(noteIndex != -1);
4962 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 1,
4963 CG_OFFSET(cg) - tmp)) {
4964 return JS_FALSE;
4965 }
4967 /* Set loop and enclosing "update" offsets, for continue. */
4968 stmt = &stmtInfo;
4969 do {
4970 stmt->update = CG_OFFSET(cg);
4971 } while ((stmt = stmt->down) != NULL && stmt->type == STMT_LABEL);
4973 /* Check for update code to do before the condition (if any). */
4974 pn3 = pn2->pn_kid3;
4975 if (pn3) {
4976 op = JSOP_POP;
4977 #if JS_HAS_DESTRUCTURING
4978 if (pn3->pn_type == TOK_ASSIGN &&
4979 !MaybeEmitGroupAssignment(cx, cg, op, pn3, &op)) {
4980 return JS_FALSE;
4982 #endif
4983 if (op == JSOP_POP && !js_EmitTree(cx, cg, pn3))
4984 return JS_FALSE;
4986 /* Always emit the POP or NOP, to help the decompiler. */
4987 if (js_Emit1(cx, cg, op) < 0)
4988 return JS_FALSE;
4990 /* Restore the absolute line number for source note readers. */
4991 off = (ptrdiff_t) pn->pn_pos.end.lineno;
4992 if (CG_CURRENT_LINE(cg) != (uintN) off) {
4993 if (js_NewSrcNote2(cx, cg, SRC_SETLINE, off) < 0)
4994 return JS_FALSE;
4995 CG_CURRENT_LINE(cg) = (uintN) off;
4996 }
4997 }
4999 /* Set the first note offset so we can find the loop condition. */
5000 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0,
5001 CG_OFFSET(cg) - tmp)) {
5002 return JS_FALSE;
5003 }
5005 if (pn2->pn_kid2) {
5006 /* Fix up the goto from top to target the loop condition. */
5007 JS_ASSERT(jmp >= 0);
5008 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
5010 if (!js_EmitTree(cx, cg, pn2->pn_kid2))
5011 return JS_FALSE;
5012 }
5014 /* The third note offset helps us find the loop-closing jump. */
5015 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 2,
5016 CG_OFFSET(cg) - tmp)) {
5017 return JS_FALSE;
5018 }
5020 if (pn2->pn_kid2) {
5021 beq = EmitJump(cx, cg, JSOP_IFNE, top - CG_OFFSET(cg));
5022 if (beq < 0)
5023 return JS_FALSE;
5024 } else {
5025 /* No loop condition -- emit the loop-closing jump. */
5026 jmp = EmitJump(cx, cg, JSOP_GOTO, top - CG_OFFSET(cg));
5027 if (jmp < 0)
5028 return JS_FALSE;
5029 }
5030 }
5032 /* Now fixup all breaks and continues (before for/in's JSOP_ENDITER). */
5033 if (!js_PopStatementCG(cx, cg))
5034 return JS_FALSE;
5036 if (pn2->pn_type == TOK_IN) {
5037 /*
5038 * JSOP_ENDITER must have a slot to save an exception thrown from
5039 * the body of a for-in loop when closing the iterator object, and
5040 * fortunately it does: the slot that was set by JSOP_NEXTITER to
5041 * the return value of iterator.next().
5042 */
5043 JS_ASSERT(js_CodeSpec[JSOP_ENDITER].nuses == 2);
5044 if (!NewTryNote(cx, cg, JSTRY_ITER, cg->stackDepth, top, CG_OFFSET(cg)) ||
5045 js_Emit1(cx, cg, JSOP_ENDITER) < 0) {
5046 return JS_FALSE;
5047 }
5048 }
5049 break;
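/*
 * Illustrative sketch (editorial; not from the original source) of the
 * for-in layout emitted above:
 *
 *         <object expr>
 *         iter                (JSOP_ITER, SRC_FOR_IN-annotated)
 *         goto Lcond
 *   Ltop: trace
 *         <JSOP_FOR* assigns the loop variable>
 *         <body>
 *  Lcond: nextiter
 *         ifne Ltop
 *         enditer             (guarded by a JSTRY_ITER try note)
 */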
5051 case TOK_BREAK:
5052 stmt = cg->topStmt;
5053 atom = pn->pn_atom;
5054 if (atom) {
5055 ale = cg->atomList.add(cg->parser, atom);
5056 if (!ale)
5057 return JS_FALSE;
5058 while (stmt->type != STMT_LABEL || stmt->label != atom)
5059 stmt = stmt->down;
5060 noteType = SRC_BREAK2LABEL;
5061 } else {
5062 ale = NULL;
5063 while (!STMT_IS_LOOP(stmt) && stmt->type != STMT_SWITCH)
5064 stmt = stmt->down;
5065 noteType = (stmt->type == STMT_SWITCH) ? SRC_NULL : SRC_BREAK;
5066 }
5068 if (EmitGoto(cx, cg, stmt, &stmt->breaks, ale, noteType) < 0)
5069 return JS_FALSE;
5070 break;
5072 case TOK_CONTINUE:
5073 stmt = cg->topStmt;
5074 atom = pn->pn_atom;
5075 if (atom) {
5076 /* Find the loop statement enclosed by the matching label. */
5077 JSStmtInfo *loop = NULL;
5078 ale = cg->atomList.add(cg->parser, atom);
5079 if (!ale)
5080 return JS_FALSE;
5081 while (stmt->type != STMT_LABEL || stmt->label != atom) {
5082 if (STMT_IS_LOOP(stmt))
5083 loop = stmt;
5084 stmt = stmt->down;
5085 }
5086 stmt = loop;
5087 noteType = SRC_CONT2LABEL;
5088 } else {
5089 ale = NULL;
5090 while (!STMT_IS_LOOP(stmt))
5091 stmt = stmt->down;
5092 noteType = SRC_CONTINUE;
5093 }
5095 if (EmitGoto(cx, cg, stmt, &stmt->continues, ale, noteType) < 0)
5096 return JS_FALSE;
5097 break;
5099 case TOK_WITH:
5100 if (!js_EmitTree(cx, cg, pn->pn_left))
5101 return JS_FALSE;
5102 js_PushStatement(cg, &stmtInfo, STMT_WITH, CG_OFFSET(cg));
5103 if (js_Emit1(cx, cg, JSOP_ENTERWITH) < 0)
5104 return JS_FALSE;
5105 if (!js_EmitTree(cx, cg, pn->pn_right))
5106 return JS_FALSE;
5107 if (js_Emit1(cx, cg, JSOP_LEAVEWITH) < 0)
5108 return JS_FALSE;
5109 ok = js_PopStatementCG(cx, cg);
5110 break;
5112 case TOK_TRY:
5113 {
5114 ptrdiff_t tryStart, tryEnd, catchJump, finallyStart;
5115 intN depth;
5116 JSParseNode *lastCatch;
5118 catchJump = -1;
5120 /*
5121 * Push stmtInfo to track jumps-over-catches and gosubs-to-finally
5122 * for later fixup.
5123 *
5124 * When a finally block is active (STMT_FINALLY in our tree context),
5125 * non-local jumps (including jumps-over-catches) result in a GOSUB
5126 * being written into the bytecode stream and fixed-up later (c.f.
5127 * EmitBackPatchOp and BackPatch).
5128 */
5129 js_PushStatement(cg, &stmtInfo,
5130 pn->pn_kid3 ? STMT_FINALLY : STMT_TRY,
5131 CG_OFFSET(cg));
5133 /*
5134 * Since an exception can be thrown at any place inside the try block,
5135 * we need to restore the stack and the scope chain before we transfer
5136 * the control to the exception handler.
5137 *
5138 * For that we store in a try note associated with the catch or
5139 * finally block the stack depth upon the try entry. The interpreter
5140 * uses this depth to properly unwind the stack and the scope chain.
5141 */
5142 depth = cg->stackDepth;
5144 /* Mark try location for decompilation, then emit try block. */
5145 if (js_Emit1(cx, cg, JSOP_TRY) < 0)
5146 return JS_FALSE;
5147 tryStart = CG_OFFSET(cg);
5148 if (!js_EmitTree(cx, cg, pn->pn_kid1))
5149 return JS_FALSE;
5150 JS_ASSERT(depth == cg->stackDepth);
5152 /* GOSUB to finally, if present. */
5153 if (pn->pn_kid3) {
5154 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
5155 return JS_FALSE;
5156 jmp = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &GOSUBS(stmtInfo));
5157 if (jmp < 0)
5158 return JS_FALSE;
5159 }
5161 /* Emit (hidden) jump over catch and/or finally. */
5162 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
5163 return JS_FALSE;
5164 jmp = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &catchJump);
5165 if (jmp < 0)
5166 return JS_FALSE;
5168 tryEnd = CG_OFFSET(cg);
5170 /* If this try has a catch block, emit it. */
5171 pn2 = pn->pn_kid2;
5172 lastCatch = NULL;
5173 if (pn2) {
5174 jsint count = 0; /* previous catch block's population */
5176 /*
5177 * The emitted code for a catch block looks like:
5178 *
5179 * [throwing] only if 2nd+ catch block
5180 * [leaveblock] only if 2nd+ catch block
5181 * enterblock with SRC_CATCH
5182 * exception
5183 * [dup] only if catchguard
5184 * setlocalpop <slot> or destructuring code
5185 * [< catchguard code >] if there's a catchguard
5186 * [ifeq <offset to next catch block>] " "
5187 * [pop] only if catchguard
5188 * < catch block contents >
5189 * leaveblock
5190 * goto <end of catch blocks> non-local; finally applies
5191 *
5192 * If there's no catch block without a catchguard, the last
5193 * <offset to next catch block> points to rethrow code. This
5194 * code will [gosub] to the finally code if appropriate, and is
5195 * also used for the catch-all trynote for capturing exceptions
5196 * thrown from catch{} blocks.
5197 */
5198 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
5199 ptrdiff_t guardJump, catchNote;
5201 JS_ASSERT(cg->stackDepth == depth);
5202 guardJump = GUARDJUMP(stmtInfo);
5203 if (guardJump != -1) {
5204 /* Fix up and clean up previous catch block. */
5205 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, guardJump);
5207 /*
5208 * Account for JSOP_ENTERBLOCK (whose block object count
5209 * is saved below) and the pushed exception object that we
5210 * still have after jumping from the previous guard.
5211 */
5212 cg->stackDepth = depth + count + 1;
5214 /*
5215 * Move exception back to cx->exception to prepare for
5216 * the next catch. We hide [throwing] from the decompiler
5217 * since it compensates for the hidden JSOP_DUP at the
5218 * start of the previous guarded catch.
5219 */
5220 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0 ||
5221 js_Emit1(cx, cg, JSOP_THROWING) < 0) {
5222 return JS_FALSE;
5223 }
5224 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
5225 return JS_FALSE;
5226 EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK, count);
5227 JS_ASSERT(cg->stackDepth == depth);
5228 }
5230 /*
5231 * Annotate the JSOP_ENTERBLOCK that's about to be generated
5232 * by the call to js_EmitTree immediately below. Save this
5233 * source note's index in stmtInfo for use by the TOK_CATCH:
5234 * case, where the length of the catch guard is set as the
5235 * note's offset.
5236 */
5237 catchNote = js_NewSrcNote2(cx, cg, SRC_CATCH, 0);
5238 if (catchNote < 0)
5239 return JS_FALSE;
5240 CATCHNOTE(stmtInfo) = catchNote;
5242 /*
5243 * Emit the lexical scope and catch body. Save the catch's
5244 * block object population via count, for use when targeting
5245 * guardJump at the next catch (the guard mismatch case).
5246 */
5247 JS_ASSERT(pn3->pn_type == TOK_LEXICALSCOPE);
5248 count = OBJ_BLOCK_COUNT(cx, pn3->pn_objbox->object);
5249 if (!js_EmitTree(cx, cg, pn3))
5250 return JS_FALSE;
5252 /* gosub <finally>, if required */
5253 if (pn->pn_kid3) {
5254 jmp = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH,
5255 &GOSUBS(stmtInfo));
5256 if (jmp < 0)
5257 return JS_FALSE;
5258 JS_ASSERT(cg->stackDepth == depth);
5259 }
5261 /*
5262 * Jump over the remaining catch blocks. This will get fixed
5263 * up to jump to after catch/finally.
5264 */
5265 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
5266 return JS_FALSE;
5267 jmp = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &catchJump);
5268 if (jmp < 0)
5269 return JS_FALSE;
5271 /*
5272 * Save a pointer to the last catch node to handle try-finally
5273 * and try-catch(guard)-finally special cases.
5274 */
5275 lastCatch = pn3->expr();
5276 }
5277 }
5279 /*
5280 * Last catch guard jumps to the rethrow code sequence if none of the
5281 * guards match. Target guardJump at the beginning of the rethrow
5282 * sequence, just in case a guard expression throws and leaves the
5283 * stack unbalanced.
5284 */
5285 if (lastCatch && lastCatch->pn_kid2) {
5286 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, GUARDJUMP(stmtInfo));
5288 /* Sync the stack to take into account pushed exception. */
5289 JS_ASSERT(cg->stackDepth == depth);
5290 cg->stackDepth = depth + 1;
5292 /*
5293 * Rethrow the exception, delegating execution of the finally
5294 * block, if any, to the exception handler.
5295 */
5296 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0 ||
5297 js_Emit1(cx, cg, JSOP_THROW) < 0) {
5298 return JS_FALSE;
5299 }
5300 }
5302 JS_ASSERT(cg->stackDepth == depth);
5304 /* Emit finally handler if any. */
5305 finallyStart = 0; /* to quell GCC uninitialized warnings */
5306 if (pn->pn_kid3) {
5307 /*
5308 * Fix up the gosubs that might have been emitted before non-local
5309 * jumps to the finally code.
5310 */
5311 if (!BackPatch(cx, cg, GOSUBS(stmtInfo), CG_NEXT(cg), JSOP_GOSUB))
5312 return JS_FALSE;
5314 finallyStart = CG_OFFSET(cg);
5316 /* Indicate that we're emitting a subroutine body. */
5317 stmtInfo.type = STMT_SUBROUTINE;
5318 if (!UpdateLineNumberNotes(cx, cg, pn->pn_kid3->pn_pos.begin.lineno))
5319 return JS_FALSE;
5320 if (js_Emit1(cx, cg, JSOP_FINALLY) < 0 ||
5321 !js_EmitTree(cx, cg, pn->pn_kid3) ||
5322 js_Emit1(cx, cg, JSOP_RETSUB) < 0) {
5323 return JS_FALSE;
5324 }
5325 JS_ASSERT(cg->stackDepth == depth);
5326 }
5327 if (!js_PopStatementCG(cx, cg))
5328 return JS_FALSE;
5330 if (js_NewSrcNote(cx, cg, SRC_ENDBRACE) < 0 ||
5331 js_Emit1(cx, cg, JSOP_NOP) < 0) {
5332 return JS_FALSE;
5333 }
5335 /* Fix up the end-of-try/catch jumps to come here. */
5336 if (!BackPatch(cx, cg, catchJump, CG_NEXT(cg), JSOP_GOTO))
5337 return JS_FALSE;
5339 /*
5340 * Add the try note last, to let post-order give us the right ordering
5341 * (first to last for a given nesting level, inner to outer by level).
5342 */
5343 if (pn->pn_kid2 &&
5344 !NewTryNote(cx, cg, JSTRY_CATCH, depth, tryStart, tryEnd)) {
5345 return JS_FALSE;
5346 }
5348 /*
5349 * If we've got a finally, mark the try+catch region with an additional
5350 * trynote to catch exceptions (re)thrown from a catch block or
5351 * for the try{}finally{} case.
5352 */
5353 if (pn->pn_kid3 &&
5354 !NewTryNote(cx, cg, JSTRY_FINALLY, depth, tryStart, finallyStart)) {
5355 return JS_FALSE;
5356 }
5357 break;
5358 }
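/*
 * Illustrative sketch (editorial, hypothetical example; not from the
 * original source): with SpiderMonkey's catch-guard extension,
 *
 *   try { f(); } catch (e if e instanceof TypeError) { g(); }
 *   catch (e) { h(); } finally { k(); }
 *
 * each guarded catch dups the pending exception, tests its guard, and
 * jumps (via guardJump) to the next catch on mismatch; the finally
 * body runs as a JSOP_GOSUB/JSOP_RETSUB subroutine.
 */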
5360 case TOK_CATCH:
5361 {
5362 ptrdiff_t catchStart, guardJump;
5363 JSObject *blockObj;
5365 /*
5366 * Morph STMT_BLOCK to STMT_CATCH, note the block entry code offset,
5367 * and save the block object atom.
5368 */
5369 stmt = cg->topStmt;
5370 JS_ASSERT(stmt->type == STMT_BLOCK && (stmt->flags & SIF_SCOPE));
5371 stmt->type = STMT_CATCH;
5372 catchStart = stmt->update;
5373 blockObj = stmt->blockObj;
5375 /* Go up one statement info record to the TRY or FINALLY record. */
5376 stmt = stmt->down;
5377 JS_ASSERT(stmt->type == STMT_TRY || stmt->type == STMT_FINALLY);
5379 /* Pick up the pending exception and bind it to the catch variable. */
5380 if (js_Emit1(cx, cg, JSOP_EXCEPTION) < 0)
5381 return JS_FALSE;
5383 /*
5384 * If there is a guard, dup the exception object so we can
5385 * use it later when rethrowing or in subsequent catches.
5386 */
5387 if (pn->pn_kid2 && js_Emit1(cx, cg, JSOP_DUP) < 0)
5388 return JS_FALSE;
5390 pn2 = pn->pn_kid1;
5391 switch (pn2->pn_type) {
5392 #if JS_HAS_DESTRUCTURING
5393 case TOK_RB:
5394 case TOK_RC:
5395 if (!EmitDestructuringOps(cx, cg, JSOP_NOP, pn2))
5396 return JS_FALSE;
5397 if (js_Emit1(cx, cg, JSOP_POP) < 0)
5398 return JS_FALSE;
5399 break;
5400 #endif
5402 case TOK_NAME:
5403 /* Inline and specialize BindNameToSlot for pn2. */
5404 JS_ASSERT(pn2->pn_cookie != FREE_UPVAR_COOKIE);
5405 EMIT_UINT16_IMM_OP(JSOP_SETLOCALPOP, pn2->pn_cookie);
5406 break;
5408 default:
5409 JS_ASSERT(0);
5410 }
5412 /* Emit the guard expression, if there is one. */
5413 if (pn->pn_kid2) {
5414 if (!js_EmitTree(cx, cg, pn->pn_kid2))
5415 return JS_FALSE;
5416 if (!js_SetSrcNoteOffset(cx, cg, CATCHNOTE(*stmt), 0,
5417 CG_OFFSET(cg) - catchStart)) {
5418 return JS_FALSE;
5419 }
5420 /* ifeq <next block> */
5421 guardJump = EmitJump(cx, cg, JSOP_IFEQ, 0);
5422 if (guardJump < 0)
5423 return JS_FALSE;
5424 GUARDJUMP(*stmt) = guardJump;
5426 /* Pop duplicated exception object as we no longer need it. */
5427 if (js_Emit1(cx, cg, JSOP_POP) < 0)
5428 return JS_FALSE;
5429 }
5431 /* Emit the catch body. */
5432 if (!js_EmitTree(cx, cg, pn->pn_kid3))
5433 return JS_FALSE;
5435 /*
5436 * Annotate the JSOP_LEAVEBLOCK that will be emitted as we unwind via
5437 * our TOK_LEXICALSCOPE parent, so the decompiler knows to pop.
5438 */
5439 off = cg->stackDepth;
5440 if (js_NewSrcNote2(cx, cg, SRC_CATCH, off) < 0)
5441 return JS_FALSE;
5442 break;
5443 }
5445 case TOK_VAR:
5446 if (!EmitVariables(cx, cg, pn, JS_FALSE, &noteIndex))
5447 return JS_FALSE;
5448 break;
5450 case TOK_RETURN:
5451 /* Push a return value */
5452 pn2 = pn->pn_kid;
5453 if (pn2) {
5454 if (!js_EmitTree(cx, cg, pn2))
5455 return JS_FALSE;
5456 } else {
5457 if (js_Emit1(cx, cg, JSOP_PUSH) < 0)
5458 return JS_FALSE;
5459 }
5461 /*
5462 * EmitNonLocalJumpFixup may add fixup bytecode to close open try
5463 * blocks having finally clauses and to exit intermingled let blocks.
5464 * We can't simply transfer control flow to our caller in that case,
5465 * because we must gosub to those finally clauses from inner to outer,
5466 * with the correct stack pointer (i.e., after popping any with,
5467 * for/in, etc., slots nested inside the finally's try).
5468 *
5469 * In this case we mutate JSOP_RETURN into JSOP_SETRVAL and add an
5470 * extra JSOP_RETRVAL after the fixups.
5471 */
5472 top = CG_OFFSET(cg);
5473 if (js_Emit1(cx, cg, JSOP_RETURN) < 0)
5474 return JS_FALSE;
5475 if (!EmitNonLocalJumpFixup(cx, cg, NULL))
5476 return JS_FALSE;
5477 if (top + JSOP_RETURN_LENGTH != CG_OFFSET(cg)) {
5478 CG_BASE(cg)[top] = JSOP_SETRVAL;
5479 if (js_Emit1(cx, cg, JSOP_RETRVAL) < 0)
5480 return JS_FALSE;
5481 }
5482 break;
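/*
 * Illustrative sketch (editorial, hypothetical example; not from the
 * original source): in
 *
 *   function f() { try { return 1; } finally { g(); } }
 *
 * the fixup path above rewrites the JSOP_RETURN emitted for 'return 1'
 * into JSOP_SETRVAL, gosubs to the finally block, and appends a
 * trailing JSOP_RETRVAL to return the saved value.
 */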
5484 #if JS_HAS_GENERATORS
5485 case TOK_YIELD:
5486 if (!cg->inFunction()) {
5487 ReportCompileErrorNumber(cx, CG_TS(cg), pn, JSREPORT_ERROR,
5488 JSMSG_BAD_RETURN_OR_YIELD,
5489 js_yield_str);
5490 return JS_FALSE;
5491 }
5492 if (pn->pn_kid) {
5493 if (!js_EmitTree(cx, cg, pn->pn_kid))
5494 return JS_FALSE;
5495 } else {
5496 if (js_Emit1(cx, cg, JSOP_PUSH) < 0)
5497 return JS_FALSE;
5498 }
5499 if (pn->pn_hidden && js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
5500 return JS_FALSE;
5501 if (js_Emit1(cx, cg, JSOP_YIELD) < 0)
5502 return JS_FALSE;
5503 break;
5504 #endif
5506 case TOK_LC:
5507 {
5508 #if JS_HAS_XML_SUPPORT
5509 if (pn->pn_arity == PN_UNARY) {
5510 if (!js_EmitTree(cx, cg, pn->pn_kid))
5511 return JS_FALSE;
5512 if (js_Emit1(cx, cg, PN_OP(pn)) < 0)
5513 return JS_FALSE;
5514 break;
5515 }
5516 #endif
5518 JS_ASSERT(pn->pn_arity == PN_LIST);
5520 noteIndex = -1;
5521 tmp = CG_OFFSET(cg);
5522 if (pn->pn_xflags & PNX_NEEDBRACES) {
5523 noteIndex = js_NewSrcNote2(cx, cg, SRC_BRACE, 0);
5524 if (noteIndex < 0 || js_Emit1(cx, cg, JSOP_NOP) < 0)
5525 return JS_FALSE;
5526 }
5528 js_PushStatement(cg, &stmtInfo, STMT_BLOCK, top);
5530 JSParseNode *pnchild = pn->pn_head;
5531 if (pn->pn_xflags & PNX_FUNCDEFS) {
5532 /*
5533 * This block contains top-level function definitions. To ensure
5534 * that we emit the bytecode defining them before the rest of code
5535 * in the block we use a separate pass over functions. During the
5536 * main pass later the emitter will add JSOP_NOP with source notes
5537 * for the function to preserve the original function's position
5538 * when decompiling.
5539 *
5540 * Currently this is used only for functions, as compile-as-we-go
5541 * mode for scripts does not allow separate emitter passes.
5542 */
5543 JS_ASSERT(cg->inFunction());
5544 if (pn->pn_xflags & PNX_DESTRUCT) {
5545 /*
5546 * Assign the destructuring arguments before defining any
5547 * functions, see bug 419662.
5548 */
5549 JS_ASSERT(pnchild->pn_type == TOK_SEMI);
5550 JS_ASSERT(pnchild->pn_kid->pn_type == TOK_COMMA);
5551 if (!js_EmitTree(cx, cg, pnchild))
5552 return JS_FALSE;
5553 pnchild = pnchild->pn_next;
5554 }
5556 for (pn2 = pnchild; pn2; pn2 = pn2->pn_next) {
5557 if (pn2->pn_type == TOK_FUNCTION) {
5558 if (pn2->pn_op == JSOP_NOP) {
5559 if (!js_EmitTree(cx, cg, pn2))
5560 return JS_FALSE;
5561 } else {
5562 /*
5563 * JSOP_DEFFUN in a top-level block with function
5564 * definitions appears, for example, when "if (true)"
5565 * is optimized away from "if (true) function x() {}".
5566 * See bug 428424.
5567 */
5568 JS_ASSERT(pn2->pn_op == JSOP_DEFFUN);
5569 }
5570 }
5571 }
5572 }
5573 for (pn2 = pnchild; pn2; pn2 = pn2->pn_next) {
5574 if (!js_EmitTree(cx, cg, pn2))
5575 return JS_FALSE;
5576 }
5578 if (noteIndex >= 0 &&
5579 !js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0,
5580 CG_OFFSET(cg) - tmp)) {
5581 return JS_FALSE;
5582 }
5584 ok = js_PopStatementCG(cx, cg);
5585 break;
5586 }
5588 case TOK_SEQ:
5589 JS_ASSERT(pn->pn_arity == PN_LIST);
5590 js_PushStatement(cg, &stmtInfo, STMT_SEQ, top);
5591 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
5592 if (!js_EmitTree(cx, cg, pn2))
5593 return JS_FALSE;
5594 }
5595 ok = js_PopStatementCG(cx, cg);
5596 break;
5598 case TOK_SEMI:
5599 pn2 = pn->pn_kid;
5600 if (pn2) {
5601 /*
5602 * Top-level or called-from-a-native JS_Execute/EvaluateScript,
5603 * debugger, and eval frames may need the value of the ultimate
5604 * expression statement as the script's result, despite the fact
5605 * that it appears useless to the compiler.
5606 *
5607 * API users may also set the JSOPTION_NO_SCRIPT_RVAL option when
5608 * calling JS_Compile* to suppress JSOP_POPV.
5609 */
5610 useful = wantval = !(cg->flags & (TCF_IN_FUNCTION | TCF_NO_SCRIPT_RVAL));
5611 if (!useful) {
5612 if (!CheckSideEffects(cx, cg, pn2, &useful))
5613 return JS_FALSE;
5614 }
5616 /*
5617 * Don't eliminate apparently useless expressions if they are
5618 * labeled expression statements. The tc->topStmt->update test
5619 * catches the case where we are nesting in js_EmitTree for a
5620 * labeled compound statement.
5621 */
5622 if (!useful &&
5623 (!cg->topStmt ||
5624 cg->topStmt->type != STMT_LABEL ||
5625 cg->topStmt->update < CG_OFFSET(cg))) {
5626 CG_CURRENT_LINE(cg) = pn2->pn_pos.begin.lineno;
5627 if (!ReportCompileErrorNumber(cx, CG_TS(cg), pn2,
5628 JSREPORT_WARNING | JSREPORT_STRICT,
5629 JSMSG_USELESS_EXPR)) {
5630 return JS_FALSE;
5631 }
5632 } else {
5633 op = wantval ? JSOP_POPV : JSOP_POP;
5634 #if JS_HAS_DESTRUCTURING
5635 if (!wantval &&
5636 pn2->pn_type == TOK_ASSIGN &&
5637 !MaybeEmitGroupAssignment(cx, cg, op, pn2, &op)) {
5638 return JS_FALSE;
5639 }
5640 #endif
5641 if (op != JSOP_NOP) {
5642 /*
5643 * Specialize JSOP_SETPROP to JSOP_SETMETHOD to defer or
5644 * avoid null closure cloning. Do this only for assignment
5645 * statements that are not completion values wanted by a
5646 * script evaluator, to ensure that the joined function
5647 * can't escape directly.
5648 */
5649 if (!wantval &&
5650 PN_TYPE(pn2) == TOK_ASSIGN &&
5651 PN_OP(pn2) == JSOP_NOP &&
5652 PN_OP(pn2->pn_left) == JSOP_SETPROP &&
5653 PN_OP(pn2->pn_right) == JSOP_LAMBDA &&
5654 pn2->pn_right->pn_funbox->joinable()) {
5655 pn2->pn_left->pn_op = JSOP_SETMETHOD;
5656 }
5657 if (!js_EmitTree(cx, cg, pn2))
5658 return JS_FALSE;
5659 if (js_Emit1(cx, cg, op) < 0)
5660 return JS_FALSE;
5661 }
5662 }
5663 }
5664 break;
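/*
 * Illustrative sketch (editorial, hypothetical example; not from the
 * original source): a statement like
 *
 *   x == y;
 *
 * has no side effects and, when its value is not wanted as the
 * script's result, triggers the JSMSG_USELESS_EXPR strict warning
 * above instead of emitting code for the expression.
 */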
5666 case TOK_COLON:
5667 /* Emit an annotated nop so we know to decompile a label. */
5668 atom = pn->pn_atom;
5669 ale = cg->atomList.add(cg->parser, atom);
5670 if (!ale)
5671 return JS_FALSE;
5672 pn2 = pn->expr();
5673 noteType = (pn2->pn_type == TOK_LC ||
5674 (pn2->pn_type == TOK_LEXICALSCOPE &&
5675 pn2->expr()->pn_type == TOK_LC))
5676 ? SRC_LABELBRACE
5677 : SRC_LABEL;
5678 noteIndex = js_NewSrcNote2(cx, cg, noteType,
5679 (ptrdiff_t) ALE_INDEX(ale));
5680 if (noteIndex < 0 ||
5681 js_Emit1(cx, cg, JSOP_NOP) < 0) {
5682 return JS_FALSE;
5683 }
5685 /* Emit code for the labeled statement. */
5686 js_PushStatement(cg, &stmtInfo, STMT_LABEL, CG_OFFSET(cg));
5687 stmtInfo.label = atom;
5688 if (!js_EmitTree(cx, cg, pn2))
5689 return JS_FALSE;
5690 if (!js_PopStatementCG(cx, cg))
5691 return JS_FALSE;
5693 /* If the statement was compound, emit a note for the end brace. */
5694 if (noteType == SRC_LABELBRACE) {
5695 if (js_NewSrcNote(cx, cg, SRC_ENDBRACE) < 0 ||
5696 js_Emit1(cx, cg, JSOP_NOP) < 0) {
5697 return JS_FALSE;
5698 }
5699 }
5700 break;
5702 case TOK_COMMA:
5703 /*
5704 * Emit SRC_PCDELTA notes on each JSOP_POP between comma operands.
5705 * These notes help the decompiler bracket the bytecodes generated
5706 * from each sub-expression that follows a comma.
5707 */
5708 off = noteIndex = -1;
5709 for (pn2 = pn->pn_head; ; pn2 = pn2->pn_next) {
5710 if (!js_EmitTree(cx, cg, pn2))
5711 return JS_FALSE;
5712 tmp = CG_OFFSET(cg);
5713 if (noteIndex >= 0) {
5714 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, tmp-off))
5715 return JS_FALSE;
5716 }
5717 if (!pn2->pn_next)
5718 break;
5719 off = tmp;
5720 noteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
5721 if (noteIndex < 0 ||
5722 js_Emit1(cx, cg, JSOP_POP) < 0) {
5723 return JS_FALSE;
5724 }
5725 }
5726 break;
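/*
 * Illustrative sketch (editorial, hypothetical example; not from the
 * original source): for
 *
 *   (f(), g(), h())
 *
 * each operand but the last is followed by a JSOP_POP carrying a
 * SRC_PCDELTA note whose offset spans the next operand's bytecode.
 */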
5728 case TOK_ASSIGN:
5729 /*
5730 * Check left operand type and generate specialized code for it.
5731 * Specialize to avoid ECMA "reference type" values on the operand
5732 * stack, which impose pervasive runtime "GetValue" costs.
5733 */
5734 pn2 = pn->pn_left;
5735 atomIndex = (jsatomid) -1; /* quell GCC overwarning */
5736 switch (PN_TYPE(pn2)) {
5737 case TOK_NAME:
5738 if (!BindNameToSlot(cx, cg, pn2))
5739 return JS_FALSE;
5740 if (pn2->pn_cookie != FREE_UPVAR_COOKIE) {
5741 atomIndex = (jsatomid) pn2->pn_cookie;
5742 } else {
5743 ale = cg->atomList.add(cg->parser, pn2->pn_atom);
5744 if (!ale)
5745 return JS_FALSE;
5746 atomIndex = ALE_INDEX(ale);
5747 if (!pn2->isConst())
5748 EMIT_INDEX_OP(JSOP_BINDNAME, atomIndex);
5749 }
5750 break;
5751 case TOK_DOT:
5752 if (!js_EmitTree(cx, cg, pn2->expr()))
5753 return JS_FALSE;
5754 ale = cg->atomList.add(cg->parser, pn2->pn_atom);
5755 if (!ale)
5756 return JS_FALSE;
5757 atomIndex = ALE_INDEX(ale);
5758 break;
5759 case TOK_LB:
5760 JS_ASSERT(pn2->pn_arity == PN_BINARY);
5761 if (!js_EmitTree(cx, cg, pn2->pn_left))
5762 return JS_FALSE;
5763 if (!js_EmitTree(cx, cg, pn2->pn_right))
5764 return JS_FALSE;
5765 break;
5766 #if JS_HAS_DESTRUCTURING
5767 case TOK_RB:
5768 case TOK_RC:
5769 break;
5770 #endif
5771 case TOK_LP:
5772 if (!js_EmitTree(cx, cg, pn2))
5773 return JS_FALSE;
5774 break;
5775 #if JS_HAS_XML_SUPPORT
5776 case TOK_UNARYOP:
5777 JS_ASSERT(pn2->pn_op == JSOP_SETXMLNAME);
5778 if (!js_EmitTree(cx, cg, pn2->pn_kid))
5779 return JS_FALSE;
5780 if (js_Emit1(cx, cg, JSOP_BINDXMLNAME) < 0)
5781 return JS_FALSE;
5782 break;
5783 #endif
5784 default:
5785 JS_ASSERT(0);
5786 }
5788 op = PN_OP(pn);
5789 if (op != JSOP_NOP) {
5790 switch (pn2->pn_type) {
5791 case TOK_NAME:
5792 if (pn2->isConst()) {
5793 if (PN_OP(pn2) == JSOP_CALLEE) {
5794 if (js_Emit1(cx, cg, JSOP_CALLEE) < 0)
5795 return JS_FALSE;
5796 } else {
5797 EMIT_INDEX_OP(PN_OP(pn2), atomIndex);
5798 }
5799 } else if (PN_OP(pn2) == JSOP_SETNAME) {
5800 if (js_Emit1(cx, cg, JSOP_DUP) < 0)
5801 return JS_FALSE;
5802 EMIT_INDEX_OP(JSOP_GETXPROP, atomIndex);
5803 } else {
5804 JS_ASSERT(PN_OP(pn2) != JSOP_GETUPVAR);
5805 EMIT_UINT16_IMM_OP((PN_OP(pn2) == JSOP_SETGVAR)
5806 ? JSOP_GETGVAR
5807 : (PN_OP(pn2) == JSOP_SETARG)
5808 ? JSOP_GETARG
5809 : JSOP_GETLOCAL,
5810 atomIndex);
5811 }
5812 break;
5813 case TOK_DOT:
5814 if (js_Emit1(cx, cg, JSOP_DUP) < 0)
5815 return JS_FALSE;
5816 if (pn2->pn_atom == cx->runtime->atomState.lengthAtom) {
5817 if (js_Emit1(cx, cg, JSOP_LENGTH) < 0)
5818 return JS_FALSE;
5819 } else if (pn2->pn_atom == cx->runtime->atomState.protoAtom) {
5820 if (!EmitIndexOp(cx, JSOP_QNAMEPART, atomIndex, cg))
5821 return JS_FALSE;
5822 if (js_Emit1(cx, cg, JSOP_GETELEM) < 0)
5823 return JS_FALSE;
5824 } else {
5825 EMIT_INDEX_OP(JSOP_GETPROP, atomIndex);
5826 }
5827 break;
5828 case TOK_LB:
5829 case TOK_LP:
5830 #if JS_HAS_XML_SUPPORT
5831 case TOK_UNARYOP:
5832 #endif
5833 if (js_Emit1(cx, cg, JSOP_DUP2) < 0)
5834 return JS_FALSE;
5835 if (js_Emit1(cx, cg, JSOP_GETELEM) < 0)
5836 return JS_FALSE;
5837 break;
5838 default:;
5839 }
5840 }
5842 /* Now emit the right operand (it may affect the namespace). */
5843 if (!js_EmitTree(cx, cg, pn->pn_right))
5844 return JS_FALSE;
5846 /* If += etc., emit the binary operator with a decompiler note. */
5847 if (op != JSOP_NOP) {
5848 /*
5849 * Take care to avoid SRC_ASSIGNOP if the left-hand side is a const
5850 * declared in the current compilation unit, as in this case (just
5851 * a bit further below) we will avoid emitting the assignment op.
5852 */
5853 if (pn2->pn_type != TOK_NAME || !pn2->isConst()) {
5854 if (js_NewSrcNote(cx, cg, SRC_ASSIGNOP) < 0)
5855 return JS_FALSE;
5856 }
5857 if (js_Emit1(cx, cg, op) < 0)
5858 return JS_FALSE;
5859 }
5861 /* Left parts such as a.b.c and a[b].c need a decompiler note. */
5862 if (pn2->pn_type != TOK_NAME &&
5863 #if JS_HAS_DESTRUCTURING
5864 pn2->pn_type != TOK_RB &&
5865 pn2->pn_type != TOK_RC &&
5866 #endif
5867 js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0) {
5868 return JS_FALSE;
5869 }
5871 /* Finally, emit the specialized assignment bytecode. */
5872 switch (pn2->pn_type) {
5873 case TOK_NAME:
5874 if (pn2->isConst())
5875 break;
5876 /* FALL THROUGH */
5877 case TOK_DOT:
5878 EMIT_INDEX_OP(PN_OP(pn2), atomIndex);
5879 break;
5880 case TOK_LB:
5881 case TOK_LP:
5882 if (js_Emit1(cx, cg, JSOP_SETELEM) < 0)
5883 return JS_FALSE;
5884 break;
5885 #if JS_HAS_DESTRUCTURING
5886 case TOK_RB:
5887 case TOK_RC:
5888 if (!EmitDestructuringOps(cx, cg, JSOP_SETNAME, pn2))
5889 return JS_FALSE;
5890 break;
5891 #endif
5892 #if JS_HAS_XML_SUPPORT
5893 case TOK_UNARYOP:
5894 if (js_Emit1(cx, cg, JSOP_SETXMLNAME) < 0)
5895 return JS_FALSE;
5896 break;
5897 #endif
5898 default:
5899 JS_ASSERT(0);
5900 }
5901 break;
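/*
 * Worked example (illustrative, not from the original source): a compound
 * assignment `a.b += c` goes through the TOK_DOT arms above and emits,
 * schematically,
 *
 *   <a>  JSOP_DUP  JSOP_GETPROP "b"  <c>  JSOP_ADD(+SRC_ASSIGNOP)
 *   JSOP_SETPROP "b"(+SRC_PCBASE)
 *
 * the base is duplicated so the old value can be read, the binary op is
 * annotated for the decompiler, and the specialized set op stores the result.
 */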
5903 case TOK_HOOK:
5904 /* Emit the condition, then branch if false to the else part. */
5905 if (!js_EmitTree(cx, cg, pn->pn_kid1))
5906 return JS_FALSE;
5907 noteIndex = js_NewSrcNote(cx, cg, SRC_COND);
5908 if (noteIndex < 0)
5909 return JS_FALSE;
5910 beq = EmitJump(cx, cg, JSOP_IFEQ, 0);
5911 if (beq < 0 || !js_EmitTree(cx, cg, pn->pn_kid2))
5912 return JS_FALSE;
5914 /* Jump around else, fixup the branch, emit else, fixup jump. */
5915 jmp = EmitJump(cx, cg, JSOP_GOTO, 0);
5916 if (jmp < 0)
5917 return JS_FALSE;
5918 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, beq);
5920 /*
5921 * Because each branch pushes a single value, but our stack budgeting
5922 * analysis ignores branches, we now have to adjust cg->stackDepth to
5923 * ignore the value pushed by the first branch. Execution will follow
5924 * only one path, so we must decrement cg->stackDepth.
5925 *
5926 * Failing to do this will foil code, such as the try/catch/finally
5927 * exception handling code generator, that samples cg->stackDepth for
5928 * use at runtime (JSOP_SETSP), or in let expression and block code
5929 * generation, which must use the stack depth to compute local stack
5930 * indexes correctly.
5931 */
5932 JS_ASSERT(cg->stackDepth > 0);
5933 cg->stackDepth--;
5934 if (!js_EmitTree(cx, cg, pn->pn_kid3))
5935 return JS_FALSE;
5936 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
5937 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, jmp - beq))
5938 return JS_FALSE;
5939 break;
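/*
 * Stack-depth sketch (illustrative): `cond ? x : y` compiles to
 *
 *   <cond>  JSOP_IFEQ L1  <x>  JSOP_GOTO L2  L1: <y>  L2:
 *
 * Both <x> and <y> push one value but only one branch executes, so without
 * the cg->stackDepth-- above the budgeting analysis would count two pushes
 * for the single value the hook expression leaves behind.
 */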
5941 case TOK_OR:
5942 case TOK_AND:
5943 /*
5944 * JSOP_OR converts the operand on the stack to boolean, and if true,
5945 * leaves the original operand value on the stack and jumps; otherwise
5946 * it pops and falls into the next bytecode, which evaluates the right
5947 * operand. The jump goes around the right operand evaluation.
5948 *
5949 * JSOP_AND converts the operand on the stack to boolean, and if false,
5950 * leaves the original operand value on the stack and jumps; otherwise
5951 * it pops and falls into the right operand's bytecode.
5952 */
5953 if (pn->pn_arity == PN_BINARY) {
5954 if (!js_EmitTree(cx, cg, pn->pn_left))
5955 return JS_FALSE;
5956 top = EmitJump(cx, cg, JSOP_BACKPATCH_POP, 0);
5957 if (top < 0)
5958 return JS_FALSE;
5959 if (!js_EmitTree(cx, cg, pn->pn_right))
5960 return JS_FALSE;
5961 off = CG_OFFSET(cg);
5962 pc = CG_CODE(cg, top);
5963 CHECK_AND_SET_JUMP_OFFSET(cx, cg, pc, off - top);
5964 *pc = pn->pn_op;
5965 } else {
5966 JS_ASSERT(pn->pn_arity == PN_LIST);
5967 JS_ASSERT(pn->pn_head->pn_next->pn_next);
5969 /* Left-associative operator chain: avoid too much recursion. */
5970 pn2 = pn->pn_head;
5971 if (!js_EmitTree(cx, cg, pn2))
5972 return JS_FALSE;
5973 top = EmitJump(cx, cg, JSOP_BACKPATCH_POP, 0);
5974 if (top < 0)
5975 return JS_FALSE;
5977 /* Emit nodes between the head and the tail. */
5978 jmp = top;
5979 while ((pn2 = pn2->pn_next)->pn_next) {
5980 if (!js_EmitTree(cx, cg, pn2))
5981 return JS_FALSE;
5982 off = EmitJump(cx, cg, JSOP_BACKPATCH_POP, 0);
5983 if (off < 0)
5984 return JS_FALSE;
5985 if (!SetBackPatchDelta(cx, cg, CG_CODE(cg, jmp), off - jmp))
5986 return JS_FALSE;
5987 jmp = off;
5988 }
5990 if (!js_EmitTree(cx, cg, pn2))
5991 return JS_FALSE;
5993 pn2 = pn->pn_head;
5994 off = CG_OFFSET(cg);
5995 do {
5996 pc = CG_CODE(cg, top);
5997 tmp = GetJumpOffset(cg, pc);
5998 CHECK_AND_SET_JUMP_OFFSET(cx, cg, pc, off - top);
5999 *pc = pn->pn_op;
6000 top += tmp;
6001 } while ((pn2 = pn2->pn_next)->pn_next);
6002 }
6003 break;
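/*
 * Backpatch sketch (illustrative): `a || b || c` first emits
 *
 *   <a>  JSOP_BACKPATCH_POP j1  <b>  JSOP_BACKPATCH_POP j2  <c>
 *
 * and the do/while above then walks the chain, rewriting each backpatch op
 * to JSOP_OR and pointing its jump at the end of the whole expression.
 */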
6005 case TOK_PLUS:
6006 /* For TCF_IN_FUNCTION test, see TOK_RB concerning JSOP_NEWARRAY. */
6007 if (pn->pn_arity == PN_LIST && pn->pn_count < JS_BIT(16) &&
6008 cg->inFunction()) {
6009 /* Emit up to the first string literal conventionally. */
6010 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
6011 if (pn2->pn_type == TOK_STRING)
6012 break;
6013 if (!js_EmitTree(cx, cg, pn2))
6014 return JS_FALSE;
6015 if (pn2 != pn->pn_head && js_Emit1(cx, cg, JSOP_ADD) < 0)
6016 return JS_FALSE;
6017 }
6019 if (!pn2)
6020 break;
6022 /*
6023 * Having seen a string literal, we know statically that the rest
6024 * of the additions are string concatenation, so we emit them as a
6025 * single concatn. First, do string conversion on the result of the
6026 * preceding zero or more additions so that any side effects of
6027 * string conversion occur before the next operand begins.
6028 */
6029 if (pn2 == pn->pn_head) {
6030 index = 0;
6031 } else {
6032 if (js_Emit1(cx, cg, JSOP_OBJTOSTR) < 0)
6033 return JS_FALSE;
6034 index = 1;
6035 }
6037 for (; pn2; pn2 = pn2->pn_next, index++) {
6038 if (!js_EmitTree(cx, cg, pn2))
6039 return JS_FALSE;
6040 if (!pn2->isLiteral() && js_Emit1(cx, cg, JSOP_OBJTOSTR) < 0)
6041 return JS_FALSE;
6042 }
6044 EMIT_UINT16_IMM_OP(JSOP_CONCATN, index);
6045 break;
6046 }
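/*
 * Illustrative example: inside a function, `x + "-" + y` takes the fast path
 * above and emits roughly
 *
 *   <x>  JSOP_OBJTOSTR  JSOP_STRING "-"  <y>  JSOP_OBJTOSTR  JSOP_CONCATN 3
 *
 * with JSOP_OBJTOSTR only for operands that are not already literals, so all
 * string conversions happen in source order before the single concatenation.
 * When the fast path does not apply, TOK_PLUS falls through to the generic
 * binary-operator chain below.
 */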
6047 case TOK_BITOR:
6048 case TOK_BITXOR:
6049 case TOK_BITAND:
6050 case TOK_EQOP:
6051 case TOK_RELOP:
6052 case TOK_IN:
6053 case TOK_INSTANCEOF:
6054 case TOK_SHOP:
6055 case TOK_MINUS:
6056 case TOK_STAR:
6057 case TOK_DIVOP:
6058 if (pn->pn_arity == PN_LIST) {
6059 /* Left-associative operator chain: avoid too much recursion. */
6060 pn2 = pn->pn_head;
6061 if (!js_EmitTree(cx, cg, pn2))
6062 return JS_FALSE;
6063 op = PN_OP(pn);
6064 while ((pn2 = pn2->pn_next) != NULL) {
6065 if (!js_EmitTree(cx, cg, pn2))
6066 return JS_FALSE;
6067 if (js_Emit1(cx, cg, op) < 0)
6068 return JS_FALSE;
6069 }
6070 } else {
6071 #if JS_HAS_XML_SUPPORT
6072 uintN oldflags;
6074 case TOK_DBLCOLON:
6075 if (pn->pn_arity == PN_NAME) {
6076 if (!js_EmitTree(cx, cg, pn->expr()))
6077 return JS_FALSE;
6078 if (!EmitAtomOp(cx, pn, PN_OP(pn), cg))
6079 return JS_FALSE;
6080 break;
6081 }
6083 /*
6084 * Binary :: has a right operand that brackets arbitrary code,
6085 * possibly including a let (a = b) ... expression. We must clear
6086 * TCF_IN_FOR_INIT to avoid mis-compiling such beasts.
6087 */
6088 oldflags = cg->flags;
6089 cg->flags &= ~TCF_IN_FOR_INIT;
6090 #endif
6092 /* Binary operators that evaluate both operands unconditionally. */
6093 if (!js_EmitTree(cx, cg, pn->pn_left))
6094 return JS_FALSE;
6095 if (!js_EmitTree(cx, cg, pn->pn_right))
6096 return JS_FALSE;
6097 #if JS_HAS_XML_SUPPORT
6098 cg->flags |= oldflags & TCF_IN_FOR_INIT;
6099 #endif
6100 if (js_Emit1(cx, cg, PN_OP(pn)) < 0)
6101 return JS_FALSE;
6102 }
6103 break;
6105 case TOK_THROW:
6106 #if JS_HAS_XML_SUPPORT
6107 case TOK_AT:
6108 case TOK_DEFAULT:
6109 JS_ASSERT(pn->pn_arity == PN_UNARY);
6110 /* FALL THROUGH */
6111 #endif
6112 case TOK_UNARYOP:
6113 {
6114 uintN oldflags;
6116 /* Unary op, including unary +/-. */
6117 op = PN_OP(pn);
6118 #if JS_HAS_XML_SUPPORT
6119 if (op == JSOP_XMLNAME) {
6120 if (!EmitXMLName(cx, pn, op, cg))
6121 return JS_FALSE;
6122 break;
6123 }
6124 #endif
6125 pn2 = pn->pn_kid;
6127 if (op == JSOP_TYPEOF && pn2->pn_type != TOK_NAME)
6128 op = JSOP_TYPEOFEXPR;
6130 oldflags = cg->flags;
6131 cg->flags &= ~TCF_IN_FOR_INIT;
6132 if (!js_EmitTree(cx, cg, pn2))
6133 return JS_FALSE;
6134 cg->flags |= oldflags & TCF_IN_FOR_INIT;
6135 if (js_Emit1(cx, cg, op) < 0)
6136 return JS_FALSE;
6137 break;
6138 }
6140 case TOK_INC:
6141 case TOK_DEC:
6142 /* Emit lvalue-specialized code for ++/-- operators. */
6143 pn2 = pn->pn_kid;
6144 JS_ASSERT(pn2->pn_type != TOK_RP);
6145 op = PN_OP(pn);
6146 switch (pn2->pn_type) {
6147 default:
6148 JS_ASSERT(pn2->pn_type == TOK_NAME);
6149 pn2->pn_op = op;
6150 if (!BindNameToSlot(cx, cg, pn2))
6151 return JS_FALSE;
6152 op = PN_OP(pn2);
6153 if (op == JSOP_CALLEE) {
6154 if (js_Emit1(cx, cg, op) < 0)
6155 return JS_FALSE;
6156 } else if (pn2->pn_cookie != FREE_UPVAR_COOKIE) {
6157 atomIndex = (jsatomid) pn2->pn_cookie;
6158 EMIT_UINT16_IMM_OP(op, atomIndex);
6159 } else {
6160 JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
6161 if (!EmitAtomOp(cx, pn2, op, cg))
6162 return JS_FALSE;
6163 break;
6164 }
6165 if (pn2->isConst()) {
6166 if (js_Emit1(cx, cg, JSOP_POS) < 0)
6167 return JS_FALSE;
6168 op = PN_OP(pn);
6169 if (!(js_CodeSpec[op].format & JOF_POST)) {
6170 if (js_Emit1(cx, cg, JSOP_ONE) < 0)
6171 return JS_FALSE;
6172 op = (js_CodeSpec[op].format & JOF_INC) ? JSOP_ADD : JSOP_SUB;
6173 if (js_Emit1(cx, cg, op) < 0)
6174 return JS_FALSE;
6175 }
6176 }
6177 break;
6178 case TOK_DOT:
6179 if (!EmitPropOp(cx, pn2, op, cg, JS_FALSE))
6180 return JS_FALSE;
6181 break;
6182 case TOK_LB:
6183 if (!EmitElemOp(cx, pn2, op, cg))
6184 return JS_FALSE;
6185 break;
6186 case TOK_LP:
6187 if (!js_EmitTree(cx, cg, pn2))
6188 return JS_FALSE;
6189 if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
6190 CG_OFFSET(cg) - pn2->pn_offset) < 0) {
6191 return JS_FALSE;
6192 }
6193 if (js_Emit1(cx, cg, op) < 0)
6194 return JS_FALSE;
6195 break;
6196 #if JS_HAS_XML_SUPPORT
6197 case TOK_UNARYOP:
6198 JS_ASSERT(pn2->pn_op == JSOP_SETXMLNAME);
6199 if (!js_EmitTree(cx, cg, pn2->pn_kid))
6200 return JS_FALSE;
6201 if (js_Emit1(cx, cg, JSOP_BINDXMLNAME) < 0)
6202 return JS_FALSE;
6203 if (js_Emit1(cx, cg, op) < 0)
6204 return JS_FALSE;
6205 break;
6206 #endif
6207 }
6208 break;
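/*
 * Note (illustrative): when the operand is a const-declared name, the code
 * above emits JSOP_POS to coerce it to a number and, for the prefix forms,
 * JSOP_ONE plus JSOP_ADD or JSOP_SUB; no store is emitted, since assigning
 * to a const is a no-op, so `x++` leaves x unchanged.
 */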
6210 case TOK_DELETE:
6211 /*
6212 * Under ECMA 3, deleting a non-reference returns true -- but alas we
6213 * must evaluate the operand if it appears it might have side effects.
6214 */
6215 pn2 = pn->pn_kid;
6216 switch (pn2->pn_type) {
6217 case TOK_NAME:
6218 if (!BindNameToSlot(cx, cg, pn2))
6219 return JS_FALSE;
6220 op = PN_OP(pn2);
6221 if (op == JSOP_FALSE) {
6222 if (js_Emit1(cx, cg, op) < 0)
6223 return JS_FALSE;
6224 } else {
6225 if (!EmitAtomOp(cx, pn2, op, cg))
6226 return JS_FALSE;
6227 }
6228 break;
6229 case TOK_DOT:
6230 if (!EmitPropOp(cx, pn2, JSOP_DELPROP, cg, JS_FALSE))
6231 return JS_FALSE;
6232 break;
6233 #if JS_HAS_XML_SUPPORT
6234 case TOK_DBLDOT:
6235 if (!EmitElemOp(cx, pn2, JSOP_DELDESC, cg))
6236 return JS_FALSE;
6237 break;
6238 #endif
6239 case TOK_LB:
6240 if (!EmitElemOp(cx, pn2, JSOP_DELELEM, cg))
6241 return JS_FALSE;
6242 break;
6243 default:
6244 /*
6245 * If useless, just emit JSOP_TRUE; otherwise convert delete foo()
6246 * to foo(), true (a comma expression, requiring SRC_PCDELTA).
6247 */
6248 useful = JS_FALSE;
6249 if (!CheckSideEffects(cx, cg, pn2, &useful))
6250 return JS_FALSE;
6251 if (!useful) {
6252 off = noteIndex = -1;
6253 } else {
6254 if (pn2->pn_op == JSOP_SETCALL)
6255 pn2->pn_op = JSOP_CALL;
6256 if (!js_EmitTree(cx, cg, pn2))
6257 return JS_FALSE;
6258 off = CG_OFFSET(cg);
6259 noteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
6260 if (noteIndex < 0 || js_Emit1(cx, cg, JSOP_POP) < 0)
6261 return JS_FALSE;
6262 }
6263 if (js_Emit1(cx, cg, JSOP_TRUE) < 0)
6264 return JS_FALSE;
6265 if (noteIndex >= 0) {
6266 tmp = CG_OFFSET(cg);
6267 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, tmp-off))
6268 return JS_FALSE;
6269 }
6270 }
6271 break;
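/*
 * Illustrative example: `delete f()` has a potentially effectful operand, so
 * the default arm emits `<f()>  JSOP_POP(+SRC_PCDELTA)  JSOP_TRUE`, i.e. the
 * comma expression `f(), true`, whereas an operand with no side effects, as
 * in `delete 1`, collapses to a bare JSOP_TRUE.
 */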
6273 #if JS_HAS_XML_SUPPORT
6274 case TOK_FILTER:
6275 if (!js_EmitTree(cx, cg, pn->pn_left))
6276 return JS_FALSE;
6277 jmp = EmitJump(cx, cg, JSOP_FILTER, 0);
6278 if (jmp < 0)
6279 return JS_FALSE;
6280 top = js_Emit1(cx, cg, JSOP_TRACE);
6281 if (top < 0)
6282 return JS_FALSE;
6283 if (!js_EmitTree(cx, cg, pn->pn_right))
6284 return JS_FALSE;
6285 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
6286 if (EmitJump(cx, cg, JSOP_ENDFILTER, top - CG_OFFSET(cg)) < 0)
6287 return JS_FALSE;
6288 break;
6289 #endif
6291 case TOK_DOT:
6292 /*
6293 * Pop a stack operand, convert it to object, get a property named by
6294 * this bytecode's immediate-indexed atom operand, and push its value
6295 * (not a reference to it).
6296 */
6297 ok = EmitPropOp(cx, pn, PN_OP(pn), cg, JS_FALSE);
6298 break;
6300 case TOK_LB:
6301 #if JS_HAS_XML_SUPPORT
6302 case TOK_DBLDOT:
6303 #endif
6304 /*
6305 * Pop two operands, convert the left one to object and the right one
6306 * to property name (atom or tagged int), get the named property, and
6307 * push its value. Set the "obj" register to the result of ToObject
6308 * on the left operand.
6309 */
6310 ok = EmitElemOp(cx, pn, PN_OP(pn), cg);
6311 break;
6313 case TOK_NEW:
6314 case TOK_LP:
6315 {
6316 bool callop = (PN_TYPE(pn) == TOK_LP);
6318 /*
6319 * Emit callable invocation or operator new (constructor call) code.
6320 * First, emit code for the left operand to evaluate the callable or
6321 * constructable object expression.
6322 *
6323 * For operator new applied to other expressions than E4X ones, we emit
6324 * JSOP_GETPROP instead of JSOP_CALLPROP, etc. This is necessary to
6325 * interpose the lambda-initialized method read barrier -- see the code
6326 * in jsops.cpp for JSOP_LAMBDA followed by JSOP_{SET,INIT}PROP.
6327 *
6328 * Then (or in a call case that has no explicit reference-base object)
6329 * we emit JSOP_NULL as a placeholder local GC root to hold the |this|
6330 * parameter: in the operator new case, the newborn instance; in the
6331 * base-less call case, a cookie meaning "use the global object as the
6332 * |this| value" (or in ES5 strict mode, "use undefined", so we should
6333 * use JSOP_PUSH instead of JSOP_NULL -- see bug 514570).
6334 */
6335 pn2 = pn->pn_head;
6336 switch (pn2->pn_type) {
6337 case TOK_NAME:
6338 if (!EmitNameOp(cx, cg, pn2, callop))
6339 return JS_FALSE;
6340 break;
6341 case TOK_DOT:
6342 if (!EmitPropOp(cx, pn2, PN_OP(pn2), cg, callop))
6343 return JS_FALSE;
6344 break;
6345 case TOK_LB:
6346 JS_ASSERT(pn2->pn_op == JSOP_GETELEM);
6347 if (!EmitElemOp(cx, pn2, callop ? JSOP_CALLELEM : JSOP_GETELEM, cg))
6348 return JS_FALSE;
6349 break;
6350 case TOK_UNARYOP:
6351 #if JS_HAS_XML_SUPPORT
6352 if (pn2->pn_op == JSOP_XMLNAME) {
6353 if (!EmitXMLName(cx, pn2, JSOP_CALLXMLNAME, cg))
6354 return JS_FALSE;
6355 callop = true; /* suppress JSOP_NULL after */
6356 break;
6357 }
6358 #endif
6359 /* FALL THROUGH */
6360 default:
6361 /*
6362 * Push null as a placeholder for the global object, per ECMA-262
6363 * 11.2.3 step 6.
6364 */
6365 if (!js_EmitTree(cx, cg, pn2))
6366 return JS_FALSE;
6367 callop = false; /* trigger JSOP_NULL after */
6368 break;
6369 }
6370 if (!callop && js_Emit1(cx, cg, JSOP_NULL) < 0)
6371 return JS_FALSE;
6373 /* Remember start of callable-object bytecode for decompilation hint. */
6374 off = top;
6376 /*
6377 * Emit code for each argument in order, then emit the JSOP_*CALL or
6378 * JSOP_NEW bytecode with a two-byte immediate telling how many args
6379 * were pushed on the operand stack.
6380 */
6381 uintN oldflags = cg->flags;
6382 cg->flags &= ~TCF_IN_FOR_INIT;
6383 for (pn3 = pn2->pn_next; pn3; pn3 = pn3->pn_next) {
6384 if (!js_EmitTree(cx, cg, pn3))
6385 return JS_FALSE;
6386 }
6387 cg->flags |= oldflags & TCF_IN_FOR_INIT;
6388 if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - off) < 0)
6389 return JS_FALSE;
6391 argc = pn->pn_count - 1;
6392 if (js_Emit3(cx, cg, PN_OP(pn), ARGC_HI(argc), ARGC_LO(argc)) < 0)
6393 return JS_FALSE;
6394 if (PN_OP(pn) == JSOP_CALL) {
6395 /* Add a trace hint opcode for recursion. */
6396 if (js_Emit1(cx, cg, JSOP_TRACE) < 0)
6397 return JS_FALSE;
6398 }
6399 if (PN_OP(pn) == JSOP_EVAL)
6400 EMIT_UINT16_IMM_OP(JSOP_LINENO, pn->pn_pos.begin.lineno);
6401 break;
6402 }
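/*
 * Emission sketch (illustrative): a simple call `f(x, y)` produces roughly
 *
 *   JSOP_CALLNAME "f"  <x>  <y>  JSOP_CALL 2(+SRC_PCBASE)  JSOP_TRACE
 *
 * while `new F(x)` evaluates F with a get-style op, pushes JSOP_NULL as the
 * placeholder for the newborn |this|, and ends with JSOP_NEW instead of
 * JSOP_CALL (so no trace hint is emitted).
 */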
6404 case TOK_LEXICALSCOPE:
6405 {
6406 JSObjectBox *objbox;
6407 uintN count;
6409 objbox = pn->pn_objbox;
6410 js_PushBlockScope(cg, &stmtInfo, objbox->object, CG_OFFSET(cg));
6412 /*
6413 * If this lexical scope is not for a catch block, let block or let
6414 * expression, or any kind of for loop (where the scope starts in the
6415 * head after the first part if for (;;), else in the body if for-in);
6416 * and if our container is top-level but not a function body, or else
6417 * a block statement; then emit a SRC_BRACE note. All other container
6418 * statements get braces by default from the decompiler.
6419 */
6420 noteIndex = -1;
6421 type = PN_TYPE(pn->expr());
6422 if (type != TOK_CATCH && type != TOK_LET && type != TOK_FOR &&
6423 (!(stmt = stmtInfo.down)
6424 ? !cg->inFunction()
6425 : stmt->type == STMT_BLOCK)) {
6426 #if defined DEBUG_brendan || defined DEBUG_mrbkap
6427 /* There must be no source note already output for the next op. */
6428 JS_ASSERT(CG_NOTE_COUNT(cg) == 0 ||
6429 CG_LAST_NOTE_OFFSET(cg) != CG_OFFSET(cg) ||
6430 !GettableNoteForNextOp(cg));
6431 #endif
6432 noteIndex = js_NewSrcNote2(cx, cg, SRC_BRACE, 0);
6433 if (noteIndex < 0)
6434 return JS_FALSE;
6435 }
6437 JS_ASSERT(CG_OFFSET(cg) == top);
6438 if (!EmitEnterBlock(cx, pn, cg))
6439 return JS_FALSE;
6441 if (!js_EmitTree(cx, cg, pn->pn_expr))
6442 return JS_FALSE;
6444 op = PN_OP(pn);
6445 if (op == JSOP_LEAVEBLOCKEXPR) {
6446 if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
6447 return JS_FALSE;
6448 } else {
6449 if (noteIndex >= 0 &&
6450 !js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0,
6451 CG_OFFSET(cg) - top)) {
6452 return JS_FALSE;
6453 }
6454 }
6456 /* Emit the JSOP_LEAVEBLOCK or JSOP_LEAVEBLOCKEXPR opcode. */
6457 count = OBJ_BLOCK_COUNT(cx, objbox->object);
6458 EMIT_UINT16_IMM_OP(op, count);
6460 ok = js_PopStatementCG(cx, cg);
6461 break;
6462 }
6464 #if JS_HAS_BLOCK_SCOPE
6465 case TOK_LET:
6466 /* Let statements have their variable declarations on the left. */
6467 if (pn->pn_arity == PN_BINARY) {
6468 pn2 = pn->pn_right;
6469 pn = pn->pn_left;
6470 } else {
6471 pn2 = NULL;
6472 }
6474 /* Non-null pn2 means that pn is the variable list from a let head. */
6475 JS_ASSERT(pn->pn_arity == PN_LIST);
6476 if (!EmitVariables(cx, cg, pn, pn2 != NULL, &noteIndex))
6477 return JS_FALSE;
6479 /* Thus non-null pn2 is the body of the let block or expression. */
6480 tmp = CG_OFFSET(cg);
6481 if (pn2 && !js_EmitTree(cx, cg, pn2))
6482 return JS_FALSE;
6484 if (noteIndex >= 0 &&
6485 !js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0,
6486 CG_OFFSET(cg) - tmp)) {
6487 return JS_FALSE;
6488 }
6489 break;
6490 #endif /* JS_HAS_BLOCK_SCOPE */
6492 #if JS_HAS_GENERATORS
6493 case TOK_ARRAYPUSH: {
6494 jsint slot;
6496 /*
6497 * The array object's stack index is in cg->arrayCompDepth. See below
6498 * under the array initialiser code generator for array comprehension
6499 * special casing.
6500 */
6501 if (!js_EmitTree(cx, cg, pn->pn_kid))
6502 return JS_FALSE;
6503 slot = AdjustBlockSlot(cx, cg, cg->arrayCompDepth);
6504 if (slot < 0)
6505 return JS_FALSE;
6506 EMIT_UINT16_IMM_OP(PN_OP(pn), slot);
6507 break;
6508 }
6509 #endif
6511 case TOK_RB:
6512 #if JS_HAS_GENERATORS
6513 case TOK_ARRAYCOMP:
6514 #endif
6515 /*
6516 * Emit code for [a, b, c] that is equivalent to constructing a new
6517 * array and in source order evaluating each element value and adding
6518 * it to the array, without invoking latent setters. We use the
6519 * JSOP_NEWINIT and JSOP_INITELEM bytecodes to ignore setters and to
6520 * avoid dup'ing and popping the array as each element is added, as
6521 * JSOP_SETELEM/JSOP_SETPROP would do.
6522 *
6523 * If no sharp variable is defined, the initializer is not for an array
6524 * comprehension, the initializer is not overlarge, and the initializer
6525 * is not in global code (whose stack growth cannot be precisely modeled
6526 * due to the need to reserve space for global variables and regular
6527 * expressions), use JSOP_NEWARRAY to minimize opcodes and to create the
6528 * array using a fast, all-at-once process rather than a slow, element-
6529 * by-element process.
6530 */
6531 #if JS_HAS_SHARP_VARS
6532 sharpnum = -1;
6533 do_emit_array:
6534 #endif
6536 op = (JS_LIKELY(pn->pn_count < JS_BIT(16)) && cg->inFunction())
6537 ? JSOP_NEWARRAY
6538 : JSOP_NEWINIT;
6540 #if JS_HAS_GENERATORS
6541 if (pn->pn_type == TOK_ARRAYCOMP)
6542 op = JSOP_NEWINIT;
6543 #endif
6544 #if JS_HAS_SHARP_VARS
6545 JS_ASSERT_IF(sharpnum >= 0, cg->hasSharps());
6546 if (cg->hasSharps())
6547 op = JSOP_NEWINIT;
6548 #endif
6550 if (op == JSOP_NEWINIT && !EmitNewInit(cx, cg, JSProto_Array, pn, sharpnum))
6551 return JS_FALSE;
6553 #if JS_HAS_GENERATORS
6554 if (pn->pn_type == TOK_ARRAYCOMP) {
6555 uintN saveDepth;
6557 /*
6558 * Pass the new array's stack index to the TOK_ARRAYPUSH case via
6559 * cg->arrayCompDepth, then simply traverse the TOK_FOR node and
6560 * its kids under pn2 to generate this comprehension.
6561 */
6562 JS_ASSERT(cg->stackDepth > 0);
6563 saveDepth = cg->arrayCompDepth;
6564 cg->arrayCompDepth = (uint32) (cg->stackDepth - 1);
6565 if (!js_EmitTree(cx, cg, pn->pn_head))
6566 return JS_FALSE;
6567 cg->arrayCompDepth = saveDepth;
6569 /* Emit the usual op needed for decompilation. */
6570 if (!EmitEndInit(cx, cg, 1))
6571 return JS_FALSE;
6572 break;
6573 }
6574 #endif /* JS_HAS_GENERATORS */
6576 pn2 = pn->pn_head;
6577 for (atomIndex = 0; pn2; atomIndex++, pn2 = pn2->pn_next) {
6578 if (op == JSOP_NEWINIT && !EmitNumberOp(cx, atomIndex, cg))
6579 return JS_FALSE;
6580 if (pn2->pn_type == TOK_COMMA && pn2->pn_arity == PN_NULLARY) {
6581 if (js_Emit1(cx, cg, JSOP_HOLE) < 0)
6582 return JS_FALSE;
6583 } else {
6584 if (!js_EmitTree(cx, cg, pn2))
6585 return JS_FALSE;
6586 }
6587 if (op == JSOP_NEWINIT && js_Emit1(cx, cg, JSOP_INITELEM) < 0)
6588 return JS_FALSE;
6589 }
6590 JS_ASSERT(atomIndex == pn->pn_count);
6592 if (pn->pn_xflags & PNX_ENDCOMMA) {
6593 /* Emit a source note so we know to decompile an extra comma. */
6594 if (js_NewSrcNote(cx, cg, SRC_CONTINUE) < 0)
6595 return JS_FALSE;
6596 }
6598 if (op == JSOP_NEWINIT) {
6599 /*
6600 * Emit an op to finish the array and, secondarily, to aid in sharp
6601 * array cleanup (if JS_HAS_SHARP_VARS) and decompilation.
6602 */
6603 if (!EmitEndInit(cx, cg, atomIndex))
6604 return JS_FALSE;
6605 break;
6606 }
6608 JS_ASSERT(atomIndex < JS_BIT(16));
6609 EMIT_UINT16_IMM_OP(JSOP_NEWARRAY, atomIndex);
6610 break;
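/*
 * Illustrative example: inside a function, `[a, , b]` emits
 *
 *   <a>  JSOP_HOLE  <b>  JSOP_NEWARRAY 3
 *
 * whereas global code, sharps, and comprehensions use the slow form:
 * JSOP_NEWINIT, then an index/value pair plus JSOP_INITELEM per element,
 * finished by EmitEndInit above.
 */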
6612 case TOK_RC: {
6613 #if JS_HAS_SHARP_VARS
6614 sharpnum = -1;
6615 do_emit_object:
6616 #endif
6617 #if JS_HAS_DESTRUCTURING_SHORTHAND
6618 if (pn->pn_xflags & PNX_DESTRUCT) {
6619 ReportCompileErrorNumber(cx, CG_TS(cg), pn, JSREPORT_ERROR, JSMSG_BAD_OBJECT_INIT);
6620 return JS_FALSE;
6621 }
6622 #endif
6623 /*
6624 * Emit code for {p:a, '%q':b, 2:c} that is equivalent to constructing
6625 * a new object and in source order evaluating each property value and
6626 * adding the property to the object, without invoking latent setters.
6627 * We use the JSOP_NEWINIT and JSOP_INITELEM/JSOP_INITPROP bytecodes to
6628 * ignore setters and to avoid dup'ing and popping the object as each
6629 * property is added, as JSOP_SETELEM/JSOP_SETPROP would do.
6630 */
6631 if (!EmitNewInit(cx, cg, JSProto_Object, pn, sharpnum))
6632 return JS_FALSE;
6634 uintN methodInits = 0, slowMethodInits = 0;
6635 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
6636 /* Emit an index for t[2] for later consumption by JSOP_INITELEM. */
6637 pn3 = pn2->pn_left;
6638 if (pn3->pn_type == TOK_NUMBER) {
6639 if (!EmitNumberOp(cx, pn3->pn_dval, cg))
6640 return JS_FALSE;
6641 }
6643 /* Emit code for the property initializer. */
6644 if (!js_EmitTree(cx, cg, pn2->pn_right))
6645 return JS_FALSE;
6647 op = PN_OP(pn2);
6648 if (op == JSOP_GETTER || op == JSOP_SETTER) {
6649 if (js_Emit1(cx, cg, op) < 0)
6650 return JS_FALSE;
6651 }
6653 /* Annotate JSOP_INITELEM so we decompile 2:c and not just c. */
6654 if (pn3->pn_type == TOK_NUMBER) {
6655 if (js_NewSrcNote(cx, cg, SRC_INITPROP) < 0)
6656 return JS_FALSE;
6657 if (js_Emit1(cx, cg, JSOP_INITELEM) < 0)
6658 return JS_FALSE;
6659 } else {
6660 JS_ASSERT(pn3->pn_type == TOK_NAME ||
6661 pn3->pn_type == TOK_STRING);
6662 ale = cg->atomList.add(cg->parser, pn3->pn_atom);
6663 if (!ale)
6664 return JS_FALSE;
6666 /* Check whether we can optimize to JSOP_INITMETHOD. */
6667 JSParseNode *init = pn2->pn_right;
6668 bool lambda = PN_OP(init) == JSOP_LAMBDA;
6669 if (lambda)
6670 ++methodInits;
6671 if (op == JSOP_INITPROP && lambda && init->pn_funbox->joinable())
6672 {
6673 op = JSOP_INITMETHOD;
6674 pn2->pn_op = uint8(op);
6675 } else {
6676 op = JSOP_INITPROP;
6677 if (lambda)
6678 ++slowMethodInits;
6679 }
6681 EMIT_INDEX_OP(op, ALE_INDEX(ale));
6682 }
6683 }
6685 if (cg->funbox && cg->funbox->shouldUnbrand(methodInits, slowMethodInits)) {
6686 if (js_Emit1(cx, cg, JSOP_UNBRAND) < 0)
6687 return JS_FALSE;
6688 }
6689 if (!EmitEndInit(cx, cg, pn->pn_count))
6690 return JS_FALSE;
6691 break;
6692 }
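/*
 * Illustrative example: `{p: 1, 2: c}` emits roughly
 *
 *   JSOP_NEWINIT  <1> JSOP_INITPROP "p"  2 <c> JSOP_INITELEM(+SRC_INITPROP)
 *
 * and a joinable lambda value, e.g. `{m: function () {}}`, is written with
 * JSOP_INITMETHOD so the method read barrier can be interposed later.
 */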
6694 #if JS_HAS_SHARP_VARS
6695 case TOK_DEFSHARP:
6696 JS_ASSERT(cg->hasSharps());
6697 sharpnum = pn->pn_num;
6698 pn = pn->pn_kid;
6699 if (pn->pn_type == TOK_RB)
6700 goto do_emit_array;
6701 # if JS_HAS_GENERATORS
6702 if (pn->pn_type == TOK_ARRAYCOMP)
6703 goto do_emit_array;
6704 # endif
6705 if (pn->pn_type == TOK_RC)
6706 goto do_emit_object;
6708 if (!js_EmitTree(cx, cg, pn))
6709 return JS_FALSE;
6710 EMIT_UINT16PAIR_IMM_OP(JSOP_DEFSHARP, cg->sharpSlotBase, (jsatomid) sharpnum);
6711 break;
6713 case TOK_USESHARP:
6714 JS_ASSERT(cg->hasSharps());
6715 EMIT_UINT16PAIR_IMM_OP(JSOP_USESHARP, cg->sharpSlotBase, (jsatomid) pn->pn_num);
6716 break;
6717 #endif /* JS_HAS_SHARP_VARS */
6719 case TOK_NAME:
6720 /*
6721 * Cope with a left-over function definition that was replaced by a use
6722 * of a later function definition of the same name. See FunctionDef and
6723 * MakeDefIntoUse in jsparse.cpp.
6724 */
6725 if (pn->pn_op == JSOP_NOP)
6726 return JS_TRUE;
6727 if (!EmitNameOp(cx, cg, pn, JS_FALSE))
6728 return JS_FALSE;
6729 break;
6731 #if JS_HAS_XML_SUPPORT
6732 case TOK_XMLATTR:
6733 case TOK_XMLSPACE:
6734 case TOK_XMLTEXT:
6735 case TOK_XMLCDATA:
6736 case TOK_XMLCOMMENT:
6737 #endif
6738 case TOK_STRING:
6739 ok = EmitAtomOp(cx, pn, PN_OP(pn), cg);
6740 break;
6742 case TOK_NUMBER:
6743 ok = EmitNumberOp(cx, pn->pn_dval, cg);
6744 break;
6746 case TOK_REGEXP: {
6747 /*
6748 * If the regexp's script is one-shot and the regexp is not used in a
6749 * loop, we can avoid the extra fork-on-exec costs of JSOP_REGEXP by
6750 * selecting JSOP_OBJECT. Otherwise, to avoid incorrect proto, parent,
6751 * and lastIndex sharing, select JSOP_REGEXP.
6752 */
6753 JS_ASSERT(pn->pn_op == JSOP_REGEXP);
6754 bool singleton = !cg->fun && cg->compileAndGo();
6755 if (singleton) {
6756 for (JSStmtInfo *stmt = cg->topStmt; stmt; stmt = stmt->down) {
6757 if (STMT_IS_LOOP(stmt)) {
6758 singleton = false;
6759 break;
6760 }
6761 }
6762 }
6763 if (singleton) {
6764 ok = EmitObjectOp(cx, pn->pn_objbox, JSOP_OBJECT, cg);
6765 } else {
6766 ok = EmitIndexOp(cx, JSOP_REGEXP,
6767 cg->regexpList.index(pn->pn_objbox),
6768 cg);
6769 }
6770 break;
6771 }
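/*
 * Illustrative note: in one-shot global compile-and-go code, a regexp
 * literal outside any loop evaluates exactly once, so JSOP_OBJECT can push
 * the compiler-created object itself; a literal in a function or in a loop
 * must go through JSOP_REGEXP and cg->regexpList so each evaluation can get
 * a clone with its own lastIndex.
 */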
6773 #if JS_HAS_XML_SUPPORT
6774 case TOK_ANYNAME:
6775 #endif
6776 case TOK_PRIMARY:
6777 if (js_Emit1(cx, cg, PN_OP(pn)) < 0)
6778 return JS_FALSE;
6779 break;
6781 #if JS_HAS_DEBUGGER_KEYWORD
6782 case TOK_DEBUGGER:
6783 if (js_Emit1(cx, cg, JSOP_DEBUGGER) < 0)
6784 return JS_FALSE;
6785 break;
6786 #endif /* JS_HAS_DEBUGGER_KEYWORD */
6788 #if JS_HAS_XML_SUPPORT
6789 case TOK_XMLELEM:
6790 case TOK_XMLLIST:
6791 if (pn->pn_op == JSOP_XMLOBJECT) {
6792 ok = EmitObjectOp(cx, pn->pn_objbox, PN_OP(pn), cg);
6793 break;
6794 }
6796 JS_ASSERT(PN_TYPE(pn) == TOK_XMLLIST || pn->pn_count != 0);
6797 switch (pn->pn_head ? PN_TYPE(pn->pn_head) : TOK_XMLLIST) {
6798 case TOK_XMLETAGO:
6799 JS_ASSERT(0);
6800 /* FALL THROUGH */
6801 case TOK_XMLPTAGC:
6802 case TOK_XMLSTAGO:
6803 break;
6804 default:
6805 if (js_Emit1(cx, cg, JSOP_STARTXML) < 0)
6806 return JS_FALSE;
6807 }
6809 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
6810 if (pn2->pn_type == TOK_LC &&
6811 js_Emit1(cx, cg, JSOP_STARTXMLEXPR) < 0) {
6812 return JS_FALSE;
6813 }
6814 if (!js_EmitTree(cx, cg, pn2))
6815 return JS_FALSE;
6816 if (pn2 != pn->pn_head && js_Emit1(cx, cg, JSOP_ADD) < 0)
6817 return JS_FALSE;
6818 }
6820 if (pn->pn_xflags & PNX_XMLROOT) {
6821 if (pn->pn_count == 0) {
6822 JS_ASSERT(pn->pn_type == TOK_XMLLIST);
6823 atom = cx->runtime->atomState.emptyAtom;
6824 ale = cg->atomList.add(cg->parser, atom);
6825 if (!ale)
6826 return JS_FALSE;
6827 EMIT_INDEX_OP(JSOP_STRING, ALE_INDEX(ale));
6828 }
6829 if (js_Emit1(cx, cg, PN_OP(pn)) < 0)
6830 return JS_FALSE;
6831 }
6832 #ifdef DEBUG
6833 else
6834 JS_ASSERT(pn->pn_count != 0);
6835 #endif
6836 break;
6838 case TOK_XMLPTAGC:
6839 if (pn->pn_op == JSOP_XMLOBJECT) {
6840 ok = EmitObjectOp(cx, pn->pn_objbox, PN_OP(pn), cg);
6841 break;
6842 }
6843 /* FALL THROUGH */
6845 case TOK_XMLSTAGO:
6846 case TOK_XMLETAGO:
6847 {
6848 uint32 i;
6850 if (js_Emit1(cx, cg, JSOP_STARTXML) < 0)
6851 return JS_FALSE;
6853 ale = cg->atomList.add(cg->parser,
6854 (pn->pn_type == TOK_XMLETAGO)
6855 ? cx->runtime->atomState.etagoAtom
6856 : cx->runtime->atomState.stagoAtom);
6857 if (!ale)
6858 return JS_FALSE;
6859 EMIT_INDEX_OP(JSOP_STRING, ALE_INDEX(ale));
6861 JS_ASSERT(pn->pn_count != 0);
6862 pn2 = pn->pn_head;
6863 if (pn2->pn_type == TOK_LC && js_Emit1(cx, cg, JSOP_STARTXMLEXPR) < 0)
6864 return JS_FALSE;
6865 if (!js_EmitTree(cx, cg, pn2))
6866 return JS_FALSE;
6867 if (js_Emit1(cx, cg, JSOP_ADD) < 0)
6868 return JS_FALSE;
6870 for (pn2 = pn2->pn_next, i = 0; pn2; pn2 = pn2->pn_next, i++) {
6871 if (pn2->pn_type == TOK_LC &&
6872 js_Emit1(cx, cg, JSOP_STARTXMLEXPR) < 0) {
6873 return JS_FALSE;
6874 }
6875 if (!js_EmitTree(cx, cg, pn2))
6876 return JS_FALSE;
6877 if ((i & 1) && pn2->pn_type == TOK_LC) {
6878 if (js_Emit1(cx, cg, JSOP_TOATTRVAL) < 0)
6879 return JS_FALSE;
6880 }
6881 if (js_Emit1(cx, cg,
6882 (i & 1) ? JSOP_ADDATTRVAL : JSOP_ADDATTRNAME) < 0) {
6883 return JS_FALSE;
6884 }
6885 }
6887 ale = cg->atomList.add(cg->parser,
6888 (pn->pn_type == TOK_XMLPTAGC)
6889 ? cx->runtime->atomState.ptagcAtom
6890 : cx->runtime->atomState.tagcAtom);
6891 if (!ale)
6892 return JS_FALSE;
6893 EMIT_INDEX_OP(JSOP_STRING, ALE_INDEX(ale));
6894 if (js_Emit1(cx, cg, JSOP_ADD) < 0)
6895 return JS_FALSE;
6897 if ((pn->pn_xflags & PNX_XMLROOT) && js_Emit1(cx, cg, PN_OP(pn)) < 0)
6898 return JS_FALSE;
6899 break;
6900 }
6902 case TOK_XMLNAME:
6903 if (pn->pn_arity == PN_LIST) {
6904 JS_ASSERT(pn->pn_count != 0);
6905 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
6906 if (pn2->pn_type == TOK_LC &&
6907 js_Emit1(cx, cg, JSOP_STARTXMLEXPR) < 0) {
6908 return JS_FALSE;
6909 }
6910 if (!js_EmitTree(cx, cg, pn2))
6911 return JS_FALSE;
6912 if (pn2 != pn->pn_head && js_Emit1(cx, cg, JSOP_ADD) < 0)
6913 return JS_FALSE;
6914 }
6915 } else {
6916 JS_ASSERT(pn->pn_arity == PN_NULLARY);
6917 ok = (pn->pn_op == JSOP_OBJECT)
6918 ? EmitObjectOp(cx, pn->pn_objbox, PN_OP(pn), cg)
6919 : EmitAtomOp(cx, pn, PN_OP(pn), cg);
6920 }
6921 break;
6923 case TOK_XMLPI:
6924 ale = cg->atomList.add(cg->parser, pn->pn_atom2);
6925 if (!ale)
6926 return JS_FALSE;
6927 if (!EmitIndexOp(cx, JSOP_QNAMEPART, ALE_INDEX(ale), cg))
6928 return JS_FALSE;
6929 if (!EmitAtomOp(cx, pn, JSOP_XMLPI, cg))
6930 return JS_FALSE;
6931 break;
6932 #endif /* JS_HAS_XML_SUPPORT */
6934 default:
6935 JS_ASSERT(0);
6936 }
6938 if (ok && --cg->emitLevel == 0) {
6939 if (cg->spanDeps)
6940 ok = OptimizeSpanDeps(cx, cg);
6941 if (!UpdateLineNumberNotes(cx, cg, pn->pn_pos.end.lineno))
6942 return JS_FALSE;
6943 }
6945 return ok;
6946 }
6948 /*
6949 * We should try to get rid of offsetBias (always 0 or 1, where 1 is
6950 * JSOP_{NOP,POP}_LENGTH), which is used only by SRC_FOR and SRC_DECL.
6951 */
6952 JS_FRIEND_DATA(JSSrcNoteSpec) js_SrcNoteSpec[] = {
6953 {"null", 0, 0, 0},
6954 {"if", 0, 0, 0},
6955 {"if-else", 2, 0, 1},
6956 {"for", 3, 1, 1},
6957 {"while", 1, 0, 1},
6958 {"continue", 0, 0, 0},
6959 {"decl", 1, 1, 1},
6960 {"pcdelta", 1, 0, 1},
6961 {"assignop", 0, 0, 0},
6962 {"cond", 1, 0, 1},
6963 {"brace", 1, 0, 1},
6964 {"hidden", 0, 0, 0},
6965 {"pcbase", 1, 0, -1},
6966 {"label", 1, 0, 0},
6967 {"labelbrace", 1, 0, 0},
6968 {"endbrace", 0, 0, 0},
6969 {"break2label", 1, 0, 0},
6970 {"cont2label", 1, 0, 0},
6971 {"switch", 2, 0, 1},
6972 {"funcdef", 1, 0, 0},
6973 {"catch", 1, 0, 1},
6974 {"extended", -1, 0, 0},
6975 {"newline", 0, 0, 0},
6976 {"setline", 1, 0, 0},
6977 {"xdelta", 0, 0, 0},
6978 };
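/*
 * Annotation (assuming the JSSrcNoteSpec field order name/arity/offsetBias/
 * isSpanDep): each row names a note type, gives how many offset operands it
 * carries, the offsetBias discussed above, and whether its offsets are
 * span-dependent; e.g. "for" carries three offsets and "if-else" two.
 */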
6980 static intN
6981 AllocSrcNote(JSContext *cx, JSCodeGenerator *cg)
6982 {
6983 intN index;
6984 JSArenaPool *pool;
6985 size_t size;
6987 index = CG_NOTE_COUNT(cg);
6988 if (((uintN)index & CG_NOTE_MASK(cg)) == 0) {
6989 pool = cg->notePool;
6990 size = SRCNOTE_SIZE(CG_NOTE_MASK(cg) + 1);
6991 if (!CG_NOTES(cg)) {
6992 /* Allocate the first note array lazily; leave noteMask alone. */
6993 JS_ARENA_ALLOCATE_CAST(CG_NOTES(cg), jssrcnote *, pool, size);
6994 } else {
6995 /* Grow by doubling note array size; update noteMask on success. */
6996 JS_ARENA_GROW_CAST(CG_NOTES(cg), jssrcnote *, pool, size, size);
6997 if (CG_NOTES(cg))
6998 CG_NOTE_MASK(cg) = (CG_NOTE_MASK(cg) << 1) | 1;
6999 }
7000 if (!CG_NOTES(cg)) {
7001 js_ReportOutOfScriptQuota(cx);
7002 return -1;
7003 }
7004 }
7006 CG_NOTE_COUNT(cg) = index + 1;
7007 return index;
7008 }
7010 intN
7011 js_NewSrcNote(JSContext *cx, JSCodeGenerator *cg, JSSrcNoteType type)
7012 {
7013 intN index, n;
7014 jssrcnote *sn;
7015 ptrdiff_t offset, delta, xdelta;
7017 /*
7018 * Claim a note slot in CG_NOTES(cg) by growing it if necessary and then
7019 * incrementing CG_NOTE_COUNT(cg).
7020 */
7021 index = AllocSrcNote(cx, cg);
7022 if (index < 0)
7023 return -1;
7024 sn = &CG_NOTES(cg)[index];
7026 /*
7027 * Compute delta from the last annotated bytecode's offset. If it's too
7028 * big to fit in sn, allocate one or more xdelta notes and reset sn.
7029 */
7030 offset = CG_OFFSET(cg);
7031 delta = offset - CG_LAST_NOTE_OFFSET(cg);
7032 CG_LAST_NOTE_OFFSET(cg) = offset;
7033 if (delta >= SN_DELTA_LIMIT) {
7034 do {
7035 xdelta = JS_MIN(delta, SN_XDELTA_MASK);
7036 SN_MAKE_XDELTA(sn, xdelta);
7037 delta -= xdelta;
7038 index = AllocSrcNote(cx, cg);
7039 if (index < 0)
7040 return -1;
7041 sn = &CG_NOTES(cg)[index];
7042 } while (delta >= SN_DELTA_LIMIT);
7043 }
7045 /*
7046 * Initialize type and delta, then allocate the minimum number of notes
7047 * needed for type's arity. Usually, we won't need more, but if an offset
7048 * does take two bytes, js_SetSrcNoteOffset will grow CG_NOTES(cg).
7049 */
7050 SN_MAKE_NOTE(sn, type, delta);
7051 for (n = (intN)js_SrcNoteSpec[type].arity; n > 0; n--) {
7052 if (js_NewSrcNote(cx, cg, SRC_NULL) < 0)
7053 return -1;
7054 }
7055 return index;
7056 }
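/*
 * Encoding note (illustrative): a srcnote's leading byte packs its type with
 * a small pc-delta, so when the distance from the last annotated bytecode
 * reaches SN_DELTA_LIMIT the loop above prepends SRC_XDELTA notes, each
 * absorbing up to SN_XDELTA_MASK of the distance, until the remainder fits.
 */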
7058 intN
7059 js_NewSrcNote2(JSContext *cx, JSCodeGenerator *cg, JSSrcNoteType type,
7060 ptrdiff_t offset)
7061 {
7062 intN index;
7064 index = js_NewSrcNote(cx, cg, type);
7065 if (index >= 0) {
7066 if (!js_SetSrcNoteOffset(cx, cg, index, 0, offset))
7067 return -1;
7068 }
7069 return index;
7070 }
7072 intN
7073 js_NewSrcNote3(JSContext *cx, JSCodeGenerator *cg, JSSrcNoteType type,
7074 ptrdiff_t offset1, ptrdiff_t offset2)
7075 {
7076 intN index;
7078 index = js_NewSrcNote(cx, cg, type);
7079 if (index >= 0) {
7080 if (!js_SetSrcNoteOffset(cx, cg, index, 0, offset1))
7081 return -1;
7082 if (!js_SetSrcNoteOffset(cx, cg, index, 1, offset2))
7083 return -1;
7084 }
7085 return index;
7086 }
7088 static JSBool
7089 GrowSrcNotes(JSContext *cx, JSCodeGenerator *cg)
7090 {
7091 JSArenaPool *pool;
7092 size_t size;
7094 /* Grow by doubling note array size; update noteMask on success. */
7095 pool = cg->notePool;
7096 size = SRCNOTE_SIZE(CG_NOTE_MASK(cg) + 1);
7097 JS_ARENA_GROW_CAST(CG_NOTES(cg), jssrcnote *, pool, size, size);
7098 if (!CG_NOTES(cg)) {
7099 js_ReportOutOfScriptQuota(cx);
7100 return JS_FALSE;
7101 }
7102 CG_NOTE_MASK(cg) = (CG_NOTE_MASK(cg) << 1) | 1;
7103 return JS_TRUE;
7104 }
7106 jssrcnote *
7107 js_AddToSrcNoteDelta(JSContext *cx, JSCodeGenerator *cg, jssrcnote *sn,
7108 ptrdiff_t delta)
7109 {
7110 ptrdiff_t base, limit, newdelta, diff;
7111 intN index;
7113 /*
7114 * Called only from OptimizeSpanDeps and js_FinishTakingSrcNotes to add to
7115 * main script note deltas, and only by a small positive amount.
7116 */
7117 JS_ASSERT(cg->current == &cg->main);
7118 JS_ASSERT((unsigned) delta < (unsigned) SN_XDELTA_LIMIT);
7120 base = SN_DELTA(sn);
7121 limit = SN_IS_XDELTA(sn) ? SN_XDELTA_LIMIT : SN_DELTA_LIMIT;
7122 newdelta = base + delta;
7123 if (newdelta < limit) {
7124 SN_SET_DELTA(sn, newdelta);
7125 } else {
7126 index = sn - cg->main.notes;
7127 if ((cg->main.noteCount & cg->main.noteMask) == 0) {
7128 if (!GrowSrcNotes(cx, cg))
7129 return NULL;
7130 sn = cg->main.notes + index;
7131 }
7132 diff = cg->main.noteCount - index;
7133 cg->main.noteCount++;
7134 memmove(sn + 1, sn, SRCNOTE_SIZE(diff));
7135 SN_MAKE_XDELTA(sn, delta);
7136 sn++;
7137 }
7138 return sn;
7139 }
7141 JS_FRIEND_API(uintN)
7142 js_SrcNoteLength(jssrcnote *sn)
7143 {
7144 uintN arity;
7145 jssrcnote *base;
7147 arity = (intN)js_SrcNoteSpec[SN_TYPE(sn)].arity;
7148 for (base = sn++; arity; sn++, arity--) {
7149 if (*sn & SN_3BYTE_OFFSET_FLAG)
7150 sn += 2;
7151 }
7152 return sn - base;
7153 }
7155 JS_FRIEND_API(ptrdiff_t)
7156 js_GetSrcNoteOffset(jssrcnote *sn, uintN which)
7157 {
7158 /* Find the offset numbered which (i.e., skip exactly which offsets). */
7159 JS_ASSERT(SN_TYPE(sn) != SRC_XDELTA);
7160 JS_ASSERT((intN) which < js_SrcNoteSpec[SN_TYPE(sn)].arity);
7161 for (sn++; which; sn++, which--) {
7162 if (*sn & SN_3BYTE_OFFSET_FLAG)
7163 sn += 2;
7164 }
7165 if (*sn & SN_3BYTE_OFFSET_FLAG) {
7166 return (ptrdiff_t)(((uint32)(sn[0] & SN_3BYTE_OFFSET_MASK) << 16)
7167 | (sn[1] << 8)
7168 | sn[2]);
7169 }
7170 return (ptrdiff_t)*sn;
7171 }
7173 JSBool
7174 js_SetSrcNoteOffset(JSContext *cx, JSCodeGenerator *cg, uintN index,
7175 uintN which, ptrdiff_t offset)
7176 {
7177 jssrcnote *sn;
7178 ptrdiff_t diff;
7180 if ((jsuword)offset >= (jsuword)((ptrdiff_t)SN_3BYTE_OFFSET_FLAG << 16)) {
7181 ReportStatementTooLarge(cx, cg);
7182 return JS_FALSE;
7183 }
7185 /* Find the offset numbered which (i.e., skip exactly which offsets). */
7186 sn = &CG_NOTES(cg)[index];
7187 JS_ASSERT(SN_TYPE(sn) != SRC_XDELTA);
7188 JS_ASSERT((intN) which < js_SrcNoteSpec[SN_TYPE(sn)].arity);
7189 for (sn++; which; sn++, which--) {
7190 if (*sn & SN_3BYTE_OFFSET_FLAG)
7191 sn += 2;
7192 }
7194 /* See if the new offset requires three bytes. */
7195 if (offset > (ptrdiff_t)SN_3BYTE_OFFSET_MASK) {
7196 /* Maybe this offset was already set to a three-byte value. */
7197 if (!(*sn & SN_3BYTE_OFFSET_FLAG)) {
7198 /* Losing, need to insert another two bytes for this offset. */
7199 index = sn - CG_NOTES(cg);
7201 /*
7202 * Simultaneously test to see if the source note array must grow to
7203 * accommodate either the first or second byte of additional storage
7204 * required by this 3-byte offset.
7205 */
7206 if (((CG_NOTE_COUNT(cg) + 1) & CG_NOTE_MASK(cg)) <= 1) {
7207 if (!GrowSrcNotes(cx, cg))
7208 return JS_FALSE;
7209 sn = CG_NOTES(cg) + index;
7210 }
7211 CG_NOTE_COUNT(cg) += 2;
7213 diff = CG_NOTE_COUNT(cg) - (index + 3);
7214 JS_ASSERT(diff >= 0);
7215 if (diff > 0)
7216 memmove(sn + 3, sn + 1, SRCNOTE_SIZE(diff));
7217 }
7218 *sn++ = (jssrcnote)(SN_3BYTE_OFFSET_FLAG | (offset >> 16));
7219 *sn++ = (jssrcnote)(offset >> 8);
7220 }
7221 *sn = (jssrcnote)offset;
7222 return JS_TRUE;
7223 }
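/*
 * Worked example (illustrative): an offset of 0x12345 exceeds
 * SN_3BYTE_OFFSET_MASK, so two bytes are inserted after the note and the
 * value is stored as (SN_3BYTE_OFFSET_FLAG | 0x01), 0x23, 0x45 -- exactly
 * what the decoding branch in js_GetSrcNoteOffset above reads back.
 */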
7225 #ifdef DEBUG_notme
7226 #define DEBUG_srcnotesize
7227 #endif
7229 #ifdef DEBUG_srcnotesize
7230 #define NBINS 10
7231 static uint32 hist[NBINS];
7233 void DumpSrcNoteSizeHist()
7234 {
7235 static FILE *fp;
7236 int i, n;
7238 if (!fp) {
7239 fp = fopen("/tmp/srcnotes.hist", "w");
7240 if (!fp)
7241 return;
7242 setvbuf(fp, NULL, _IONBF, 0);
7243 }
7244 fprintf(fp, "SrcNote size histogram:\n");
7245 for (i = 0; i < NBINS; i++) {
7246 fprintf(fp, "%4u %4u ", JS_BIT(i), hist[i]);
7247 for (n = (int) JS_HOWMANY(hist[i], 10); n > 0; --n)
7248 fputc('*', fp);
7249 fputc('\n', fp);
7250 }
7251 fputc('\n', fp);
7252 }
7253 #endif
7255 /*
7256 * Fill in the storage at notes with prolog and main srcnotes; the space at
7257 * notes was allocated using the CG_COUNT_FINAL_SRCNOTES macro from jsemit.h.
7258 * SO DON'T CHANGE THIS FUNCTION WITHOUT AT LEAST CHECKING WHETHER jsemit.h's
7259 * CG_COUNT_FINAL_SRCNOTES MACRO NEEDS CORRESPONDING CHANGES!
7260 */
7261 JSBool
7262 js_FinishTakingSrcNotes(JSContext *cx, JSCodeGenerator *cg, jssrcnote *notes)
7263 {
7264 uintN prologCount, mainCount, totalCount;
7265 ptrdiff_t offset, delta;
7266 jssrcnote *sn;
7268 JS_ASSERT(cg->current == &cg->main);
7270 prologCount = cg->prolog.noteCount;
7271 if (prologCount && cg->prolog.currentLine != cg->firstLine) {
7272 CG_SWITCH_TO_PROLOG(cg);
7273 if (js_NewSrcNote2(cx, cg, SRC_SETLINE, (ptrdiff_t)cg->firstLine) < 0)
7274 return JS_FALSE;
7275 prologCount = cg->prolog.noteCount;
7276 CG_SWITCH_TO_MAIN(cg);
7277 } else {
7278 /*
7279 * Either no prolog srcnotes, or no line number change over prolog.
7280 * We don't need a SRC_SETLINE, but we may need to adjust the offset
7281 * of the first main note, by adding to its delta and possibly even
7282 * prepending SRC_XDELTA notes to it to account for prolog bytecodes
7283 * that came at and after the last annotated bytecode.
7284 */
7285 offset = CG_PROLOG_OFFSET(cg) - cg->prolog.lastNoteOffset;
7286 JS_ASSERT(offset >= 0);
7287 if (offset > 0 && cg->main.noteCount != 0) {
7288 /* NB: Use as much of the first main note's delta as we can. */
7289 sn = cg->main.notes;
7290 delta = SN_IS_XDELTA(sn)
7291 ? SN_XDELTA_MASK - (*sn & SN_XDELTA_MASK)
7292 : SN_DELTA_MASK - (*sn & SN_DELTA_MASK);
7293 if (offset < delta)
7294 delta = offset;
7295 for (;;) {
7296 if (!js_AddToSrcNoteDelta(cx, cg, sn, delta))
7297 return JS_FALSE;
7298 offset -= delta;
7299 if (offset == 0)
7300 break;
7301 delta = JS_MIN(offset, SN_XDELTA_MASK);
7302 sn = cg->main.notes;
7303 }
7304 }
7305 }
7307 mainCount = cg->main.noteCount;
7308 totalCount = prologCount + mainCount;
7309 if (prologCount)
7310 memcpy(notes, cg->prolog.notes, SRCNOTE_SIZE(prologCount));
7311 memcpy(notes + prologCount, cg->main.notes, SRCNOTE_SIZE(mainCount));
7312 SN_MAKE_TERMINATOR(&notes[totalCount]);
7314 #ifdef DEBUG_notme
7315 { int bin = JS_CeilingLog2(totalCount);
7316 if (bin >= NBINS)
7317 bin = NBINS - 1;
7318 ++hist[bin];
7319 }
7320 #endif
7321 return JS_TRUE;
7322 }
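/*
 * Illustrative note: if prolog bytecode was emitted after the prolog's last
 * srcnote, the else arm above folds that distance into the first main note's
 * delta, prepending SRC_XDELTA notes when one delta cannot absorb it, so pc
 * offsets stay correct once the prolog and main note vectors are joined.
 */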
7324 static JSBool
7325 NewTryNote(JSContext *cx, JSCodeGenerator *cg, JSTryNoteKind kind,
7326 uintN stackDepth, size_t start, size_t end)
7327 {
7328 JSTryNode *tryNode;
7330 JS_ASSERT((uintN)(uint16)stackDepth == stackDepth);
7331 JS_ASSERT(start <= end);
7332 JS_ASSERT((size_t)(uint32)start == start);
7333 JS_ASSERT((size_t)(uint32)end == end);
7335 JS_ARENA_ALLOCATE_TYPE(tryNode, JSTryNode, &cx->tempPool);
7336 if (!tryNode) {
7337 js_ReportOutOfScriptQuota(cx);
7338 return JS_FALSE;
7339 }
7341 tryNode->note.kind = kind;
7342 tryNode->note.stackDepth = (uint16)stackDepth;
7343 tryNode->note.start = (uint32)start;
7344 tryNode->note.length = (uint32)(end - start);
7345 tryNode->prev = cg->lastTryNode;
7346 cg->lastTryNode = tryNode;
7347 cg->ntrynotes++;
7348 return JS_TRUE;
7349 }
7351 void
7352 js_FinishTakingTryNotes(JSCodeGenerator *cg, JSTryNoteArray *array)
7353 {
7354 JSTryNode *tryNode;
7355 JSTryNote *tn;
7357 JS_ASSERT(array->length > 0 && array->length == cg->ntrynotes);
7358 tn = array->vector + array->length;
7359 tryNode = cg->lastTryNode;
7360 do {
7361 *--tn = tryNode->note;
7362 } while ((tryNode = tryNode->prev) != NULL);
7363 JS_ASSERT(tn == array->vector);
7364 }
7366 /*
7367 * Find the index of the given object for code generator.
7368 *
7369 * Since the emitter refers to each parsed object only once, for the index we
7370 * use the number of already indexed objects. We also add the object to a list
7371 * to convert the list to a fixed-size array when we complete code generation,
7372 * see JSCGObjectList::finish below.
7373 *
7374 * Most of the objects go to JSCodeGenerator.objectList, but for regexps we use
7375 * a separate JSCodeGenerator.regexpList. In this way the emitted index can be
7376 * directly used to store and fetch a reference to a cloned RegExp object that
7377 * shares the same JSRegExp private data created for the object literal in
7378 * objbox. We need a cloned object to hold lastIndex and other direct properties
7379 * that should not be shared among threads sharing a precompiled function or
7380 * script.
7381 *
7382 * If the code being compiled is function code, allocate a reserved slot in
7383 * the cloned function object that shares its precompiled script with other
7384 * cloned function objects and with the compiler-created clone-parent. There
7385 * are nregexps = script->regexps()->length such reserved slots in each
7386 * function object cloned from fun->object. NB: during compilation, a funobj
7387 * slots element must never be allocated, because js_AllocSlot could hand out
7388 * one of the slots that should be given to a regexp clone.
7389 *
7390 * If the code being compiled is global code, the cloned regexps are stored in
7391 * fp->vars slots after cg->ngvars, and to protect regexp slots from GC we set
7392 * fp->nvars to ngvars + nregexps.
7393 *
7394 * The slots initially contain undefined or null. We populate them lazily when
7395 * JSOP_REGEXP is executed for the first time.
7396 *
7397 * Why clone regexp objects? ECMA specifies that when a regular expression
7398 * literal is scanned, a RegExp object is created. In the spec, compilation
7399 * and execution happen indivisibly, but in this implementation and many of
7400 * its embeddings, code is precompiled early and re-executed in multiple
7401 * threads, or using multiple global objects, or both, for efficiency.
7402 *
7403 * In such cases, naively following ECMA leads to wrongful sharing of RegExp
7404 * objects, which makes for collisions on the lastIndex property (especially
7405 * for global regexps) and on any ad-hoc properties. Also, __proto__ refers to
7406 * the pre-compilation prototype, a pigeon-hole problem for instanceof tests.
7407 */
7408 uintN
7409 JSCGObjectList::index(JSObjectBox *objbox)
7410 {
7411 JS_ASSERT(!objbox->emitLink);
7412 objbox->emitLink = lastbox;
7413 lastbox = objbox;
7414 return length++;
7415 }
7417 void
7418 JSCGObjectList::finish(JSObjectArray *array)
7419 {
7420 JSObject **cursor;
7421 JSObjectBox *objbox;
7423 JS_ASSERT(length <= INDEX_LIMIT);
7424 JS_ASSERT(length == array->length);
7426 cursor = array->vector + array->length;
7427 objbox = lastbox;
7428 do {
7429 --cursor;
7430 JS_ASSERT(!*cursor);
7431 *cursor = objbox->object;
7432 } while ((objbox = objbox->emitLink) != NULL);
7433 JS_ASSERT(cursor == array->vector);
7434 }
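/*
 * Usage sketch (illustrative): during emission each object literal gets its
 * slot via index(), which links the JSObjectBox onto a reverse chain; at the
 * end, finish() walks that chain from lastbox, filling array->vector back to
 * front so that vector[i] is exactly the object assigned index i.
 */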