/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sw=4 et tw=99:
 *
 * ***** BEGIN LICENSE BLOCK *****
 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
 *
 * The contents of this file are subject to the Mozilla Public License Version
 * 1.1 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * http://www.mozilla.org/MPL/
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
 *
 * The Original Code is Mozilla Communicator client code, released
 * March 31, 1998.
 *
 * The Initial Developer of the Original Code is
 * Netscape Communications Corporation.
 * Portions created by the Initial Developer are Copyright (C) 1998
 * the Initial Developer. All Rights Reserved.
 *
 * Contributor(s):
 *
 * Alternatively, the contents of this file may be used under the terms of
 * either of the GNU General Public License Version 2 or later (the "GPL"),
 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
 * in which case the provisions of the GPL or the LGPL are applicable instead
 * of those above. If you wish to allow use of your version of this file only
 * under the terms of either the GPL or the LGPL, and not to allow others to
 * use your version of this file under the terms of the MPL, indicate your
 * decision by deleting the provisions above and replace them with the notice
 * and other provisions required by the GPL or the LGPL. If you do not delete
 * the provisions above, a recipient may use your version of this file under
 * the terms of any one of the MPL, the GPL or the LGPL.
 *
 * ***** END LICENSE BLOCK ***** */

/*
 * JS bytecode generation.
 */
#ifdef HAVE_MEMORY_H
#include <memory.h>
#endif
#include <new>
#include <string.h>
#include "jstypes.h"
#include "jsstdint.h"
#include "jsarena.h" /* Added by JSIFY */
#include "jsutil.h" /* Added by JSIFY */
#include "jsbit.h"
#include "jsprf.h"
#include "jsapi.h"
#include "jsatom.h"
#include "jsbool.h"
#include "jscntxt.h"
#include "jsversion.h"
#include "jsemit.h"
#include "jsfun.h"
#include "jsnum.h"
#include "jsopcode.h"
#include "jsparse.h"
#include "jsregexp.h"
#include "jsscan.h"
#include "jsscope.h"
#include "jsscript.h"
#include "jsautooplen.h"        // generated headers last
#include "jsstaticcheck.h"

#include "jsobjinlines.h"
#include "jsscopeinlines.h"

/* Allocation chunk counts, must be powers of two in general. */
#define BYTECODE_CHUNK  256     /* code allocation increment */
#define SRCNOTE_CHUNK   64      /* initial srcnote allocation increment */
#define TRYNOTE_CHUNK   64      /* trynote allocation increment */

/* Macros to compute byte sizes from typed element counts. */
#define BYTECODE_SIZE(n)        ((n) * sizeof(jsbytecode))
#define SRCNOTE_SIZE(n)         ((n) * sizeof(jssrcnote))
#define TRYNOTE_SIZE(n)         ((n) * sizeof(JSTryNote))

using namespace js;

static JSBool
NewTryNote(JSContext *cx, JSCodeGenerator *cg, JSTryNoteKind kind,
           uintN stackDepth, size_t start, size_t end);

JSCodeGenerator::JSCodeGenerator(Parser *parser,
                                 JSArenaPool *cpool, JSArenaPool *npool,
                                 uintN lineno)
  : JSTreeContext(parser),
    codePool(cpool), notePool(npool),
    codeMark(JS_ARENA_MARK(cpool)), noteMark(JS_ARENA_MARK(npool)),
    stackDepth(0), maxStackDepth(0),
    ntrynotes(0), lastTryNode(NULL),
    spanDeps(NULL), jumpTargets(NULL), jtFreeList(NULL),
    numSpanDeps(0), numJumpTargets(0), spanDepTodo(0),
    arrayCompDepth(0),
    emitLevel(0),
    constMap(parser->context),
    constList(parser->context)
{
    flags = TCF_COMPILING;
    memset(&prolog, 0, sizeof prolog);
    memset(&main, 0, sizeof main);
    current = &main;
    firstLine = prolog.currentLine = main.currentLine = lineno;
    prolog.noteMask = main.noteMask = SRCNOTE_CHUNK - 1;
    memset(&upvarMap, 0, sizeof upvarMap);
}

bool JSCodeGenerator::init()
{
    return constMap.init();
}

JSCodeGenerator::~JSCodeGenerator()
{
    JS_ARENA_RELEASE(codePool, codeMark);
    JS_ARENA_RELEASE(notePool, noteMark);

    /* NB: non-null only after OOM. */
    if (spanDeps)
        parser->context->free(spanDeps);

    if (upvarMap.vector)
        parser->context->free(upvarMap.vector);
}

static ptrdiff_t
EmitCheck(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t delta)
{
    jsbytecode *base, *limit, *next;
    ptrdiff_t offset, length;
    size_t incr, size;

    base = CG_BASE(cg);
    next = CG_NEXT(cg);
    limit = CG_LIMIT(cg);
    offset = next - base;
    if (next + delta > limit) {
        length = offset + delta;
        length = (length <= BYTECODE_CHUNK)
                 ? BYTECODE_CHUNK
                 : JS_BIT(JS_CeilingLog2(length));
        incr = BYTECODE_SIZE(length);
        if (!base) {
            JS_ARENA_ALLOCATE_CAST(base, jsbytecode *, cg->codePool, incr);
        } else {
            size = BYTECODE_SIZE(limit - base);
            incr -= size;
            JS_ARENA_GROW_CAST(base, jsbytecode *, cg->codePool, size, incr);
        }
        if (!base) {
            js_ReportOutOfScriptQuota(cx);
            return -1;
        }
        CG_BASE(cg) = base;
        CG_LIMIT(cg) = base + length;
        CG_NEXT(cg) = base + offset;
    }
    return offset;
}
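
/*
 * Illustrative sketch (compiled out, editor's annotation): the chunk-growth
 * policy above rounds a requested code length up to BYTECODE_CHUNK or to the
 * next power of two, so repeated appends cost amortized O(1) arena growth.
 * This helper is not part of the emitter; it just restates the computation.
 */
#if 0
static ptrdiff_t
RoundUpCodeLength(ptrdiff_t length)
{
    return (length <= BYTECODE_CHUNK)
           ? BYTECODE_CHUNK
           : JS_BIT(JS_CeilingLog2(length));
}
#endif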

static void
UpdateDepth(JSContext *cx, JSCodeGenerator *cg, ptrdiff_t target)
{
    jsbytecode *pc;
    JSOp op;
    const JSCodeSpec *cs;
    uintN extra, depth, nuses;
    intN ndefs;

    pc = CG_CODE(cg, target);
    op = (JSOp) *pc;
    cs = &js_CodeSpec[op];
#ifdef JS_TRACER
    extern uint8 js_opcode2extra[];
    extra = js_opcode2extra[op];
#else
    extra = 0;
#endif
    if ((cs->format & JOF_TMPSLOT_MASK) || extra) {
        depth = (uintN) cg->stackDepth +
                ((cs->format & JOF_TMPSLOT_MASK) >> JOF_TMPSLOT_SHIFT) +
                extra;
        if (depth > cg->maxStackDepth)
            cg->maxStackDepth = depth;
    }

    nuses = js_GetStackUses(cs, op, pc);
    cg->stackDepth -= nuses;
    JS_ASSERT(cg->stackDepth >= 0);
    if (cg->stackDepth < 0) {
        char numBuf[12];
        TokenStream *ts;

        JS_snprintf(numBuf, sizeof numBuf, "%d", target);
        ts = &cg->parser->tokenStream;
        JS_ReportErrorFlagsAndNumber(cx, JSREPORT_WARNING,
                                     js_GetErrorMessage, NULL,
                                     JSMSG_STACK_UNDERFLOW,
                                     ts->getFilename() ? ts->getFilename() : "stdin",
                                     numBuf);
    }
    ndefs = cs->ndefs;
    if (ndefs < 0) {
        JSObject *blockObj;

        /* We just executed IndexParsedObject */
        JS_ASSERT(op == JSOP_ENTERBLOCK);
        JS_ASSERT(nuses == 0);
        blockObj = cg->objectList.lastbox->object;
        JS_ASSERT(blockObj->getClass() == &js_BlockClass);
        JS_ASSERT(blockObj->fslots[JSSLOT_BLOCK_DEPTH].isUndefined());

        OBJ_SET_BLOCK_DEPTH(cx, blockObj, cg->stackDepth);
        ndefs = OBJ_BLOCK_COUNT(cx, blockObj);
    }
    cg->stackDepth += ndefs;
    if ((uintN)cg->stackDepth > cg->maxStackDepth)
        cg->maxStackDepth = cg->stackDepth;
}

ptrdiff_t
js_Emit1(JSContext *cx, JSCodeGenerator *cg, JSOp op)
{
    ptrdiff_t offset = EmitCheck(cx, cg, op, 1);

    if (offset >= 0) {
        *CG_NEXT(cg)++ = (jsbytecode)op;
        UpdateDepth(cx, cg, offset);
    }
    return offset;
}

ptrdiff_t
js_Emit2(JSContext *cx, JSCodeGenerator *cg, JSOp op, jsbytecode op1)
{
    ptrdiff_t offset = EmitCheck(cx, cg, op, 2);

    if (offset >= 0) {
        jsbytecode *next = CG_NEXT(cg);
        next[0] = (jsbytecode)op;
        next[1] = op1;
        CG_NEXT(cg) = next + 2;
        UpdateDepth(cx, cg, offset);
    }
    return offset;
}

ptrdiff_t
js_Emit3(JSContext *cx, JSCodeGenerator *cg, JSOp op, jsbytecode op1,
         jsbytecode op2)
{
    ptrdiff_t offset = EmitCheck(cx, cg, op, 3);

    if (offset >= 0) {
        jsbytecode *next = CG_NEXT(cg);
        next[0] = (jsbytecode)op;
        next[1] = op1;
        next[2] = op2;
        CG_NEXT(cg) = next + 3;
        UpdateDepth(cx, cg, offset);
    }
    return offset;
}

ptrdiff_t
js_EmitN(JSContext *cx, JSCodeGenerator *cg, JSOp op, size_t extra)
{
    ptrdiff_t length = 1 + (ptrdiff_t)extra;
    ptrdiff_t offset = EmitCheck(cx, cg, op, length);

    if (offset >= 0) {
        jsbytecode *next = CG_NEXT(cg);
        *next = (jsbytecode)op;
        memset(next + 1, 0, BYTECODE_SIZE(extra));
        CG_NEXT(cg) = next + length;

        /*
         * Don't UpdateDepth if op's use-count comes from the immediate
         * operand yet to be stored in the extra bytes after op.
         */
        if (js_CodeSpec[op].nuses >= 0)
            UpdateDepth(cx, cg, offset);
    }
    return offset;
}

/* XXX too many "... statement" L10N gaffes below -- fix via js.msg! */
const char js_with_statement_str[] = "with statement";
const char js_finally_block_str[]  = "finally block";
const char js_script_str[]         = "script";

static const char *statementName[] = {
    "label statement",       /* LABEL */
    "if statement",          /* IF */
    "else statement",        /* ELSE */
    "destructuring body",    /* BODY */
    "switch statement",      /* SWITCH */
    "block",                 /* BLOCK */
    js_with_statement_str,   /* WITH */
    "catch block",           /* CATCH */
    "try block",             /* TRY */
    js_finally_block_str,    /* FINALLY */
    js_finally_block_str,    /* SUBROUTINE */
    "do loop",               /* DO_LOOP */
    "for loop",              /* FOR_LOOP */
    "for/in loop",           /* FOR_IN_LOOP */
    "while loop",            /* WHILE_LOOP */
};

JS_STATIC_ASSERT(JS_ARRAY_LENGTH(statementName) == STMT_LIMIT);

static const char *
StatementName(JSCodeGenerator *cg)
{
    if (!cg->topStmt)
        return js_script_str;
    return statementName[cg->topStmt->type];
}

static void
ReportStatementTooLarge(JSContext *cx, JSCodeGenerator *cg)
{
    JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_NEED_DIET,
                         StatementName(cg));
}

/*
 Span-dependent instructions in JS bytecode consist of the jump (JOF_JUMP)
 and switch (JOF_LOOKUPSWITCH, JOF_TABLESWITCH) format opcodes, subdivided
 into unconditional (gotos and gosubs), and conditional jumps or branches
 (which pop a value, test it, and jump depending on its value). Most jumps
 have just one immediate operand, a signed offset from the jump opcode's pc
 to the target bytecode. The lookup and table switch opcodes may contain
 many jump offsets.

 Mozilla bug #80981 (http://bugzilla.mozilla.org/show_bug.cgi?id=80981) was
 fixed by adding extended "X" counterparts to the opcodes/formats (NB: X is
 suffixed to prefer JSOP_ORX thereby avoiding a JSOP_XOR name collision for
 the extended form of the JSOP_OR branch opcode). The unextended or short
 formats have 16-bit signed immediate offset operands, the extended or long
 formats have 32-bit signed immediates. The span-dependency problem consists
 of selecting as few long instructions as possible, or about as few -- since
 jumps can span other jumps, extending one jump may cause another to need to
 be extended.

 Most JS scripts are short, so need no extended jumps. We optimize for this
 case by generating short jumps until we know a long jump is needed. After
 that point, we keep generating short jumps, but each jump's 16-bit immediate
 offset operand is actually an unsigned index into cg->spanDeps, an array of
 JSSpanDep structs. Each struct tells the top offset in the script of the
 opcode, the "before" offset of the jump (which will be the same as top for
 simplex jumps, but which will index further into the bytecode array for a
 non-initial jump offset in a lookup or table switch), the after "offset"
 adjusted during span-dependent instruction selection (initially the same
 value as the "before" offset), and the jump target (more below).

 Since we generate cg->spanDeps lazily, from within js_SetJumpOffset, we must
 ensure that all bytecode generated so far can be inspected to discover where
 the jump offset immediate operands lie within CG_CODE(cg). But the bonus is
 that we generate span-dependency records sorted by their offsets, so we can
 binary-search when trying to find a JSSpanDep for a given bytecode offset,
 or the nearest JSSpanDep at or above a given pc.

 To avoid limiting scripts to 64K jumps, if the cg->spanDeps index overflows
 65534, we store SPANDEP_INDEX_HUGE in the jump's immediate operand. This
 tells us that we need to binary-search for the cg->spanDeps entry by the
 jump opcode's bytecode offset (sd->before).

 Jump targets need to be maintained in a data structure that lets us look
 up an already-known target by its address (jumps may have a common target),
 and that also lets us update the addresses (script-relative, a.k.a. absolute
 offsets) of targets that come after a jump target (for when a jump below
 that target needs to be extended). We use an AVL tree, implemented using
 recursion, but with some tricky optimizations to its height-balancing code
 (see http://www.cmcrossroads.com/bradapp/ftp/src/libs/C++/AvlTrees.html).

 A final wrinkle: backpatch chains are linked by jump-to-jump offsets with
 positive sign, even though they link "backward" (i.e., toward lower bytecode
 address). We don't want to waste space and search time in the AVL tree for
 such temporary backpatch deltas, so we use a single-bit wildcard scheme to
 tag true JSJumpTarget pointers and encode untagged, signed (positive) deltas
 in JSSpanDep.target pointers, depending on whether the JSSpanDep has a known
 target, or is still awaiting backpatching.

 Note that backpatch chains would present a problem for BuildSpanDepTable,
 which inspects bytecode to build cg->spanDeps on demand, when the first
 short jump offset overflows. To solve this temporary problem, we emit a
 proxy bytecode (JSOP_BACKPATCH; JSOP_BACKPATCH_POP for branch ops) whose
 nuses/ndefs counts help keep the stack balanced, but whose opcode format
 distinguishes its backpatch delta immediate operand from a normal jump
 offset.
 */
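
/*
 * Illustrative sketch (compiled out, editor's annotation): the instruction-
 * selection test implied by the comment above. A jump stays in short form
 * iff its span fits the signed 16-bit immediate range; otherwise it must be
 * extended to the 32-bit "X" form.
 */
#if 0
static JSBool
SpanFitsShortJump(ptrdiff_t span)
{
    return JUMP_OFFSET_MIN <= span && span <= JUMP_OFFSET_MAX;
}
#endif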

static int
BalanceJumpTargets(JSJumpTarget **jtp)
{
    JSJumpTarget *jt, *jt2, *root;
    int dir, otherDir, heightChanged;
    JSBool doubleRotate;

    jt = *jtp;
    JS_ASSERT(jt->balance != 0);

    if (jt->balance < -1) {
        dir = JT_RIGHT;
        doubleRotate = (jt->kids[JT_LEFT]->balance > 0);
    } else if (jt->balance > 1) {
        dir = JT_LEFT;
        doubleRotate = (jt->kids[JT_RIGHT]->balance < 0);
    } else {
        return 0;
    }

    otherDir = JT_OTHER_DIR(dir);
    if (doubleRotate) {
        jt2 = jt->kids[otherDir];
        *jtp = root = jt2->kids[dir];

        jt->kids[otherDir] = root->kids[dir];
        root->kids[dir] = jt;

        jt2->kids[dir] = root->kids[otherDir];
        root->kids[otherDir] = jt2;

        heightChanged = 1;
        root->kids[JT_LEFT]->balance = -JS_MAX(root->balance, 0);
        root->kids[JT_RIGHT]->balance = -JS_MIN(root->balance, 0);
        root->balance = 0;
    } else {
        *jtp = root = jt->kids[otherDir];
        jt->kids[otherDir] = root->kids[dir];
        root->kids[dir] = jt;

        heightChanged = (root->balance != 0);
        jt->balance = -((dir == JT_LEFT) ? --root->balance : ++root->balance);
    }

    return heightChanged;
}

typedef struct AddJumpTargetArgs {
    JSContext           *cx;
    JSCodeGenerator     *cg;
    ptrdiff_t           offset;
    JSJumpTarget        *node;
} AddJumpTargetArgs;

static int
AddJumpTarget(AddJumpTargetArgs *args, JSJumpTarget **jtp)
{
    JSJumpTarget *jt;
    int balanceDelta;

    jt = *jtp;
    if (!jt) {
        JSCodeGenerator *cg = args->cg;

        jt = cg->jtFreeList;
        if (jt) {
            cg->jtFreeList = jt->kids[JT_LEFT];
        } else {
            JS_ARENA_ALLOCATE_CAST(jt, JSJumpTarget *, &args->cx->tempPool,
                                   sizeof *jt);
            if (!jt) {
                js_ReportOutOfScriptQuota(args->cx);
                return 0;
            }
        }
        jt->offset = args->offset;
        jt->balance = 0;
        jt->kids[JT_LEFT] = jt->kids[JT_RIGHT] = NULL;
        cg->numJumpTargets++;
        args->node = jt;
        *jtp = jt;
        return 1;
    }

    if (jt->offset == args->offset) {
        args->node = jt;
        return 0;
    }

    if (args->offset < jt->offset)
        balanceDelta = -AddJumpTarget(args, &jt->kids[JT_LEFT]);
    else
        balanceDelta = AddJumpTarget(args, &jt->kids[JT_RIGHT]);
    if (!args->node)
        return 0;

    jt->balance += balanceDelta;
    return (balanceDelta && jt->balance)
           ? 1 - BalanceJumpTargets(jtp)
           : 0;
}

#ifdef DEBUG_brendan
static int AVLCheck(JSJumpTarget *jt)
{
    int lh, rh;

    if (!jt) return 0;
    JS_ASSERT(-1 <= jt->balance && jt->balance <= 1);
    lh = AVLCheck(jt->kids[JT_LEFT]);
    rh = AVLCheck(jt->kids[JT_RIGHT]);
    JS_ASSERT(jt->balance == rh - lh);
    return 1 + JS_MAX(lh, rh);
}
#endif

static JSBool
SetSpanDepTarget(JSContext *cx, JSCodeGenerator *cg, JSSpanDep *sd,
                 ptrdiff_t off)
{
    AddJumpTargetArgs args;

    if (off < JUMPX_OFFSET_MIN || JUMPX_OFFSET_MAX < off) {
        ReportStatementTooLarge(cx, cg);
        return JS_FALSE;
    }

    args.cx = cx;
    args.cg = cg;
    args.offset = sd->top + off;
    args.node = NULL;
    AddJumpTarget(&args, &cg->jumpTargets);
    if (!args.node)
        return JS_FALSE;

#ifdef DEBUG_brendan
    AVLCheck(cg->jumpTargets);
#endif

    SD_SET_TARGET(sd, args.node);
    return JS_TRUE;
}

#define SPANDEPS_MIN            256
#define SPANDEPS_SIZE(n)        ((n) * sizeof(JSSpanDep))
#define SPANDEPS_SIZE_MIN       SPANDEPS_SIZE(SPANDEPS_MIN)

static JSBool
AddSpanDep(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc, jsbytecode *pc2,
           ptrdiff_t off)
{
    uintN index;
    JSSpanDep *sdbase, *sd;
    size_t size;

    index = cg->numSpanDeps;
    if (index + 1 == 0) {
        ReportStatementTooLarge(cx, cg);
        return JS_FALSE;
    }

    if ((index & (index - 1)) == 0 &&
        (!(sdbase = cg->spanDeps) || index >= SPANDEPS_MIN)) {
        size = sdbase ? SPANDEPS_SIZE(index) : SPANDEPS_SIZE_MIN / 2;
        sdbase = (JSSpanDep *) cx->realloc(sdbase, size + size);
        if (!sdbase)
            return JS_FALSE;
        cg->spanDeps = sdbase;
    }

    cg->numSpanDeps = index + 1;
    sd = cg->spanDeps + index;
    sd->top = pc - CG_BASE(cg);
    sd->offset = sd->before = pc2 - CG_BASE(cg);

    if (js_CodeSpec[*pc].format & JOF_BACKPATCH) {
        /* Jump offset will be backpatched if off is a non-zero "bpdelta". */
        if (off != 0) {
            JS_ASSERT(off >= 1 + JUMP_OFFSET_LEN);
            if (off > BPDELTA_MAX) {
                ReportStatementTooLarge(cx, cg);
                return JS_FALSE;
            }
        }
        SD_SET_BPDELTA(sd, off);
    } else if (off == 0) {
        /* Jump offset will be patched directly, without backpatch chaining. */
        SD_SET_TARGET(sd, 0);
    } else {
        /* The jump offset in off is non-zero, therefore it's already known. */
        if (!SetSpanDepTarget(cx, cg, sd, off))
            return JS_FALSE;
    }

    if (index > SPANDEP_INDEX_MAX)
        index = SPANDEP_INDEX_HUGE;
    SET_SPANDEP_INDEX(pc2, index);
    return JS_TRUE;
}

static jsbytecode *
AddSwitchSpanDeps(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc)
{
    JSOp op;
    jsbytecode *pc2;
    ptrdiff_t off;
    jsint low, high;
    uintN njumps, indexlen;

    op = (JSOp) *pc;
    JS_ASSERT(op == JSOP_TABLESWITCH || op == JSOP_LOOKUPSWITCH);
    pc2 = pc;
    off = GET_JUMP_OFFSET(pc2);
    if (!AddSpanDep(cx, cg, pc, pc2, off))
        return NULL;
    pc2 += JUMP_OFFSET_LEN;
    if (op == JSOP_TABLESWITCH) {
        low = GET_JUMP_OFFSET(pc2);
        pc2 += JUMP_OFFSET_LEN;
        high = GET_JUMP_OFFSET(pc2);
        pc2 += JUMP_OFFSET_LEN;
        njumps = (uintN) (high - low + 1);
        indexlen = 0;
    } else {
        njumps = GET_UINT16(pc2);
        pc2 += UINT16_LEN;
        indexlen = INDEX_LEN;
    }
    while (njumps) {
        --njumps;
        pc2 += indexlen;
        off = GET_JUMP_OFFSET(pc2);
        if (!AddSpanDep(cx, cg, pc, pc2, off))
            return NULL;
        pc2 += JUMP_OFFSET_LEN;
    }
    return 1 + pc2;
}

static JSBool
BuildSpanDepTable(JSContext *cx, JSCodeGenerator *cg)
{
    jsbytecode *pc, *end;
    JSOp op;
    const JSCodeSpec *cs;
    ptrdiff_t off;

    pc = CG_BASE(cg) + cg->spanDepTodo;
    end = CG_NEXT(cg);
    while (pc != end) {
        JS_ASSERT(pc < end);
        op = (JSOp)*pc;
        cs = &js_CodeSpec[op];

        switch (JOF_TYPE(cs->format)) {
          case JOF_TABLESWITCH:
          case JOF_LOOKUPSWITCH:
            pc = AddSwitchSpanDeps(cx, cg, pc);
            if (!pc)
                return JS_FALSE;
            break;

          case JOF_JUMP:
            off = GET_JUMP_OFFSET(pc);
            if (!AddSpanDep(cx, cg, pc, pc, off))
                return JS_FALSE;
            /* FALL THROUGH */
          default:
            pc += cs->length;
            break;
        }
    }

    return JS_TRUE;
}

static JSSpanDep *
GetSpanDep(JSCodeGenerator *cg, jsbytecode *pc)
{
    uintN index;
    ptrdiff_t offset;
    int lo, hi, mid;
    JSSpanDep *sd;

    index = GET_SPANDEP_INDEX(pc);
    if (index != SPANDEP_INDEX_HUGE)
        return cg->spanDeps + index;

    offset = pc - CG_BASE(cg);
    lo = 0;
    hi = cg->numSpanDeps - 1;
    while (lo <= hi) {
        mid = (lo + hi) / 2;
        sd = cg->spanDeps + mid;
        if (sd->before == offset)
            return sd;
        if (sd->before < offset)
            lo = mid + 1;
        else
            hi = mid - 1;
    }

    JS_ASSERT(0);
    return NULL;
}

static JSBool
SetBackPatchDelta(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc,
                  ptrdiff_t delta)
{
    JSSpanDep *sd;

    JS_ASSERT(delta >= 1 + JUMP_OFFSET_LEN);
    if (!cg->spanDeps && delta < JUMP_OFFSET_MAX) {
        SET_JUMP_OFFSET(pc, delta);
        return JS_TRUE;
    }

    if (delta > BPDELTA_MAX) {
        ReportStatementTooLarge(cx, cg);
        return JS_FALSE;
    }

    if (!cg->spanDeps && !BuildSpanDepTable(cx, cg))
        return JS_FALSE;

    sd = GetSpanDep(cg, pc);
    JS_ASSERT(SD_GET_BPDELTA(sd) == 0);
    SD_SET_BPDELTA(sd, delta);
    return JS_TRUE;
}

static void
UpdateJumpTargets(JSJumpTarget *jt, ptrdiff_t pivot, ptrdiff_t delta)
{
    if (jt->offset > pivot) {
        jt->offset += delta;
        if (jt->kids[JT_LEFT])
            UpdateJumpTargets(jt->kids[JT_LEFT], pivot, delta);
    }
    if (jt->kids[JT_RIGHT])
        UpdateJumpTargets(jt->kids[JT_RIGHT], pivot, delta);
}

static JSSpanDep *
FindNearestSpanDep(JSCodeGenerator *cg, ptrdiff_t offset, int lo,
                   JSSpanDep *guard)
{
    int num, hi, mid;
    JSSpanDep *sdbase, *sd;

    num = cg->numSpanDeps;
    JS_ASSERT(num > 0);
    hi = num - 1;
    sdbase = cg->spanDeps;
    while (lo <= hi) {
        mid = (lo + hi) / 2;
        sd = sdbase + mid;
        if (sd->before == offset)
            return sd;
        if (sd->before < offset)
            lo = mid + 1;
        else
            hi = mid - 1;
    }
    if (lo == num)
        return guard;
    sd = sdbase + lo;
    JS_ASSERT(sd->before >= offset && (lo == 0 || sd[-1].before < offset));
    return sd;
}

static void
FreeJumpTargets(JSCodeGenerator *cg, JSJumpTarget *jt)
{
    if (jt->kids[JT_LEFT])
        FreeJumpTargets(cg, jt->kids[JT_LEFT]);
    if (jt->kids[JT_RIGHT])
        FreeJumpTargets(cg, jt->kids[JT_RIGHT]);
    jt->kids[JT_LEFT] = cg->jtFreeList;
    cg->jtFreeList = jt;
}

static JSBool
OptimizeSpanDeps(JSContext *cx, JSCodeGenerator *cg)
{
    jsbytecode *pc, *oldpc, *base, *limit, *next;
    JSSpanDep *sd, *sd2, *sdbase, *sdlimit, *sdtop, guard;
    ptrdiff_t offset, growth, delta, top, pivot, span, length, target;
    JSBool done;
    JSOp op;
    uint32 type;
    size_t size, incr;
    jssrcnote *sn, *snlimit;
    JSSrcNoteSpec *spec;
    uintN i, n, noteIndex;
    JSTryNode *tryNode;
#ifdef DEBUG_brendan
    int passes = 0;
#endif

    base = CG_BASE(cg);
    sdbase = cg->spanDeps;
    sdlimit = sdbase + cg->numSpanDeps;
    offset = CG_OFFSET(cg);
    growth = 0;

    do {
        done = JS_TRUE;
        delta = 0;
        top = pivot = -1;
        sdtop = NULL;
        pc = NULL;
        op = JSOP_NOP;
        type = 0;
#ifdef DEBUG_brendan
        passes++;
#endif

        for (sd = sdbase; sd < sdlimit; sd++) {
            JS_ASSERT(JT_HAS_TAG(sd->target));
            sd->offset += delta;

            if (sd->top != top) {
                sdtop = sd;
                top = sd->top;
                JS_ASSERT(top == sd->before);
                pivot = sd->offset;
                pc = base + top;
                op = (JSOp) *pc;
                type = JOF_OPTYPE(op);
                if (JOF_TYPE_IS_EXTENDED_JUMP(type)) {
                    /*
                     * We already extended all the jump offset operands for
                     * the opcode at sd->top. Jumps and branches have only
                     * one jump offset operand, but switches have many, all
                     * of which are adjacent in cg->spanDeps.
                     */
                    continue;
                }

                JS_ASSERT(type == JOF_JUMP ||
                          type == JOF_TABLESWITCH ||
                          type == JOF_LOOKUPSWITCH);
            }

            if (!JOF_TYPE_IS_EXTENDED_JUMP(type)) {
                span = SD_SPAN(sd, pivot);
                if (span < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < span) {
                    ptrdiff_t deltaFromTop = 0;

                    done = JS_FALSE;

                    switch (op) {
                      case JSOP_GOTO:         op = JSOP_GOTOX; break;
                      case JSOP_IFEQ:         op = JSOP_IFEQX; break;
                      case JSOP_IFNE:         op = JSOP_IFNEX; break;
                      case JSOP_OR:           op = JSOP_ORX; break;
                      case JSOP_AND:          op = JSOP_ANDX; break;
                      case JSOP_GOSUB:        op = JSOP_GOSUBX; break;
                      case JSOP_CASE:         op = JSOP_CASEX; break;
                      case JSOP_DEFAULT:      op = JSOP_DEFAULTX; break;
                      case JSOP_TABLESWITCH:  op = JSOP_TABLESWITCHX; break;
                      case JSOP_LOOKUPSWITCH: op = JSOP_LOOKUPSWITCHX; break;
                      default:
                        ReportStatementTooLarge(cx, cg);
                        return JS_FALSE;
                    }
                    *pc = (jsbytecode) op;

                    for (sd2 = sdtop; sd2 < sdlimit && sd2->top == top; sd2++) {
                        if (sd2 <= sd) {
                            /*
                             * sd2->offset already includes delta as it stood
                             * before we entered this loop, but it must also
                             * include the delta relative to top due to all the
                             * extended jump offset immediates for the opcode
                             * starting at top, which we extend in this loop.
                             *
                             * If there is only one extended jump offset, then
                             * sd2->offset won't change and this for loop will
                             * iterate once only.
                             */
                            sd2->offset += deltaFromTop;
                            deltaFromTop += JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN;
                        } else {
                            /*
                             * sd2 comes after sd, and won't be revisited by
                             * the outer for loop, so we have to increase its
                             * offset by delta, not merely by deltaFromTop.
                             */
                            sd2->offset += delta;
                        }

                        delta += JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN;
                        UpdateJumpTargets(cg->jumpTargets, sd2->offset,
                                          JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN);
                    }
                    sd = sd2 - 1;
                }
            }
        }

        growth += delta;
    } while (!done);

    if (growth) {
#ifdef DEBUG_brendan
        TokenStream *ts = &cg->parser->tokenStream;

        printf("%s:%u: %u/%u jumps extended in %d passes (%d=%d+%d)\n",
               ts->filename ? ts->filename : "stdin", cg->firstLine,
               growth / (JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN), cg->numSpanDeps,
               passes, offset + growth, offset, growth);
#endif

        /*
         * Ensure that we have room for the extended jumps, but don't round up
         * to a power of two -- we're done generating code, so we cut to fit.
         */
        limit = CG_LIMIT(cg);
        length = offset + growth;
        next = base + length;
        if (next > limit) {
            JS_ASSERT(length > BYTECODE_CHUNK);
            size = BYTECODE_SIZE(limit - base);
            incr = BYTECODE_SIZE(length) - size;
            JS_ARENA_GROW_CAST(base, jsbytecode *, cg->codePool, size, incr);
            if (!base) {
                js_ReportOutOfScriptQuota(cx);
                return JS_FALSE;
            }
            CG_BASE(cg) = base;
            CG_LIMIT(cg) = next = base + length;
        }
        CG_NEXT(cg) = next;

        /*
         * Set up a fake span dependency record to guard the end of the code
         * being generated. This guard record is returned as a fencepost by
         * FindNearestSpanDep if there is no real spandep at or above a given
         * unextended code offset.
         */
        guard.top = -1;
        guard.offset = offset + growth;
        guard.before = offset;
        guard.target = NULL;

        /*
         * Now work backwards through the span dependencies, copying chunks of
         * bytecode between each extended jump toward the end of the grown code
         * space, and restoring immediate offset operands for all jump bytecodes.
         * The first chunk of bytecodes, starting at base and ending at the first
         * extended jump offset (NB: this chunk includes the operation bytecode
         * just before that immediate jump offset), doesn't need to be copied.
         */
        JS_ASSERT(sd == sdlimit);
        top = -1;
        while (--sd >= sdbase) {
            if (sd->top != top) {
                top = sd->top;
                op = (JSOp) base[top];
                type = JOF_OPTYPE(op);

                for (sd2 = sd - 1; sd2 >= sdbase && sd2->top == top; sd2--)
                    continue;
                sd2++;
                pivot = sd2->offset;
                JS_ASSERT(top == sd2->before);
            }

            oldpc = base + sd->before;
            span = SD_SPAN(sd, pivot);

            /*
             * If this jump didn't need to be extended, restore its span immediate
             * offset operand now, overwriting the index of sd within cg->spanDeps
             * that was stored temporarily after *pc when BuildSpanDepTable ran.
             *
             * Note that span might fit in 16 bits even for an extended jump op,
             * if the op has multiple span operands, not all of which overflowed
             * (e.g. JSOP_LOOKUPSWITCH or JSOP_TABLESWITCH where some cases are in
             * range for a short jump, but others are not).
             */
            if (!JOF_TYPE_IS_EXTENDED_JUMP(type)) {
                JS_ASSERT(JUMP_OFFSET_MIN <= span && span <= JUMP_OFFSET_MAX);
                SET_JUMP_OFFSET(oldpc, span);
                continue;
            }

            /*
             * Set up parameters needed to copy the next run of bytecode starting
             * at offset (which is a cursor into the unextended, original bytecode
             * vector), down to sd->before (a cursor of the same scale as offset,
             * it's the index of the original jump pc). Reuse delta to count the
             * nominal number of bytes to copy.
             */
            pc = base + sd->offset;
            delta = offset - sd->before;
            JS_ASSERT(delta >= 1 + JUMP_OFFSET_LEN);

            /*
             * Don't bother copying the jump offset we're about to reset, but do
             * copy the bytecode at oldpc (which comes just before its immediate
             * jump offset operand), on the next iteration through the loop, by
             * including it in offset's new value.
             */
            offset = sd->before + 1;
            size = BYTECODE_SIZE(delta - (1 + JUMP_OFFSET_LEN));
            if (size) {
                memmove(pc + 1 + JUMPX_OFFSET_LEN,
                        oldpc + 1 + JUMP_OFFSET_LEN,
                        size);
            }

            SET_JUMPX_OFFSET(pc, span);
        }
    }

    if (growth) {
        /*
         * Fix source note deltas. Don't hardwire the delta fixup adjustment,
         * even though currently it must be JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN
         * at each sd that moved. The future may bring different offset sizes
         * for span-dependent instruction operands. However, we fix only main
         * notes here, not prolog notes -- we know that prolog opcodes are not
         * span-dependent, and aren't likely ever to be.
         */
        offset = growth = 0;
        sd = sdbase;
        for (sn = cg->main.notes, snlimit = sn + cg->main.noteCount;
             sn < snlimit;
             sn = SN_NEXT(sn)) {
            /*
             * Recall that the offset of a given note includes its delta, and
             * tells the offset of the annotated bytecode from the main entry
             * point of the script.
             */
            offset += SN_DELTA(sn);
            while (sd < sdlimit && sd->before < offset) {
                /*
                 * To compute the delta to add to sn, we need to look at the
                 * spandep after sd, whose offset - (before + growth) tells by
                 * how many bytes sd's instruction grew.
                 */
                sd2 = sd + 1;
                if (sd2 == sdlimit)
                    sd2 = &guard;
                delta = sd2->offset - (sd2->before + growth);
                if (delta > 0) {
                    JS_ASSERT(delta == JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN);
                    sn = js_AddToSrcNoteDelta(cx, cg, sn, delta);
                    if (!sn)
                        return JS_FALSE;
                    snlimit = cg->main.notes + cg->main.noteCount;
                    growth += delta;
                }
                sd++;
            }

            /*
             * If sn has span-dependent offset operands, check whether each
             * covers further span-dependencies, and increase those operands
             * accordingly. Some source notes measure offset not from the
             * annotated pc, but from that pc plus some small bias. NB: we
             * assume that spec->offsetBias can't itself span span-dependent
             * instructions!
             */
            spec = &js_SrcNoteSpec[SN_TYPE(sn)];
            if (spec->isSpanDep) {
                pivot = offset + spec->offsetBias;
                n = spec->arity;
                for (i = 0; i < n; i++) {
                    span = js_GetSrcNoteOffset(sn, i);
                    if (span == 0)
                        continue;
                    target = pivot + span * spec->isSpanDep;
                    sd2 = FindNearestSpanDep(cg, target,
                                             (target >= pivot)
                                             ? sd - sdbase
                                             : 0,
                                             &guard);

                    /*
                     * Increase target by sd2's before-vs-after offset delta,
                     * which is absolute (i.e., relative to start of script,
                     * as is target). Recompute the span by subtracting its
                     * adjusted pivot from target.
                     */
                    target += sd2->offset - sd2->before;
                    span = target - (pivot + growth);
                    span *= spec->isSpanDep;
                    noteIndex = sn - cg->main.notes;
                    if (!js_SetSrcNoteOffset(cx, cg, noteIndex, i, span))
                        return JS_FALSE;
                    sn = cg->main.notes + noteIndex;
                    snlimit = cg->main.notes + cg->main.noteCount;
                }
            }
        }
        cg->main.lastNoteOffset += growth;

        /*
         * Fix try/catch notes (O(numTryNotes * log2(numSpanDeps)), but it's
         * not clear how we can beat that).
         */
        for (tryNode = cg->lastTryNode; tryNode; tryNode = tryNode->prev) {
            /*
             * First, look for the nearest span dependency at/above tn->start.
             * There may not be any such spandep, in which case the guard will
             * be returned.
             */
            offset = tryNode->note.start;
            sd = FindNearestSpanDep(cg, offset, 0, &guard);
            delta = sd->offset - sd->before;
            tryNode->note.start = offset + delta;

            /*
             * Next, find the nearest spandep at/above tn->start + tn->length.
             * Use its delta minus tn->start's delta to increase tn->length.
             */
            length = tryNode->note.length;
            sd2 = FindNearestSpanDep(cg, offset + length, sd - sdbase, &guard);
            if (sd2 != sd) {
                tryNode->note.length =
                    length + sd2->offset - sd2->before - delta;
            }
        }
    }

#ifdef DEBUG_brendan
  {
    uintN bigspans = 0;
    top = -1;
    for (sd = sdbase; sd < sdlimit; sd++) {
        offset = sd->offset;

        /* NB: sd->top cursors into the original, unextended bytecode vector. */
        if (sd->top != top) {
            JS_ASSERT(top == -1 ||
                      !JOF_TYPE_IS_EXTENDED_JUMP(type) ||
                      bigspans != 0);
            bigspans = 0;
            top = sd->top;
            JS_ASSERT(top == sd->before);
            op = (JSOp) base[offset];
            type = JOF_OPTYPE(op);
            JS_ASSERT(type == JOF_JUMP ||
                      type == JOF_JUMPX ||
                      type == JOF_TABLESWITCH ||
                      type == JOF_TABLESWITCHX ||
                      type == JOF_LOOKUPSWITCH ||
                      type == JOF_LOOKUPSWITCHX);
            pivot = offset;
        }

        pc = base + offset;
        if (JOF_TYPE_IS_EXTENDED_JUMP(type)) {
            span = GET_JUMPX_OFFSET(pc);
            if (span < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < span) {
                bigspans++;
            } else {
                JS_ASSERT(type == JOF_TABLESWITCHX ||
                          type == JOF_LOOKUPSWITCHX);
            }
        } else {
            span = GET_JUMP_OFFSET(pc);
        }
        JS_ASSERT(SD_SPAN(sd, pivot) == span);
    }
    JS_ASSERT(!JOF_TYPE_IS_EXTENDED_JUMP(type) || bigspans != 0);
  }
#endif

    /*
     * Reset so we optimize at most once -- cg may be used for further code
     * generation of successive, independent, top-level statements. No jump
     * can span top-level statements, because JS lacks goto.
     */
    size = SPANDEPS_SIZE(JS_BIT(JS_CeilingLog2(cg->numSpanDeps)));
    cx->free(cg->spanDeps);
    cg->spanDeps = NULL;
    FreeJumpTargets(cg, cg->jumpTargets);
    cg->jumpTargets = NULL;
    cg->numSpanDeps = cg->numJumpTargets = 0;
    cg->spanDepTodo = CG_OFFSET(cg);
    return JS_TRUE;
}
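
/*
 * Illustrative sketch (compiled out, editor's annotation): every jump that
 * OptimizeSpanDeps extends grows the script by the same fixed amount, so
 * total growth is simply the extended-jump count times
 * (JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN) bytes -- the same arithmetic the
 * DEBUG_brendan printf above inverts to report the jump count.
 */
#if 0
static ptrdiff_t
GrowthFromExtendedJumps(uintN numExtended)
{
    return (ptrdiff_t) numExtended * (JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN);
}
#endif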

static ptrdiff_t
EmitJump(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t off)
{
    JSBool extend;
    ptrdiff_t jmp;
    jsbytecode *pc;

    extend = off < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < off;
    if (extend && !cg->spanDeps && !BuildSpanDepTable(cx, cg))
        return -1;

    jmp = js_Emit3(cx, cg, op, JUMP_OFFSET_HI(off), JUMP_OFFSET_LO(off));
    if (jmp >= 0 && (extend || cg->spanDeps)) {
        pc = CG_CODE(cg, jmp);
        if (!AddSpanDep(cx, cg, pc, pc, off))
            return -1;
    }
    return jmp;
}

static ptrdiff_t
GetJumpOffset(JSCodeGenerator *cg, jsbytecode *pc)
{
    JSSpanDep *sd;
    JSJumpTarget *jt;
    ptrdiff_t top;

    if (!cg->spanDeps)
        return GET_JUMP_OFFSET(pc);

    sd = GetSpanDep(cg, pc);
    jt = sd->target;
    if (!JT_HAS_TAG(jt))
        return JT_TO_BPDELTA(jt);

    top = sd->top;
    while (--sd >= cg->spanDeps && sd->top == top)
        continue;
    sd++;
    return JT_CLR_TAG(jt)->offset - sd->offset;
}
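
/*
 * Illustrative sketch (compiled out, editor's annotation): the two states a
 * JSSpanDep target can be in, per the tag-bit scheme described in the big
 * comment before BalanceJumpTargets and applied in GetJumpOffset above. Uses
 * only the JT_* macros from jsemit.h.
 */
#if 0
static ptrdiff_t
DescribeSpanDepTarget(JSSpanDep *sd)
{
    if (JT_HAS_TAG(sd->target))
        return JT_CLR_TAG(sd->target)->offset;  /* known jump target offset */
    return JT_TO_BPDELTA(sd->target);           /* pending backpatch delta */
}
#endif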

JSBool
js_SetJumpOffset(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc,
                 ptrdiff_t off)
{
    if (!cg->spanDeps) {
        if (JUMP_OFFSET_MIN <= off && off <= JUMP_OFFSET_MAX) {
            SET_JUMP_OFFSET(pc, off);
            return JS_TRUE;
        }

        if (!BuildSpanDepTable(cx, cg))
            return JS_FALSE;
    }

    return SetSpanDepTarget(cx, cg, GetSpanDep(cg, pc), off);
}

bool
JSTreeContext::inStatement(JSStmtType type)
{
    for (JSStmtInfo *stmt = topStmt; stmt; stmt = stmt->down) {
        if (stmt->type == type)
            return true;
    }
    return false;
}

bool
JSTreeContext::ensureSharpSlots()
{
#if JS_HAS_SHARP_VARS
    JS_STATIC_ASSERT(SHARP_NSLOTS == 2);

    if (sharpSlotBase >= 0) {
        JS_ASSERT(flags & TCF_HAS_SHARPS);
        return true;
    }

    JS_ASSERT(!(flags & TCF_HAS_SHARPS));
    if (inFunction()) {
        JSContext *cx = parser->context;
        JSAtom *sharpArrayAtom = js_Atomize(cx, "#array", 6, 0);
        JSAtom *sharpDepthAtom = js_Atomize(cx, "#depth", 6, 0);
        if (!sharpArrayAtom || !sharpDepthAtom)
            return false;

        sharpSlotBase = fun->u.i.nvars;
        if (!js_AddLocal(cx, fun, sharpArrayAtom, JSLOCAL_VAR))
            return false;
        if (!js_AddLocal(cx, fun, sharpDepthAtom, JSLOCAL_VAR))
            return false;
    } else {
        /*
         * Compiler::compileScript will rebase immediate operands indexing
         * the sharp slots to come at the end of the global script's |nfixed|
         * slots storage, after gvars and regexps.
         */
        sharpSlotBase = 0;
    }
    flags |= TCF_HAS_SHARPS;
#endif
    return true;
}

bool
JSTreeContext::skipSpansGenerator(unsigned skip)
{
    JSTreeContext *tc = this;
    for (unsigned i = 0; i < skip; ++i, tc = tc->parent) {
        if (!tc)
            return false;
        if (tc->flags & TCF_FUN_IS_GENERATOR)
            return true;
    }
    return false;
}

void
js_PushStatement(JSTreeContext *tc, JSStmtInfo *stmt, JSStmtType type,
                 ptrdiff_t top)
{
    stmt->type = type;
    stmt->flags = 0;
    stmt->blockid = tc->blockid();
    SET_STATEMENT_TOP(stmt, top);
    stmt->label = NULL;
    JS_ASSERT(!stmt->blockObj);
    stmt->down = tc->topStmt;
    tc->topStmt = stmt;
    if (STMT_LINKS_SCOPE(stmt)) {
        stmt->downScope = tc->topScopeStmt;
        tc->topScopeStmt = stmt;
    } else {
        stmt->downScope = NULL;
    }
}

void
js_PushBlockScope(JSTreeContext *tc, JSStmtInfo *stmt, JSObject *blockObj,
                  ptrdiff_t top)
{
    js_PushStatement(tc, stmt, STMT_BLOCK, top);
    stmt->flags |= SIF_SCOPE;
    blockObj->setParent(tc->blockChain);
    stmt->downScope = tc->topScopeStmt;
    tc->topScopeStmt = stmt;
    tc->blockChain = blockObj;
    stmt->blockObj = blockObj;
}

/*
 * Emit a backpatch op with offset pointing to the previous jump of this type,
 * so that we can walk back up the chain fixing up the op and jump offset.
 */
static ptrdiff_t
EmitBackPatchOp(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t *lastp)
{
    ptrdiff_t offset, delta;

    offset = CG_OFFSET(cg);
    delta = offset - *lastp;
    *lastp = offset;
    JS_ASSERT(delta > 0);
    return EmitJump(cx, cg, op, delta);
}
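
/*
 * Illustrative sketch (compiled out, editor's annotation): how a backpatch
 * chain built by EmitBackPatchOp is traversed. Each link's "jump offset" is
 * really the positive delta back to the previous JSOP_BACKPATCH, and a pc
 * that walks back to offset -1 (the chain's initial *lastp value) terminates
 * the chain, exactly as in BackPatch further below.
 */
#if 0
static uintN
CountBackPatchChain(JSCodeGenerator *cg, ptrdiff_t last)
{
    uintN n = 0;
    jsbytecode *pc = CG_CODE(cg, last);
    jsbytecode *stop = CG_CODE(cg, -1);

    while (pc != stop) {
        n++;
        pc -= GetJumpOffset(cg, pc);
    }
    return n;
}
#endif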

/*
 * Macro to emit a bytecode followed by a uint16 immediate operand stored in
 * big-endian order, used for arg and var numbers as well as for atomIndexes.
 * NB: We use cx and cg from our caller's lexical environment, and return
 * false on error.
 */
#define EMIT_UINT16_IMM_OP(op, i)                                             \
    JS_BEGIN_MACRO                                                            \
        if (js_Emit3(cx, cg, op, UINT16_HI(i), UINT16_LO(i)) < 0)             \
            return JS_FALSE;                                                  \
    JS_END_MACRO

#define EMIT_UINT16PAIR_IMM_OP(op, i, j)                                      \
    JS_BEGIN_MACRO                                                            \
        ptrdiff_t off_ = js_EmitN(cx, cg, op, 2 * UINT16_LEN);                \
        if (off_ < 0)                                                         \
            return JS_FALSE;                                                  \
        jsbytecode *pc_ = CG_CODE(cg, off_);                                  \
        SET_UINT16(pc_, i);                                                   \
        pc_ += UINT16_LEN;                                                    \
        SET_UINT16(pc_, j);                                                   \
    JS_END_MACRO
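
/*
 * Illustrative sketch (compiled out, editor's annotation): how the macros
 * above are meant to be used -- from a JSBool-returning emitter function with
 * cx and cg in scope, since the macros embed a |return JS_FALSE| on error.
 * Here, popping |n| values with a single JSOP_POPN, as FlushPops does below.
 */
#if 0
static JSBool
EmitPopN(JSContext *cx, JSCodeGenerator *cg, uintN n)
{
    EMIT_UINT16_IMM_OP(JSOP_POPN, n);   /* returns JS_FALSE on failure */
    return JS_TRUE;
}
#endif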

static JSBool
FlushPops(JSContext *cx, JSCodeGenerator *cg, intN *npops)
{
    JS_ASSERT(*npops != 0);
    if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
        return JS_FALSE;
    EMIT_UINT16_IMM_OP(JSOP_POPN, *npops);
    *npops = 0;
    return JS_TRUE;
}

/*
 * Emit additional bytecode(s) for non-local jumps.
 */
static JSBool
EmitNonLocalJumpFixup(JSContext *cx, JSCodeGenerator *cg, JSStmtInfo *toStmt)
{
    intN depth, npops;
    JSStmtInfo *stmt;

    /*
     * The non-local jump fixup we emit will unbalance cg->stackDepth, because
     * the fixup replicates balanced code such as JSOP_LEAVEWITH emitted at the
     * end of a with statement, so we save cg->stackDepth here and restore it
     * just before a successful return.
     */
    depth = cg->stackDepth;
    npops = 0;

#define FLUSH_POPS() if (npops && !FlushPops(cx, cg, &npops)) return JS_FALSE

    for (stmt = cg->topStmt; stmt != toStmt; stmt = stmt->down) {
        switch (stmt->type) {
          case STMT_FINALLY:
            FLUSH_POPS();
            if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
                return JS_FALSE;
            if (EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &GOSUBS(*stmt)) < 0)
                return JS_FALSE;
            break;

          case STMT_WITH:
            /* There's a With object on the stack that we need to pop. */
            FLUSH_POPS();
            if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
                return JS_FALSE;
            if (js_Emit1(cx, cg, JSOP_LEAVEWITH) < 0)
                return JS_FALSE;
            break;

          case STMT_FOR_IN_LOOP:
            /*
             * The iterator and the object being iterated need to be popped.
             */
            FLUSH_POPS();
            if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
                return JS_FALSE;
            if (js_Emit1(cx, cg, JSOP_ENDITER) < 0)
                return JS_FALSE;
            break;

          case STMT_SUBROUTINE:
            /*
             * There's an [exception or hole, retsub pc-index] pair on the
             * stack that we need to pop.
             */
            npops += 2;
            break;

          default:;
        }

        if (stmt->flags & SIF_SCOPE) {
            uintN i;

            /* There is a Block object with locals on the stack to pop. */
            FLUSH_POPS();
            if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
                return JS_FALSE;
            i = OBJ_BLOCK_COUNT(cx, stmt->blockObj);
            EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK, i);
        }
    }

    FLUSH_POPS();
    cg->stackDepth = depth;
    return JS_TRUE;
}

#undef FLUSH_POPS

static ptrdiff_t
EmitGoto(JSContext *cx, JSCodeGenerator *cg, JSStmtInfo *toStmt,
         ptrdiff_t *lastp, JSAtomListElement *label, JSSrcNoteType noteType)
{
    intN index;

    if (!EmitNonLocalJumpFixup(cx, cg, toStmt))
        return -1;

    if (label)
        index = js_NewSrcNote2(cx, cg, noteType, (ptrdiff_t) ALE_INDEX(label));
    else if (noteType != SRC_NULL)
        index = js_NewSrcNote(cx, cg, noteType);
    else
        index = 0;
    if (index < 0)
        return -1;

    return EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, lastp);
}

static JSBool
BackPatch(JSContext *cx, JSCodeGenerator *cg, ptrdiff_t last,
          jsbytecode *target, jsbytecode op)
{
    jsbytecode *pc, *stop;
    ptrdiff_t delta, span;

    pc = CG_CODE(cg, last);
    stop = CG_CODE(cg, -1);
    while (pc != stop) {
        delta = GetJumpOffset(cg, pc);
        span = target - pc;
        CHECK_AND_SET_JUMP_OFFSET(cx, cg, pc, span);

        /*
         * Set *pc after jump offset in case bpdelta didn't overflow, but span
         * does (if so, CHECK_AND_SET_JUMP_OFFSET might call BuildSpanDepTable
         * and need to see the JSOP_BACKPATCH* op at *pc).
         */
        *pc = op;
        pc -= delta;
    }
    return JS_TRUE;
}

void
js_PopStatement(JSTreeContext *tc)
{
    JSStmtInfo *stmt;

    stmt = tc->topStmt;
    tc->topStmt = stmt->down;
    if (STMT_LINKS_SCOPE(stmt)) {
        tc->topScopeStmt = stmt->downScope;
        if (stmt->flags & SIF_SCOPE) {
            tc->blockChain = stmt->blockObj->getParent();
            JS_SCOPE_DEPTH_METERING(--tc->scopeDepth);
        }
    }
}

JSBool
js_PopStatementCG(JSContext *cx, JSCodeGenerator *cg)
{
    JSStmtInfo *stmt;

    stmt = cg->topStmt;
    if (!STMT_IS_TRYING(stmt) &&
        (!BackPatch(cx, cg, stmt->breaks, CG_NEXT(cg), JSOP_GOTO) ||
         !BackPatch(cx, cg, stmt->continues, CG_CODE(cg, stmt->update),
                    JSOP_GOTO))) {
        return JS_FALSE;
    }
    js_PopStatement(cg);
    return JS_TRUE;
}

JSBool
js_DefineCompileTimeConstant(JSContext *cx, JSCodeGenerator *cg, JSAtom *atom,
                             JSParseNode *pn)
{
    /* XXX just do numbers for now */
    if (pn->pn_type == TOK_NUMBER) {
        if (!cg->constMap.put(atom, NumberValue(pn->pn_dval)))
            return JS_FALSE;
    }
    return JS_TRUE;
}

JSStmtInfo *
js_LexicalLookup(JSTreeContext *tc, JSAtom *atom, jsint *slotp, JSStmtInfo *stmt)
{
    JSObject *obj;
    JSScope *scope;
    JSScopeProperty *sprop;

    if (!stmt)
        stmt = tc->topScopeStmt;
    for (; stmt; stmt = stmt->downScope) {
        if (stmt->type == STMT_WITH)
            break;

        /* Skip "maybe scope" statements that don't contain let bindings. */
        if (!(stmt->flags & SIF_SCOPE))
            continue;

        obj = stmt->blockObj;
        JS_ASSERT(obj->getClass() == &js_BlockClass);
        scope = obj->scope();
        sprop = scope->lookup(ATOM_TO_JSID(atom));
        if (sprop) {
            JS_ASSERT(sprop->hasShortID());

            if (slotp) {
                JS_ASSERT(obj->fslots[JSSLOT_BLOCK_DEPTH].isInt32());
                *slotp = obj->fslots[JSSLOT_BLOCK_DEPTH].toInt32() +
                         sprop->shortid;
            }
            return stmt;
        }
    }

    if (slotp)
        *slotp = -1;
    return stmt;
}

/*
 * The function sets constp to JS_NO_CONSTANT when the atom does not correspond
 * to a name defining a constant.
 */
static JSBool
LookupCompileTimeConstant(JSContext *cx, JSCodeGenerator *cg, JSAtom *atom,
                          Value *constp)
{
    JSStmtInfo *stmt;
    JSObject *obj;

    /*
     * Chase down the cg stack, but only until we reach the outermost cg.
     * This enables propagating consts from top-level into switch cases in a
     * function compiled along with the top-level script.
     */
    constp->setMagic(JS_NO_CONSTANT);
    do {
        if (cg->inFunction() || cg->compileAndGo()) {
            /* XXX this will need revising if 'const' becomes block-scoped. */
            stmt = js_LexicalLookup(cg, atom, NULL);
            if (stmt)
                return JS_TRUE;

            if (JSCodeGenerator::ConstMap::Ptr p = cg->constMap.lookup(atom)) {
                JS_ASSERT(!p->value.isMagic(JS_NO_CONSTANT));
                *constp = p->value;
                return JS_TRUE;
            }

            /*
             * Try looking in the variable object for a direct property that
             * is readonly and permanent. We know such a property can't be
             * shadowed by another property on obj's prototype chain, or a
             * with object or catch variable; nor can prop's value be changed,
             * nor can prop be deleted.
             */
            if (cg->inFunction()) {
                if (js_LookupLocal(cx, cg->fun, atom, NULL) != JSLOCAL_NONE)
                    break;
            } else {
                JS_ASSERT(cg->compileAndGo());
                obj = cg->scopeChain;

                JS_LOCK_OBJ(cx, obj);
                JSScope *scope = obj->scope();
                JSScopeProperty *sprop = scope->lookup(ATOM_TO_JSID(atom));
                if (sprop) {
                    /*
                     * We're compiling code that will be executed immediately,
                     * not re-executed against a different scope chain and/or
                     * variable object. Therefore we can get constant values
                     * from our variable object here.
                     */
                    if (!sprop->writable() && !sprop->configurable() &&
                        sprop->hasDefaultGetter() && SPROP_HAS_VALID_SLOT(sprop, scope)) {
                        *constp = obj->lockedGetSlot(sprop->slot);
                    }
                }
                JS_UNLOCK_SCOPE(cx, scope);

                if (sprop)
                    break;
            }
        }
    } while ((cg = (JSCodeGenerator *) cg->parent) != NULL);
    return JS_TRUE;
}

/*
 * Return JSOP_NOP to indicate that index fits 2 bytes and no index segment
 * reset instruction is necessary, JSOP_FALSE to indicate an error, or either
 * JSOP_RESETBASE0 or JSOP_RESETBASE to indicate the reset bytecode to issue
 * after the main bytecode sequence.
 */
static JSOp
EmitBigIndexPrefix(JSContext *cx, JSCodeGenerator *cg, uintN index)
{
    uintN indexBase;

    /*
     * We have max 3 bytes for indexes and check for INDEX_LIMIT overflow only
     * for big indexes.
     */
    JS_STATIC_ASSERT(INDEX_LIMIT <= JS_BIT(24));
    JS_STATIC_ASSERT(INDEX_LIMIT >=
                     (JSOP_INDEXBASE3 - JSOP_INDEXBASE1 + 2) << 16);

    if (index < JS_BIT(16))
        return JSOP_NOP;
    indexBase = index >> 16;
    if (indexBase <= JSOP_INDEXBASE3 - JSOP_INDEXBASE1 + 1) {
        if (js_Emit1(cx, cg, (JSOp)(JSOP_INDEXBASE1 + indexBase - 1)) < 0)
            return JSOP_FALSE;
        return JSOP_RESETBASE0;
    }

    if (index >= INDEX_LIMIT) {
        JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL,
                             JSMSG_TOO_MANY_LITERALS);
        return JSOP_FALSE;
    }

    if (js_Emit2(cx, cg, JSOP_INDEXBASE, (JSOp)indexBase) < 0)
        return JSOP_FALSE;
    return JSOP_RESETBASE;
}
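
/*
 * Illustrative sketch (compiled out, editor's annotation): the segment
 * decomposition used above. A sub-INDEX_LIMIT (24-bit) constant index splits
 * into an 8-bit segment (the prefix op's operand) and a 16-bit immediate
 * carried by the prefixed opcode itself; segment 0 needs no prefix.
 */
#if 0
static void
SplitBigIndex(uintN index, uintN *segment, uintN *immediate)
{
    JS_ASSERT(index < INDEX_LIMIT);
    *segment = index >> 16;         /* 0 means no prefix op is needed */
    *immediate = index & 0xffff;    /* stored via EMIT_UINT16_IMM_OP */
}
#endif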

/*
 * Emit a bytecode and its 2-byte constant index immediate operand. If the
 * index requires more than 2 bytes, emit a prefix op whose 8-bit immediate
 * operand effectively extends the 16-bit immediate of the prefixed opcode,
 * by changing index "segment" (see jsinterp.c). We optimize segments 1-3
 * with single-byte JSOP_INDEXBASE[123] codes.
 *
 * Such prefixing currently requires a suffix to restore the "zero segment"
 * register setting, but this could be optimized further.
 */
static JSBool
EmitIndexOp(JSContext *cx, JSOp op, uintN index, JSCodeGenerator *cg)
{
    JSOp bigSuffix;

    bigSuffix = EmitBigIndexPrefix(cx, cg, index);
    if (bigSuffix == JSOP_FALSE)
        return JS_FALSE;
    EMIT_UINT16_IMM_OP(op, index);
    return bigSuffix == JSOP_NOP || js_Emit1(cx, cg, bigSuffix) >= 0;
}

/*
 * Slight sugar for EmitIndexOp, again accessing cx and cg from the macro
 * caller's lexical environment, and embedding a false return on error.
 */
#define EMIT_INDEX_OP(op, index)                                              \
    JS_BEGIN_MACRO                                                            \
        if (!EmitIndexOp(cx, op, index, cg))                                  \
            return JS_FALSE;                                                  \
    JS_END_MACRO

static JSBool
EmitAtomOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
{
    JSAtomListElement *ale;

    JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
    if (op == JSOP_GETPROP &&
        pn->pn_atom == cx->runtime->atomState.lengthAtom) {
        return js_Emit1(cx, cg, JSOP_LENGTH) >= 0;
    }
    ale = cg->atomList.add(cg->parser, pn->pn_atom);
    if (!ale)
        return JS_FALSE;
    return EmitIndexOp(cx, op, ALE_INDEX(ale), cg);
}

static JSBool
EmitObjectOp(JSContext *cx, JSObjectBox *objbox, JSOp op,
             JSCodeGenerator *cg)
{
    JS_ASSERT(JOF_OPTYPE(op) == JOF_OBJECT);
    return EmitIndexOp(cx, op, cg->objectList.index(objbox), cg);
}

/*
 * What good are ARGNO_LEN and SLOTNO_LEN, you ask?  The answer is that, apart
 * from EmitSlotIndexOp, they abstract out the detail that both are 2, and in
 * other parts of the code there's no necessary relationship between the two.
 * The abstraction cracks here in order to share EmitSlotIndexOp code among
 * the JSOP_DEFLOCALFUN and JSOP_GET{ARG,VAR,LOCAL}PROP cases.
 */
JS_STATIC_ASSERT(ARGNO_LEN == 2);
JS_STATIC_ASSERT(SLOTNO_LEN == 2);

static JSBool
EmitSlotIndexOp(JSContext *cx, JSOp op, uintN slot, uintN index,
                JSCodeGenerator *cg)
{
    JSOp bigSuffix;
    ptrdiff_t off;
    jsbytecode *pc;

    JS_ASSERT(JOF_OPTYPE(op) == JOF_SLOTATOM ||
              JOF_OPTYPE(op) == JOF_SLOTOBJECT);
    bigSuffix = EmitBigIndexPrefix(cx, cg, index);
    if (bigSuffix == JSOP_FALSE)
        return JS_FALSE;

    /* Emit [op, slot, index]. */
    off = js_EmitN(cx, cg, op, 2 + INDEX_LEN);
    if (off < 0)
        return JS_FALSE;
    pc = CG_CODE(cg, off);
    SET_UINT16(pc, slot);
    pc += 2;
    SET_INDEX(pc, index);
    return bigSuffix == JSOP_NOP || js_Emit1(cx, cg, bigSuffix) >= 0;
}

/*
 * Adjust the slot for a block local to account for the number of variables
 * that share the same index space with locals. Due to the incremental code
 * generation for top-level script, we do the adjustment via code patching in
 * Compiler::compileScript; see comments there.
 *
 * The function returns -1 on failure.
 */
static jsint
AdjustBlockSlot(JSContext *cx, JSCodeGenerator *cg, jsint slot)
{
    JS_ASSERT((jsuint) slot < cg->maxStackDepth);
    if (cg->inFunction()) {
        slot += cg->fun->u.i.nvars;
        if ((uintN) slot >= SLOTNO_LIMIT) {
            ReportCompileErrorNumber(cx, CG_TS(cg), NULL, JSREPORT_ERROR, JSMSG_TOO_MANY_LOCALS);
            slot = -1;
        }
    }
    return slot;
}

static bool
EmitEnterBlock(JSContext *cx, JSParseNode *pn, JSCodeGenerator *cg)
{
    JS_ASSERT(PN_TYPE(pn) == TOK_LEXICALSCOPE);
    if (!EmitObjectOp(cx, pn->pn_objbox, JSOP_ENTERBLOCK, cg))
        return false;

    JSObject *blockObj = pn->pn_objbox->object;
    jsint depth = AdjustBlockSlot(cx, cg, OBJ_BLOCK_DEPTH(cx, blockObj));
    if (depth < 0)
        return false;

    uintN base = JSSLOT_FREE(&js_BlockClass);
    for (uintN slot = base, limit = base + OBJ_BLOCK_COUNT(cx, blockObj); slot < limit; slot++) {
        const Value &v = blockObj->getSlot(slot);

        /* Beware the empty destructuring dummy. */
        if (v.isUndefined()) {
            JS_ASSERT(slot + 1 <= limit);
            continue;
        }

        JSDefinition *dn = (JSDefinition *) v.toPrivate();
        JS_ASSERT(dn->pn_defn);
        JS_ASSERT(uintN(dn->frameSlot() + depth) < JS_BIT(16));
        dn->pn_cookie.set(dn->pn_cookie.level(), dn->frameSlot() + depth);
#ifdef DEBUG
        for (JSParseNode *pnu = dn->dn_uses; pnu; pnu = pnu->pn_link) {
            JS_ASSERT(pnu->pn_lexdef == dn);
            JS_ASSERT(!(pnu->pn_dflags & PND_BOUND));
            JS_ASSERT(pnu->pn_cookie.isFree());
        }
#endif
    }

    blockObj->scope()->freeslot = base;
    return blockObj->growSlots(cx, base);
}

/*
 * When eval is called from a function, the eval code or function code it
 * compiles may reference upvars that live in the eval-calling function. The
 * eval-invoked compiler does not have explicit definitions for these upvars
 * and we do not attempt to create them a-priori (by inspecting the function's
 * args and vars) -- we could, but we'd take an avoidable penalty for each
 * function local not referenced by any upvar. Instead, we map such upvars
 * lazily, growing upvarMap.vector by powers of two.
 *
 * This function knows that it is called with pn pointing to a PN_NAME-arity
 * node, and cg->parser->callerFrame having a non-null fun member, and the
 * static level of cg at least one greater than the eval-calling function's
 * static level.
 */
static bool
MakeUpvarForEval(JSParseNode *pn, JSCodeGenerator *cg)
{
    JSContext *cx = cg->parser->context;
    JSFunction *fun = cg->parser->callerFrame->fun;
    uintN upvarLevel = fun->u.i.script->staticLevel;

    JSFunctionBox *funbox = cg->funbox;
    if (funbox) {
        /*
         * Treat top-level function definitions as escaping (i.e., as funargs),
         * required since we compile each such top level function or statement
         * and throw away the AST, so we can't yet see all funarg uses of this
         * function being compiled (cg->funbox->object). See bug 493177.
         */
        if (funbox->level == fun->u.i.script->staticLevel + 1U &&
            !(((JSFunction *) funbox->object)->flags & JSFUN_LAMBDA)) {
            JS_ASSERT_IF(cx->options & JSOPTION_ANONFUNFIX,
                         ((JSFunction *) funbox->object)->atom);
            return true;
        }

        while (funbox->level >= upvarLevel) {
            if (funbox->node->pn_dflags & PND_FUNARG)
                return true;
            funbox = funbox->parent;
            if (!funbox)
                break;
        }
    }

    JSAtom *atom = pn->pn_atom;

    uintN index;
    JSLocalKind localKind = js_LookupLocal(cx, fun, atom, &index);
    if (localKind == JSLOCAL_NONE)
        return true;

    JS_ASSERT(cg->staticLevel > upvarLevel);
    if (cg->staticLevel >= UpvarCookie::UPVAR_LEVEL_LIMIT)
        return true;

    JSAtomListElement *ale = cg->upvarList.lookup(atom);
    if (!ale) {
        if (cg->inFunction() &&
            !js_AddLocal(cx, cg->fun, atom, JSLOCAL_UPVAR)) {
            return false;
        }

        ale = cg->upvarList.add(cg->parser, atom);
        if (!ale)
            return false;
        JS_ASSERT(ALE_INDEX(ale) == cg->upvarList.count - 1);

        UpvarCookie *vector = cg->upvarMap.vector;
        uint32 length = cg->upvarMap.length;

        JS_ASSERT(ALE_INDEX(ale) <= length);
        if (ALE_INDEX(ale) == length) {
            length = 2 * JS_MAX(2, length);
            vector = reinterpret_cast<UpvarCookie *>(cx->realloc(vector, length * sizeof *vector));
            if (!vector)
                return false;
            cg->upvarMap.vector = vector;
            cg->upvarMap.length = length;
        }

        if (localKind != JSLOCAL_ARG)
            index += fun->nargs;
        JS_ASSERT(index < JS_BIT(16));

        uintN skip = cg->staticLevel - upvarLevel;
        vector[ALE_INDEX(ale)].set(skip, index);
    }

    pn->pn_op = JSOP_GETUPVAR;
    pn->pn_cookie.set(cg->staticLevel, ALE_INDEX(ale));
    pn->pn_dflags |= PND_BOUND;
    return true;
}
1955 * BindNameToSlot attempts to optimize name gets and sets to stack slot loads
1956 * and stores, given the compile-time information in cg and a TOK_NAME node pn.
1957 * It returns false on error, true on success.
1959 * The caller can inspect pn->pn_cookie for FREE_UPVAR_COOKIE to tell whether
1960 * optimization occurred, in which case BindNameToSlot also updated pn->pn_op.
1961 * If pn->pn_cookie is still FREE_UPVAR_COOKIE on return, pn->pn_op still may
1962 * have been optimized, e.g., from JSOP_NAME to JSOP_CALLEE. Whether or not
1963 * pn->pn_op was modified, if this function finds an argument or local variable
1964 * name, PND_CONST will be set in pn_dflags for read-only properties after a
1965 * successful return.
1967 * NB: if you add more opcodes specialized from JSOP_NAME, etc., don't forget
1968 * to update the TOK_FOR (for-in) and TOK_ASSIGN (op=, e.g. +=) special cases
1969 * in js_EmitTree.
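/*
 * Illustrative sketch (example not from the original source): compiling
 *
 *   function f(a) { var b = a; return b; }
 *
 * BindNameToSlot rewrites the uses so a emits JSOP_GETARG 0 and b emits
 * JSOP_GETLOCAL 0, avoiding JSOP_NAME's runtime scope-chain lookups.
 */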
1971 static JSBool
1972 BindNameToSlot(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
1974 JSDefinition *dn;
1975 JSOp op;
1976 JSAtom *atom;
1977 JSDefinition::Kind dn_kind;
1978 JSAtomListElement *ale;
1979 uintN index;
1981 JS_ASSERT(pn->pn_type == TOK_NAME);
1983 /* Idempotency tests come first, since we may be called more than once. */
1984 if (pn->pn_dflags & PND_BOUND)
1985 return JS_TRUE;
1987 /* No cookie initialized for these two, they're pre-bound by definition. */
1988 JS_ASSERT(pn->pn_op != JSOP_ARGUMENTS && pn->pn_op != JSOP_CALLEE);
1991 * The parser linked all uses (including forward references) to their
1992 * definitions, unless a with statement or direct eval intervened.
1994 if (pn->pn_used) {
1995 JS_ASSERT(pn->pn_cookie.isFree());
1996 dn = pn->pn_lexdef;
1997 JS_ASSERT(dn->pn_defn);
1998 if (pn->isDeoptimized())
1999 return JS_TRUE;
2000 pn->pn_dflags |= (dn->pn_dflags & PND_CONST);
2001 } else {
2002 if (!pn->pn_defn)
2003 return JS_TRUE;
2004 dn = (JSDefinition *) pn;
2007 op = PN_OP(pn);
2008 if (op == JSOP_NOP)
2009 return JS_TRUE;
2011 JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
2012 atom = pn->pn_atom;
2013 UpvarCookie cookie = dn->pn_cookie;
2014 dn_kind = dn->kind();
2017 * Turn attempts to mutate const-declared bindings into get ops (for
2018 * pre-increment and pre-decrement ops, our caller will have to emit
2019 * JSOP_POS, JSOP_ONE, and JSOP_ADD as well).
2021 * Turn JSOP_DELNAME into JSOP_FALSE if dn is known, as all declared
2022 * bindings visible to the compiler are permanent in JS unless the
2023 * declaration originates in eval code. We detect eval code by testing
2024 * cg->parser->callerFrame, which is set only by eval or a debugger
2025 * equivalent.
2027 * Note that this callerFrame non-null test must be qualified by testing
2028 * !cg->funbox to exclude function code nested in eval code, which is not
2029 * subject to the deletable binding exception.
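/*
 * Illustrative sketches (examples not from the original source):
 *
 *   const k = 1; k = 2;    // the assignment to k compiles as a get op
 *   var x; delete x;       // JSOP_DELNAME becomes JSOP_FALSE (not in eval)
 */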
2031 switch (op) {
2032 case JSOP_NAME:
2033 case JSOP_SETCONST:
2034 break;
2035 case JSOP_DELNAME:
2036 if (dn_kind != JSDefinition::UNKNOWN) {
2037 if (cg->parser->callerFrame && !cg->funbox)
2038 JS_ASSERT(cg->compileAndGo());
2039 else
2040 pn->pn_op = JSOP_FALSE;
2041 pn->pn_dflags |= PND_BOUND;
2042 return JS_TRUE;
2044 break;
2045 default:
2046 if (pn->isConst())
2047 pn->pn_op = op = JSOP_NAME;
2050 if (cookie.isFree()) {
2051 JSStackFrame *caller = cg->parser->callerFrame;
2052 if (caller) {
2053 JS_ASSERT(cg->compileAndGo());
2056 * Don't generate upvars on the left side of a for loop. See
2057 * bug 470758.
2059 if (cg->flags & TCF_IN_FOR_INIT)
2060 return JS_TRUE;
2062 JS_ASSERT(caller->script);
2063 if (!caller->fun)
2064 return JS_TRUE;
2067 * Make sure the variable object used by the compiler to initialize
2068 * parent links matches the caller's varobj. Compile-n-go compiler-
2069 * created function objects have the top-level cg's scopeChain set
2070 * as their parent by Parser::newFunction.
2072 JSObject *scopeobj = cg->inFunction()
2073 ? FUN_OBJECT(cg->fun)->getParent()
2074 : cg->scopeChain;
2075 if (scopeobj != cg->parser->callerVarObj)
2076 return JS_TRUE;
2079 * We are compiling eval or debug script inside a function frame
2080 * and the scope chain matches the function's variable object.
2081 * Optimize access to the function's arguments, variables, and
2082 * arguments object.
2084 if (op != JSOP_NAME)
2085 return JS_TRUE;
2088 * Generator functions may be resumed from any call stack, which
2089 * defeats the display optimization to static link searching used
2090 * by JSOP_{GET,CALL}UPVAR.
2092 JSFunction *fun = cg->parser->callerFrame->fun;
2093 JS_ASSERT(cg->staticLevel >= fun->u.i.script->staticLevel);
2094 unsigned skip = cg->staticLevel - fun->u.i.script->staticLevel;
2095 if (cg->skipSpansGenerator(skip))
2096 return JS_TRUE;
2098 return MakeUpvarForEval(pn, cg);
2100 return JS_TRUE;
2103 if (dn->pn_dflags & PND_GVAR) {
2105 * If this is a global reference from within a function, leave pn_op as
2106 * JSOP_NAME, etc. We could emit JSOP_*GVAR ops within function code if
2107 * only we could depend on the global frame's slots being valid for all
2108 * calls to the function, and if we could equate the atom index in the
2109 * function's atom map for every global name with its frame slot.
2111 if (cg->inFunction())
2112 return JS_TRUE;
2115 * We are optimizing global variables and there may be no pre-existing
2116 * global property named atom when this global script runs. If atom was
2117 * declared via const or var, optimize pn to access fp->vars using the
2118 * appropriate JSOP_*GVAR op.
2120 * FIXME: should be able to optimize global function access too.
2122 JS_ASSERT(dn_kind == JSDefinition::VAR || dn_kind == JSDefinition::CONST);
2124 switch (op) {
2125 case JSOP_NAME: op = JSOP_GETGVAR; break;
2126 case JSOP_SETNAME: op = JSOP_SETGVAR; break;
2127 case JSOP_SETCONST: /* NB: no change */ break;
2128 case JSOP_INCNAME: op = JSOP_INCGVAR; break;
2129 case JSOP_NAMEINC: op = JSOP_GVARINC; break;
2130 case JSOP_DECNAME: op = JSOP_DECGVAR; break;
2131 case JSOP_NAMEDEC: op = JSOP_GVARDEC; break;
2132 case JSOP_FORNAME: /* NB: no change */ break;
2133 case JSOP_DELNAME: /* NB: no change */ break;
2134 default: JS_NOT_REACHED("gvar");
2136 pn->pn_op = op;
2137 pn->pn_cookie.set(cookie);
2138 pn->pn_dflags |= PND_BOUND;
2139 return JS_TRUE;
2142 uintN level = cookie.level();
2143 JS_ASSERT(cg->staticLevel >= level);
2146 * A JSDefinition witnessed as a declaration by the parser cannot be an
2147 * upvar, unless it is the degenerate kind of upvar selected above (in the
2148 * code before the PND_GVAR test) for the special case of compile-and-go
2149 * code generated from eval called from a function, where the eval code
2150 * uses local vars defined in the function. We detect this upvar-for-eval
2151 * case by checking dn's op.
2153 if (PN_OP(dn) == JSOP_GETUPVAR) {
2154 JS_ASSERT(cg->staticLevel >= level);
2155 if (op != JSOP_NAME)
2156 return JS_TRUE;
2158 #ifdef DEBUG
2159 JSStackFrame *caller = cg->parser->callerFrame;
2160 #endif
2161 JS_ASSERT(caller);
2162 JS_ASSERT(caller->script);
2164 JSTreeContext *tc = cg;
2165 while (tc->staticLevel != level)
2166 tc = tc->parent;
2167 JS_ASSERT(tc->compiling());
2169 JSCodeGenerator *evalcg = (JSCodeGenerator *) tc;
2170 JS_ASSERT(evalcg->compileAndGo());
2171 JS_ASSERT(caller->fun && cg->parser->callerVarObj == evalcg->scopeChain);
2174 * Don't generate upvars on the left side of a for loop. See
2175 * bug 470758 and bug 520513.
2177 if (evalcg->flags & TCF_IN_FOR_INIT)
2178 return JS_TRUE;
2180 if (cg->staticLevel == level) {
2181 pn->pn_op = JSOP_GETUPVAR;
2182 pn->pn_cookie = cookie;
2183 pn->pn_dflags |= PND_BOUND;
2184 return JS_TRUE;
2187 return MakeUpvarForEval(pn, cg);
2190 const uintN skip = cg->staticLevel - level;
2191 if (skip != 0) {
2192 JS_ASSERT(cg->inFunction());
2193 JS_ASSERT_IF(cookie.slot() != UpvarCookie::CALLEE_SLOT, cg->lexdeps.lookup(atom));
2194 JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
2195 JS_ASSERT(cg->fun->u.i.skipmin <= skip);
2198 * If op is a mutating opcode, this upvar's lookup skips too many levels,
2199 * or the function is heavyweight, we fall back on JSOP_*NAME*.
2201 if (op != JSOP_NAME)
2202 return JS_TRUE;
2203 if (level >= UpvarCookie::UPVAR_LEVEL_LIMIT)
2204 return JS_TRUE;
2205 if (cg->flags & TCF_FUN_HEAVYWEIGHT)
2206 return JS_TRUE;
2208 if (FUN_FLAT_CLOSURE(cg->fun)) {
2209 op = JSOP_GETDSLOT;
2210 } else {
2212 * The function we're compiling may not be heavyweight, but if it
2213 * escapes as a funarg, we can't use JSOP_GETUPVAR/JSOP_CALLUPVAR.
2214 * Parser::analyzeFunctions has arranged for this function's
2215 * enclosing functions to be heavyweight, so we can safely stick
2216 * with JSOP_NAME/JSOP_CALLNAME.
2218 if (cg->funbox->node->pn_dflags & PND_FUNARG)
2219 return JS_TRUE;
2222 * Generator functions may be resumed from any call stack, which
2223 * defeats the display optimization to static link searching used
2224 * by JSOP_{GET,CALL}UPVAR.
2226 if (cg->skipSpansGenerator(skip))
2227 return JS_TRUE;
2229 op = JSOP_GETUPVAR;
2232 ale = cg->upvarList.lookup(atom);
2233 if (ale) {
2234 index = ALE_INDEX(ale);
2235 } else {
2236 if (!js_AddLocal(cx, cg->fun, atom, JSLOCAL_UPVAR))
2237 return JS_FALSE;
2239 ale = cg->upvarList.add(cg->parser, atom);
2240 if (!ale)
2241 return JS_FALSE;
2242 index = ALE_INDEX(ale);
2243 JS_ASSERT(index == cg->upvarList.count - 1);
2245 UpvarCookie *vector = cg->upvarMap.vector;
2246 if (!vector) {
2247 uint32 length = cg->lexdeps.count;
2249 vector = (UpvarCookie *) js_calloc(length * sizeof *vector);
2250 if (!vector) {
2251 JS_ReportOutOfMemory(cx);
2252 return JS_FALSE;
2254 cg->upvarMap.vector = vector;
2255 cg->upvarMap.length = length;
2258 uintN slot = cookie.slot();
2259 if (slot != UpvarCookie::CALLEE_SLOT && dn_kind != JSDefinition::ARG) {
2260 JSTreeContext *tc = cg;
2261 do {
2262 tc = tc->parent;
2263 } while (tc->staticLevel != level);
2264 if (tc->inFunction())
2265 slot += tc->fun->nargs;
2268 vector[index].set(skip, slot);
2271 pn->pn_op = op;
2272 JS_ASSERT((index & JS_BITMASK(16)) == index);
2273 pn->pn_cookie.set(0, index);
2274 pn->pn_dflags |= PND_BOUND;
2275 return JS_TRUE;
2279 * We are compiling a function body and may be able to optimize a
2280 * name to a stack slot. Look for an argument or variable in the
2281 * function and rewrite pn_op and update pn accordingly.
2283 switch (dn_kind) {
2284 case JSDefinition::UNKNOWN:
2285 return JS_TRUE;
2287 case JSDefinition::LET:
2288 switch (op) {
2289 case JSOP_NAME: op = JSOP_GETLOCAL; break;
2290 case JSOP_SETNAME: op = JSOP_SETLOCAL; break;
2291 case JSOP_INCNAME: op = JSOP_INCLOCAL; break;
2292 case JSOP_NAMEINC: op = JSOP_LOCALINC; break;
2293 case JSOP_DECNAME: op = JSOP_DECLOCAL; break;
2294 case JSOP_NAMEDEC: op = JSOP_LOCALDEC; break;
2295 case JSOP_FORNAME: op = JSOP_FORLOCAL; break;
2296 default: JS_NOT_REACHED("let");
2298 break;
2300 case JSDefinition::ARG:
2301 switch (op) {
2302 case JSOP_NAME: op = JSOP_GETARG; break;
2303 case JSOP_SETNAME: op = JSOP_SETARG; break;
2304 case JSOP_INCNAME: op = JSOP_INCARG; break;
2305 case JSOP_NAMEINC: op = JSOP_ARGINC; break;
2306 case JSOP_DECNAME: op = JSOP_DECARG; break;
2307 case JSOP_NAMEDEC: op = JSOP_ARGDEC; break;
2308 case JSOP_FORNAME: op = JSOP_FORARG; break;
2309 default: JS_NOT_REACHED("arg");
2311 JS_ASSERT(!pn->isConst());
2312 break;
2314 case JSDefinition::VAR:
2315 if (PN_OP(dn) == JSOP_CALLEE) {
2316 JS_ASSERT(op != JSOP_CALLEE);
2317 JS_ASSERT((cg->fun->flags & JSFUN_LAMBDA) && atom == cg->fun->atom);
2320 * Leave pn->pn_op == JSOP_NAME if cg->fun is heavyweight, as we
2321 * cannot be sure cg->fun is not something of the form:
2323 * var ff = (function f(s) { eval(s); return f; });
2325 * where a caller invokes ff("var f = 42"). The result returned for
2326 * such an invocation must be 42, since the callee name is
2327 * lexically bound in an outer declarative environment from the
2328 * function's activation. See jsfun.cpp:call_resolve.
2330 JS_ASSERT(op != JSOP_DELNAME);
2331 if (!(cg->flags & TCF_FUN_HEAVYWEIGHT)) {
2332 op = JSOP_CALLEE;
2333 pn->pn_dflags |= PND_CONST;
2336 pn->pn_op = op;
2337 pn->pn_dflags |= PND_BOUND;
2338 return JS_TRUE;
2340 /* FALL THROUGH */
2342 default:
2343 JS_ASSERT_IF(dn_kind != JSDefinition::FUNCTION,
2344 dn_kind == JSDefinition::VAR ||
2345 dn_kind == JSDefinition::CONST);
2346 switch (op) {
2347 case JSOP_NAME: op = JSOP_GETLOCAL; break;
2348 case JSOP_SETNAME: op = JSOP_SETLOCAL; break;
2349 case JSOP_SETCONST: op = JSOP_SETLOCAL; break;
2350 case JSOP_INCNAME: op = JSOP_INCLOCAL; break;
2351 case JSOP_NAMEINC: op = JSOP_LOCALINC; break;
2352 case JSOP_DECNAME: op = JSOP_DECLOCAL; break;
2353 case JSOP_NAMEDEC: op = JSOP_LOCALDEC; break;
2354 case JSOP_FORNAME: op = JSOP_FORLOCAL; break;
2355 default: JS_NOT_REACHED("local");
2357 JS_ASSERT_IF(dn_kind == JSDefinition::CONST, pn->pn_dflags & PND_CONST);
2358 break;
2361 JS_ASSERT(op != PN_OP(pn));
2362 pn->pn_op = op;
2363 pn->pn_cookie.set(0, cookie.slot());
2364 pn->pn_dflags |= PND_BOUND;
2365 return JS_TRUE;
2369 * If pn contains a useful expression, return true with *answer set to true.
2370 * If pn contains a useless expression, return true with *answer set to false.
2371 * Return false on error.
2373 * The caller should initialize *answer to false and invoke this function on
2374 * an expression statement or similar subtree to decide whether the tree could
2375 * produce code that has any side effects. For an expression statement, we
2376 * define useless code as code with no side effects, because the main effect,
2377 * the value left on the stack after the code executes, will be discarded by a
2378 * pop bytecode.
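/*
 * Illustrative sketch (example not from the original source): in
 *
 *   function f(a) { a; a.b; }
 *
 * the statement a; binds to an argument slot and cannot entrain effects,
 * so *answer stays false and the caller can warn about useless code;
 * a.b; could invoke a getter, so *answer is set to true.
 */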
2380 static JSBool
2381 CheckSideEffects(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
2382 JSBool *answer)
2384 JSBool ok;
2385 JSParseNode *pn2;
2387 ok = JS_TRUE;
2388 if (!pn || *answer)
2389 return ok;
2391 switch (pn->pn_arity) {
2392 case PN_FUNC:
2394 * A named function, contrary to ES3, is no longer useful, because we
2395 * bind its name lexically (using JSOP_CALLEE) instead of creating an
2396 * Object instance and binding a readonly, permanent property in it
2397 * (the object and binding can be detected and hijacked or captured).
2398 * This is a bug fix to ES3; it is fixed in ES3.1 drafts.
2400 *answer = JS_FALSE;
2401 break;
2403 case PN_LIST:
2404 if (pn->pn_op == JSOP_NOP ||
2405 pn->pn_op == JSOP_OR || pn->pn_op == JSOP_AND ||
2406 pn->pn_op == JSOP_STRICTEQ || pn->pn_op == JSOP_STRICTNE) {
2408 * Non-operators along with ||, &&, ===, and !== never invoke
2409 * toString or valueOf.
2411 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next)
2412 ok &= CheckSideEffects(cx, cg, pn2, answer);
2413 } else {
2415 * All invocation operations (construct: TOK_NEW, call: TOK_LP)
2416 * are presumed to be useful, because they may have side effects
2417 * even if their main effect (their return value) is discarded.
2419 * TOK_LB binary trees of 3 or more nodes are flattened into lists
2420 * to avoid too much recursion. All such lists must be presumed
2421 * to be useful because each index operation could invoke a getter
2422 * (the JSOP_ARGUMENTS special case below, in the PN_NAME case,
2423 * does not apply here: arguments[i][j] might invoke a getter).
2425 * Likewise, array and object initialisers may call prototype
2426 * setters (the __defineSetter__ built-in, and writable __proto__
2427 * on Array.prototype create this hazard). Initialiser list nodes
2428 * have JSOP_NEWINIT in their pn_op.
2430 *answer = JS_TRUE;
2432 break;
2434 case PN_TERNARY:
2435 ok = CheckSideEffects(cx, cg, pn->pn_kid1, answer) &&
2436 CheckSideEffects(cx, cg, pn->pn_kid2, answer) &&
2437 CheckSideEffects(cx, cg, pn->pn_kid3, answer);
2438 break;
2440 case PN_BINARY:
2441 if (pn->pn_type == TOK_ASSIGN) {
2443 * Assignment is presumed to be useful, even if the next operation
2444 * is another assignment overwriting this one's ostensible effect,
2445 * because the left operand may be a property with a setter that
2446 * has side effects.
2448 * The only exception is assignment of a useless value to a const
2449 * declared in the function currently being compiled.
2451 pn2 = pn->pn_left;
2452 if (pn2->pn_type != TOK_NAME) {
2453 *answer = JS_TRUE;
2454 } else {
2455 if (!BindNameToSlot(cx, cg, pn2))
2456 return JS_FALSE;
2457 if (!CheckSideEffects(cx, cg, pn->pn_right, answer))
2458 return JS_FALSE;
2459 if (!*answer && (pn->pn_op != JSOP_NOP || !pn2->isConst()))
2460 *answer = JS_TRUE;
2462 } else {
2463 if (pn->pn_op == JSOP_OR || pn->pn_op == JSOP_AND ||
2464 pn->pn_op == JSOP_STRICTEQ || pn->pn_op == JSOP_STRICTNE) {
2466 * ||, &&, ===, and !== do not convert their operands via
2467 * toString or valueOf method calls.
2469 ok = CheckSideEffects(cx, cg, pn->pn_left, answer) &&
2470 CheckSideEffects(cx, cg, pn->pn_right, answer);
2471 } else {
2473 * We can't easily prove that neither operand ever denotes an
2474 * object with a toString or valueOf method.
2476 *answer = JS_TRUE;
2479 break;
2481 case PN_UNARY:
2482 switch (pn->pn_type) {
2483 case TOK_DELETE:
2484 pn2 = pn->pn_kid;
2485 switch (pn2->pn_type) {
2486 case TOK_NAME:
2487 if (!BindNameToSlot(cx, cg, pn2))
2488 return JS_FALSE;
2489 if (pn2->isConst()) {
2490 *answer = JS_FALSE;
2491 break;
2493 /* FALL THROUGH */
2494 case TOK_DOT:
2495 #if JS_HAS_XML_SUPPORT
2496 case TOK_DBLDOT:
2497 #endif
2498 case TOK_LP:
2499 case TOK_LB:
2500 /* All these delete addressing modes have effects too. */
2501 *answer = JS_TRUE;
2502 break;
2503 default:
2504 ok = CheckSideEffects(cx, cg, pn2, answer);
2505 break;
2507 break;
2509 case TOK_UNARYOP:
2510 if (pn->pn_op == JSOP_NOT) {
2511 /* ! does not convert its operand via toString or valueOf. */
2512 ok = CheckSideEffects(cx, cg, pn->pn_kid, answer);
2513 break;
2515 /* FALL THROUGH */
2517 default:
2519 * All of TOK_INC, TOK_DEC, TOK_THROW, TOK_YIELD, and TOK_DEFSHARP
2520 * have direct effects. Of the remaining unary-arity node types,
2521 * we can't easily prove that the operand never denotes an object
2522 * with a toString or valueOf method.
2524 *answer = JS_TRUE;
2525 break;
2527 break;
2529 case PN_NAME:
2531 * Take care to avoid trying to bind a label name (labels, both for
2532 * statements and property values in object initialisers, have pn_op
2533 * defaulted to JSOP_NOP).
2535 if (pn->pn_type == TOK_NAME && pn->pn_op != JSOP_NOP) {
2536 if (!BindNameToSlot(cx, cg, pn))
2537 return JS_FALSE;
2538 if (pn->pn_op != JSOP_ARGUMENTS && pn->pn_op != JSOP_CALLEE &&
2539 pn->pn_cookie.isFree()) {
2541 * Not an argument or local variable use, and not a use of an
2542 * unshadowed named function expression's given name, so this
2543 * expression could invoke a getter that has side effects.
2545 *answer = JS_TRUE;
2548 pn2 = pn->maybeExpr();
2549 if (pn->pn_type == TOK_DOT) {
2550 if (pn2->pn_type == TOK_NAME && !BindNameToSlot(cx, cg, pn2))
2551 return JS_FALSE;
2552 if (!(pn2->pn_op == JSOP_ARGUMENTS &&
2553 pn->pn_atom == cx->runtime->atomState.lengthAtom)) {
2555 * Any dotted property reference could call a getter, except
2556 * for arguments.length where arguments is unambiguous.
2558 *answer = JS_TRUE;
2561 ok = CheckSideEffects(cx, cg, pn2, answer);
2562 break;
2564 case PN_NAMESET:
2565 ok = CheckSideEffects(cx, cg, pn->pn_tree, answer);
2566 break;
2568 case PN_NULLARY:
2569 if (pn->pn_type == TOK_DEBUGGER)
2570 *answer = JS_TRUE;
2571 break;
2573 return ok;
2576 static JSBool
2577 EmitNameOp(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
2578 JSBool callContext)
2580 JSOp op;
2582 if (!BindNameToSlot(cx, cg, pn))
2583 return JS_FALSE;
2584 op = PN_OP(pn);
2586 if (callContext) {
2587 switch (op) {
2588 case JSOP_NAME:
2589 op = JSOP_CALLNAME;
2590 break;
2591 case JSOP_GETGVAR:
2592 JS_ASSERT(!cg->funbox);
2593 op = JSOP_CALLGVAR;
2594 break;
2595 case JSOP_GETARG:
2596 op = JSOP_CALLARG;
2597 break;
2598 case JSOP_GETLOCAL:
2599 op = JSOP_CALLLOCAL;
2600 break;
2601 case JSOP_GETUPVAR:
2602 op = JSOP_CALLUPVAR;
2603 break;
2604 case JSOP_GETDSLOT:
2605 op = JSOP_CALLDSLOT;
2606 break;
2607 default:
2608 JS_ASSERT(op == JSOP_ARGUMENTS || op == JSOP_CALLEE);
2609 break;
2613 if (op == JSOP_ARGUMENTS || op == JSOP_CALLEE) {
2614 if (js_Emit1(cx, cg, op) < 0)
2615 return JS_FALSE;
2616 if (callContext && js_Emit1(cx, cg, JSOP_NULL) < 0)
2617 return JS_FALSE;
2618 } else {
2619 if (!pn->pn_cookie.isFree()) {
2620 EMIT_UINT16_IMM_OP(op, pn->pn_cookie.asInteger());
2621 } else {
2622 if (!EmitAtomOp(cx, pn, op, cg))
2623 return JS_FALSE;
2627 return JS_TRUE;
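/*
 * Illustrative sketch (example not from the original source): compiling
 * the callee of f(), EmitNameOp runs with callContext true, so a name
 * optimized to a slot emits JSOP_CALLARG or JSOP_CALLLOCAL rather than
 * JSOP_GETARG or JSOP_GETLOCAL; JSOP_ARGUMENTS and JSOP_CALLEE instead
 * get an explicit JSOP_NULL for |this|, as handled above.
 */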
2630 #if JS_HAS_XML_SUPPORT
2631 static JSBool
2632 EmitXMLName(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
2634 JSParseNode *pn2;
2635 uintN oldflags;
2637 JS_ASSERT(pn->pn_type == TOK_UNARYOP);
2638 JS_ASSERT(pn->pn_op == JSOP_XMLNAME);
2639 JS_ASSERT(op == JSOP_XMLNAME || op == JSOP_CALLXMLNAME);
2641 pn2 = pn->pn_kid;
2642 oldflags = cg->flags;
2643 cg->flags &= ~TCF_IN_FOR_INIT;
2644 if (!js_EmitTree(cx, cg, pn2))
2645 return JS_FALSE;
2646 cg->flags |= oldflags & TCF_IN_FOR_INIT;
2647 if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
2648 CG_OFFSET(cg) - pn2->pn_offset) < 0) {
2649 return JS_FALSE;
2652 return js_Emit1(cx, cg, op) >= 0;
2654 #endif
2656 static JSBool
2657 EmitSpecialPropOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
2660 * Special case for obj.__proto__ to deoptimize away from fast paths in the
2661 * interpreter and trace recorder, which skip dense array instances by
2662 * going up to Array.prototype before looking up the property name.
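/*
 * Illustrative sketch (example not from the original source): with the
 * object already on the stack, o.__proto__ therefore emits roughly
 *
 *   JSOP_QNAMEPART "__proto__"; JSOP_GETELEM
 *
 * instead of JSOP_GETPROP, forcing the slower but fully general path.
 */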
2664 JSAtomListElement *ale = cg->atomList.add(cg->parser, pn->pn_atom);
2665 if (!ale)
2666 return JS_FALSE;
2667 if (!EmitIndexOp(cx, JSOP_QNAMEPART, ALE_INDEX(ale), cg))
2668 return JS_FALSE;
2669 if (js_Emit1(cx, cg, op) < 0)
2670 return JS_FALSE;
2671 return JS_TRUE;
2674 static JSBool
2675 EmitPropOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg,
2676 JSBool callContext)
2678 JSParseNode *pn2, *pndot, *pnup, *pndown;
2679 ptrdiff_t top;
2681 JS_ASSERT(pn->pn_arity == PN_NAME);
2682 pn2 = pn->maybeExpr();
2684 /* Special case deoptimization for __proto__. */
2685 if ((op == JSOP_GETPROP || op == JSOP_CALLPROP) &&
2686 pn->pn_atom == cx->runtime->atomState.protoAtom) {
2687 if (pn2 && !js_EmitTree(cx, cg, pn2))
2688 return JS_FALSE;
2689 return EmitSpecialPropOp(cx, pn, callContext ? JSOP_CALLELEM : JSOP_GETELEM, cg);
2692 if (callContext) {
2693 JS_ASSERT(pn->pn_type == TOK_DOT);
2694 JS_ASSERT(op == JSOP_GETPROP);
2695 op = JSOP_CALLPROP;
2696 } else if (op == JSOP_GETPROP && pn->pn_type == TOK_DOT) {
2697 if (pn2->pn_op == JSOP_THIS) {
2698 if (pn->pn_atom != cx->runtime->atomState.lengthAtom) {
2699 /* Fast path for gets of |this.foo|. */
2700 return EmitAtomOp(cx, pn, JSOP_GETTHISPROP, cg);
2702 } else if (pn2->pn_type == TOK_NAME) {
2704 * Try to optimize:
2705 * - arguments.length into JSOP_ARGCNT
2706 * - argname.prop into JSOP_GETARGPROP
2707 * - localname.prop into JSOP_GETLOCALPROP
2708 * but don't do this if the property is 'length' -- prefer to emit
2709 * JSOP_GETARG, etc., and then JSOP_LENGTH.
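/*
 * Illustrative sketch (example not from the original source): given
 * function f(a) { return [a.length, a.prop]; }, a.length emits
 * JSOP_GETARG 0 followed by JSOP_LENGTH, while a.prop fuses the slot
 * and the atom into a single JSOP_GETARGPROP instruction.
 */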
2711 if (!BindNameToSlot(cx, cg, pn2))
2712 return JS_FALSE;
2713 if (pn->pn_atom == cx->runtime->atomState.lengthAtom) {
2714 if (pn2->pn_op == JSOP_ARGUMENTS)
2715 return js_Emit1(cx, cg, JSOP_ARGCNT) >= 0;
2716 } else {
2717 switch (pn2->pn_op) {
2718 case JSOP_GETARG:
2719 op = JSOP_GETARGPROP;
2720 goto do_indexconst;
2721 case JSOP_GETLOCAL:
2722 op = JSOP_GETLOCALPROP;
2723 do_indexconst: {
2724 JSAtomListElement *ale;
2725 jsatomid atomIndex;
2727 ale = cg->atomList.add(cg->parser, pn->pn_atom);
2728 if (!ale)
2729 return JS_FALSE;
2730 atomIndex = ALE_INDEX(ale);
2731 return EmitSlotIndexOp(cx, op, pn2->pn_cookie.asInteger(), atomIndex, cg);
2734 default:;
2741 * If the object operand is also a dotted property reference, reverse the
2742 * list linked via pn_expr temporarily so we can iterate over it from the
2743 * bottom up (reversing again as we go), to avoid excessive recursion.
2745 if (pn2->pn_type == TOK_DOT) {
2746 pndot = pn2;
2747 pnup = NULL;
2748 top = CG_OFFSET(cg);
2749 for (;;) {
2750 /* Reverse pndot->pn_expr to point up, not down. */
2751 pndot->pn_offset = top;
2752 JS_ASSERT(!pndot->pn_used);
2753 pndown = pndot->pn_expr;
2754 pndot->pn_expr = pnup;
2755 if (pndown->pn_type != TOK_DOT)
2756 break;
2757 pnup = pndot;
2758 pndot = pndown;
2761 /* pndown is a primary expression, not a dotted property reference. */
2762 if (!js_EmitTree(cx, cg, pndown))
2763 return JS_FALSE;
2765 do {
2766 /* Walk back up the list, emitting annotated name ops. */
2767 if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
2768 CG_OFFSET(cg) - pndown->pn_offset) < 0) {
2769 return JS_FALSE;
2772 /* Special case deoptimization on __proto__, as above. */
2773 if (pndot->pn_arity == PN_NAME && pndot->pn_atom == cx->runtime->atomState.protoAtom) {
2774 if (!EmitSpecialPropOp(cx, pndot, JSOP_GETELEM, cg))
2775 return JS_FALSE;
2776 } else if (!EmitAtomOp(cx, pndot, PN_OP(pndot), cg)) {
2777 return JS_FALSE;
2780 /* Reverse the pn_expr link again. */
2781 pnup = pndot->pn_expr;
2782 pndot->pn_expr = pndown;
2783 pndown = pndot;
2784 } while ((pndot = pnup) != NULL);
2785 } else {
2786 if (!js_EmitTree(cx, cg, pn2))
2787 return JS_FALSE;
2790 if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
2791 CG_OFFSET(cg) - pn2->pn_offset) < 0) {
2792 return JS_FALSE;
2795 return EmitAtomOp(cx, pn, op, cg);
2798 static JSBool
2799 EmitElemOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
2801 ptrdiff_t top;
2802 JSParseNode *left, *right, *next, ltmp, rtmp;
2803 int32_t slot;
2805 top = CG_OFFSET(cg);
2806 if (pn->pn_arity == PN_LIST) {
2807 /* Left-associative operator chain to avoid too much recursion. */
2808 JS_ASSERT(pn->pn_op == JSOP_GETELEM);
2809 JS_ASSERT(pn->pn_count >= 3);
2810 left = pn->pn_head;
2811 right = pn->last();
2812 next = left->pn_next;
2813 JS_ASSERT(next != right);
2816 * Try to optimize arguments[0][j]... into JSOP_ARGSUB<0> followed by
2817 * one or more index-expression/JSOP_GETELEM op pairs.
2819 if (left->pn_type == TOK_NAME && next->pn_type == TOK_NUMBER) {
2820 if (!BindNameToSlot(cx, cg, left))
2821 return JS_FALSE;
2822 if (left->pn_op == JSOP_ARGUMENTS &&
2823 JSDOUBLE_IS_INT32(next->pn_dval, &slot) &&
2824 (jsuint)slot < JS_BIT(16)) {
2826 * arguments[i]() requires the arguments object as "this".
2827 * Check that we never generate a list for that usage.
2829 JS_ASSERT(op != JSOP_CALLELEM || next->pn_next);
2830 left->pn_offset = next->pn_offset = top;
2831 EMIT_UINT16_IMM_OP(JSOP_ARGSUB, (jsatomid)slot);
2832 left = next;
2833 next = left->pn_next;
2838 * Check whether we generated JSOP_ARGSUB, just above, and have only
2839 * one more index expression to emit. Given arguments[0][j], we must
2840 * skip the while loop altogether, falling through to emit code for j
2841 * (in the subtree referenced by right), followed by the annotated op,
2842 * at the bottom of this function.
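/*
 * Illustrative sketch (example not from the original source): for
 * arguments[0][i][j] the code emitted is, schematically,
 *
 *   JSOP_ARGSUB 0; <i>; JSOP_GETELEM; <j>; JSOP_GETELEM
 *
 * with the final JSOP_GETELEM annotated at the bottom of this function.
 */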
2844 JS_ASSERT(next != right || pn->pn_count == 3);
2845 if (left == pn->pn_head) {
2846 if (!js_EmitTree(cx, cg, left))
2847 return JS_FALSE;
2849 while (next != right) {
2850 if (!js_EmitTree(cx, cg, next))
2851 return JS_FALSE;
2852 if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
2853 return JS_FALSE;
2854 if (js_Emit1(cx, cg, JSOP_GETELEM) < 0)
2855 return JS_FALSE;
2856 next = next->pn_next;
2858 } else {
2859 if (pn->pn_arity == PN_NAME) {
2861 * Set left and right so pn appears to be a TOK_LB node, instead
2862 * of a TOK_DOT node. See the TOK_FOR/IN case in js_EmitTree, and
2863 * EmitDestructuringOps further below. In the destructuring case,
2864 * the base expression (pn_expr) of the name may be null, which
2865 * means we have to emit a JSOP_BINDNAME.
2867 left = pn->maybeExpr();
2868 if (!left) {
2869 left = &ltmp;
2870 left->pn_type = TOK_STRING;
2871 left->pn_op = JSOP_BINDNAME;
2872 left->pn_arity = PN_NULLARY;
2873 left->pn_pos = pn->pn_pos;
2874 left->pn_atom = pn->pn_atom;
2876 right = &rtmp;
2877 right->pn_type = TOK_STRING;
2878 right->pn_op = js_IsIdentifier(ATOM_TO_STRING(pn->pn_atom))
2879 ? JSOP_QNAMEPART
2880 : JSOP_STRING;
2881 right->pn_arity = PN_NULLARY;
2882 right->pn_pos = pn->pn_pos;
2883 right->pn_atom = pn->pn_atom;
2884 } else {
2885 JS_ASSERT(pn->pn_arity == PN_BINARY);
2886 left = pn->pn_left;
2887 right = pn->pn_right;
2890 /* Try to optimize arguments[0] (e.g.) into JSOP_ARGSUB<0>. */
2891 if (op == JSOP_GETELEM &&
2892 left->pn_type == TOK_NAME &&
2893 right->pn_type == TOK_NUMBER) {
2894 if (!BindNameToSlot(cx, cg, left))
2895 return JS_FALSE;
2896 if (left->pn_op == JSOP_ARGUMENTS &&
2897 JSDOUBLE_IS_INT32(right->pn_dval, &slot) &&
2898 (jsuint)slot < JS_BIT(16)) {
2899 left->pn_offset = right->pn_offset = top;
2900 EMIT_UINT16_IMM_OP(JSOP_ARGSUB, (jsatomid)slot);
2901 return JS_TRUE;
2905 if (!js_EmitTree(cx, cg, left))
2906 return JS_FALSE;
2909 /* The right side of the descendant operator is implicitly quoted. */
2910 JS_ASSERT(op != JSOP_DESCENDANTS || right->pn_type != TOK_STRING ||
2911 right->pn_op == JSOP_QNAMEPART);
2912 if (!js_EmitTree(cx, cg, right))
2913 return JS_FALSE;
2914 if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
2915 return JS_FALSE;
2916 return js_Emit1(cx, cg, op) >= 0;
2919 static JSBool
2920 EmitNumberOp(JSContext *cx, jsdouble dval, JSCodeGenerator *cg)
2922 int32_t ival;
2923 uint32 u;
2924 ptrdiff_t off;
2925 jsbytecode *pc;
2927 if (JSDOUBLE_IS_INT32(dval, &ival)) {
2928 if (ival == 0)
2929 return js_Emit1(cx, cg, JSOP_ZERO) >= 0;
2930 if (ival == 1)
2931 return js_Emit1(cx, cg, JSOP_ONE) >= 0;
2932 if ((jsint)(int8)ival == ival)
2933 return js_Emit2(cx, cg, JSOP_INT8, (jsbytecode)(int8)ival) >= 0;
2935 u = (uint32)ival;
2936 if (u < JS_BIT(16)) {
2937 EMIT_UINT16_IMM_OP(JSOP_UINT16, u);
2938 } else if (u < JS_BIT(24)) {
2939 off = js_EmitN(cx, cg, JSOP_UINT24, 3);
2940 if (off < 0)
2941 return JS_FALSE;
2942 pc = CG_CODE(cg, off);
2943 SET_UINT24(pc, u);
2944 } else {
2945 off = js_EmitN(cx, cg, JSOP_INT32, 4);
2946 if (off < 0)
2947 return JS_FALSE;
2948 pc = CG_CODE(cg, off);
2949 SET_INT32(pc, ival);
2951 return JS_TRUE;
2954 if (!cg->constList.append(DoubleValue(dval)))
2955 return JS_FALSE;
2957 return EmitIndexOp(cx, JSOP_DOUBLE, cg->constList.length() - 1, cg);
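/*
 * Illustrative sketches (examples not from the original source): 0 and 1
 * emit JSOP_ZERO and JSOP_ONE; -3 fits JSOP_INT8; 1000 fits JSOP_UINT16;
 * 100000 fits JSOP_UINT24; -100000 needs JSOP_INT32; and 3.14 is interned
 * in constList and referenced via JSOP_DOUBLE.
 */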
2961 * To avoid bloating all parse nodes for the special case of switch, values are
2962 * allocated in the temp pool and pointed to by the parse node. These values
2963 * are not currently recycled (as parse nodes are) and the temp pool is only
2964 * flushed at the end of compiling a script, so these values are technically
2965 * leaked. This would only be a problem for scripts containing a large number
2966 * of large switches, which seems unlikely.
2968 static Value *
2969 AllocateSwitchConstant(JSContext *cx)
2971 Value *pv;
2972 JS_ARENA_ALLOCATE_TYPE(pv, Value, &cx->tempPool);
2973 return pv;
2976 static JSBool
2977 EmitSwitch(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
2978 JSStmtInfo *stmtInfo)
2980 JSOp switchOp;
2981 JSBool ok, hasDefault, constPropagated;
2982 ptrdiff_t top, off, defaultOffset;
2983 JSParseNode *pn2, *pn3, *pn4;
2984 uint32 caseCount, tableLength;
2985 JSParseNode **table;
2986 int32_t i, low, high;
2987 JSAtomListElement *ale;
2988 intN noteIndex;
2989 size_t switchSize, tableSize;
2990 jsbytecode *pc, *savepc;
2991 #if JS_HAS_BLOCK_SCOPE
2992 jsint count;
2993 #endif
2995 /* Try for the most optimal op; fall back to lookup, then to the general ECMAv2 cond switch, if the cases are not dense ints. */
2996 switchOp = JSOP_TABLESWITCH;
2997 ok = JS_TRUE;
2998 hasDefault = constPropagated = JS_FALSE;
2999 defaultOffset = -1;
3002 * If the switch contains let variables scoped by its body, model the
3003 * resulting block on the stack first, before emitting the discriminant's
3004 * bytecode (in case the discriminant contains a stack-model dependency
3005 * such as a let expression).
3007 pn2 = pn->pn_right;
3008 #if JS_HAS_BLOCK_SCOPE
3009 if (pn2->pn_type == TOK_LEXICALSCOPE) {
3011 * Push the body's block scope before discriminant code-gen for proper
3012 * static block scope linkage in case the discriminant contains a let
3013 * expression. The block's locals must lie under the discriminant on
3014 * the stack so that case-dispatch bytecodes can find the discriminant
3015 * on top of stack.
3017 count = OBJ_BLOCK_COUNT(cx, pn2->pn_objbox->object);
3018 js_PushBlockScope(cg, stmtInfo, pn2->pn_objbox->object, -1);
3019 stmtInfo->type = STMT_SWITCH;
3021 /* Emit JSOP_ENTERBLOCK before code to evaluate the discriminant. */
3022 if (!EmitEnterBlock(cx, pn2, cg))
3023 return JS_FALSE;
3026 * Pop the switch's statement info around discriminant code-gen. Note
3027 * how this leaves cg->blockChain referencing the switch's
3028 * block scope object, which is necessary for correct block parenting
3029 * in the case where the discriminant contains a let expression.
3031 cg->topStmt = stmtInfo->down;
3032 cg->topScopeStmt = stmtInfo->downScope;
3034 #ifdef __GNUC__
3035 else {
3036 count = 0;
3038 #endif
3039 #endif
3042 * Emit code for the discriminant first (or nearly first, in the case of a
3043 * switch whose body is a block scope).
3045 if (!js_EmitTree(cx, cg, pn->pn_left))
3046 return JS_FALSE;
3048 /* Switch bytecodes run from here till end of final case. */
3049 top = CG_OFFSET(cg);
3050 #if !JS_HAS_BLOCK_SCOPE
3051 js_PushStatement(cg, stmtInfo, STMT_SWITCH, top);
3052 #else
3053 if (pn2->pn_type == TOK_LC) {
3054 js_PushStatement(cg, stmtInfo, STMT_SWITCH, top);
3055 } else {
3056 /* Re-push the switch's statement info record. */
3057 cg->topStmt = cg->topScopeStmt = stmtInfo;
3059 /* Set the statement info record's idea of top. */
3060 stmtInfo->update = top;
3062 /* Advance pn2 to refer to the switch case list. */
3063 pn2 = pn2->expr();
3065 #endif
3067 caseCount = pn2->pn_count;
3068 tableLength = 0;
3069 table = NULL;
3071 if (caseCount == 0 ||
3072 (caseCount == 1 &&
3073 (hasDefault = (pn2->pn_head->pn_type == TOK_DEFAULT)))) {
3074 caseCount = 0;
3075 low = 0;
3076 high = -1;
3077 } else {
3078 #define INTMAP_LENGTH 256
3079 jsbitmap intmap_space[INTMAP_LENGTH];
3080 jsbitmap *intmap = NULL;
3081 int32 intmap_bitlen = 0;
3083 low = JSVAL_INT_MAX;
3084 high = JSVAL_INT_MIN;
3086 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3087 if (pn3->pn_type == TOK_DEFAULT) {
3088 hasDefault = JS_TRUE;
3089 caseCount--; /* one of the "cases" was the default */
3090 continue;
3093 JS_ASSERT(pn3->pn_type == TOK_CASE);
3094 if (switchOp == JSOP_CONDSWITCH)
3095 continue;
3097 pn4 = pn3->pn_left;
3098 while (pn4->pn_type == TOK_RP)
3099 pn4 = pn4->pn_kid;
3101 Value constVal;
3102 switch (pn4->pn_type) {
3103 case TOK_NUMBER:
3104 constVal.setNumber(pn4->pn_dval);
3105 break;
3106 case TOK_STRING:
3107 constVal.setString(ATOM_TO_STRING(pn4->pn_atom));
3108 break;
3109 case TOK_NAME:
3110 if (!pn4->maybeExpr()) {
3111 ok = LookupCompileTimeConstant(cx, cg, pn4->pn_atom, &constVal);
3112 if (!ok)
3113 goto release;
3114 if (!constVal.isMagic(JS_NO_CONSTANT)) {
3115 if (constVal.isObject()) {
3117 * XXX JSOP_LOOKUPSWITCH does not support const-
3118 * propagated object values, see bug 407186.
3120 switchOp = JSOP_CONDSWITCH;
3121 continue;
3123 constPropagated = JS_TRUE;
3124 break;
3127 /* FALL THROUGH */
3128 case TOK_PRIMARY:
3129 if (pn4->pn_op == JSOP_TRUE) {
3130 constVal.setBoolean(true);
3131 break;
3133 if (pn4->pn_op == JSOP_FALSE) {
3134 constVal.setBoolean(false);
3135 break;
3137 if (pn4->pn_op == JSOP_NULL) {
3138 constVal.setNull();
3139 break;
3141 /* FALL THROUGH */
3142 default:
3143 switchOp = JSOP_CONDSWITCH;
3144 continue;
3146 JS_ASSERT(constVal.isPrimitive());
3148 pn3->pn_pval = AllocateSwitchConstant(cx);
3149 if (!pn3->pn_pval) {
3150 ok = JS_FALSE;
3151 goto release;
3154 *pn3->pn_pval = constVal;
3156 if (switchOp != JSOP_TABLESWITCH)
3157 continue;
3158 if (!pn3->pn_pval->isInt32()) {
3159 switchOp = JSOP_LOOKUPSWITCH;
3160 continue;
3162 i = pn3->pn_pval->toInt32();
3163 if ((jsuint)(i + (jsint)JS_BIT(15)) >= (jsuint)JS_BIT(16)) {
3164 switchOp = JSOP_LOOKUPSWITCH;
3165 continue;
3167 if (i < low)
3168 low = i;
3169 if (high < i)
3170 high = i;
3173 * Check for duplicates, which require a JSOP_LOOKUPSWITCH.
3174 * We bias i by 65536 if it's negative, and hope that's a rare
3175 * case (because it requires a malloc'd bitmap).
3177 if (i < 0)
3178 i += JS_BIT(16);
3179 if (i >= intmap_bitlen) {
3180 if (!intmap &&
3181 i < (INTMAP_LENGTH << JS_BITS_PER_WORD_LOG2)) {
3182 intmap = intmap_space;
3183 intmap_bitlen = INTMAP_LENGTH << JS_BITS_PER_WORD_LOG2;
3184 } else {
3185 /* Just grab 8K for the worst-case bitmap. */
3186 intmap_bitlen = JS_BIT(16);
3187 intmap = (jsbitmap *)
3188 cx->malloc((JS_BIT(16) >> JS_BITS_PER_WORD_LOG2)
3189 * sizeof(jsbitmap));
3190 if (!intmap) {
3191 JS_ReportOutOfMemory(cx);
3192 return JS_FALSE;
3195 memset(intmap, 0, intmap_bitlen >> JS_BITS_PER_BYTE_LOG2);
3197 if (JS_TEST_BIT(intmap, i)) {
3198 switchOp = JSOP_LOOKUPSWITCH;
3199 continue;
3201 JS_SET_BIT(intmap, i);
3204 release:
3205 if (intmap && intmap != intmap_space)
3206 cx->free(intmap);
3207 if (!ok)
3208 return JS_FALSE;
3211 * Compute table length and select lookup instead if overlarge or
3212 * more than half-sparse.
3214 if (switchOp == JSOP_TABLESWITCH) {
3215 tableLength = (uint32)(high - low + 1);
3216 if (tableLength >= JS_BIT(16) || tableLength > 2 * caseCount)
3217 switchOp = JSOP_LOOKUPSWITCH;
3218 } else if (switchOp == JSOP_LOOKUPSWITCH) {
3220 * Lookup switch supports only atom indexes below the 64K limit.
3221 * Conservatively estimate the maximum possible index during
3222 * switch generation and use conditional switch if it exceeds
3223 * the limit.
3225 if (caseCount + cg->constList.length() > JS_BIT(16))
3226 switchOp = JSOP_CONDSWITCH;
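/*
 * Illustrative sketches (examples not from the original source):
 *
 *   switch (x) { case 1: case 2: case 3: }  // dense ints: JSOP_TABLESWITCH
 *   switch (x) { case 1: case "a": }        // mixed consts: JSOP_LOOKUPSWITCH
 *   switch (x) { case f(): }                // non-constant: JSOP_CONDSWITCH
 */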
3231 * Emit a note with two offsets: the first tells the total switch code
3232 * length, the second the offset to the first JSOP_CASE if condswitch.
3234 noteIndex = js_NewSrcNote3(cx, cg, SRC_SWITCH, 0, 0);
3235 if (noteIndex < 0)
3236 return JS_FALSE;
3238 if (switchOp == JSOP_CONDSWITCH) {
3240 * 0 bytes of immediate for unoptimized ECMAv2 switch.
3242 switchSize = 0;
3243 } else if (switchOp == JSOP_TABLESWITCH) {
3245 * 3 offsets (len, low, high) before the table, 1 per entry.
3247 switchSize = (size_t)(JUMP_OFFSET_LEN * (3 + tableLength));
3248 } else {
3250 * JSOP_LOOKUPSWITCH:
3251 * 1 offset (len) and 1 atom index (npairs) before the table,
3252 * 1 atom index and 1 jump offset per entry.
3254 switchSize = (size_t)(JUMP_OFFSET_LEN + INDEX_LEN +
3255 (INDEX_LEN + JUMP_OFFSET_LEN) * caseCount);
3259 * Emit switchOp followed by switchSize bytes of jump or lookup table.
3261 * If switchOp is JSOP_LOOKUPSWITCH or JSOP_TABLESWITCH, it is crucial
3262 * to emit the immediate operand(s) by which bytecode readers such as
3263 * BuildSpanDepTable discover the length of the switch opcode *before*
3264 * calling js_SetJumpOffset (which may call BuildSpanDepTable). It's
3265 * also important to zero all unknown jump offset immediate operands,
3266 * so they can be converted to span dependencies with null targets to
3267 * be computed later (js_EmitN zeros switchSize bytes after switchOp).
3269 if (js_EmitN(cx, cg, switchOp, switchSize) < 0)
3270 return JS_FALSE;
3272 off = -1;
3273 if (switchOp == JSOP_CONDSWITCH) {
3274 intN caseNoteIndex = -1;
3275 JSBool beforeCases = JS_TRUE;
3277 /* Emit code for evaluating cases and jumping to case statements. */
3278 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3279 pn4 = pn3->pn_left;
3280 if (pn4 && !js_EmitTree(cx, cg, pn4))
3281 return JS_FALSE;
3282 if (caseNoteIndex >= 0) {
3283 /* off is the previous JSOP_CASE's bytecode offset. */
3284 if (!js_SetSrcNoteOffset(cx, cg, (uintN)caseNoteIndex, 0,
3285 CG_OFFSET(cg) - off)) {
3286 return JS_FALSE;
3289 if (!pn4) {
3290 JS_ASSERT(pn3->pn_type == TOK_DEFAULT);
3291 continue;
3293 caseNoteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
3294 if (caseNoteIndex < 0)
3295 return JS_FALSE;
3296 off = EmitJump(cx, cg, JSOP_CASE, 0);
3297 if (off < 0)
3298 return JS_FALSE;
3299 pn3->pn_offset = off;
3300 if (beforeCases) {
3301 uintN noteCount, noteCountDelta;
3303 /* Switch note's second offset is to first JSOP_CASE. */
3304 noteCount = CG_NOTE_COUNT(cg);
3305 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 1,
3306 off - top)) {
3307 return JS_FALSE;
3309 noteCountDelta = CG_NOTE_COUNT(cg) - noteCount;
3310 if (noteCountDelta != 0)
3311 caseNoteIndex += noteCountDelta;
3312 beforeCases = JS_FALSE;
3317 * If we didn't have an explicit default (which could fall in between
3318 * cases, preventing us from fusing this js_SetSrcNoteOffset with the
3319 * call in the loop above), link the last case to the implicit default
3320 * for the decompiler.
3322 if (!hasDefault &&
3323 caseNoteIndex >= 0 &&
3324 !js_SetSrcNoteOffset(cx, cg, (uintN)caseNoteIndex, 0,
3325 CG_OFFSET(cg) - off)) {
3326 return JS_FALSE;
3329 /* Emit default even if no explicit default statement. */
3330 defaultOffset = EmitJump(cx, cg, JSOP_DEFAULT, 0);
3331 if (defaultOffset < 0)
3332 return JS_FALSE;
3333 } else {
3334 pc = CG_CODE(cg, top + JUMP_OFFSET_LEN);
3336 if (switchOp == JSOP_TABLESWITCH) {
3337 /* Fill in switch bounds, which we know fit in 16-bit offsets. */
3338 SET_JUMP_OFFSET(pc, low);
3339 pc += JUMP_OFFSET_LEN;
3340 SET_JUMP_OFFSET(pc, high);
3341 pc += JUMP_OFFSET_LEN;
3344 * Use malloc to avoid arena bloat for programs with many switches.
3345 * We free table if non-null at label out, so all control flow must
3346 * exit this function through goto out or goto bad.
3348 if (tableLength != 0) {
3349 tableSize = (size_t)tableLength * sizeof *table;
3350 table = (JSParseNode **) cx->malloc(tableSize);
3351 if (!table)
3352 return JS_FALSE;
3353 memset(table, 0, tableSize);
3354 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3355 if (pn3->pn_type == TOK_DEFAULT)
3356 continue;
3357 i = pn3->pn_pval->toInt32();
3358 i -= low;
3359 JS_ASSERT((uint32)i < tableLength);
3360 table[i] = pn3;
3363 } else {
3364 JS_ASSERT(switchOp == JSOP_LOOKUPSWITCH);
3366 /* Fill in the number of cases. */
3367 SET_INDEX(pc, caseCount);
3368 pc += INDEX_LEN;
3372 * After this point, all control flow involving JSOP_TABLESWITCH
3373 * must set ok and goto out to exit this function. To keep things
3374 * simple, all switchOp cases exit that way.
3376 MUST_FLOW_THROUGH("out");
3377 if (cg->spanDeps) {
3379 * We have already generated at least one big jump so we must
3380 * explicitly add span dependencies for the switch jumps. When
3381 * called below, js_SetJumpOffset can only do it when patching
3382 * the first big jump or when cg->spanDeps is null.
3384 if (!AddSwitchSpanDeps(cx, cg, CG_CODE(cg, top)))
3385 goto bad;
3388 if (constPropagated) {
3390 * Skip switchOp, as we are not setting jump offsets in the two
3391 * for loops below. We'll restore CG_NEXT(cg) from savepc after,
3392 * unless there was an error.
3394 savepc = CG_NEXT(cg);
3395 CG_NEXT(cg) = pc + 1;
3396 if (switchOp == JSOP_TABLESWITCH) {
3397 for (i = 0; i < (jsint)tableLength; i++) {
3398 pn3 = table[i];
3399 if (pn3 &&
3400 (pn4 = pn3->pn_left) != NULL &&
3401 pn4->pn_type == TOK_NAME) {
3402 /* Note a propagated constant with the const's name. */
3403 JS_ASSERT(!pn4->maybeExpr());
3404 ale = cg->atomList.add(cg->parser, pn4->pn_atom);
3405 if (!ale)
3406 goto bad;
3407 CG_NEXT(cg) = pc;
3408 if (js_NewSrcNote2(cx, cg, SRC_LABEL, (ptrdiff_t)
3409 ALE_INDEX(ale)) < 0) {
3410 goto bad;
3413 pc += JUMP_OFFSET_LEN;
3415 } else {
3416 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3417 pn4 = pn3->pn_left;
3418 if (pn4 && pn4->pn_type == TOK_NAME) {
3419 /* Note a propagated constant with the const's name. */
3420 JS_ASSERT(!pn4->maybeExpr());
3421 ale = cg->atomList.add(cg->parser, pn4->pn_atom);
3422 if (!ale)
3423 goto bad;
3424 CG_NEXT(cg) = pc;
3425 if (js_NewSrcNote2(cx, cg, SRC_LABEL, (ptrdiff_t)
3426 ALE_INDEX(ale)) < 0) {
3427 goto bad;
3430 pc += INDEX_LEN + JUMP_OFFSET_LEN;
3433 CG_NEXT(cg) = savepc;
3437 /* Emit code for each case's statements, copying pn_offset up to pn3. */
3438 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3439 if (switchOp == JSOP_CONDSWITCH && pn3->pn_type != TOK_DEFAULT)
3440 CHECK_AND_SET_JUMP_OFFSET_AT_CUSTOM(cx, cg, pn3->pn_offset, goto bad);
3441 pn4 = pn3->pn_right;
3442 ok = js_EmitTree(cx, cg, pn4);
3443 if (!ok)
3444 goto out;
3445 pn3->pn_offset = pn4->pn_offset;
3446 if (pn3->pn_type == TOK_DEFAULT)
3447 off = pn3->pn_offset - top;
3450 if (!hasDefault) {
3451 /* If no default case, offset for default is to end of switch. */
3452 off = CG_OFFSET(cg) - top;
3455 /* We'd better have set "off" by now. */
3456 JS_ASSERT(off != -1);
3458 /* Set the default offset (to end of switch if no default). */
3459 if (switchOp == JSOP_CONDSWITCH) {
3460 pc = NULL;
3461 JS_ASSERT(defaultOffset != -1);
3462 ok = js_SetJumpOffset(cx, cg, CG_CODE(cg, defaultOffset),
3463 off - (defaultOffset - top));
3464 if (!ok)
3465 goto out;
3466 } else {
3467 pc = CG_CODE(cg, top);
3468 ok = js_SetJumpOffset(cx, cg, pc, off);
3469 if (!ok)
3470 goto out;
3471 pc += JUMP_OFFSET_LEN;
3474 /* Set the SRC_SWITCH note's offset operand to tell end of switch. */
3475 off = CG_OFFSET(cg) - top;
3476 ok = js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, off);
3477 if (!ok)
3478 goto out;
3480 if (switchOp == JSOP_TABLESWITCH) {
3481 /* Skip over the already-initialized switch bounds. */
3482 pc += 2 * JUMP_OFFSET_LEN;
3484 /* Fill in the jump table, if there is one. */
3485 for (i = 0; i < (jsint)tableLength; i++) {
3486 pn3 = table[i];
3487 off = pn3 ? pn3->pn_offset - top : 0;
3488 ok = js_SetJumpOffset(cx, cg, pc, off);
3489 if (!ok)
3490 goto out;
3491 pc += JUMP_OFFSET_LEN;
3493 } else if (switchOp == JSOP_LOOKUPSWITCH) {
3494 /* Skip over the already-initialized number of cases. */
3495 pc += INDEX_LEN;
3497 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3498 if (pn3->pn_type == TOK_DEFAULT)
3499 continue;
3500 if (!cg->constList.append(*pn3->pn_pval))
3501 goto bad;
3502 SET_INDEX(pc, cg->constList.length() - 1);
3503 pc += INDEX_LEN;
3505 off = pn3->pn_offset - top;
3506 ok = js_SetJumpOffset(cx, cg, pc, off);
3507 if (!ok)
3508 goto out;
3509 pc += JUMP_OFFSET_LEN;
3513 out:
3514 if (table)
3515 cx->free(table);
3516 if (ok) {
3517 ok = js_PopStatementCG(cx, cg);
3519 #if JS_HAS_BLOCK_SCOPE
3520 if (ok && pn->pn_right->pn_type == TOK_LEXICALSCOPE)
3521 EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK, count);
3522 #endif
3524 return ok;
3526 bad:
3527 ok = JS_FALSE;
3528 goto out;
3531 JSBool
3532 js_EmitFunctionScript(JSContext *cx, JSCodeGenerator *cg, JSParseNode *body)
3534 if (cg->flags & TCF_FUN_IS_GENERATOR) {
3535 /* JSOP_GENERATOR must be the first instruction. */
3536 CG_SWITCH_TO_PROLOG(cg);
3537 JS_ASSERT(CG_NEXT(cg) == CG_BASE(cg));
3538 if (js_Emit1(cx, cg, JSOP_GENERATOR) < 0)
3539 return false;
3540 CG_SWITCH_TO_MAIN(cg);
3541 } else {
3543 * Emit a trace hint opcode only if not in a generator, since generators
3544 * are not yet traced and both want to be the first instruction.
3546 if (js_Emit1(cx, cg, JSOP_TRACE) < 0)
3547 return false;
3550 if (cg->flags & TCF_FUN_UNBRAND_THIS) {
3551 if (js_Emit1(cx, cg, JSOP_UNBRANDTHIS) < 0)
3552 return false;
3555 return js_EmitTree(cx, cg, body) &&
3556 js_Emit1(cx, cg, JSOP_STOP) >= 0 &&
3557 js_NewScriptFromCG(cx, cg);
3560 /* A macro for inlining at the top of js_EmitTree (whence it came). */
3561 #define UPDATE_LINE_NUMBER_NOTES(cx, cg, line) \
3562 JS_BEGIN_MACRO \
3563 uintN line_ = (line); \
3564 uintN delta_ = line_ - CG_CURRENT_LINE(cg); \
3565 if (delta_ != 0) { \
3566 /* \
3567 * Encode any change in the current source line number by using \
3568 * either several SRC_NEWLINE notes or just one SRC_SETLINE note, \
3569 * whichever consumes less space. \
3571 * NB: We handle backward line number deltas (possible with for \
3572 * loops where the update part is emitted after the body, but its \
3573 * line number is <= any line number in the body) here by letting \
3574 * unsigned delta_ wrap to a very large number, which triggers a \
3575 * SRC_SETLINE. \
3576 */ \
3577 CG_CURRENT_LINE(cg) = line_; \
3578 if (delta_ >= (uintN)(2 + ((line_ > SN_3BYTE_OFFSET_MASK)<<1))) { \
3579 if (js_NewSrcNote2(cx, cg, SRC_SETLINE, (ptrdiff_t)line_) < 0)\
3580 return JS_FALSE; \
3581 } else { \
3582 do { \
3583 if (js_NewSrcNote(cx, cg, SRC_NEWLINE) < 0) \
3584 return JS_FALSE; \
3585 } while (--delta_ != 0); \
3588 JS_END_MACRO
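/*
 * Illustrative sketch (example not from the original source): stepping
 * from line 10 to line 11 emits one SRC_NEWLINE note, while stepping from
 * line 10 to line 40 emits a single SRC_SETLINE note carrying 40, since a
 * run of thirty SRC_NEWLINE notes would consume more space.
 */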
3590 /* A function, so that we avoid macro-bloating all the other callsites. */
3591 static JSBool
3592 UpdateLineNumberNotes(JSContext *cx, JSCodeGenerator *cg, uintN line)
3594 UPDATE_LINE_NUMBER_NOTES(cx, cg, line);
3595 return JS_TRUE;
3598 static JSBool
3599 MaybeEmitVarDecl(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3600 JSParseNode *pn, jsatomid *result)
3602 jsatomid atomIndex;
3603 JSAtomListElement *ale;
3605 if (!pn->pn_cookie.isFree()) {
3606 atomIndex = (jsatomid) pn->pn_cookie.slot();
3607 } else {
3608 ale = cg->atomList.add(cg->parser, pn->pn_atom);
3609 if (!ale)
3610 return JS_FALSE;
3611 atomIndex = ALE_INDEX(ale);
3614 if (JOF_OPTYPE(pn->pn_op) == JOF_ATOM &&
3615 (!cg->inFunction() || (cg->flags & TCF_FUN_HEAVYWEIGHT))) {
3616 CG_SWITCH_TO_PROLOG(cg);
3617 if (!UpdateLineNumberNotes(cx, cg, pn->pn_pos.begin.lineno))
3618 return JS_FALSE;
3619 EMIT_INDEX_OP(prologOp, atomIndex);
3620 CG_SWITCH_TO_MAIN(cg);
3623 if (result)
3624 *result = atomIndex;
3625 return JS_TRUE;
3628 #if JS_HAS_DESTRUCTURING
3630 typedef JSBool
3631 (*DestructuringDeclEmitter)(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3632 JSParseNode *pn);
3634 static JSBool
3635 EmitDestructuringDecl(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3636 JSParseNode *pn)
3638 JS_ASSERT(pn->pn_type == TOK_NAME);
3639 if (!BindNameToSlot(cx, cg, pn))
3640 return JS_FALSE;
3642 JS_ASSERT(PN_OP(pn) != JSOP_ARGUMENTS && PN_OP(pn) != JSOP_CALLEE);
3643 return MaybeEmitVarDecl(cx, cg, prologOp, pn, NULL);
3646 static JSBool
3647 EmitDestructuringDecls(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3648 JSParseNode *pn)
3650 JSParseNode *pn2, *pn3;
3651 DestructuringDeclEmitter emitter;
3653 if (pn->pn_type == TOK_RB) {
3654 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
3655 if (pn2->pn_type == TOK_COMMA)
3656 continue;
3657 emitter = (pn2->pn_type == TOK_NAME)
3658 ? EmitDestructuringDecl
3659 : EmitDestructuringDecls;
3660 if (!emitter(cx, cg, prologOp, pn2))
3661 return JS_FALSE;
3663 } else {
3664 JS_ASSERT(pn->pn_type == TOK_RC);
3665 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
3666 pn3 = pn2->pn_right;
3667 emitter = (pn3->pn_type == TOK_NAME)
3668 ? EmitDestructuringDecl
3669 : EmitDestructuringDecls;
3670 if (!emitter(cx, cg, prologOp, pn3))
3671 return JS_FALSE;
3674 return JS_TRUE;
3677 static JSBool
3678 EmitDestructuringOpsHelper(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn);
3680 static JSBool
3681 EmitDestructuringLHS(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
3684 * Now emit the lvalue opcode sequence. If the lvalue is a nested
3685 * destructuring initialiser-form, call ourselves to handle it, then
3686 * pop the matched value. Otherwise emit an lvalue bytecode sequence
3687 * ending with a JSOP_ENUMELEM or equivalent op.
3689 if (pn->pn_type == TOK_RB || pn->pn_type == TOK_RC) {
3690 if (!EmitDestructuringOpsHelper(cx, cg, pn))
3691 return JS_FALSE;
3692 if (js_Emit1(cx, cg, JSOP_POP) < 0)
3693 return JS_FALSE;
3694 } else {
3695 if (pn->pn_type == TOK_NAME) {
3696 if (!BindNameToSlot(cx, cg, pn))
3697 return JS_FALSE;
3698 if (pn->isConst() && !pn->isInitialized())
3699 return js_Emit1(cx, cg, JSOP_POP) >= 0;
3702 switch (pn->pn_op) {
3703 case JSOP_SETNAME:
3705 * NB: pn is a PN_NAME node, not a PN_BINARY. Nevertheless,
3706 * we want to emit JSOP_ENUMELEM, which has format JOF_ELEM.
3707 * So here and for JSOP_ENUMCONSTELEM, we use EmitElemOp.
3709 if (!EmitElemOp(cx, pn, JSOP_ENUMELEM, cg))
3710 return JS_FALSE;
3711 break;
3713 case JSOP_SETCONST:
3714 if (!EmitElemOp(cx, pn, JSOP_ENUMCONSTELEM, cg))
3715 return JS_FALSE;
3716 break;
3718 case JSOP_SETLOCAL:
3720 jsuint slot = pn->pn_cookie.asInteger();
3721 EMIT_UINT16_IMM_OP(JSOP_SETLOCALPOP, slot);
3722 break;
3725 case JSOP_SETARG:
3726 case JSOP_SETGVAR:
3728 jsuint slot = pn->pn_cookie.asInteger();
3729 EMIT_UINT16_IMM_OP(PN_OP(pn), slot);
3730 if (js_Emit1(cx, cg, JSOP_POP) < 0)
3731 return JS_FALSE;
3732 break;
3735 default:
3737 ptrdiff_t top;
3739 top = CG_OFFSET(cg);
3740 if (!js_EmitTree(cx, cg, pn))
3741 return JS_FALSE;
3742 if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
3743 return JS_FALSE;
3744 if (js_Emit1(cx, cg, JSOP_ENUMELEM) < 0)
3745 return JS_FALSE;
3746 break;
3749 case JSOP_ENUMELEM:
3750 JS_ASSERT(0);
3754 return JS_TRUE;
3758 * Recursive helper for EmitDestructuringOps.
3760 * Given a value to destructure on the stack, walk over an object or array
3761 * initialiser at pn, emitting bytecodes to match property values and store
3762 * them in the lvalues identified by the matched property names.
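/*
 * Illustrative sketch (example not from the original source): for
 * var [a, b] = rhs, with rhs's value on the stack, the helper emits
 * roughly
 *
 *   JSOP_DUP; <0>; JSOP_GETELEM; <store a>;
 *   JSOP_DUP; <1>; JSOP_GETELEM; <store b>
 *
 * leaving the original value on the stack for the caller to pop.
 */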
3764 static JSBool
3765 EmitDestructuringOpsHelper(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
3767 jsuint index;
3768 JSParseNode *pn2, *pn3;
3769 JSBool doElemOp;
3771 #ifdef DEBUG
3772 intN stackDepth = cg->stackDepth;
3773 JS_ASSERT(stackDepth != 0);
3774 JS_ASSERT(pn->pn_arity == PN_LIST);
3775 JS_ASSERT(pn->pn_type == TOK_RB || pn->pn_type == TOK_RC);
3776 #endif
3778 if (pn->pn_count == 0) {
3779 /* Emit a DUP;POP sequence for the decompiler. */
3780 return js_Emit1(cx, cg, JSOP_DUP) >= 0 &&
3781 js_Emit1(cx, cg, JSOP_POP) >= 0;
3784 index = 0;
3785 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
3787 * Duplicate the value being destructured to use as a reference base.
3788 * If dup is not the first one, annotate it for the decompiler.
3790 if (pn2 != pn->pn_head && js_NewSrcNote(cx, cg, SRC_CONTINUE) < 0)
3791 return JS_FALSE;
3792 if (js_Emit1(cx, cg, JSOP_DUP) < 0)
3793 return JS_FALSE;
3796 * Now push the property name currently being matched, which is either
3797 * the array initialiser's current index, or the current property name
3798 * "label" on the left of a colon in the object initialiser. Set pn3
3799 * to the lvalue node, which is in the value-initializing position.
3801 doElemOp = JS_TRUE;
3802 if (pn->pn_type == TOK_RB) {
3803 if (!EmitNumberOp(cx, index, cg))
3804 return JS_FALSE;
3805 pn3 = pn2;
3806 } else {
3807 JS_ASSERT(pn->pn_type == TOK_RC);
3808 JS_ASSERT(pn2->pn_type == TOK_COLON);
3809 pn3 = pn2->pn_left;
3810 if (pn3->pn_type == TOK_NUMBER) {
3812 * If we are emitting an object destructuring initialiser,
3813 * annotate the index op with SRC_INITPROP so we know we are
3814 * not decompiling an array initialiser.
3816 if (js_NewSrcNote(cx, cg, SRC_INITPROP) < 0)
3817 return JS_FALSE;
3818 if (!EmitNumberOp(cx, pn3->pn_dval, cg))
3819 return JS_FALSE;
3820 } else {
3821 JS_ASSERT(pn3->pn_type == TOK_STRING ||
3822 pn3->pn_type == TOK_NAME);
3823 if (!EmitAtomOp(cx, pn3, JSOP_GETPROP, cg))
3824 return JS_FALSE;
3825 doElemOp = JS_FALSE;
3827 pn3 = pn2->pn_right;
3830 if (doElemOp) {
3832 * Ok, get the value of the matching property name. This leaves
3833 * that value on top of the value being destructured, so the stack
3834 * is one deeper than when we started.
3836 if (js_Emit1(cx, cg, JSOP_GETELEM) < 0)
3837 return JS_FALSE;
3838 JS_ASSERT(cg->stackDepth == stackDepth + 1);
3841 /* Nullary comma node makes a hole in the array destructurer. */
3842 if (pn3->pn_type == TOK_COMMA && pn3->pn_arity == PN_NULLARY) {
3843 JS_ASSERT(pn->pn_type == TOK_RB);
3844 JS_ASSERT(pn2 == pn3);
3845 if (js_Emit1(cx, cg, JSOP_POP) < 0)
3846 return JS_FALSE;
3847 } else {
3848 if (!EmitDestructuringLHS(cx, cg, pn3))
3849 return JS_FALSE;
3852 JS_ASSERT(cg->stackDepth == stackDepth);
3853 ++index;
3856 return JS_TRUE;
3859 static ptrdiff_t
3860 OpToDeclType(JSOp op)
3862 switch (op) {
3863 case JSOP_NOP:
3864 return SRC_DECL_LET;
3865 case JSOP_DEFCONST:
3866 return SRC_DECL_CONST;
3867 case JSOP_DEFVAR:
3868 return SRC_DECL_VAR;
3869 default:
3870 return SRC_DECL_NONE;
3874 static JSBool
3875 EmitDestructuringOps(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3876 JSParseNode *pn)
3879 * If we're called from a variable declaration, help the decompiler by
3880 * annotating the first JSOP_DUP that EmitDestructuringOpsHelper emits.
3881 * If the destructuring initialiser is empty, our helper will emit a
3882 * JSOP_DUP followed by a JSOP_POP for the decompiler.
3884 if (js_NewSrcNote2(cx, cg, SRC_DESTRUCT, OpToDeclType(prologOp)) < 0)
3885 return JS_FALSE;
3888 * Call our recursive helper to emit the destructuring assignments and
3889 * related stack manipulations.
3891 return EmitDestructuringOpsHelper(cx, cg, pn);
3894 static JSBool
3895 EmitGroupAssignment(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3896 JSParseNode *lhs, JSParseNode *rhs)
3898 jsuint depth, limit, i, nslots;
3899 JSParseNode *pn;
3901 depth = limit = (uintN) cg->stackDepth;
3902 for (pn = rhs->pn_head; pn; pn = pn->pn_next) {
3903 if (limit == JS_BIT(16)) {
3904 ReportCompileErrorNumber(cx, CG_TS(cg), rhs, JSREPORT_ERROR, JSMSG_ARRAY_INIT_TOO_BIG);
3905 return JS_FALSE;
3908 /* MaybeEmitGroupAssignment won't call us if rhs is holey. */
3909 JS_ASSERT(!(pn->pn_type == TOK_COMMA && pn->pn_arity == PN_NULLARY));
3910 if (!js_EmitTree(cx, cg, pn))
3911 return JS_FALSE;
3912 ++limit;
3915 if (js_NewSrcNote2(cx, cg, SRC_GROUPASSIGN, OpToDeclType(prologOp)) < 0)
3916 return JS_FALSE;
3918 i = depth;
3919 for (pn = lhs->pn_head; pn; pn = pn->pn_next, ++i) {
3920 /* MaybeEmitGroupAssignment requires lhs->pn_count <= rhs->pn_count. */
3921 JS_ASSERT(i < limit);
3922 jsint slot = AdjustBlockSlot(cx, cg, i);
3923 if (slot < 0)
3924 return JS_FALSE;
3925 EMIT_UINT16_IMM_OP(JSOP_GETLOCAL, slot);
3927 if (pn->pn_type == TOK_COMMA && pn->pn_arity == PN_NULLARY) {
3928 if (js_Emit1(cx, cg, JSOP_POP) < 0)
3929 return JS_FALSE;
3930 } else {
3931 if (!EmitDestructuringLHS(cx, cg, pn))
3932 return JS_FALSE;
3936 nslots = limit - depth;
3937 EMIT_UINT16_IMM_OP(JSOP_POPN, nslots);
3938 cg->stackDepth = (uintN) depth;
3939 return JS_TRUE;
3940 }
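/*
 * A rough sketch of what EmitGroupAssignment produces for, e.g.,
 * [a, b] = [b, a] (lhs and rhs both array initialisers, rhs not holey):
 * the rhs elements are evaluated into stack slots, then each slot is
 * read back with JSOP_GETLOCAL and assigned via EmitDestructuringLHS
 * (or popped for an lhs hole), and a final JSOP_POPN restores the
 * original stack depth, which is what lets callers elide their pop.
 */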
3942 /*
3943 * Helper called with pop out param initialized to a JSOP_POP* opcode. If we
3944 * can emit a group assignment sequence, which results in 0 stack depth delta,
3945 * we set *pop to JSOP_NOP so callers can veto emitting pn followed by a pop.
3946 */
3947 static JSBool
3948 MaybeEmitGroupAssignment(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3949 JSParseNode *pn, JSOp *pop)
3950 {
3951 JSParseNode *lhs, *rhs;
3953 JS_ASSERT(pn->pn_type == TOK_ASSIGN);
3954 JS_ASSERT(*pop == JSOP_POP || *pop == JSOP_POPV);
3955 lhs = pn->pn_left;
3956 rhs = pn->pn_right;
3957 if (lhs->pn_type == TOK_RB && rhs->pn_type == TOK_RB &&
3958 !(rhs->pn_xflags & PNX_HOLEY) &&
3959 lhs->pn_count <= rhs->pn_count) {
3960 if (!EmitGroupAssignment(cx, cg, prologOp, lhs, rhs))
3961 return JS_FALSE;
3962 *pop = JSOP_NOP;
3964 return JS_TRUE;
3965 }
3967 #endif /* JS_HAS_DESTRUCTURING */
3969 static JSBool
3970 EmitVariables(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
3971 JSBool inLetHead, ptrdiff_t *headNoteIndex)
3972 {
3973 bool let, forInVar, first;
3974 #if JS_HAS_BLOCK_SCOPE
3975 bool forInLet, popScope;
3976 JSStmtInfo *stmt, *scopeStmt;
3977 #endif
3978 ptrdiff_t off, noteIndex, tmp;
3979 JSParseNode *pn2, *pn3, *next;
3980 JSOp op;
3981 jsatomid atomIndex;
3982 uintN oldflags;
3984 /* Default in case of JS_HAS_BLOCK_SCOPE early return, below. */
3985 *headNoteIndex = -1;
3987 /*
3988 * Let blocks and expressions have a parenthesized head in which the new
3989 * scope is not yet open. Initializer evaluation uses the parent node's
3990 * lexical scope. If popScope is true below, then we hide the top lexical
3991 * block from any calls to BindNameToSlot hiding in pn2->pn_expr so that
3992 * it won't find any names in the new let block.
3994 * The same goes for let declarations in the head of any kind of for loop.
3995 * Unlike a let declaration 'let x = i' within a block, where x is hoisted
3996 * to the start of the block, a 'for (let x = i...) ...' loop evaluates i
3997 * in the containing scope, and puts x in the loop body's scope.
3998 */
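/*
 * For example, in 'let (x = x + 1) body' or 'for (let x = x; ;) ...'
 * the initializing 'x' on the right must be looked up in the enclosing
 * scope, which is why popScope temporarily unlinks the top lexical
 * block around the initializer emit below.
 */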
3999 let = (pn->pn_op == JSOP_NOP);
4000 forInVar = (pn->pn_xflags & PNX_FORINVAR) != 0;
4001 #if JS_HAS_BLOCK_SCOPE
4002 forInLet = let && forInVar;
4003 popScope = (inLetHead || (let && (cg->flags & TCF_IN_FOR_INIT)));
4004 if (popScope) {
4005 stmt = cg->topStmt;
4006 scopeStmt = cg->topScopeStmt;
4008 # ifdef __GNUC__
4009 else stmt = scopeStmt = NULL; /* quell GCC overwarning */
4010 # endif
4011 JS_ASSERT(!popScope || let);
4012 #endif
4014 off = noteIndex = -1;
4015 for (pn2 = pn->pn_head; ; pn2 = next) {
4016 first = pn2 == pn->pn_head;
4017 next = pn2->pn_next;
4019 if (pn2->pn_type != TOK_NAME) {
4020 #if JS_HAS_DESTRUCTURING
4021 if (pn2->pn_type == TOK_RB || pn2->pn_type == TOK_RC) {
4022 /*
4023 * Emit variable binding ops, but not destructuring ops.
4024 * The parser (see Variables, jsparse.c) has ensured that
4025 * our caller will be the TOK_FOR/TOK_IN case in js_EmitTree,
4026 * and that case will emit the destructuring code only after
4027 * emitting an enumerating opcode and a branch that tests
4028 * whether the enumeration ended.
4029 */
4030 JS_ASSERT(forInVar);
4031 JS_ASSERT(pn->pn_count == 1);
4032 if (!EmitDestructuringDecls(cx, cg, PN_OP(pn), pn2))
4033 return JS_FALSE;
4034 break;
4036 #endif
4038 /*
4039 * A destructuring initialiser assignment preceded by var will
4040 * never occur to the left of 'in' in a for-in loop. As with 'for
4041 * (var x = i in o)...', this will cause the entire 'var [a, b] =
4042 * i' to be hoisted out of the loop.
4043 */
4044 JS_ASSERT(pn2->pn_type == TOK_ASSIGN);
4045 JS_ASSERT(!forInVar);
4047 /*
4048 * To allow the front end to rewrite var f = x; as f = x; when a
4049 * function f(){} precedes the var, detect simple name assignment
4050 * here and initialize the name.
4051 */
4052 #if !JS_HAS_DESTRUCTURING
4053 JS_ASSERT(pn2->pn_left->pn_type == TOK_NAME);
4054 #else
4055 if (pn2->pn_left->pn_type == TOK_NAME)
4056 #endif
4058 pn3 = pn2->pn_right;
4059 pn2 = pn2->pn_left;
4060 goto do_name;
4063 #if JS_HAS_DESTRUCTURING
4064 if (pn->pn_count == 1) {
4065 /*
4066 * If this is the only destructuring assignment in the list,
4067 * try to optimize to a group assignment. If we're in a let
4068 * head, pass JSOP_POP rather than the pseudo-prolog JSOP_NOP
4069 * in pn->pn_op, to suppress a second (and misplaced) 'let'.
4070 */
4071 JS_ASSERT(noteIndex < 0 && !pn2->pn_next);
4072 op = JSOP_POP;
4073 if (!MaybeEmitGroupAssignment(cx, cg,
4074 inLetHead ? JSOP_POP : PN_OP(pn),
4075 pn2, &op)) {
4076 return JS_FALSE;
4078 if (op == JSOP_NOP) {
4079 pn->pn_xflags = (pn->pn_xflags & ~PNX_POPVAR) | PNX_GROUPINIT;
4080 break;
4084 pn3 = pn2->pn_left;
4085 if (!EmitDestructuringDecls(cx, cg, PN_OP(pn), pn3))
4086 return JS_FALSE;
4088 if (!js_EmitTree(cx, cg, pn2->pn_right))
4089 return JS_FALSE;
4091 /*
4092 * Veto pn->pn_op if inLetHead to avoid emitting a SRC_DESTRUCT
4093 * that's redundant with respect to the SRC_DECL/SRC_DECL_LET that
4094 * we will emit at the bottom of this function.
4095 */
4096 if (!EmitDestructuringOps(cx, cg,
4097 inLetHead ? JSOP_POP : PN_OP(pn),
4098 pn3)) {
4099 return JS_FALSE;
4101 goto emit_note_pop;
4102 #endif
4105 /*
4106 * Load initializer early to share code above that jumps to do_name.
4107 * NB: if this var redeclares an existing binding, then pn2 is linked
4108 * on its definition's use-chain and pn_expr has been overlayed with
4109 * pn_lexdef.
4110 */
4111 pn3 = pn2->maybeExpr();
4113 do_name:
4114 if (!BindNameToSlot(cx, cg, pn2))
4115 return JS_FALSE;
4117 op = PN_OP(pn2);
4118 if (op == JSOP_ARGUMENTS) {
4119 /* JSOP_ARGUMENTS => no initializer */
4120 JS_ASSERT(!pn3 && !let);
4121 pn3 = NULL;
4122 #ifdef __GNUC__
4123 atomIndex = 0; /* quell GCC overwarning */
4124 #endif
4125 } else {
4126 JS_ASSERT(op != JSOP_CALLEE);
4127 JS_ASSERT(!pn2->pn_cookie.isFree() || !let);
4128 if (!MaybeEmitVarDecl(cx, cg, PN_OP(pn), pn2, &atomIndex))
4129 return JS_FALSE;
4131 if (pn3) {
4132 JS_ASSERT(!forInVar);
4133 if (op == JSOP_SETNAME) {
4134 JS_ASSERT(!let);
4135 EMIT_INDEX_OP(JSOP_BINDNAME, atomIndex);
4137 if (pn->pn_op == JSOP_DEFCONST &&
4138 !js_DefineCompileTimeConstant(cx, cg, pn2->pn_atom, pn3)) {
4139 return JS_FALSE;
4142 #if JS_HAS_BLOCK_SCOPE
4143 /* Evaluate expr in the outer lexical scope if requested. */
4144 if (popScope) {
4145 cg->topStmt = stmt->down;
4146 cg->topScopeStmt = scopeStmt->downScope;
4148 #endif
4150 oldflags = cg->flags;
4151 cg->flags &= ~TCF_IN_FOR_INIT;
4152 if (!js_EmitTree(cx, cg, pn3))
4153 return JS_FALSE;
4154 cg->flags |= oldflags & TCF_IN_FOR_INIT;
4156 #if JS_HAS_BLOCK_SCOPE
4157 if (popScope) {
4158 cg->topStmt = stmt;
4159 cg->topScopeStmt = scopeStmt;
4161 #endif
4165 /*
4166 * The parser rewrites 'for (var x = i in o)' to hoist 'var x = i' --
4167 * likewise 'for (let x = i in o)' becomes 'i; for (let x in o)' using
4168 * a TOK_SEQ node to make the two statements appear as one. Therefore
4169 * if this declaration is part of a for-in loop head, we do not need to
4170 * emit op or any source note. Our caller, the TOK_FOR/TOK_IN case in
4171 * js_EmitTree, will annotate appropriately.
4172 */
4173 JS_ASSERT_IF(pn2->pn_defn, pn3 == pn2->pn_expr);
4174 if (forInVar) {
4175 JS_ASSERT(pn->pn_count == 1);
4176 JS_ASSERT(!pn3);
4177 break;
4180 if (first &&
4181 !inLetHead &&
4182 js_NewSrcNote2(cx, cg, SRC_DECL,
4183 (pn->pn_op == JSOP_DEFCONST)
4184 ? SRC_DECL_CONST
4185 : (pn->pn_op == JSOP_DEFVAR)
4186 ? SRC_DECL_VAR
4187 : SRC_DECL_LET) < 0) {
4188 return JS_FALSE;
4190 if (op == JSOP_ARGUMENTS) {
4191 if (js_Emit1(cx, cg, op) < 0)
4192 return JS_FALSE;
4193 } else if (!pn2->pn_cookie.isFree()) {
4194 EMIT_UINT16_IMM_OP(op, atomIndex);
4195 } else {
4196 EMIT_INDEX_OP(op, atomIndex);
4199 #if JS_HAS_DESTRUCTURING
4200 emit_note_pop:
4201 #endif
4202 tmp = CG_OFFSET(cg);
4203 if (noteIndex >= 0) {
4204 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, tmp-off))
4205 return JS_FALSE;
4207 if (!next)
4208 break;
4209 off = tmp;
4210 noteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
4211 if (noteIndex < 0 || js_Emit1(cx, cg, JSOP_POP) < 0)
4212 return JS_FALSE;
4215 /* If this is a let head, emit and return a srcnote on the pop. */
4216 if (inLetHead) {
4217 *headNoteIndex = js_NewSrcNote(cx, cg, SRC_DECL);
4218 if (*headNoteIndex < 0)
4219 return JS_FALSE;
4220 if (!(pn->pn_xflags & PNX_POPVAR))
4221 return js_Emit1(cx, cg, JSOP_NOP) >= 0;
4224 return !(pn->pn_xflags & PNX_POPVAR) || js_Emit1(cx, cg, JSOP_POP) >= 0;
4225 }
4227 #if defined DEBUG_brendan || defined DEBUG_mrbkap
4228 static JSBool
4229 GettableNoteForNextOp(JSCodeGenerator *cg)
4230 {
4231 ptrdiff_t offset, target;
4232 jssrcnote *sn, *end;
4234 offset = 0;
4235 target = CG_OFFSET(cg);
4236 for (sn = CG_NOTES(cg), end = sn + CG_NOTE_COUNT(cg); sn < end;
4237 sn = SN_NEXT(sn)) {
4238 if (offset == target && SN_IS_GETTABLE(sn))
4239 return JS_TRUE;
4240 offset += SN_DELTA(sn);
4242 return JS_FALSE;
4243 }
4244 #endif
4246 /* Top-level named functions need a nop for decompilation. */
4247 static JSBool
4248 EmitFunctionDefNop(JSContext *cx, JSCodeGenerator *cg, uintN index)
4249 {
4250 return js_NewSrcNote2(cx, cg, SRC_FUNCDEF, (ptrdiff_t)index) >= 0 &&
4251 js_Emit1(cx, cg, JSOP_NOP) >= 0;
4252 }
4254 static bool
4255 EmitNewInit(JSContext *cx, JSCodeGenerator *cg, JSProtoKey key, JSParseNode *pn, int sharpnum)
4256 {
4257 if (js_Emit2(cx, cg, JSOP_NEWINIT, (jsbytecode) key) < 0)
4258 return false;
4259 #if JS_HAS_SHARP_VARS
4260 if (cg->hasSharps()) {
4261 if (pn->pn_count != 0)
4262 EMIT_UINT16_IMM_OP(JSOP_SHARPINIT, cg->sharpSlotBase);
4263 if (sharpnum >= 0)
4264 EMIT_UINT16PAIR_IMM_OP(JSOP_DEFSHARP, cg->sharpSlotBase, sharpnum);
4265 } else {
4266 JS_ASSERT(sharpnum < 0);
4268 #endif
4269 return true;
4270 }
4272 static bool
4273 EmitEndInit(JSContext *cx, JSCodeGenerator *cg, uint32 count)
4274 {
4275 #if JS_HAS_SHARP_VARS
4276 /* Emit an op for sharp array cleanup and decompilation. */
4277 if (cg->hasSharps() && count != 0)
4278 EMIT_UINT16_IMM_OP(JSOP_SHARPINIT, cg->sharpSlotBase);
4279 #endif
4280 return js_Emit1(cx, cg, JSOP_ENDINIT) >= 0;
4281 }
4283 /* See the SRC_FOR source note offsetBias comments later in this file. */
4284 JS_STATIC_ASSERT(JSOP_NOP_LENGTH == 1);
4285 JS_STATIC_ASSERT(JSOP_POP_LENGTH == 1);
4287 JSBool
4288 js_EmitTree(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
4289 {
4290 JSBool ok, useful, wantval;
4291 JSStmtInfo *stmt, stmtInfo;
4292 ptrdiff_t top, off, tmp, beq, jmp;
4293 JSParseNode *pn2, *pn3;
4294 JSAtom *atom;
4295 JSAtomListElement *ale;
4296 jsatomid atomIndex;
4297 uintN index;
4298 ptrdiff_t noteIndex;
4299 JSSrcNoteType noteType;
4300 jsbytecode *pc;
4301 JSOp op;
4302 TokenKind type;
4303 uint32 argc;
4304 #if JS_HAS_SHARP_VARS
4305 jsint sharpnum;
4306 #endif
4308 JS_CHECK_RECURSION(cx, return JS_FALSE);
4310 ok = JS_TRUE;
4311 cg->emitLevel++;
4312 pn->pn_offset = top = CG_OFFSET(cg);
4314 /* Emit notes to tell the current bytecode's source line number. */
4315 UPDATE_LINE_NUMBER_NOTES(cx, cg, pn->pn_pos.begin.lineno);
4317 switch (pn->pn_type) {
4318 case TOK_FUNCTION:
4319 {
4320 JSFunction *fun;
4321 uintN slot;
4323 #if JS_HAS_XML_SUPPORT
4324 if (pn->pn_arity == PN_NULLARY) {
4325 if (js_Emit1(cx, cg, JSOP_GETFUNNS) < 0)
4326 return JS_FALSE;
4327 break;
4329 #endif
4331 fun = (JSFunction *) pn->pn_funbox->object;
4332 JS_ASSERT(FUN_INTERPRETED(fun));
4333 if (fun->u.i.script) {
4334 /*
4335 * This second pass is needed to emit JSOP_NOP with a source note
4336 * for the already-emitted function definition prolog opcode. See
4337 * comments in the TOK_LC case.
4338 */
4339 JS_ASSERT(pn->pn_op == JSOP_NOP);
4340 JS_ASSERT(cg->inFunction());
4341 if (!EmitFunctionDefNop(cx, cg, pn->pn_index))
4342 return JS_FALSE;
4343 break;
4346 JS_ASSERT_IF(cx->options & JSOPTION_ANONFUNFIX,
4347 pn->pn_defn ||
4348 (!pn->pn_used && !pn->isTopLevel()) ||
4349 (fun->flags & JSFUN_LAMBDA));
4351 JS_ASSERT_IF(pn->pn_funbox->tcflags & TCF_FUN_HEAVYWEIGHT,
4352 FUN_KIND(fun) == JSFUN_INTERPRETED);
4354 /* Generate code for the function's body. */
4355 void *cg2mark = JS_ARENA_MARK(cg->codePool);
4356 void *cg2space;
4357 JS_ARENA_ALLOCATE_TYPE(cg2space, JSCodeGenerator, cg->codePool);
4358 if (!cg2space) {
4359 js_ReportOutOfScriptQuota(cx);
4360 return JS_FALSE;
4362 JSCodeGenerator *cg2 =
4363 new (cg2space) JSCodeGenerator(cg->parser,
4364 cg->codePool, cg->notePool,
4365 pn->pn_pos.begin.lineno);
4367 if (!cg2->init())
4368 return JS_FALSE;
4370 cg2->flags = pn->pn_funbox->tcflags | TCF_IN_FUNCTION;
4371 #if JS_HAS_SHARP_VARS
4372 if (cg2->flags & TCF_HAS_SHARPS) {
4373 cg2->sharpSlotBase = fun->sharpSlotBase(cx);
4374 if (cg2->sharpSlotBase < 0)
4375 return JS_FALSE;
4377 #endif
4378 cg2->fun = fun;
4379 cg2->funbox = pn->pn_funbox;
4380 cg2->parent = cg;
4382 /*
4383 * jsparse.cpp:SetStaticLevel limited static nesting depth to fit in 16
4384 * bits and to reserve the all-ones value, thereby reserving the magic
4385 * FREE_UPVAR_COOKIE value. Note the cg2->staticLevel assignment below.
4386 */
4387 JS_ASSERT(cg->staticLevel < JS_BITMASK(16) - 1);
4388 cg2->staticLevel = cg->staticLevel + 1;
4390 /* We measured the max scope depth when we parsed the function. */
4391 JS_SCOPE_DEPTH_METERING(cg2->maxScopeDepth = uint16(-1));
4392 if (!js_EmitFunctionScript(cx, cg2, pn->pn_body))
4393 pn = NULL;
4395 cg2->~JSCodeGenerator();
4396 JS_ARENA_RELEASE(cg->codePool, cg2mark);
4397 cg2 = NULL;
4398 if (!pn)
4399 return JS_FALSE;
4401 /* Make the function object a literal in the outer script's pool. */
4402 index = cg->objectList.index(pn->pn_funbox);
4404 /* Emit a bytecode pointing to the closure object in its immediate. */
4405 op = PN_OP(pn);
4406 if (op != JSOP_NOP) {
4407 if ((pn->pn_funbox->tcflags & TCF_GENEXP_LAMBDA) &&
4408 js_NewSrcNote(cx, cg, SRC_GENEXP) < 0) {
4409 return JS_FALSE;
4411 EMIT_INDEX_OP(op, index);
4412 break;
4415 /*
4416 * For a script we emit the code as we parse. Thus the bytecode for
4417 * top-level functions should go in the prolog to predefine their
4418 * names in the variable object before the already-generated main code
4419 * is executed. This extra work for top-level scripts is not necessary
4420 * when we emit the code for a function. It is fully parsed prior to
4421 * invocation of the emitter and calls to js_EmitTree for function
4422 * definitions can be scheduled before generating the rest of code.
4423 */
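/*
 * Sketch: a top-level 'function f() {}' thus compiles to a prolog
 * JSOP_DEFFUN (or JSOP_DEFFUN_FC) that binds f before the main code
 * runs, plus a main-code JSOP_NOP carrying SRC_FUNCDEF so the
 * decompiler can render the definition in its original position.
 */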
4424 if (!cg->inFunction()) {
4425 JS_ASSERT(!cg->topStmt);
4426 CG_SWITCH_TO_PROLOG(cg);
4427 op = FUN_FLAT_CLOSURE(fun) ? JSOP_DEFFUN_FC : JSOP_DEFFUN;
4428 EMIT_INDEX_OP(op, index);
4429 CG_SWITCH_TO_MAIN(cg);
4431 /* Emit NOP for the decompiler. */
4432 if (!EmitFunctionDefNop(cx, cg, index))
4433 return JS_FALSE;
4434 } else {
4435 #ifdef DEBUG
4436 JSLocalKind localKind =
4437 #endif
4438 js_LookupLocal(cx, cg->fun, fun->atom, &slot);
4439 JS_ASSERT(localKind == JSLOCAL_VAR || localKind == JSLOCAL_CONST);
4440 JS_ASSERT(index < JS_BIT(20));
4441 pn->pn_index = index;
4442 op = FUN_FLAT_CLOSURE(fun) ? JSOP_DEFLOCALFUN_FC : JSOP_DEFLOCALFUN;
4443 if (!EmitSlotIndexOp(cx, op, slot, index, cg))
4444 return JS_FALSE;
4446 break;
4447 }
4449 case TOK_ARGSBODY:
4450 ok = js_EmitTree(cx, cg, pn->last());
4451 break;
4453 case TOK_UPVARS:
4454 JS_ASSERT(cg->lexdeps.count == 0);
4455 JS_ASSERT(pn->pn_names.count != 0);
4456 cg->lexdeps = pn->pn_names;
4457 ok = js_EmitTree(cx, cg, pn->pn_tree);
4458 break;
4460 case TOK_IF:
4461 /* Initialize so we can detect else-if chains and avoid recursion. */
4462 stmtInfo.type = STMT_IF;
4463 beq = jmp = -1;
4464 noteIndex = -1;
4466 if_again:
4467 /* Emit code for the condition before pushing stmtInfo. */
4468 if (!js_EmitTree(cx, cg, pn->pn_kid1))
4469 return JS_FALSE;
4470 top = CG_OFFSET(cg);
4471 if (stmtInfo.type == STMT_IF) {
4472 js_PushStatement(cg, &stmtInfo, STMT_IF, top);
4473 } else {
4474 /*
4475 * We came here from the goto further below that detects else-if
4476 * chains, so we must mutate stmtInfo back into a STMT_IF record.
4477 * Also (see below for why) we need a note offset for SRC_IF_ELSE
4478 * to help the decompiler. Actually, we need two offsets, one for
4479 * decompiling any else clause and the second for decompiling an
4480 * else-if chain without bracing, overindenting, or incorrectly
4481 * scoping let declarations.
4482 */
4483 JS_ASSERT(stmtInfo.type == STMT_ELSE);
4484 stmtInfo.type = STMT_IF;
4485 stmtInfo.update = top;
4486 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, jmp - beq))
4487 return JS_FALSE;
4488 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 1, top - beq))
4489 return JS_FALSE;
4492 /* Emit an annotated branch-if-false around the then part. */
4493 pn3 = pn->pn_kid3;
4494 noteIndex = js_NewSrcNote(cx, cg, pn3 ? SRC_IF_ELSE : SRC_IF);
4495 if (noteIndex < 0)
4496 return JS_FALSE;
4497 beq = EmitJump(cx, cg, JSOP_IFEQ, 0);
4498 if (beq < 0)
4499 return JS_FALSE;
4501 /* Emit code for the then and optional else parts. */
4502 if (!js_EmitTree(cx, cg, pn->pn_kid2))
4503 return JS_FALSE;
4504 if (pn3) {
4505 /* Modify stmtInfo so we know we're in the else part. */
4506 stmtInfo.type = STMT_ELSE;
4508 /*
4509 * Emit a JSOP_BACKPATCH op to jump from the end of our then part
4510 * around the else part. The js_PopStatementCG call at the bottom
4511 * of this switch case will fix up the backpatch chain linked from
4512 * stmtInfo.breaks.
4513 */
4514 jmp = EmitGoto(cx, cg, &stmtInfo, &stmtInfo.breaks, NULL, SRC_NULL);
4515 if (jmp < 0)
4516 return JS_FALSE;
4518 /* Ensure the branch-if-false comes here, then emit the else. */
4519 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, beq);
4520 if (pn3->pn_type == TOK_IF) {
4521 pn = pn3;
4522 goto if_again;
4525 if (!js_EmitTree(cx, cg, pn3))
4526 return JS_FALSE;
4528 /*
4529 * Annotate SRC_IF_ELSE with the offset from branch to jump, for
4530 * the decompiler's benefit. We can't just "back up" from the pc
4531 * of the else clause, because we don't know whether an extended
4532 * jump was required to leap from the end of the then clause over
4533 * the else clause.
4534 */
4535 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, jmp - beq))
4536 return JS_FALSE;
4537 } else {
4538 /* No else part, fixup the branch-if-false to come here. */
4539 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, beq);
4541 ok = js_PopStatementCG(cx, cg);
4542 break;
4544 case TOK_SWITCH:
4545 /* Out of line to avoid bloating js_EmitTree's stack frame size. */
4546 ok = EmitSwitch(cx, cg, pn, &stmtInfo);
4547 break;
4549 case TOK_WHILE:
4550 /*
4551 * Minimize bytecodes issued for one or more iterations by jumping to
4552 * the condition below the body and closing the loop if the condition
4553 * is true with a backward branch. For iteration count i:
4555 * i test at the top test at the bottom
4556 * = =============== ==================
4557 * 0 ifeq-pass goto; ifne-fail
4558 * 1 ifeq-fail; goto; ifne-pass goto; ifne-pass; ifne-fail
4559 * 2 2*(ifeq-fail; goto); ifeq-pass goto; 2*ifne-pass; ifne-fail
4560 * . . .
4561 * N N*(ifeq-fail; goto); ifeq-pass goto; N*ifne-pass; ifne-fail
4563 * SpiderMonkey, pre-mozilla.org, emitted while parsing and so used
4564 * test at the top. When JSParseNode trees were added during the ES3
4565 * work (1998-9), the code generation scheme was not optimized, and
4566 * the decompiler continued to take advantage of the branch and jump
4567 * that bracketed the body. But given the SRC_WHILE note, it is easy
4568 * to support the more efficient scheme.
4569 */
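/*
 * Concretely, the code emitted below for 'while (cond) body' is laid
 * out as:
 *
 *   goto C        (annotated SRC_WHILE)
 *   T: trace
 *      <body>
 *   C: <cond>
 *      ifne T
 *
 * so each iteration pays for just one conditional branch.
 */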
4570 js_PushStatement(cg, &stmtInfo, STMT_WHILE_LOOP, top);
4571 noteIndex = js_NewSrcNote(cx, cg, SRC_WHILE);
4572 if (noteIndex < 0)
4573 return JS_FALSE;
4574 jmp = EmitJump(cx, cg, JSOP_GOTO, 0);
4575 if (jmp < 0)
4576 return JS_FALSE;
4577 top = js_Emit1(cx, cg, JSOP_TRACE);
4578 if (top < 0)
4579 return JS_FALSE;
4580 if (!js_EmitTree(cx, cg, pn->pn_right))
4581 return JS_FALSE;
4582 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
4583 if (!js_EmitTree(cx, cg, pn->pn_left))
4584 return JS_FALSE;
4585 beq = EmitJump(cx, cg, JSOP_IFNE, top - CG_OFFSET(cg));
4586 if (beq < 0)
4587 return JS_FALSE;
4588 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, beq - jmp))
4589 return JS_FALSE;
4590 ok = js_PopStatementCG(cx, cg);
4591 break;
4593 case TOK_DO:
4594 /* Emit an annotated nop so we know to decompile a 'do' keyword. */
4595 noteIndex = js_NewSrcNote(cx, cg, SRC_WHILE);
4596 if (noteIndex < 0 || js_Emit1(cx, cg, JSOP_NOP) < 0)
4597 return JS_FALSE;
4599 /* Compile the loop body. */
4600 top = js_Emit1(cx, cg, JSOP_TRACE);
4601 if (top < 0)
4602 return JS_FALSE;
4603 js_PushStatement(cg, &stmtInfo, STMT_DO_LOOP, top);
4604 if (!js_EmitTree(cx, cg, pn->pn_left))
4605 return JS_FALSE;
4607 /* Set loop and enclosing label update offsets, for continue. */
4608 stmt = &stmtInfo;
4609 do {
4610 stmt->update = CG_OFFSET(cg);
4611 } while ((stmt = stmt->down) != NULL && stmt->type == STMT_LABEL);
4613 /* Compile the loop condition, now that continues know where to go. */
4614 if (!js_EmitTree(cx, cg, pn->pn_right))
4615 return JS_FALSE;
4617 /*
4618 * Since we use JSOP_IFNE for other purposes as well as for do-while
4619 * loops, we must store 1 + (beq - top) in the SRC_WHILE note offset,
4620 * and the decompiler must get that delta and decompile recursively.
4621 */
4622 beq = EmitJump(cx, cg, JSOP_IFNE, top - CG_OFFSET(cg));
4623 if (beq < 0)
4624 return JS_FALSE;
4625 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, 1 + (beq - top)))
4626 return JS_FALSE;
4627 ok = js_PopStatementCG(cx, cg);
4628 break;
4630 case TOK_FOR:
4631 beq = 0; /* suppress gcc warnings */
4632 jmp = -1;
4633 pn2 = pn->pn_left;
4634 js_PushStatement(cg, &stmtInfo, STMT_FOR_LOOP, top);
4636 if (pn2->pn_type == TOK_IN) {
4637 /* Set stmtInfo type for later testing. */
4638 stmtInfo.type = STMT_FOR_IN_LOOP;
4640 /*
4641 * If the left part is 'var x', emit code to define x if necessary
4642 * using a prolog opcode, but do not emit a pop. If the left part
4643 * is 'var x = i', emit prolog code to define x if necessary; then
4644 * emit code to evaluate i, assign the result to x, and pop the
4645 * result off the stack.
4647 * All the logic to do this is implemented in the outer switch's
4648 * TOK_VAR case, conditioned on pn_xflags flags set by the parser.
4650 * In the 'for (var x = i in o) ...' case, the js_EmitTree(...pn3)
4651 * called here will generate the proper note for the assignment
4652 * op that sets x = i, hoisting the initialized var declaration
4653 * out of the loop: 'var x = i; for (x in o) ...'.
4655 * In the 'for (var x in o) ...' case, nothing but the prolog op
4656 * (if needed) should be generated here, we must emit the note
4657 * just before the JSOP_FOR* opcode in the switch on pn3->pn_type
4658 * a bit below, so nothing is hoisted: 'for (var x in o) ...'.
4660 * A 'for (let x = i in o)' loop must not be hoisted, since in
4661 * this form the let variable is scoped by the loop body (but not
4662 * the head). The initializer expression i must be evaluated for
4663 * any side effects. So we hoist only i in the let case.
4664 */
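/*
 * The resulting for-in shape, roughly:
 *
 *   <hoisted init and/or prolog op, per the cases above>
 *   <object expr>; iter
 *   goto C        (annotated SRC_FOR_IN)
 *   T: trace; <JSOP_FOR*>; <body>
 *   C: moreiter; ifne T
 *   enditer
 */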
4665 pn3 = pn2->pn_left;
4666 type = PN_TYPE(pn3);
4667 cg->flags |= TCF_IN_FOR_INIT;
4668 if (TokenKindIsDecl(type) && !js_EmitTree(cx, cg, pn3))
4669 return JS_FALSE;
4670 cg->flags &= ~TCF_IN_FOR_INIT;
4672 /* Compile the object expression to the right of 'in'. */
4673 if (!js_EmitTree(cx, cg, pn2->pn_right))
4674 return JS_FALSE;
4676 /*
4677 * Emit a bytecode to convert top of stack value to the iterator
4678 * object depending on the loop variant (for-in, for-each-in, or
4679 * destructuring for-in).
4680 */
4681 JS_ASSERT(pn->pn_op == JSOP_ITER);
4682 if (js_Emit2(cx, cg, JSOP_ITER, (uint8) pn->pn_iflags) < 0)
4683 return JS_FALSE;
4685 /* Annotate so the decompiler can find the loop-closing jump. */
4686 noteIndex = js_NewSrcNote(cx, cg, SRC_FOR_IN);
4687 if (noteIndex < 0)
4688 return JS_FALSE;
4690 /*
4691 * Jump down to the loop condition to minimize overhead assuming at
4692 * least one iteration, as the other loop forms do.
4693 */
4694 jmp = EmitJump(cx, cg, JSOP_GOTO, 0);
4695 if (jmp < 0)
4696 return JS_FALSE;
4698 top = CG_OFFSET(cg);
4699 SET_STATEMENT_TOP(&stmtInfo, top);
4700 if (js_Emit1(cx, cg, JSOP_TRACE) < 0)
4701 return JS_FALSE;
4703 #ifdef DEBUG
4704 intN loopDepth = cg->stackDepth;
4705 #endif
4707 /*
4708 * Compile a JSOP_FOR* bytecode based on the left hand side.
4710 * Initialize op to JSOP_SETNAME in case of |for ([a, b] in o)...|
4711 * or similar, to signify assignment, rather than declaration, to
4712 * the decompiler. EmitDestructuringOps takes a prolog bytecode
4713 * parameter and emits the appropriate source note, defaulting to
4714 * assignment, so JSOP_SETNAME is not critical here; many similar
4715 * ops could be used -- just not JSOP_NOP (which means 'let').
4716 */
4717 op = JSOP_SETNAME;
4718 switch (type) {
4719 #if JS_HAS_BLOCK_SCOPE
4720 case TOK_LET:
4721 #endif
4722 case TOK_VAR:
4723 JS_ASSERT(pn3->pn_arity == PN_LIST && pn3->pn_count == 1);
4724 pn3 = pn3->pn_head;
4725 #if JS_HAS_DESTRUCTURING
4726 if (pn3->pn_type == TOK_ASSIGN) {
4727 pn3 = pn3->pn_left;
4728 JS_ASSERT(pn3->pn_type == TOK_RB || pn3->pn_type == TOK_RC);
4730 if (pn3->pn_type == TOK_RB || pn3->pn_type == TOK_RC) {
4731 op = PN_OP(pn2->pn_left);
4732 goto destructuring_for;
4734 #else
4735 JS_ASSERT(pn3->pn_type == TOK_NAME);
4736 #endif
4737 /* FALL THROUGH */
4739 case TOK_NAME:
4740 /*
4741 * Always annotate JSOP_FORLOCAL if given input of the form
4742 * 'for (let x in o)' -- the decompiler must not hoist the
4743 * 'let x' out of the loop head, or x will be bound in the
4744 * wrong scope. Likewise, but in this case only for the sake
4745 * of higher decompilation fidelity, do not hoist 'var x'
4746 * when given 'for (var x in o)'.
4747 */
4748 if ((
4749 #if JS_HAS_BLOCK_SCOPE
4750 type == TOK_LET ||
4751 #endif
4752 (type == TOK_VAR && !pn3->maybeExpr())) &&
4753 js_NewSrcNote2(cx, cg, SRC_DECL,
4754 (type == TOK_VAR)
4755 ? SRC_DECL_VAR
4756 : SRC_DECL_LET) < 0) {
4757 return JS_FALSE;
4759 if (!pn3->pn_cookie.isFree()) {
4760 op = PN_OP(pn3);
4761 switch (op) {
4762 case JSOP_GETARG: /* FALL THROUGH */
4763 case JSOP_SETARG: op = JSOP_FORARG; break;
4764 case JSOP_GETGVAR: /* FALL THROUGH */
4765 case JSOP_SETGVAR: op = JSOP_FORNAME; break;
4766 case JSOP_GETLOCAL: /* FALL THROUGH */
4767 case JSOP_SETLOCAL: op = JSOP_FORLOCAL; break;
4768 default: JS_ASSERT(0);
4770 } else {
4771 pn3->pn_op = JSOP_FORNAME;
4772 if (!BindNameToSlot(cx, cg, pn3))
4773 return JS_FALSE;
4774 op = PN_OP(pn3);
4776 if (pn3->isConst()) {
4777 ReportCompileErrorNumber(cx, CG_TS(cg), pn3, JSREPORT_ERROR,
4778 JSMSG_BAD_FOR_LEFTSIDE);
4779 return JS_FALSE;
4781 if (!pn3->pn_cookie.isFree()) {
4782 atomIndex = (jsatomid) pn3->pn_cookie.asInteger();
4783 EMIT_UINT16_IMM_OP(op, atomIndex);
4784 } else {
4785 if (!EmitAtomOp(cx, pn3, op, cg))
4786 return JS_FALSE;
4788 break;
4790 case TOK_DOT:
4791 /*
4792 * 'for (o.p in q)' can use JSOP_FORPROP only if evaluating 'o'
4793 * has no side effects.
4794 */
4795 useful = JS_FALSE;
4796 if (!CheckSideEffects(cx, cg, pn3->expr(), &useful))
4797 return JS_FALSE;
4798 if (!useful) {
4799 if (!EmitPropOp(cx, pn3, JSOP_FORPROP, cg, JS_FALSE))
4800 return JS_FALSE;
4801 break;
4803 /* FALL THROUGH */
4805 #if JS_HAS_DESTRUCTURING
4806 destructuring_for:
4807 #endif
4808 default:
4809 if (js_Emit1(cx, cg, JSOP_FORELEM) < 0)
4810 return JS_FALSE;
4811 JS_ASSERT(cg->stackDepth >= 2);
4813 #if JS_HAS_DESTRUCTURING
4814 if (pn3->pn_type == TOK_RB || pn3->pn_type == TOK_RC) {
4815 if (!EmitDestructuringOps(cx, cg, op, pn3))
4816 return JS_FALSE;
4817 if (js_Emit1(cx, cg, JSOP_POP) < 0)
4818 return JS_FALSE;
4819 } else
4820 #endif
4821 if (pn3->pn_type == TOK_LP) {
4822 JS_ASSERT(pn3->pn_op == JSOP_SETCALL);
4823 if (!js_EmitTree(cx, cg, pn3))
4824 return JS_FALSE;
4825 if (js_Emit1(cx, cg, JSOP_ENUMELEM) < 0)
4826 return JS_FALSE;
4827 } else
4828 #if JS_HAS_XML_SUPPORT
4829 if (pn3->pn_type == TOK_UNARYOP) {
4830 JS_ASSERT(pn3->pn_op == JSOP_BINDXMLNAME);
4831 if (!js_EmitTree(cx, cg, pn3))
4832 return JS_FALSE;
4833 if (js_Emit1(cx, cg, JSOP_ENUMELEM) < 0)
4834 return JS_FALSE;
4835 } else
4836 #endif
4837 if (!EmitElemOp(cx, pn3, JSOP_ENUMELEM, cg))
4838 return JS_FALSE;
4839 break;
4842 /* The stack should be balanced around the JSOP_FOR* opcode sequence. */
4843 JS_ASSERT(cg->stackDepth == loopDepth);
4845 /* Set the first srcnote offset so we can find the start of the loop body. */
4846 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, CG_OFFSET(cg) - jmp))
4847 return JS_FALSE;
4849 /* Emit code for the loop body. */
4850 if (!js_EmitTree(cx, cg, pn->pn_right))
4851 return JS_FALSE;
4853 /* Set loop and enclosing "update" offsets, for continue. */
4854 stmt = &stmtInfo;
4855 do {
4856 stmt->update = CG_OFFSET(cg);
4857 } while ((stmt = stmt->down) != NULL && stmt->type == STMT_LABEL);
4859 /*
4860 * Fixup the goto that starts the loop to jump down to JSOP_MOREITER.
4861 */
4862 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
4863 if (js_Emit1(cx, cg, JSOP_MOREITER) < 0)
4864 return JS_FALSE;
4865 beq = EmitJump(cx, cg, JSOP_IFNE, top - CG_OFFSET(cg));
4866 if (beq < 0)
4867 return JS_FALSE;
4869 /* Set the second srcnote offset so we can find the closing jump. */
4870 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 1, beq - jmp))
4871 return JS_FALSE;
4872 } else {
4873 /* C-style for (init; cond; update) ... loop. */
4874 op = JSOP_POP;
4875 pn3 = pn2->pn_kid1;
4876 if (!pn3) {
4877 /* No initializer: emit an annotated nop for the decompiler. */
4878 op = JSOP_NOP;
4879 } else {
4880 cg->flags |= TCF_IN_FOR_INIT;
4881 #if JS_HAS_DESTRUCTURING
4882 if (pn3->pn_type == TOK_ASSIGN &&
4883 !MaybeEmitGroupAssignment(cx, cg, op, pn3, &op)) {
4884 return JS_FALSE;
4886 #endif
4887 if (op == JSOP_POP) {
4888 if (!js_EmitTree(cx, cg, pn3))
4889 return JS_FALSE;
4890 if (TokenKindIsDecl(PN_TYPE(pn3))) {
4891 /*
4892 * Check whether a destructuring-initialized var decl
4893 * was optimized to a group assignment. If so, we do
4894 * not need to emit a pop below, so switch to a nop,
4895 * just for the decompiler.
4896 */
4897 JS_ASSERT(pn3->pn_arity == PN_LIST);
4898 if (pn3->pn_xflags & PNX_GROUPINIT)
4899 op = JSOP_NOP;
4902 cg->flags &= ~TCF_IN_FOR_INIT;
4905 /*
4906 * NB: the SRC_FOR note has offsetBias 1 (JSOP_{NOP,POP}_LENGTH).
4907 * Use tmp to hold the biased srcnote "top" offset, which differs
4908 * from the top local variable by the length of the JSOP_GOTO{,X}
4909 * emitted in between tmp and top if this loop has a condition.
4910 */
4911 noteIndex = js_NewSrcNote(cx, cg, SRC_FOR);
4912 if (noteIndex < 0 || js_Emit1(cx, cg, op) < 0)
4913 return JS_FALSE;
4914 tmp = CG_OFFSET(cg);
4916 if (pn2->pn_kid2) {
4917 /* Goto the loop condition, which branches back to iterate. */
4918 jmp = EmitJump(cx, cg, JSOP_GOTO, 0);
4919 if (jmp < 0)
4920 return JS_FALSE;
4923 top = CG_OFFSET(cg);
4924 SET_STATEMENT_TOP(&stmtInfo, top);
4926 /* Emit code for the loop body. */
4927 if (js_Emit1(cx, cg, JSOP_TRACE) < 0)
4928 return JS_FALSE;
4929 if (!js_EmitTree(cx, cg, pn->pn_right))
4930 return JS_FALSE;
4932 /* Set the second note offset so we can find the update part. */
4933 JS_ASSERT(noteIndex != -1);
4934 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 1,
4935 CG_OFFSET(cg) - tmp)) {
4936 return JS_FALSE;
4939 /* Set loop and enclosing "update" offsets, for continue. */
4940 stmt = &stmtInfo;
4941 do {
4942 stmt->update = CG_OFFSET(cg);
4943 } while ((stmt = stmt->down) != NULL && stmt->type == STMT_LABEL);
4945 /* Check for update code to do before the condition (if any). */
4946 pn3 = pn2->pn_kid3;
4947 if (pn3) {
4948 op = JSOP_POP;
4949 #if JS_HAS_DESTRUCTURING
4950 if (pn3->pn_type == TOK_ASSIGN &&
4951 !MaybeEmitGroupAssignment(cx, cg, op, pn3, &op)) {
4952 return JS_FALSE;
4954 #endif
4955 if (op == JSOP_POP && !js_EmitTree(cx, cg, pn3))
4956 return JS_FALSE;
4958 /* Always emit the POP or NOP, to help the decompiler. */
4959 if (js_Emit1(cx, cg, op) < 0)
4960 return JS_FALSE;
4962 /* Restore the absolute line number for source note readers. */
4963 off = (ptrdiff_t) pn->pn_pos.end.lineno;
4964 if (CG_CURRENT_LINE(cg) != (uintN) off) {
4965 if (js_NewSrcNote2(cx, cg, SRC_SETLINE, off) < 0)
4966 return JS_FALSE;
4967 CG_CURRENT_LINE(cg) = (uintN) off;
4971 /* Set the first note offset so we can find the loop condition. */
4972 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0,
4973 CG_OFFSET(cg) - tmp)) {
4974 return JS_FALSE;
4977 if (pn2->pn_kid2) {
4978 /* Fix up the goto from top to target the loop condition. */
4979 JS_ASSERT(jmp >= 0);
4980 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
4982 if (!js_EmitTree(cx, cg, pn2->pn_kid2))
4983 return JS_FALSE;
4986 /* The third note offset helps us find the loop-closing jump. */
4987 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 2,
4988 CG_OFFSET(cg) - tmp)) {
4989 return JS_FALSE;
4992 if (pn2->pn_kid2) {
4993 beq = EmitJump(cx, cg, JSOP_IFNE, top - CG_OFFSET(cg));
4994 if (beq < 0)
4995 return JS_FALSE;
4996 } else {
4997 /* No loop condition -- emit the loop-closing jump. */
4998 jmp = EmitJump(cx, cg, JSOP_GOTO, top - CG_OFFSET(cg));
4999 if (jmp < 0)
5000 return JS_FALSE;
5004 /* Now fixup all breaks and continues (before for/in's JSOP_ENDITER). */
5005 if (!js_PopStatementCG(cx, cg))
5006 return JS_FALSE;
5008 if (pn2->pn_type == TOK_IN) {
5009 if (!NewTryNote(cx, cg, JSTRY_ITER, cg->stackDepth, top, CG_OFFSET(cg)) ||
5010 js_Emit1(cx, cg, JSOP_ENDITER) < 0) {
5011 return JS_FALSE;
5014 break;
5016 case TOK_BREAK:
5017 stmt = cg->topStmt;
5018 atom = pn->pn_atom;
5019 if (atom) {
5020 ale = cg->atomList.add(cg->parser, atom);
5021 if (!ale)
5022 return JS_FALSE;
5023 while (stmt->type != STMT_LABEL || stmt->label != atom)
5024 stmt = stmt->down;
5025 noteType = SRC_BREAK2LABEL;
5026 } else {
5027 ale = NULL;
5028 while (!STMT_IS_LOOP(stmt) && stmt->type != STMT_SWITCH)
5029 stmt = stmt->down;
5030 noteType = (stmt->type == STMT_SWITCH) ? SRC_NULL : SRC_BREAK;
5033 if (EmitGoto(cx, cg, stmt, &stmt->breaks, ale, noteType) < 0)
5034 return JS_FALSE;
5035 break;
5037 case TOK_CONTINUE:
5038 stmt = cg->topStmt;
5039 atom = pn->pn_atom;
5040 if (atom) {
5041 /* Find the loop statement enclosed by the matching label. */
5042 JSStmtInfo *loop = NULL;
5043 ale = cg->atomList.add(cg->parser, atom);
5044 if (!ale)
5045 return JS_FALSE;
5046 while (stmt->type != STMT_LABEL || stmt->label != atom) {
5047 if (STMT_IS_LOOP(stmt))
5048 loop = stmt;
5049 stmt = stmt->down;
5051 stmt = loop;
5052 noteType = SRC_CONT2LABEL;
5053 } else {
5054 ale = NULL;
5055 while (!STMT_IS_LOOP(stmt))
5056 stmt = stmt->down;
5057 noteType = SRC_CONTINUE;
5060 if (EmitGoto(cx, cg, stmt, &stmt->continues, ale, noteType) < 0)
5061 return JS_FALSE;
5062 break;
5064 case TOK_WITH:
5065 if (!js_EmitTree(cx, cg, pn->pn_left))
5066 return JS_FALSE;
5067 js_PushStatement(cg, &stmtInfo, STMT_WITH, CG_OFFSET(cg));
5068 if (js_Emit1(cx, cg, JSOP_ENTERWITH) < 0)
5069 return JS_FALSE;
5070 if (!js_EmitTree(cx, cg, pn->pn_right))
5071 return JS_FALSE;
5072 if (js_Emit1(cx, cg, JSOP_LEAVEWITH) < 0)
5073 return JS_FALSE;
5074 ok = js_PopStatementCG(cx, cg);
5075 break;
5077 case TOK_TRY:
5078 {
5079 ptrdiff_t tryStart, tryEnd, catchJump, finallyStart;
5080 intN depth;
5081 JSParseNode *lastCatch;
5083 catchJump = -1;
5085 /*
5086 * Push stmtInfo to track jumps-over-catches and gosubs-to-finally
5087 * for later fixup.
5089 * When a finally block is active (STMT_FINALLY in our tree context),
5090 * non-local jumps (including jumps-over-catches) result in a GOSUB
5091 * being written into the bytecode stream and fixed-up later (c.f.
5092 * EmitBackPatchOp and BackPatch).
5093 */
5094 js_PushStatement(cg, &stmtInfo,
5095 pn->pn_kid3 ? STMT_FINALLY : STMT_TRY,
5096 CG_OFFSET(cg));
5098 /*
5099 * Since an exception can be thrown at any place inside the try block,
5100 * we need to restore the stack and the scope chain before we transfer
5101 * the control to the exception handler.
5103 * For that we store in a try note associated with the catch or
5104 * finally block the stack depth upon the try entry. The interpreter
5105 * uses this depth to properly unwind the stack and the scope chain.
5106 */
5107 depth = cg->stackDepth;
5109 /* Mark try location for decompilation, then emit try block. */
5110 if (js_Emit1(cx, cg, JSOP_TRY) < 0)
5111 return JS_FALSE;
5112 tryStart = CG_OFFSET(cg);
5113 if (!js_EmitTree(cx, cg, pn->pn_kid1))
5114 return JS_FALSE;
5115 JS_ASSERT(depth == cg->stackDepth);
5117 /* GOSUB to finally, if present. */
5118 if (pn->pn_kid3) {
5119 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
5120 return JS_FALSE;
5121 jmp = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &GOSUBS(stmtInfo));
5122 if (jmp < 0)
5123 return JS_FALSE;
5126 /* Emit (hidden) jump over catch and/or finally. */
5127 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
5128 return JS_FALSE;
5129 jmp = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &catchJump);
5130 if (jmp < 0)
5131 return JS_FALSE;
5133 tryEnd = CG_OFFSET(cg);
5135 /* If this try has a catch block, emit it. */
5136 pn2 = pn->pn_kid2;
5137 lastCatch = NULL;
5138 if (pn2) {
5139 jsint count = 0; /* previous catch block's population */
5141 /*
5142 * The emitted code for a catch block looks like:
5144 * [throwing] only if 2nd+ catch block
5145 * [leaveblock] only if 2nd+ catch block
5146 * enterblock with SRC_CATCH
5147 * exception
5148 * [dup] only if catchguard
5149 * setlocalpop <slot> or destructuring code
5150 * [< catchguard code >] if there's a catchguard
5151 * [ifeq <offset to next catch block>] " "
5152 * [pop] only if catchguard
5153 * < catch block contents >
5154 * leaveblock
5155 * goto <end of catch blocks> non-local; finally applies
5157 * If there's no catch block without a catchguard, the last
5158 * <offset to next catch block> points to rethrow code. This
5159 * code will [gosub] to the finally code if appropriate, and is
5160 * also used for the catch-all trynote for capturing exceptions
5161 * thrown from catch{} blocks.
5162 */
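/*
 * For example, with SpiderMonkey's guarded-catch extension:
 *
 *   try {
 *       f();
 *   } catch (e if e instanceof TypeError) {  // guard may fall through
 *       g();                                 // to the next block
 *   } catch (e) {                            // unguarded: no rethrow
 *       h();
 *   }
 */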
5163 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
5164 ptrdiff_t guardJump, catchNote;
5166 JS_ASSERT(cg->stackDepth == depth);
5167 guardJump = GUARDJUMP(stmtInfo);
5168 if (guardJump != -1) {
5169 /* Fix up and clean up previous catch block. */
5170 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, guardJump);
5172 /*
5173 * Account for JSOP_ENTERBLOCK (whose block object count
5174 * is saved below) and the pushed exception object that we
5175 * still have after jumping from the previous guard.
5176 */
5177 cg->stackDepth = depth + count + 1;
5179 /*
5180 * Move exception back to cx->exception to prepare for
5181 * the next catch. We hide [throwing] from the decompiler
5182 * since it compensates for the hidden JSOP_DUP at the
5183 * start of the previous guarded catch.
5184 */
5185 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0 ||
5186 js_Emit1(cx, cg, JSOP_THROWING) < 0) {
5187 return JS_FALSE;
5189 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
5190 return JS_FALSE;
5191 EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK, count);
5192 JS_ASSERT(cg->stackDepth == depth);
5195 /*
5196 * Annotate the JSOP_ENTERBLOCK that's about to be generated
5197 * by the call to js_EmitTree immediately below. Save this
5198 * source note's index in stmtInfo for use by the TOK_CATCH:
5199 * case, where the length of the catch guard is set as the
5200 * note's offset.
5201 */
5202 catchNote = js_NewSrcNote2(cx, cg, SRC_CATCH, 0);
5203 if (catchNote < 0)
5204 return JS_FALSE;
5205 CATCHNOTE(stmtInfo) = catchNote;
5207 /*
5208 * Emit the lexical scope and catch body. Save the catch's
5209 * block object population via count, for use when targeting
5210 * guardJump at the next catch (the guard mismatch case).
5211 */
5212 JS_ASSERT(pn3->pn_type == TOK_LEXICALSCOPE);
5213 count = OBJ_BLOCK_COUNT(cx, pn3->pn_objbox->object);
5214 if (!js_EmitTree(cx, cg, pn3))
5215 return JS_FALSE;
5217 /* gosub <finally>, if required */
5218 if (pn->pn_kid3) {
5219 jmp = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH,
5220 &GOSUBS(stmtInfo));
5221 if (jmp < 0)
5222 return JS_FALSE;
5223 JS_ASSERT(cg->stackDepth == depth);
5226 /*
5227 * Jump over the remaining catch blocks. This will get fixed
5228 * up to jump to after catch/finally.
5229 */
5230 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
5231 return JS_FALSE;
5232 jmp = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &catchJump);
5233 if (jmp < 0)
5234 return JS_FALSE;
5236 /*
5237 * Save a pointer to the last catch node to handle try-finally
5238 * and try-catch(guard)-finally special cases.
5239 */
5240 lastCatch = pn3->expr();
5244 /*
5245 * Last catch guard jumps to the rethrow code sequence if none of the
5246 * guards match. Target guardJump at the beginning of the rethrow
5247 * sequence, just in case a guard expression throws and leaves the
5248 * stack unbalanced.
5249 */
5250 if (lastCatch && lastCatch->pn_kid2) {
5251 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, GUARDJUMP(stmtInfo));
5253 /* Sync the stack to take into account pushed exception. */
5254 JS_ASSERT(cg->stackDepth == depth);
5255 cg->stackDepth = depth + 1;
5257 /*
5258 * Rethrow the exception, delegating execution of the finally
5259 * block, if any, to the exception handler.
5260 */
5261 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0 ||
5262 js_Emit1(cx, cg, JSOP_THROW) < 0) {
5263 return JS_FALSE;
5267 JS_ASSERT(cg->stackDepth == depth);
5269 /* Emit finally handler if any. */
5270 finallyStart = 0; /* to quell GCC uninitialized warnings */
5271 if (pn->pn_kid3) {
5272 /*
5273 * Fix up the gosubs that might have been emitted before non-local
5274 * jumps to the finally code.
5275 */
5276 if (!BackPatch(cx, cg, GOSUBS(stmtInfo), CG_NEXT(cg), JSOP_GOSUB))
5277 return JS_FALSE;
5279 finallyStart = CG_OFFSET(cg);
5281 /* Indicate that we're emitting a subroutine body. */
5282 stmtInfo.type = STMT_SUBROUTINE;
5283 if (!UpdateLineNumberNotes(cx, cg, pn->pn_kid3->pn_pos.begin.lineno))
5284 return JS_FALSE;
5285 if (js_Emit1(cx, cg, JSOP_FINALLY) < 0 ||
5286 !js_EmitTree(cx, cg, pn->pn_kid3) ||
5287 js_Emit1(cx, cg, JSOP_RETSUB) < 0) {
5288 return JS_FALSE;
5290 JS_ASSERT(cg->stackDepth == depth);
5292 if (!js_PopStatementCG(cx, cg))
5293 return JS_FALSE;
5295 if (js_NewSrcNote(cx, cg, SRC_ENDBRACE) < 0 ||
5296 js_Emit1(cx, cg, JSOP_NOP) < 0) {
5297 return JS_FALSE;
5300 /* Fix up the end-of-try/catch jumps to come here. */
5301 if (!BackPatch(cx, cg, catchJump, CG_NEXT(cg), JSOP_GOTO))
5302 return JS_FALSE;
5304 /*
5305 * Add the try note last, to let post-order give us the right ordering
5306 * (first to last for a given nesting level, inner to outer by level).
5307 */
5308 if (pn->pn_kid2 &&
5309 !NewTryNote(cx, cg, JSTRY_CATCH, depth, tryStart, tryEnd)) {
5310 return JS_FALSE;
5313 /*
5314 * If we've got a finally, mark try+catch region with additional
5315 * trynote to catch exceptions (re)thrown from a catch block or
5316 * for the try{}finally{} case.
5317 */
5318 if (pn->pn_kid3 &&
5319 !NewTryNote(cx, cg, JSTRY_FINALLY, depth, tryStart, finallyStart)) {
5320 return JS_FALSE;
5322 break;
5323 }
5325 case TOK_CATCH:
5326 {
5327 ptrdiff_t catchStart, guardJump;
5328 JSObject *blockObj;
5330 /*
5331 * Morph STMT_BLOCK to STMT_CATCH, note the block entry code offset,
5332 * and save the block object atom.
5333 */
5334 stmt = cg->topStmt;
5335 JS_ASSERT(stmt->type == STMT_BLOCK && (stmt->flags & SIF_SCOPE));
5336 stmt->type = STMT_CATCH;
5337 catchStart = stmt->update;
5338 blockObj = stmt->blockObj;
5340 /* Go up one statement info record to the TRY or FINALLY record. */
5341 stmt = stmt->down;
5342 JS_ASSERT(stmt->type == STMT_TRY || stmt->type == STMT_FINALLY);
5344 /* Pick up the pending exception and bind it to the catch variable. */
5345 if (js_Emit1(cx, cg, JSOP_EXCEPTION) < 0)
5346 return JS_FALSE;
5348 /*
5349 * Dup the exception object if there is a guard, so the exception is
5350 * still available for rethrowing or for subsequent catch blocks.
5351 */
5352 if (pn->pn_kid2 && js_Emit1(cx, cg, JSOP_DUP) < 0)
5353 return JS_FALSE;
5355 pn2 = pn->pn_kid1;
5356 switch (pn2->pn_type) {
5357 #if JS_HAS_DESTRUCTURING
5358 case TOK_RB:
5359 case TOK_RC:
5360 if (!EmitDestructuringOps(cx, cg, JSOP_NOP, pn2))
5361 return JS_FALSE;
5362 if (js_Emit1(cx, cg, JSOP_POP) < 0)
5363 return JS_FALSE;
5364 break;
5365 #endif
5367 case TOK_NAME:
5368 /* Inline and specialize BindNameToSlot for pn2. */
5369 JS_ASSERT(!pn2->pn_cookie.isFree());
5370 EMIT_UINT16_IMM_OP(JSOP_SETLOCALPOP, pn2->pn_cookie.asInteger());
5371 break;
5373 default:
5374 JS_ASSERT(0);
5377 /* Emit the guard expression, if there is one. */
5378 if (pn->pn_kid2) {
5379 if (!js_EmitTree(cx, cg, pn->pn_kid2))
5380 return JS_FALSE;
5381 if (!js_SetSrcNoteOffset(cx, cg, CATCHNOTE(*stmt), 0,
5382 CG_OFFSET(cg) - catchStart)) {
5383 return JS_FALSE;
5385 /* ifeq <next block> */
5386 guardJump = EmitJump(cx, cg, JSOP_IFEQ, 0);
5387 if (guardJump < 0)
5388 return JS_FALSE;
5389 GUARDJUMP(*stmt) = guardJump;
5391 /* Pop duplicated exception object as we no longer need it. */
5392 if (js_Emit1(cx, cg, JSOP_POP) < 0)
5393 return JS_FALSE;
5396 /* Emit the catch body. */
5397 if (!js_EmitTree(cx, cg, pn->pn_kid3))
5398 return JS_FALSE;
5400 /*
5401 * Annotate the JSOP_LEAVEBLOCK that will be emitted as we unwind via
5402 * our TOK_LEXICALSCOPE parent, so the decompiler knows to pop.
5403 */
5404 off = cg->stackDepth;
5405 if (js_NewSrcNote2(cx, cg, SRC_CATCH, off) < 0)
5406 return JS_FALSE;
5407 break;
5408 }
5410 case TOK_VAR:
5411 if (!EmitVariables(cx, cg, pn, JS_FALSE, &noteIndex))
5412 return JS_FALSE;
5413 break;
5415 case TOK_RETURN:
5416 /* Push a return value */
5417 pn2 = pn->pn_kid;
5418 if (pn2) {
5419 if (!js_EmitTree(cx, cg, pn2))
5420 return JS_FALSE;
5421 } else {
5422 if (js_Emit1(cx, cg, JSOP_PUSH) < 0)
5423 return JS_FALSE;
5426 /*
5427 * EmitNonLocalJumpFixup may add fixup bytecode to close open try
5428 * blocks having finally clauses and to exit intermingled let blocks.
5429 * We can't simply transfer control flow to our caller in that case,
5430 * because we must gosub to those finally clauses from inner to outer,
5431 * with the correct stack pointer (i.e., after popping any with,
5432 * for/in, etc., slots nested inside the finally's try).
5434 * In this case we mutate JSOP_RETURN into JSOP_SETRVAL and add an
5435 * extra JSOP_RETRVAL after the fixups.
5436 */
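/*
 * For example, in 'try { return v; } finally { f(); }' the return must
 * gosub to the finally block first, so the JSOP_RETURN emitted below
 * gets rewritten to JSOP_SETRVAL and a JSOP_RETRVAL is appended.
 */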
5437 top = CG_OFFSET(cg);
5438 if (js_Emit1(cx, cg, JSOP_RETURN) < 0)
5439 return JS_FALSE;
5440 if (!EmitNonLocalJumpFixup(cx, cg, NULL))
5441 return JS_FALSE;
5442 if (top + JSOP_RETURN_LENGTH != CG_OFFSET(cg)) {
5443 CG_BASE(cg)[top] = JSOP_SETRVAL;
5444 if (js_Emit1(cx, cg, JSOP_RETRVAL) < 0)
5445 return JS_FALSE;
5447 break;
5449 #if JS_HAS_GENERATORS
5450 case TOK_YIELD:
5451 if (!cg->inFunction()) {
5452 ReportCompileErrorNumber(cx, CG_TS(cg), pn, JSREPORT_ERROR,
5453 JSMSG_BAD_RETURN_OR_YIELD,
5454 js_yield_str);
5455 return JS_FALSE;
5457 if (pn->pn_kid) {
5458 if (!js_EmitTree(cx, cg, pn->pn_kid))
5459 return JS_FALSE;
5460 } else {
5461 if (js_Emit1(cx, cg, JSOP_PUSH) < 0)
5462 return JS_FALSE;
5464 if (pn->pn_hidden && js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
5465 return JS_FALSE;
5466 if (js_Emit1(cx, cg, JSOP_YIELD) < 0)
5467 return JS_FALSE;
5468 break;
5469 #endif
5471 case TOK_LC:
5472 {
5473 #if JS_HAS_XML_SUPPORT
5474 if (pn->pn_arity == PN_UNARY) {
5475 if (!js_EmitTree(cx, cg, pn->pn_kid))
5476 return JS_FALSE;
5477 if (js_Emit1(cx, cg, PN_OP(pn)) < 0)
5478 return JS_FALSE;
5479 break;
5481 #endif
5483 JS_ASSERT(pn->pn_arity == PN_LIST);
5485 noteIndex = -1;
5486 tmp = CG_OFFSET(cg);
5487 if (pn->pn_xflags & PNX_NEEDBRACES) {
5488 noteIndex = js_NewSrcNote2(cx, cg, SRC_BRACE, 0);
5489 if (noteIndex < 0 || js_Emit1(cx, cg, JSOP_NOP) < 0)
5490 return JS_FALSE;
5493 js_PushStatement(cg, &stmtInfo, STMT_BLOCK, top);
5495 JSParseNode *pnchild = pn->pn_head;
5496 if (pn->pn_xflags & PNX_FUNCDEFS) {
5497 /*
5498 * This block contains top-level function definitions. To ensure
5499 * that we emit the bytecode defining them before the rest of code
5500 * in the block we use a separate pass over functions. During the
5501 * main pass later the emitter will add JSOP_NOP with source notes
5502 * for the function to preserve the original function's position
5503 * when decompiling.
5505 * Currently this is used only for functions, as compile-as-we-go
5506 * mode for scripts does not allow separate emitter passes.
5507 */
5508 JS_ASSERT(cg->inFunction());
5509 if (pn->pn_xflags & PNX_DESTRUCT) {
5510 /*
5511 * Assign the destructuring arguments before defining any
5512 * functions, see bug 419662.
5513 */
5514 JS_ASSERT(pnchild->pn_type == TOK_SEMI);
5515 JS_ASSERT(pnchild->pn_kid->pn_type == TOK_COMMA);
5516 if (!js_EmitTree(cx, cg, pnchild))
5517 return JS_FALSE;
5518 pnchild = pnchild->pn_next;
5521 for (pn2 = pnchild; pn2; pn2 = pn2->pn_next) {
5522 if (pn2->pn_type == TOK_FUNCTION) {
5523 if (pn2->pn_op == JSOP_NOP) {
5524 if (!js_EmitTree(cx, cg, pn2))
5525 return JS_FALSE;
5526 } else {
5527 /*
5528 * JSOP_DEFFUN in a top-level block with function
5529 * definitions appears, for example, when "if (true)"
5530 * is optimized away from "if (true) function x() {}".
5531 * See bug 428424.
5532 */
5533 JS_ASSERT(pn2->pn_op == JSOP_DEFFUN);
5538 for (pn2 = pnchild; pn2; pn2 = pn2->pn_next) {
5539 if (!js_EmitTree(cx, cg, pn2))
5540 return JS_FALSE;
5543 if (noteIndex >= 0 &&
5544 !js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0,
5545 CG_OFFSET(cg) - tmp)) {
5546 return JS_FALSE;
5549 ok = js_PopStatementCG(cx, cg);
5550 break;
5551 }
5553 case TOK_SEQ:
5554 JS_ASSERT(pn->pn_arity == PN_LIST);
5555 js_PushStatement(cg, &stmtInfo, STMT_SEQ, top);
5556 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
5557 if (!js_EmitTree(cx, cg, pn2))
5558 return JS_FALSE;
5560 ok = js_PopStatementCG(cx, cg);
5561 break;
5563 case TOK_SEMI:
5564 pn2 = pn->pn_kid;
5565 if (pn2) {
5566 /*
5567 * Top-level or called-from-a-native JS_Execute/EvaluateScript,
5568 * debugger, and eval frames may need the value of the ultimate
5569 * expression statement as the script's result, despite the fact
5570 * that it appears useless to the compiler.
5572 * API users may also set the JSOPTION_NO_SCRIPT_RVAL option when
5573 * calling JS_Compile* to suppress JSOP_POPV.
5574 */
5575 useful = wantval = !(cg->flags & (TCF_IN_FUNCTION | TCF_NO_SCRIPT_RVAL));
5576 if (!useful) {
5577 if (!CheckSideEffects(cx, cg, pn2, &useful))
5578 return JS_FALSE;
5581 /*
5582 * Don't eliminate apparently useless expressions if they are
5583 * labeled expression statements. The tc->topStmt->update test
5584 * catches the case where we are nesting in js_EmitTree for a
5585 * labeled compound statement.
5586 */
5587 if (!useful &&
5588 (!cg->topStmt ||
5589 cg->topStmt->type != STMT_LABEL ||
5590 cg->topStmt->update < CG_OFFSET(cg))) {
5591 CG_CURRENT_LINE(cg) = pn2->pn_pos.begin.lineno;
5592 if (!ReportCompileErrorNumber(cx, CG_TS(cg), pn2,
5593 JSREPORT_WARNING | JSREPORT_STRICT,
5594 JSMSG_USELESS_EXPR)) {
5595 return JS_FALSE;
5597 } else {
5598 op = wantval ? JSOP_POPV : JSOP_POP;
5599 #if JS_HAS_DESTRUCTURING
5600 if (!wantval &&
5601 pn2->pn_type == TOK_ASSIGN &&
5602 !MaybeEmitGroupAssignment(cx, cg, op, pn2, &op)) {
5603 return JS_FALSE;
5605 #endif
5606 if (op != JSOP_NOP) {
5607 /*
5608 * Specialize JSOP_SETPROP to JSOP_SETMETHOD to defer or
5609 * avoid null closure cloning. Do this only for assignment
5610 * statements that are not completion values wanted by a
5611 * script evaluator, to ensure that the joined function
5612 * can't escape directly.
5613 */
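/*
 * E.g. the statement 'o.m = function () { ... };' (value not wanted)
 * can emit JSOP_SETMETHOD rather than JSOP_SETPROP, since the lambda
 * cannot be observed escaping the assignment.
 */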
5614 if (!wantval &&
5615 PN_TYPE(pn2) == TOK_ASSIGN &&
5616 PN_OP(pn2) == JSOP_NOP &&
5617 PN_OP(pn2->pn_left) == JSOP_SETPROP &&
5618 PN_OP(pn2->pn_right) == JSOP_LAMBDA &&
5619 pn2->pn_right->pn_funbox->joinable()) {
5620 pn2->pn_left->pn_op = JSOP_SETMETHOD;
5622 if (!js_EmitTree(cx, cg, pn2))
5623 return JS_FALSE;
5624 if (js_Emit1(cx, cg, op) < 0)
5625 return JS_FALSE;
5629 break;
5631 case TOK_COLON:
5632 /* Emit an annotated nop so we know to decompile a label. */
5633 atom = pn->pn_atom;
5634 ale = cg->atomList.add(cg->parser, atom);
5635 if (!ale)
5636 return JS_FALSE;
5637 pn2 = pn->expr();
5638 noteType = (pn2->pn_type == TOK_LC ||
5639 (pn2->pn_type == TOK_LEXICALSCOPE &&
5640 pn2->expr()->pn_type == TOK_LC))
5641 ? SRC_LABELBRACE
5642 : SRC_LABEL;
5643 noteIndex = js_NewSrcNote2(cx, cg, noteType,
5644 (ptrdiff_t) ALE_INDEX(ale));
5645 if (noteIndex < 0 ||
5646 js_Emit1(cx, cg, JSOP_NOP) < 0) {
5647 return JS_FALSE;
5650 /* Emit code for the labeled statement. */
5651 js_PushStatement(cg, &stmtInfo, STMT_LABEL, CG_OFFSET(cg));
5652 stmtInfo.label = atom;
5653 if (!js_EmitTree(cx, cg, pn2))
5654 return JS_FALSE;
5655 if (!js_PopStatementCG(cx, cg))
5656 return JS_FALSE;
5658 /* If the statement was compound, emit a note for the end brace. */
5659 if (noteType == SRC_LABELBRACE) {
5660 if (js_NewSrcNote(cx, cg, SRC_ENDBRACE) < 0 ||
5661 js_Emit1(cx, cg, JSOP_NOP) < 0) {
5662 return JS_FALSE;
5665 break;
5667 case TOK_COMMA:
5668 /*
5669 * Emit SRC_PCDELTA notes on each JSOP_POP between comma operands.
5670 * These notes help the decompiler bracket the bytecodes generated
5671 * from each sub-expression that follows a comma.
5672 */
5673 off = noteIndex = -1;
5674 for (pn2 = pn->pn_head; ; pn2 = pn2->pn_next) {
5675 if (!js_EmitTree(cx, cg, pn2))
5676 return JS_FALSE;
5677 tmp = CG_OFFSET(cg);
5678 if (noteIndex >= 0) {
5679 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, tmp-off))
5680 return JS_FALSE;
5682 if (!pn2->pn_next)
5683 break;
5684 off = tmp;
5685 noteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
5686 if (noteIndex < 0 ||
5687 js_Emit1(cx, cg, JSOP_POP) < 0) {
5688 return JS_FALSE;
5691 break;
5693 case TOK_ASSIGN:
5694 /*
5695 * Check left operand type and generate specialized code for it.
5696 * Specialize to avoid ECMA "reference type" values on the operand
5697 * stack, which impose pervasive runtime "GetValue" costs.
5698 */
5699 pn2 = pn->pn_left;
5700 atomIndex = (jsatomid) -1; /* quell GCC overwarning */
5701 switch (PN_TYPE(pn2)) {
5702 case TOK_NAME:
5703 if (!BindNameToSlot(cx, cg, pn2))
5704 return JS_FALSE;
5705 if (!pn2->pn_cookie.isFree()) {
5706 atomIndex = (jsatomid) pn2->pn_cookie.asInteger();
5707 } else {
5708 ale = cg->atomList.add(cg->parser, pn2->pn_atom);
5709 if (!ale)
5710 return JS_FALSE;
5711 atomIndex = ALE_INDEX(ale);
5712 if (!pn2->isConst())
5713 EMIT_INDEX_OP(JSOP_BINDNAME, atomIndex);
5715 break;
5716 case TOK_DOT:
5717 if (!js_EmitTree(cx, cg, pn2->expr()))
5718 return JS_FALSE;
5719 ale = cg->atomList.add(cg->parser, pn2->pn_atom);
5720 if (!ale)
5721 return JS_FALSE;
5722 atomIndex = ALE_INDEX(ale);
5723 break;
5724 case TOK_LB:
5725 JS_ASSERT(pn2->pn_arity == PN_BINARY);
5726 if (!js_EmitTree(cx, cg, pn2->pn_left))
5727 return JS_FALSE;
5728 if (!js_EmitTree(cx, cg, pn2->pn_right))
5729 return JS_FALSE;
5730 break;
5731 #if JS_HAS_DESTRUCTURING
5732 case TOK_RB:
5733 case TOK_RC:
5734 break;
5735 #endif
5736 case TOK_LP:
5737 if (!js_EmitTree(cx, cg, pn2))
5738 return JS_FALSE;
5739 break;
5740 #if JS_HAS_XML_SUPPORT
5741 case TOK_UNARYOP:
5742 JS_ASSERT(pn2->pn_op == JSOP_SETXMLNAME);
5743 if (!js_EmitTree(cx, cg, pn2->pn_kid))
5744 return JS_FALSE;
5745 if (js_Emit1(cx, cg, JSOP_BINDXMLNAME) < 0)
5746 return JS_FALSE;
5747 break;
5748 #endif
5749 default:
5750 JS_ASSERT(0);
5753 op = PN_OP(pn);
5754 if (op != JSOP_NOP) {
5755 switch (pn2->pn_type) {
5756 case TOK_NAME:
5757 if (pn2->isConst()) {
5758 if (PN_OP(pn2) == JSOP_CALLEE) {
5759 if (js_Emit1(cx, cg, JSOP_CALLEE) < 0)
5760 return JS_FALSE;
5761 } else {
5762 EMIT_INDEX_OP(PN_OP(pn2), atomIndex);
5764 } else if (PN_OP(pn2) == JSOP_SETNAME) {
5765 if (js_Emit1(cx, cg, JSOP_DUP) < 0)
5766 return JS_FALSE;
5767 EMIT_INDEX_OP(JSOP_GETXPROP, atomIndex);
5768 } else {
5769 JS_ASSERT(PN_OP(pn2) != JSOP_GETUPVAR);
5770 EMIT_UINT16_IMM_OP((PN_OP(pn2) == JSOP_SETGVAR)
5771 ? JSOP_GETGVAR
5772 : (PN_OP(pn2) == JSOP_SETARG)
5773 ? JSOP_GETARG
5774 : JSOP_GETLOCAL,
5775 atomIndex);
5777 break;
5778 case TOK_DOT:
5779 if (js_Emit1(cx, cg, JSOP_DUP) < 0)
5780 return JS_FALSE;
5781 if (pn2->pn_atom == cx->runtime->atomState.lengthAtom) {
5782 if (js_Emit1(cx, cg, JSOP_LENGTH) < 0)
5783 return JS_FALSE;
5784 } else if (pn2->pn_atom == cx->runtime->atomState.protoAtom) {
5785 if (!EmitIndexOp(cx, JSOP_QNAMEPART, atomIndex, cg))
5786 return JS_FALSE;
5787 if (js_Emit1(cx, cg, JSOP_GETELEM) < 0)
5788 return JS_FALSE;
5789 } else {
5790 EMIT_INDEX_OP(JSOP_GETPROP, atomIndex);
5792 break;
5793 case TOK_LB:
5794 case TOK_LP:
5795 #if JS_HAS_XML_SUPPORT
5796 case TOK_UNARYOP:
5797 #endif
5798 if (js_Emit1(cx, cg, JSOP_DUP2) < 0)
5799 return JS_FALSE;
5800 if (js_Emit1(cx, cg, JSOP_GETELEM) < 0)
5801 return JS_FALSE;
5802 break;
5803 default:;
5807 /* Now emit the right operand (it may affect the namespace). */
5808 if (!js_EmitTree(cx, cg, pn->pn_right))
5809 return JS_FALSE;
5811 /* If += etc., emit the binary operator with a decompiler note. */
5812 if (op != JSOP_NOP) {
5813 /*
5814 * Take care to avoid SRC_ASSIGNOP if the left-hand side is a const
5815 * declared in the current compilation unit, as in this case (just
5816 * a bit further below) we will avoid emitting the assignment op.
5817 */
5818 if (pn2->pn_type != TOK_NAME || !pn2->isConst()) {
5819 if (js_NewSrcNote(cx, cg, SRC_ASSIGNOP) < 0)
5820 return JS_FALSE;
5821 }
5822 if (js_Emit1(cx, cg, op) < 0)
5823 return JS_FALSE;
5824 }
5826 /* Left parts such as a.b.c and a[b].c need a decompiler note. */
5827 if (pn2->pn_type != TOK_NAME &&
5828 #if JS_HAS_DESTRUCTURING
5829 pn2->pn_type != TOK_RB &&
5830 pn2->pn_type != TOK_RC &&
5831 #endif
5832 js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0) {
5833 return JS_FALSE;
5834 }
5836 /* Finally, emit the specialized assignment bytecode. */
5837 switch (pn2->pn_type) {
5838 case TOK_NAME:
5839 if (pn2->isConst())
5840 break;
5841 /* FALL THROUGH */
5842 case TOK_DOT:
5843 EMIT_INDEX_OP(PN_OP(pn2), atomIndex);
5844 break;
5845 case TOK_LB:
5846 case TOK_LP:
5847 if (js_Emit1(cx, cg, JSOP_SETELEM) < 0)
5848 return JS_FALSE;
5849 break;
5850 #if JS_HAS_DESTRUCTURING
5851 case TOK_RB:
5852 case TOK_RC:
5853 if (!EmitDestructuringOps(cx, cg, JSOP_SETNAME, pn2))
5854 return JS_FALSE;
5855 break;
5856 #endif
5857 #if JS_HAS_XML_SUPPORT
5858 case TOK_UNARYOP:
5859 if (js_Emit1(cx, cg, JSOP_SETXMLNAME) < 0)
5860 return JS_FALSE;
5861 break;
5862 #endif
5863 default:
5864 JS_ASSERT(0);
5865 }
5866 break;
5868 case TOK_HOOK:
5869 /* Emit the condition, then branch if false to the else part. */
5870 if (!js_EmitTree(cx, cg, pn->pn_kid1))
5871 return JS_FALSE;
5872 noteIndex = js_NewSrcNote(cx, cg, SRC_COND);
5873 if (noteIndex < 0)
5874 return JS_FALSE;
5875 beq = EmitJump(cx, cg, JSOP_IFEQ, 0);
5876 if (beq < 0 || !js_EmitTree(cx, cg, pn->pn_kid2))
5877 return JS_FALSE;
5879 /* Jump around else, fixup the branch, emit else, fixup jump. */
5880 jmp = EmitJump(cx, cg, JSOP_GOTO, 0);
5881 if (jmp < 0)
5882 return JS_FALSE;
5883 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, beq);
5885 /*
5886 * Each branch pushes a single value, but our stack budgeting analysis
5887 * ignores branches, so we must now adjust cg->stackDepth to ignore the
5888 * value pushed by the first branch. Execution will follow only one
5889 * path, so we decrement cg->stackDepth once.
5891 * Failing to do this will foil code, such as the try/catch/finally
5892 * exception handling code generator, that samples cg->stackDepth for
5893 * use at runtime (JSOP_SETSP), or in let expression and block code
5894 * generation, which must use the stack depth to compute local stack
5895 * indexes correctly.
5896 */
5897 JS_ASSERT(cg->stackDepth > 0);
5898 cg->stackDepth--;
5899 if (!js_EmitTree(cx, cg, pn->pn_kid3))
5900 return JS_FALSE;
5901 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
5902 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, jmp - beq))
5903 return JS_FALSE;
5904 break;
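/*
 * For illustration, |a ? b : c| compiles roughly to
 *
 *     <a> JSOP_IFEQ L1; <b> JSOP_GOTO L2; L1: <c>; L2: ...
 *
 * Each arm pushes exactly one value, but the linear accounting above
 * counted both pushes, hence the cg->stackDepth-- compensation.
 */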
5906 case TOK_OR:
5907 case TOK_AND:
5908 /*
5909 * JSOP_OR converts the operand on the stack to boolean, and if true,
5910 * leaves the original operand value on the stack and jumps; otherwise
5911 * it pops and falls into the next bytecode, which evaluates the right
5912 * operand. The jump goes around the right operand evaluation.
5914 * JSOP_AND converts the operand on the stack to boolean, and if false,
5915 * leaves the original operand value on the stack and jumps; otherwise
5916 * it pops and falls into the right operand's bytecode.
5917 */
5918 if (pn->pn_arity == PN_BINARY) {
5919 if (!js_EmitTree(cx, cg, pn->pn_left))
5920 return JS_FALSE;
5921 top = EmitJump(cx, cg, JSOP_BACKPATCH_POP, 0);
5922 if (top < 0)
5923 return JS_FALSE;
5924 if (!js_EmitTree(cx, cg, pn->pn_right))
5925 return JS_FALSE;
5926 off = CG_OFFSET(cg);
5927 pc = CG_CODE(cg, top);
5928 CHECK_AND_SET_JUMP_OFFSET(cx, cg, pc, off - top);
5929 *pc = pn->pn_op;
5930 } else {
5931 JS_ASSERT(pn->pn_arity == PN_LIST);
5932 JS_ASSERT(pn->pn_head->pn_next->pn_next);
5934 /* Left-associative operator chain: avoid too much recursion. */
5935 pn2 = pn->pn_head;
5936 if (!js_EmitTree(cx, cg, pn2))
5937 return JS_FALSE;
5938 top = EmitJump(cx, cg, JSOP_BACKPATCH_POP, 0);
5939 if (top < 0)
5940 return JS_FALSE;
5942 /* Emit nodes between the head and the tail. */
5943 jmp = top;
5944 while ((pn2 = pn2->pn_next)->pn_next) {
5945 if (!js_EmitTree(cx, cg, pn2))
5946 return JS_FALSE;
5947 off = EmitJump(cx, cg, JSOP_BACKPATCH_POP, 0);
5948 if (off < 0)
5949 return JS_FALSE;
5950 if (!SetBackPatchDelta(cx, cg, CG_CODE(cg, jmp), off - jmp))
5951 return JS_FALSE;
5952 jmp = off;
5953 }
5955 if (!js_EmitTree(cx, cg, pn2))
5956 return JS_FALSE;
5958 pn2 = pn->pn_head;
5959 off = CG_OFFSET(cg);
5960 do {
5961 pc = CG_CODE(cg, top);
5962 tmp = GetJumpOffset(cg, pc);
5963 CHECK_AND_SET_JUMP_OFFSET(cx, cg, pc, off - top);
5964 *pc = pn->pn_op;
5965 top += tmp;
5966 } while ((pn2 = pn2->pn_next)->pn_next);
5967 }
5968 break;
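/*
 * For illustration, the PN_LIST form |a || b || c| first emits
 *
 *     <a> JSOP_BACKPATCH_POP; <b> JSOP_BACKPATCH_POP; <c>
 *
 * with each backpatch chained to the next via its offset field; the
 * do-while above then walks that chain, setting each jump to target
 * the end and rewriting the opcode to pn->pn_op (here JSOP_OR).
 */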
5970 case TOK_PLUS:
5971 /* For TCF_IN_FUNCTION test, see TOK_RB concerning JSOP_NEWARRAY. */
5972 if (pn->pn_arity == PN_LIST && pn->pn_count < JS_BIT(16) &&
5973 cg->inFunction()) {
5974 /* Emit up to the first string literal conventionally. */
5975 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
5976 if (pn2->pn_type == TOK_STRING)
5977 break;
5978 if (!js_EmitTree(cx, cg, pn2))
5979 return JS_FALSE;
5980 if (pn2 != pn->pn_head && js_Emit1(cx, cg, JSOP_ADD) < 0)
5981 return JS_FALSE;
5982 }
5984 if (!pn2)
5985 break;
5987 /*
5988 * Having seen a string literal, we know statically that the rest
5989 * of the additions are string concatenation, so we emit them as a
5990 * single concatn. First, do string conversion on the result of the
5991 * preceding zero or more additions so that any side effects of
5992 * string conversion occur before the next operand begins.
5993 */
5994 if (pn2 == pn->pn_head) {
5995 index = 0;
5996 } else {
5997 if (js_Emit1(cx, cg, JSOP_OBJTOSTR) < 0)
5998 return JS_FALSE;
5999 index = 1;
6000 }
6002 for (; pn2; pn2 = pn2->pn_next, index++) {
6003 if (!js_EmitTree(cx, cg, pn2))
6004 return JS_FALSE;
6005 if (!pn2->isLiteral() && js_Emit1(cx, cg, JSOP_OBJTOSTR) < 0)
6006 return JS_FALSE;
6007 }
6009 EMIT_UINT16_IMM_OP(JSOP_CONCATN, index);
6010 break;
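/*
 * For illustration, |x + "s" + y + z| in a function emits <x>
 * conventionally; on seeing the string literal it emits JSOP_OBJTOSTR
 * for the prefix (index = 1), then <"s">, <y>, <z> (converting
 * non-literals likewise) and a single JSOP_CONCATN with immediate 4,
 * rather than three JSOP_ADDs.
 */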
6012 case TOK_BITOR:
6013 case TOK_BITXOR:
6014 case TOK_BITAND:
6015 case TOK_EQOP:
6016 case TOK_RELOP:
6017 case TOK_IN:
6018 case TOK_INSTANCEOF:
6019 case TOK_SHOP:
6020 case TOK_MINUS:
6021 case TOK_STAR:
6022 case TOK_DIVOP:
6023 if (pn->pn_arity == PN_LIST) {
6024 /* Left-associative operator chain: avoid too much recursion. */
6025 pn2 = pn->pn_head;
6026 if (!js_EmitTree(cx, cg, pn2))
6027 return JS_FALSE;
6028 op = PN_OP(pn);
6029 while ((pn2 = pn2->pn_next) != NULL) {
6030 if (!js_EmitTree(cx, cg, pn2))
6031 return JS_FALSE;
6032 if (js_Emit1(cx, cg, op) < 0)
6033 return JS_FALSE;
6034 }
6035 } else {
6036 #if JS_HAS_XML_SUPPORT
6037 uintN oldflags;
6039 case TOK_DBLCOLON:
6040 if (pn->pn_arity == PN_NAME) {
6041 if (!js_EmitTree(cx, cg, pn->expr()))
6042 return JS_FALSE;
6043 if (!EmitAtomOp(cx, pn, PN_OP(pn), cg))
6044 return JS_FALSE;
6045 break;
6046 }
6048 /*
6049 * Binary :: has a right operand that brackets arbitrary code,
6050 * possibly including a let (a = b) ... expression. We must clear
6051 * TCF_IN_FOR_INIT to avoid mis-compiling such beasts.
6052 */
6053 oldflags = cg->flags;
6054 cg->flags &= ~TCF_IN_FOR_INIT;
6055 #endif
6057 /* Binary operators that evaluate both operands unconditionally. */
6058 if (!js_EmitTree(cx, cg, pn->pn_left))
6059 return JS_FALSE;
6060 if (!js_EmitTree(cx, cg, pn->pn_right))
6061 return JS_FALSE;
6062 #if JS_HAS_XML_SUPPORT
6063 cg->flags |= oldflags & TCF_IN_FOR_INIT;
6064 #endif
6065 if (js_Emit1(cx, cg, PN_OP(pn)) < 0)
6066 return JS_FALSE;
6067 }
6068 break;
6070 case TOK_THROW:
6071 #if JS_HAS_XML_SUPPORT
6072 case TOK_AT:
6073 case TOK_DEFAULT:
6074 JS_ASSERT(pn->pn_arity == PN_UNARY);
6075 /* FALL THROUGH */
6076 #endif
6077 case TOK_UNARYOP:
6078 {
6079 uintN oldflags;
6081 /* Unary op, including unary +/-. */
6082 op = PN_OP(pn);
6083 #if JS_HAS_XML_SUPPORT
6084 if (op == JSOP_XMLNAME) {
6085 if (!EmitXMLName(cx, pn, op, cg))
6086 return JS_FALSE;
6087 break;
6088 }
6089 #endif
6090 pn2 = pn->pn_kid;
6092 if (op == JSOP_TYPEOF && pn2->pn_type != TOK_NAME)
6093 op = JSOP_TYPEOFEXPR;
6095 oldflags = cg->flags;
6096 cg->flags &= ~TCF_IN_FOR_INIT;
6097 if (!js_EmitTree(cx, cg, pn2))
6098 return JS_FALSE;
6099 cg->flags |= oldflags & TCF_IN_FOR_INIT;
6100 if (js_Emit1(cx, cg, op) < 0)
6101 return JS_FALSE;
6102 break;
6103 }
6105 case TOK_INC:
6106 case TOK_DEC:
6107 /* Emit lvalue-specialized code for ++/-- operators. */
6108 pn2 = pn->pn_kid;
6109 JS_ASSERT(pn2->pn_type != TOK_RP);
6110 op = PN_OP(pn);
6111 switch (pn2->pn_type) {
6112 default:
6113 JS_ASSERT(pn2->pn_type == TOK_NAME);
6114 pn2->pn_op = op;
6115 if (!BindNameToSlot(cx, cg, pn2))
6116 return JS_FALSE;
6117 op = PN_OP(pn2);
6118 if (op == JSOP_CALLEE) {
6119 if (js_Emit1(cx, cg, op) < 0)
6120 return JS_FALSE;
6121 } else if (!pn2->pn_cookie.isFree()) {
6122 atomIndex = (jsatomid) pn2->pn_cookie.asInteger();
6123 EMIT_UINT16_IMM_OP(op, atomIndex);
6124 } else {
6125 JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
6126 if (!EmitAtomOp(cx, pn2, op, cg))
6127 return JS_FALSE;
6128 break;
6129 }
6130 if (pn2->isConst()) {
6131 if (js_Emit1(cx, cg, JSOP_POS) < 0)
6132 return JS_FALSE;
6133 op = PN_OP(pn);
6134 if (!(js_CodeSpec[op].format & JOF_POST)) {
6135 if (js_Emit1(cx, cg, JSOP_ONE) < 0)
6136 return JS_FALSE;
6137 op = (js_CodeSpec[op].format & JOF_INC) ? JSOP_ADD : JSOP_SUB;
6138 if (js_Emit1(cx, cg, op) < 0)
6139 return JS_FALSE;
6140 }
6141 }
6142 break;
6143 case TOK_DOT:
6144 if (!EmitPropOp(cx, pn2, op, cg, JS_FALSE))
6145 return JS_FALSE;
6146 break;
6147 case TOK_LB:
6148 if (!EmitElemOp(cx, pn2, op, cg))
6149 return JS_FALSE;
6150 break;
6151 case TOK_LP:
6152 if (!js_EmitTree(cx, cg, pn2))
6153 return JS_FALSE;
6154 if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
6155 CG_OFFSET(cg) - pn2->pn_offset) < 0) {
6156 return JS_FALSE;
6157 }
6158 if (js_Emit1(cx, cg, op) < 0)
6159 return JS_FALSE;
6160 break;
6161 #if JS_HAS_XML_SUPPORT
6162 case TOK_UNARYOP:
6163 JS_ASSERT(pn2->pn_op == JSOP_SETXMLNAME);
6164 if (!js_EmitTree(cx, cg, pn2->pn_kid))
6165 return JS_FALSE;
6166 if (js_Emit1(cx, cg, JSOP_BINDXMLNAME) < 0)
6167 return JS_FALSE;
6168 if (js_Emit1(cx, cg, op) < 0)
6169 return JS_FALSE;
6170 break;
6171 #endif
6172 }
6173 break;
6175 case TOK_DELETE:
6176 /*
6177 * Under ECMA 3, deleting a non-reference returns true -- but alas we
6178 * must evaluate the operand if it appears it might have side effects.
6179 */
6180 pn2 = pn->pn_kid;
6181 switch (pn2->pn_type) {
6182 case TOK_NAME:
6183 if (!BindNameToSlot(cx, cg, pn2))
6184 return JS_FALSE;
6185 op = PN_OP(pn2);
6186 if (op == JSOP_FALSE) {
6187 if (js_Emit1(cx, cg, op) < 0)
6188 return JS_FALSE;
6189 } else {
6190 if (!EmitAtomOp(cx, pn2, op, cg))
6191 return JS_FALSE;
6192 }
6193 break;
6194 case TOK_DOT:
6195 if (!EmitPropOp(cx, pn2, JSOP_DELPROP, cg, JS_FALSE))
6196 return JS_FALSE;
6197 break;
6198 #if JS_HAS_XML_SUPPORT
6199 case TOK_DBLDOT:
6200 if (!EmitElemOp(cx, pn2, JSOP_DELDESC, cg))
6201 return JS_FALSE;
6202 break;
6203 #endif
6204 case TOK_LB:
6205 if (!EmitElemOp(cx, pn2, JSOP_DELELEM, cg))
6206 return JS_FALSE;
6207 break;
6208 default:
6209 /*
6210 * If useless, just emit JSOP_TRUE; otherwise convert delete foo()
6211 * to foo(), true (a comma expression, requiring SRC_PCDELTA).
6212 */
6213 useful = JS_FALSE;
6214 if (!CheckSideEffects(cx, cg, pn2, &useful))
6215 return JS_FALSE;
6216 if (!useful) {
6217 off = noteIndex = -1;
6218 } else {
6219 if (pn2->pn_op == JSOP_SETCALL)
6220 pn2->pn_op = JSOP_CALL;
6221 if (!js_EmitTree(cx, cg, pn2))
6222 return JS_FALSE;
6223 off = CG_OFFSET(cg);
6224 noteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
6225 if (noteIndex < 0 || js_Emit1(cx, cg, JSOP_POP) < 0)
6226 return JS_FALSE;
6227 }
6228 if (js_Emit1(cx, cg, JSOP_TRUE) < 0)
6229 return JS_FALSE;
6230 if (noteIndex >= 0) {
6231 tmp = CG_OFFSET(cg);
6232 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, tmp-off))
6233 return JS_FALSE;
6234 }
6235 }
6236 break;
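/*
 * For illustration, |delete foo()| (a useful operand) emits the call,
 * a SRC_PCDELTA-annotated JSOP_POP, then JSOP_TRUE, behaving like the
 * comma expression |foo(), true|, while a side-effect-free operand
 * such as |delete 1| emits JSOP_TRUE alone.
 */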
6238 #if JS_HAS_XML_SUPPORT
6239 case TOK_FILTER:
6240 if (!js_EmitTree(cx, cg, pn->pn_left))
6241 return JS_FALSE;
6242 jmp = EmitJump(cx, cg, JSOP_FILTER, 0);
6243 if (jmp < 0)
6244 return JS_FALSE;
6245 top = js_Emit1(cx, cg, JSOP_TRACE);
6246 if (top < 0)
6247 return JS_FALSE;
6248 if (!js_EmitTree(cx, cg, pn->pn_right))
6249 return JS_FALSE;
6250 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
6251 if (EmitJump(cx, cg, JSOP_ENDFILTER, top - CG_OFFSET(cg)) < 0)
6252 return JS_FALSE;
6253 break;
6254 #endif
6256 case TOK_DOT:
6257 /*
6258 * Pop a stack operand, convert it to object, get a property named by
6259 * this bytecode's immediate-indexed atom operand, and push its value
6260 * (not a reference to it).
6261 */
6262 ok = EmitPropOp(cx, pn, PN_OP(pn), cg, JS_FALSE);
6263 break;
6265 case TOK_LB:
6266 #if JS_HAS_XML_SUPPORT
6267 case TOK_DBLDOT:
6268 #endif
6269 /*
6270 * Pop two operands, convert the left one to object and the right one
6271 * to property name (atom or tagged int), get the named property, and
6272 * push its value. Set the "obj" register to the result of ToObject
6273 * on the left operand.
6274 */
6275 ok = EmitElemOp(cx, pn, PN_OP(pn), cg);
6276 break;
6278 case TOK_NEW:
6279 case TOK_LP:
6280 {
6281 bool callop = (PN_TYPE(pn) == TOK_LP);
6283 /*
6284 * Emit callable invocation or operator new (constructor call) code.
6285 * First, emit code for the left operand to evaluate the callable or
6286 * constructable object expression.
6288 * For operator new applied to other expressions than E4X ones, we emit
6289 * JSOP_GETPROP instead of JSOP_CALLPROP, etc. This is necessary to
6290 * interpose the lambda-initialized method read barrier -- see the code
6291 * in jsinterp.cpp for JSOP_LAMBDA followed by JSOP_{SET,INIT}PROP.
6293 * Then (or in a call case that has no explicit reference-base object)
6294 * we emit JSOP_NULL as a placeholder local GC root to hold the |this|
6295 * parameter: in the operator new case, the newborn instance; in the
6296 * base-less call case, a cookie meaning "use the global object as the
6297 * |this| value" (or in ES5 strict mode, "use undefined", so we should
6298 * use JSOP_PUSH instead of JSOP_NULL -- see bug 514570).
6299 */
6300 pn2 = pn->pn_head;
6301 switch (pn2->pn_type) {
6302 case TOK_NAME:
6303 if (!EmitNameOp(cx, cg, pn2, callop))
6304 return JS_FALSE;
6305 break;
6306 case TOK_DOT:
6307 if (!EmitPropOp(cx, pn2, PN_OP(pn2), cg, callop))
6308 return JS_FALSE;
6309 break;
6310 case TOK_LB:
6311 JS_ASSERT(pn2->pn_op == JSOP_GETELEM);
6312 if (!EmitElemOp(cx, pn2, callop ? JSOP_CALLELEM : JSOP_GETELEM, cg))
6313 return JS_FALSE;
6314 break;
6315 case TOK_UNARYOP:
6316 #if JS_HAS_XML_SUPPORT
6317 if (pn2->pn_op == JSOP_XMLNAME) {
6318 if (!EmitXMLName(cx, pn2, JSOP_CALLXMLNAME, cg))
6319 return JS_FALSE;
6320 callop = true; /* suppress JSOP_NULL after */
6321 break;
6322 }
6323 #endif
6324 /* FALL THROUGH */
6325 default:
6326 /*
6327 * Push null as a placeholder for the global object, per ECMA-262
6328 * 11.2.3 step 6.
6329 */
6330 if (!js_EmitTree(cx, cg, pn2))
6331 return JS_FALSE;
6332 callop = false; /* trigger JSOP_NULL after */
6333 break;
6334 }
6335 if (!callop && js_Emit1(cx, cg, JSOP_NULL) < 0)
6336 return JS_FALSE;
6338 /* Remember start of callable-object bytecode for decompilation hint. */
6339 off = top;
6341 /*
6342 * Emit code for each argument in order, then emit the JSOP_*CALL or
6343 * JSOP_NEW bytecode with a two-byte immediate telling how many args
6344 * were pushed on the operand stack.
6345 */
6346 uintN oldflags = cg->flags;
6347 cg->flags &= ~TCF_IN_FOR_INIT;
6348 for (pn3 = pn2->pn_next; pn3; pn3 = pn3->pn_next) {
6349 if (!js_EmitTree(cx, cg, pn3))
6350 return JS_FALSE;
6351 }
6352 cg->flags |= oldflags & TCF_IN_FOR_INIT;
6353 if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - off) < 0)
6354 return JS_FALSE;
6356 argc = pn->pn_count - 1;
6357 if (js_Emit3(cx, cg, PN_OP(pn), ARGC_HI(argc), ARGC_LO(argc)) < 0)
6358 return JS_FALSE;
6359 if (PN_OP(pn) == JSOP_CALL) {
6360 /* Add a trace hint opcode for recursion. */
6361 if (js_Emit1(cx, cg, JSOP_TRACE) < 0)
6362 return JS_FALSE;
6363 }
6364 if (PN_OP(pn) == JSOP_EVAL)
6365 EMIT_UINT16_IMM_OP(JSOP_LINENO, pn->pn_pos.begin.lineno);
6366 break;
6367 }
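/*
 * For illustration, a call through a computed callee, e.g.
 * |(0, f)(x, y)|, emits roughly
 *
 *     <callee> JSOP_NULL <x> <y> JSOP_CALL 2 JSOP_TRACE
 *
 * with JSOP_NULL as the |this| placeholder, whereas a plain |f(x, y)|
 * uses JSOP_CALLNAME, which pushes the callee/|this| pair itself.
 */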
6369 case TOK_LEXICALSCOPE:
6370 {
6371 JSObjectBox *objbox;
6372 uintN count;
6374 objbox = pn->pn_objbox;
6375 js_PushBlockScope(cg, &stmtInfo, objbox->object, CG_OFFSET(cg));
6377 /*
6378 * If this lexical scope is not for a catch block, let block or let
6379 * expression, or any kind of for loop (where the scope starts in the
6380 * head after the first part of a for(;;) loop, else in the body of a for-in);
6381 * and if our container is top-level but not a function body, or else
6382 * a block statement; then emit a SRC_BRACE note. All other container
6383 * statements get braces by default from the decompiler.
6384 */
6385 noteIndex = -1;
6386 type = PN_TYPE(pn->expr());
6387 if (type != TOK_CATCH && type != TOK_LET && type != TOK_FOR &&
6388 (!(stmt = stmtInfo.down)
6389 ? !cg->inFunction()
6390 : stmt->type == STMT_BLOCK)) {
6391 #if defined DEBUG_brendan || defined DEBUG_mrbkap
6392 /* There must be no source note already output for the next op. */
6393 JS_ASSERT(CG_NOTE_COUNT(cg) == 0 ||
6394 CG_LAST_NOTE_OFFSET(cg) != CG_OFFSET(cg) ||
6395 !GettableNoteForNextOp(cg));
6396 #endif
6397 noteIndex = js_NewSrcNote2(cx, cg, SRC_BRACE, 0);
6398 if (noteIndex < 0)
6399 return JS_FALSE;
6400 }
6402 JS_ASSERT(CG_OFFSET(cg) == top);
6403 if (!EmitEnterBlock(cx, pn, cg))
6404 return JS_FALSE;
6406 if (!js_EmitTree(cx, cg, pn->pn_expr))
6407 return JS_FALSE;
6409 op = PN_OP(pn);
6410 if (op == JSOP_LEAVEBLOCKEXPR) {
6411 if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
6412 return JS_FALSE;
6413 } else {
6414 if (noteIndex >= 0 &&
6415 !js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0,
6416 CG_OFFSET(cg) - top)) {
6417 return JS_FALSE;
6418 }
6419 }
6421 /* Emit the JSOP_LEAVEBLOCK or JSOP_LEAVEBLOCKEXPR opcode. */
6422 count = OBJ_BLOCK_COUNT(cx, objbox->object);
6423 EMIT_UINT16_IMM_OP(op, count);
6425 ok = js_PopStatementCG(cx, cg);
6426 break;
6427 }
6429 #if JS_HAS_BLOCK_SCOPE
6430 case TOK_LET:
6431 /* Let statements have their variable declarations on the left. */
6432 if (pn->pn_arity == PN_BINARY) {
6433 pn2 = pn->pn_right;
6434 pn = pn->pn_left;
6435 } else {
6436 pn2 = NULL;
6437 }
6439 /* Non-null pn2 means that pn is the variable list from a let head. */
6440 JS_ASSERT(pn->pn_arity == PN_LIST);
6441 if (!EmitVariables(cx, cg, pn, pn2 != NULL, &noteIndex))
6442 return JS_FALSE;
6444 /* Thus non-null pn2 is the body of the let block or expression. */
6445 tmp = CG_OFFSET(cg);
6446 if (pn2 && !js_EmitTree(cx, cg, pn2))
6447 return JS_FALSE;
6449 if (noteIndex >= 0 &&
6450 !js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0,
6451 CG_OFFSET(cg) - tmp)) {
6452 return JS_FALSE;
6453 }
6454 break;
6455 #endif /* JS_HAS_BLOCK_SCOPE */
6457 #if JS_HAS_GENERATORS
6458 case TOK_ARRAYPUSH: {
6459 jsint slot;
6461 /*
6462 * The array object's stack index is in cg->arrayCompDepth. See below
6463 * under the array initialiser code generator for array comprehension
6464 * special casing.
6465 */
6466 if (!js_EmitTree(cx, cg, pn->pn_kid))
6467 return JS_FALSE;
6468 slot = AdjustBlockSlot(cx, cg, cg->arrayCompDepth);
6469 if (slot < 0)
6470 return JS_FALSE;
6471 EMIT_UINT16_IMM_OP(PN_OP(pn), slot);
6472 break;
6473 }
6474 #endif
6476 case TOK_RB:
6477 #if JS_HAS_GENERATORS
6478 case TOK_ARRAYCOMP:
6479 #endif
6480 /*
6481 * Emit code for [a, b, c] that is equivalent to constructing a new
6482 * array and in source order evaluating each element value and adding
6483 * it to the array, without invoking latent setters. We use the
6484 * JSOP_NEWINIT and JSOP_INITELEM bytecodes to ignore setters and to
6485 * avoid dup'ing and popping the array as each element is added, as
6486 * JSOP_SETELEM/JSOP_SETPROP would do.
6488 * If no sharp variable is defined, the initializer is not for an array
6489 * comprehension, the initializer is not overlarge, and the initializer
6490 * is not in global code (whose stack growth cannot be precisely modeled
6491 * due to the need to reserve space for global variables and regular
6492 * expressions), use JSOP_NEWARRAY to minimize opcodes and to create the
6493 * array using a fast, all-at-once process rather than a slow, element-
6494 * by-element process.
6495 */
6496 #if JS_HAS_SHARP_VARS
6497 sharpnum = -1;
6498 do_emit_array:
6499 #endif
6501 op = (JS_LIKELY(pn->pn_count < JS_BIT(16)) && cg->inFunction())
6502 ? JSOP_NEWARRAY
6503 : JSOP_NEWINIT;
6505 #if JS_HAS_GENERATORS
6506 if (pn->pn_type == TOK_ARRAYCOMP)
6507 op = JSOP_NEWINIT;
6508 #endif
6509 #if JS_HAS_SHARP_VARS
6510 JS_ASSERT_IF(sharpnum >= 0, cg->hasSharps());
6511 if (cg->hasSharps())
6512 op = JSOP_NEWINIT;
6513 #endif
6515 if (op == JSOP_NEWINIT && !EmitNewInit(cx, cg, JSProto_Array, pn, sharpnum))
6516 return JS_FALSE;
6518 #if JS_HAS_GENERATORS
6519 if (pn->pn_type == TOK_ARRAYCOMP) {
6520 uintN saveDepth;
6522 /*
6523 * Pass the new array's stack index to the TOK_ARRAYPUSH case via
6524 * cg->arrayCompDepth, then simply traverse the TOK_FOR node and
6525 * its kids under pn2 to generate this comprehension.
6526 */
6527 JS_ASSERT(cg->stackDepth > 0);
6528 saveDepth = cg->arrayCompDepth;
6529 cg->arrayCompDepth = (uint32) (cg->stackDepth - 1);
6530 if (!js_EmitTree(cx, cg, pn->pn_head))
6531 return JS_FALSE;
6532 cg->arrayCompDepth = saveDepth;
6534 /* Emit the usual op needed for decompilation. */
6535 if (!EmitEndInit(cx, cg, 1))
6536 return JS_FALSE;
6537 break;
6538 }
6539 #endif /* JS_HAS_GENERATORS */
6541 pn2 = pn->pn_head;
6542 for (atomIndex = 0; pn2; atomIndex++, pn2 = pn2->pn_next) {
6543 if (op == JSOP_NEWINIT && !EmitNumberOp(cx, atomIndex, cg))
6544 return JS_FALSE;
6545 if (pn2->pn_type == TOK_COMMA && pn2->pn_arity == PN_NULLARY) {
6546 if (js_Emit1(cx, cg, JSOP_HOLE) < 0)
6547 return JS_FALSE;
6548 } else {
6549 if (!js_EmitTree(cx, cg, pn2))
6550 return JS_FALSE;
6552 if (op == JSOP_NEWINIT && js_Emit1(cx, cg, JSOP_INITELEM) < 0)
6553 return JS_FALSE;
6554 }
6555 JS_ASSERT(atomIndex == pn->pn_count);
6557 if (pn->pn_xflags & PNX_ENDCOMMA) {
6558 /* Emit a source note so we know to decompile an extra comma. */
6559 if (js_NewSrcNote(cx, cg, SRC_CONTINUE) < 0)
6560 return JS_FALSE;
6561 }
6563 if (op == JSOP_NEWINIT) {
6564 /*
6565 * Emit an op to finish the array and, secondarily, to aid in sharp
6566 * array cleanup (if JS_HAS_SHARP_VARS) and decompilation.
6567 */
6568 if (!EmitEndInit(cx, cg, atomIndex))
6569 return JS_FALSE;
6570 break;
6571 }
6573 JS_ASSERT(atomIndex < JS_BIT(16));
6574 EMIT_UINT16_IMM_OP(JSOP_NEWARRAY, atomIndex);
6575 break;
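/*
 * For illustration, |[a, b]| in a function emits roughly
 * <a> <b> JSOP_NEWARRAY 2, while global code (or sharps, or a
 * comprehension) takes the slow form: JSOP_NEWINIT, then for each
 * element its index, its value, and JSOP_INITELEM, closed by the op
 * EmitEndInit emits; an elision as in |[, a]| pushes JSOP_HOLE.
 */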
6577 case TOK_RC: {
6578 #if JS_HAS_SHARP_VARS
6579 sharpnum = -1;
6580 do_emit_object:
6581 #endif
6582 #if JS_HAS_DESTRUCTURING_SHORTHAND
6583 if (pn->pn_xflags & PNX_DESTRUCT) {
6584 ReportCompileErrorNumber(cx, CG_TS(cg), pn, JSREPORT_ERROR, JSMSG_BAD_OBJECT_INIT);
6585 return JS_FALSE;
6586 }
6587 #endif
6588 /*
6589 * Emit code for {p:a, '%q':b, 2:c} that is equivalent to constructing
6590 * a new object and in source order evaluating each property value and
6591 * adding the property to the object, without invoking latent setters.
6592 * We use the JSOP_NEWINIT and JSOP_INITELEM/JSOP_INITPROP bytecodes to
6593 * ignore setters and to avoid dup'ing and popping the object as each
6594 * property is added, as JSOP_SETELEM/JSOP_SETPROP would do.
6595 */
6596 if (!EmitNewInit(cx, cg, JSProto_Object, pn, sharpnum))
6597 return JS_FALSE;
6599 uintN methodInits = 0, slowMethodInits = 0;
6600 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
6601 /* Emit an index for t[2] for later consumption by JSOP_INITELEM. */
6602 pn3 = pn2->pn_left;
6603 if (pn3->pn_type == TOK_NUMBER) {
6604 if (!EmitNumberOp(cx, pn3->pn_dval, cg))
6605 return JS_FALSE;
6606 }
6608 /* Emit code for the property initializer. */
6609 if (!js_EmitTree(cx, cg, pn2->pn_right))
6610 return JS_FALSE;
6612 op = PN_OP(pn2);
6613 if (op == JSOP_GETTER || op == JSOP_SETTER) {
6614 if (js_Emit1(cx, cg, op) < 0)
6615 return JS_FALSE;
6616 }
6618 /* Annotate JSOP_INITELEM so we decompile 2:c and not just c. */
6619 if (pn3->pn_type == TOK_NUMBER) {
6620 if (js_NewSrcNote(cx, cg, SRC_INITPROP) < 0)
6621 return JS_FALSE;
6622 if (js_Emit1(cx, cg, JSOP_INITELEM) < 0)
6623 return JS_FALSE;
6624 } else {
6625 JS_ASSERT(pn3->pn_type == TOK_NAME ||
6626 pn3->pn_type == TOK_STRING);
6627 ale = cg->atomList.add(cg->parser, pn3->pn_atom);
6628 if (!ale)
6629 return JS_FALSE;
6631 /* Check whether we can optimize to JSOP_INITMETHOD. */
6632 JSParseNode *init = pn2->pn_right;
6633 bool lambda = PN_OP(init) == JSOP_LAMBDA;
6634 if (lambda)
6635 ++methodInits;
6636 if (op == JSOP_INITPROP && lambda && init->pn_funbox->joinable())
6637 {
6638 op = JSOP_INITMETHOD;
6639 pn2->pn_op = uint8(op);
6640 } else {
6641 op = JSOP_INITPROP;
6642 if (lambda)
6643 ++slowMethodInits;
6644 }
6646 EMIT_INDEX_OP(op, ALE_INDEX(ale));
6647 }
6648 }
6650 if (cg->funbox && cg->funbox->shouldUnbrand(methodInits, slowMethodInits)) {
6651 if (js_Emit1(cx, cg, JSOP_UNBRAND) < 0)
6652 return JS_FALSE;
6653 }
6654 if (!EmitEndInit(cx, cg, pn->pn_count))
6655 return JS_FALSE;
6656 break;
6657 }
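/*
 * For illustration, |{p: v, 2: w}| emits roughly: the new-object op
 * from EmitNewInit; <v> JSOP_INITPROP "p"; the index 2, <w>, and a
 * SRC_INITPROP-annotated JSOP_INITELEM; then EmitEndInit. A joinable
 * lambda value upgrades its JSOP_INITPROP to JSOP_INITMETHOD as above.
 */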
6659 #if JS_HAS_SHARP_VARS
6660 case TOK_DEFSHARP:
6661 JS_ASSERT(cg->hasSharps());
6662 sharpnum = pn->pn_num;
6663 pn = pn->pn_kid;
6664 if (pn->pn_type == TOK_RB)
6665 goto do_emit_array;
6666 # if JS_HAS_GENERATORS
6667 if (pn->pn_type == TOK_ARRAYCOMP)
6668 goto do_emit_array;
6669 # endif
6670 if (pn->pn_type == TOK_RC)
6671 goto do_emit_object;
6673 if (!js_EmitTree(cx, cg, pn))
6674 return JS_FALSE;
6675 EMIT_UINT16PAIR_IMM_OP(JSOP_DEFSHARP, cg->sharpSlotBase, (jsatomid) sharpnum);
6676 break;
6678 case TOK_USESHARP:
6679 JS_ASSERT(cg->hasSharps());
6680 EMIT_UINT16PAIR_IMM_OP(JSOP_USESHARP, cg->sharpSlotBase, (jsatomid) pn->pn_num);
6681 break;
6682 #endif /* JS_HAS_SHARP_VARS */
6684 case TOK_NAME:
6685 /*
6686 * Cope with a left-over function definition that was replaced by a use
6687 * of a later function definition of the same name. See FunctionDef and
6688 * MakeDefIntoUse in jsparse.cpp.
6689 */
6690 if (pn->pn_op == JSOP_NOP)
6691 return JS_TRUE;
6692 if (!EmitNameOp(cx, cg, pn, JS_FALSE))
6693 return JS_FALSE;
6694 break;
6696 #if JS_HAS_XML_SUPPORT
6697 case TOK_XMLATTR:
6698 case TOK_XMLSPACE:
6699 case TOK_XMLTEXT:
6700 case TOK_XMLCDATA:
6701 case TOK_XMLCOMMENT:
6702 #endif
6703 case TOK_STRING:
6704 ok = EmitAtomOp(cx, pn, PN_OP(pn), cg);
6705 break;
6707 case TOK_NUMBER:
6708 ok = EmitNumberOp(cx, pn->pn_dval, cg);
6709 break;
6711 case TOK_REGEXP: {
6712 /*
6713 * If the regexp's script is one-shot and the regexp is not used in a
6714 * loop, we can avoid the extra fork-on-exec costs of JSOP_REGEXP by
6715 * selecting JSOP_OBJECT. Otherwise, to avoid incorrect proto, parent,
6716 * and lastIndex sharing, select JSOP_REGEXP.
6717 */
6718 JS_ASSERT(pn->pn_op == JSOP_REGEXP);
6719 bool singleton = !cg->fun && cg->compileAndGo();
6720 if (singleton) {
6721 for (JSStmtInfo *stmt = cg->topStmt; stmt; stmt = stmt->down) {
6722 if (STMT_IS_LOOP(stmt)) {
6723 singleton = false;
6724 break;
6725 }
6726 }
6727 }
6728 if (singleton) {
6729 ok = EmitObjectOp(cx, pn->pn_objbox, JSOP_OBJECT, cg);
6730 } else {
6731 ok = EmitIndexOp(cx, JSOP_REGEXP,
6732 cg->regexpList.index(pn->pn_objbox),
6733 cg);
6734 }
6735 break;
6736 }
6738 #if JS_HAS_XML_SUPPORT
6739 case TOK_ANYNAME:
6740 #endif
6741 case TOK_PRIMARY:
6742 if (js_Emit1(cx, cg, PN_OP(pn)) < 0)
6743 return JS_FALSE;
6744 break;
6746 #if JS_HAS_DEBUGGER_KEYWORD
6747 case TOK_DEBUGGER:
6748 if (js_Emit1(cx, cg, JSOP_DEBUGGER) < 0)
6749 return JS_FALSE;
6750 break;
6751 #endif /* JS_HAS_DEBUGGER_KEYWORD */
6753 #if JS_HAS_XML_SUPPORT
6754 case TOK_XMLELEM:
6755 case TOK_XMLLIST:
6756 JS_ASSERT(PN_TYPE(pn) == TOK_XMLLIST || pn->pn_count != 0);
6757 switch (pn->pn_head ? PN_TYPE(pn->pn_head) : TOK_XMLLIST) {
6758 case TOK_XMLETAGO:
6759 JS_ASSERT(0);
6760 /* FALL THROUGH */
6761 case TOK_XMLPTAGC:
6762 case TOK_XMLSTAGO:
6763 break;
6764 default:
6765 if (js_Emit1(cx, cg, JSOP_STARTXML) < 0)
6766 return JS_FALSE;
6767 }
6769 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
6770 if (pn2->pn_type == TOK_LC &&
6771 js_Emit1(cx, cg, JSOP_STARTXMLEXPR) < 0) {
6772 return JS_FALSE;
6773 }
6774 if (!js_EmitTree(cx, cg, pn2))
6775 return JS_FALSE;
6776 if (pn2 != pn->pn_head && js_Emit1(cx, cg, JSOP_ADD) < 0)
6777 return JS_FALSE;
6778 }
6780 if (pn->pn_xflags & PNX_XMLROOT) {
6781 if (pn->pn_count == 0) {
6782 JS_ASSERT(pn->pn_type == TOK_XMLLIST);
6783 atom = cx->runtime->atomState.emptyAtom;
6784 ale = cg->atomList.add(cg->parser, atom);
6785 if (!ale)
6786 return JS_FALSE;
6787 EMIT_INDEX_OP(JSOP_STRING, ALE_INDEX(ale));
6788 }
6789 if (js_Emit1(cx, cg, PN_OP(pn)) < 0)
6790 return JS_FALSE;
6791 }
6792 #ifdef DEBUG
6793 else
6794 JS_ASSERT(pn->pn_count != 0);
6795 #endif
6796 break;
6798 case TOK_XMLPTAGC:
6799 case TOK_XMLSTAGO:
6800 case TOK_XMLETAGO:
6801 {
6802 uint32 i;
6804 if (js_Emit1(cx, cg, JSOP_STARTXML) < 0)
6805 return JS_FALSE;
6807 ale = cg->atomList.add(cg->parser,
6808 (pn->pn_type == TOK_XMLETAGO)
6809 ? cx->runtime->atomState.etagoAtom
6810 : cx->runtime->atomState.stagoAtom);
6811 if (!ale)
6812 return JS_FALSE;
6813 EMIT_INDEX_OP(JSOP_STRING, ALE_INDEX(ale));
6815 JS_ASSERT(pn->pn_count != 0);
6816 pn2 = pn->pn_head;
6817 if (pn2->pn_type == TOK_LC && js_Emit1(cx, cg, JSOP_STARTXMLEXPR) < 0)
6818 return JS_FALSE;
6819 if (!js_EmitTree(cx, cg, pn2))
6820 return JS_FALSE;
6821 if (js_Emit1(cx, cg, JSOP_ADD) < 0)
6822 return JS_FALSE;
6824 for (pn2 = pn2->pn_next, i = 0; pn2; pn2 = pn2->pn_next, i++) {
6825 if (pn2->pn_type == TOK_LC &&
6826 js_Emit1(cx, cg, JSOP_STARTXMLEXPR) < 0) {
6827 return JS_FALSE;
6828 }
6829 if (!js_EmitTree(cx, cg, pn2))
6830 return JS_FALSE;
6831 if ((i & 1) && pn2->pn_type == TOK_LC) {
6832 if (js_Emit1(cx, cg, JSOP_TOATTRVAL) < 0)
6833 return JS_FALSE;
6834 }
6835 if (js_Emit1(cx, cg,
6836 (i & 1) ? JSOP_ADDATTRVAL : JSOP_ADDATTRNAME) < 0) {
6837 return JS_FALSE;
6838 }
6839 }
6841 ale = cg->atomList.add(cg->parser,
6842 (pn->pn_type == TOK_XMLPTAGC)
6843 ? cx->runtime->atomState.ptagcAtom
6844 : cx->runtime->atomState.tagcAtom);
6845 if (!ale)
6846 return JS_FALSE;
6847 EMIT_INDEX_OP(JSOP_STRING, ALE_INDEX(ale));
6848 if (js_Emit1(cx, cg, JSOP_ADD) < 0)
6849 return JS_FALSE;
6851 if ((pn->pn_xflags & PNX_XMLROOT) && js_Emit1(cx, cg, PN_OP(pn)) < 0)
6852 return JS_FALSE;
6853 break;
6854 }
6856 case TOK_XMLNAME:
6857 if (pn->pn_arity == PN_LIST) {
6858 JS_ASSERT(pn->pn_count != 0);
6859 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
6860 if (pn2->pn_type == TOK_LC &&
6861 js_Emit1(cx, cg, JSOP_STARTXMLEXPR) < 0) {
6862 return JS_FALSE;
6863 }
6864 if (!js_EmitTree(cx, cg, pn2))
6865 return JS_FALSE;
6866 if (pn2 != pn->pn_head && js_Emit1(cx, cg, JSOP_ADD) < 0)
6867 return JS_FALSE;
6868 }
6869 } else {
6870 JS_ASSERT(pn->pn_arity == PN_NULLARY);
6871 ok = (pn->pn_op == JSOP_OBJECT)
6872 ? EmitObjectOp(cx, pn->pn_objbox, PN_OP(pn), cg)
6873 : EmitAtomOp(cx, pn, PN_OP(pn), cg);
6874 }
6875 break;
6877 case TOK_XMLPI:
6878 ale = cg->atomList.add(cg->parser, pn->pn_atom2);
6879 if (!ale)
6880 return JS_FALSE;
6881 if (!EmitIndexOp(cx, JSOP_QNAMEPART, ALE_INDEX(ale), cg))
6882 return JS_FALSE;
6883 if (!EmitAtomOp(cx, pn, JSOP_XMLPI, cg))
6884 return JS_FALSE;
6885 break;
6886 #endif /* JS_HAS_XML_SUPPORT */
6888 default:
6889 JS_ASSERT(0);
6890 }
6892 if (ok && --cg->emitLevel == 0) {
6893 if (cg->spanDeps)
6894 ok = OptimizeSpanDeps(cx, cg);
6895 if (!UpdateLineNumberNotes(cx, cg, pn->pn_pos.end.lineno))
6896 return JS_FALSE;
6897 }
6899 return ok;
6900 }
6902 /*
6903 * We should try to get rid of offsetBias (always 0 or 1, where 1 is
6904 * JSOP_{NOP,POP}_LENGTH), which is used only by SRC_FOR and SRC_DECL.
6905 */
6906 JS_FRIEND_DATA(JSSrcNoteSpec) js_SrcNoteSpec[] = {
6907 {"null", 0, 0, 0},
6908 {"if", 0, 0, 0},
6909 {"if-else", 2, 0, 1},
6910 {"for", 3, 1, 1},
6911 {"while", 1, 0, 1},
6912 {"continue", 0, 0, 0},
6913 {"decl", 1, 1, 1},
6914 {"pcdelta", 1, 0, 1},
6915 {"assignop", 0, 0, 0},
6916 {"cond", 1, 0, 1},
6917 {"brace", 1, 0, 1},
6918 {"hidden", 0, 0, 0},
6919 {"pcbase", 1, 0, -1},
6920 {"label", 1, 0, 0},
6921 {"labelbrace", 1, 0, 0},
6922 {"endbrace", 0, 0, 0},
6923 {"break2label", 1, 0, 0},
6924 {"cont2label", 1, 0, 0},
6925 {"switch", 2, 0, 1},
6926 {"funcdef", 1, 0, 0},
6927 {"catch", 1, 0, 1},
6928 {"extended", -1, 0, 0},
6929 {"newline", 0, 0, 0},
6930 {"setline", 1, 0, 0},
6931 {"xdelta", 0, 0, 0},
6934 static intN
6935 AllocSrcNote(JSContext *cx, JSCodeGenerator *cg)
6936 {
6937 intN index;
6938 JSArenaPool *pool;
6939 size_t size;
6941 index = CG_NOTE_COUNT(cg);
6942 if (((uintN)index & CG_NOTE_MASK(cg)) == 0) {
6943 pool = cg->notePool;
6944 size = SRCNOTE_SIZE(CG_NOTE_MASK(cg) + 1);
6945 if (!CG_NOTES(cg)) {
6946 /* Allocate the first note array lazily; leave noteMask alone. */
6947 JS_ARENA_ALLOCATE_CAST(CG_NOTES(cg), jssrcnote *, pool, size);
6948 } else {
6949 /* Grow by doubling note array size; update noteMask on success. */
6950 JS_ARENA_GROW_CAST(CG_NOTES(cg), jssrcnote *, pool, size, size);
6951 if (CG_NOTES(cg))
6952 CG_NOTE_MASK(cg) = (CG_NOTE_MASK(cg) << 1) | 1;
6953 }
6954 if (!CG_NOTES(cg)) {
6955 js_ReportOutOfScriptQuota(cx);
6956 return -1;
6957 }
6958 }
6960 CG_NOTE_COUNT(cg) = index + 1;
6961 return index;
6962 }
6964 intN
6965 js_NewSrcNote(JSContext *cx, JSCodeGenerator *cg, JSSrcNoteType type)
6966 {
6967 intN index, n;
6968 jssrcnote *sn;
6969 ptrdiff_t offset, delta, xdelta;
6971 /*
6972 * Claim a note slot in CG_NOTES(cg) by growing it if necessary and then
6973 * incrementing CG_NOTE_COUNT(cg).
6974 */
6975 index = AllocSrcNote(cx, cg);
6976 if (index < 0)
6977 return -1;
6978 sn = &CG_NOTES(cg)[index];
6980 /*
6981 * Compute delta from the last annotated bytecode's offset. If it's too
6982 * big to fit in sn, allocate one or more xdelta notes and reset sn.
6983 */
6984 offset = CG_OFFSET(cg);
6985 delta = offset - CG_LAST_NOTE_OFFSET(cg);
6986 CG_LAST_NOTE_OFFSET(cg) = offset;
6987 if (delta >= SN_DELTA_LIMIT) {
6988 do {
6989 xdelta = JS_MIN(delta, SN_XDELTA_MASK);
6990 SN_MAKE_XDELTA(sn, xdelta);
6991 delta -= xdelta;
6992 index = AllocSrcNote(cx, cg);
6993 if (index < 0)
6994 return -1;
6995 sn = &CG_NOTES(cg)[index];
6996 } while (delta >= SN_DELTA_LIMIT);
6997 }
6999 /*
7000 * Initialize type and delta, then allocate the minimum number of notes
7001 * needed for type's arity. Usually, we won't need more, but if an offset
7002 * does take two bytes, js_SetSrcNoteOffset will grow CG_NOTES(cg).
7003 */
7004 SN_MAKE_NOTE(sn, type, delta);
7005 for (n = (intN)js_SrcNoteSpec[type].arity; n > 0; n--) {
7006 if (js_NewSrcNote(cx, cg, SRC_NULL) < 0)
7007 return -1;
7008 }
7009 return index;
7010 }
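/*
 * Worked example (assuming jsemit.h's 3-bit note delta and 6-bit
 * xdelta): a pc delta of 100 exceeds SN_DELTA_LIMIT, so the xdelta
 * loop in js_NewSrcNote emits an SRC_XDELTA carrying 63, leaving 37;
 * that still exceeds the limit, so a second xdelta carries 37 and the
 * requested note is then made with delta 0.
 */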
7012 intN
7013 js_NewSrcNote2(JSContext *cx, JSCodeGenerator *cg, JSSrcNoteType type,
7014 ptrdiff_t offset)
7015 {
7016 intN index;
7018 index = js_NewSrcNote(cx, cg, type);
7019 if (index >= 0) {
7020 if (!js_SetSrcNoteOffset(cx, cg, index, 0, offset))
7021 return -1;
7022 }
7023 return index;
7024 }
7026 intN
7027 js_NewSrcNote3(JSContext *cx, JSCodeGenerator *cg, JSSrcNoteType type,
7028 ptrdiff_t offset1, ptrdiff_t offset2)
7029 {
7030 intN index;
7032 index = js_NewSrcNote(cx, cg, type);
7033 if (index >= 0) {
7034 if (!js_SetSrcNoteOffset(cx, cg, index, 0, offset1))
7035 return -1;
7036 if (!js_SetSrcNoteOffset(cx, cg, index, 1, offset2))
7037 return -1;
7038 }
7039 return index;
7040 }
7042 static JSBool
7043 GrowSrcNotes(JSContext *cx, JSCodeGenerator *cg)
7044 {
7045 JSArenaPool *pool;
7046 size_t size;
7048 /* Grow by doubling note array size; update noteMask on success. */
7049 pool = cg->notePool;
7050 size = SRCNOTE_SIZE(CG_NOTE_MASK(cg) + 1);
7051 JS_ARENA_GROW_CAST(CG_NOTES(cg), jssrcnote *, pool, size, size);
7052 if (!CG_NOTES(cg)) {
7053 js_ReportOutOfScriptQuota(cx);
7054 return JS_FALSE;
7055 }
7056 CG_NOTE_MASK(cg) = (CG_NOTE_MASK(cg) << 1) | 1;
7057 return JS_TRUE;
7058 }
7060 jssrcnote *
7061 js_AddToSrcNoteDelta(JSContext *cx, JSCodeGenerator *cg, jssrcnote *sn,
7062 ptrdiff_t delta)
7063 {
7064 ptrdiff_t base, limit, newdelta, diff;
7065 intN index;
7067 /*
7068 * Called only from OptimizeSpanDeps and js_FinishTakingSrcNotes to add to
7069 * main script note deltas, and only by a small positive amount.
7070 */
7071 JS_ASSERT(cg->current == &cg->main);
7072 JS_ASSERT((unsigned) delta < (unsigned) SN_XDELTA_LIMIT);
7074 base = SN_DELTA(sn);
7075 limit = SN_IS_XDELTA(sn) ? SN_XDELTA_LIMIT : SN_DELTA_LIMIT;
7076 newdelta = base + delta;
7077 if (newdelta < limit) {
7078 SN_SET_DELTA(sn, newdelta);
7079 } else {
7080 index = sn - cg->main.notes;
7081 if ((cg->main.noteCount & cg->main.noteMask) == 0) {
7082 if (!GrowSrcNotes(cx, cg))
7083 return NULL;
7084 sn = cg->main.notes + index;
7085 }
7086 diff = cg->main.noteCount - index;
7087 cg->main.noteCount++;
7088 memmove(sn + 1, sn, SRCNOTE_SIZE(diff));
7089 SN_MAKE_XDELTA(sn, delta);
7090 sn++;
7091 }
7092 return sn;
7093 }
7095 JS_FRIEND_API(uintN)
7096 js_SrcNoteLength(jssrcnote *sn)
7097 {
7098 uintN arity;
7099 jssrcnote *base;
7101 arity = (intN)js_SrcNoteSpec[SN_TYPE(sn)].arity;
7102 for (base = sn++; arity; sn++, arity--) {
7103 if (*sn & SN_3BYTE_OFFSET_FLAG)
7104 sn += 2;
7105 }
7106 return sn - base;
7107 }
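/*
 * For example, a two-offset note whose first offset needed the 3-byte
 * form spans 1 (header) + 3 + 1 = 5 bytes, which is what the walk
 * above returns.
 */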
7109 JS_FRIEND_API(ptrdiff_t)
7110 js_GetSrcNoteOffset(jssrcnote *sn, uintN which)
7111 {
7112 /* Find the offset numbered which (i.e., skip exactly which offsets). */
7113 JS_ASSERT(SN_TYPE(sn) != SRC_XDELTA);
7114 JS_ASSERT((intN) which < js_SrcNoteSpec[SN_TYPE(sn)].arity);
7115 for (sn++; which; sn++, which--) {
7116 if (*sn & SN_3BYTE_OFFSET_FLAG)
7117 sn += 2;
7118 }
7119 if (*sn & SN_3BYTE_OFFSET_FLAG) {
7120 return (ptrdiff_t)(((uint32)(sn[0] & SN_3BYTE_OFFSET_MASK) << 16)
7121 | (sn[1] << 8)
7122 | sn[2]);
7123 }
7124 return (ptrdiff_t)*sn;
7125 }
7127 JSBool
7128 js_SetSrcNoteOffset(JSContext *cx, JSCodeGenerator *cg, uintN index,
7129 uintN which, ptrdiff_t offset)
7130 {
7131 jssrcnote *sn;
7132 ptrdiff_t diff;
7134 if ((jsuword)offset >= (jsuword)((ptrdiff_t)SN_3BYTE_OFFSET_FLAG << 16)) {
7135 ReportStatementTooLarge(cx, cg);
7136 return JS_FALSE;
7137 }
7139 /* Find the offset numbered which (i.e., skip exactly which offsets). */
7140 sn = &CG_NOTES(cg)[index];
7141 JS_ASSERT(SN_TYPE(sn) != SRC_XDELTA);
7142 JS_ASSERT((intN) which < js_SrcNoteSpec[SN_TYPE(sn)].arity);
7143 for (sn++; which; sn++, which--) {
7144 if (*sn & SN_3BYTE_OFFSET_FLAG)
7145 sn += 2;
7146 }
7148 /* See if the new offset requires three bytes. */
7149 if (offset > (ptrdiff_t)SN_3BYTE_OFFSET_MASK) {
7150 /* Maybe this offset was already set to a three-byte value. */
7151 if (!(*sn & SN_3BYTE_OFFSET_FLAG)) {
7152 /* Losing, need to insert another two bytes for this offset. */
7153 index = sn - CG_NOTES(cg);
7155 /*
7156 * Simultaneously test to see if the source note array must grow to
7157 * accommodate either the first or second byte of additional storage
7158 * required by this 3-byte offset.
7159 */
7160 if (((CG_NOTE_COUNT(cg) + 1) & CG_NOTE_MASK(cg)) <= 1) {
7161 if (!GrowSrcNotes(cx, cg))
7162 return JS_FALSE;
7163 sn = CG_NOTES(cg) + index;
7164 }
7165 CG_NOTE_COUNT(cg) += 2;
7167 diff = CG_NOTE_COUNT(cg) - (index + 3);
7168 JS_ASSERT(diff >= 0);
7169 if (diff > 0)
7170 memmove(sn + 3, sn + 1, SRCNOTE_SIZE(diff));
7171 }
7172 *sn++ = (jssrcnote)(SN_3BYTE_OFFSET_FLAG | (offset >> 16));
7173 *sn++ = (jssrcnote)(offset >> 8);
7174 }
7175 *sn = (jssrcnote)offset;
7176 return JS_TRUE;
7177 }
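/*
 * For example, offset 0x12345 exceeds SN_3BYTE_OFFSET_MASK, so it is
 * stored big-endian as the three bytes 0x81 0x23 0x45, the first
 * carrying SN_3BYTE_OFFSET_FLAG in its high bit.
 */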
7179 #ifdef DEBUG_notme
7180 #define DEBUG_srcnotesize
7181 #endif
7183 #ifdef DEBUG_srcnotesize
7184 #define NBINS 10
7185 static uint32 hist[NBINS];
7187 void DumpSrcNoteSizeHist()
7188 {
7189 static FILE *fp;
7190 int i, n;
7192 if (!fp) {
7193 fp = fopen("/tmp/srcnotes.hist", "w");
7194 if (!fp)
7195 return;
7196 setvbuf(fp, NULL, _IONBF, 0);
7197 }
7198 fprintf(fp, "SrcNote size histogram:\n");
7199 for (i = 0; i < NBINS; i++) {
7200 fprintf(fp, "%4u %4u ", JS_BIT(i), hist[i]);
7201 for (n = (int) JS_HOWMANY(hist[i], 10); n > 0; --n)
7202 fputc('*', fp);
7203 fputc('\n', fp);
7204 }
7205 fputc('\n', fp);
7206 }
7207 #endif
7209 /*
7210 * Fill in the storage at notes with prolog and main srcnotes; the space at
7211 * notes was allocated using the CG_COUNT_FINAL_SRCNOTES macro from jsemit.h.
7212 * SO DON'T CHANGE THIS FUNCTION WITHOUT AT LEAST CHECKING WHETHER jsemit.h's
7213 * CG_COUNT_FINAL_SRCNOTES MACRO NEEDS CORRESPONDING CHANGES!
7214 */
7215 JSBool
7216 js_FinishTakingSrcNotes(JSContext *cx, JSCodeGenerator *cg, jssrcnote *notes)
7217 {
7218 uintN prologCount, mainCount, totalCount;
7219 ptrdiff_t offset, delta;
7220 jssrcnote *sn;
7222 JS_ASSERT(cg->current == &cg->main);
7224 prologCount = cg->prolog.noteCount;
7225 if (prologCount && cg->prolog.currentLine != cg->firstLine) {
7226 CG_SWITCH_TO_PROLOG(cg);
7227 if (js_NewSrcNote2(cx, cg, SRC_SETLINE, (ptrdiff_t)cg->firstLine) < 0)
7228 return JS_FALSE;
7229 prologCount = cg->prolog.noteCount;
7230 CG_SWITCH_TO_MAIN(cg);
7231 } else {
7232 /*
7233 * Either no prolog srcnotes, or no line number change over prolog.
7234 * We don't need a SRC_SETLINE, but we may need to adjust the offset
7235 * of the first main note, by adding to its delta and possibly even
7236 * prepending SRC_XDELTA notes to it to account for prolog bytecodes
7237 * that came at and after the last annotated bytecode.
7238 */
7239 offset = CG_PROLOG_OFFSET(cg) - cg->prolog.lastNoteOffset;
7240 JS_ASSERT(offset >= 0);
7241 if (offset > 0 && cg->main.noteCount != 0) {
7242 /* NB: Use as much of the first main note's delta as we can. */
7243 sn = cg->main.notes;
7244 delta = SN_IS_XDELTA(sn)
7245 ? SN_XDELTA_MASK - (*sn & SN_XDELTA_MASK)
7246 : SN_DELTA_MASK - (*sn & SN_DELTA_MASK);
7247 if (offset < delta)
7248 delta = offset;
7249 for (;;) {
7250 if (!js_AddToSrcNoteDelta(cx, cg, sn, delta))
7251 return JS_FALSE;
7252 offset -= delta;
7253 if (offset == 0)
7254 break;
7255 delta = JS_MIN(offset, SN_XDELTA_MASK);
7256 sn = cg->main.notes;
7257 }
7258 }
7259 }
7261 mainCount = cg->main.noteCount;
7262 totalCount = prologCount + mainCount;
7263 if (prologCount)
7264 memcpy(notes, cg->prolog.notes, SRCNOTE_SIZE(prologCount));
7265 memcpy(notes + prologCount, cg->main.notes, SRCNOTE_SIZE(mainCount));
7266 SN_MAKE_TERMINATOR(&notes[totalCount]);
7268 #ifdef DEBUG_notme
7269 { int bin = JS_CeilingLog2(totalCount);
7270 if (bin >= NBINS)
7271 bin = NBINS - 1;
7272 ++hist[bin];
7273 }
7274 #endif
7275 return JS_TRUE;
7276 }
7278 static JSBool
7279 NewTryNote(JSContext *cx, JSCodeGenerator *cg, JSTryNoteKind kind,
7280 uintN stackDepth, size_t start, size_t end)
7281 {
7282 JSTryNode *tryNode;
7284 JS_ASSERT((uintN)(uint16)stackDepth == stackDepth);
7285 JS_ASSERT(start <= end);
7286 JS_ASSERT((size_t)(uint32)start == start);
7287 JS_ASSERT((size_t)(uint32)end == end);
7289 JS_ARENA_ALLOCATE_TYPE(tryNode, JSTryNode, &cx->tempPool);
7290 if (!tryNode) {
7291 js_ReportOutOfScriptQuota(cx);
7292 return JS_FALSE;
7293 }
7295 tryNode->note.kind = kind;
7296 tryNode->note.stackDepth = (uint16)stackDepth;
7297 tryNode->note.start = (uint32)start;
7298 tryNode->note.length = (uint32)(end - start);
7299 tryNode->prev = cg->lastTryNode;
7300 cg->lastTryNode = tryNode;
7301 cg->ntrynotes++;
7302 return JS_TRUE;
7303 }
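/*
 * NewTryNote links each note onto cg->lastTryNode in reverse emission
 * order, so js_FinishTakingTryNotes below fills the script's try-note
 * vector back to front and the finished array ends up in emission
 * order without a separate reversal pass.
 */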
7305 void
7306 js_FinishTakingTryNotes(JSCodeGenerator *cg, JSTryNoteArray *array)
7307 {
7308 JSTryNode *tryNode;
7309 JSTryNote *tn;
7311 JS_ASSERT(array->length > 0 && array->length == cg->ntrynotes);
7312 tn = array->vector + array->length;
7313 tryNode = cg->lastTryNode;
7314 do {
7315 *--tn = tryNode->note;
7316 } while ((tryNode = tryNode->prev) != NULL);
7317 JS_ASSERT(tn == array->vector);
7318 }
7320 /*
7321 * Find the index of the given object for code generator.
7323 * Since the emitter refers to each parsed object only once, for the index we
7324 * use the number of already indexed objects. We also add the object to a list
7325 * to convert the list to a fixed-size array when we complete code generation,
7326 * see JSCGObjectList::finish below.
7328 * Most of the objects go to JSCodeGenerator.objectList, but for regexps we use
7329 * a separate JSCodeGenerator.regexpList. In this way the emitted index can be
7330 * directly used to store and fetch a reference to a cloned RegExp object that
7331 * shares the same JSRegExp private data created for the object literal in
7332 * objbox. We need a cloned object to hold lastIndex and other direct properties
7333 * that should not be shared among threads sharing a precompiled function or
7334 * script.
7336 * If the code being compiled is function code, allocate a reserved slot in
7337 * the cloned function object that shares its precompiled script with other
7338 * cloned function objects and with the compiler-created clone-parent. There
7339 * are nregexps = script->regexps()->length such reserved slots in each
7340 * function object cloned from fun->object. NB: during compilation, a funobj
7341 * slots element must never be allocated, because js_AllocSlot could hand out
7342 * one of the slots that should be given to a regexp clone.
7344 * If the code being compiled is global code, the cloned regexps are stored in
7345 * fp->vars slots after cg->ngvars, and to protect regexp slots from GC we set
7346 * fp->nvars to ngvars + nregexps.
7348 * The slots initially contain undefined or null. We populate them lazily when
7349 * JSOP_REGEXP is executed for the first time.
7351 * Why clone regexp objects? ECMA specifies that when a regular expression
7352 * literal is scanned, a RegExp object is created. In the spec, compilation
7353 * and execution happen indivisibly, but in this implementation and many of
7354 * its embeddings, code is precompiled early and re-executed in multiple
7355 * threads, or using multiple global objects, or both, for efficiency.
7357 * In such cases, naively following ECMA leads to wrongful sharing of RegExp
7358 * objects, which makes for collisions on the lastIndex property (especially
7359 * for global regexps) and on any ad-hoc properties. Also, __proto__ refers to
7360 * the pre-compilation prototype, a pigeon-hole problem for instanceof tests.
7361 */
7362 uintN
7363 JSCGObjectList::index(JSObjectBox *objbox)
7364 {
7365 JS_ASSERT(!objbox->emitLink);
7366 objbox->emitLink = lastbox;
7367 lastbox = objbox;
7368 return length++;
7369 }
7371 void
7372 JSCGObjectList::finish(JSObjectArray *array)
7373 {
7374 JSObject **cursor;
7375 JSObjectBox *objbox;
7377 JS_ASSERT(length <= INDEX_LIMIT);
7378 JS_ASSERT(length == array->length);
7380 cursor = array->vector + array->length;
7381 objbox = lastbox;
7382 do {
7383 --cursor;
7384 JS_ASSERT(!*cursor);
7385 *cursor = objbox->object;
7386 } while ((objbox = objbox->emitLink) != NULL);
7387 JS_ASSERT(cursor == array->vector);
7388 }
7390 void
7391 JSGCConstList::finish(JSConstArray *array)
7392 {
7393 JS_ASSERT(array->length == list.length());
7394 Value *src = list.begin(), *srcend = list.end();
7395 Value *dst = array->vector;
7396 for (; src != srcend; ++src, ++dst)
7397 *dst = *src;
7398 }