/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sw=4 et tw=99:
 *
 * ***** BEGIN LICENSE BLOCK *****
 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
 *
 * The contents of this file are subject to the Mozilla Public License Version
 * 1.1 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * http://www.mozilla.org/MPL/
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
 *
 * The Original Code is Mozilla Communicator client code, released
 * March 31, 1998.
 *
 * The Initial Developer of the Original Code is
 * Netscape Communications Corporation.
 * Portions created by the Initial Developer are Copyright (C) 1998
 * the Initial Developer. All Rights Reserved.
 *
 * Contributor(s):
 *
 * Alternatively, the contents of this file may be used under the terms of
 * either of the GNU General Public License Version 2 or later (the "GPL"),
 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
 * in which case the provisions of the GPL or the LGPL are applicable instead
 * of those above. If you wish to allow use of your version of this file only
 * under the terms of either the GPL or the LGPL, and not to allow others to
 * use your version of this file under the terms of the MPL, indicate your
 * decision by deleting the provisions above and replace them with the notice
 * and other provisions required by the GPL or the LGPL. If you do not delete
 * the provisions above, a recipient may use your version of this file under
 * the terms of any one of the MPL, the GPL or the LGPL.
 *
 * ***** END LICENSE BLOCK ***** */

/*
 * JS bytecode generation.
 */
#ifdef HAVE_MEMORY_H
#include <memory.h>
#endif
#include <new>
#include <string.h>
#include "jstypes.h"
#include "jsstdint.h"
#include "jsarena.h" /* Added by JSIFY */
#include "jsutil.h" /* Added by JSIFY */
#include "jsbit.h"
#include "jsprf.h"
#include "jsapi.h"
#include "jsatom.h"
#include "jsbool.h"
#include "jscntxt.h"
#include "jsversion.h"
#include "jsemit.h"
#include "jsfun.h"
#include "jsnum.h"
#include "jsopcode.h"
#include "jsparse.h"
#include "jsregexp.h"
#include "jsscan.h"
#include "jsscope.h"
#include "jsscript.h"
#include "jsautooplen.h"
#include "jsstaticcheck.h"
/* Allocation chunk counts, must be powers of two in general. */
#define BYTECODE_CHUNK  256     /* code allocation increment */
#define SRCNOTE_CHUNK   64      /* initial srcnote allocation increment */
#define TRYNOTE_CHUNK   64      /* trynote allocation increment */

/* Macros to compute byte sizes from typed element counts. */
#define BYTECODE_SIZE(n)        ((n) * sizeof(jsbytecode))
#define SRCNOTE_SIZE(n)         ((n) * sizeof(jssrcnote))
#define TRYNOTE_SIZE(n)         ((n) * sizeof(JSTryNote))

static JSBool
NewTryNote(JSContext *cx, JSCodeGenerator *cg, JSTryNoteKind kind,
           uintN stackDepth, size_t start, size_t end);
JSCodeGenerator::JSCodeGenerator(JSCompiler *jsc,
                                 JSArenaPool *cpool, JSArenaPool *npool,
                                 uintN lineno)
  : JSTreeContext(jsc),
    codePool(cpool), notePool(npool),
    codeMark(JS_ARENA_MARK(cpool)), noteMark(JS_ARENA_MARK(npool)),
    stackDepth(0), maxStackDepth(0),
    ntrynotes(0), lastTryNode(NULL),
    spanDeps(NULL), jumpTargets(NULL), jtFreeList(NULL),
    numSpanDeps(0), numJumpTargets(0), spanDepTodo(0),
    arrayCompDepth(0),
    emitLevel(0)
{
    flags = TCF_COMPILING;
    memset(&prolog, 0, sizeof prolog);
    memset(&main, 0, sizeof main);
    current = &main;
    firstLine = prolog.currentLine = main.currentLine = lineno;
    prolog.noteMask = main.noteMask = SRCNOTE_CHUNK - 1;
    memset(&upvarMap, 0, sizeof upvarMap);
}
JSCodeGenerator::~JSCodeGenerator()
{
    JS_ARENA_RELEASE(codePool, codeMark);
    JS_ARENA_RELEASE(notePool, noteMark);

    /* NB: non-null only after OOM. */
    if (spanDeps)
        compiler->context->free(spanDeps);

    if (upvarMap.vector)
        compiler->context->free(upvarMap.vector);
}
static ptrdiff_t
EmitCheck(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t delta)
{
    jsbytecode *base, *limit, *next;
    ptrdiff_t offset, length;
    size_t incr, size;

    base = CG_BASE(cg);
    next = CG_NEXT(cg);
    limit = CG_LIMIT(cg);
    offset = next - base;
    if (next + delta > limit) {
        length = offset + delta;
        length = (length <= BYTECODE_CHUNK)
                 ? BYTECODE_CHUNK
                 : JS_BIT(JS_CeilingLog2(length));
        incr = BYTECODE_SIZE(length);
        if (!base) {
            JS_ARENA_ALLOCATE_CAST(base, jsbytecode *, cg->codePool, incr);
        } else {
            size = BYTECODE_SIZE(limit - base);
            incr -= size;
            JS_ARENA_GROW_CAST(base, jsbytecode *, cg->codePool, size, incr);
        }
        if (!base) {
            js_ReportOutOfScriptQuota(cx);
            return -1;
        }
        CG_BASE(cg) = base;
        CG_LIMIT(cg) = base + length;
        CG_NEXT(cg) = base + offset;
    }
    return offset;
}
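
/*
 * Illustrative note (added commentary, not in the original source):
 * EmitCheck grows the bytecode vector in power-of-two element counts. For
 * example, with BYTECODE_CHUNK defined as 256 above, a script whose code
 * cursor sits at offset 250 and then emits a 10-byte instruction needs
 * length 260, which rounds up to JS_BIT(JS_CeilingLog2(260)) == 512
 * jsbytecode elements.
 */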
static void
UpdateDepth(JSContext *cx, JSCodeGenerator *cg, ptrdiff_t target)
{
    jsbytecode *pc;
    JSOp op;
    const JSCodeSpec *cs;
    uintN extra, depth, nuses;
    intN ndefs;

    pc = CG_CODE(cg, target);
    op = (JSOp) *pc;
    cs = &js_CodeSpec[op];
#ifdef JS_TRACER
    extern uint8 js_opcode2extra[];
    extra = js_opcode2extra[op];
#else
    extra = 0;
#endif
    if ((cs->format & JOF_TMPSLOT_MASK) || extra) {
        depth = (uintN) cg->stackDepth +
                ((cs->format & JOF_TMPSLOT_MASK) >> JOF_TMPSLOT_SHIFT) +
                extra;
        if (depth > cg->maxStackDepth)
            cg->maxStackDepth = depth;
    }

    nuses = js_GetStackUses(cs, op, pc);
    cg->stackDepth -= nuses;
    JS_ASSERT(cg->stackDepth >= 0);
    if (cg->stackDepth < 0) {
        char numBuf[12];
        JSTokenStream *ts;

        JS_snprintf(numBuf, sizeof numBuf, "%d", target);
        ts = &cg->compiler->tokenStream;
        JS_ReportErrorFlagsAndNumber(cx, JSREPORT_WARNING,
                                     js_GetErrorMessage, NULL,
                                     JSMSG_STACK_UNDERFLOW,
                                     ts->filename ? ts->filename : "stdin",
                                     numBuf);
    }
    ndefs = cs->ndefs;
    if (ndefs < 0) {
        JSObject *blockObj;

        /* We just executed IndexParsedObject */
        JS_ASSERT(op == JSOP_ENTERBLOCK);
        JS_ASSERT(nuses == 0);
        blockObj = cg->objectList.lastbox->object;
        JS_ASSERT(STOBJ_GET_CLASS(blockObj) == &js_BlockClass);
        JS_ASSERT(JSVAL_IS_VOID(blockObj->fslots[JSSLOT_BLOCK_DEPTH]));

        OBJ_SET_BLOCK_DEPTH(cx, blockObj, cg->stackDepth);
        ndefs = OBJ_BLOCK_COUNT(cx, blockObj);
    }
    cg->stackDepth += ndefs;
    if ((uintN)cg->stackDepth > cg->maxStackDepth)
        cg->maxStackDepth = cg->stackDepth;
}
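
/*
 * Worked example (added commentary, not in the original source): for a
 * binary operator such as JSOP_ADD, js_GetStackUses reports two uses and
 * cs->ndefs is one, so UpdateDepth lowers cg->stackDepth by 2 and raises it
 * by 1, a net pop of one slot. JSOP_ENTERBLOCK is the special case above:
 * its ndefs in the opcode table is negative, and the real number of
 * definitions is the block object's slot count.
 */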
ptrdiff_t
js_Emit1(JSContext *cx, JSCodeGenerator *cg, JSOp op)
{
    ptrdiff_t offset = EmitCheck(cx, cg, op, 1);

    if (offset >= 0) {
        *CG_NEXT(cg)++ = (jsbytecode)op;
        UpdateDepth(cx, cg, offset);
    }
    return offset;
}

ptrdiff_t
js_Emit2(JSContext *cx, JSCodeGenerator *cg, JSOp op, jsbytecode op1)
{
    ptrdiff_t offset = EmitCheck(cx, cg, op, 2);

    if (offset >= 0) {
        jsbytecode *next = CG_NEXT(cg);
        next[0] = (jsbytecode)op;
        next[1] = op1;
        CG_NEXT(cg) = next + 2;
        UpdateDepth(cx, cg, offset);
    }
    return offset;
}

ptrdiff_t
js_Emit3(JSContext *cx, JSCodeGenerator *cg, JSOp op, jsbytecode op1,
         jsbytecode op2)
{
    ptrdiff_t offset = EmitCheck(cx, cg, op, 3);

    if (offset >= 0) {
        jsbytecode *next = CG_NEXT(cg);
        next[0] = (jsbytecode)op;
        next[1] = op1;
        next[2] = op2;
        CG_NEXT(cg) = next + 3;
        UpdateDepth(cx, cg, offset);
    }
    return offset;
}

ptrdiff_t
js_EmitN(JSContext *cx, JSCodeGenerator *cg, JSOp op, size_t extra)
{
    ptrdiff_t length = 1 + (ptrdiff_t)extra;
    ptrdiff_t offset = EmitCheck(cx, cg, op, length);

    if (offset >= 0) {
        jsbytecode *next = CG_NEXT(cg);
        *next = (jsbytecode)op;
        memset(next + 1, 0, BYTECODE_SIZE(extra));
        CG_NEXT(cg) = next + length;

        /*
         * Don't UpdateDepth if op's use-count comes from the immediate
         * operand yet to be stored in the extra bytes after op.
         */
        if (js_CodeSpec[op].nuses >= 0)
            UpdateDepth(cx, cg, offset);
    }
    return offset;
}
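
/*
 * Usage sketch (added commentary, not in the original source): a simple
 * one-byte opcode goes through js_Emit1, while a jump with its 16-bit
 * offset immediate uses js_Emit3, e.g.:
 *
 *     if (js_Emit1(cx, cg, JSOP_POP) < 0)
 *         return JS_FALSE;
 *     ptrdiff_t jmp = js_Emit3(cx, cg, JSOP_GOTO,
 *                              JUMP_OFFSET_HI(0), JUMP_OFFSET_LO(0));
 *
 * Variable-length instructions such as the switches reserve their operand
 * space zeroed via js_EmitN and fill it in afterward.
 */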
/* XXX too many "... statement" L10N gaffes below -- fix via js.msg! */
const char js_with_statement_str[] = "with statement";
const char js_finally_block_str[]  = "finally block";
const char js_script_str[]         = "script";

static const char *statementName[] = {
    "label statement",       /* LABEL */
    "if statement",          /* IF */
    "else statement",        /* ELSE */
    "destructuring body",    /* BODY */
    "switch statement",      /* SWITCH */
    "block",                 /* BLOCK */
    js_with_statement_str,   /* WITH */
    "catch block",           /* CATCH */
    "try block",             /* TRY */
    js_finally_block_str,    /* FINALLY */
    js_finally_block_str,    /* SUBROUTINE */
    "do loop",               /* DO_LOOP */
    "for loop",              /* FOR_LOOP */
    "for/in loop",           /* FOR_IN_LOOP */
    "while loop",            /* WHILE_LOOP */
};

JS_STATIC_ASSERT(JS_ARRAY_LENGTH(statementName) == STMT_LIMIT);

static const char *
StatementName(JSCodeGenerator *cg)
{
    if (!cg->topStmt)
        return js_script_str;
    return statementName[cg->topStmt->type];
}

static void
ReportStatementTooLarge(JSContext *cx, JSCodeGenerator *cg)
{
    JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_NEED_DIET,
                         StatementName(cg));
}
/*
 Span-dependent instructions in JS bytecode consist of the jump (JOF_JUMP)
 and switch (JOF_LOOKUPSWITCH, JOF_TABLESWITCH) format opcodes, subdivided
 into unconditional (gotos and gosubs), and conditional jumps or branches
 (which pop a value, test it, and jump depending on its value). Most jumps
 have just one immediate operand, a signed offset from the jump opcode's pc
 to the target bytecode. The lookup and table switch opcodes may contain
 many jump offsets.

 Mozilla bug #80981 (http://bugzilla.mozilla.org/show_bug.cgi?id=80981) was
 fixed by adding extended "X" counterparts to the opcodes/formats (NB: X is
 suffixed to prefer JSOP_ORX thereby avoiding a JSOP_XOR name collision for
 the extended form of the JSOP_OR branch opcode). The unextended or short
 formats have 16-bit signed immediate offset operands, the extended or long
 formats have 32-bit signed immediates. The span-dependency problem consists
 of selecting as few long instructions as possible, or about as few -- since
 jumps can span other jumps, extending one jump may cause another to need to
 be extended.

 Most JS scripts are short, so need no extended jumps. We optimize for this
 case by generating short jumps until we know a long jump is needed. After
 that point, we keep generating short jumps, but each jump's 16-bit immediate
 offset operand is actually an unsigned index into cg->spanDeps, an array of
 JSSpanDep structs. Each struct tells the top offset in the script of the
 opcode, the "before" offset of the jump (which will be the same as top for
 simplex jumps, but which will index further into the bytecode array for a
 non-initial jump offset in a lookup or table switch), the after "offset"
 adjusted during span-dependent instruction selection (initially the same
 value as the "before" offset), and the jump target (more below).

 Since we generate cg->spanDeps lazily, from within js_SetJumpOffset, we must
 ensure that all bytecode generated so far can be inspected to discover where
 the jump offset immediate operands lie within CG_CODE(cg). But the bonus is
 that we generate span-dependency records sorted by their offsets, so we can
 binary-search when trying to find a JSSpanDep for a given bytecode offset,
 or the nearest JSSpanDep at or above a given pc.

 To avoid limiting scripts to 64K jumps, if the cg->spanDeps index overflows
 65534, we store SPANDEP_INDEX_HUGE in the jump's immediate operand. This
 tells us that we need to binary-search for the cg->spanDeps entry by the
 jump opcode's bytecode offset (sd->before).

 Jump targets need to be maintained in a data structure that lets us look
 up an already-known target by its address (jumps may have a common target),
 and that also lets us update the addresses (script-relative, a.k.a. absolute
 offsets) of targets that come after a jump target (for when a jump below
 that target needs to be extended). We use an AVL tree, implemented using
 recursion, but with some tricky optimizations to its height-balancing code
 (see http://www.cmcrossroads.com/bradapp/ftp/src/libs/C++/AvlTrees.html).

 A final wrinkle: backpatch chains are linked by jump-to-jump offsets with
 positive sign, even though they link "backward" (i.e., toward lower bytecode
 address). We don't want to waste space and search time in the AVL tree for
 such temporary backpatch deltas, so we use a single-bit wildcard scheme to
 tag true JSJumpTarget pointers and encode untagged, signed (positive) deltas
 in JSSpanDep.target pointers, depending on whether the JSSpanDep has a known
 target, or is still awaiting backpatching.

 Note that backpatch chains would present a problem for BuildSpanDepTable,
 which inspects bytecode to build cg->spanDeps on demand, when the first
 short jump offset overflows. To solve this temporary problem, we emit a
 proxy bytecode (JSOP_BACKPATCH; JSOP_BACKPATCH_POP for branch ops) whose
 nuses/ndefs counts help keep the stack balanced, but whose opcode format
 distinguishes its backpatch delta immediate operand from a normal jump
 offset.
 */
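
/*
 * Worked example (added commentary, not in the original source): a short
 * JSOP_GOTO occupies one opcode byte plus a 2-byte signed offset, so it can
 * span at most 32767 bytes; its extended form JSOP_GOTOX carries a 4-byte
 * signed offset instead. Once cg->spanDeps exists, the 2-byte immediate of
 * a short jump no longer holds the span itself but an index into
 * cg->spanDeps (or SPANDEP_INDEX_HUGE when even that index overflows), and
 * the true target lives in the JSSpanDep record until OptimizeSpanDeps
 * writes final offsets back into the bytecode.
 */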
static int
BalanceJumpTargets(JSJumpTarget **jtp)
{
    JSJumpTarget *jt, *jt2, *root;
    int dir, otherDir, heightChanged;
    JSBool doubleRotate;

    jt = *jtp;
    JS_ASSERT(jt->balance != 0);

    if (jt->balance < -1) {
        dir = JT_RIGHT;
        doubleRotate = (jt->kids[JT_LEFT]->balance > 0);
    } else if (jt->balance > 1) {
        dir = JT_LEFT;
        doubleRotate = (jt->kids[JT_RIGHT]->balance < 0);
    } else {
        return 0;
    }

    otherDir = JT_OTHER_DIR(dir);
    if (doubleRotate) {
        jt2 = jt->kids[otherDir];
        *jtp = root = jt2->kids[dir];

        jt->kids[otherDir] = root->kids[dir];
        root->kids[dir] = jt;

        jt2->kids[dir] = root->kids[otherDir];
        root->kids[otherDir] = jt2;

        heightChanged = 1;
        root->kids[JT_LEFT]->balance = -JS_MAX(root->balance, 0);
        root->kids[JT_RIGHT]->balance = -JS_MIN(root->balance, 0);
        root->balance = 0;
    } else {
        *jtp = root = jt->kids[otherDir];
        jt->kids[otherDir] = root->kids[dir];
        root->kids[dir] = jt;

        heightChanged = (root->balance != 0);
        jt->balance = -((dir == JT_LEFT) ? --root->balance : ++root->balance);
    }

    return heightChanged;
}
typedef struct AddJumpTargetArgs {
    JSContext           *cx;
    JSCodeGenerator     *cg;
    ptrdiff_t           offset;
    JSJumpTarget        *node;
} AddJumpTargetArgs;

static int
AddJumpTarget(AddJumpTargetArgs *args, JSJumpTarget **jtp)
{
    JSJumpTarget *jt;
    int balanceDelta;

    jt = *jtp;
    if (!jt) {
        JSCodeGenerator *cg = args->cg;

        jt = cg->jtFreeList;
        if (jt) {
            cg->jtFreeList = jt->kids[JT_LEFT];
        } else {
            JS_ARENA_ALLOCATE_CAST(jt, JSJumpTarget *, &args->cx->tempPool,
                                   sizeof *jt);
            if (!jt) {
                js_ReportOutOfScriptQuota(args->cx);
                return 0;
            }
        }
        jt->offset = args->offset;
        jt->balance = 0;
        jt->kids[JT_LEFT] = jt->kids[JT_RIGHT] = NULL;
        cg->numJumpTargets++;
        args->node = jt;
        *jtp = jt;
        return 1;
    }

    if (jt->offset == args->offset) {
        args->node = jt;
        return 0;
    }

    if (args->offset < jt->offset)
        balanceDelta = -AddJumpTarget(args, &jt->kids[JT_LEFT]);
    else
        balanceDelta = AddJumpTarget(args, &jt->kids[JT_RIGHT]);
    if (!args->node)
        return 0;

    jt->balance += balanceDelta;
    return (balanceDelta && jt->balance)
           ? 1 - BalanceJumpTargets(jtp)
           : 0;
}

#ifdef DEBUG_brendan
static int AVLCheck(JSJumpTarget *jt)
{
    int lh, rh;

    if (!jt) return 0;
    JS_ASSERT(-1 <= jt->balance && jt->balance <= 1);
    lh = AVLCheck(jt->kids[JT_LEFT]);
    rh = AVLCheck(jt->kids[JT_RIGHT]);
    JS_ASSERT(jt->balance == rh - lh);
    return 1 + JS_MAX(lh, rh);
}
#endif
static JSBool
SetSpanDepTarget(JSContext *cx, JSCodeGenerator *cg, JSSpanDep *sd,
                 ptrdiff_t off)
{
    AddJumpTargetArgs args;

    if (off < JUMPX_OFFSET_MIN || JUMPX_OFFSET_MAX < off) {
        ReportStatementTooLarge(cx, cg);
        return JS_FALSE;
    }

    args.cx = cx;
    args.cg = cg;
    args.offset = sd->top + off;
    args.node = NULL;
    AddJumpTarget(&args, &cg->jumpTargets);
    if (!args.node)
        return JS_FALSE;

#ifdef DEBUG_brendan
    AVLCheck(cg->jumpTargets);
#endif

    SD_SET_TARGET(sd, args.node);
    return JS_TRUE;
}

#define SPANDEPS_MIN            256
#define SPANDEPS_SIZE(n)        ((n) * sizeof(JSSpanDep))
#define SPANDEPS_SIZE_MIN       SPANDEPS_SIZE(SPANDEPS_MIN)
static JSBool
AddSpanDep(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc, jsbytecode *pc2,
           ptrdiff_t off)
{
    uintN index;
    JSSpanDep *sdbase, *sd;
    size_t size;

    index = cg->numSpanDeps;
    if (index + 1 == 0) {
        ReportStatementTooLarge(cx, cg);
        return JS_FALSE;
    }

    if ((index & (index - 1)) == 0 &&
        (!(sdbase = cg->spanDeps) || index >= SPANDEPS_MIN)) {
        size = sdbase ? SPANDEPS_SIZE(index) : SPANDEPS_SIZE_MIN / 2;
        sdbase = (JSSpanDep *) cx->realloc(sdbase, size + size);
        if (!sdbase)
            return JS_FALSE;
        cg->spanDeps = sdbase;
    }

    cg->numSpanDeps = index + 1;
    sd = cg->spanDeps + index;
    sd->top = pc - CG_BASE(cg);
    sd->offset = sd->before = pc2 - CG_BASE(cg);

    if (js_CodeSpec[*pc].format & JOF_BACKPATCH) {
        /* Jump offset will be backpatched if off is a non-zero "bpdelta". */
        if (off != 0) {
            JS_ASSERT(off >= 1 + JUMP_OFFSET_LEN);
            if (off > BPDELTA_MAX) {
                ReportStatementTooLarge(cx, cg);
                return JS_FALSE;
            }
        }
        SD_SET_BPDELTA(sd, off);
    } else if (off == 0) {
        /* Jump offset will be patched directly, without backpatch chaining. */
        SD_SET_TARGET(sd, 0);
    } else {
        /* The jump offset in off is non-zero, therefore it's already known. */
        if (!SetSpanDepTarget(cx, cg, sd, off))
            return JS_FALSE;
    }

    if (index > SPANDEP_INDEX_MAX)
        index = SPANDEP_INDEX_HUGE;
    SET_SPANDEP_INDEX(pc2, index);
    return JS_TRUE;
}
static jsbytecode *
AddSwitchSpanDeps(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc)
{
    JSOp op;
    jsbytecode *pc2;
    ptrdiff_t off;
    jsint low, high;
    uintN njumps, indexlen;

    op = (JSOp) *pc;
    JS_ASSERT(op == JSOP_TABLESWITCH || op == JSOP_LOOKUPSWITCH);
    pc2 = pc;
    off = GET_JUMP_OFFSET(pc2);
    if (!AddSpanDep(cx, cg, pc, pc2, off))
        return NULL;
    pc2 += JUMP_OFFSET_LEN;
    if (op == JSOP_TABLESWITCH) {
        low = GET_JUMP_OFFSET(pc2);
        pc2 += JUMP_OFFSET_LEN;
        high = GET_JUMP_OFFSET(pc2);
        pc2 += JUMP_OFFSET_LEN;
        njumps = (uintN) (high - low + 1);
        indexlen = 0;
    } else {
        njumps = GET_UINT16(pc2);
        pc2 += UINT16_LEN;
        indexlen = INDEX_LEN;
    }
    while (njumps) {
        --njumps;
        pc2 += indexlen;
        off = GET_JUMP_OFFSET(pc2);
        if (!AddSpanDep(cx, cg, pc, pc2, off))
            return NULL;
        pc2 += JUMP_OFFSET_LEN;
    }
    return 1 + pc2;
}
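
/*
 * Layout note (added commentary, not in the original source): as parsed
 * above, a short JSOP_TABLESWITCH is [op, default offset, low, high,
 * then (high - low + 1) jump offsets], while a short JSOP_LOOKUPSWITCH is
 * [op, default offset, uint16 case count, then (index, offset) pairs]. So
 * a `switch (x) { case 3: ... case 4: ... case 5: ... }` compiled as a
 * tableswitch records njumps == 3 span dependencies here, plus one for the
 * default offset.
 */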
static JSBool
BuildSpanDepTable(JSContext *cx, JSCodeGenerator *cg)
{
    jsbytecode *pc, *end;
    JSOp op;
    const JSCodeSpec *cs;
    ptrdiff_t off;

    pc = CG_BASE(cg) + cg->spanDepTodo;
    end = CG_NEXT(cg);
    while (pc != end) {
        JS_ASSERT(pc < end);
        op = (JSOp)*pc;
        cs = &js_CodeSpec[op];

        switch (JOF_TYPE(cs->format)) {
          case JOF_TABLESWITCH:
          case JOF_LOOKUPSWITCH:
            pc = AddSwitchSpanDeps(cx, cg, pc);
            if (!pc)
                return JS_FALSE;
            break;

          case JOF_JUMP:
            off = GET_JUMP_OFFSET(pc);
            if (!AddSpanDep(cx, cg, pc, pc, off))
                return JS_FALSE;
            /* FALL THROUGH */
          default:
            pc += cs->length;
            break;
        }
    }

    return JS_TRUE;
}
static JSSpanDep *
GetSpanDep(JSCodeGenerator *cg, jsbytecode *pc)
{
    uintN index;
    ptrdiff_t offset;
    int lo, hi, mid;
    JSSpanDep *sd;

    index = GET_SPANDEP_INDEX(pc);
    if (index != SPANDEP_INDEX_HUGE)
        return cg->spanDeps + index;

    offset = pc - CG_BASE(cg);
    lo = 0;
    hi = cg->numSpanDeps - 1;
    while (lo <= hi) {
        mid = (lo + hi) / 2;
        sd = cg->spanDeps + mid;
        if (sd->before == offset)
            return sd;
        if (sd->before < offset)
            lo = mid + 1;
        else
            hi = mid - 1;
    }

    JS_ASSERT(0);
    return NULL;
}
static JSBool
SetBackPatchDelta(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc,
                  ptrdiff_t delta)
{
    JSSpanDep *sd;

    JS_ASSERT(delta >= 1 + JUMP_OFFSET_LEN);
    if (!cg->spanDeps && delta < JUMP_OFFSET_MAX) {
        SET_JUMP_OFFSET(pc, delta);
        return JS_TRUE;
    }

    if (delta > BPDELTA_MAX) {
        ReportStatementTooLarge(cx, cg);
        return JS_FALSE;
    }

    if (!cg->spanDeps && !BuildSpanDepTable(cx, cg))
        return JS_FALSE;

    sd = GetSpanDep(cg, pc);
    JS_ASSERT(SD_GET_BPDELTA(sd) == 0);
    SD_SET_BPDELTA(sd, delta);
    return JS_TRUE;
}
static void
UpdateJumpTargets(JSJumpTarget *jt, ptrdiff_t pivot, ptrdiff_t delta)
{
    if (jt->offset > pivot) {
        jt->offset += delta;
        if (jt->kids[JT_LEFT])
            UpdateJumpTargets(jt->kids[JT_LEFT], pivot, delta);
    }
    if (jt->kids[JT_RIGHT])
        UpdateJumpTargets(jt->kids[JT_RIGHT], pivot, delta);
}
static JSSpanDep *
FindNearestSpanDep(JSCodeGenerator *cg, ptrdiff_t offset, int lo,
                   JSSpanDep *guard)
{
    int num, hi, mid;
    JSSpanDep *sdbase, *sd;

    num = cg->numSpanDeps;
    JS_ASSERT(num > 0);
    hi = num - 1;
    sdbase = cg->spanDeps;
    while (lo <= hi) {
        mid = (lo + hi) / 2;
        sd = sdbase + mid;
        if (sd->before == offset)
            return sd;
        if (sd->before < offset)
            lo = mid + 1;
        else
            hi = mid - 1;
    }
    if (lo == num)
        return guard;
    sd = sdbase + lo;
    JS_ASSERT(sd->before >= offset && (lo == 0 || sd[-1].before < offset));
    return sd;
}
static void
FreeJumpTargets(JSCodeGenerator *cg, JSJumpTarget *jt)
{
    if (jt->kids[JT_LEFT])
        FreeJumpTargets(cg, jt->kids[JT_LEFT]);
    if (jt->kids[JT_RIGHT])
        FreeJumpTargets(cg, jt->kids[JT_RIGHT]);
    jt->kids[JT_LEFT] = cg->jtFreeList;
    cg->jtFreeList = jt;
}
static JSBool
OptimizeSpanDeps(JSContext *cx, JSCodeGenerator *cg)
{
    jsbytecode *pc, *oldpc, *base, *limit, *next;
    JSSpanDep *sd, *sd2, *sdbase, *sdlimit, *sdtop, guard;
    ptrdiff_t offset, growth, delta, top, pivot, span, length, target;
    JSBool done;
    JSOp op;
    uint32 type;
    size_t size, incr;
    jssrcnote *sn, *snlimit;
    JSSrcNoteSpec *spec;
    uintN i, n, noteIndex;
    JSTryNode *tryNode;
#ifdef DEBUG_brendan
    int passes = 0;
#endif

    base = CG_BASE(cg);
    sdbase = cg->spanDeps;
    sdlimit = sdbase + cg->numSpanDeps;
    offset = CG_OFFSET(cg);
    growth = 0;

    do {
        done = JS_TRUE;
        delta = 0;
        top = pivot = -1;
        sdtop = NULL;
        pc = NULL;
        op = JSOP_NOP;
        type = 0;
#ifdef DEBUG_brendan
        passes++;
#endif

        for (sd = sdbase; sd < sdlimit; sd++) {
            JS_ASSERT(JT_HAS_TAG(sd->target));
            sd->offset += delta;

            if (sd->top != top) {
                sdtop = sd;
                top = sd->top;
                JS_ASSERT(top == sd->before);
                pivot = sd->offset;
                pc = base + top;
                op = (JSOp) *pc;
                type = JOF_OPTYPE(op);
                if (JOF_TYPE_IS_EXTENDED_JUMP(type)) {
                    /*
                     * We already extended all the jump offset operands for
                     * the opcode at sd->top. Jumps and branches have only
                     * one jump offset operand, but switches have many, all
                     * of which are adjacent in cg->spanDeps.
                     */
                    continue;
                }

                JS_ASSERT(type == JOF_JUMP ||
                          type == JOF_TABLESWITCH ||
                          type == JOF_LOOKUPSWITCH);
            }

            if (!JOF_TYPE_IS_EXTENDED_JUMP(type)) {
                span = SD_SPAN(sd, pivot);
                if (span < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < span) {
                    ptrdiff_t deltaFromTop = 0;

                    done = JS_FALSE;

                    switch (op) {
                      case JSOP_GOTO:         op = JSOP_GOTOX; break;
                      case JSOP_IFEQ:         op = JSOP_IFEQX; break;
                      case JSOP_IFNE:         op = JSOP_IFNEX; break;
                      case JSOP_OR:           op = JSOP_ORX; break;
                      case JSOP_AND:          op = JSOP_ANDX; break;
                      case JSOP_GOSUB:        op = JSOP_GOSUBX; break;
                      case JSOP_CASE:         op = JSOP_CASEX; break;
                      case JSOP_DEFAULT:      op = JSOP_DEFAULTX; break;
                      case JSOP_TABLESWITCH:  op = JSOP_TABLESWITCHX; break;
                      case JSOP_LOOKUPSWITCH: op = JSOP_LOOKUPSWITCHX; break;
                      default:
                        ReportStatementTooLarge(cx, cg);
                        return JS_FALSE;
                    }
                    *pc = (jsbytecode) op;

                    for (sd2 = sdtop; sd2 < sdlimit && sd2->top == top; sd2++) {
                        if (sd2 <= sd) {
                            /*
                             * sd2->offset already includes delta as it stood
                             * before we entered this loop, but it must also
                             * include the delta relative to top due to all the
                             * extended jump offset immediates for the opcode
                             * starting at top, which we extend in this loop.
                             *
                             * If there is only one extended jump offset, then
                             * sd2->offset won't change and this for loop will
                             * iterate once only.
                             */
                            sd2->offset += deltaFromTop;
                            deltaFromTop += JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN;
                        } else {
                            /*
                             * sd2 comes after sd, and won't be revisited by
                             * the outer for loop, so we have to increase its
                             * offset by delta, not merely by deltaFromTop.
                             */
                            sd2->offset += delta;
                        }

                        delta += JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN;
                        UpdateJumpTargets(cg->jumpTargets, sd2->offset,
                                          JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN);
                    }
                    sd = sd2 - 1;
                }
            }
        }

        growth += delta;
    } while (!done);
    if (growth) {
#ifdef DEBUG_brendan
        JSTokenStream *ts = &cg->compiler->tokenStream;

        printf("%s:%u: %u/%u jumps extended in %d passes (%d=%d+%d)\n",
               ts->filename ? ts->filename : "stdin", cg->firstLine,
               growth / (JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN), cg->numSpanDeps,
               passes, offset + growth, offset, growth);
#endif

        /*
         * Ensure that we have room for the extended jumps, but don't round up
         * to a power of two -- we're done generating code, so we cut to fit.
         */
        limit = CG_LIMIT(cg);
        length = offset + growth;
        next = base + length;
        if (next > limit) {
            JS_ASSERT(length > BYTECODE_CHUNK);
            size = BYTECODE_SIZE(limit - base);
            incr = BYTECODE_SIZE(length) - size;
            JS_ARENA_GROW_CAST(base, jsbytecode *, cg->codePool, size, incr);
            if (!base) {
                js_ReportOutOfScriptQuota(cx);
                return JS_FALSE;
            }
            CG_BASE(cg) = base;
            CG_LIMIT(cg) = next = base + length;
        }
        CG_NEXT(cg) = next;

        /*
         * Set up a fake span dependency record to guard the end of the code
         * being generated. This guard record is returned as a fencepost by
         * FindNearestSpanDep if there is no real spandep at or above a given
         * unextended code offset.
         */
        guard.top = -1;
        guard.offset = offset + growth;
        guard.before = offset;
        guard.target = NULL;
    }

    /*
     * Now work backwards through the span dependencies, copying chunks of
     * bytecode between each extended jump toward the end of the grown code
     * space, and restoring immediate offset operands for all jump bytecodes.
     * The first chunk of bytecodes, starting at base and ending at the first
     * extended jump offset (NB: this chunk includes the operation bytecode
     * just before that immediate jump offset), doesn't need to be copied.
     */
    JS_ASSERT(sd == sdlimit);
    top = -1;
    while (--sd >= sdbase) {
        if (sd->top != top) {
            top = sd->top;
            op = (JSOp) base[top];
            type = JOF_OPTYPE(op);

            for (sd2 = sd - 1; sd2 >= sdbase && sd2->top == top; sd2--)
                continue;
            sd2++;
            pivot = sd2->offset;
            JS_ASSERT(top == sd2->before);
        }

        oldpc = base + sd->before;
        span = SD_SPAN(sd, pivot);

        /*
         * If this jump didn't need to be extended, restore its span immediate
         * offset operand now, overwriting the index of sd within cg->spanDeps
         * that was stored temporarily after *pc when BuildSpanDepTable ran.
         *
         * Note that span might fit in 16 bits even for an extended jump op,
         * if the op has multiple span operands, not all of which overflowed
         * (e.g. JSOP_LOOKUPSWITCH or JSOP_TABLESWITCH where some cases are in
         * range for a short jump, but others are not).
         */
        if (!JOF_TYPE_IS_EXTENDED_JUMP(type)) {
            JS_ASSERT(JUMP_OFFSET_MIN <= span && span <= JUMP_OFFSET_MAX);
            SET_JUMP_OFFSET(oldpc, span);
            continue;
        }

        /*
         * Set up parameters needed to copy the next run of bytecode starting
         * at offset (which is a cursor into the unextended, original bytecode
         * vector), down to sd->before (a cursor of the same scale as offset,
         * it's the index of the original jump pc). Reuse delta to count the
         * nominal number of bytes to copy.
         */
        pc = base + sd->offset;
        delta = offset - sd->before;
        JS_ASSERT(delta >= 1 + JUMP_OFFSET_LEN);

        /*
         * Don't bother copying the jump offset we're about to reset, but do
         * copy the bytecode at oldpc (which comes just before its immediate
         * jump offset operand), on the next iteration through the loop, by
         * including it in offset's new value.
         */
        offset = sd->before + 1;
        size = BYTECODE_SIZE(delta - (1 + JUMP_OFFSET_LEN));
        if (size) {
            memmove(pc + 1 + JUMPX_OFFSET_LEN,
                    oldpc + 1 + JUMP_OFFSET_LEN,
                    size);
        }

        SET_JUMPX_OFFSET(pc, span);
    }
    if (growth) {
        /*
         * Fix source note deltas. Don't hardwire the delta fixup adjustment,
         * even though currently it must be JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN
         * at each sd that moved. The future may bring different offset sizes
         * for span-dependent instruction operands. However, we fix only main
         * notes here, not prolog notes -- we know that prolog opcodes are not
         * span-dependent, and aren't likely ever to be.
         */
        offset = growth = 0;
        sd = sdbase;
        for (sn = cg->main.notes, snlimit = sn + cg->main.noteCount;
             sn < snlimit;
             sn = SN_NEXT(sn)) {
            /*
             * Recall that the offset of a given note includes its delta, and
             * tells the offset of the annotated bytecode from the main entry
             * point of the script.
             */
            offset += SN_DELTA(sn);
            while (sd < sdlimit && sd->before < offset) {
                /*
                 * To compute the delta to add to sn, we need to look at the
                 * spandep after sd, whose offset - (before + growth) tells by
                 * how many bytes sd's instruction grew.
                 */
                sd2 = sd + 1;
                if (sd2 == sdlimit)
                    sd2 = &guard;
                delta = sd2->offset - (sd2->before + growth);
                if (delta > 0) {
                    JS_ASSERT(delta == JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN);
                    sn = js_AddToSrcNoteDelta(cx, cg, sn, delta);
                    if (!sn)
                        return JS_FALSE;
                    snlimit = cg->main.notes + cg->main.noteCount;
                    growth += delta;
                }
                sd++;
            }

            /*
             * If sn has span-dependent offset operands, check whether each
             * covers further span-dependencies, and increase those operands
             * accordingly. Some source notes measure offset not from the
             * annotated pc, but from that pc plus some small bias. NB: we
             * assume that spec->offsetBias can't itself span span-dependent
             * instructions!
             */
            spec = &js_SrcNoteSpec[SN_TYPE(sn)];
            if (spec->isSpanDep) {
                pivot = offset + spec->offsetBias;
                n = spec->arity;
                for (i = 0; i < n; i++) {
                    span = js_GetSrcNoteOffset(sn, i);
                    if (span == 0)
                        continue;
                    target = pivot + span * spec->isSpanDep;
                    sd2 = FindNearestSpanDep(cg, target,
                                             (target >= pivot)
                                             ? sd - sdbase
                                             : 0,
                                             &guard);

                    /*
                     * Increase target by sd2's before-vs-after offset delta,
                     * which is absolute (i.e., relative to start of script,
                     * as is target). Recompute the span by subtracting its
                     * adjusted pivot from target.
                     */
                    target += sd2->offset - sd2->before;
                    span = target - (pivot + growth);
                    span *= spec->isSpanDep;
                    noteIndex = sn - cg->main.notes;
                    if (!js_SetSrcNoteOffset(cx, cg, noteIndex, i, span))
                        return JS_FALSE;
                    sn = cg->main.notes + noteIndex;
                    snlimit = cg->main.notes + cg->main.noteCount;
                }
            }
        }
        cg->main.lastNoteOffset += growth;

        /*
         * Fix try/catch notes (O(numTryNotes * log2(numSpanDeps)), but it's
         * not clear how we can beat that).
         */
        for (tryNode = cg->lastTryNode; tryNode; tryNode = tryNode->prev) {
            /*
             * First, look for the nearest span dependency at/above tn->start.
             * There may not be any such spandep, in which case the guard will
             * be returned.
             */
            offset = tryNode->note.start;
            sd = FindNearestSpanDep(cg, offset, 0, &guard);
            delta = sd->offset - sd->before;
            tryNode->note.start = offset + delta;

            /*
             * Next, find the nearest spandep at/above tn->start + tn->length.
             * Use its delta minus tn->start's delta to increase tn->length.
             */
            length = tryNode->note.length;
            sd2 = FindNearestSpanDep(cg, offset + length, sd - sdbase, &guard);
            if (sd2 != sd) {
                tryNode->note.length =
                    length + sd2->offset - sd2->before - delta;
            }
        }
    }
#ifdef DEBUG_brendan
  {
    uintN bigspans = 0;
    top = -1;
    for (sd = sdbase; sd < sdlimit; sd++) {
        offset = sd->offset;

        /* NB: sd->top cursors into the original, unextended bytecode vector. */
        if (sd->top != top) {
            JS_ASSERT(top == -1 ||
                      !JOF_TYPE_IS_EXTENDED_JUMP(type) ||
                      bigspans != 0);
            bigspans = 0;
            top = sd->top;
            JS_ASSERT(top == sd->before);
            op = (JSOp) base[offset];
            type = JOF_OPTYPE(op);
            JS_ASSERT(type == JOF_JUMP ||
                      type == JOF_JUMPX ||
                      type == JOF_TABLESWITCH ||
                      type == JOF_TABLESWITCHX ||
                      type == JOF_LOOKUPSWITCH ||
                      type == JOF_LOOKUPSWITCHX);
            pivot = offset;
        }

        pc = base + offset;
        if (JOF_TYPE_IS_EXTENDED_JUMP(type)) {
            span = GET_JUMPX_OFFSET(pc);
            if (span < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < span) {
                bigspans++;
            } else {
                JS_ASSERT(type == JOF_TABLESWITCHX ||
                          type == JOF_LOOKUPSWITCHX);
            }
        } else {
            span = GET_JUMP_OFFSET(pc);
        }
        JS_ASSERT(SD_SPAN(sd, pivot) == span);
    }
    JS_ASSERT(!JOF_TYPE_IS_EXTENDED_JUMP(type) || bigspans != 0);
  }
#endif

    /*
     * Reset so we optimize at most once -- cg may be used for further code
     * generation of successive, independent, top-level statements. No jump
     * can span top-level statements, because JS lacks goto.
     */
    size = SPANDEPS_SIZE(JS_BIT(JS_CeilingLog2(cg->numSpanDeps)));
    cx->free(cg->spanDeps);
    cg->spanDeps = NULL;
    FreeJumpTargets(cg, cg->jumpTargets);
    cg->jumpTargets = NULL;
    cg->numSpanDeps = cg->numJumpTargets = 0;
    cg->spanDepTodo = CG_OFFSET(cg);
    return JS_TRUE;
}
static ptrdiff_t
EmitJump(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t off)
{
    JSBool extend;
    ptrdiff_t jmp;
    jsbytecode *pc;

    extend = off < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < off;
    if (extend && !cg->spanDeps && !BuildSpanDepTable(cx, cg))
        return -1;

    jmp = js_Emit3(cx, cg, op, JUMP_OFFSET_HI(off), JUMP_OFFSET_LO(off));
    if (jmp >= 0 && (extend || cg->spanDeps)) {
        pc = CG_CODE(cg, jmp);
        if (!AddSpanDep(cx, cg, pc, pc, off))
            return -1;
    }
    return jmp;
}
static ptrdiff_t
GetJumpOffset(JSCodeGenerator *cg, jsbytecode *pc)
{
    JSSpanDep *sd;
    JSJumpTarget *jt;
    ptrdiff_t top;

    if (!cg->spanDeps)
        return GET_JUMP_OFFSET(pc);

    sd = GetSpanDep(cg, pc);
    jt = sd->target;
    if (!JT_HAS_TAG(jt))
        return JT_TO_BPDELTA(jt);

    top = sd->top;
    while (--sd >= cg->spanDeps && sd->top == top)
        continue;
    sd++;
    return JT_CLR_TAG(jt)->offset - sd->offset;
}
JSBool
js_SetJumpOffset(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc,
                 ptrdiff_t off)
{
    if (!cg->spanDeps) {
        if (JUMP_OFFSET_MIN <= off && off <= JUMP_OFFSET_MAX) {
            SET_JUMP_OFFSET(pc, off);
            return JS_TRUE;
        }

        if (!BuildSpanDepTable(cx, cg))
            return JS_FALSE;
    }

    return SetSpanDepTarget(cx, cg, GetSpanDep(cg, pc), off);
}
bool
JSTreeContext::inStatement(JSStmtType type)
{
    for (JSStmtInfo *stmt = topStmt; stmt; stmt = stmt->down) {
        if (stmt->type == type)
            return true;
    }
    return false;
}
bool
JSTreeContext::ensureSharpSlots()
{
#if JS_HAS_SHARP_VARS
    JS_STATIC_ASSERT(SHARP_NSLOTS == 2);

    if (sharpSlotBase >= 0) {
        JS_ASSERT(flags & TCF_HAS_SHARPS);
        return true;
    }

    JS_ASSERT(!(flags & TCF_HAS_SHARPS));
    if (flags & TCF_IN_FUNCTION) {
        JSContext *cx = compiler->context;
        JSAtom *sharpArrayAtom = js_Atomize(cx, "#array", 6, 0);
        JSAtom *sharpDepthAtom = js_Atomize(cx, "#depth", 6, 0);
        if (!sharpArrayAtom || !sharpDepthAtom)
            return false;

        sharpSlotBase = fun->u.i.nvars;
        if (!js_AddLocal(cx, fun, sharpArrayAtom, JSLOCAL_VAR))
            return false;
        if (!js_AddLocal(cx, fun, sharpDepthAtom, JSLOCAL_VAR))
            return false;
    } else {
        /*
         * JSCompiler::compileScript will rebase immediate operands indexing
         * the sharp slots to come at the end of the global script's |nfixed|
         * slots storage, after gvars and regexps.
         */
        sharpSlotBase = 0;
    }
    flags |= TCF_HAS_SHARPS;
#endif
    return true;
}
void
js_PushStatement(JSTreeContext *tc, JSStmtInfo *stmt, JSStmtType type,
                 ptrdiff_t top)
{
    stmt->type = type;
    stmt->flags = 0;
    stmt->blockid = tc->blockid();
    SET_STATEMENT_TOP(stmt, top);
    stmt->label = NULL;
    JS_ASSERT(!stmt->blockObj);
    stmt->down = tc->topStmt;
    tc->topStmt = stmt;
    if (STMT_LINKS_SCOPE(stmt)) {
        stmt->downScope = tc->topScopeStmt;
        tc->topScopeStmt = stmt;
    } else {
        stmt->downScope = NULL;
    }
}
void
js_PushBlockScope(JSTreeContext *tc, JSStmtInfo *stmt, JSObject *blockObj,
                  ptrdiff_t top)
{
    js_PushStatement(tc, stmt, STMT_BLOCK, top);
    stmt->flags |= SIF_SCOPE;
    STOBJ_SET_PARENT(blockObj, tc->blockChain);
    stmt->downScope = tc->topScopeStmt;
    tc->topScopeStmt = stmt;
    tc->blockChain = blockObj;
    stmt->blockObj = blockObj;
}
/*
 * Emit a backpatch op with offset pointing to the previous jump of this type,
 * so that we can walk back up the chain fixing up the op and jump offset.
 */
static ptrdiff_t
EmitBackPatchOp(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t *lastp)
{
    ptrdiff_t offset, delta;

    offset = CG_OFFSET(cg);
    delta = offset - *lastp;
    *lastp = offset;
    JS_ASSERT(delta > 0);
    return EmitJump(cx, cg, op, delta);
}
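
/*
 * Chain sketch (added commentary, not in the original source): the chain
 * head starts at -1, so the first backpatch op stores its own offset plus
 * one as its delta. If, say, three breaks emit backpatch ops at offsets 10,
 * 25, and 40, the ops store deltas 11, 15, and 15, and *lastp ends up 40.
 * BackPatch below then starts at CG_CODE(cg, 40) and walks pc -= delta
 * until it reaches the CG_CODE(cg, -1) sentinel, rewriting each op and its
 * jump offset along the way.
 */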
/*
 * Macro to emit a bytecode followed by a uint16 immediate operand stored in
 * big-endian order, used for arg and var numbers as well as for atomIndexes.
 * NB: We use cx and cg from our caller's lexical environment, and return
 * false on error.
 */
#define EMIT_UINT16_IMM_OP(op, i)                                             \
    JS_BEGIN_MACRO                                                            \
        if (js_Emit3(cx, cg, op, UINT16_HI(i), UINT16_LO(i)) < 0)             \
            return JS_FALSE;                                                  \
    JS_END_MACRO

#define EMIT_UINT16PAIR_IMM_OP(op, i, j)                                      \
    JS_BEGIN_MACRO                                                            \
        ptrdiff_t off_ = js_EmitN(cx, cg, op, 2 * UINT16_LEN);                \
        if (off_ < 0)                                                         \
            return JS_FALSE;                                                  \
        jsbytecode *pc_ = CG_CODE(cg, off_);                                  \
        SET_UINT16(pc_, i);                                                   \
        pc_ += UINT16_LEN;                                                    \
        SET_UINT16(pc_, j);                                                   \
    JS_END_MACRO
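
/*
 * Usage note (added commentary, not in the original source): these macros
 * assume cx and cg are in scope and return JS_FALSE from the *caller* on
 * error, so they only fit functions with a JSBool-compatible return type.
 * FlushPops below, for example, uses EMIT_UINT16_IMM_OP(JSOP_POPN, *npops)
 * to emit a pop-N instruction with its uint16 count operand.
 */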
static JSBool
FlushPops(JSContext *cx, JSCodeGenerator *cg, intN *npops)
{
    JS_ASSERT(*npops != 0);
    if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
        return JS_FALSE;
    EMIT_UINT16_IMM_OP(JSOP_POPN, *npops);
    *npops = 0;
    return JS_TRUE;
}
/*
 * Emit additional bytecode(s) for non-local jumps.
 */
static JSBool
EmitNonLocalJumpFixup(JSContext *cx, JSCodeGenerator *cg, JSStmtInfo *toStmt)
{
    intN depth, npops;
    JSStmtInfo *stmt;

    /*
     * The non-local jump fixup we emit will unbalance cg->stackDepth, because
     * the fixup replicates balanced code such as JSOP_LEAVEWITH emitted at the
     * end of a with statement, so we save cg->stackDepth here and restore it
     * just before a successful return.
     */
    depth = cg->stackDepth;
    npops = 0;

#define FLUSH_POPS() if (npops && !FlushPops(cx, cg, &npops)) return JS_FALSE

    for (stmt = cg->topStmt; stmt != toStmt; stmt = stmt->down) {
        switch (stmt->type) {
          case STMT_FINALLY:
            FLUSH_POPS();
            if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
                return JS_FALSE;
            if (EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &GOSUBS(*stmt)) < 0)
                return JS_FALSE;
            break;

          case STMT_WITH:
            /* There's a With object on the stack that we need to pop. */
            FLUSH_POPS();
            if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
                return JS_FALSE;
            if (js_Emit1(cx, cg, JSOP_LEAVEWITH) < 0)
                return JS_FALSE;
            break;

          case STMT_FOR_IN_LOOP:
            /*
             * The iterator and the object being iterated need to be popped.
             */
            FLUSH_POPS();
            if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
                return JS_FALSE;
            if (js_Emit1(cx, cg, JSOP_ENDITER) < 0)
                return JS_FALSE;
            break;

          case STMT_SUBROUTINE:
            /*
             * There's a [exception or hole, retsub pc-index] pair on the
             * stack that we need to pop.
             */
            npops += 2;
            break;

          default:;
        }

        if (stmt->flags & SIF_SCOPE) {
            uintN i;

            /* There is a Block object with locals on the stack to pop. */
            FLUSH_POPS();
            if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
                return JS_FALSE;
            i = OBJ_BLOCK_COUNT(cx, stmt->blockObj);
            EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK, i);
        }
    }

    FLUSH_POPS();
    cg->stackDepth = depth;
    return JS_TRUE;

#undef FLUSH_POPS
}
static ptrdiff_t
EmitGoto(JSContext *cx, JSCodeGenerator *cg, JSStmtInfo *toStmt,
         ptrdiff_t *lastp, JSAtomListElement *label, JSSrcNoteType noteType)
{
    intN index;

    if (!EmitNonLocalJumpFixup(cx, cg, toStmt))
        return -1;

    if (label)
        index = js_NewSrcNote2(cx, cg, noteType, (ptrdiff_t) ALE_INDEX(label));
    else if (noteType != SRC_NULL)
        index = js_NewSrcNote(cx, cg, noteType);
    else
        index = 0;
    if (index < 0)
        return -1;

    return EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, lastp);
}
static JSBool
BackPatch(JSContext *cx, JSCodeGenerator *cg, ptrdiff_t last,
          jsbytecode *target, jsbytecode op)
{
    jsbytecode *pc, *stop;
    ptrdiff_t delta, span;

    pc = CG_CODE(cg, last);
    stop = CG_CODE(cg, -1);
    while (pc != stop) {
        delta = GetJumpOffset(cg, pc);
        span = target - pc;
        CHECK_AND_SET_JUMP_OFFSET(cx, cg, pc, span);

        /*
         * Set *pc after jump offset in case bpdelta didn't overflow, but span
         * does (if so, CHECK_AND_SET_JUMP_OFFSET might call BuildSpanDepTable
         * and need to see the JSOP_BACKPATCH* op at *pc).
         */
        *pc = op;
        pc -= delta;
    }
    return JS_TRUE;
}
void
js_PopStatement(JSTreeContext *tc)
{
    JSStmtInfo *stmt;

    stmt = tc->topStmt;
    tc->topStmt = stmt->down;
    if (STMT_LINKS_SCOPE(stmt)) {
        tc->topScopeStmt = stmt->downScope;
        if (stmt->flags & SIF_SCOPE) {
            tc->blockChain = STOBJ_GET_PARENT(stmt->blockObj);
            JS_SCOPE_DEPTH_METERING(--tc->scopeDepth);
        }
    }
}
JSBool
js_PopStatementCG(JSContext *cx, JSCodeGenerator *cg)
{
    JSStmtInfo *stmt;

    stmt = cg->topStmt;
    if (!STMT_IS_TRYING(stmt) &&
        (!BackPatch(cx, cg, stmt->breaks, CG_NEXT(cg), JSOP_GOTO) ||
         !BackPatch(cx, cg, stmt->continues, CG_CODE(cg, stmt->update),
                    JSOP_GOTO))) {
        return JS_FALSE;
    }
    js_PopStatement(cg);
    return JS_TRUE;
}
JSBool
js_DefineCompileTimeConstant(JSContext *cx, JSCodeGenerator *cg, JSAtom *atom,
                             JSParseNode *pn)
{
    jsdouble dval;
    jsint ival;
    JSAtom *valueAtom;
    jsval v;
    JSAtomListElement *ale;

    /* XXX just do numbers for now */
    if (pn->pn_type == TOK_NUMBER) {
        dval = pn->pn_dval;
        if (JSDOUBLE_IS_INT(dval, ival) && INT_FITS_IN_JSVAL(ival)) {
            v = INT_TO_JSVAL(ival);
        } else {
            /*
             * We atomize double to root a jsdouble instance that we wrap as
             * jsval and store in cg->constList. This works because atoms are
             * protected from GC during compilation.
             */
            valueAtom = js_AtomizeDouble(cx, dval);
            if (!valueAtom)
                return JS_FALSE;
            v = ATOM_KEY(valueAtom);
        }
        ale = cg->constList.add(cg->compiler, atom);
        if (!ale)
            return JS_FALSE;
        ALE_SET_VALUE(ale, v);
    }
    return JS_TRUE;
}
JSStmtInfo *
js_LexicalLookup(JSTreeContext *tc, JSAtom *atom, jsint *slotp, JSStmtInfo *stmt)
{
    JSObject *obj;
    JSScope *scope;
    JSScopeProperty *sprop;

    if (!stmt)
        stmt = tc->topScopeStmt;
    for (; stmt; stmt = stmt->downScope) {
        if (stmt->type == STMT_WITH)
            break;

        /* Skip "maybe scope" statements that don't contain let bindings. */
        if (!(stmt->flags & SIF_SCOPE))
            continue;

        obj = stmt->blockObj;
        JS_ASSERT(obj->getClass() == &js_BlockClass);
        scope = OBJ_SCOPE(obj);
        sprop = scope->lookup(ATOM_TO_JSID(atom));
        if (sprop) {
            JS_ASSERT(sprop->flags & SPROP_HAS_SHORTID);

            if (slotp) {
                JS_ASSERT(JSVAL_IS_INT(obj->fslots[JSSLOT_BLOCK_DEPTH]));
                *slotp = JSVAL_TO_INT(obj->fslots[JSSLOT_BLOCK_DEPTH]) +
                         sprop->shortid;
            }
            return stmt;
        }
    }

    if (slotp)
        *slotp = -1;
    return stmt;
}
/*
 * Check if the attributes describe a property holding a compile-time constant
 * or a permanent, read-only property without a getter.
 */
#define IS_CONSTANT_PROPERTY(attrs)                                           \
    (((attrs) & (JSPROP_READONLY | JSPROP_PERMANENT | JSPROP_GETTER)) ==      \
     (JSPROP_READONLY | JSPROP_PERMANENT))
/*
 * The function sets vp to JSVAL_HOLE when the atom does not correspond to a
 * name defining a constant.
 */
static JSBool
LookupCompileTimeConstant(JSContext *cx, JSCodeGenerator *cg, JSAtom *atom,
                          jsval *vp)
{
    JSBool ok;
    JSStmtInfo *stmt;
    JSAtomListElement *ale;
    JSObject *obj, *objbox;
    JSProperty *prop;
    uintN attrs;

    /*
     * Chase down the cg stack, but only until we reach the outermost cg.
     * This enables propagating consts from top-level into switch cases in a
     * function compiled along with the top-level script.
     */
    *vp = JSVAL_HOLE;
    do {
        if (cg->flags & (TCF_IN_FUNCTION | TCF_COMPILE_N_GO)) {
            /* XXX this will need revising if 'const' becomes block-scoped. */
            stmt = js_LexicalLookup(cg, atom, NULL);
            if (stmt)
                return JS_TRUE;

            ale = cg->constList.lookup(atom);
            if (ale) {
                JS_ASSERT(ALE_VALUE(ale) != JSVAL_HOLE);
                *vp = ALE_VALUE(ale);
                return JS_TRUE;
            }

            /*
             * Try looking in the variable object for a direct property that
             * is readonly and permanent. We know such a property can't be
             * shadowed by another property on obj's prototype chain, or a
             * with object or catch variable; nor can prop's value be changed,
             * nor can prop be deleted.
             */
            if (cg->flags & TCF_IN_FUNCTION) {
                if (js_LookupLocal(cx, cg->fun, atom, NULL) != JSLOCAL_NONE)
                    break;
            } else {
                JS_ASSERT(cg->flags & TCF_COMPILE_N_GO);
                obj = cg->scopeChain;
                ok = obj->lookupProperty(cx, ATOM_TO_JSID(atom), &objbox, &prop);
                if (!ok)
                    return JS_FALSE;
                if (objbox == obj) {
                    /*
                     * We're compiling code that will be executed immediately,
                     * not re-executed against a different scope chain and/or
                     * variable object. Therefore we can get constant values
                     * from our variable object here.
                     */
                    ok = obj->getAttributes(cx, ATOM_TO_JSID(atom), prop, &attrs);
                    if (ok && IS_CONSTANT_PROPERTY(attrs)) {
                        ok = obj->getProperty(cx, ATOM_TO_JSID(atom), vp);
                        JS_ASSERT_IF(ok, *vp != JSVAL_HOLE);
                    }
                }
                if (prop)
                    objbox->dropProperty(cx, prop);
                if (!ok)
                    return JS_FALSE;
                if (prop)
                    break;
            }
        }
    } while ((cg = (JSCodeGenerator *) cg->parent) != NULL);
    return JS_TRUE;
}
/*
 * Return JSOP_NOP to indicate that index fits 2 bytes and no index segment
 * reset instruction is necessary, JSOP_FALSE to indicate an error or either
 * JSOP_RESETBASE0 or JSOP_RESETBASE to indicate the reset bytecode to issue
 * after the main bytecode sequence.
 */
static JSOp
EmitBigIndexPrefix(JSContext *cx, JSCodeGenerator *cg, uintN index)
{
    uintN indexBase;

    /*
     * We have max 3 bytes for indexes and check for INDEX_LIMIT overflow only
     * for big indexes.
     */
    JS_STATIC_ASSERT(INDEX_LIMIT <= JS_BIT(24));
    JS_STATIC_ASSERT(INDEX_LIMIT >=
                     (JSOP_INDEXBASE3 - JSOP_INDEXBASE1 + 2) << 16);

    if (index < JS_BIT(16))
        return JSOP_NOP;
    indexBase = index >> 16;
    if (indexBase <= JSOP_INDEXBASE3 - JSOP_INDEXBASE1 + 1) {
        if (js_Emit1(cx, cg, (JSOp)(JSOP_INDEXBASE1 + indexBase - 1)) < 0)
            return JSOP_FALSE;
        return JSOP_RESETBASE0;
    }

    if (index >= INDEX_LIMIT) {
        JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL,
                             JSMSG_TOO_MANY_LITERALS);
        return JSOP_FALSE;
    }

    if (js_Emit2(cx, cg, JSOP_INDEXBASE, (JSOp)indexBase) < 0)
        return JSOP_FALSE;
    return JSOP_RESETBASE;
}
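
/*
 * Worked example (added commentary, not in the original source): literal
 * index 0x18003 has indexBase 1, so the emitter issues JSOP_INDEXBASE1,
 * then the main op with 16-bit immediate 0x8003, and finally the returned
 * JSOP_RESETBASE0 suffix to restore the zero segment. An index below 2^16
 * needs no prefix at all, and one whose segment exceeds 3 gets the generic
 * JSOP_INDEXBASE prefix with its 8-bit segment operand.
 */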
/*
 * Emit a bytecode and its 2-byte constant index immediate operand. If the
 * index requires more than 2 bytes, emit a prefix op whose 8-bit immediate
 * operand effectively extends the 16-bit immediate of the prefixed opcode,
 * by changing index "segment" (see jsinterp.c). We optimize segments 1-3
 * with single-byte JSOP_INDEXBASE[123] codes.
 *
 * Such prefixing currently requires a suffix to restore the "zero segment"
 * register setting, but this could be optimized further.
 */
static JSBool
EmitIndexOp(JSContext *cx, JSOp op, uintN index, JSCodeGenerator *cg)
{
    JSOp bigSuffix;

    bigSuffix = EmitBigIndexPrefix(cx, cg, index);
    if (bigSuffix == JSOP_FALSE)
        return JS_FALSE;
    EMIT_UINT16_IMM_OP(op, index);
    return bigSuffix == JSOP_NOP || js_Emit1(cx, cg, bigSuffix) >= 0;
}

/*
 * Slight sugar for EmitIndexOp, again accessing cx and cg from the macro
 * caller's lexical environment, and embedding a false return on error.
 */
#define EMIT_INDEX_OP(op, index)                                              \
    JS_BEGIN_MACRO                                                            \
        if (!EmitIndexOp(cx, op, index, cg))                                  \
            return JS_FALSE;                                                  \
    JS_END_MACRO
static JSBool
EmitAtomOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
{
    JSAtomListElement *ale;

    JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
    if (op == JSOP_GETPROP &&
        pn->pn_atom == cx->runtime->atomState.lengthAtom) {
        return js_Emit1(cx, cg, JSOP_LENGTH) >= 0;
    }
    ale = cg->atomList.add(cg->compiler, pn->pn_atom);
    if (!ale)
        return JS_FALSE;
    return EmitIndexOp(cx, op, ALE_INDEX(ale), cg);
}

static JSBool
EmitObjectOp(JSContext *cx, JSObjectBox *objbox, JSOp op,
             JSCodeGenerator *cg)
{
    JS_ASSERT(JOF_OPTYPE(op) == JOF_OBJECT);
    return EmitIndexOp(cx, op, cg->objectList.index(objbox), cg);
}
/*
 * What good are ARGNO_LEN and SLOTNO_LEN, you ask? The answer is that, apart
 * from EmitSlotIndexOp, they abstract out the detail that both are 2, and in
 * other parts of the code there's no necessary relationship between the two.
 * The abstraction cracks here in order to share EmitSlotIndexOp code among
 * the JSOP_DEFLOCALFUN and JSOP_GET{ARG,VAR,LOCAL}PROP cases.
 */
JS_STATIC_ASSERT(ARGNO_LEN == 2);
JS_STATIC_ASSERT(SLOTNO_LEN == 2);

static JSBool
EmitSlotIndexOp(JSContext *cx, JSOp op, uintN slot, uintN index,
                JSCodeGenerator *cg)
{
    JSOp bigSuffix;
    ptrdiff_t off;
    jsbytecode *pc;

    JS_ASSERT(JOF_OPTYPE(op) == JOF_SLOTATOM ||
              JOF_OPTYPE(op) == JOF_SLOTOBJECT);
    bigSuffix = EmitBigIndexPrefix(cx, cg, index);
    if (bigSuffix == JSOP_FALSE)
        return JS_FALSE;

    /* Emit [op, slot, index]. */
    off = js_EmitN(cx, cg, op, 2 + INDEX_LEN);
    if (off < 0)
        return JS_FALSE;
    pc = CG_CODE(cg, off);
    SET_UINT16(pc, slot);
    pc += 2;
    SET_INDEX(pc, index);
    return bigSuffix == JSOP_NOP || js_Emit1(cx, cg, bigSuffix) >= 0;
}
/*
 * Adjust the slot for a block local to account for the number of variables
 * that share the same index space with locals. Due to the incremental code
 * generation for top-level script, we do the adjustment via code patching in
 * JSCompiler::compileScript; see comments there.
 *
 * The function returns -1 on failures.
 */
static jsint
AdjustBlockSlot(JSContext *cx, JSCodeGenerator *cg, jsint slot)
{
    JS_ASSERT((jsuint) slot < cg->maxStackDepth);
    if (cg->flags & TCF_IN_FUNCTION) {
        slot += cg->fun->u.i.nvars;
        if ((uintN) slot >= SLOTNO_LIMIT) {
            js_ReportCompileErrorNumber(cx, CG_TS(cg), NULL,
                                        JSREPORT_ERROR,
                                        JSMSG_TOO_MANY_LOCALS);
            slot = -1;
        }
    }
    return slot;
}
static bool
EmitEnterBlock(JSContext *cx, JSParseNode *pn, JSCodeGenerator *cg)
{
    JS_ASSERT(PN_TYPE(pn) == TOK_LEXICALSCOPE);
    if (!EmitObjectOp(cx, pn->pn_objbox, JSOP_ENTERBLOCK, cg))
        return false;

    JSObject *blockObj = pn->pn_objbox->object;
    jsint depth = AdjustBlockSlot(cx, cg, OBJ_BLOCK_DEPTH(cx, blockObj));
    if (depth < 0)
        return false;

    for (uintN slot = JSSLOT_FREE(&js_BlockClass),
               limit = slot + OBJ_BLOCK_COUNT(cx, blockObj);
         slot < limit; slot++) {
        jsval v = STOBJ_GET_SLOT(blockObj, slot);

        /* Beware the empty destructuring dummy. */
        if (JSVAL_IS_VOID(v)) {
            JS_ASSERT(slot + 1 <= limit);
            continue;
        }

        JSDefinition *dn = (JSDefinition *) JSVAL_TO_PRIVATE(v);
        JS_ASSERT(dn->pn_defn);
        JS_ASSERT(uintN(dn->frameSlot() + depth) < JS_BIT(16));
        dn->pn_cookie += depth;
#ifdef DEBUG
        for (JSParseNode *pnu = dn->dn_uses; pnu; pnu = pnu->pn_link) {
            JS_ASSERT(pnu->pn_lexdef == dn);
            JS_ASSERT(!(pnu->pn_dflags & PND_BOUND));
            JS_ASSERT(pnu->pn_cookie == FREE_UPVAR_COOKIE);
        }
#endif
    }

    OBJ_SCOPE(blockObj)->freeslot = JSSLOT_FREE(&js_BlockClass);
    return js_GrowSlots(cx, blockObj, JSSLOT_FREE(&js_BlockClass));
}
1875 * When eval is called from a function, the eval code or function code it
1876 * compiles may reference upvars that live in the eval-calling function. The
1877 * eval-invoked compiler does not have explicit definitions for these upvars
1878 * and we do not attempt to create them a-priori (by inspecting the function's
1879 * args and vars) -- we could, but we'd take an avoidable penalty for each
1880 * function local not referenced by any upvar. Instead, we map such upvars
1881 * lazily, growing upvarMap.vector by powers of two.
1883 * This function knows that it is called with pn pointing to a PN_NAME-arity
1884 * node, and cg->compiler->callerFrame having a non-null fun member, and the
1885 * static level of cg at least one greater than the eval-calling function's
1886 * static level.
1888 static bool
1889 MakeUpvarForEval(JSParseNode *pn, JSCodeGenerator *cg)
1891 JSContext *cx = cg->compiler->context;
1892 JSFunction *fun = cg->compiler->callerFrame->fun;
1893 uintN upvarLevel = fun->u.i.script->staticLevel;
1895 JSFunctionBox *funbox = cg->funbox;
1896 if (funbox) {
1898 * Treat top-level function definitions as escaping (i.e., as funargs),
1899 * required since we compile each such top level function or statement
1900 * and throw away the AST, so we can't yet see all funarg uses of this
1901 * function being compiled (cg->funbox->object). See bug 493177.
1903 if (funbox->level == fun->u.i.script->staticLevel + 1U &&
1904 !(((JSFunction *) funbox->object)->flags & JSFUN_LAMBDA)) {
1905 JS_ASSERT_IF(cx->options & JSOPTION_ANONFUNFIX,
1906 ((JSFunction *) funbox->object)->atom);
1907 return true;
1910 while (funbox->level >= upvarLevel) {
1911 if (funbox->node->pn_dflags & PND_FUNARG)
1912 return true;
1913 funbox = funbox->parent;
1914 if (!funbox)
1915 break;
1919 JSAtom *atom = pn->pn_atom;
1921 uintN index;
1922 JSLocalKind localKind = js_LookupLocal(cx, fun, atom, &index);
1923 if (localKind == JSLOCAL_NONE)
1924 return true;
1926 JS_ASSERT(cg->staticLevel > upvarLevel);
1927 if (cg->staticLevel >= JS_DISPLAY_SIZE || upvarLevel >= JS_DISPLAY_SIZE)
1928 return true;
1930 JSAtomListElement *ale = cg->upvarList.lookup(atom);
1931 if (!ale) {
1932 if ((cg->flags & TCF_IN_FUNCTION) &&
1933 !js_AddLocal(cx, cg->fun, atom, JSLOCAL_UPVAR)) {
1934 return false;
1937 ale = cg->upvarList.add(cg->compiler, atom);
1938 if (!ale)
1939 return false;
1940 JS_ASSERT(ALE_INDEX(ale) == cg->upvarList.count - 1);
1942 uint32 *vector = cg->upvarMap.vector;
1943 uint32 length = cg->upvarMap.length;
1945 JS_ASSERT(ALE_INDEX(ale) <= length);
1946 if (ALE_INDEX(ale) == length) {
1947 length = 2 * JS_MAX(2, length);
1948 vector = (uint32 *) cx->realloc(vector, length * sizeof *vector);
1949 if (!vector)
1950 return false;
1951 cg->upvarMap.vector = vector;
1952 cg->upvarMap.length = length;
1955 if (localKind != JSLOCAL_ARG)
1956 index += fun->nargs;
1957 JS_ASSERT(index < JS_BIT(16));
1959 uintN skip = cg->staticLevel - upvarLevel;
1960 vector[ALE_INDEX(ale)] = MAKE_UPVAR_COOKIE(skip, index);
1963 pn->pn_op = JSOP_GETUPVAR;
1964 pn->pn_cookie = MAKE_UPVAR_COOKIE(cg->staticLevel, ALE_INDEX(ale));
1965 pn->pn_dflags |= PND_BOUND;
1966 return true;
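/*
 * A minimal sketch of the cookie encoding used above, assuming
 * MAKE_UPVAR_COOKIE packs the frame-skip count in the upper 16 bits and
 * the slot in the lower 16 (consistent with the JS_BIT(16) assertions on
 * slots in this file; the Example* helpers are hypothetical, not part of
 * the original source):
 */
static inline uint32
ExampleMakeUpvarCookie(uintN skip, uintN slot)
{
    JS_ASSERT(skip < JS_BIT(16));    /* frame-skip count must fit in 16 bits */
    JS_ASSERT(slot < JS_BIT(16));    /* slot index must fit in 16 bits */
    return (uint32(skip) << 16) | uint32(slot);
}

static inline uintN
ExampleUpvarFrameSkip(uint32 cookie) { return uintN(cookie >> 16); }

static inline uintN
ExampleUpvarFrameSlot(uint32 cookie) { return uintN(cookie & 0xffff); }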
1970 * BindNameToSlot attempts to optimize name gets and sets to stack slot loads
1971 * and stores, given the compile-time information in cg and a TOK_NAME node pn.
1972 * It returns false on error, true on success.
1974 * The caller can inspect pn->pn_cookie for FREE_UPVAR_COOKIE to tell whether
1975 * optimization occurred, in which case BindNameToSlot also updated pn->pn_op.
1976 * If pn->pn_cookie is still FREE_UPVAR_COOKIE on return, pn->pn_op still may
1977 * have been optimized, e.g., from JSOP_NAME to JSOP_CALLEE. Whether or not
1978 * pn->pn_op was modified, if this function finds an argument or local variable
1979 * name, PND_CONST will be set in pn_dflags for read-only properties after a
1980 * successful return.
1982 * NB: if you add more opcodes specialized from JSOP_NAME, etc., don't forget
1983 * to update the TOK_FOR (for-in) and TOK_ASSIGN (op=, e.g. +=) special cases
1984 * in js_EmitTree.
1986 static JSBool
1987 BindNameToSlot(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
1989 JSDefinition *dn;
1990 JSOp op;
1991 JSAtom *atom;
1992 uint32 cookie;
1993 JSDefinition::Kind dn_kind;
1994 JSAtomListElement *ale;
1995 uintN index;
1997 JS_ASSERT(pn->pn_type == TOK_NAME);
1999 /* Idempotency tests come first, since we may be called more than once. */
2000 if (pn->pn_dflags & PND_BOUND)
2001 return JS_TRUE;
2003 /* No cookie initialized for these two, they're pre-bound by definition. */
2004 JS_ASSERT(pn->pn_op != JSOP_ARGUMENTS && pn->pn_op != JSOP_CALLEE);
2007 * The parser linked all uses (including forward references) to their
2008 * definitions, unless a with statement or direct eval intervened.
2010 if (pn->pn_used) {
2011 JS_ASSERT(pn->pn_cookie == FREE_UPVAR_COOKIE);
2012 dn = pn->pn_lexdef;
2013 JS_ASSERT(dn->pn_defn);
2014 pn->pn_dflags |= (dn->pn_dflags & PND_CONST);
2015 } else {
2016 if (!pn->pn_defn)
2017 return JS_TRUE;
2018 dn = (JSDefinition *) pn;
2021 op = PN_OP(pn);
2022 if (op == JSOP_NOP)
2023 return JS_TRUE;
2025 JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
2026 atom = pn->pn_atom;
2027 cookie = dn->pn_cookie;
2028 dn_kind = dn->kind();
2031 * Turn attempts to mutate const-declared bindings into get ops (for
2032 * pre-increment and pre-decrement ops, our caller will have to emit
2033 * JSOP_POS, JSOP_ONE, and JSOP_ADD as well).
2035 * Turn JSOP_DELNAME into JSOP_FALSE if dn is known, as all declared
2036 * bindings visible to the compiler are permanent in JS unless the
2037 * declaration originates in eval code. We detect eval code by testing
2038 * cg->compiler->callerFrame, which is set only by eval or a debugger
2039 * equivalent.
2041 * Note that this callerFrame non-null test must be qualified by testing
2042 * !cg->funbox to exclude function code nested in eval code, which is not
2043 * subject to the deletable binding exception.
2045 switch (op) {
2046 case JSOP_NAME:
2047 case JSOP_SETCONST:
2048 break;
2049 case JSOP_DELNAME:
2050 if (dn_kind != JSDefinition::UNKNOWN) {
2051 if (cg->compiler->callerFrame && !cg->funbox)
2052 JS_ASSERT(cg->flags & TCF_COMPILE_N_GO);
2053 else
2054 pn->pn_op = JSOP_FALSE;
2055 pn->pn_dflags |= PND_BOUND;
2056 return JS_TRUE;
2058 break;
2059 default:
2060 if (pn->isConst())
2061 pn->pn_op = op = JSOP_NAME;
2064 if (cookie == FREE_UPVAR_COOKIE) {
2065 JSStackFrame *caller = cg->compiler->callerFrame;
2066 if (caller) {
2067 JS_ASSERT(cg->flags & TCF_COMPILE_N_GO);
2070 * Don't generate upvars on the left side of a for loop. See
2071 * bug 470758.
2073 if (cg->flags & TCF_IN_FOR_INIT)
2074 return JS_TRUE;
2076 JS_ASSERT(caller->script);
2077 if (!caller->fun)
2078 return JS_TRUE;
2081 * Make sure the variable object used by the compiler to initialize
2082 * parent links matches the caller's varobj. Compile-n-go compiler-
2083 * created function objects have the top-level cg's scopeChain set
2084 * as their parent by JSCompiler::newFunction.
2086 JSObject *scopeobj = (cg->flags & TCF_IN_FUNCTION)
2087 ? STOBJ_GET_PARENT(FUN_OBJECT(cg->fun))
2088 : cg->scopeChain;
2089 if (scopeobj != caller->varobj)
2090 return JS_TRUE;
2093 * We are compiling an eval or debugger script inside a function frame
2094 * and the scope chain matches the function's variable object.
2095 * Optimize access to the function's arguments, variables, and the
2096 * arguments object.
2098 if (op != JSOP_NAME)
2099 return JS_TRUE;
2101 return MakeUpvarForEval(pn, cg);
2103 return JS_TRUE;
2106 if (dn->pn_dflags & PND_GVAR) {
2108 * If this is a global reference from within a function, leave pn_op as
2109 * JSOP_NAME, etc. We could emit JSOP_*GVAR ops within function code if
2110 * only we could depend on the global frame's slots being valid for all
2111 * calls to the function.
2113 if (cg->flags & TCF_IN_FUNCTION)
2114 return JS_TRUE;
2117 * We are optimizing global variables and there may be no pre-existing
2118 * global property named atom when this global script runs. If atom was
2119 * declared via const or var, optimize pn to access fp->vars using the
2120 * appropriate JSOP_*GVAR op.
2122 * FIXME: should be able to optimize global function access too.
2124 JS_ASSERT(dn_kind == JSDefinition::VAR || dn_kind == JSDefinition::CONST);
2126 switch (op) {
2127 case JSOP_NAME: op = JSOP_GETGVAR; break;
2128 case JSOP_SETNAME: op = JSOP_SETGVAR; break;
2129 case JSOP_SETCONST: /* NB: no change */ break;
2130 case JSOP_INCNAME: op = JSOP_INCGVAR; break;
2131 case JSOP_NAMEINC: op = JSOP_GVARINC; break;
2132 case JSOP_DECNAME: op = JSOP_DECGVAR; break;
2133 case JSOP_NAMEDEC: op = JSOP_GVARDEC; break;
2134 case JSOP_FORNAME: /* NB: no change */ break;
2135 case JSOP_DELNAME: /* NB: no change */ break;
2136 default: JS_NOT_REACHED("gvar");
2138 pn->pn_op = op;
2139 pn->pn_cookie = cookie;
2140 pn->pn_dflags |= PND_BOUND;
2141 return JS_TRUE;
2144 uintN level = UPVAR_FRAME_SKIP(cookie);
2145 JS_ASSERT(cg->staticLevel >= level);
2148 * A JSDefinition witnessed as a declaration by the parser cannot be an
2149 * upvar, unless it is the degenerate kind of upvar selected above (in the
2150 * code before the PND_GVAR test) for the special case of compile-and-go
2151 * code generated from eval called from a function, where the eval code
2152 * uses local vars defined in the function. We detect this upvar-for-eval
2153 * case by checking dn's op.
2155 if (PN_OP(dn) == JSOP_GETUPVAR) {
2156 JS_ASSERT(cg->staticLevel >= level);
2157 if (op != JSOP_NAME)
2158 return JS_TRUE;
2160 #ifdef DEBUG
2161 JSStackFrame *caller = cg->compiler->callerFrame;
2162 #endif
2163 JS_ASSERT(caller);
2164 JS_ASSERT(caller->script);
2166 JSTreeContext *tc = cg;
2167 while (tc->staticLevel != level)
2168 tc = tc->parent;
2169 JS_ASSERT(tc->flags & TCF_COMPILING);
2171 JSCodeGenerator *evalcg = (JSCodeGenerator *) tc;
2172 JS_ASSERT(evalcg->flags & TCF_COMPILE_N_GO);
2173 JS_ASSERT(caller->fun && caller->varobj == evalcg->scopeChain);
2176 * Don't generate upvars on the left side of a for loop. See
2177 * bug 470758 and bug 520513.
2179 if (evalcg->flags & TCF_IN_FOR_INIT)
2180 return JS_TRUE;
2182 if (cg->staticLevel == level) {
2183 pn->pn_op = JSOP_GETUPVAR;
2184 pn->pn_cookie = cookie;
2185 pn->pn_dflags |= PND_BOUND;
2186 return JS_TRUE;
2189 return MakeUpvarForEval(pn, cg);
2192 uintN skip = cg->staticLevel - level;
2193 if (skip != 0) {
2194 JS_ASSERT(cg->flags & TCF_IN_FUNCTION);
2195 JS_ASSERT_IF(UPVAR_FRAME_SLOT(cookie) != CALLEE_UPVAR_SLOT,
2196 cg->lexdeps.lookup(atom));
2197 JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
2198 JS_ASSERT(cg->fun->u.i.skipmin <= skip);
2201 * If op is a mutating opcode, this upvar's static level is too big to
2202 * index into the display, or the function is heavyweight, we fall back
2203 * on JSOP_*NAME*.
2205 if (op != JSOP_NAME)
2206 return JS_TRUE;
2207 if (level >= JS_DISPLAY_SIZE)
2208 return JS_TRUE;
2209 if (cg->flags & TCF_FUN_HEAVYWEIGHT)
2210 return JS_TRUE;
2212 if (FUN_FLAT_CLOSURE(cg->fun)) {
2213 op = JSOP_GETDSLOT;
2214 } else {
2216 * The function we're compiling may not be heavyweight, but if it
2217 * escapes as a funarg, we can't use JSOP_GETUPVAR/JSOP_CALLUPVAR.
2218 * JSCompiler::analyzeFunctions has arranged for this function's
2219 * enclosing functions to be heavyweight, so we can safely stick
2220 * with JSOP_NAME/JSOP_CALLNAME.
2222 if (cg->funbox->node->pn_dflags & PND_FUNARG)
2223 return JS_TRUE;
2226 * Generator functions may be resumed from any call stack, which
2227 * defeats the display optimization to static link searching used
2228 * by JSOP_{GET,CALL}UPVAR.
2230 if (cg->flags & TCF_FUN_IS_GENERATOR)
2231 return JS_TRUE;
2233 op = JSOP_GETUPVAR;
2236 ale = cg->upvarList.lookup(atom);
2237 if (ale) {
2238 index = ALE_INDEX(ale);
2239 } else {
2240 if (!js_AddLocal(cx, cg->fun, atom, JSLOCAL_UPVAR))
2241 return JS_FALSE;
2243 ale = cg->upvarList.add(cg->compiler, atom);
2244 if (!ale)
2245 return JS_FALSE;
2246 index = ALE_INDEX(ale);
2247 JS_ASSERT(index == cg->upvarList.count - 1);
2249 uint32 *vector = cg->upvarMap.vector;
2250 if (!vector) {
2251 uint32 length = cg->lexdeps.count;
2253 vector = (uint32 *) js_calloc(length * sizeof *vector);
2254 if (!vector) {
2255 JS_ReportOutOfMemory(cx);
2256 return JS_FALSE;
2258 cg->upvarMap.vector = vector;
2259 cg->upvarMap.length = length;
2262 uintN slot = UPVAR_FRAME_SLOT(cookie);
2263 if (slot != CALLEE_UPVAR_SLOT && dn_kind != JSDefinition::ARG) {
2264 JSTreeContext *tc = cg;
2265 do {
2266 tc = tc->parent;
2267 } while (tc->staticLevel != level);
2268 if (tc->flags & TCF_IN_FUNCTION)
2269 slot += tc->fun->nargs;
2272 vector[index] = MAKE_UPVAR_COOKIE(skip, slot);
2275 pn->pn_op = op;
2276 pn->pn_cookie = index;
2277 pn->pn_dflags |= PND_BOUND;
2278 return JS_TRUE;
2282 * We are compiling a function body and may be able to optimize name
2283 * to stack slot. Look for an argument or variable in the function and
2284 * rewrite pn_op and update pn accordingly.
2286 switch (dn_kind) {
2287 case JSDefinition::UNKNOWN:
2288 return JS_TRUE;
2290 case JSDefinition::LET:
2291 switch (op) {
2292 case JSOP_NAME: op = JSOP_GETLOCAL; break;
2293 case JSOP_SETNAME: op = JSOP_SETLOCAL; break;
2294 case JSOP_INCNAME: op = JSOP_INCLOCAL; break;
2295 case JSOP_NAMEINC: op = JSOP_LOCALINC; break;
2296 case JSOP_DECNAME: op = JSOP_DECLOCAL; break;
2297 case JSOP_NAMEDEC: op = JSOP_LOCALDEC; break;
2298 case JSOP_FORNAME: op = JSOP_FORLOCAL; break;
2299 default: JS_NOT_REACHED("let");
2301 break;
2303 case JSDefinition::ARG:
2304 switch (op) {
2305 case JSOP_NAME: op = JSOP_GETARG; break;
2306 case JSOP_SETNAME: op = JSOP_SETARG; break;
2307 case JSOP_INCNAME: op = JSOP_INCARG; break;
2308 case JSOP_NAMEINC: op = JSOP_ARGINC; break;
2309 case JSOP_DECNAME: op = JSOP_DECARG; break;
2310 case JSOP_NAMEDEC: op = JSOP_ARGDEC; break;
2311 case JSOP_FORNAME: op = JSOP_FORARG; break;
2312 default: JS_NOT_REACHED("arg");
2314 JS_ASSERT(!pn->isConst());
2315 break;
2317 case JSDefinition::VAR:
2318 if (PN_OP(dn) == JSOP_CALLEE) {
2319 JS_ASSERT(op != JSOP_CALLEE);
2320 JS_ASSERT((cg->fun->flags & JSFUN_LAMBDA) && atom == cg->fun->atom);
2323 * Leave pn->pn_op == JSOP_NAME if cg->fun is heavyweight, as we
2324 * cannot be sure cg->fun is not something of the form:
2326 * var ff = (function f(s) { eval(s); return f; });
2328 * where a caller invokes ff("var f = 42"). The result returned for
2329 * such an invocation must be 42, since the callee name is
2330 * lexically bound in an outer declarative environment from the
2331 * function's activation. See jsfun.cpp:call_resolve.
2333 JS_ASSERT(op != JSOP_DELNAME);
2334 if (!(cg->flags & TCF_FUN_HEAVYWEIGHT)) {
2335 op = JSOP_CALLEE;
2336 pn->pn_dflags |= PND_CONST;
2339 pn->pn_op = op;
2340 pn->pn_dflags |= PND_BOUND;
2341 return JS_TRUE;
2343 /* FALL THROUGH */
2345 default:
2346 JS_ASSERT_IF(dn_kind != JSDefinition::FUNCTION,
2347 dn_kind == JSDefinition::VAR ||
2348 dn_kind == JSDefinition::CONST);
2349 switch (op) {
2350 case JSOP_NAME: op = JSOP_GETLOCAL; break;
2351 case JSOP_SETNAME: op = JSOP_SETLOCAL; break;
2352 case JSOP_SETCONST: op = JSOP_SETLOCAL; break;
2353 case JSOP_INCNAME: op = JSOP_INCLOCAL; break;
2354 case JSOP_NAMEINC: op = JSOP_LOCALINC; break;
2355 case JSOP_DECNAME: op = JSOP_DECLOCAL; break;
2356 case JSOP_NAMEDEC: op = JSOP_LOCALDEC; break;
2357 case JSOP_FORNAME: op = JSOP_FORLOCAL; break;
2358 default: JS_NOT_REACHED("local");
2360 JS_ASSERT_IF(dn_kind == JSDefinition::CONST, pn->pn_dflags & PND_CONST);
2361 break;
2364 JS_ASSERT(op != PN_OP(pn));
2365 pn->pn_op = op;
2366 pn->pn_cookie = UPVAR_FRAME_SLOT(cookie);
2367 pn->pn_dflags |= PND_BOUND;
2368 return JS_TRUE;
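/*
 * Worked example for the specialization above (an illustrative sketch, not
 * part of the original source). Given
 *
 *     function f(a) { var b = a; return b; }
 *
 * the TOK_NAME nodes for a and b are rewritten roughly as:
 *
 *     a (read)    JSOP_NAME    -> JSOP_GETARG   slot 0
 *     b (write)   JSOP_SETNAME -> JSOP_SETLOCAL slot 0
 *     b (read)    JSOP_NAME    -> JSOP_GETLOCAL slot 0
 *
 * with pn_cookie holding the slot and PND_BOUND set so repeated calls are
 * idempotent. The ARG table, condensed (ExampleSpecializeArgOp is a
 * hypothetical helper mirroring the switch above):
 */
static JSOp
ExampleSpecializeArgOp(JSOp op)
{
    switch (op) {
      case JSOP_NAME:    return JSOP_GETARG;
      case JSOP_SETNAME: return JSOP_SETARG;
      case JSOP_INCNAME: return JSOP_INCARG;
      case JSOP_NAMEINC: return JSOP_ARGINC;
      case JSOP_DECNAME: return JSOP_DECARG;
      case JSOP_NAMEDEC: return JSOP_ARGDEC;
      case JSOP_FORNAME: return JSOP_FORARG;
      default:           return op;   /* leave other ops untouched */
    }
}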
2372 * If pn contains a useful expression, return true with *answer set to true.
2373 * If pn contains a useless expression, return true with *answer set to false.
2374 * Return false on error.
2376 * The caller should initialize *answer to false and invoke this function on
2377 * an expression statement or similar subtree to decide whether the tree could
2378 * produce code that has any side effects. For an expression statement, we
2379 * define useless code as code with no side effects, because the main effect,
2380 * the value left on the stack after the code executes, will be discarded by a
2381 * pop bytecode.
2383 static JSBool
2384 CheckSideEffects(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
2385 JSBool *answer)
2387 JSBool ok;
2388 JSParseNode *pn2;
2390 ok = JS_TRUE;
2391 if (!pn || *answer)
2392 return ok;
2394 switch (pn->pn_arity) {
2395 case PN_FUNC:
2397 * Contrary to ES3, a named function is no longer useful by itself,
2398 * because we bind its name lexically (using JSOP_CALLEE) instead of
2399 * creating an Object instance and binding a readonly, permanent
2400 * property in it (the object and binding can be detected and hijacked
2401 * or captured). This is a bug fix to ES3; it is fixed in ES3.1 drafts.
2403 *answer = JS_FALSE;
2404 break;
2406 case PN_LIST:
2407 if (pn->pn_op == JSOP_NOP ||
2408 pn->pn_op == JSOP_OR || pn->pn_op == JSOP_AND ||
2409 pn->pn_op == JSOP_STRICTEQ || pn->pn_op == JSOP_STRICTNE) {
2411 * Non-operators along with ||, &&, ===, and !== never invoke
2412 * toString or valueOf.
2414 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next)
2415 ok &= CheckSideEffects(cx, cg, pn2, answer);
2416 } else {
2418 * All invocation operations (construct: TOK_NEW, call: TOK_LP)
2419 * are presumed to be useful, because they may have side effects
2420 * even if their main effect (their return value) is discarded.
2422 * TOK_LB binary trees of 3 or more nodes are flattened into lists
2423 * to avoid too much recursion. All such lists must be presumed
2424 * to be useful because each index operation could invoke a getter
2425 * (the JSOP_ARGUMENTS special case below, in the PN_BINARY case,
2426 * does not apply here: arguments[i][j] might invoke a getter).
2428 * Likewise, array and object initialisers may call prototype
2429 * setters (the __defineSetter__ built-in, and writable __proto__
2430 * on Array.prototype create this hazard). Initialiser list nodes
2431 * have JSOP_NEWINIT in their pn_op.
2433 *answer = JS_TRUE;
2435 break;
2437 case PN_TERNARY:
2438 ok = CheckSideEffects(cx, cg, pn->pn_kid1, answer) &&
2439 CheckSideEffects(cx, cg, pn->pn_kid2, answer) &&
2440 CheckSideEffects(cx, cg, pn->pn_kid3, answer);
2441 break;
2443 case PN_BINARY:
2444 if (pn->pn_type == TOK_ASSIGN) {
2446 * Assignment is presumed to be useful, even if the next operation
2447 * is another assignment overwriting this one's ostensible effect,
2448 * because the left operand may be a property with a setter that
2449 * has side effects.
2451 * The only exception is assignment of a useless value to a const
2452 * declared in the function currently being compiled.
2454 pn2 = pn->pn_left;
2455 if (pn2->pn_type != TOK_NAME) {
2456 *answer = JS_TRUE;
2457 } else {
2458 if (!BindNameToSlot(cx, cg, pn2))
2459 return JS_FALSE;
2460 if (!CheckSideEffects(cx, cg, pn->pn_right, answer))
2461 return JS_FALSE;
2462 if (!*answer && (pn->pn_op != JSOP_NOP || !pn2->isConst()))
2463 *answer = JS_TRUE;
2465 } else {
2466 if (pn->pn_op == JSOP_OR || pn->pn_op == JSOP_AND ||
2467 pn->pn_op == JSOP_STRICTEQ || pn->pn_op == JSOP_STRICTNE) {
2469 * ||, &&, ===, and !== do not convert their operands via
2470 * toString or valueOf method calls.
2472 ok = CheckSideEffects(cx, cg, pn->pn_left, answer) &&
2473 CheckSideEffects(cx, cg, pn->pn_right, answer);
2474 } else {
2476 * We can't easily prove that neither operand ever denotes an
2477 * object with a toString or valueOf method.
2479 *answer = JS_TRUE;
2482 break;
2484 case PN_UNARY:
2485 switch (pn->pn_type) {
2486 case TOK_DELETE:
2487 pn2 = pn->pn_kid;
2488 switch (pn2->pn_type) {
2489 case TOK_NAME:
2490 if (!BindNameToSlot(cx, cg, pn2))
2491 return JS_FALSE;
2492 if (pn2->isConst()) {
2493 *answer = JS_FALSE;
2494 break;
2496 /* FALL THROUGH */
2497 case TOK_DOT:
2498 #if JS_HAS_XML_SUPPORT
2499 case TOK_DBLDOT:
2500 #endif
2501 case TOK_LP:
2502 case TOK_LB:
2503 /* All these delete addressing modes have effects too. */
2504 *answer = JS_TRUE;
2505 break;
2506 default:
2507 ok = CheckSideEffects(cx, cg, pn2, answer);
2508 break;
2510 break;
2512 case TOK_UNARYOP:
2513 if (pn->pn_op == JSOP_NOT) {
2514 /* ! does not convert its operand via toString or valueOf. */
2515 ok = CheckSideEffects(cx, cg, pn->pn_kid, answer);
2516 break;
2518 /* FALL THROUGH */
2520 default:
2522 * All of TOK_INC, TOK_DEC, TOK_THROW, TOK_YIELD, and TOK_DEFSHARP
2523 * have direct effects. Of the remaining unary-arity node types,
2524 * we can't easily prove that the operand never denotes an object
2525 * with a toString or valueOf method.
2527 *answer = JS_TRUE;
2528 break;
2530 break;
2532 case PN_NAME:
2534 * Take care to avoid trying to bind a label name (labels, both for
2535 * statements and property values in object initialisers, have pn_op
2536 * defaulted to JSOP_NOP).
2538 if (pn->pn_type == TOK_NAME && pn->pn_op != JSOP_NOP) {
2539 if (!BindNameToSlot(cx, cg, pn))
2540 return JS_FALSE;
2541 if (pn->pn_op != JSOP_ARGUMENTS && pn->pn_op != JSOP_CALLEE &&
2542 pn->pn_cookie == FREE_UPVAR_COOKIE) {
2544 * Not an argument or local variable use, and not a use of an
2545 * unshadowed named function expression's given name, so this
2546 * expression could invoke a getter that has side effects.
2548 *answer = JS_TRUE;
2551 pn2 = pn->maybeExpr();
2552 if (pn->pn_type == TOK_DOT) {
2553 if (pn2->pn_type == TOK_NAME && !BindNameToSlot(cx, cg, pn2))
2554 return JS_FALSE;
2555 if (!(pn2->pn_op == JSOP_ARGUMENTS &&
2556 pn->pn_atom == cx->runtime->atomState.lengthAtom)) {
2558 * Any dotted property reference could call a getter, except
2559 * for arguments.length where arguments is unambiguous.
2561 *answer = JS_TRUE;
2564 ok = CheckSideEffects(cx, cg, pn2, answer);
2565 break;
2567 case PN_NAMESET:
2568 ok = CheckSideEffects(cx, cg, pn->pn_tree, answer);
2569 break;
2571 case PN_NULLARY:
2572 if (pn->pn_type == TOK_DEBUGGER)
2573 *answer = JS_TRUE;
2574 break;
2576 return ok;
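/*
 * Sketch of the calling convention described above (ExampleExprIsUseful is
 * a hypothetical caller, not from the original source): *answer starts
 * false and latches to true once any subtree may have an effect.
 */
static JSBool
ExampleExprIsUseful(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
                    JSBool *useful)
{
    *useful = JS_FALSE;                  /* caller-required initialization */
    if (!CheckSideEffects(cx, cg, pn, useful))
        return JS_FALSE;                 /* propagate error */
    /* *useful now tells whether pn may have observable side effects. */
    return JS_TRUE;
}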
2579 static JSBool
2580 EmitNameOp(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
2581 JSBool callContext)
2583 JSOp op;
2585 if (!BindNameToSlot(cx, cg, pn))
2586 return JS_FALSE;
2587 op = PN_OP(pn);
2589 if (callContext) {
2590 switch (op) {
2591 case JSOP_NAME:
2592 op = JSOP_CALLNAME;
2593 break;
2594 case JSOP_GETGVAR:
2595 op = JSOP_CALLGVAR;
2596 break;
2597 case JSOP_GETARG:
2598 op = JSOP_CALLARG;
2599 break;
2600 case JSOP_GETLOCAL:
2601 op = JSOP_CALLLOCAL;
2602 break;
2603 case JSOP_GETUPVAR:
2604 op = JSOP_CALLUPVAR;
2605 break;
2606 case JSOP_GETDSLOT:
2607 op = JSOP_CALLDSLOT;
2608 break;
2609 default:
2610 JS_ASSERT(op == JSOP_ARGUMENTS || op == JSOP_CALLEE);
2611 break;
2615 if (op == JSOP_ARGUMENTS || op == JSOP_CALLEE) {
2616 if (js_Emit1(cx, cg, op) < 0)
2617 return JS_FALSE;
2618 if (callContext && js_Emit1(cx, cg, JSOP_NULL) < 0)
2619 return JS_FALSE;
2620 } else {
2621 if (pn->pn_cookie != FREE_UPVAR_COOKIE) {
2622 EMIT_UINT16_IMM_OP(op, pn->pn_cookie);
2623 } else {
2624 if (!EmitAtomOp(cx, pn, op, cg))
2625 return JS_FALSE;
2629 return JS_TRUE;
2632 #if JS_HAS_XML_SUPPORT
2633 static JSBool
2634 EmitXMLName(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
2636 JSParseNode *pn2;
2637 uintN oldflags;
2639 JS_ASSERT(pn->pn_type == TOK_UNARYOP);
2640 JS_ASSERT(pn->pn_op == JSOP_XMLNAME);
2641 JS_ASSERT(op == JSOP_XMLNAME || op == JSOP_CALLXMLNAME);
2643 pn2 = pn->pn_kid;
2644 oldflags = cg->flags;
2645 cg->flags &= ~TCF_IN_FOR_INIT;
2646 if (!js_EmitTree(cx, cg, pn2))
2647 return JS_FALSE;
2648 cg->flags |= oldflags & TCF_IN_FOR_INIT;
2649 if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
2650 CG_OFFSET(cg) - pn2->pn_offset) < 0) {
2651 return JS_FALSE;
2654 return js_Emit1(cx, cg, op) >= 0;
2656 #endif
2658 static JSBool
2659 EmitSpecialPropOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
2662 * Special case for obj.__proto__, obj.__parent__, obj.__count__ to
2663 * deoptimize away from fast paths in the interpreter and trace recorder,
2664 * which skip dense array instances by going up to Array.prototype before
2665 * looking up the property name.
2667 JSAtomListElement *ale = cg->atomList.add(cg->compiler, pn->pn_atom);
2668 if (!ale)
2669 return JS_FALSE;
2670 if (!EmitIndexOp(cx, JSOP_QNAMEPART, ALE_INDEX(ale), cg))
2671 return JS_FALSE;
2672 if (js_Emit1(cx, cg, op) < 0)
2673 return JS_FALSE;
2674 return JS_TRUE;
2677 static JSBool
2678 EmitPropOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg,
2679 JSBool callContext)
2681 JSParseNode *pn2, *pndot, *pnup, *pndown;
2682 ptrdiff_t top;
2684 JS_ASSERT(pn->pn_arity == PN_NAME);
2685 pn2 = pn->maybeExpr();
2687 /* Special case deoptimization on __proto__, __count__ and __parent__. */
2688 if ((op == JSOP_GETPROP || op == JSOP_CALLPROP) &&
2689 (pn->pn_atom == cx->runtime->atomState.protoAtom ||
2690 pn->pn_atom == cx->runtime->atomState.parentAtom ||
2691 pn->pn_atom == cx->runtime->atomState.countAtom)) {
2692 if (pn2 && !js_EmitTree(cx, cg, pn2))
2693 return JS_FALSE;
2694 return EmitSpecialPropOp(cx, pn, callContext ? JSOP_CALLELEM : JSOP_GETELEM, cg);
2697 if (callContext) {
2698 JS_ASSERT(pn->pn_type == TOK_DOT);
2699 JS_ASSERT(op == JSOP_GETPROP);
2700 op = JSOP_CALLPROP;
2701 } else if (op == JSOP_GETPROP && pn->pn_type == TOK_DOT) {
2702 if (pn2->pn_op == JSOP_THIS) {
2703 if (pn->pn_atom != cx->runtime->atomState.lengthAtom) {
2704 /* Fast path for gets of |this.foo|. */
2705 return EmitAtomOp(cx, pn, JSOP_GETTHISPROP, cg);
2707 } else if (pn2->pn_type == TOK_NAME) {
2709 * Try to optimize:
2710 * - arguments.length into JSOP_ARGCNT
2711 * - argname.prop into JSOP_GETARGPROP
2712 * - localname.prop into JSOP_GETLOCALPROP
2713 * but don't do this if the property is 'length' -- prefer to emit
2714 * JSOP_GETARG, etc., and then JSOP_LENGTH.
2716 if (!BindNameToSlot(cx, cg, pn2))
2717 return JS_FALSE;
2718 if (pn->pn_atom == cx->runtime->atomState.lengthAtom) {
2719 if (pn2->pn_op == JSOP_ARGUMENTS)
2720 return js_Emit1(cx, cg, JSOP_ARGCNT) >= 0;
2721 } else {
2722 switch (pn2->pn_op) {
2723 case JSOP_GETARG:
2724 op = JSOP_GETARGPROP;
2725 goto do_indexconst;
2726 case JSOP_GETLOCAL:
2727 op = JSOP_GETLOCALPROP;
2728 do_indexconst: {
2729 JSAtomListElement *ale;
2730 jsatomid atomIndex;
2732 ale = cg->atomList.add(cg->compiler, pn->pn_atom);
2733 if (!ale)
2734 return JS_FALSE;
2735 atomIndex = ALE_INDEX(ale);
2736 return EmitSlotIndexOp(cx, op, pn2->pn_cookie, atomIndex, cg);
2739 default:;
2746 * If the object operand is also a dotted property reference, reverse the
2747 * list linked via pn_expr temporarily so we can iterate over it from the
2748 * bottom up (reversing again as we go), to avoid excessive recursion.
2750 if (pn2->pn_type == TOK_DOT) {
2751 pndot = pn2;
2752 pnup = NULL;
2753 top = CG_OFFSET(cg);
2754 for (;;) {
2755 /* Reverse pndot->pn_expr to point up, not down. */
2756 pndot->pn_offset = top;
2757 JS_ASSERT(!pndot->pn_used);
2758 pndown = pndot->pn_expr;
2759 pndot->pn_expr = pnup;
2760 if (pndown->pn_type != TOK_DOT)
2761 break;
2762 pnup = pndot;
2763 pndot = pndown;
2766 /* pndown is a primary expression, not a dotted property reference. */
2767 if (!js_EmitTree(cx, cg, pndown))
2768 return JS_FALSE;
2770 do {
2771 /* Walk back up the list, emitting annotated name ops. */
2772 if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
2773 CG_OFFSET(cg) - pndown->pn_offset) < 0) {
2774 return JS_FALSE;
2778 * Special case deoptimization on __proto__, __count__ and
2779 * __parent__, as above.
2781 if (pndot->pn_arity == PN_NAME &&
2782 (pndot->pn_atom == cx->runtime->atomState.protoAtom ||
2783 pndot->pn_atom == cx->runtime->atomState.parentAtom ||
2784 pndot->pn_atom == cx->runtime->atomState.countAtom)) {
2785 if (!EmitSpecialPropOp(cx, pndot, JSOP_GETELEM, cg))
2786 return JS_FALSE;
2787 } else if (!EmitAtomOp(cx, pndot, PN_OP(pndot), cg)) {
2788 return JS_FALSE;
2791 /* Reverse the pn_expr link again. */
2792 pnup = pndot->pn_expr;
2793 pndot->pn_expr = pndown;
2794 pndown = pndot;
2795 } while ((pndot = pnup) != NULL);
2796 } else {
2797 if (!js_EmitTree(cx, cg, pn2))
2798 return JS_FALSE;
2801 if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
2802 CG_OFFSET(cg) - pn2->pn_offset) < 0) {
2803 return JS_FALSE;
2806 return EmitAtomOp(cx, pn, op, cg);
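/*
 * The pn_expr manipulation above is the classic in-place singly-linked
 * list reversal, performed once on the way down and undone on the way
 * back up so the tree is left intact. The same trick in isolation
 * (ExampleNode and ExampleReverse are hypothetical, for illustration):
 */
struct ExampleNode {
    ExampleNode *next;
};

static ExampleNode *
ExampleReverse(ExampleNode *head)
{
    ExampleNode *prev = NULL;
    while (head) {
        ExampleNode *down = head->next;  /* save the link we overwrite */
        head->next = prev;               /* point this node back up */
        prev = head;
        head = down;                     /* advance toward the old tail */
    }
    return prev;                         /* new head is the old tail */
}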
2809 static JSBool
2810 EmitElemOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
2812 ptrdiff_t top;
2813 JSParseNode *left, *right, *next, ltmp, rtmp;
2814 jsint slot;
2816 top = CG_OFFSET(cg);
2817 if (pn->pn_arity == PN_LIST) {
2818 /* Left-associative operator chain to avoid too much recursion. */
2819 JS_ASSERT(pn->pn_op == JSOP_GETELEM);
2820 JS_ASSERT(pn->pn_count >= 3);
2821 left = pn->pn_head;
2822 right = pn->last();
2823 next = left->pn_next;
2824 JS_ASSERT(next != right);
2827 * Try to optimize arguments[0][j]... into JSOP_ARGSUB<0> followed by
2828 * one or more index-expression/JSOP_GETELEM op pairs.
2830 if (left->pn_type == TOK_NAME && next->pn_type == TOK_NUMBER) {
2831 if (!BindNameToSlot(cx, cg, left))
2832 return JS_FALSE;
2833 if (left->pn_op == JSOP_ARGUMENTS &&
2834 JSDOUBLE_IS_INT(next->pn_dval, slot) &&
2835 (jsuint)slot < JS_BIT(16)) {
2837 * arguments[i]() requires the arguments object as "this".
2838 * Check that we never generate a list for that usage.
2840 JS_ASSERT(op != JSOP_CALLELEM || next->pn_next);
2841 left->pn_offset = next->pn_offset = top;
2842 EMIT_UINT16_IMM_OP(JSOP_ARGSUB, (jsatomid)slot);
2843 left = next;
2844 next = left->pn_next;
2849 * Check whether we generated JSOP_ARGSUB, just above, and have only
2850 * one more index expression to emit. Given arguments[0][j], we must
2851 * skip the while loop altogether, falling through to emit code for j
2852 * (in the subtree referenced by right), followed by the annotated op,
2853 * at the bottom of this function.
2855 JS_ASSERT(next != right || pn->pn_count == 3);
2856 if (left == pn->pn_head) {
2857 if (!js_EmitTree(cx, cg, left))
2858 return JS_FALSE;
2860 while (next != right) {
2861 if (!js_EmitTree(cx, cg, next))
2862 return JS_FALSE;
2863 if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
2864 return JS_FALSE;
2865 if (js_Emit1(cx, cg, JSOP_GETELEM) < 0)
2866 return JS_FALSE;
2867 next = next->pn_next;
2869 } else {
2870 if (pn->pn_arity == PN_NAME) {
2872 * Set left and right so pn appears to be a TOK_LB node, instead
2873 * of a TOK_DOT node. See the TOK_FOR/IN case in js_EmitTree, and
2874 * EmitDestructuringOps nearer below. In the destructuring case,
2875 * the base expression (pn_expr) of the name may be null, which
2876 * means we have to emit a JSOP_BINDNAME.
2878 left = pn->maybeExpr();
2879 if (!left) {
2880 left = &ltmp;
2881 left->pn_type = TOK_STRING;
2882 left->pn_op = JSOP_BINDNAME;
2883 left->pn_arity = PN_NULLARY;
2884 left->pn_pos = pn->pn_pos;
2885 left->pn_atom = pn->pn_atom;
2887 right = &rtmp;
2888 right->pn_type = TOK_STRING;
2889 JS_ASSERT(ATOM_IS_STRING(pn->pn_atom));
2890 right->pn_op = js_IsIdentifier(ATOM_TO_STRING(pn->pn_atom))
2891 ? JSOP_QNAMEPART
2892 : JSOP_STRING;
2893 right->pn_arity = PN_NULLARY;
2894 right->pn_pos = pn->pn_pos;
2895 right->pn_atom = pn->pn_atom;
2896 } else {
2897 JS_ASSERT(pn->pn_arity == PN_BINARY);
2898 left = pn->pn_left;
2899 right = pn->pn_right;
2902 /* Try to optimize arguments[0] (e.g.) into JSOP_ARGSUB<0>. */
2903 if (op == JSOP_GETELEM &&
2904 left->pn_type == TOK_NAME &&
2905 right->pn_type == TOK_NUMBER) {
2906 if (!BindNameToSlot(cx, cg, left))
2907 return JS_FALSE;
2908 if (left->pn_op == JSOP_ARGUMENTS &&
2909 JSDOUBLE_IS_INT(right->pn_dval, slot) &&
2910 (jsuint)slot < JS_BIT(16)) {
2911 left->pn_offset = right->pn_offset = top;
2912 EMIT_UINT16_IMM_OP(JSOP_ARGSUB, (jsatomid)slot);
2913 return JS_TRUE;
2917 if (!js_EmitTree(cx, cg, left))
2918 return JS_FALSE;
2921 /* The right side of the descendant operator is implicitly quoted. */
2922 JS_ASSERT(op != JSOP_DESCENDANTS || right->pn_type != TOK_STRING ||
2923 right->pn_op == JSOP_QNAMEPART);
2924 if (!js_EmitTree(cx, cg, right))
2925 return JS_FALSE;
2926 if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
2927 return JS_FALSE;
2928 return js_Emit1(cx, cg, op) >= 0;
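/*
 * Worked example for the JSOP_ARGSUB fast path above (illustrative): in
 *
 *     function g() { return arguments[0]; }
 *
 * the element expression has left == arguments (a TOK_NAME bound to
 * JSOP_ARGUMENTS) and right == 0 (a TOK_NUMBER), so instead of
 * JSOP_ARGUMENTS; JSOP_ZERO; JSOP_GETELEM we emit the single immediate op
 * JSOP_ARGSUB with operand 0. The guard, as a hypothetical predicate:
 */
static inline JSBool
ExampleFitsArgsubImmediate(jsdouble dval)
{
    jsint slot;
    /* JSDOUBLE_IS_INT also stores the integer value into slot. */
    return JSDOUBLE_IS_INT(dval, slot) && (jsuint)slot < JS_BIT(16);
}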
2931 static JSBool
2932 EmitNumberOp(JSContext *cx, jsdouble dval, JSCodeGenerator *cg)
2934 jsint ival;
2935 uint32 u;
2936 ptrdiff_t off;
2937 jsbytecode *pc;
2938 JSAtom *atom;
2939 JSAtomListElement *ale;
2941 if (JSDOUBLE_IS_INT(dval, ival) && INT_FITS_IN_JSVAL(ival)) {
2942 if (ival == 0)
2943 return js_Emit1(cx, cg, JSOP_ZERO) >= 0;
2944 if (ival == 1)
2945 return js_Emit1(cx, cg, JSOP_ONE) >= 0;
2946 if ((jsint)(int8)ival == ival)
2947 return js_Emit2(cx, cg, JSOP_INT8, (jsbytecode)(int8)ival) >= 0;
2949 u = (uint32)ival;
2950 if (u < JS_BIT(16)) {
2951 EMIT_UINT16_IMM_OP(JSOP_UINT16, u);
2952 } else if (u < JS_BIT(24)) {
2953 off = js_EmitN(cx, cg, JSOP_UINT24, 3);
2954 if (off < 0)
2955 return JS_FALSE;
2956 pc = CG_CODE(cg, off);
2957 SET_UINT24(pc, u);
2958 } else {
2959 off = js_EmitN(cx, cg, JSOP_INT32, 4);
2960 if (off < 0)
2961 return JS_FALSE;
2962 pc = CG_CODE(cg, off);
2963 SET_INT32(pc, ival);
2965 return JS_TRUE;
2968 atom = js_AtomizeDouble(cx, dval);
2969 if (!atom)
2970 return JS_FALSE;
2972 ale = cg->atomList.add(cg->compiler, atom);
2973 if (!ale)
2974 return JS_FALSE;
2975 return EmitIndexOp(cx, JSOP_DOUBLE, ALE_INDEX(ale), cg);
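/*
 * The ladder above picks the smallest encoding for a numeric literal. A
 * sketch of the immediate sizes it implies (byte counts exclude the opcode
 * byte and are illustrative assumptions; ExampleNumberImmediateSize is
 * hypothetical):
 */
static size_t
ExampleNumberImmediateSize(jsdouble dval)
{
    jsint ival;
    if (JSDOUBLE_IS_INT(dval, ival) && INT_FITS_IN_JSVAL(ival)) {
        if (ival == 0 || ival == 1)
            return 0;               /* JSOP_ZERO / JSOP_ONE, no operand */
        if ((jsint)(int8)ival == ival)
            return 1;               /* JSOP_INT8 */
        uint32 u = (uint32)ival;
        if (u < JS_BIT(16))
            return 2;               /* JSOP_UINT16 */
        if (u < JS_BIT(24))
            return 3;               /* JSOP_UINT24 */
        return 4;                   /* JSOP_INT32 */
    }
    return INDEX_LEN;               /* JSOP_DOUBLE: index of atomized double */
}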
2978 static JSBool
2979 EmitSwitch(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
2980 JSStmtInfo *stmtInfo)
2982 JSOp switchOp;
2983 JSBool ok, hasDefault, constPropagated;
2984 ptrdiff_t top, off, defaultOffset;
2985 JSParseNode *pn2, *pn3, *pn4;
2986 uint32 caseCount, tableLength;
2987 JSParseNode **table;
2988 jsdouble d;
2989 jsint i, low, high;
2990 jsval v;
2991 JSAtom *atom;
2992 JSAtomListElement *ale;
2993 intN noteIndex;
2994 size_t switchSize, tableSize;
2995 jsbytecode *pc, *savepc;
2996 #if JS_HAS_BLOCK_SCOPE
2997 jsint count;
2998 #endif
3000 /* Try for the most optimal op; fall back if the cases are not dense ints, per ECMAv2. */
3001 switchOp = JSOP_TABLESWITCH;
3002 ok = JS_TRUE;
3003 hasDefault = constPropagated = JS_FALSE;
3004 defaultOffset = -1;
3007 * If the switch contains let variables scoped by its body, model the
3008 * resulting block on the stack first, before emitting the discriminant's
3009 * bytecode (in case the discriminant contains a stack-model dependency
3010 * such as a let expression).
3012 pn2 = pn->pn_right;
3013 #if JS_HAS_BLOCK_SCOPE
3014 if (pn2->pn_type == TOK_LEXICALSCOPE) {
3016 * Push the body's block scope before discriminant code-gen for proper
3017 * static block scope linkage in case the discriminant contains a let
3018 * expression. The block's locals must lie under the discriminant on
3019 * the stack so that case-dispatch bytecodes can find the discriminant
3020 * on top of stack.
3022 count = OBJ_BLOCK_COUNT(cx, pn2->pn_objbox->object);
3023 js_PushBlockScope(cg, stmtInfo, pn2->pn_objbox->object, -1);
3024 stmtInfo->type = STMT_SWITCH;
3026 /* Emit JSOP_ENTERBLOCK before code to evaluate the discriminant. */
3027 if (!EmitEnterBlock(cx, pn2, cg))
3028 return JS_FALSE;
3031 * Pop the switch's statement info around discriminant code-gen. Note
3032 * how this leaves cg->blockChain referencing the switch's
3033 * block scope object, which is necessary for correct block parenting
3034 * in the case where the discriminant contains a let expression.
3036 cg->topStmt = stmtInfo->down;
3037 cg->topScopeStmt = stmtInfo->downScope;
3039 #ifdef __GNUC__
3040 else {
3041 count = 0;
3043 #endif
3044 #endif
3047 * Emit code for the discriminant first (or nearly first, in the case of a
3048 * switch whose body is a block scope).
3050 if (!js_EmitTree(cx, cg, pn->pn_left))
3051 return JS_FALSE;
3053 /* Switch bytecodes run from here till end of final case. */
3054 top = CG_OFFSET(cg);
3055 #if !JS_HAS_BLOCK_SCOPE
3056 js_PushStatement(cg, stmtInfo, STMT_SWITCH, top);
3057 #else
3058 if (pn2->pn_type == TOK_LC) {
3059 js_PushStatement(cg, stmtInfo, STMT_SWITCH, top);
3060 } else {
3061 /* Re-push the switch's statement info record. */
3062 cg->topStmt = cg->topScopeStmt = stmtInfo;
3064 /* Set the statement info record's idea of top. */
3065 stmtInfo->update = top;
3067 /* Advance pn2 to refer to the switch case list. */
3068 pn2 = pn2->expr();
3070 #endif
3072 caseCount = pn2->pn_count;
3073 tableLength = 0;
3074 table = NULL;
3076 if (caseCount == 0 ||
3077 (caseCount == 1 &&
3078 (hasDefault = (pn2->pn_head->pn_type == TOK_DEFAULT)))) {
3079 caseCount = 0;
3080 low = 0;
3081 high = -1;
3082 } else {
3083 #define INTMAP_LENGTH 256
3084 jsbitmap intmap_space[INTMAP_LENGTH];
3085 jsbitmap *intmap = NULL;
3086 int32 intmap_bitlen = 0;
3088 low = JSVAL_INT_MAX;
3089 high = JSVAL_INT_MIN;
3091 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3092 if (pn3->pn_type == TOK_DEFAULT) {
3093 hasDefault = JS_TRUE;
3094 caseCount--; /* one of the "cases" was the default */
3095 continue;
3098 JS_ASSERT(pn3->pn_type == TOK_CASE);
3099 if (switchOp == JSOP_CONDSWITCH)
3100 continue;
3102 pn4 = pn3->pn_left;
3103 while (pn4->pn_type == TOK_RP)
3104 pn4 = pn4->pn_kid;
3105 switch (pn4->pn_type) {
3106 case TOK_NUMBER:
3107 d = pn4->pn_dval;
3108 if (JSDOUBLE_IS_INT(d, i) && INT_FITS_IN_JSVAL(i)) {
3109 pn3->pn_val = INT_TO_JSVAL(i);
3110 } else {
3111 atom = js_AtomizeDouble(cx, d);
3112 if (!atom) {
3113 ok = JS_FALSE;
3114 goto release;
3116 pn3->pn_val = ATOM_KEY(atom);
3118 break;
3119 case TOK_STRING:
3120 pn3->pn_val = ATOM_KEY(pn4->pn_atom);
3121 break;
3122 case TOK_NAME:
3123 if (!pn4->maybeExpr()) {
3124 ok = LookupCompileTimeConstant(cx, cg, pn4->pn_atom, &v);
3125 if (!ok)
3126 goto release;
3127 if (v != JSVAL_HOLE) {
3128 if (!JSVAL_IS_PRIMITIVE(v)) {
3130 * XXX JSOP_LOOKUPSWITCH does not support const-
3131 * propagated object values, see bug 407186.
3133 switchOp = JSOP_CONDSWITCH;
3134 continue;
3136 pn3->pn_val = v;
3137 constPropagated = JS_TRUE;
3138 break;
3141 /* FALL THROUGH */
3142 case TOK_PRIMARY:
3143 if (pn4->pn_op == JSOP_TRUE) {
3144 pn3->pn_val = JSVAL_TRUE;
3145 break;
3147 if (pn4->pn_op == JSOP_FALSE) {
3148 pn3->pn_val = JSVAL_FALSE;
3149 break;
3151 /* FALL THROUGH */
3152 default:
3153 switchOp = JSOP_CONDSWITCH;
3154 continue;
3157 JS_ASSERT(JSVAL_IS_PRIMITIVE(pn3->pn_val));
3159 if (switchOp != JSOP_TABLESWITCH)
3160 continue;
3161 if (!JSVAL_IS_INT(pn3->pn_val)) {
3162 switchOp = JSOP_LOOKUPSWITCH;
3163 continue;
3165 i = JSVAL_TO_INT(pn3->pn_val);
3166 if ((jsuint)(i + (jsint)JS_BIT(15)) >= (jsuint)JS_BIT(16)) {
3167 switchOp = JSOP_LOOKUPSWITCH;
3168 continue;
3170 if (i < low)
3171 low = i;
3172 if (high < i)
3173 high = i;
3176 * Check for duplicates, which require a JSOP_LOOKUPSWITCH.
3177 * We bias i by 65536 if it's negative, and hope that's a rare
3178 * case (because it requires a malloc'd bitmap).
3180 if (i < 0)
3181 i += JS_BIT(16);
3182 if (i >= intmap_bitlen) {
3183 if (!intmap &&
3184 i < (INTMAP_LENGTH << JS_BITS_PER_WORD_LOG2)) {
3185 intmap = intmap_space;
3186 intmap_bitlen = INTMAP_LENGTH << JS_BITS_PER_WORD_LOG2;
3187 } else {
3188 /* Just grab 8K for the worst-case bitmap. */
3189 intmap_bitlen = JS_BIT(16);
3190 intmap = (jsbitmap *)
3191 cx->malloc((JS_BIT(16) >> JS_BITS_PER_WORD_LOG2)
3192 * sizeof(jsbitmap));
3193 if (!intmap) {
3194 JS_ReportOutOfMemory(cx);
3195 return JS_FALSE;
3198 memset(intmap, 0, intmap_bitlen >> JS_BITS_PER_BYTE_LOG2);
3200 if (JS_TEST_BIT(intmap, i)) {
3201 switchOp = JSOP_LOOKUPSWITCH;
3202 continue;
3204 JS_SET_BIT(intmap, i);
3207 release:
3208 if (intmap && intmap != intmap_space)
3209 cx->free(intmap);
3210 if (!ok)
3211 return JS_FALSE;
3214 * Compute table length and select lookup instead if overlarge or
3215 * more than half-sparse.
3217 if (switchOp == JSOP_TABLESWITCH) {
3218 tableLength = (uint32)(high - low + 1);
3219 if (tableLength >= JS_BIT(16) || tableLength > 2 * caseCount)
3220 switchOp = JSOP_LOOKUPSWITCH;
3221 } else if (switchOp == JSOP_LOOKUPSWITCH) {
3223 * Lookup switch supports only atom indexes below the 64K limit.
3224 * Conservatively estimate the maximum possible index during
3225 * switch generation and use conditional switch if it exceeds
3226 * the limit.
3228 if (caseCount + cg->atomList.count > JS_BIT(16))
3229 switchOp = JSOP_CONDSWITCH;
3234 * Emit a note with two offsets: first tells total switch code length,
3235 * second tells offset to first JSOP_CASE if condswitch.
3237 noteIndex = js_NewSrcNote3(cx, cg, SRC_SWITCH, 0, 0);
3238 if (noteIndex < 0)
3239 return JS_FALSE;
3241 if (switchOp == JSOP_CONDSWITCH) {
3243 * 0 bytes of immediate for unoptimized ECMAv2 switch.
3245 switchSize = 0;
3246 } else if (switchOp == JSOP_TABLESWITCH) {
3248 * 3 offsets (len, low, high) before the table, 1 per entry.
3250 switchSize = (size_t)(JUMP_OFFSET_LEN * (3 + tableLength));
3251 } else {
3253 * JSOP_LOOKUPSWITCH:
3254 * 1 offset (len) and 1 atom index (npairs) before the table,
3255 * 1 atom index and 1 jump offset per entry.
3257 switchSize = (size_t)(JUMP_OFFSET_LEN + INDEX_LEN +
3258 (INDEX_LEN + JUMP_OFFSET_LEN) * caseCount);
3262 * Emit switchOp followed by switchSize bytes of jump or lookup table.
3264 * If switchOp is JSOP_LOOKUPSWITCH or JSOP_TABLESWITCH, it is crucial
3265 * to emit the immediate operand(s) by which bytecode readers such as
3266 * BuildSpanDepTable discover the length of the switch opcode *before*
3267 * calling js_SetJumpOffset (which may call BuildSpanDepTable). It's
3268 * also important to zero all unknown jump offset immediate operands,
3269 * so they can be converted to span dependencies with null targets to
3270 * be computed later (js_EmitN zeros switchSize bytes after switchOp).
3272 if (js_EmitN(cx, cg, switchOp, switchSize) < 0)
3273 return JS_FALSE;
3275 off = -1;
3276 if (switchOp == JSOP_CONDSWITCH) {
3277 intN caseNoteIndex = -1;
3278 JSBool beforeCases = JS_TRUE;
3280 /* Emit code for evaluating cases and jumping to case statements. */
3281 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3282 pn4 = pn3->pn_left;
3283 if (pn4 && !js_EmitTree(cx, cg, pn4))
3284 return JS_FALSE;
3285 if (caseNoteIndex >= 0) {
3286 /* off is the previous JSOP_CASE's bytecode offset. */
3287 if (!js_SetSrcNoteOffset(cx, cg, (uintN)caseNoteIndex, 0,
3288 CG_OFFSET(cg) - off)) {
3289 return JS_FALSE;
3292 if (!pn4) {
3293 JS_ASSERT(pn3->pn_type == TOK_DEFAULT);
3294 continue;
3296 caseNoteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
3297 if (caseNoteIndex < 0)
3298 return JS_FALSE;
3299 off = EmitJump(cx, cg, JSOP_CASE, 0);
3300 if (off < 0)
3301 return JS_FALSE;
3302 pn3->pn_offset = off;
3303 if (beforeCases) {
3304 uintN noteCount, noteCountDelta;
3306 /* Switch note's second offset is to first JSOP_CASE. */
3307 noteCount = CG_NOTE_COUNT(cg);
3308 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 1,
3309 off - top)) {
3310 return JS_FALSE;
3312 noteCountDelta = CG_NOTE_COUNT(cg) - noteCount;
3313 if (noteCountDelta != 0)
3314 caseNoteIndex += noteCountDelta;
3315 beforeCases = JS_FALSE;
3320 * If we didn't have an explicit default (which could fall in between
3321 * cases, preventing us from fusing this js_SetSrcNoteOffset with the
3322 * call in the loop above), link the last case to the implicit default
3323 * for the decompiler.
3325 if (!hasDefault &&
3326 caseNoteIndex >= 0 &&
3327 !js_SetSrcNoteOffset(cx, cg, (uintN)caseNoteIndex, 0,
3328 CG_OFFSET(cg) - off)) {
3329 return JS_FALSE;
3332 /* Emit default even if no explicit default statement. */
3333 defaultOffset = EmitJump(cx, cg, JSOP_DEFAULT, 0);
3334 if (defaultOffset < 0)
3335 return JS_FALSE;
3336 } else {
3337 pc = CG_CODE(cg, top + JUMP_OFFSET_LEN);
3339 if (switchOp == JSOP_TABLESWITCH) {
3340 /* Fill in switch bounds, which we know fit in 16-bit offsets. */
3341 SET_JUMP_OFFSET(pc, low);
3342 pc += JUMP_OFFSET_LEN;
3343 SET_JUMP_OFFSET(pc, high);
3344 pc += JUMP_OFFSET_LEN;
3347 * Use malloc to avoid arena bloat for programs with many switches.
3348 * We free table if non-null at label out, so all control flow must
3349 * exit this function through goto out or goto bad.
3351 if (tableLength != 0) {
3352 tableSize = (size_t)tableLength * sizeof *table;
3353 table = (JSParseNode **) cx->malloc(tableSize);
3354 if (!table)
3355 return JS_FALSE;
3356 memset(table, 0, tableSize);
3357 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3358 if (pn3->pn_type == TOK_DEFAULT)
3359 continue;
3360 i = JSVAL_TO_INT(pn3->pn_val);
3361 i -= low;
3362 JS_ASSERT((uint32)i < tableLength);
3363 table[i] = pn3;
3366 } else {
3367 JS_ASSERT(switchOp == JSOP_LOOKUPSWITCH);
3369 /* Fill in the number of cases. */
3370 SET_INDEX(pc, caseCount);
3371 pc += INDEX_LEN;
3375 * After this point, all control flow involving JSOP_TABLESWITCH
3376 * must set ok and goto out to exit this function. To keep things
3377 * simple, all switchOp cases exit that way.
3379 MUST_FLOW_THROUGH("out");
3380 if (cg->spanDeps) {
3382 * We have already generated at least one big jump so we must
3383 * explicitly add span dependencies for the switch jumps. When
3384 * called below, js_SetJumpOffset can only do it when patching
3385 * the first big jump or when cg->spanDeps is null.
3387 if (!AddSwitchSpanDeps(cx, cg, CG_CODE(cg, top)))
3388 goto bad;
3391 if (constPropagated) {
3393 * Skip switchOp, as we are not setting jump offsets in the two
3394 * for loops below. We'll restore CG_NEXT(cg) from savepc after,
3395 * unless there was an error.
3397 savepc = CG_NEXT(cg);
3398 CG_NEXT(cg) = pc + 1;
3399 if (switchOp == JSOP_TABLESWITCH) {
3400 for (i = 0; i < (jsint)tableLength; i++) {
3401 pn3 = table[i];
3402 if (pn3 &&
3403 (pn4 = pn3->pn_left) != NULL &&
3404 pn4->pn_type == TOK_NAME) {
3405 /* Note a propagated constant with the const's name. */
3406 JS_ASSERT(!pn4->maybeExpr());
3407 ale = cg->atomList.add(cg->compiler, pn4->pn_atom);
3408 if (!ale)
3409 goto bad;
3410 CG_NEXT(cg) = pc;
3411 if (js_NewSrcNote2(cx, cg, SRC_LABEL, (ptrdiff_t)
3412 ALE_INDEX(ale)) < 0) {
3413 goto bad;
3416 pc += JUMP_OFFSET_LEN;
3418 } else {
3419 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3420 pn4 = pn3->pn_left;
3421 if (pn4 && pn4->pn_type == TOK_NAME) {
3422 /* Note a propagated constant with the const's name. */
3423 JS_ASSERT(!pn4->maybeExpr());
3424 ale = cg->atomList.add(cg->compiler, pn4->pn_atom);
3425 if (!ale)
3426 goto bad;
3427 CG_NEXT(cg) = pc;
3428 if (js_NewSrcNote2(cx, cg, SRC_LABEL, (ptrdiff_t)
3429 ALE_INDEX(ale)) < 0) {
3430 goto bad;
3433 pc += INDEX_LEN + JUMP_OFFSET_LEN;
3436 CG_NEXT(cg) = savepc;
3440 /* Emit code for each case's statements, copying pn_offset up to pn3. */
3441 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3442 if (switchOp == JSOP_CONDSWITCH && pn3->pn_type != TOK_DEFAULT)
3443 CHECK_AND_SET_JUMP_OFFSET_AT_CUSTOM(cx, cg, pn3->pn_offset, goto bad);
3444 pn4 = pn3->pn_right;
3445 ok = js_EmitTree(cx, cg, pn4);
3446 if (!ok)
3447 goto out;
3448 pn3->pn_offset = pn4->pn_offset;
3449 if (pn3->pn_type == TOK_DEFAULT)
3450 off = pn3->pn_offset - top;
3453 if (!hasDefault) {
3454 /* If no default case, offset for default is to end of switch. */
3455 off = CG_OFFSET(cg) - top;
3458 /* We had better have set "off" by now. */
3459 JS_ASSERT(off != -1);
3461 /* Set the default offset (to end of switch if no default). */
3462 if (switchOp == JSOP_CONDSWITCH) {
3463 pc = NULL;
3464 JS_ASSERT(defaultOffset != -1);
3465 ok = js_SetJumpOffset(cx, cg, CG_CODE(cg, defaultOffset),
3466 off - (defaultOffset - top));
3467 if (!ok)
3468 goto out;
3469 } else {
3470 pc = CG_CODE(cg, top);
3471 ok = js_SetJumpOffset(cx, cg, pc, off);
3472 if (!ok)
3473 goto out;
3474 pc += JUMP_OFFSET_LEN;
3477 /* Set the SRC_SWITCH note's offset operand to tell end of switch. */
3478 off = CG_OFFSET(cg) - top;
3479 ok = js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, off);
3480 if (!ok)
3481 goto out;
3483 if (switchOp == JSOP_TABLESWITCH) {
3484 /* Skip over the already-initialized switch bounds. */
3485 pc += 2 * JUMP_OFFSET_LEN;
3487 /* Fill in the jump table, if there is one. */
3488 for (i = 0; i < (jsint)tableLength; i++) {
3489 pn3 = table[i];
3490 off = pn3 ? pn3->pn_offset - top : 0;
3491 ok = js_SetJumpOffset(cx, cg, pc, off);
3492 if (!ok)
3493 goto out;
3494 pc += JUMP_OFFSET_LEN;
3496 } else if (switchOp == JSOP_LOOKUPSWITCH) {
3497 /* Skip over the already-initialized number of cases. */
3498 pc += INDEX_LEN;
3500 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3501 if (pn3->pn_type == TOK_DEFAULT)
3502 continue;
3503 if (!js_AtomizePrimitiveValue(cx, pn3->pn_val, &atom))
3504 goto bad;
3505 ale = cg->atomList.add(cg->compiler, atom);
3506 if (!ale)
3507 goto bad;
3508 SET_INDEX(pc, ALE_INDEX(ale));
3509 pc += INDEX_LEN;
3511 off = pn3->pn_offset - top;
3512 ok = js_SetJumpOffset(cx, cg, pc, off);
3513 if (!ok)
3514 goto out;
3515 pc += JUMP_OFFSET_LEN;
3519 out:
3520 if (table)
3521 cx->free(table);
3522 if (ok) {
3523 ok = js_PopStatementCG(cx, cg);
3525 #if JS_HAS_BLOCK_SCOPE
3526 if (ok && pn->pn_right->pn_type == TOK_LEXICALSCOPE)
3527 EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK, count);
3528 #endif
3530 return ok;
3532 bad:
3533 ok = JS_FALSE;
3534 goto out;
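/*
 * The opcode-selection policy implemented above, condensed into one
 * predicate (an illustrative sketch; ExampleChooseSwitchOp is
 * hypothetical): a table switch needs all-constant integer cases, a span
 * under 64K, and at most half-sparse density; other constant cases get a
 * lookup switch; non-constant cases force JSOP_CONDSWITCH.
 */
static JSOp
ExampleChooseSwitchOp(JSBool allCasesConst, JSBool allCasesInt,
                      jsint low, jsint high, uint32 caseCount)
{
    if (!allCasesConst)
        return JSOP_CONDSWITCH;
    if (allCasesInt) {
        uint32 tableLength = (uint32)(high - low + 1);
        if (tableLength < JS_BIT(16) && tableLength <= 2 * caseCount)
            return JSOP_TABLESWITCH;
    }
    return JSOP_LOOKUPSWITCH;
}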
3537 JSBool
3538 js_EmitFunctionScript(JSContext *cx, JSCodeGenerator *cg, JSParseNode *body)
3540 if (cg->flags & TCF_FUN_IS_GENERATOR) {
3541 /* JSOP_GENERATOR must be the first instruction. */
3542 CG_SWITCH_TO_PROLOG(cg);
3543 JS_ASSERT(CG_NEXT(cg) == CG_BASE(cg));
3544 if (js_Emit1(cx, cg, JSOP_GENERATOR) < 0)
3545 return JS_FALSE;
3546 CG_SWITCH_TO_MAIN(cg);
3547 } else {
3549 * Emit a trace hint opcode only if not in a generator, since generators
3550 * are not yet traced and JSOP_TRACE, like JSOP_GENERATOR, must be first.
3552 if (js_Emit1(cx, cg, JSOP_TRACE) < 0)
3553 return JS_FALSE;
3556 return js_EmitTree(cx, cg, body) &&
3557 js_Emit1(cx, cg, JSOP_STOP) >= 0 &&
3558 js_NewScriptFromCG(cx, cg);
3561 /* A macro for inlining at the top of js_EmitTree (whence it came). */
3562 #define UPDATE_LINE_NUMBER_NOTES(cx, cg, line) \
3563 JS_BEGIN_MACRO \
3564 uintN line_ = (line); \
3565 uintN delta_ = line_ - CG_CURRENT_LINE(cg); \
3566 if (delta_ != 0) { \
3567 /* \
3568 * Encode any change in the current source line number by using \
3569 * either several SRC_NEWLINE notes or just one SRC_SETLINE note, \
3570 * whichever consumes less space. \
3572 * NB: We handle backward line number deltas (possible with for \
3573 * loops where the update part is emitted after the body, but its \
3574 * line number is <= any line number in the body) here by letting \
3575 * unsigned delta_ wrap to a very large number, which triggers a \
3576 * SRC_SETLINE. \
3577 */ \
3578 CG_CURRENT_LINE(cg) = line_; \
3579 if (delta_ >= (uintN)(2 + ((line_ > SN_3BYTE_OFFSET_MASK)<<1))) { \
3580 if (js_NewSrcNote2(cx, cg, SRC_SETLINE, (ptrdiff_t)line_) < 0)\
3581 return JS_FALSE; \
3582 } else { \
3583 do { \
3584 if (js_NewSrcNote(cx, cg, SRC_NEWLINE) < 0) \
3585 return JS_FALSE; \
3586 } while (--delta_ != 0); \
3589 JS_END_MACRO
3591 /* A function, so that we avoid macro-bloating all the other callsites. */
3592 static JSBool
3593 UpdateLineNumberNotes(JSContext *cx, JSCodeGenerator *cg, uintN line)
3595 UPDATE_LINE_NUMBER_NOTES(cx, cg, line);
3596 return JS_TRUE;
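/*
 * The macro's threshold compares encoding costs: delta SRC_NEWLINE notes
 * versus one SRC_SETLINE note plus its line operand (reading the
 * expression as "2 normally, 4 when the line number needs an extended
 * offset" -- an illustrative interpretation, not authoritative).
 * ExamplePreferSetline is a hypothetical restatement:
 */
static JSBool
ExamplePreferSetline(uintN delta, uintN line)
{
    uintN setlineCost = 2 + ((line > SN_3BYTE_OFFSET_MASK) << 1);
    return delta >= setlineCost;   /* a SRC_NEWLINE run would be no smaller */
}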
3599 static JSBool
3600 MaybeEmitVarDecl(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3601 JSParseNode *pn, jsatomid *result)
3603 jsatomid atomIndex;
3604 JSAtomListElement *ale;
3606 if (pn->pn_cookie != FREE_UPVAR_COOKIE) {
3607 atomIndex = (jsatomid) UPVAR_FRAME_SLOT(pn->pn_cookie);
3608 } else {
3609 ale = cg->atomList.add(cg->compiler, pn->pn_atom);
3610 if (!ale)
3611 return JS_FALSE;
3612 atomIndex = ALE_INDEX(ale);
3615 if (JOF_OPTYPE(pn->pn_op) == JOF_ATOM &&
3616 (!(cg->flags & TCF_IN_FUNCTION) || (cg->flags & TCF_FUN_HEAVYWEIGHT))) {
3617 CG_SWITCH_TO_PROLOG(cg);
3618 if (!UpdateLineNumberNotes(cx, cg, pn->pn_pos.begin.lineno))
3619 return JS_FALSE;
3620 EMIT_INDEX_OP(prologOp, atomIndex);
3621 CG_SWITCH_TO_MAIN(cg);
3624 if (result)
3625 *result = atomIndex;
3626 return JS_TRUE;
3629 #if JS_HAS_DESTRUCTURING
3631 typedef JSBool
3632 (*DestructuringDeclEmitter)(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3633 JSParseNode *pn);
3635 static JSBool
3636 EmitDestructuringDecl(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3637 JSParseNode *pn)
3639 JS_ASSERT(pn->pn_type == TOK_NAME);
3640 if (!BindNameToSlot(cx, cg, pn))
3641 return JS_FALSE;
3643 JS_ASSERT(PN_OP(pn) != JSOP_ARGUMENTS && PN_OP(pn) != JSOP_CALLEE);
3644 return MaybeEmitVarDecl(cx, cg, prologOp, pn, NULL);
3647 static JSBool
3648 EmitDestructuringDecls(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3649 JSParseNode *pn)
3651 JSParseNode *pn2, *pn3;
3652 DestructuringDeclEmitter emitter;
3654 if (pn->pn_type == TOK_RB) {
3655 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
3656 if (pn2->pn_type == TOK_COMMA)
3657 continue;
3658 emitter = (pn2->pn_type == TOK_NAME)
3659 ? EmitDestructuringDecl
3660 : EmitDestructuringDecls;
3661 if (!emitter(cx, cg, prologOp, pn2))
3662 return JS_FALSE;
3664 } else {
3665 JS_ASSERT(pn->pn_type == TOK_RC);
3666 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
3667 pn3 = pn2->pn_right;
3668 emitter = (pn3->pn_type == TOK_NAME)
3669 ? EmitDestructuringDecl
3670 : EmitDestructuringDecls;
3671 if (!emitter(cx, cg, prologOp, pn3))
3672 return JS_FALSE;
3675 return JS_TRUE;
3678 static JSBool
3679 EmitDestructuringOpsHelper(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn);
3681 static JSBool
3682 EmitDestructuringLHS(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
3684 jsuint slot;
3687 * Now emit the lvalue opcode sequence. If the lvalue is a nested
3688 * destructuring initialiser-form, call ourselves to handle it, then
3689 * pop the matched value. Otherwise emit an lvalue bytecode sequence
3690 * ending with a JSOP_ENUMELEM or equivalent op.
3692 if (pn->pn_type == TOK_RB || pn->pn_type == TOK_RC) {
3693 if (!EmitDestructuringOpsHelper(cx, cg, pn))
3694 return JS_FALSE;
3695 if (js_Emit1(cx, cg, JSOP_POP) < 0)
3696 return JS_FALSE;
3697 } else {
3698 if (pn->pn_type == TOK_NAME) {
3699 if (!BindNameToSlot(cx, cg, pn))
3700 return JS_FALSE;
3701 if (pn->isConst() && !pn->isInitialized())
3702 return js_Emit1(cx, cg, JSOP_POP) >= 0;
3705 switch (pn->pn_op) {
3706 case JSOP_SETNAME:
3708 * NB: pn is a PN_NAME node, not a PN_BINARY. Nevertheless,
3709 * we want to emit JSOP_ENUMELEM, which has format JOF_ELEM.
3710 * So here and for JSOP_ENUMCONSTELEM, we use EmitElemOp.
3712 if (!EmitElemOp(cx, pn, JSOP_ENUMELEM, cg))
3713 return JS_FALSE;
3714 break;
3716 case JSOP_SETCONST:
3717 if (!EmitElemOp(cx, pn, JSOP_ENUMCONSTELEM, cg))
3718 return JS_FALSE;
3719 break;
3721 case JSOP_SETLOCAL:
3722 slot = (jsuint) pn->pn_cookie;
3723 EMIT_UINT16_IMM_OP(JSOP_SETLOCALPOP, slot);
3724 break;
3726 case JSOP_SETARG:
3727 case JSOP_SETGVAR:
3728 slot = (jsuint) pn->pn_cookie;
3729 EMIT_UINT16_IMM_OP(PN_OP(pn), slot);
3730 if (js_Emit1(cx, cg, JSOP_POP) < 0)
3731 return JS_FALSE;
3732 break;
3734 default:
3736 ptrdiff_t top;
3738 top = CG_OFFSET(cg);
3739 if (!js_EmitTree(cx, cg, pn))
3740 return JS_FALSE;
3741 if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
3742 return JS_FALSE;
3743 if (js_Emit1(cx, cg, JSOP_ENUMELEM) < 0)
3744 return JS_FALSE;
3745 break;
3748 case JSOP_ENUMELEM:
3749 JS_ASSERT(0);
3753 return JS_TRUE;
3757 * Recursive helper for EmitDestructuringOps.
3759 * Given a value to destructure on the stack, walk over an object or array
3760 * initialiser at pn, emitting bytecodes to match property values and store
3761 * them in the lvalues identified by the matched property names.
3763 static JSBool
3764 EmitDestructuringOpsHelper(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
3766 jsuint index;
3767 JSParseNode *pn2, *pn3;
3768 JSBool doElemOp;
3770 #ifdef DEBUG
3771 intN stackDepth = cg->stackDepth;
3772 JS_ASSERT(stackDepth != 0);
3773 JS_ASSERT(pn->pn_arity == PN_LIST);
3774 JS_ASSERT(pn->pn_type == TOK_RB || pn->pn_type == TOK_RC);
3775 #endif
3777 if (pn->pn_count == 0) {
3778 /* Emit a DUP;POP sequence for the decompiler. */
3779 return js_Emit1(cx, cg, JSOP_DUP) >= 0 &&
3780 js_Emit1(cx, cg, JSOP_POP) >= 0;
3783 index = 0;
3784 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
3786 * Duplicate the value being destructured to use as a reference base.
3787 * If dup is not the first one, annotate it for the decompiler.
3789 if (pn2 != pn->pn_head && js_NewSrcNote(cx, cg, SRC_CONTINUE) < 0)
3790 return JS_FALSE;
3791 if (js_Emit1(cx, cg, JSOP_DUP) < 0)
3792 return JS_FALSE;
3795 * Now push the property name currently being matched, which is either
3796 * the array initialiser's current index, or the current property name
3797 * "label" on the left of a colon in the object initialiser. Set pn3
3798 * to the lvalue node, which is in the value-initializing position.
3800 doElemOp = JS_TRUE;
3801 if (pn->pn_type == TOK_RB) {
3802 if (!EmitNumberOp(cx, index, cg))
3803 return JS_FALSE;
3804 pn3 = pn2;
3805 } else {
3806 JS_ASSERT(pn->pn_type == TOK_RC);
3807 JS_ASSERT(pn2->pn_type == TOK_COLON);
3808 pn3 = pn2->pn_left;
3809 if (pn3->pn_type == TOK_NUMBER) {
3811 * If we are emitting an object destructuring initialiser,
3812 * annotate the index op with SRC_INITPROP so we know we are
3813 * not decompiling an array initialiser.
3815 if (js_NewSrcNote(cx, cg, SRC_INITPROP) < 0)
3816 return JS_FALSE;
3817 if (!EmitNumberOp(cx, pn3->pn_dval, cg))
3818 return JS_FALSE;
3819 } else {
3820 JS_ASSERT(pn3->pn_type == TOK_STRING ||
3821 pn3->pn_type == TOK_NAME);
3822 if (!EmitAtomOp(cx, pn3, JSOP_GETPROP, cg))
3823 return JS_FALSE;
3824 doElemOp = JS_FALSE;
3826 pn3 = pn2->pn_right;
3829 if (doElemOp) {
3831 * Ok, get the value of the matching property name. This leaves
3832 * that value on top of the value being destructured, so the stack
3833 * is one deeper than when we started.
3835 if (js_Emit1(cx, cg, JSOP_GETELEM) < 0)
3836 return JS_FALSE;
3837 JS_ASSERT(cg->stackDepth == stackDepth + 1);
3840 /* Nullary comma node makes a hole in the array destructurer. */
3841 if (pn3->pn_type == TOK_COMMA && pn3->pn_arity == PN_NULLARY) {
3842 JS_ASSERT(pn->pn_type == TOK_RB);
3843 JS_ASSERT(pn2 == pn3);
3844 if (js_Emit1(cx, cg, JSOP_POP) < 0)
3845 return JS_FALSE;
3846 } else {
3847 if (!EmitDestructuringLHS(cx, cg, pn3))
3848 return JS_FALSE;
3851 JS_ASSERT(cg->stackDepth == stackDepth);
3852 ++index;
3855 return JS_TRUE;
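/*
 * Worked example for the helper above (illustrative): with the value of
 * rhs already on the stack,
 *
 *     var [x, y] = rhs;
 *
 * emits, per element, approximately:
 *
 *     JSOP_DUP        dup rhs to use as the reference base
 *     JSOP_ZERO       push the current index (then JSOP_ONE, ...)
 *     JSOP_GETELEM    fetch rhs[index]; stack is one deeper here
 *     (lvalue store)  e.g. JSOP_SETLOCALPOP for a local x
 *
 * so the stack depth is unchanged after each element, as asserted.
 */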
3858 static ptrdiff_t
3859 OpToDeclType(JSOp op)
3861 switch (op) {
3862 case JSOP_NOP:
3863 return SRC_DECL_LET;
3864 case JSOP_DEFCONST:
3865 return SRC_DECL_CONST;
3866 case JSOP_DEFVAR:
3867 return SRC_DECL_VAR;
3868 default:
3869 return SRC_DECL_NONE;
3873 static JSBool
3874 EmitDestructuringOps(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3875 JSParseNode *pn)
3878 * If we're called from a variable declaration, help the decompiler by
3879 * annotating the first JSOP_DUP that EmitDestructuringOpsHelper emits.
3880 * If the destructuring initialiser is empty, our helper will emit a
3881 * JSOP_DUP followed by a JSOP_POP for the decompiler.
3883 if (js_NewSrcNote2(cx, cg, SRC_DESTRUCT, OpToDeclType(prologOp)) < 0)
3884 return JS_FALSE;
3887 * Call our recursive helper to emit the destructuring assignments and
3888 * related stack manipulations.
3890 return EmitDestructuringOpsHelper(cx, cg, pn);
3893 static JSBool
3894 EmitGroupAssignment(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3895 JSParseNode *lhs, JSParseNode *rhs)
3897 jsuint depth, limit, i, nslots;
3898 JSParseNode *pn;
3900 depth = limit = (uintN) cg->stackDepth;
3901 for (pn = rhs->pn_head; pn; pn = pn->pn_next) {
3902 if (limit == JS_BIT(16)) {
3903 js_ReportCompileErrorNumber(cx, CG_TS(cg), rhs, JSREPORT_ERROR,
3904 JSMSG_ARRAY_INIT_TOO_BIG);
3905 return JS_FALSE;
3908 /* MaybeEmitGroupAssignment won't call us if rhs is holey. */
3909 JS_ASSERT(!(pn->pn_type == TOK_COMMA && pn->pn_arity == PN_NULLARY));
3910 if (!js_EmitTree(cx, cg, pn))
3911 return JS_FALSE;
3912 ++limit;
3915 if (js_NewSrcNote2(cx, cg, SRC_GROUPASSIGN, OpToDeclType(prologOp)) < 0)
3916 return JS_FALSE;
3918 i = depth;
3919 for (pn = lhs->pn_head; pn; pn = pn->pn_next, ++i) {
3920 /* MaybeEmitGroupAssignment requires lhs->pn_count <= rhs->pn_count. */
3921 JS_ASSERT(i < limit);
3922 jsint slot = AdjustBlockSlot(cx, cg, i);
3923 if (slot < 0)
3924 return JS_FALSE;
3925 EMIT_UINT16_IMM_OP(JSOP_GETLOCAL, slot);
3927 if (pn->pn_type == TOK_COMMA && pn->pn_arity == PN_NULLARY) {
3928 if (js_Emit1(cx, cg, JSOP_POP) < 0)
3929 return JS_FALSE;
3930 } else {
3931 if (!EmitDestructuringLHS(cx, cg, pn))
3932 return JS_FALSE;
3936 nslots = limit - depth;
3937 EMIT_UINT16_IMM_OP(JSOP_POPN, nslots);
3938 cg->stackDepth = (uintN) depth;
3939 return JS_TRUE;
3943 * Helper called with pop out param initialized to a JSOP_POP* opcode. If we
3944 * can emit a group assignment sequence, which results in 0 stack depth delta,
3945 * we set *pop to JSOP_NOP so callers can veto emitting pn followed by a pop.
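/* For example (a sketch, not from the original comment): |[a, b] = [x, y]|
 * qualifies -- both sides are array initialisers, the rhs is not holey and
 * is at least as long as the lhs -- so x and y are pushed, stored through
 * block-local slots, and popped, for a net stack delta of zero. By
 * contrast, |[a, b, c] = [x, y]| or |[a, b] = [x, , y]| takes the general
 * destructuring path. */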
3947 static JSBool
3948 MaybeEmitGroupAssignment(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3949 JSParseNode *pn, JSOp *pop)
3951 JSParseNode *lhs, *rhs;
3953 JS_ASSERT(pn->pn_type == TOK_ASSIGN);
3954 JS_ASSERT(*pop == JSOP_POP || *pop == JSOP_POPV);
3955 lhs = pn->pn_left;
3956 rhs = pn->pn_right;
3957 if (lhs->pn_type == TOK_RB && rhs->pn_type == TOK_RB &&
3958 !(rhs->pn_xflags & PNX_HOLEY) &&
3959 lhs->pn_count <= rhs->pn_count) {
3960 if (!EmitGroupAssignment(cx, cg, prologOp, lhs, rhs))
3961 return JS_FALSE;
3962 *pop = JSOP_NOP;
3964 return JS_TRUE;
3967 #endif /* JS_HAS_DESTRUCTURING */
3969 static JSBool
3970 EmitVariables(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
3971 JSBool inLetHead, ptrdiff_t *headNoteIndex)
3973 bool let, forInVar, first;
3974 #if JS_HAS_BLOCK_SCOPE
3975 bool forInLet, popScope;
3976 JSStmtInfo *stmt, *scopeStmt;
3977 #endif
3978 ptrdiff_t off, noteIndex, tmp;
3979 JSParseNode *pn2, *pn3, *next;
3980 JSOp op;
3981 jsatomid atomIndex;
3982 uintN oldflags;
3984 /* Default in case of JS_HAS_BLOCK_SCOPE early return, below. */
3985 *headNoteIndex = -1;
3988 * Let blocks and expressions have a parenthesized head in which the new
3989 * scope is not yet open. Initializer evaluation uses the parent node's
3990 * lexical scope. If popScope is true below, then we hide the top lexical
3991 * block from any calls to BindNameToSlot hiding in pn2->pn_expr so that
3992 * it won't find any names in the new let block.
3994 * The same goes for let declarations in the head of any kind of for loop.
3995 * Unlike a let declaration 'let x = i' within a block, where x is hoisted
3996 * to the start of the block, a 'for (let x = i...) ...' loop evaluates i
3997 * in the containing scope, and puts x in the loop body's scope.
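/* A concrete case (assumed example): in |let (x = x) body| the x on the
 * right of '=' must name the enclosing scope's binding, so while the
 * initializer is emitted popScope temporarily unlinks the new block from
 * the scope chain seen by BindNameToSlot. */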
3999 let = (pn->pn_op == JSOP_NOP);
4000 forInVar = (pn->pn_xflags & PNX_FORINVAR) != 0;
4001 #if JS_HAS_BLOCK_SCOPE
4002 forInLet = let && forInVar;
4003 popScope = (inLetHead || (let && (cg->flags & TCF_IN_FOR_INIT)));
4004 if (popScope) {
4005 stmt = cg->topStmt;
4006 scopeStmt = cg->topScopeStmt;
4008 # ifdef __GNUC__
4009 else stmt = scopeStmt = NULL; /* quell GCC overwarning */
4010 # endif
4011 JS_ASSERT(!popScope || let);
4012 #endif
4014 off = noteIndex = -1;
4015 for (pn2 = pn->pn_head; ; pn2 = next) {
4016 first = pn2 == pn->pn_head;
4017 next = pn2->pn_next;
4019 if (pn2->pn_type != TOK_NAME) {
4020 #if JS_HAS_DESTRUCTURING
4021 if (pn2->pn_type == TOK_RB || pn2->pn_type == TOK_RC) {
4023 * Emit variable binding ops, but not destructuring ops.
4024 * The parser (see Variables, jsparse.c) has ensured that
4025 * our caller will be the TOK_FOR/TOK_IN case in js_EmitTree,
4026 * and that case will emit the destructuring code only after
4027 * emitting an enumerating opcode and a branch that tests
4028 * whether the enumeration ended.
4030 JS_ASSERT(forInVar);
4031 JS_ASSERT(pn->pn_count == 1);
4032 if (!EmitDestructuringDecls(cx, cg, PN_OP(pn), pn2))
4033 return JS_FALSE;
4034 break;
4036 #endif
4039 * A destructuring initialiser assignment preceded by var will
4040 * never occur to the left of 'in' in a for-in loop. As with 'for
4041 * (var x = i in o)...', this will cause the entire 'var [a, b] =
4042 * i' to be hoisted out of the loop.
4044 JS_ASSERT(pn2->pn_type == TOK_ASSIGN);
4045 JS_ASSERT(!forInVar);
4048 * To allow the front end to rewrite var f = x; as f = x; when a
4049 * function f(){} precedes the var, detect simple name assignment
4050 * here and initialize the name.
4052 #if !JS_HAS_DESTRUCTURING
4053 JS_ASSERT(pn2->pn_left->pn_type == TOK_NAME);
4054 #else
4055 if (pn2->pn_left->pn_type == TOK_NAME)
4056 #endif
4058 pn3 = pn2->pn_right;
4059 pn2 = pn2->pn_left;
4060 goto do_name;
4063 #if JS_HAS_DESTRUCTURING
4064 if (pn->pn_count == 1) {
4066 * If this is the only destructuring assignment in the list,
4067 * try to optimize to a group assignment. If we're in a let
4068 * head, pass JSOP_POP rather than the pseudo-prolog JSOP_NOP
4069 * in pn->pn_op, to suppress a second (and misplaced) 'let'.
4071 JS_ASSERT(noteIndex < 0 && !pn2->pn_next);
4072 op = JSOP_POP;
4073 if (!MaybeEmitGroupAssignment(cx, cg,
4074 inLetHead ? JSOP_POP : PN_OP(pn),
4075 pn2, &op)) {
4076 return JS_FALSE;
4078 if (op == JSOP_NOP) {
4079 pn->pn_xflags = (pn->pn_xflags & ~PNX_POPVAR) | PNX_GROUPINIT;
4080 break;
4084 pn3 = pn2->pn_left;
4085 if (!EmitDestructuringDecls(cx, cg, PN_OP(pn), pn3))
4086 return JS_FALSE;
4088 if (!js_EmitTree(cx, cg, pn2->pn_right))
4089 return JS_FALSE;
4092 * Veto pn->pn_op if inLetHead to avoid emitting a SRC_DESTRUCT
4093 * that's redundant with respect to the SRC_DECL/SRC_DECL_LET that
4094 * we will emit at the bottom of this function.
4096 if (!EmitDestructuringOps(cx, cg,
4097 inLetHead ? JSOP_POP : PN_OP(pn),
4098 pn3)) {
4099 return JS_FALSE;
4101 goto emit_note_pop;
4102 #endif
4106 * Load initializer early to share code above that jumps to do_name.
4107 * NB: if this var redeclares an existing binding, then pn2 is linked
4108 * on its definition's use-chain and pn_expr has been overlaid with
4109 * pn_lexdef.
4111 pn3 = pn2->maybeExpr();
4113 do_name:
4114 if (!BindNameToSlot(cx, cg, pn2))
4115 return JS_FALSE;
4117 op = PN_OP(pn2);
4118 if (op == JSOP_ARGUMENTS) {
4119 /* JSOP_ARGUMENTS => no initializer */
4120 JS_ASSERT(!pn3 && !let);
4121 pn3 = NULL;
4122 #ifdef __GNUC__
4123 atomIndex = 0; /* quell GCC overwarning */
4124 #endif
4125 } else {
4126 JS_ASSERT(op != JSOP_CALLEE);
4127 JS_ASSERT(pn2->pn_cookie != FREE_UPVAR_COOKIE || !let);
4128 if (!MaybeEmitVarDecl(cx, cg, PN_OP(pn), pn2, &atomIndex))
4129 return JS_FALSE;
4131 if (pn3) {
4132 JS_ASSERT(!forInVar);
4133 if (op == JSOP_SETNAME) {
4134 JS_ASSERT(!let);
4135 EMIT_INDEX_OP(JSOP_BINDNAME, atomIndex);
4137 if (pn->pn_op == JSOP_DEFCONST &&
4138 !js_DefineCompileTimeConstant(cx, cg, pn2->pn_atom, pn3)) {
4139 return JS_FALSE;
4142 #if JS_HAS_BLOCK_SCOPE
4143 /* Evaluate expr in the outer lexical scope if requested. */
4144 if (popScope) {
4145 cg->topStmt = stmt->down;
4146 cg->topScopeStmt = scopeStmt->downScope;
4148 #endif
4150 oldflags = cg->flags;
4151 cg->flags &= ~TCF_IN_FOR_INIT;
4152 if (!js_EmitTree(cx, cg, pn3))
4153 return JS_FALSE;
4154 cg->flags |= oldflags & TCF_IN_FOR_INIT;
4156 #if JS_HAS_BLOCK_SCOPE
4157 if (popScope) {
4158 cg->topStmt = stmt;
4159 cg->topScopeStmt = scopeStmt;
4161 #endif
4166 * The parser rewrites 'for (var x = i in o)' to hoist 'var x = i' --
4167 * likewise 'for (let x = i in o)' becomes 'i; for (let x in o)' using
4168 * a TOK_SEQ node to make the two statements appear as one. Therefore
4169 * if this declaration is part of a for-in loop head, we do not need to
4170 * emit op or any source note. Our caller, the TOK_FOR/TOK_IN case in
4171 * js_EmitTree, will annotate appropriately.
4173 JS_ASSERT_IF(pn2->pn_defn, pn3 == pn2->pn_expr);
4174 if (forInVar) {
4175 JS_ASSERT(pn->pn_count == 1);
4176 JS_ASSERT(!pn3);
4177 break;
4180 if (first &&
4181 !inLetHead &&
4182 js_NewSrcNote2(cx, cg, SRC_DECL,
4183 (pn->pn_op == JSOP_DEFCONST)
4184 ? SRC_DECL_CONST
4185 : (pn->pn_op == JSOP_DEFVAR)
4186 ? SRC_DECL_VAR
4187 : SRC_DECL_LET) < 0) {
4188 return JS_FALSE;
4190 if (op == JSOP_ARGUMENTS) {
4191 if (js_Emit1(cx, cg, op) < 0)
4192 return JS_FALSE;
4193 } else if (pn2->pn_cookie != FREE_UPVAR_COOKIE) {
4194 EMIT_UINT16_IMM_OP(op, atomIndex);
4195 } else {
4196 EMIT_INDEX_OP(op, atomIndex);
4199 #if JS_HAS_DESTRUCTURING
4200 emit_note_pop:
4201 #endif
4202 tmp = CG_OFFSET(cg);
4203 if (noteIndex >= 0) {
4204 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, tmp-off))
4205 return JS_FALSE;
4207 if (!next)
4208 break;
4209 off = tmp;
4210 noteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
4211 if (noteIndex < 0 || js_Emit1(cx, cg, JSOP_POP) < 0)
4212 return JS_FALSE;
4215 /* If this is a let head, emit and return a srcnote on the pop. */
4216 if (inLetHead) {
4217 *headNoteIndex = js_NewSrcNote(cx, cg, SRC_DECL);
4218 if (*headNoteIndex < 0)
4219 return JS_FALSE;
4220 if (!(pn->pn_xflags & PNX_POPVAR))
4221 return js_Emit1(cx, cg, JSOP_NOP) >= 0;
4224 return !(pn->pn_xflags & PNX_POPVAR) || js_Emit1(cx, cg, JSOP_POP) >= 0;
4227 #if defined DEBUG_brendan || defined DEBUG_mrbkap
4228 static JSBool
4229 GettableNoteForNextOp(JSCodeGenerator *cg)
4231 ptrdiff_t offset, target;
4232 jssrcnote *sn, *end;
4234 offset = 0;
4235 target = CG_OFFSET(cg);
4236 for (sn = CG_NOTES(cg), end = sn + CG_NOTE_COUNT(cg); sn < end;
4237 sn = SN_NEXT(sn)) {
4238 if (offset == target && SN_IS_GETTABLE(sn))
4239 return JS_TRUE;
4240 offset += SN_DELTA(sn);
4242 return JS_FALSE;
4244 #endif
4246 /* Top-level named functions need a nop for decompilation. */
4247 static JSBool
4248 EmitFunctionDefNop(JSContext *cx, JSCodeGenerator *cg, uintN index)
4250 return js_NewSrcNote2(cx, cg, SRC_FUNCDEF, (ptrdiff_t)index) >= 0 &&
4251 js_Emit1(cx, cg, JSOP_NOP) >= 0;
4254 static bool
4255 EmitNewInit(JSContext *cx, JSCodeGenerator *cg, JSProtoKey key, JSParseNode *pn, int sharpnum)
4257 if (js_Emit2(cx, cg, JSOP_NEWINIT, (jsbytecode) key) < 0)
4258 return false;
4259 #if JS_HAS_SHARP_VARS
4260 if (cg->hasSharps()) {
4261 if (pn->pn_count != 0)
4262 EMIT_UINT16_IMM_OP(JSOP_SHARPINIT, cg->sharpSlotBase);
4263 if (sharpnum >= 0)
4264 EMIT_UINT16PAIR_IMM_OP(JSOP_DEFSHARP, cg->sharpSlotBase, sharpnum);
4265 } else {
4266 JS_ASSERT(sharpnum < 0);
4268 #endif
4269 return true;
4272 static bool
4273 EmitEndInit(JSContext *cx, JSCodeGenerator *cg, uint32 count)
4275 #if JS_HAS_SHARP_VARS
4276 /* Emit an op for sharp array cleanup and decompilation. */
4277 if (cg->hasSharps() && count != 0)
4278 EMIT_UINT16_IMM_OP(JSOP_SHARPINIT, cg->sharpSlotBase);
4279 #endif
4280 return js_Emit1(cx, cg, JSOP_ENDINIT) >= 0;
4283 /* See the SRC_FOR source note offsetBias comments later in this file. */
4284 JS_STATIC_ASSERT(JSOP_NOP_LENGTH == 1);
4285 JS_STATIC_ASSERT(JSOP_POP_LENGTH == 1);
4287 JSBool
4288 js_EmitTree(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
4290 JSBool ok, useful, wantval;
4291 JSStmtInfo *stmt, stmtInfo;
4292 ptrdiff_t top, off, tmp, beq, jmp;
4293 JSParseNode *pn2, *pn3;
4294 JSAtom *atom;
4295 JSAtomListElement *ale;
4296 jsatomid atomIndex;
4297 uintN index;
4298 ptrdiff_t noteIndex;
4299 JSSrcNoteType noteType;
4300 jsbytecode *pc;
4301 JSOp op;
4302 JSTokenType type;
4303 uint32 argc;
4304 #if JS_HAS_SHARP_VARS
4305 jsint sharpnum;
4306 #endif
4308 JS_CHECK_RECURSION(cx, return JS_FALSE);
4310 ok = JS_TRUE;
4311 cg->emitLevel++;
4312 pn->pn_offset = top = CG_OFFSET(cg);
4314 /* Emit notes to tell the current bytecode's source line number. */
4315 UPDATE_LINE_NUMBER_NOTES(cx, cg, pn->pn_pos.begin.lineno);
4317 switch (pn->pn_type) {
4318 case TOK_FUNCTION:
4320 JSFunction *fun;
4321 uintN slot;
4323 #if JS_HAS_XML_SUPPORT
4324 if (pn->pn_arity == PN_NULLARY) {
4325 if (js_Emit1(cx, cg, JSOP_GETFUNNS) < 0)
4326 return JS_FALSE;
4327 break;
4329 #endif
4331 fun = (JSFunction *) pn->pn_funbox->object;
4332 JS_ASSERT(FUN_INTERPRETED(fun));
4333 if (fun->u.i.script) {
4335 * This second pass is needed to emit JSOP_NOP with a source note
4336 * for the already-emitted function definition prolog opcode. See
4337 * comments in the TOK_LC case.
4339 JS_ASSERT(pn->pn_op == JSOP_NOP);
4340 JS_ASSERT(cg->flags & TCF_IN_FUNCTION);
4341 if (!EmitFunctionDefNop(cx, cg, pn->pn_index))
4342 return JS_FALSE;
4343 break;
4346 JS_ASSERT_IF(cx->options & JSOPTION_ANONFUNFIX,
4347 pn->pn_defn ||
4348 (!pn->pn_used && !pn->isTopLevel()) ||
4349 (fun->flags & JSFUN_LAMBDA));
4351 JS_ASSERT_IF(pn->pn_funbox->tcflags & TCF_FUN_HEAVYWEIGHT,
4352 FUN_KIND(fun) == JSFUN_INTERPRETED);
4354 /* Generate code for the function's body. */
4355 void *cg2mark = JS_ARENA_MARK(cg->codePool);
4356 void *cg2space;
4357 JS_ARENA_ALLOCATE_TYPE(cg2space, JSCodeGenerator, cg->codePool);
4358 if (!cg2space) {
4359 js_ReportOutOfScriptQuota(cx);
4360 return JS_FALSE;
4362 JSCodeGenerator *cg2 =
4363 new (cg2space) JSCodeGenerator(cg->compiler,
4364 cg->codePool, cg->notePool,
4365 pn->pn_pos.begin.lineno);
4366 cg2->flags = pn->pn_funbox->tcflags | TCF_IN_FUNCTION;
4367 #if JS_HAS_SHARP_VARS
4368 if (cg2->flags & TCF_HAS_SHARPS) {
4369 cg2->sharpSlotBase = fun->sharpSlotBase(cx);
4370 if (cg2->sharpSlotBase < 0)
4371 return JS_FALSE;
4373 #endif
4374 cg2->fun = fun;
4375 cg2->funbox = pn->pn_funbox;
4376 cg2->parent = cg;
4379 * jsparse.cpp:SetStaticLevel limited static nesting depth to fit in 16
4380 * bits and reserved the all-ones value for the magic
4381 * FREE_UPVAR_COOKIE. Note the cg2->staticLevel assignment below.
4383 JS_ASSERT(cg->staticLevel < JS_BITMASK(16) - 1);
4384 cg2->staticLevel = cg->staticLevel + 1;
4386 /* We measured the max scope depth when we parsed the function. */
4387 JS_SCOPE_DEPTH_METERING(cg2->maxScopeDepth = uint16(-1));
4388 if (!js_EmitFunctionScript(cx, cg2, pn->pn_body))
4389 pn = NULL;
4391 cg2->~JSCodeGenerator();
4392 JS_ARENA_RELEASE(cg->codePool, cg2mark);
4393 cg2 = NULL;
4394 if (!pn)
4395 return JS_FALSE;
4397 /* Make the function object a literal in the outer script's pool. */
4398 index = cg->objectList.index(pn->pn_funbox);
4400 /* Emit a bytecode pointing to the closure object in its immediate. */
4401 op = PN_OP(pn);
4402 if (op != JSOP_NOP) {
4403 if ((pn->pn_funbox->tcflags & TCF_GENEXP_LAMBDA) &&
4404 js_NewSrcNote(cx, cg, SRC_GENEXP) < 0) {
4405 return JS_FALSE;
4407 EMIT_INDEX_OP(op, index);
4408 break;
4412 * For a script we emit the code as we parse. Thus the bytecode for
4413 * top-level functions should go in the prolog to predefine their
4414 * names in the variable object before the already-generated main code
4415 * is executed. This extra work for top-level scripts is not necessary
4416 * when we emit the code for a function, which is fully parsed prior to
4417 * invocation of the emitter, so calls to js_EmitTree for function
4418 * definitions can be scheduled before generating the rest of the code.
4420 if (!(cg->flags & TCF_IN_FUNCTION)) {
4421 JS_ASSERT(!cg->topStmt);
4422 CG_SWITCH_TO_PROLOG(cg);
4423 op = FUN_FLAT_CLOSURE(fun) ? JSOP_DEFFUN_FC : JSOP_DEFFUN;
4424 EMIT_INDEX_OP(op, index);
4425 CG_SWITCH_TO_MAIN(cg);
4427 /* Emit NOP for the decompiler. */
4428 if (!EmitFunctionDefNop(cx, cg, index))
4429 return JS_FALSE;
4430 } else {
4431 #ifdef DEBUG
4432 JSLocalKind localKind =
4433 #endif
4434 js_LookupLocal(cx, cg->fun, fun->atom, &slot);
4435 JS_ASSERT(localKind == JSLOCAL_VAR || localKind == JSLOCAL_CONST);
4436 JS_ASSERT(index < JS_BIT(20));
4437 pn->pn_index = index;
4438 op = FUN_FLAT_CLOSURE(fun) ? JSOP_DEFLOCALFUN_FC : JSOP_DEFLOCALFUN;
4439 if (!EmitSlotIndexOp(cx, op, slot, index, cg))
4440 return JS_FALSE;
4442 break;
4445 case TOK_ARGSBODY:
4446 ok = js_EmitTree(cx, cg, pn->last());
4447 break;
4449 case TOK_UPVARS:
4450 JS_ASSERT(cg->lexdeps.count == 0);
4451 JS_ASSERT(pn->pn_names.count != 0);
4452 cg->lexdeps = pn->pn_names;
4453 ok = js_EmitTree(cx, cg, pn->pn_tree);
4454 break;
4456 case TOK_IF:
4457 /* Initialize so we can detect else-if chains and avoid recursion. */
4458 stmtInfo.type = STMT_IF;
4459 beq = jmp = -1;
4460 noteIndex = -1;
4462 if_again:
4463 /* Emit code for the condition before pushing stmtInfo. */
4464 if (!js_EmitTree(cx, cg, pn->pn_kid1))
4465 return JS_FALSE;
4466 top = CG_OFFSET(cg);
4467 if (stmtInfo.type == STMT_IF) {
4468 js_PushStatement(cg, &stmtInfo, STMT_IF, top);
4469 } else {
4471 * We came here from the goto further below that detects else-if
4472 * chains, so we must mutate stmtInfo back into a STMT_IF record.
4473 * Also (see below for why) we need a note offset for SRC_IF_ELSE
4474 * to help the decompiler. Actually, we need two offsets, one for
4475 * decompiling any else clause and the second for decompiling an
4476 * else-if chain without bracing, overindenting, or incorrectly
4477 * scoping let declarations.
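/* Illustrative shape (assumed): |if (c1) A else if (c2) B else C| comes
 * back here once per else-if, emitting roughly:
 *   <c1>; ifeq L1    annotated SRC_IF_ELSE
 *   <A>; goto L3
 *   L1: <c2>; ifeq L2
 *   <B>; goto L3
 *   L2: <C>
 *   L3: ...
 * with the two offsets just set locating the else jump and the next
 * condition for the decompiler. */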
4479 JS_ASSERT(stmtInfo.type == STMT_ELSE);
4480 stmtInfo.type = STMT_IF;
4481 stmtInfo.update = top;
4482 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, jmp - beq))
4483 return JS_FALSE;
4484 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 1, top - jmp))
4485 return JS_FALSE;
4488 /* Emit an annotated branch-if-false around the then part. */
4489 pn3 = pn->pn_kid3;
4490 noteIndex = js_NewSrcNote(cx, cg, pn3 ? SRC_IF_ELSE : SRC_IF);
4491 if (noteIndex < 0)
4492 return JS_FALSE;
4493 beq = EmitJump(cx, cg, JSOP_IFEQ, 0);
4494 if (beq < 0)
4495 return JS_FALSE;
4497 /* Emit code for the then and optional else parts. */
4498 if (!js_EmitTree(cx, cg, pn->pn_kid2))
4499 return JS_FALSE;
4500 if (pn3) {
4501 /* Modify stmtInfo so we know we're in the else part. */
4502 stmtInfo.type = STMT_ELSE;
4505 * Emit a JSOP_BACKPATCH op to jump from the end of our then part
4506 * around the else part. The js_PopStatementCG call at the bottom
4507 * of this switch case will fix up the backpatch chain linked from
4508 * stmtInfo.breaks.
4510 jmp = EmitGoto(cx, cg, &stmtInfo, &stmtInfo.breaks, NULL, SRC_NULL);
4511 if (jmp < 0)
4512 return JS_FALSE;
4514 /* Ensure the branch-if-false comes here, then emit the else. */
4515 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, beq);
4516 if (pn3->pn_type == TOK_IF) {
4517 pn = pn3;
4518 goto if_again;
4521 if (!js_EmitTree(cx, cg, pn3))
4522 return JS_FALSE;
4525 * Annotate SRC_IF_ELSE with the offset from branch to jump, for
4526 * the decompiler's benefit. We can't just "back up" from the pc
4527 * of the else clause, because we don't know whether an extended
4528 * jump was required to leap from the end of the then clause over
4529 * the else clause.
4531 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, jmp - beq))
4532 return JS_FALSE;
4533 } else {
4534 /* No else part, fixup the branch-if-false to come here. */
4535 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, beq);
4537 ok = js_PopStatementCG(cx, cg);
4538 break;
4540 case TOK_SWITCH:
4541 /* Out of line to avoid bloating js_EmitTree's stack frame size. */
4542 ok = EmitSwitch(cx, cg, pn, &stmtInfo);
4543 break;
4545 case TOK_WHILE:
4547 * Minimize bytecodes issued for one or more iterations by jumping to
4548 * the condition below the body and closing the loop if the condition
4549 * is true with a backward branch. For iteration count i:
4551 *  i        test at the top                 test at the bottom
4552 *  =        ===============                 ==================
4553 *  0        ifeq-pass                       goto; ifne-fail
4554 *  1        ifeq-fail; goto; ifne-pass      goto; ifne-pass; ifne-fail
4555 *  2        2*(ifeq-fail; goto); ifeq-pass  goto; 2*ifne-pass; ifne-fail
4556 *  . . .
4557 *  N        N*(ifeq-fail; goto); ifeq-pass  goto; N*ifne-pass; ifne-fail
4559 * SpiderMonkey, pre-mozilla.org, emitted code while parsing and so used
4560 * test at the top. When JSParseNode trees were added during the ES3
4561 * work (1998-9), the code generation scheme was not optimized, and
4562 * the decompiler continued to take advantage of the branch and jump
4563 * that bracketed the body. But given the SRC_WHILE note, it is easy
4564 * to support the more efficient scheme.
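/* Concretely (a sketch of the scheme used below): |while (cond) body|
 * compiles to roughly:
 *   goto L2        annotated SRC_WHILE
 *   L1: trace
 *   <body>
 *   L2: <cond>
 *   ifne L1
 * so each iteration pays for one conditional branch instead of an ifeq
 * plus a goto. */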
4566 js_PushStatement(cg, &stmtInfo, STMT_WHILE_LOOP, top);
4567 noteIndex = js_NewSrcNote(cx, cg, SRC_WHILE);
4568 if (noteIndex < 0)
4569 return JS_FALSE;
4570 jmp = EmitJump(cx, cg, JSOP_GOTO, 0);
4571 if (jmp < 0)
4572 return JS_FALSE;
4573 top = js_Emit1(cx, cg, JSOP_TRACE);
4574 if (top < 0)
4575 return JS_FALSE;
4576 if (!js_EmitTree(cx, cg, pn->pn_right))
4577 return JS_FALSE;
4578 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
4579 if (!js_EmitTree(cx, cg, pn->pn_left))
4580 return JS_FALSE;
4581 beq = EmitJump(cx, cg, JSOP_IFNE, top - CG_OFFSET(cg));
4582 if (beq < 0)
4583 return JS_FALSE;
4584 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, beq - jmp))
4585 return JS_FALSE;
4586 ok = js_PopStatementCG(cx, cg);
4587 break;
4589 case TOK_DO:
4590 /* Emit an annotated nop so we know to decompile a 'do' keyword. */
4591 noteIndex = js_NewSrcNote(cx, cg, SRC_WHILE);
4592 if (noteIndex < 0 || js_Emit1(cx, cg, JSOP_NOP) < 0)
4593 return JS_FALSE;
4595 /* Compile the loop body. */
4596 top = js_Emit1(cx, cg, JSOP_TRACE);
4597 if (top < 0)
4598 return JS_FALSE;
4599 js_PushStatement(cg, &stmtInfo, STMT_DO_LOOP, top);
4600 if (!js_EmitTree(cx, cg, pn->pn_left))
4601 return JS_FALSE;
4603 /* Set loop and enclosing label update offsets, for continue. */
4604 stmt = &stmtInfo;
4605 do {
4606 stmt->update = CG_OFFSET(cg);
4607 } while ((stmt = stmt->down) != NULL && stmt->type == STMT_LABEL);
4609 /* Compile the loop condition, now that continues know where to go. */
4610 if (!js_EmitTree(cx, cg, pn->pn_right))
4611 return JS_FALSE;
4614 * Since we use JSOP_IFNE for other purposes as well as for do-while
4615 * loops, we must store 1 + (beq - top) in the SRC_WHILE note offset,
4616 * and the decompiler must get that delta and decompile recursively.
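/* Layout sketch (assumed, matching the code below): |do body while (cond)|
 * emits roughly:
 *   nop            annotated SRC_WHILE
 *   top: trace
 *   <body>         continues land just after the body
 *   <cond>
 *   ifne top
 * with 1 + (beq - top) recorded in the note as described above. */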
4618 beq = EmitJump(cx, cg, JSOP_IFNE, top - CG_OFFSET(cg));
4619 if (beq < 0)
4620 return JS_FALSE;
4621 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, 1 + (beq - top)))
4622 return JS_FALSE;
4623 ok = js_PopStatementCG(cx, cg);
4624 break;
4626 case TOK_FOR:
4627 beq = 0; /* suppress gcc warnings */
4628 jmp = -1;
4629 pn2 = pn->pn_left;
4630 js_PushStatement(cg, &stmtInfo, STMT_FOR_LOOP, top);
4632 if (pn2->pn_type == TOK_IN) {
4633 /* Set stmtInfo type for later testing. */
4634 stmtInfo.type = STMT_FOR_IN_LOOP;
4637 * If the left part is 'var x', emit code to define x if necessary
4638 * using a prolog opcode, but do not emit a pop. If the left part
4639 * is 'var x = i', emit prolog code to define x if necessary; then
4640 * emit code to evaluate i, assign the result to x, and pop the
4641 * result off the stack.
4643 * All the logic to do this is implemented in the outer switch's
4644 * TOK_VAR case, conditioned on pn_xflags flags set by the parser.
4646 * In the 'for (var x = i in o) ...' case, the js_EmitTree(...pn3)
4647 * called here will generate the proper note for the assignment
4648 * op that sets x = i, hoisting the initialized var declaration
4649 * out of the loop: 'var x = i; for (x in o) ...'.
4651 * In the 'for (var x in o) ...' case, nothing but the prolog op
4652 * (if needed) should be generated here, we must emit the note
4653 * just before the JSOP_FOR* opcode in the switch on pn3->pn_type
4654 * a bit below, so nothing is hoisted: 'for (var x in o) ...'.
4656 * A 'for (let x = i in o)' loop must not be hoisted, since in
4657 * this form the let variable is scoped by the loop body (but not
4658 * the head). The initializer expression i must be evaluated for
4659 * any side effects. So we hoist only i in the let case.
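/* Overall shape (an illustrative sketch of the steps below):
 * |for (x in o) body| emits roughly:
 *   <o>
 *   iter           annotated SRC_FOR_IN
 *   goto L2
 *   L1: trace
 *   for*           bind the next iteration value to x
 *   <body>
 *   L2: nextiter
 *   ifne L1
 *   enditer */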
4661 pn3 = pn2->pn_left;
4662 type = PN_TYPE(pn3);
4663 cg->flags |= TCF_IN_FOR_INIT;
4664 if (TOKEN_TYPE_IS_DECL(type) && !js_EmitTree(cx, cg, pn3))
4665 return JS_FALSE;
4666 cg->flags &= ~TCF_IN_FOR_INIT;
4668 /* Compile the object expression to the right of 'in'. */
4669 if (!js_EmitTree(cx, cg, pn2->pn_right))
4670 return JS_FALSE;
4673 * Emit a bytecode to convert top of stack value to the iterator
4674 * object depending on the loop variant (for-in, for-each-in, or
4675 * destructuring for-in).
4677 JS_ASSERT(pn->pn_op == JSOP_ITER);
4678 if (js_Emit2(cx, cg, JSOP_ITER, (uint8) pn->pn_iflags) < 0)
4679 return JS_FALSE;
4681 /* Annotate so the decompiler can find the loop-closing jump. */
4682 noteIndex = js_NewSrcNote(cx, cg, SRC_FOR_IN);
4683 if (noteIndex < 0)
4684 return JS_FALSE;
4687 * Jump down to the loop condition to minimize overhead assuming at
4688 * least one iteration, as the other loop forms do.
4690 jmp = EmitJump(cx, cg, JSOP_GOTO, 0);
4691 if (jmp < 0)
4692 return JS_FALSE;
4694 top = CG_OFFSET(cg);
4695 SET_STATEMENT_TOP(&stmtInfo, top);
4696 if (js_Emit1(cx, cg, JSOP_TRACE) < 0)
4697 return JS_FALSE;
4699 #ifdef DEBUG
4700 intN loopDepth = cg->stackDepth;
4701 #endif
4704 * Compile a JSOP_FOR* bytecode based on the left hand side.
4706 * Initialize op to JSOP_SETNAME in case of |for ([a, b] in o)...|
4707 * or similar, to signify assignment, rather than declaration, to
4708 * the decompiler. EmitDestructuringOps takes a prolog bytecode
4709 * parameter and emits the appropriate source note, defaulting to
4710 * assignment, so JSOP_SETNAME is not critical here; many similar
4711 * ops could be used -- just not JSOP_NOP (which means 'let').
4713 op = JSOP_SETNAME;
4714 switch (type) {
4715 #if JS_HAS_BLOCK_SCOPE
4716 case TOK_LET:
4717 #endif
4718 case TOK_VAR:
4719 JS_ASSERT(pn3->pn_arity == PN_LIST && pn3->pn_count == 1);
4720 pn3 = pn3->pn_head;
4721 #if JS_HAS_DESTRUCTURING
4722 if (pn3->pn_type == TOK_ASSIGN) {
4723 pn3 = pn3->pn_left;
4724 JS_ASSERT(pn3->pn_type == TOK_RB || pn3->pn_type == TOK_RC);
4726 if (pn3->pn_type == TOK_RB || pn3->pn_type == TOK_RC) {
4727 op = PN_OP(pn2->pn_left);
4728 goto destructuring_for;
4730 #else
4731 JS_ASSERT(pn3->pn_type == TOK_NAME);
4732 #endif
4733 /* FALL THROUGH */
4735 case TOK_NAME:
4737 * Always annotate JSOP_FORLOCAL if given input of the form
4738 * 'for (let x in o)' -- the decompiler must not hoist the
4739 * 'let x' out of the loop head, or x will be bound in the
4740 * wrong scope. Likewise, but in this case only for the sake
4741 * of higher decompilation fidelity, do not hoist 'var x'
4742 * when given 'for (var x in o)'.
4744 if ((
4745 #if JS_HAS_BLOCK_SCOPE
4746 type == TOK_LET ||
4747 #endif
4748 (type == TOK_VAR && !pn3->maybeExpr())) &&
4749 js_NewSrcNote2(cx, cg, SRC_DECL,
4750 (type == TOK_VAR)
4751 ? SRC_DECL_VAR
4752 : SRC_DECL_LET) < 0) {
4753 return JS_FALSE;
4755 if (pn3->pn_cookie != FREE_UPVAR_COOKIE) {
4756 op = PN_OP(pn3);
4757 switch (op) {
4758 case JSOP_GETARG: /* FALL THROUGH */
4759 case JSOP_SETARG: op = JSOP_FORARG; break;
4760 case JSOP_GETGVAR: /* FALL THROUGH */
4761 case JSOP_SETGVAR: op = JSOP_FORNAME; break;
4762 case JSOP_GETLOCAL: /* FALL THROUGH */
4763 case JSOP_SETLOCAL: op = JSOP_FORLOCAL; break;
4764 default: JS_ASSERT(0);
4766 } else {
4767 pn3->pn_op = JSOP_FORNAME;
4768 if (!BindNameToSlot(cx, cg, pn3))
4769 return JS_FALSE;
4770 op = PN_OP(pn3);
4772 if (pn3->isConst()) {
4773 js_ReportCompileErrorNumber(cx, CG_TS(cg), pn3, JSREPORT_ERROR,
4774 JSMSG_BAD_FOR_LEFTSIDE);
4775 return JS_FALSE;
4777 if (pn3->pn_cookie != FREE_UPVAR_COOKIE) {
4778 atomIndex = (jsatomid) pn3->pn_cookie;
4779 EMIT_UINT16_IMM_OP(op, atomIndex);
4780 } else {
4781 if (!EmitAtomOp(cx, pn3, op, cg))
4782 return JS_FALSE;
4784 break;
4786 case TOK_DOT:
4788 * 'for (o.p in q)' can use JSOP_FORPROP only if evaluating 'o'
4789 * has no side effects.
4791 useful = JS_FALSE;
4792 if (!CheckSideEffects(cx, cg, pn3->expr(), &useful))
4793 return JS_FALSE;
4794 if (!useful) {
4795 if (!EmitPropOp(cx, pn3, JSOP_FORPROP, cg, JS_FALSE))
4796 return JS_FALSE;
4797 break;
4799 /* FALL THROUGH */
4801 #if JS_HAS_DESTRUCTURING
4802 destructuring_for:
4803 #endif
4804 default:
4805 if (js_Emit1(cx, cg, JSOP_FORELEM) < 0)
4806 return JS_FALSE;
4807 JS_ASSERT(cg->stackDepth >= 3);
4809 #if JS_HAS_DESTRUCTURING
4810 if (pn3->pn_type == TOK_RB || pn3->pn_type == TOK_RC) {
4811 if (!EmitDestructuringOps(cx, cg, op, pn3))
4812 return JS_FALSE;
4813 if (js_Emit1(cx, cg, JSOP_POP) < 0)
4814 return JS_FALSE;
4815 } else
4816 #endif
4817 if (pn3->pn_type == TOK_LP) {
4818 JS_ASSERT(pn3->pn_op == JSOP_SETCALL);
4819 if (!js_EmitTree(cx, cg, pn3))
4820 return JS_FALSE;
4821 if (js_Emit1(cx, cg, JSOP_ENUMELEM) < 0)
4822 return JS_FALSE;
4823 } else
4824 #if JS_HAS_XML_SUPPORT
4825 if (pn3->pn_type == TOK_UNARYOP) {
4826 JS_ASSERT(pn3->pn_op == JSOP_BINDXMLNAME);
4827 if (!js_EmitTree(cx, cg, pn3))
4828 return JS_FALSE;
4829 if (js_Emit1(cx, cg, JSOP_ENUMELEM) < 0)
4830 return JS_FALSE;
4831 } else
4832 #endif
4833 if (!EmitElemOp(cx, pn3, JSOP_ENUMELEM, cg))
4834 return JS_FALSE;
4835 break;
4838 /* The stack should be balanced around the JSOP_FOR* opcode sequence. */
4839 JS_ASSERT(cg->stackDepth == loopDepth);
4841 /* Set the first srcnote offset so we can find the start of the loop body. */
4842 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, CG_OFFSET(cg) - jmp))
4843 return JS_FALSE;
4845 /* Emit code for the loop body. */
4846 if (!js_EmitTree(cx, cg, pn->pn_right))
4847 return JS_FALSE;
4849 /* Set loop and enclosing "update" offsets, for continue. */
4850 stmt = &stmtInfo;
4851 do {
4852 stmt->update = CG_OFFSET(cg);
4853 } while ((stmt = stmt->down) != NULL && stmt->type == STMT_LABEL);
4856 * Fixup the goto that starts the loop to jump down to JSOP_NEXTITER.
4858 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
4859 if (js_Emit1(cx, cg, JSOP_NEXTITER) < 0)
4860 return JS_FALSE;
4861 beq = EmitJump(cx, cg, JSOP_IFNE, top - CG_OFFSET(cg));
4862 if (beq < 0)
4863 return JS_FALSE;
4865 /* Set the second srcnote offset so we can find the closing jump. */
4866 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 1, beq - jmp))
4867 return JS_FALSE;
4868 } else {
4869 /* C-style for (init; cond; update) ... loop. */
4870 op = JSOP_POP;
4871 pn3 = pn2->pn_kid1;
4872 if (!pn3) {
4873 /* No initializer: emit an annotated nop for the decompiler. */
4874 op = JSOP_NOP;
4875 } else {
4876 cg->flags |= TCF_IN_FOR_INIT;
4877 #if JS_HAS_DESTRUCTURING
4878 if (pn3->pn_type == TOK_ASSIGN &&
4879 !MaybeEmitGroupAssignment(cx, cg, op, pn3, &op)) {
4880 return JS_FALSE;
4882 #endif
4883 if (op == JSOP_POP) {
4884 if (!js_EmitTree(cx, cg, pn3))
4885 return JS_FALSE;
4886 if (TOKEN_TYPE_IS_DECL(pn3->pn_type)) {
4888 * Check whether a destructuring-initialized var decl
4889 * was optimized to a group assignment. If so, we do
4890 * not need to emit a pop below, so switch to a nop,
4891 * just for the decompiler.
4893 JS_ASSERT(pn3->pn_arity == PN_LIST);
4894 if (pn3->pn_xflags & PNX_GROUPINIT)
4895 op = JSOP_NOP;
4898 cg->flags &= ~TCF_IN_FOR_INIT;
4902 * NB: the SRC_FOR note has offsetBias 1 (JSOP_{NOP,POP}_LENGTH).
4903 * Use tmp to hold the biased srcnote "top" offset, which differs
4904 * from the top local variable by the length of the JSOP_GOTO{,X}
4905 * emitted in between tmp and top if this loop has a condition.
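/* Shape of the emitted loop (illustrative sketch):
 * |for (init; cond; update) body| becomes roughly:
 *   <init>; pop    the pop (or nop) carries the SRC_FOR note
 *   goto L2
 *   L1: trace
 *   <body>
 *   <update>; pop
 *   L2: <cond>
 *   ifne L1
 * The note offsets set below locate the condition, the update part, and
 * the loop-closing jump relative to the biased top. */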
4907 noteIndex = js_NewSrcNote(cx, cg, SRC_FOR);
4908 if (noteIndex < 0 || js_Emit1(cx, cg, op) < 0)
4909 return JS_FALSE;
4910 tmp = CG_OFFSET(cg);
4912 if (pn2->pn_kid2) {
4913 /* Goto the loop condition, which branches back to iterate. */
4914 jmp = EmitJump(cx, cg, JSOP_GOTO, 0);
4915 if (jmp < 0)
4916 return JS_FALSE;
4919 top = CG_OFFSET(cg);
4920 SET_STATEMENT_TOP(&stmtInfo, top);
4922 /* Emit code for the loop body. */
4923 if (js_Emit1(cx, cg, JSOP_TRACE) < 0)
4924 return JS_FALSE;
4925 if (!js_EmitTree(cx, cg, pn->pn_right))
4926 return JS_FALSE;
4928 /* Set the second note offset so we can find the update part. */
4929 JS_ASSERT(noteIndex != -1);
4930 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 1,
4931 CG_OFFSET(cg) - tmp)) {
4932 return JS_FALSE;
4935 /* Set loop and enclosing "update" offsets, for continue. */
4936 stmt = &stmtInfo;
4937 do {
4938 stmt->update = CG_OFFSET(cg);
4939 } while ((stmt = stmt->down) != NULL && stmt->type == STMT_LABEL);
4941 /* Check for update code to do before the condition (if any). */
4942 pn3 = pn2->pn_kid3;
4943 if (pn3) {
4944 op = JSOP_POP;
4945 #if JS_HAS_DESTRUCTURING
4946 if (pn3->pn_type == TOK_ASSIGN &&
4947 !MaybeEmitGroupAssignment(cx, cg, op, pn3, &op)) {
4948 return JS_FALSE;
4950 #endif
4951 if (op == JSOP_POP && !js_EmitTree(cx, cg, pn3))
4952 return JS_FALSE;
4954 /* Always emit the POP or NOP, to help the decompiler. */
4955 if (js_Emit1(cx, cg, op) < 0)
4956 return JS_FALSE;
4958 /* Restore the absolute line number for source note readers. */
4959 off = (ptrdiff_t) pn->pn_pos.end.lineno;
4960 if (CG_CURRENT_LINE(cg) != (uintN) off) {
4961 if (js_NewSrcNote2(cx, cg, SRC_SETLINE, off) < 0)
4962 return JS_FALSE;
4963 CG_CURRENT_LINE(cg) = (uintN) off;
4967 /* Set the first note offset so we can find the loop condition. */
4968 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0,
4969 CG_OFFSET(cg) - tmp)) {
4970 return JS_FALSE;
4973 if (pn2->pn_kid2) {
4974 /* Fix up the goto from top to target the loop condition. */
4975 JS_ASSERT(jmp >= 0);
4976 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
4978 if (!js_EmitTree(cx, cg, pn2->pn_kid2))
4979 return JS_FALSE;
4982 /* The third note offset helps us find the loop-closing jump. */
4983 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 2,
4984 CG_OFFSET(cg) - tmp)) {
4985 return JS_FALSE;
4988 if (pn2->pn_kid2) {
4989 beq = EmitJump(cx, cg, JSOP_IFNE, top - CG_OFFSET(cg));
4990 if (beq < 0)
4991 return JS_FALSE;
4992 } else {
4993 /* No loop condition -- emit the loop-closing jump. */
4994 jmp = EmitJump(cx, cg, JSOP_GOTO, top - CG_OFFSET(cg));
4995 if (jmp < 0)
4996 return JS_FALSE;
5000 /* Now fixup all breaks and continues (before for/in's JSOP_ENDITER). */
5001 if (!js_PopStatementCG(cx, cg))
5002 return JS_FALSE;
5004 if (pn2->pn_type == TOK_IN) {
5006 * JSOP_ENDITER must have a slot to save an exception thrown from
5007 * the body of a for-in loop when closing the iterator object, and
5008 * fortunately it does: the slot that was set by JSOP_NEXTITER to
5009 * the return value of iterator.next().
5011 JS_ASSERT(js_CodeSpec[JSOP_ENDITER].nuses == 2);
5012 if (!NewTryNote(cx, cg, JSTRY_ITER, cg->stackDepth, top, CG_OFFSET(cg)) ||
5013 js_Emit1(cx, cg, JSOP_ENDITER) < 0) {
5014 return JS_FALSE;
5017 break;
5019 case TOK_BREAK:
5020 stmt = cg->topStmt;
5021 atom = pn->pn_atom;
5022 if (atom) {
5023 ale = cg->atomList.add(cg->compiler, atom);
5024 if (!ale)
5025 return JS_FALSE;
5026 while (stmt->type != STMT_LABEL || stmt->label != atom)
5027 stmt = stmt->down;
5028 noteType = SRC_BREAK2LABEL;
5029 } else {
5030 ale = NULL;
5031 while (!STMT_IS_LOOP(stmt) && stmt->type != STMT_SWITCH)
5032 stmt = stmt->down;
5033 noteType = (stmt->type == STMT_SWITCH) ? SRC_NULL : SRC_BREAK;
5036 if (EmitGoto(cx, cg, stmt, &stmt->breaks, ale, noteType) < 0)
5037 return JS_FALSE;
5038 break;
5040 case TOK_CONTINUE:
5041 stmt = cg->topStmt;
5042 atom = pn->pn_atom;
5043 if (atom) {
5044 /* Find the loop statement enclosed by the matching label. */
5045 JSStmtInfo *loop = NULL;
5046 ale = cg->atomList.add(cg->compiler, atom);
5047 if (!ale)
5048 return JS_FALSE;
5049 while (stmt->type != STMT_LABEL || stmt->label != atom) {
5050 if (STMT_IS_LOOP(stmt))
5051 loop = stmt;
5052 stmt = stmt->down;
5054 stmt = loop;
5055 noteType = SRC_CONT2LABEL;
5056 } else {
5057 ale = NULL;
5058 while (!STMT_IS_LOOP(stmt))
5059 stmt = stmt->down;
5060 noteType = SRC_CONTINUE;
5063 if (EmitGoto(cx, cg, stmt, &stmt->continues, ale, noteType) < 0)
5064 return JS_FALSE;
5065 break;
5067 case TOK_WITH:
5068 if (!js_EmitTree(cx, cg, pn->pn_left))
5069 return JS_FALSE;
5070 js_PushStatement(cg, &stmtInfo, STMT_WITH, CG_OFFSET(cg));
5071 if (js_Emit1(cx, cg, JSOP_ENTERWITH) < 0)
5072 return JS_FALSE;
5073 if (!js_EmitTree(cx, cg, pn->pn_right))
5074 return JS_FALSE;
5075 if (js_Emit1(cx, cg, JSOP_LEAVEWITH) < 0)
5076 return JS_FALSE;
5077 ok = js_PopStatementCG(cx, cg);
5078 break;
5080 case TOK_TRY:
5082 ptrdiff_t tryStart, tryEnd, catchJump, finallyStart;
5083 intN depth;
5084 JSParseNode *lastCatch;
5086 catchJump = -1;
5089 * Push stmtInfo to track jumps-over-catches and gosubs-to-finally
5090 * for later fixup.
5092 * When a finally block is active (STMT_FINALLY in our tree context),
5093 * non-local jumps (including jumps-over-catches) result in a GOSUB
5094 * being written into the bytecode stream and fixed-up later (c.f.
5095 * EmitBackPatchOp and BackPatch).
5097 js_PushStatement(cg, &stmtInfo,
5098 pn->pn_kid3 ? STMT_FINALLY : STMT_TRY,
5099 CG_OFFSET(cg));
5102 * Since an exception can be thrown at any place inside the try block,
5103 * we need to restore the stack and the scope chain before we transfer
5104 * the control to the exception handler.
5106 * To that end, the try note associated with the catch or finally
5107 * block stores the stack depth upon the try entry. The interpreter
5108 * uses this depth to properly unwind the stack and the scope chain.
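/* Overall shape (an illustrative sketch): |try {A} catch (e) {B} finally {C}|
 * emits roughly:
 *   try; <A>
 *   gosub L1       hidden
 *   goto L2        hidden
 *   <catch blocks for B, laid out as diagrammed further below>
 *   L1: finally; <C>; retsub
 *   L2: nop        annotated SRC_ENDBRACE
 * with the JSTRY_CATCH and JSTRY_FINALLY try notes added last. */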
5110 depth = cg->stackDepth;
5112 /* Mark try location for decompilation, then emit try block. */
5113 if (js_Emit1(cx, cg, JSOP_TRY) < 0)
5114 return JS_FALSE;
5115 tryStart = CG_OFFSET(cg);
5116 if (!js_EmitTree(cx, cg, pn->pn_kid1))
5117 return JS_FALSE;
5118 JS_ASSERT(depth == cg->stackDepth);
5120 /* GOSUB to finally, if present. */
5121 if (pn->pn_kid3) {
5122 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
5123 return JS_FALSE;
5124 jmp = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &GOSUBS(stmtInfo));
5125 if (jmp < 0)
5126 return JS_FALSE;
5129 /* Emit (hidden) jump over catch and/or finally. */
5130 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
5131 return JS_FALSE;
5132 jmp = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &catchJump);
5133 if (jmp < 0)
5134 return JS_FALSE;
5136 tryEnd = CG_OFFSET(cg);
5138 /* If this try has a catch block, emit it. */
5139 pn2 = pn->pn_kid2;
5140 lastCatch = NULL;
5141 if (pn2) {
5142 jsint count = 0; /* previous catch block's population */
5145 * The emitted code for a catch block looks like:
5147 *   [throwing]                          only if 2nd+ catch block
5148 *   [leaveblock]                        only if 2nd+ catch block
5149 *   enterblock                          with SRC_CATCH
5150 *   exception
5151 *   [dup]                               only if catchguard
5152 *   setlocalpop <slot>                  or destructuring code
5153 *   [< catchguard code >]               if there's a catchguard
5154 *   [ifeq <offset to next catch block>]          " "
5155 *   [pop]                               only if catchguard
5156 *   < catch block contents >
5157 *   leaveblock
5158 *   goto <end of catch blocks>          non-local; finally applies
5160 * If there's no catch block without a catchguard, the last
5161 * <offset to next catch block> points to rethrow code. This
5162 * code will [gosub] to the finally code if appropriate, and is
5163 * also used for the catch-all trynote for capturing exceptions
5164 * thrown from catch{} blocks.
5166 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
5167 ptrdiff_t guardJump, catchNote;
5169 JS_ASSERT(cg->stackDepth == depth);
5170 guardJump = GUARDJUMP(stmtInfo);
5171 if (guardJump != -1) {
5172 /* Fix up and clean up previous catch block. */
5173 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, guardJump);
5176 * Account for JSOP_ENTERBLOCK (whose block object count
5177 * is saved below) and the pushed exception object that we
5178 * still have after jumping from the previous guard.
5180 cg->stackDepth = depth + count + 1;
5183 * Move exception back to cx->exception to prepare for
5184 * the next catch. We hide [throwing] from the decompiler
5185 * since it compensates for the hidden JSOP_DUP at the
5186 * start of the previous guarded catch.
5188 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0 ||
5189 js_Emit1(cx, cg, JSOP_THROWING) < 0) {
5190 return JS_FALSE;
5192 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
5193 return JS_FALSE;
5194 EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK, count);
5195 JS_ASSERT(cg->stackDepth == depth);
5199 * Annotate the JSOP_ENTERBLOCK that's about to be generated
5200 * by the call to js_EmitTree immediately below. Save this
5201 * source note's index in stmtInfo for use by the TOK_CATCH:
5202 * case, where the length of the catch guard is set as the
5203 * note's offset.
5205 catchNote = js_NewSrcNote2(cx, cg, SRC_CATCH, 0);
5206 if (catchNote < 0)
5207 return JS_FALSE;
5208 CATCHNOTE(stmtInfo) = catchNote;
5211 * Emit the lexical scope and catch body. Save the catch's
5212 * block object population via count, for use when targeting
5213 * guardJump at the next catch (the guard mismatch case).
5215 JS_ASSERT(pn3->pn_type == TOK_LEXICALSCOPE);
5216 count = OBJ_BLOCK_COUNT(cx, pn3->pn_objbox->object);
5217 if (!js_EmitTree(cx, cg, pn3))
5218 return JS_FALSE;
5220 /* gosub <finally>, if required */
5221 if (pn->pn_kid3) {
5222 jmp = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH,
5223 &GOSUBS(stmtInfo));
5224 if (jmp < 0)
5225 return JS_FALSE;
5226 JS_ASSERT(cg->stackDepth == depth);
5230 * Jump over the remaining catch blocks. This will get fixed
5231 * up to jump to after catch/finally.
5233 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
5234 return JS_FALSE;
5235 jmp = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &catchJump);
5236 if (jmp < 0)
5237 return JS_FALSE;
5240 * Save a pointer to the last catch node to handle try-finally
5241 * and try-catch(guard)-finally special cases.
5243 lastCatch = pn3->expr();
5248 * Last catch guard jumps to the rethrow code sequence if none of the
5249 * guards match. Target guardJump at the beginning of the rethrow
5250 * sequence, just in case a guard expression throws and leaves the
5251 * stack unbalanced.
5253 if (lastCatch && lastCatch->pn_kid2) {
5254 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, GUARDJUMP(stmtInfo));
5256 /* Sync the stack to take into account pushed exception. */
5257 JS_ASSERT(cg->stackDepth == depth);
5258 cg->stackDepth = depth + 1;
5261 * Rethrow the exception, delegating execution of the finally
5262 * block, if any, to the exception handler.
5264 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0 ||
5265 js_Emit1(cx, cg, JSOP_THROW) < 0) {
5266 return JS_FALSE;
5270 JS_ASSERT(cg->stackDepth == depth);
5272 /* Emit finally handler if any. */
5273 finallyStart = 0; /* to quell GCC uninitialized warnings */
5274 if (pn->pn_kid3) {
5276 * Fix up the gosubs that might have been emitted before non-local
5277 * jumps to the finally code.
5279 if (!BackPatch(cx, cg, GOSUBS(stmtInfo), CG_NEXT(cg), JSOP_GOSUB))
5280 return JS_FALSE;
5282 finallyStart = CG_OFFSET(cg);
5284 /* Indicate that we're emitting a subroutine body. */
5285 stmtInfo.type = STMT_SUBROUTINE;
5286 if (!UpdateLineNumberNotes(cx, cg, pn->pn_kid3->pn_pos.begin.lineno))
5287 return JS_FALSE;
5288 if (js_Emit1(cx, cg, JSOP_FINALLY) < 0 ||
5289 !js_EmitTree(cx, cg, pn->pn_kid3) ||
5290 js_Emit1(cx, cg, JSOP_RETSUB) < 0) {
5291 return JS_FALSE;
5293 JS_ASSERT(cg->stackDepth == depth);
5295 if (!js_PopStatementCG(cx, cg))
5296 return JS_FALSE;
5298 if (js_NewSrcNote(cx, cg, SRC_ENDBRACE) < 0 ||
5299 js_Emit1(cx, cg, JSOP_NOP) < 0) {
5300 return JS_FALSE;
5303 /* Fix up the end-of-try/catch jumps to come here. */
5304 if (!BackPatch(cx, cg, catchJump, CG_NEXT(cg), JSOP_GOTO))
5305 return JS_FALSE;
5308 * Add the try note last, to let post-order give us the right ordering
5309 * (first to last for a given nesting level, inner to outer by level).
5311 if (pn->pn_kid2 &&
5312 !NewTryNote(cx, cg, JSTRY_CATCH, depth, tryStart, tryEnd)) {
5313 return JS_FALSE;
5317 * If we've got a finally, mark try+catch region with additional
5318 * trynote to catch exceptions (re)thrown from a catch block or
5319 * for the try{}finally{} case.
5321 if (pn->pn_kid3 &&
5322 !NewTryNote(cx, cg, JSTRY_FINALLY, depth, tryStart, finallyStart)) {
5323 return JS_FALSE;
5325 break;
5328 case TOK_CATCH:
5330 ptrdiff_t catchStart, guardJump;
5331 JSObject *blockObj;
5334 * Morph STMT_BLOCK to STMT_CATCH, note the block entry code offset,
5335 * and save the block object atom.
5337 stmt = cg->topStmt;
5338 JS_ASSERT(stmt->type == STMT_BLOCK && (stmt->flags & SIF_SCOPE));
5339 stmt->type = STMT_CATCH;
5340 catchStart = stmt->update;
5341 blockObj = stmt->blockObj;
5343 /* Go up one statement info record to the TRY or FINALLY record. */
5344 stmt = stmt->down;
5345 JS_ASSERT(stmt->type == STMT_TRY || stmt->type == STMT_FINALLY);
5347 /* Pick up the pending exception and bind it to the catch variable. */
5348 if (js_Emit1(cx, cg, JSOP_EXCEPTION) < 0)
5349 return JS_FALSE;
5352 * Dup the exception object if there is a guard, so we can use it
5353 * later when rethrowing or in other catches.
5355 if (pn->pn_kid2 && js_Emit1(cx, cg, JSOP_DUP) < 0)
5356 return JS_FALSE;
5358 pn2 = pn->pn_kid1;
5359 switch (pn2->pn_type) {
5360 #if JS_HAS_DESTRUCTURING
5361 case TOK_RB:
5362 case TOK_RC:
5363 if (!EmitDestructuringOps(cx, cg, JSOP_NOP, pn2))
5364 return JS_FALSE;
5365 if (js_Emit1(cx, cg, JSOP_POP) < 0)
5366 return JS_FALSE;
5367 break;
5368 #endif
5370 case TOK_NAME:
5371 /* Inline and specialize BindNameToSlot for pn2. */
5372 JS_ASSERT(pn2->pn_cookie != FREE_UPVAR_COOKIE);
5373 EMIT_UINT16_IMM_OP(JSOP_SETLOCALPOP, pn2->pn_cookie);
5374 break;
5376 default:
5377 JS_ASSERT(0);
5380 /* Emit the guard expression, if there is one. */
5381 if (pn->pn_kid2) {
5382 if (!js_EmitTree(cx, cg, pn->pn_kid2))
5383 return JS_FALSE;
5384 if (!js_SetSrcNoteOffset(cx, cg, CATCHNOTE(*stmt), 0,
5385 CG_OFFSET(cg) - catchStart)) {
5386 return JS_FALSE;
5388 /* ifeq <next block> */
5389 guardJump = EmitJump(cx, cg, JSOP_IFEQ, 0);
5390 if (guardJump < 0)
5391 return JS_FALSE;
5392 GUARDJUMP(*stmt) = guardJump;
5394 /* Pop duplicated exception object as we no longer need it. */
5395 if (js_Emit1(cx, cg, JSOP_POP) < 0)
5396 return JS_FALSE;
5399 /* Emit the catch body. */
5400 if (!js_EmitTree(cx, cg, pn->pn_kid3))
5401 return JS_FALSE;
5404 * Annotate the JSOP_LEAVEBLOCK that will be emitted as we unwind via
5405 * our TOK_LEXICALSCOPE parent, so the decompiler knows to pop.
5407 off = cg->stackDepth;
5408 if (js_NewSrcNote2(cx, cg, SRC_CATCH, off) < 0)
5409 return JS_FALSE;
5410 break;
5413 case TOK_VAR:
5414 if (!EmitVariables(cx, cg, pn, JS_FALSE, &noteIndex))
5415 return JS_FALSE;
5416 break;
5418 case TOK_RETURN:
5419 /* Push a return value */
5420 pn2 = pn->pn_kid;
5421 if (pn2) {
5422 if (!js_EmitTree(cx, cg, pn2))
5423 return JS_FALSE;
5424 } else {
5425 if (js_Emit1(cx, cg, JSOP_PUSH) < 0)
5426 return JS_FALSE;
5430 * EmitNonLocalJumpFixup may add fixup bytecode to close open try
5431 * blocks having finally clauses and to exit intermingled let blocks.
5432 * We can't simply transfer control flow to our caller in that case,
5433 * because we must gosub to those finally clauses from inner to outer,
5434 * with the correct stack pointer (i.e., after popping any with,
5435 * for/in, etc., slots nested inside the finally's try).
5437 * In this case we mutate JSOP_RETURN into JSOP_SETRVAL and add an
5438 * extra JSOP_RETRVAL after the fixups.
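/* E.g. (a sketch): for |try { return e; } finally { ... }| the 'return'
 * emitted below is rewritten to 'setrval', EmitNonLocalJumpFixup emits the
 * gosub to the finally block, and the trailing 'retrval' actually returns,
 * so the finally code runs with the return value already saved. */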
5440 top = CG_OFFSET(cg);
5441 if (js_Emit1(cx, cg, JSOP_RETURN) < 0)
5442 return JS_FALSE;
5443 if (!EmitNonLocalJumpFixup(cx, cg, NULL))
5444 return JS_FALSE;
5445 if (top + JSOP_RETURN_LENGTH != CG_OFFSET(cg)) {
5446 CG_BASE(cg)[top] = JSOP_SETRVAL;
5447 if (js_Emit1(cx, cg, JSOP_RETRVAL) < 0)
5448 return JS_FALSE;
5450 break;
5452 #if JS_HAS_GENERATORS
5453 case TOK_YIELD:
5454 if (!(cg->flags & TCF_IN_FUNCTION)) {
5455 js_ReportCompileErrorNumber(cx, CG_TS(cg), pn, JSREPORT_ERROR,
5456 JSMSG_BAD_RETURN_OR_YIELD,
5457 js_yield_str);
5458 return JS_FALSE;
5460 if (pn->pn_kid) {
5461 if (!js_EmitTree(cx, cg, pn->pn_kid))
5462 return JS_FALSE;
5463 } else {
5464 if (js_Emit1(cx, cg, JSOP_PUSH) < 0)
5465 return JS_FALSE;
5467 if (pn->pn_hidden && js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
5468 return JS_FALSE;
5469 if (js_Emit1(cx, cg, JSOP_YIELD) < 0)
5470 return JS_FALSE;
5471 break;
5472 #endif
5474 case TOK_LC:
5476 #if JS_HAS_XML_SUPPORT
5477 if (pn->pn_arity == PN_UNARY) {
5478 if (!js_EmitTree(cx, cg, pn->pn_kid))
5479 return JS_FALSE;
5480 if (js_Emit1(cx, cg, PN_OP(pn)) < 0)
5481 return JS_FALSE;
5482 break;
5484 #endif
5486 JS_ASSERT(pn->pn_arity == PN_LIST);
5488 noteIndex = -1;
5489 tmp = CG_OFFSET(cg);
5490 if (pn->pn_xflags & PNX_NEEDBRACES) {
5491 noteIndex = js_NewSrcNote2(cx, cg, SRC_BRACE, 0);
5492 if (noteIndex < 0 || js_Emit1(cx, cg, JSOP_NOP) < 0)
5493 return JS_FALSE;
5496 js_PushStatement(cg, &stmtInfo, STMT_BLOCK, top);
5498 JSParseNode *pnchild = pn->pn_head;
5499 if (pn->pn_xflags & PNX_FUNCDEFS) {
5501 * This block contains top-level function definitions. To ensure
5502 * that we emit the bytecode defining them before the rest of code
5503 * in the block we use a separate pass over functions. During the
5504 * main pass later the emitter will add JSOP_NOP with source notes
5505 * for the function to preserve the original function's position
5506 * when decompiling.
5508 * Currently this is used only for functions, as compile-as-we-go
5509 * mode for scripts does not allow separate emitter passes.
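/* For instance (an assumed example): in
 * |function f() { g(); function g(){} }| the first loop below emits the
 * code defining g before the call to g(), and the second loop then emits
 * a nop annotated SRC_FUNCDEF in g's original position for the
 * decompiler. */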
5511 JS_ASSERT(cg->flags & TCF_IN_FUNCTION);
5512 if (pn->pn_xflags & PNX_DESTRUCT) {
5514 * Assign the destructuring arguments before defining any
5515 * functions, see bug 419662.
5517 JS_ASSERT(pnchild->pn_type == TOK_SEMI);
5518 JS_ASSERT(pnchild->pn_kid->pn_type == TOK_COMMA);
5519 if (!js_EmitTree(cx, cg, pnchild))
5520 return JS_FALSE;
5521 pnchild = pnchild->pn_next;
5524 for (pn2 = pnchild; pn2; pn2 = pn2->pn_next) {
5525 if (pn2->pn_type == TOK_FUNCTION) {
5526 if (pn2->pn_op == JSOP_NOP) {
5527 if (!js_EmitTree(cx, cg, pn2))
5528 return JS_FALSE;
5529 } else {
5531 * JSOP_DEFFUN in a top-level block with function
5532 * definitions appears, for example, when "if (true)"
5533 * is optimized away from "if (true) function x() {}".
5534 * See bug 428424.
5536 JS_ASSERT(pn2->pn_op == JSOP_DEFFUN);
5541 for (pn2 = pnchild; pn2; pn2 = pn2->pn_next) {
5542 if (!js_EmitTree(cx, cg, pn2))
5543 return JS_FALSE;
5546 if (noteIndex >= 0 &&
5547 !js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0,
5548 CG_OFFSET(cg) - tmp)) {
5549 return JS_FALSE;
5552 ok = js_PopStatementCG(cx, cg);
5553 break;
5556 case TOK_SEQ:
5557 JS_ASSERT(pn->pn_arity == PN_LIST);
5558 js_PushStatement(cg, &stmtInfo, STMT_SEQ, top);
5559 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
5560 if (!js_EmitTree(cx, cg, pn2))
5561 return JS_FALSE;
5563 ok = js_PopStatementCG(cx, cg);
5564 break;
5566 case TOK_SEMI:
5567 pn2 = pn->pn_kid;
5568 if (pn2) {
5570 * Top-level or called-from-a-native JS_Execute/EvaluateScript,
5571 * debugger, and eval frames may need the value of the ultimate
5572 * expression statement as the script's result, despite the fact
5573 * that it appears useless to the compiler.
5575 * API users may also set the JSOPTION_NO_SCRIPT_RVAL option when
5576 * calling JS_Compile* to suppress JSOP_POPV.
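/* For example (assumed): when compiling the top-level script |x = 2; x + 3;|
 * the final expression statement is emitted with JSOP_POPV, so 5 becomes
 * the script's result as seen by JS_ExecuteScript/JS_EvaluateScript. */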
5578 useful = wantval = !(cg->flags & (TCF_IN_FUNCTION | TCF_NO_SCRIPT_RVAL));
5579 if (!useful) {
5580 if (!CheckSideEffects(cx, cg, pn2, &useful))
5581 return JS_FALSE;
5585 * Don't eliminate apparently useless expressions if they are
5586 * labeled expression statements. The cg->topStmt->update test
5587 * catches the case where we are nesting in js_EmitTree for a
5588 * labeled compound statement.
5590 if (!useful &&
5591 (!cg->topStmt ||
5592 cg->topStmt->type != STMT_LABEL ||
5593 cg->topStmt->update < CG_OFFSET(cg))) {
5594 CG_CURRENT_LINE(cg) = pn2->pn_pos.begin.lineno;
5595 if (!js_ReportCompileErrorNumber(cx, CG_TS(cg), pn2,
5596 JSREPORT_WARNING |
5597 JSREPORT_STRICT,
5598 JSMSG_USELESS_EXPR)) {
5599 return JS_FALSE;
5601 } else {
5602 op = wantval ? JSOP_POPV : JSOP_POP;
5603 #if JS_HAS_DESTRUCTURING
5604 if (!wantval &&
5605 pn2->pn_type == TOK_ASSIGN &&
5606 !MaybeEmitGroupAssignment(cx, cg, op, pn2, &op)) {
5607 return JS_FALSE;
5609 #endif
5610 if (op != JSOP_NOP) {
5611 if (!js_EmitTree(cx, cg, pn2))
5612 return JS_FALSE;
5613 if (js_Emit1(cx, cg, op) < 0)
5614 return JS_FALSE;
5618 break;
5620 case TOK_COLON:
5621 /* Emit an annotated nop so we know to decompile a label. */
5622 atom = pn->pn_atom;
5623 ale = cg->atomList.add(cg->compiler, atom);
5624 if (!ale)
5625 return JS_FALSE;
5626 pn2 = pn->expr();
5627 noteType = (pn2->pn_type == TOK_LC ||
5628 (pn2->pn_type == TOK_LEXICALSCOPE &&
5629 pn2->expr()->pn_type == TOK_LC))
5630 ? SRC_LABELBRACE
5631 : SRC_LABEL;
5632 noteIndex = js_NewSrcNote2(cx, cg, noteType,
5633 (ptrdiff_t) ALE_INDEX(ale));
5634 if (noteIndex < 0 ||
5635 js_Emit1(cx, cg, JSOP_NOP) < 0) {
5636 return JS_FALSE;
5639 /* Emit code for the labeled statement. */
5640 js_PushStatement(cg, &stmtInfo, STMT_LABEL, CG_OFFSET(cg));
5641 stmtInfo.label = atom;
5642 if (!js_EmitTree(cx, cg, pn2))
5643 return JS_FALSE;
5644 if (!js_PopStatementCG(cx, cg))
5645 return JS_FALSE;
5647 /* If the statement was compound, emit a note for the end brace. */
5648 if (noteType == SRC_LABELBRACE) {
5649 if (js_NewSrcNote(cx, cg, SRC_ENDBRACE) < 0 ||
5650 js_Emit1(cx, cg, JSOP_NOP) < 0) {
5651 return JS_FALSE;
5654 break;
5656 case TOK_COMMA:
5658 * Emit SRC_PCDELTA notes on each JSOP_POP between comma operands.
5659 * These notes help the decompiler bracket the bytecodes generated
5660 * from each sub-expression that follows a comma.
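/* Sketch (assumed example): |a, b, c| emits
 *   <a>; pop       annotated SRC_PCDELTA
 *   <b>; pop       annotated SRC_PCDELTA
 *   <c>
 * where each note's offset brackets the bytecode of the operand that
 * follows it. */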
5662 off = noteIndex = -1;
5663 for (pn2 = pn->pn_head; ; pn2 = pn2->pn_next) {
5664 if (!js_EmitTree(cx, cg, pn2))
5665 return JS_FALSE;
5666 tmp = CG_OFFSET(cg);
5667 if (noteIndex >= 0) {
5668 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, tmp-off))
5669 return JS_FALSE;
5671 if (!pn2->pn_next)
5672 break;
5673 off = tmp;
5674 noteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
5675 if (noteIndex < 0 ||
5676 js_Emit1(cx, cg, JSOP_POP) < 0) {
5677 return JS_FALSE;
5680 break;
5682 case TOK_ASSIGN:
5684 * Check left operand type and generate specialized code for it.
5685 * Specialize to avoid ECMA "reference type" values on the operand
5686 * stack, which impose pervasive runtime "GetValue" costs.
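/* A sketch of the compound-assignment case (assumed example): |o.p += e|
 * becomes roughly:
 *   <o>; dup
 *   getprop p
 *   <e>
 *   add            annotated SRC_ASSIGNOP
 *   setprop p
 * leaving the assigned value on the stack and never materializing an ECMA
 * Reference value. */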
5688 pn2 = pn->pn_left;
5689 atomIndex = (jsatomid) -1; /* quell GCC overwarning */
5690 switch (pn2->pn_type) {
5691 case TOK_NAME:
5692 if (!BindNameToSlot(cx, cg, pn2))
5693 return JS_FALSE;
5694 if (pn2->pn_cookie != FREE_UPVAR_COOKIE) {
5695 atomIndex = (jsatomid) pn2->pn_cookie;
5696 } else {
5697 ale = cg->atomList.add(cg->compiler, pn2->pn_atom);
5698 if (!ale)
5699 return JS_FALSE;
5700 atomIndex = ALE_INDEX(ale);
5701 if (!pn2->isConst())
5702 EMIT_INDEX_OP(JSOP_BINDNAME, atomIndex);
5704 break;
5705 case TOK_DOT:
5706 if (!js_EmitTree(cx, cg, pn2->expr()))
5707 return JS_FALSE;
5708 ale = cg->atomList.add(cg->compiler, pn2->pn_atom);
5709 if (!ale)
5710 return JS_FALSE;
5711 atomIndex = ALE_INDEX(ale);
5712 break;
5713 case TOK_LB:
5714 JS_ASSERT(pn2->pn_arity == PN_BINARY);
5715 if (!js_EmitTree(cx, cg, pn2->pn_left))
5716 return JS_FALSE;
5717 if (!js_EmitTree(cx, cg, pn2->pn_right))
5718 return JS_FALSE;
5719 break;
5720 #if JS_HAS_DESTRUCTURING
5721 case TOK_RB:
5722 case TOK_RC:
5723 break;
5724 #endif
5725 case TOK_LP:
5726 if (!js_EmitTree(cx, cg, pn2))
5727 return JS_FALSE;
5728 break;
5729 #if JS_HAS_XML_SUPPORT
5730 case TOK_UNARYOP:
5731 JS_ASSERT(pn2->pn_op == JSOP_SETXMLNAME);
5732 if (!js_EmitTree(cx, cg, pn2->pn_kid))
5733 return JS_FALSE;
5734 if (js_Emit1(cx, cg, JSOP_BINDXMLNAME) < 0)
5735 return JS_FALSE;
5736 break;
5737 #endif
5738 default:
5739 JS_ASSERT(0);
5742 op = PN_OP(pn);
5743 #if JS_HAS_GETTER_SETTER
5744 if (op == JSOP_GETTER || op == JSOP_SETTER) {
5745 if (pn2->pn_type == TOK_NAME && PN_OP(pn2) != JSOP_SETNAME) {
5747 * |x getter = y|, where x is a local or let variable, is not
5748 * supported.
5750 js_ReportCompileErrorNumber(cx,
5751 TS(cg->compiler),
5752 pn2, JSREPORT_ERROR,
5753 JSMSG_BAD_GETTER_OR_SETTER,
5754 (op == JSOP_GETTER)
5755 ? js_getter_str
5756 : js_setter_str);
5757 return JS_FALSE;
5760 /* We'll emit these prefix bytecodes after emitting the r.h.s. */
5761 } else
5762 #endif
5763 /* If += or similar, dup the left operand and get its value. */
5764 if (op != JSOP_NOP) {
5765 switch (pn2->pn_type) {
5766 case TOK_NAME:
5767 if (pn2->isConst()) {
5768 if (PN_OP(pn2) == JSOP_CALLEE) {
5769 if (js_Emit1(cx, cg, JSOP_CALLEE) < 0)
5770 return JS_FALSE;
5771 } else {
5772 EMIT_INDEX_OP(PN_OP(pn2), atomIndex);
5774 } else if (PN_OP(pn2) == JSOP_SETNAME) {
5775 if (js_Emit1(cx, cg, JSOP_DUP) < 0)
5776 return JS_FALSE;
5777 EMIT_INDEX_OP(JSOP_GETXPROP, atomIndex);
5778 } else {
5779 EMIT_UINT16_IMM_OP((PN_OP(pn2) == JSOP_SETGVAR)
5780 ? JSOP_GETGVAR
5781 : (PN_OP(pn2) == JSOP_GETUPVAR)
5782 ? JSOP_GETUPVAR
5783 : (PN_OP(pn2) == JSOP_SETARG)
5784 ? JSOP_GETARG
5785 : JSOP_GETLOCAL,
5786 atomIndex);
5788 break;
5789 case TOK_DOT:
5790 if (js_Emit1(cx, cg, JSOP_DUP) < 0)
5791 return JS_FALSE;
5792 if (pn2->pn_atom == cx->runtime->atomState.lengthAtom) {
5793 if (js_Emit1(cx, cg, JSOP_LENGTH) < 0)
5794 return JS_FALSE;
5795 } else if (pn2->pn_atom == cx->runtime->atomState.protoAtom) {
5796 if (!EmitIndexOp(cx, JSOP_QNAMEPART, atomIndex, cg))
5797 return JS_FALSE;
5798 if (js_Emit1(cx, cg, JSOP_GETELEM) < 0)
5799 return JS_FALSE;
5800 } else {
5801 EMIT_INDEX_OP(JSOP_GETPROP, atomIndex);
5802 }
5803 break;
5804 case TOK_LB:
5805 case TOK_LP:
5806 #if JS_HAS_XML_SUPPORT
5807 case TOK_UNARYOP:
5808 #endif
5809 if (js_Emit1(cx, cg, JSOP_DUP2) < 0)
5810 return JS_FALSE;
5811 if (js_Emit1(cx, cg, JSOP_GETELEM) < 0)
5812 return JS_FALSE;
5813 break;
5814 default:;
5815 }
5816 }
5818 /* Now emit the right operand (it may affect the namespace). */
5819 if (!js_EmitTree(cx, cg, pn->pn_right))
5820 return JS_FALSE;
5822 /* If += etc., emit the binary operator with a decompiler note. */
5823 if (op != JSOP_NOP) {
5824 /*
5825 * Take care to avoid SRC_ASSIGNOP if the left-hand side is a const
5826 * declared in the current compilation unit, as in this case (just
5827 * a bit further below) we will avoid emitting the assignment op.
5828 */
5829 if (pn2->pn_type != TOK_NAME || !pn2->isConst()) {
5830 if (js_NewSrcNote(cx, cg, SRC_ASSIGNOP) < 0)
5831 return JS_FALSE;
5832 }
5833 if (js_Emit1(cx, cg, op) < 0)
5834 return JS_FALSE;
5835 }
5837 /* Left parts such as a.b.c and a[b].c need a decompiler note. */
5838 if (pn2->pn_type != TOK_NAME &&
5839 #if JS_HAS_DESTRUCTURING
5840 pn2->pn_type != TOK_RB &&
5841 pn2->pn_type != TOK_RC &&
5842 #endif
5843 js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0) {
5844 return JS_FALSE;
5845 }
5847 /* Finally, emit the specialized assignment bytecode. */
5848 switch (pn2->pn_type) {
5849 case TOK_NAME:
5850 if (pn2->isConst())
5851 break;
5852 /* FALL THROUGH */
5853 case TOK_DOT:
5854 EMIT_INDEX_OP(PN_OP(pn2), atomIndex);
5855 break;
5856 case TOK_LB:
5857 case TOK_LP:
5858 if (js_Emit1(cx, cg, JSOP_SETELEM) < 0)
5859 return JS_FALSE;
5860 break;
5861 #if JS_HAS_DESTRUCTURING
5862 case TOK_RB:
5863 case TOK_RC:
5864 if (!EmitDestructuringOps(cx, cg, JSOP_SETNAME, pn2))
5865 return JS_FALSE;
5866 break;
5867 #endif
5868 #if JS_HAS_XML_SUPPORT
5869 case TOK_UNARYOP:
5870 if (js_Emit1(cx, cg, JSOP_SETXMLNAME) < 0)
5871 return JS_FALSE;
5872 break;
5873 #endif
5874 default:
5875 JS_ASSERT(0);
5876 }
5877 break;
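/*
 * For illustration: with the TOK_DOT path above, a compound assignment
 * such as a.b += c compiles roughly to
 *
 *   (emit a)          evaluate the base object
 *   JSOP_DUP          keep the base for the final set
 *   JSOP_GETPROP b    load the old value
 *   (emit c)
 *   JSOP_ADD          annotated with SRC_ASSIGNOP
 *   JSOP_SETPROP b    the specialized assignment bytecode
 */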
5879 case TOK_HOOK:
5880 /* Emit the condition, then branch if false to the else part. */
5881 if (!js_EmitTree(cx, cg, pn->pn_kid1))
5882 return JS_FALSE;
5883 noteIndex = js_NewSrcNote(cx, cg, SRC_COND);
5884 if (noteIndex < 0)
5885 return JS_FALSE;
5886 beq = EmitJump(cx, cg, JSOP_IFEQ, 0);
5887 if (beq < 0 || !js_EmitTree(cx, cg, pn->pn_kid2))
5888 return JS_FALSE;
5890 /* Jump around else, fixup the branch, emit else, fixup jump. */
5891 jmp = EmitJump(cx, cg, JSOP_GOTO, 0);
5892 if (jmp < 0)
5893 return JS_FALSE;
5894 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, beq);
5896 /*
5897 * Because each branch pushes a single value, but our stack budgeting
5898 * analysis ignores branches, we now have to adjust cg->stackDepth to
5899 * ignore the value pushed by the first branch. Execution will follow
5900 * only one path, so we must decrement cg->stackDepth.
5901 *
5902 * Failing to do this will foil code, such as the try/catch/finally
5903 * exception handling code generator, that samples cg->stackDepth for
5904 * use at runtime (JSOP_SETSP), or in let expression and block code
5905 * generation, which must use the stack depth to compute local stack
5906 * indexes correctly.
5907 */
5908 JS_ASSERT(cg->stackDepth > 0);
5909 cg->stackDepth--;
5910 if (!js_EmitTree(cx, cg, pn->pn_kid3))
5911 return JS_FALSE;
5912 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
5913 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, jmp - beq))
5914 return JS_FALSE;
5915 break;
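/*
 * For illustration: c ? a : b nets exactly one pushed value at runtime,
 * yet both arms emit a push. Without the cg->stackDepth-- above, the
 * static model would count two pushes, and consumers of cg->stackDepth
 * such as the try/catch/finally code (JSOP_SETSP) and let/block slot
 * computation would see a depth one too high.
 */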
5917 case TOK_OR:
5918 case TOK_AND:
5919 /*
5920 * JSOP_OR converts the operand on the stack to boolean, and if true,
5921 * leaves the original operand value on the stack and jumps; otherwise
5922 * it pops and falls into the next bytecode, which evaluates the right
5923 * operand. The jump goes around the right operand evaluation.
5924 *
5925 * JSOP_AND converts the operand on the stack to boolean, and if false,
5926 * leaves the original operand value on the stack and jumps; otherwise
5927 * it pops and falls into the right operand's bytecode.
5928 */
5929 if (pn->pn_arity == PN_BINARY) {
5930 if (!js_EmitTree(cx, cg, pn->pn_left))
5931 return JS_FALSE;
5932 top = EmitJump(cx, cg, JSOP_BACKPATCH_POP, 0);
5933 if (top < 0)
5934 return JS_FALSE;
5935 if (!js_EmitTree(cx, cg, pn->pn_right))
5936 return JS_FALSE;
5937 off = CG_OFFSET(cg);
5938 pc = CG_CODE(cg, top);
5939 CHECK_AND_SET_JUMP_OFFSET(cx, cg, pc, off - top);
5940 *pc = pn->pn_op;
5941 } else {
5942 JS_ASSERT(pn->pn_arity == PN_LIST);
5943 JS_ASSERT(pn->pn_head->pn_next->pn_next);
5945 /* Left-associative operator chain: avoid too much recursion. */
5946 pn2 = pn->pn_head;
5947 if (!js_EmitTree(cx, cg, pn2))
5948 return JS_FALSE;
5949 top = EmitJump(cx, cg, JSOP_BACKPATCH_POP, 0);
5950 if (top < 0)
5951 return JS_FALSE;
5953 /* Emit nodes between the head and the tail. */
5954 jmp = top;
5955 while ((pn2 = pn2->pn_next)->pn_next) {
5956 if (!js_EmitTree(cx, cg, pn2))
5957 return JS_FALSE;
5958 off = EmitJump(cx, cg, JSOP_BACKPATCH_POP, 0);
5959 if (off < 0)
5960 return JS_FALSE;
5961 if (!SetBackPatchDelta(cx, cg, CG_CODE(cg, jmp), off - jmp))
5962 return JS_FALSE;
5963 jmp = off;
5964 }
5966 if (!js_EmitTree(cx, cg, pn2))
5967 return JS_FALSE;
5969 pn2 = pn->pn_head;
5970 off = CG_OFFSET(cg);
5971 do {
5972 pc = CG_CODE(cg, top);
5973 tmp = GetJumpOffset(cg, pc);
5974 CHECK_AND_SET_JUMP_OFFSET(cx, cg, pc, off - top);
5975 *pc = pn->pn_op;
5976 top += tmp;
5977 } while ((pn2 = pn2->pn_next)->pn_next);
5978 }
5979 break;
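/*
 * For illustration, the PN_LIST arm above compiles a || b || c as a
 * backpatch chain:
 *
 *   (emit a)
 *   JSOP_BACKPATCH_POP   later rewritten to JSOP_OR
 *   (emit b)
 *   JSOP_BACKPATCH_POP   later rewritten to JSOP_OR
 *   (emit c)
 *
 * Each backpatch links to the next via SetBackPatchDelta; the final
 * do/while walks the chain, retargeting every jump to the offset just
 * past c and rewriting each opcode to pn->pn_op.
 */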
5981 case TOK_PLUS:
5982 /* For TCF_IN_FUNCTION test, see TOK_RB concerning JSOP_NEWARRAY. */
5983 if (pn->pn_arity == PN_LIST && pn->pn_count < JS_BIT(16) &&
5984 (cg->flags & TCF_IN_FUNCTION)) {
5985 /* Emit up to the first string literal conventionally. */
5986 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
5987 if (pn2->pn_type == TOK_STRING)
5988 break;
5989 if (!js_EmitTree(cx, cg, pn2))
5990 return JS_FALSE;
5991 if (pn2 != pn->pn_head && js_Emit1(cx, cg, JSOP_ADD) < 0)
5992 return JS_FALSE;
5993 }
5995 /* Emit remainder as a single JSOP_CONCATN. */
5996 for (index = 0; pn2; pn2 = pn2->pn_next, index++) {
5997 if (!js_EmitTree(cx, cg, pn2))
5998 return JS_FALSE;
5999 }
6001 if (index != 0) {
6002 EMIT_UINT16_IMM_OP(JSOP_CONCATN, index);
6004 /* If we had a prefix, the JSOP_CONCATN result must be added to it now. */
6005 if (pn->pn_head->pn_type != TOK_STRING &&
6006 js_Emit1(cx, cg, JSOP_ADD) < 0) {
6007 return JS_FALSE;
6008 }
6009 }
6010 break;
6011 }
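/*
 * For illustration: inside a function, x + "a" + y + "b" emits x
 * conventionally, then "a", y, and "b" followed by JSOP_CONCATN 3,
 * and finally JSOP_ADD to fold the x prefix into the concatenation.
 */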
6012 case TOK_BITOR:
6013 case TOK_BITXOR:
6014 case TOK_BITAND:
6015 case TOK_EQOP:
6016 case TOK_RELOP:
6017 case TOK_IN:
6018 case TOK_INSTANCEOF:
6019 case TOK_SHOP:
6020 case TOK_MINUS:
6021 case TOK_STAR:
6022 case TOK_DIVOP:
6023 if (pn->pn_arity == PN_LIST) {
6024 /* Left-associative operator chain: avoid too much recursion. */
6025 pn2 = pn->pn_head;
6026 if (!js_EmitTree(cx, cg, pn2))
6027 return JS_FALSE;
6028 op = PN_OP(pn);
6029 while ((pn2 = pn2->pn_next) != NULL) {
6030 if (!js_EmitTree(cx, cg, pn2))
6031 return JS_FALSE;
6032 if (js_Emit1(cx, cg, op) < 0)
6033 return JS_FALSE;
6034 }
6035 } else {
6036 #if JS_HAS_XML_SUPPORT
6037 uintN oldflags;
6039 case TOK_DBLCOLON:
6040 if (pn->pn_arity == PN_NAME) {
6041 if (!js_EmitTree(cx, cg, pn->expr()))
6042 return JS_FALSE;
6043 if (!EmitAtomOp(cx, pn, PN_OP(pn), cg))
6044 return JS_FALSE;
6045 break;
6046 }
6048 /*
6049 * Binary :: has a right operand that brackets arbitrary code,
6050 * possibly including a let (a = b) ... expression. We must clear
6051 * TCF_IN_FOR_INIT to avoid mis-compiling such beasts.
6052 */
6053 oldflags = cg->flags;
6054 cg->flags &= ~TCF_IN_FOR_INIT;
6055 #endif
6057 /* Binary operators that evaluate both operands unconditionally. */
6058 if (!js_EmitTree(cx, cg, pn->pn_left))
6059 return JS_FALSE;
6060 if (!js_EmitTree(cx, cg, pn->pn_right))
6061 return JS_FALSE;
6062 #if JS_HAS_XML_SUPPORT
6063 cg->flags |= oldflags & TCF_IN_FOR_INIT;
6064 #endif
6065 if (js_Emit1(cx, cg, PN_OP(pn)) < 0)
6066 return JS_FALSE;
6067 }
6068 break;
6070 case TOK_THROW:
6071 #if JS_HAS_XML_SUPPORT
6072 case TOK_AT:
6073 case TOK_DEFAULT:
6074 JS_ASSERT(pn->pn_arity == PN_UNARY);
6075 /* FALL THROUGH */
6076 #endif
6077 case TOK_UNARYOP:
6078 {
6079 uintN oldflags;
6081 /* Unary op, including unary +/-. */
6082 op = PN_OP(pn);
6083 #if JS_HAS_XML_SUPPORT
6084 if (op == JSOP_XMLNAME) {
6085 if (!EmitXMLName(cx, pn, op, cg))
6086 return JS_FALSE;
6087 break;
6088 }
6089 #endif
6090 pn2 = pn->pn_kid;
6092 /* See js_FoldConstants for why this assertion holds true. */
6093 JS_ASSERT_IF(op == JSOP_TYPEOF, pn2->pn_type == TOK_NAME);
6095 oldflags = cg->flags;
6096 cg->flags &= ~TCF_IN_FOR_INIT;
6097 if (!js_EmitTree(cx, cg, pn2))
6098 return JS_FALSE;
6099 cg->flags |= oldflags & TCF_IN_FOR_INIT;
6100 if (js_Emit1(cx, cg, op) < 0)
6101 return JS_FALSE;
6102 break;
6103 }
6105 case TOK_INC:
6106 case TOK_DEC:
6107 /* Emit lvalue-specialized code for ++/-- operators. */
6108 pn2 = pn->pn_kid;
6109 JS_ASSERT(pn2->pn_type != TOK_RP);
6110 op = PN_OP(pn);
6111 switch (pn2->pn_type) {
6112 default:
6113 JS_ASSERT(pn2->pn_type == TOK_NAME);
6114 pn2->pn_op = op;
6115 if (!BindNameToSlot(cx, cg, pn2))
6116 return JS_FALSE;
6117 op = PN_OP(pn2);
6118 if (op == JSOP_CALLEE) {
6119 if (js_Emit1(cx, cg, op) < 0)
6120 return JS_FALSE;
6121 } else if (pn2->pn_cookie != FREE_UPVAR_COOKIE) {
6122 atomIndex = (jsatomid) pn2->pn_cookie;
6123 EMIT_UINT16_IMM_OP(op, atomIndex);
6124 } else {
6125 JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
6126 if (!EmitAtomOp(cx, pn2, op, cg))
6127 return JS_FALSE;
6128 break;
6129 }
6130 if (pn2->isConst()) {
6131 if (js_Emit1(cx, cg, JSOP_POS) < 0)
6132 return JS_FALSE;
6133 op = PN_OP(pn);
6134 if (!(js_CodeSpec[op].format & JOF_POST)) {
6135 if (js_Emit1(cx, cg, JSOP_ONE) < 0)
6136 return JS_FALSE;
6137 op = (js_CodeSpec[op].format & JOF_INC) ? JSOP_ADD : JSOP_SUB;
6138 if (js_Emit1(cx, cg, op) < 0)
6139 return JS_FALSE;
6140 }
6141 }
6142 break;
6143 case TOK_DOT:
6144 if (!EmitPropOp(cx, pn2, op, cg, JS_FALSE))
6145 return JS_FALSE;
6146 break;
6147 case TOK_LB:
6148 if (!EmitElemOp(cx, pn2, op, cg))
6149 return JS_FALSE;
6150 break;
6151 case TOK_LP:
6152 if (!js_EmitTree(cx, cg, pn2))
6153 return JS_FALSE;
6154 if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
6155 CG_OFFSET(cg) - pn2->pn_offset) < 0) {
6156 return JS_FALSE;
6157 }
6158 if (js_Emit1(cx, cg, op) < 0)
6159 return JS_FALSE;
6160 break;
6161 #if JS_HAS_XML_SUPPORT
6162 case TOK_UNARYOP:
6163 JS_ASSERT(pn2->pn_op == JSOP_SETXMLNAME);
6164 if (!js_EmitTree(cx, cg, pn2->pn_kid))
6165 return JS_FALSE;
6166 if (js_Emit1(cx, cg, JSOP_BINDXMLNAME) < 0)
6167 return JS_FALSE;
6168 if (js_Emit1(cx, cg, op) < 0)
6169 return JS_FALSE;
6170 break;
6171 #endif
6172 }
6173 break;
6175 case TOK_DELETE:
6176 /*
6177 * Under ECMA 3, deleting a non-reference returns true -- but alas we
6178 * must evaluate the operand if it appears it might have side effects.
6179 */
6180 pn2 = pn->pn_kid;
6181 switch (pn2->pn_type) {
6182 case TOK_NAME:
6183 if (!BindNameToSlot(cx, cg, pn2))
6184 return JS_FALSE;
6185 op = PN_OP(pn2);
6186 if (op == JSOP_FALSE) {
6187 if (js_Emit1(cx, cg, op) < 0)
6188 return JS_FALSE;
6189 } else {
6190 if (!EmitAtomOp(cx, pn2, op, cg))
6191 return JS_FALSE;
6192 }
6193 break;
6194 case TOK_DOT:
6195 if (!EmitPropOp(cx, pn2, JSOP_DELPROP, cg, JS_FALSE))
6196 return JS_FALSE;
6197 break;
6198 #if JS_HAS_XML_SUPPORT
6199 case TOK_DBLDOT:
6200 if (!EmitElemOp(cx, pn2, JSOP_DELDESC, cg))
6201 return JS_FALSE;
6202 break;
6203 #endif
6204 case TOK_LB:
6205 if (!EmitElemOp(cx, pn2, JSOP_DELELEM, cg))
6206 return JS_FALSE;
6207 break;
6208 default:
6209 /*
6210 * If useless, just emit JSOP_TRUE; otherwise convert delete foo()
6211 * to foo(), true (a comma expression, requiring SRC_PCDELTA).
6212 */
6213 useful = JS_FALSE;
6214 if (!CheckSideEffects(cx, cg, pn2, &useful))
6215 return JS_FALSE;
6216 if (!useful) {
6217 off = noteIndex = -1;
6218 } else {
6219 if (pn2->pn_op == JSOP_SETCALL)
6220 pn2->pn_op = JSOP_CALL;
6221 if (!js_EmitTree(cx, cg, pn2))
6222 return JS_FALSE;
6223 off = CG_OFFSET(cg);
6224 noteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
6225 if (noteIndex < 0 || js_Emit1(cx, cg, JSOP_POP) < 0)
6226 return JS_FALSE;
6227 }
6228 if (js_Emit1(cx, cg, JSOP_TRUE) < 0)
6229 return JS_FALSE;
6230 if (noteIndex >= 0) {
6231 tmp = CG_OFFSET(cg);
6232 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, tmp-off))
6233 return JS_FALSE;
6234 }
6236 break;
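/*
 * For illustration: delete foo() has no reference to delete, so the
 * default arm above emits it like the comma expression foo(), true --
 * the call runs for effect, JSOP_POP discards its result, JSOP_TRUE
 * supplies the value, and SRC_PCDELTA ties the two for the decompiler.
 */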
6238 #if JS_HAS_XML_SUPPORT
6239 case TOK_FILTER:
6240 if (!js_EmitTree(cx, cg, pn->pn_left))
6241 return JS_FALSE;
6242 jmp = js_Emit3(cx, cg, JSOP_FILTER, 0, 0);
6243 if (jmp < 0)
6244 return JS_FALSE;
6245 top = js_Emit1(cx, cg, JSOP_TRACE);
6246 if (top < 0)
6247 return JS_FALSE;
6248 if (!js_EmitTree(cx, cg, pn->pn_right))
6249 return JS_FALSE;
6250 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
6251 if (EmitJump(cx, cg, JSOP_ENDFILTER, top - CG_OFFSET(cg)) < 0)
6252 return JS_FALSE;
6253 break;
6254 #endif
6256 case TOK_DOT:
6257 /*
6258 * Pop a stack operand, convert it to object, get a property named by
6259 * this bytecode's immediate-indexed atom operand, and push its value
6260 * (not a reference to it).
6261 */
6262 ok = EmitPropOp(cx, pn, PN_OP(pn), cg, JS_FALSE);
6263 break;
6265 case TOK_LB:
6266 #if JS_HAS_XML_SUPPORT
6267 case TOK_DBLDOT:
6268 #endif
6269 /*
6270 * Pop two operands, convert the left one to object and the right one
6271 * to property name (atom or tagged int), get the named property, and
6272 * push its value. Set the "obj" register to the result of ToObject
6273 * on the left operand.
6274 */
6275 ok = EmitElemOp(cx, pn, PN_OP(pn), cg);
6276 break;
6278 case TOK_NEW:
6279 case TOK_LP:
6281 bool callop = (PN_TYPE(pn) == TOK_LP);
6282 uintN oldflags;
6284 /*
6285 * Emit callable invocation or operator new (constructor call) code.
6286 * First, emit code for the left operand to evaluate the callable or
6287 * constructable object expression.
6288 *
6289 * For operator new applied to other expressions than E4X ones, we emit
6290 * JSOP_GETPROP instead of JSOP_CALLPROP, etc. This is necessary to
6291 * interpose the lambda-initialized method read barrier -- see the code
6292 * in jsops.cpp for JSOP_LAMBDA followed by JSOP_{SET,INIT}PROP.
6293 *
6294 * Then (or in a call case that has no explicit reference-base object)
6295 * we emit JSOP_NULL as a placeholder local GC root to hold the |this|
6296 * parameter: in the operator new case, the newborn instance; in the
6297 * base-less call case, a cookie meaning "use the global object as the
6298 * |this| value" (or in ES5 strict mode, "use undefined", so we should
6299 * use JSOP_PUSH instead of JSOP_NULL -- see bug 514570).
6300 */
6301 pn2 = pn->pn_head;
6302 switch (pn2->pn_type) {
6303 case TOK_NAME:
6304 if (!EmitNameOp(cx, cg, pn2, callop))
6305 return JS_FALSE;
6306 break;
6307 case TOK_DOT:
6308 if (!EmitPropOp(cx, pn2, PN_OP(pn2), cg, callop))
6309 return JS_FALSE;
6310 break;
6311 case TOK_LB:
6312 JS_ASSERT(pn2->pn_op == JSOP_GETELEM);
6313 if (!EmitElemOp(cx, pn2, callop ? JSOP_CALLELEM : JSOP_GETELEM, cg))
6314 return JS_FALSE;
6315 break;
6316 case TOK_UNARYOP:
6317 #if JS_HAS_XML_SUPPORT
6318 if (pn2->pn_op == JSOP_XMLNAME) {
6319 if (!EmitXMLName(cx, pn2, JSOP_CALLXMLNAME, cg))
6320 return JS_FALSE;
6321 callop = true; /* suppress JSOP_NULL after */
6322 break;
6323 }
6324 #endif
6325 /* FALL THROUGH */
6326 default:
6327 /*
6328 * Push null as a placeholder for the global object, per ECMA-262
6329 * 11.2.3 step 6.
6330 */
6331 if (!js_EmitTree(cx, cg, pn2))
6332 return JS_FALSE;
6333 callop = false; /* trigger JSOP_NULL after */
6334 break;
6335 }
6336 if (!callop && js_Emit1(cx, cg, JSOP_NULL) < 0)
6337 return JS_FALSE;
6339 /* Remember start of callable-object bytecode for decompilation hint. */
6340 off = top;
6342 /*
6343 * Emit code for each argument in order, then emit the JSOP_*CALL or
6344 * JSOP_NEW bytecode with a two-byte immediate telling how many args
6345 * were pushed on the operand stack.
6346 */
6347 oldflags = cg->flags;
6348 cg->flags &= ~TCF_IN_FOR_INIT;
6349 for (pn3 = pn2->pn_next; pn3; pn3 = pn3->pn_next) {
6350 if (!js_EmitTree(cx, cg, pn3))
6351 return JS_FALSE;
6352 }
6353 cg->flags |= oldflags & TCF_IN_FOR_INIT;
6354 if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - off) < 0)
6355 return JS_FALSE;
6357 argc = pn->pn_count - 1;
6358 if (js_Emit3(cx, cg, PN_OP(pn), ARGC_HI(argc), ARGC_LO(argc)) < 0)
6359 return JS_FALSE;
6360 if (PN_OP(pn) == JSOP_CALL) {
6361 /* Add a trace hint opcode for recursion. */
6362 if (js_Emit1(cx, cg, JSOP_TRACE) < 0)
6363 return JS_FALSE;
6364 }
6365 if (PN_OP(pn) == JSOP_EVAL)
6366 EMIT_UINT16_IMM_OP(JSOP_LINENO, pn->pn_pos.begin.lineno);
6367 break;
6368 }
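/*
 * For illustration: a plain call f(a, b) emits roughly JSOP_CALLNAME f
 * (pushing the callee and the implicit |this|), then a and b, then
 * JSOP_CALL 2 and the JSOP_TRACE recursion hint; new f(a, b) instead
 * emits JSOP_NAME f, a JSOP_NULL placeholder for |this|, the arguments,
 * and JSOP_NEW 2.
 */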
6370 case TOK_LEXICALSCOPE:
6371 {
6372 JSObjectBox *objbox;
6373 uintN count;
6375 objbox = pn->pn_objbox;
6376 js_PushBlockScope(cg, &stmtInfo, objbox->object, CG_OFFSET(cg));
6378 /*
6379 * If this lexical scope is not for a catch block, let block or let
6380 * expression, or any kind of for loop (where the scope starts in the
6381 * head after the first part for a for(;;) loop, else in the body for
6382 * a for-in loop); and if our container is top-level but not a function
6383 * body, or else a block statement; then emit a SRC_BRACE note. All
6384 * other container statements get braces by default from the decompiler.
6385 */
6386 noteIndex = -1;
6387 type = PN_TYPE(pn->expr());
6388 if (type != TOK_CATCH && type != TOK_LET && type != TOK_FOR &&
6389 (!(stmt = stmtInfo.down)
6390 ? !(cg->flags & TCF_IN_FUNCTION)
6391 : stmt->type == STMT_BLOCK)) {
6392 #if defined DEBUG_brendan || defined DEBUG_mrbkap
6393 /* There must be no source note already output for the next op. */
6394 JS_ASSERT(CG_NOTE_COUNT(cg) == 0 ||
6395 CG_LAST_NOTE_OFFSET(cg) != CG_OFFSET(cg) ||
6396 !GettableNoteForNextOp(cg));
6397 #endif
6398 noteIndex = js_NewSrcNote2(cx, cg, SRC_BRACE, 0);
6399 if (noteIndex < 0)
6400 return JS_FALSE;
6401 }
6403 JS_ASSERT(CG_OFFSET(cg) == top);
6404 if (!EmitEnterBlock(cx, pn, cg))
6405 return JS_FALSE;
6407 if (!js_EmitTree(cx, cg, pn->pn_expr))
6408 return JS_FALSE;
6410 op = PN_OP(pn);
6411 if (op == JSOP_LEAVEBLOCKEXPR) {
6412 if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
6413 return JS_FALSE;
6414 } else {
6415 if (noteIndex >= 0 &&
6416 !js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0,
6417 CG_OFFSET(cg) - top)) {
6418 return JS_FALSE;
6419 }
6420 }
6422 /* Emit the JSOP_LEAVEBLOCK or JSOP_LEAVEBLOCKEXPR opcode. */
6423 count = OBJ_BLOCK_COUNT(cx, objbox->object);
6424 EMIT_UINT16_IMM_OP(op, count);
6426 ok = js_PopStatementCG(cx, cg);
6427 break;
6428 }
6430 #if JS_HAS_BLOCK_SCOPE
6431 case TOK_LET:
6432 /* Let statements have their variable declarations on the left. */
6433 if (pn->pn_arity == PN_BINARY) {
6434 pn2 = pn->pn_right;
6435 pn = pn->pn_left;
6436 } else {
6437 pn2 = NULL;
6438 }
6440 /* Non-null pn2 means that pn is the variable list from a let head. */
6441 JS_ASSERT(pn->pn_arity == PN_LIST);
6442 if (!EmitVariables(cx, cg, pn, pn2 != NULL, &noteIndex))
6443 return JS_FALSE;
6445 /* Thus non-null pn2 is the body of the let block or expression. */
6446 tmp = CG_OFFSET(cg);
6447 if (pn2 && !js_EmitTree(cx, cg, pn2))
6448 return JS_FALSE;
6450 if (noteIndex >= 0 &&
6451 !js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0,
6452 CG_OFFSET(cg) - tmp)) {
6453 return JS_FALSE;
6454 }
6455 break;
6456 #endif /* JS_HAS_BLOCK_SCOPE */
6458 #if JS_HAS_GENERATORS
6459 case TOK_ARRAYPUSH: {
6460 jsint slot;
6462 /*
6463 * The array object's stack index is in cg->arrayCompDepth. See below
6464 * under the array initialiser code generator for array comprehension
6465 * special casing.
6466 */
6467 if (!js_EmitTree(cx, cg, pn->pn_kid))
6468 return JS_FALSE;
6469 slot = AdjustBlockSlot(cx, cg, cg->arrayCompDepth);
6470 if (slot < 0)
6471 return JS_FALSE;
6472 EMIT_UINT16_IMM_OP(PN_OP(pn), slot);
6473 break;
6474 }
6475 #endif
6477 case TOK_RB:
6478 #if JS_HAS_GENERATORS
6479 case TOK_ARRAYCOMP:
6480 #endif
6481 /*
6482 * Emit code for [a, b, c] that is equivalent to constructing a new
6483 * array and in source order evaluating each element value and adding
6484 * it to the array, without invoking latent setters. We use the
6485 * JSOP_NEWINIT and JSOP_INITELEM bytecodes to ignore setters and to
6486 * avoid dup'ing and popping the array as each element is added, as
6487 * JSOP_SETELEM/JSOP_SETPROP would do.
6488 *
6489 * If no sharp variable is defined, the initializer is not for an array
6490 * comprehension, the initializer is not overlarge, and the initializer
6491 * is not in global code (whose stack growth cannot be precisely modeled
6492 * due to the need to reserve space for global variables and regular
6493 * expressions), use JSOP_NEWARRAY to minimize opcodes and to create the
6494 * array using a fast, all-at-once process rather than a slow, element-
6495 * by-element process.
6496 */
6497 #if JS_HAS_SHARP_VARS
6498 sharpnum = -1;
6499 do_emit_array:
6500 #endif
6502 op = (JS_LIKELY(pn->pn_count < JS_BIT(16)) && (cg->flags & TCF_IN_FUNCTION))
6503 ? JSOP_NEWARRAY
6504 : JSOP_NEWINIT;
6506 #if JS_HAS_GENERATORS
6507 if (pn->pn_type == TOK_ARRAYCOMP)
6508 op = JSOP_NEWINIT;
6509 #endif
6510 #if JS_HAS_SHARP_VARS
6511 JS_ASSERT_IF(sharpnum >= 0, cg->hasSharps());
6512 if (cg->hasSharps())
6513 op = JSOP_NEWINIT;
6514 #endif
6516 if (op == JSOP_NEWINIT && !EmitNewInit(cx, cg, JSProto_Array, pn, sharpnum))
6517 return JS_FALSE;
6519 #if JS_HAS_GENERATORS
6520 if (pn->pn_type == TOK_ARRAYCOMP) {
6521 uintN saveDepth;
6523 /*
6524 * Pass the new array's stack index to the TOK_ARRAYPUSH case via
6525 * cg->arrayCompDepth, then simply traverse the TOK_FOR node and
6526 * its kids under pn2 to generate this comprehension.
6527 */
6528 JS_ASSERT(cg->stackDepth > 0);
6529 saveDepth = cg->arrayCompDepth;
6530 cg->arrayCompDepth = (uint32) (cg->stackDepth - 1);
6531 if (!js_EmitTree(cx, cg, pn->pn_head))
6532 return JS_FALSE;
6533 cg->arrayCompDepth = saveDepth;
6535 /* Emit the usual op needed for decompilation. */
6536 if (!EmitEndInit(cx, cg, 1))
6537 return JS_FALSE;
6538 break;
6539 }
6540 #endif /* JS_HAS_GENERATORS */
6542 pn2 = pn->pn_head;
6543 for (atomIndex = 0; pn2; atomIndex++, pn2 = pn2->pn_next) {
6544 if (op == JSOP_NEWINIT && !EmitNumberOp(cx, atomIndex, cg))
6545 return JS_FALSE;
6546 if (pn2->pn_type == TOK_COMMA && pn2->pn_arity == PN_NULLARY) {
6547 if (js_Emit1(cx, cg, JSOP_HOLE) < 0)
6548 return JS_FALSE;
6549 } else {
6550 if (!js_EmitTree(cx, cg, pn2))
6551 return JS_FALSE;
6552 }
6553 if (op == JSOP_NEWINIT && js_Emit1(cx, cg, JSOP_INITELEM) < 0)
6554 return JS_FALSE;
6555 }
6556 JS_ASSERT(atomIndex == pn->pn_count);
6558 if (pn->pn_xflags & PNX_ENDCOMMA) {
6559 /* Emit a source note so we know to decompile an extra comma. */
6560 if (js_NewSrcNote(cx, cg, SRC_CONTINUE) < 0)
6561 return JS_FALSE;
6562 }
6564 if (op == JSOP_NEWINIT) {
6565 /*
6566 * Emit an op to finish the array and, secondarily, to aid in sharp
6567 * array cleanup (if JS_HAS_SHARP_VARS) and decompilation.
6568 */
6569 if (!EmitEndInit(cx, cg, atomIndex))
6570 return JS_FALSE;
6571 break;
6572 }
6574 JS_ASSERT(atomIndex < JS_BIT(16));
6575 EMIT_UINT16_IMM_OP(JSOP_NEWARRAY, atomIndex);
6576 break;
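/*
 * For illustration: in function code, [1, , 3] takes the fast path above
 * and emits 1, JSOP_HOLE, 3, then JSOP_NEWARRAY 3; the same literal in
 * global code emits JSOP_NEWINIT, then an index plus JSOP_INITELEM per
 * element, then the closing op from EmitEndInit.
 */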
6578 case TOK_RC:
6579 #if JS_HAS_SHARP_VARS
6580 sharpnum = -1;
6581 do_emit_object:
6582 #endif
6583 #if JS_HAS_DESTRUCTURING_SHORTHAND
6584 if (pn->pn_xflags & PNX_DESTRUCT) {
6585 js_ReportCompileErrorNumber(cx, CG_TS(cg), pn, JSREPORT_ERROR,
6586 JSMSG_BAD_OBJECT_INIT);
6587 return JS_FALSE;
6588 }
6589 #endif
6590 /*
6591 * Emit code for {p:a, '%q':b, 2:c} that is equivalent to constructing
6592 * a new object and in source order evaluating each property value and
6593 * adding the property to the object, without invoking latent setters.
6594 * We use the JSOP_NEWINIT and JSOP_INITELEM/JSOP_INITPROP bytecodes to
6595 * ignore setters and to avoid dup'ing and popping the object as each
6596 * property is added, as JSOP_SETELEM/JSOP_SETPROP would do.
6597 */
6598 if (!EmitNewInit(cx, cg, JSProto_Object, pn, sharpnum))
6599 return JS_FALSE;
6601 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
6602 /* Emit an index for t[2] for later consumption by JSOP_INITELEM. */
6603 pn3 = pn2->pn_left;
6604 if (pn3->pn_type == TOK_NUMBER) {
6605 if (!EmitNumberOp(cx, pn3->pn_dval, cg))
6606 return JS_FALSE;
6607 }
6609 /* Emit code for the property initializer. */
6610 if (!js_EmitTree(cx, cg, pn2->pn_right))
6611 return JS_FALSE;
6613 #if JS_HAS_GETTER_SETTER
6614 op = PN_OP(pn2);
6615 if (op == JSOP_GETTER || op == JSOP_SETTER) {
6616 if (js_Emit1(cx, cg, op) < 0)
6617 return JS_FALSE;
6618 }
6619 #endif
6620 /* Annotate JSOP_INITELEM so we decompile 2:c and not just c. */
6621 if (pn3->pn_type == TOK_NUMBER) {
6622 if (js_NewSrcNote(cx, cg, SRC_INITPROP) < 0)
6623 return JS_FALSE;
6624 if (js_Emit1(cx, cg, JSOP_INITELEM) < 0)
6625 return JS_FALSE;
6626 } else {
6627 JS_ASSERT(pn3->pn_type == TOK_NAME ||
6628 pn3->pn_type == TOK_STRING);
6629 ale = cg->atomList.add(cg->compiler, pn3->pn_atom);
6630 if (!ale)
6631 return JS_FALSE;
6633 JSOp initOp = (PN_OP(pn2->pn_right) == JSOP_LAMBDA &&
6634 !(pn2->pn_right->pn_funbox->tcflags
6635 & (TCF_FUN_USES_ARGUMENTS | TCF_FUN_USES_OWN_NAME))
6636 #if JS_HAS_GETTER_SETTER
6637 && op != JSOP_GETTER && op != JSOP_SETTER
6638 #endif
6640 ? JSOP_INITMETHOD
6641 : JSOP_INITPROP;
6642 EMIT_INDEX_OP(initOp, ALE_INDEX(ale));
6643 }
6644 }
6646 if (!EmitEndInit(cx, cg, pn->pn_count))
6647 return JS_FALSE;
6648 break;
6650 #if JS_HAS_SHARP_VARS
6651 case TOK_DEFSHARP:
6652 JS_ASSERT(cg->hasSharps());
6653 sharpnum = pn->pn_num;
6654 pn = pn->pn_kid;
6655 if (pn->pn_type == TOK_RB)
6656 goto do_emit_array;
6657 # if JS_HAS_GENERATORS
6658 if (pn->pn_type == TOK_ARRAYCOMP)
6659 goto do_emit_array;
6660 # endif
6661 if (pn->pn_type == TOK_RC)
6662 goto do_emit_object;
6664 if (!js_EmitTree(cx, cg, pn))
6665 return JS_FALSE;
6666 EMIT_UINT16PAIR_IMM_OP(JSOP_DEFSHARP, cg->sharpSlotBase, (jsatomid) sharpnum);
6667 break;
6669 case TOK_USESHARP:
6670 JS_ASSERT(cg->hasSharps());
6671 EMIT_UINT16PAIR_IMM_OP(JSOP_USESHARP, cg->sharpSlotBase, (jsatomid) pn->pn_num);
6672 break;
6673 #endif /* JS_HAS_SHARP_VARS */
6675 case TOK_NAME:
6676 /*
6677 * Cope with a left-over function definition that was replaced by a use
6678 * of a later function definition of the same name. See FunctionDef and
6679 * MakeDefIntoUse in jsparse.cpp.
6680 */
6681 if (pn->pn_op == JSOP_NOP)
6682 return JS_TRUE;
6683 if (!EmitNameOp(cx, cg, pn, JS_FALSE))
6684 return JS_FALSE;
6685 break;
6687 #if JS_HAS_XML_SUPPORT
6688 case TOK_XMLATTR:
6689 case TOK_XMLSPACE:
6690 case TOK_XMLTEXT:
6691 case TOK_XMLCDATA:
6692 case TOK_XMLCOMMENT:
6693 #endif
6694 case TOK_STRING:
6695 ok = EmitAtomOp(cx, pn, PN_OP(pn), cg);
6696 break;
6698 case TOK_NUMBER:
6699 ok = EmitNumberOp(cx, pn->pn_dval, cg);
6700 break;
6702 case TOK_REGEXP:
6703 /*
6704 * If the regexp's script is one-shot, we can avoid the extra
6705 * fork-on-exec costs of JSOP_REGEXP by selecting JSOP_OBJECT.
6706 * Otherwise, to avoid incorrect proto, parent, and lastIndex
6707 * sharing among threads and sequentially across re-execution,
6708 * select JSOP_REGEXP.
6709 */
6710 JS_ASSERT(pn->pn_op == JSOP_REGEXP);
6711 if (cg->flags & TCF_COMPILE_N_GO) {
6712 ok = EmitObjectOp(cx, pn->pn_objbox, JSOP_OBJECT, cg);
6713 } else {
6714 ok = EmitIndexOp(cx, JSOP_REGEXP,
6715 cg->regexpList.index(pn->pn_objbox),
6716 cg);
6717 }
6718 break;
6720 #if JS_HAS_XML_SUPPORT
6721 case TOK_ANYNAME:
6722 #endif
6723 case TOK_PRIMARY:
6724 if (js_Emit1(cx, cg, PN_OP(pn)) < 0)
6725 return JS_FALSE;
6726 break;
6728 #if JS_HAS_DEBUGGER_KEYWORD
6729 case TOK_DEBUGGER:
6730 if (js_Emit1(cx, cg, JSOP_DEBUGGER) < 0)
6731 return JS_FALSE;
6732 break;
6733 #endif /* JS_HAS_DEBUGGER_KEYWORD */
6735 #if JS_HAS_XML_SUPPORT
6736 case TOK_XMLELEM:
6737 case TOK_XMLLIST:
6738 if (pn->pn_op == JSOP_XMLOBJECT) {
6739 ok = EmitObjectOp(cx, pn->pn_objbox, PN_OP(pn), cg);
6740 break;
6741 }
6743 JS_ASSERT(PN_TYPE(pn) == TOK_XMLLIST || pn->pn_count != 0);
6744 switch (pn->pn_head ? PN_TYPE(pn->pn_head) : TOK_XMLLIST) {
6745 case TOK_XMLETAGO:
6746 JS_ASSERT(0);
6747 /* FALL THROUGH */
6748 case TOK_XMLPTAGC:
6749 case TOK_XMLSTAGO:
6750 break;
6751 default:
6752 if (js_Emit1(cx, cg, JSOP_STARTXML) < 0)
6753 return JS_FALSE;
6754 }
6756 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
6757 if (pn2->pn_type == TOK_LC &&
6758 js_Emit1(cx, cg, JSOP_STARTXMLEXPR) < 0) {
6759 return JS_FALSE;
6760 }
6761 if (!js_EmitTree(cx, cg, pn2))
6762 return JS_FALSE;
6763 if (pn2 != pn->pn_head && js_Emit1(cx, cg, JSOP_ADD) < 0)
6764 return JS_FALSE;
6765 }
6767 if (pn->pn_xflags & PNX_XMLROOT) {
6768 if (pn->pn_count == 0) {
6769 JS_ASSERT(pn->pn_type == TOK_XMLLIST);
6770 atom = cx->runtime->atomState.emptyAtom;
6771 ale = cg->atomList.add(cg->compiler, atom);
6772 if (!ale)
6773 return JS_FALSE;
6774 EMIT_INDEX_OP(JSOP_STRING, ALE_INDEX(ale));
6775 }
6776 if (js_Emit1(cx, cg, PN_OP(pn)) < 0)
6777 return JS_FALSE;
6778 }
6779 #ifdef DEBUG
6780 else
6781 JS_ASSERT(pn->pn_count != 0);
6782 #endif
6783 break;
6785 case TOK_XMLPTAGC:
6786 if (pn->pn_op == JSOP_XMLOBJECT) {
6787 ok = EmitObjectOp(cx, pn->pn_objbox, PN_OP(pn), cg);
6788 break;
6789 }
6790 /* FALL THROUGH */
6792 case TOK_XMLSTAGO:
6793 case TOK_XMLETAGO:
6794 {
6795 uint32 i;
6797 if (js_Emit1(cx, cg, JSOP_STARTXML) < 0)
6798 return JS_FALSE;
6800 ale = cg->atomList.add(cg->compiler,
6801 (pn->pn_type == TOK_XMLETAGO)
6802 ? cx->runtime->atomState.etagoAtom
6803 : cx->runtime->atomState.stagoAtom);
6804 if (!ale)
6805 return JS_FALSE;
6806 EMIT_INDEX_OP(JSOP_STRING, ALE_INDEX(ale));
6808 JS_ASSERT(pn->pn_count != 0);
6809 pn2 = pn->pn_head;
6810 if (pn2->pn_type == TOK_LC && js_Emit1(cx, cg, JSOP_STARTXMLEXPR) < 0)
6811 return JS_FALSE;
6812 if (!js_EmitTree(cx, cg, pn2))
6813 return JS_FALSE;
6814 if (js_Emit1(cx, cg, JSOP_ADD) < 0)
6815 return JS_FALSE;
6817 for (pn2 = pn2->pn_next, i = 0; pn2; pn2 = pn2->pn_next, i++) {
6818 if (pn2->pn_type == TOK_LC &&
6819 js_Emit1(cx, cg, JSOP_STARTXMLEXPR) < 0) {
6820 return JS_FALSE;
6821 }
6822 if (!js_EmitTree(cx, cg, pn2))
6823 return JS_FALSE;
6824 if ((i & 1) && pn2->pn_type == TOK_LC) {
6825 if (js_Emit1(cx, cg, JSOP_TOATTRVAL) < 0)
6826 return JS_FALSE;
6827 }
6828 if (js_Emit1(cx, cg,
6829 (i & 1) ? JSOP_ADDATTRVAL : JSOP_ADDATTRNAME) < 0) {
6830 return JS_FALSE;
6831 }
6832 }
6834 ale = cg->atomList.add(cg->compiler,
6835 (pn->pn_type == TOK_XMLPTAGC)
6836 ? cx->runtime->atomState.ptagcAtom
6837 : cx->runtime->atomState.tagcAtom);
6838 if (!ale)
6839 return JS_FALSE;
6840 EMIT_INDEX_OP(JSOP_STRING, ALE_INDEX(ale));
6841 if (js_Emit1(cx, cg, JSOP_ADD) < 0)
6842 return JS_FALSE;
6844 if ((pn->pn_xflags & PNX_XMLROOT) && js_Emit1(cx, cg, PN_OP(pn)) < 0)
6845 return JS_FALSE;
6846 break;
6847 }
6849 case TOK_XMLNAME:
6850 if (pn->pn_arity == PN_LIST) {
6851 JS_ASSERT(pn->pn_count != 0);
6852 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
6853 if (pn2->pn_type == TOK_LC &&
6854 js_Emit1(cx, cg, JSOP_STARTXMLEXPR) < 0) {
6855 return JS_FALSE;
6856 }
6857 if (!js_EmitTree(cx, cg, pn2))
6858 return JS_FALSE;
6859 if (pn2 != pn->pn_head && js_Emit1(cx, cg, JSOP_ADD) < 0)
6860 return JS_FALSE;
6861 }
6862 } else {
6863 JS_ASSERT(pn->pn_arity == PN_NULLARY);
6864 ok = (pn->pn_op == JSOP_OBJECT)
6865 ? EmitObjectOp(cx, pn->pn_objbox, PN_OP(pn), cg)
6866 : EmitAtomOp(cx, pn, PN_OP(pn), cg);
6867 }
6868 break;
6870 case TOK_XMLPI:
6871 ale = cg->atomList.add(cg->compiler, pn->pn_atom2);
6872 if (!ale)
6873 return JS_FALSE;
6874 if (!EmitIndexOp(cx, JSOP_QNAMEPART, ALE_INDEX(ale), cg))
6875 return JS_FALSE;
6876 if (!EmitAtomOp(cx, pn, JSOP_XMLPI, cg))
6877 return JS_FALSE;
6878 break;
6879 #endif /* JS_HAS_XML_SUPPORT */
6881 default:
6882 JS_ASSERT(0);
6883 }
6885 if (ok && --cg->emitLevel == 0) {
6886 if (cg->spanDeps)
6887 ok = OptimizeSpanDeps(cx, cg);
6888 if (!UpdateLineNumberNotes(cx, cg, pn->pn_pos.end.lineno))
6889 return JS_FALSE;
6890 }
6892 return ok;
6893 }
6895 /*
6896 * We should try to get rid of offsetBias (always 0 or 1, where 1 is
6897 * JSOP_{NOP,POP}_LENGTH), which is used only by SRC_FOR and SRC_DECL.
6898 */
6899 JS_FRIEND_DATA(JSSrcNoteSpec) js_SrcNoteSpec[] = {
6900 {"null", 0, 0, 0},
6901 {"if", 0, 0, 0},
6902 {"if-else", 2, 0, 1},
6903 {"for", 3, 1, 1},
6904 {"while", 1, 0, 1},
6905 {"continue", 0, 0, 0},
6906 {"decl", 1, 1, 1},
6907 {"pcdelta", 1, 0, 1},
6908 {"assignop", 0, 0, 0},
6909 {"cond", 1, 0, 1},
6910 {"brace", 1, 0, 1},
6911 {"hidden", 0, 0, 0},
6912 {"pcbase", 1, 0, -1},
6913 {"label", 1, 0, 0},
6914 {"labelbrace", 1, 0, 0},
6915 {"endbrace", 0, 0, 0},
6916 {"break2label", 1, 0, 0},
6917 {"cont2label", 1, 0, 0},
6918 {"switch", 2, 0, 1},
6919 {"funcdef", 1, 0, 0},
6920 {"catch", 1, 0, 1},
6921 {"extended", -1, 0, 0},
6922 {"newline", 0, 0, 0},
6923 {"setline", 1, 0, 0},
6924 {"xdelta", 0, 0, 0},
6927 static intN
6928 AllocSrcNote(JSContext *cx, JSCodeGenerator *cg)
6929 {
6930 intN index;
6931 JSArenaPool *pool;
6932 size_t size;
6934 index = CG_NOTE_COUNT(cg);
6935 if (((uintN)index & CG_NOTE_MASK(cg)) == 0) {
6936 pool = cg->notePool;
6937 size = SRCNOTE_SIZE(CG_NOTE_MASK(cg) + 1);
6938 if (!CG_NOTES(cg)) {
6939 /* Allocate the first note array lazily; leave noteMask alone. */
6940 JS_ARENA_ALLOCATE_CAST(CG_NOTES(cg), jssrcnote *, pool, size);
6941 } else {
6942 /* Grow by doubling note array size; update noteMask on success. */
6943 JS_ARENA_GROW_CAST(CG_NOTES(cg), jssrcnote *, pool, size, size);
6944 if (CG_NOTES(cg))
6945 CG_NOTE_MASK(cg) = (CG_NOTE_MASK(cg) << 1) | 1;
6946 }
6947 if (!CG_NOTES(cg)) {
6948 js_ReportOutOfScriptQuota(cx);
6949 return -1;
6950 }
6951 }
6953 CG_NOTE_COUNT(cg) = index + 1;
6954 return index;
6955 }
6957 intN
6958 js_NewSrcNote(JSContext *cx, JSCodeGenerator *cg, JSSrcNoteType type)
6959 {
6960 intN index, n;
6961 jssrcnote *sn;
6962 ptrdiff_t offset, delta, xdelta;
6964 /*
6965 * Claim a note slot in CG_NOTES(cg) by growing it if necessary and then
6966 * incrementing CG_NOTE_COUNT(cg).
6967 */
6968 index = AllocSrcNote(cx, cg);
6969 if (index < 0)
6970 return -1;
6971 sn = &CG_NOTES(cg)[index];
6973 /*
6974 * Compute delta from the last annotated bytecode's offset. If it's too
6975 * big to fit in sn, allocate one or more xdelta notes and reset sn.
6976 */
6977 offset = CG_OFFSET(cg);
6978 delta = offset - CG_LAST_NOTE_OFFSET(cg);
6979 CG_LAST_NOTE_OFFSET(cg) = offset;
6980 if (delta >= SN_DELTA_LIMIT) {
6981 do {
6982 xdelta = JS_MIN(delta, SN_XDELTA_MASK);
6983 SN_MAKE_XDELTA(sn, xdelta);
6984 delta -= xdelta;
6985 index = AllocSrcNote(cx, cg);
6986 if (index < 0)
6987 return -1;
6988 sn = &CG_NOTES(cg)[index];
6989 } while (delta >= SN_DELTA_LIMIT);
6990 }
6992 /*
6993 * Initialize type and delta, then allocate the minimum number of notes
6994 * needed for type's arity. Usually, we won't need more, but if an offset
6995 * does take two bytes, js_SetSrcNoteOffset will grow CG_NOTES(cg).
6996 */
6997 SN_MAKE_NOTE(sn, type, delta);
6998 for (n = (intN)js_SrcNoteSpec[type].arity; n > 0; n--) {
6999 if (js_NewSrcNote(cx, cg, SRC_NULL) < 0)
7000 return -1;
7001 }
7002 return index;
7003 }
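/*
 * A minimal standalone sketch, with assumed field widths and tag bits
 * (the real SN_DELTA_LIMIT, SN_XDELTA_MASK, and SN_MAKE_XDELTA are
 * defined elsewhere in the tree), of how js_NewSrcNote above splits an
 * oversized pc delta into xdelta notes followed by one ordinary note:
 */
static int
SketchEncodeDelta(unsigned char *notes, int delta)
{
    const int kDeltaLimit = 8;      /* assumed: ordinary notes carry 3 delta bits */
    const int kXDeltaMask = 0x3f;   /* assumed: xdelta notes carry 6 delta bits */
    int n = 0;
    while (delta >= kDeltaLimit) {
        int xdelta = (delta < kXDeltaMask) ? delta : kXDeltaMask;
        notes[n++] = (unsigned char) (0xc0 | xdelta);  /* assumed xdelta tag */
        delta -= xdelta;
    }
    notes[n++] = (unsigned char) delta;  /* ordinary note; type bits omitted */
    return n;                            /* note bytes written */
}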
7005 intN
7006 js_NewSrcNote2(JSContext *cx, JSCodeGenerator *cg, JSSrcNoteType type,
7007 ptrdiff_t offset)
7008 {
7009 intN index;
7011 index = js_NewSrcNote(cx, cg, type);
7012 if (index >= 0) {
7013 if (!js_SetSrcNoteOffset(cx, cg, index, 0, offset))
7014 return -1;
7015 }
7016 return index;
7017 }
7019 intN
7020 js_NewSrcNote3(JSContext *cx, JSCodeGenerator *cg, JSSrcNoteType type,
7021 ptrdiff_t offset1, ptrdiff_t offset2)
7022 {
7023 intN index;
7025 index = js_NewSrcNote(cx, cg, type);
7026 if (index >= 0) {
7027 if (!js_SetSrcNoteOffset(cx, cg, index, 0, offset1))
7028 return -1;
7029 if (!js_SetSrcNoteOffset(cx, cg, index, 1, offset2))
7030 return -1;
7031 }
7032 return index;
7033 }
7035 static JSBool
7036 GrowSrcNotes(JSContext *cx, JSCodeGenerator *cg)
7037 {
7038 JSArenaPool *pool;
7039 size_t size;
7041 /* Grow by doubling note array size; update noteMask on success. */
7042 pool = cg->notePool;
7043 size = SRCNOTE_SIZE(CG_NOTE_MASK(cg) + 1);
7044 JS_ARENA_GROW_CAST(CG_NOTES(cg), jssrcnote *, pool, size, size);
7045 if (!CG_NOTES(cg)) {
7046 js_ReportOutOfScriptQuota(cx);
7047 return JS_FALSE;
7048 }
7049 CG_NOTE_MASK(cg) = (CG_NOTE_MASK(cg) << 1) | 1;
7050 return JS_TRUE;
7051 }
7053 jssrcnote *
7054 js_AddToSrcNoteDelta(JSContext *cx, JSCodeGenerator *cg, jssrcnote *sn,
7055 ptrdiff_t delta)
7056 {
7057 ptrdiff_t base, limit, newdelta, diff;
7058 intN index;
7060 /*
7061 * Called only from OptimizeSpanDeps and js_FinishTakingSrcNotes to add to
7062 * main script note deltas, and only by a small positive amount.
7063 */
7064 JS_ASSERT(cg->current == &cg->main);
7065 JS_ASSERT((unsigned) delta < (unsigned) SN_XDELTA_LIMIT);
7067 base = SN_DELTA(sn);
7068 limit = SN_IS_XDELTA(sn) ? SN_XDELTA_LIMIT : SN_DELTA_LIMIT;
7069 newdelta = base + delta;
7070 if (newdelta < limit) {
7071 SN_SET_DELTA(sn, newdelta);
7072 } else {
7073 index = sn - cg->main.notes;
7074 if ((cg->main.noteCount & cg->main.noteMask) == 0) {
7075 if (!GrowSrcNotes(cx, cg))
7076 return NULL;
7077 sn = cg->main.notes + index;
7078 }
7079 diff = cg->main.noteCount - index;
7080 cg->main.noteCount++;
7081 memmove(sn + 1, sn, SRCNOTE_SIZE(diff));
7082 SN_MAKE_XDELTA(sn, delta);
7083 sn++;
7084 }
7085 return sn;
7086 }
7088 JS_FRIEND_API(uintN)
7089 js_SrcNoteLength(jssrcnote *sn)
7090 {
7091 uintN arity;
7092 jssrcnote *base;
7094 arity = (intN)js_SrcNoteSpec[SN_TYPE(sn)].arity;
7095 for (base = sn++; arity; sn++, arity--) {
7096 if (*sn & SN_3BYTE_OFFSET_FLAG)
7097 sn += 2;
7098 }
7099 return sn - base;
7100 }
7102 JS_FRIEND_API(ptrdiff_t)
7103 js_GetSrcNoteOffset(jssrcnote *sn, uintN which)
7104 {
7105 /* Find the offset numbered which (i.e., skip exactly which offsets). */
7106 JS_ASSERT(SN_TYPE(sn) != SRC_XDELTA);
7107 JS_ASSERT((intN) which < js_SrcNoteSpec[SN_TYPE(sn)].arity);
7108 for (sn++; which; sn++, which--) {
7109 if (*sn & SN_3BYTE_OFFSET_FLAG)
7110 sn += 2;
7111 }
7112 if (*sn & SN_3BYTE_OFFSET_FLAG) {
7113 return (ptrdiff_t)(((uint32)(sn[0] & SN_3BYTE_OFFSET_MASK) << 16)
7114 | (sn[1] << 8)
7115 | sn[2]);
7116 }
7117 return (ptrdiff_t)*sn;
7118 }
7120 JSBool
7121 js_SetSrcNoteOffset(JSContext *cx, JSCodeGenerator *cg, uintN index,
7122 uintN which, ptrdiff_t offset)
7123 {
7124 jssrcnote *sn;
7125 ptrdiff_t diff;
7127 if ((jsuword)offset >= (jsuword)((ptrdiff_t)SN_3BYTE_OFFSET_FLAG << 16)) {
7128 ReportStatementTooLarge(cx, cg);
7129 return JS_FALSE;
7130 }
7132 /* Find the offset numbered which (i.e., skip exactly which offsets). */
7133 sn = &CG_NOTES(cg)[index];
7134 JS_ASSERT(SN_TYPE(sn) != SRC_XDELTA);
7135 JS_ASSERT((intN) which < js_SrcNoteSpec[SN_TYPE(sn)].arity);
7136 for (sn++; which; sn++, which--) {
7137 if (*sn & SN_3BYTE_OFFSET_FLAG)
7138 sn += 2;
7139 }
7141 /* See if the new offset requires three bytes. */
7142 if (offset > (ptrdiff_t)SN_3BYTE_OFFSET_MASK) {
7143 /* Maybe this offset was already set to a three-byte value. */
7144 if (!(*sn & SN_3BYTE_OFFSET_FLAG)) {
7145 /* Losing, need to insert another two bytes for this offset. */
7146 index = sn - CG_NOTES(cg);
7148 /*
7149 * Simultaneously test to see if the source note array must grow to
7150 * accommodate either the first or second byte of additional storage
7151 * required by this 3-byte offset.
7152 */
7153 if (((CG_NOTE_COUNT(cg) + 1) & CG_NOTE_MASK(cg)) <= 1) {
7154 if (!GrowSrcNotes(cx, cg))
7155 return JS_FALSE;
7156 sn = CG_NOTES(cg) + index;
7157 }
7158 CG_NOTE_COUNT(cg) += 2;
7160 diff = CG_NOTE_COUNT(cg) - (index + 3);
7161 JS_ASSERT(diff >= 0);
7162 if (diff > 0)
7163 memmove(sn + 3, sn + 1, SRCNOTE_SIZE(diff));
7164 }
7165 *sn++ = (jssrcnote)(SN_3BYTE_OFFSET_FLAG | (offset >> 16));
7166 *sn++ = (jssrcnote)(offset >> 8);
7167 }
7168 *sn = (jssrcnote)offset;
7169 return JS_TRUE;
7170 }
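/*
 * A minimal standalone sketch, with the flag and mask values assumed, of
 * the one-byte vs. three-byte offset encoding that js_SetSrcNoteOffset
 * writes and js_GetSrcNoteOffset reads back:
 */
static long
SketchReadOffset(const unsigned char *p)
{
    const unsigned char kFlag = 0x80;   /* assumed SN_3BYTE_OFFSET_FLAG */
    const unsigned char kMask = 0x7f;   /* assumed SN_3BYTE_OFFSET_MASK */
    if (p[0] & kFlag) {
        /* Big offset: high bits in byte 0, two more bytes follow. */
        return ((long) (p[0] & kMask) << 16) | ((long) p[1] << 8) | p[2];
    }
    return p[0];    /* small offset fits in the single byte */
}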
7172 #ifdef DEBUG_notme
7173 #define DEBUG_srcnotesize
7174 #endif
7176 #ifdef DEBUG_srcnotesize
7177 #define NBINS 10
7178 static uint32 hist[NBINS];
7180 void DumpSrcNoteSizeHist()
7181 {
7182 static FILE *fp;
7183 int i, n;
7185 if (!fp) {
7186 fp = fopen("/tmp/srcnotes.hist", "w");
7187 if (!fp)
7188 return;
7189 setvbuf(fp, NULL, _IONBF, 0);
7190 }
7191 fprintf(fp, "SrcNote size histogram:\n");
7192 for (i = 0; i < NBINS; i++) {
7193 fprintf(fp, "%4u %4u ", JS_BIT(i), hist[i]);
7194 for (n = (int) JS_HOWMANY(hist[i], 10); n > 0; --n)
7195 fputc('*', fp);
7196 fputc('\n', fp);
7197 }
7198 fputc('\n', fp);
7199 }
7200 #endif
7202 /*
7203 * Fill in the storage at notes with prolog and main srcnotes; the space at
7204 * notes was allocated using the CG_COUNT_FINAL_SRCNOTES macro from jsemit.h.
7205 * SO DON'T CHANGE THIS FUNCTION WITHOUT AT LEAST CHECKING WHETHER jsemit.h's
7206 * CG_COUNT_FINAL_SRCNOTES MACRO NEEDS CORRESPONDING CHANGES!
7207 */
7208 JSBool
7209 js_FinishTakingSrcNotes(JSContext *cx, JSCodeGenerator *cg, jssrcnote *notes)
7211 uintN prologCount, mainCount, totalCount;
7212 ptrdiff_t offset, delta;
7213 jssrcnote *sn;
7215 JS_ASSERT(cg->current == &cg->main);
7217 prologCount = cg->prolog.noteCount;
7218 if (prologCount && cg->prolog.currentLine != cg->firstLine) {
7219 CG_SWITCH_TO_PROLOG(cg);
7220 if (js_NewSrcNote2(cx, cg, SRC_SETLINE, (ptrdiff_t)cg->firstLine) < 0)
7221 return JS_FALSE;
7222 prologCount = cg->prolog.noteCount;
7223 CG_SWITCH_TO_MAIN(cg);
7224 } else {
7225 /*
7226 * Either no prolog srcnotes, or no line number change over prolog.
7227 * We don't need a SRC_SETLINE, but we may need to adjust the offset
7228 * of the first main note, by adding to its delta and possibly even
7229 * prepending SRC_XDELTA notes to it to account for prolog bytecodes
7230 * that came at and after the last annotated bytecode.
7231 */
7232 offset = CG_PROLOG_OFFSET(cg) - cg->prolog.lastNoteOffset;
7233 JS_ASSERT(offset >= 0);
7234 if (offset > 0 && cg->main.noteCount != 0) {
7235 /* NB: Use as much of the first main note's delta as we can. */
7236 sn = cg->main.notes;
7237 delta = SN_IS_XDELTA(sn)
7238 ? SN_XDELTA_MASK - (*sn & SN_XDELTA_MASK)
7239 : SN_DELTA_MASK - (*sn & SN_DELTA_MASK);
7240 if (offset < delta)
7241 delta = offset;
7242 for (;;) {
7243 if (!js_AddToSrcNoteDelta(cx, cg, sn, delta))
7244 return JS_FALSE;
7245 offset -= delta;
7246 if (offset == 0)
7247 break;
7248 delta = JS_MIN(offset, SN_XDELTA_MASK);
7249 sn = cg->main.notes;
7250 }
7251 }
7252 }
7254 mainCount = cg->main.noteCount;
7255 totalCount = prologCount + mainCount;
7256 if (prologCount)
7257 memcpy(notes, cg->prolog.notes, SRCNOTE_SIZE(prologCount));
7258 memcpy(notes + prologCount, cg->main.notes, SRCNOTE_SIZE(mainCount));
7259 SN_MAKE_TERMINATOR(&notes[totalCount]);
7261 #ifdef DEBUG_notme
7262 { int bin = JS_CeilingLog2(totalCount);
7263 if (bin >= NBINS)
7264 bin = NBINS - 1;
7265 ++hist[bin];
7266 }
7267 #endif
7268 return JS_TRUE;
7269 }
7271 static JSBool
7272 NewTryNote(JSContext *cx, JSCodeGenerator *cg, JSTryNoteKind kind,
7273 uintN stackDepth, size_t start, size_t end)
7274 {
7275 JSTryNode *tryNode;
7277 JS_ASSERT((uintN)(uint16)stackDepth == stackDepth);
7278 JS_ASSERT(start <= end);
7279 JS_ASSERT((size_t)(uint32)start == start);
7280 JS_ASSERT((size_t)(uint32)end == end);
7282 JS_ARENA_ALLOCATE_TYPE(tryNode, JSTryNode, &cx->tempPool);
7283 if (!tryNode) {
7284 js_ReportOutOfScriptQuota(cx);
7285 return JS_FALSE;
7286 }
7288 tryNode->note.kind = kind;
7289 tryNode->note.stackDepth = (uint16)stackDepth;
7290 tryNode->note.start = (uint32)start;
7291 tryNode->note.length = (uint32)(end - start);
7292 tryNode->prev = cg->lastTryNode;
7293 cg->lastTryNode = tryNode;
7294 cg->ntrynotes++;
7295 return JS_TRUE;
7296 }
7298 void
7299 js_FinishTakingTryNotes(JSCodeGenerator *cg, JSTryNoteArray *array)
7300 {
7301 JSTryNode *tryNode;
7302 JSTryNote *tn;
7304 JS_ASSERT(array->length > 0 && array->length == cg->ntrynotes);
7305 tn = array->vector + array->length;
7306 tryNode = cg->lastTryNode;
7307 do {
7308 *--tn = tryNode->note;
7309 } while ((tryNode = tryNode->prev) != NULL);
7310 JS_ASSERT(tn == array->vector);
7311 }
7313 /*
7314 * Find the index of the given object for code generator.
7315 *
7316 * Since the emitter refers to each parsed object only once, for the index we
7317 * use the number of already indexed objects. We also add the object to a list
7318 * to convert the list to a fixed-size array when we complete code generation,
7319 * see JSCGObjectList::finish below.
7320 *
7321 * Most of the objects go to JSCodeGenerator.objectList, but for regexps we use
7322 * a separate JSCodeGenerator.regexpList. In this way the emitted index can be
7323 * directly used to store and fetch a reference to a cloned RegExp object that
7324 * shares the same JSRegExp private data created for the object literal in
7325 * objbox. We need a cloned object to hold lastIndex and other direct properties
7326 * that should not be shared among threads sharing a precompiled function or
7327 * script.
7328 *
7329 * If the code being compiled is function code, allocate a reserved slot in
7330 * the cloned function object that shares its precompiled script with other
7331 * cloned function objects and with the compiler-created clone-parent. There
7332 * are nregexps = script->regexps()->length such reserved slots in each
7333 * function object cloned from fun->object. NB: during compilation, a funobj
7334 * slots element must never be allocated, because js_AllocSlot could hand out
7335 * one of the slots that should be given to a regexp clone.
7336 *
7337 * If the code being compiled is global code, the cloned regexps are stored in
7338 * the fp->vars slots after cg->ngvars, and to protect regexp slots from GC we
7339 * set fp->nvars to ngvars + nregexps.
7340 *
7341 * The slots initially contain undefined or null. We populate them lazily when
7342 * JSOP_REGEXP is executed for the first time.
7343 *
7344 * Why clone regexp objects? ECMA specifies that when a regular expression
7345 * literal is scanned, a RegExp object is created. In the spec, compilation
7346 * and execution happen indivisibly, but in this implementation and many of
7347 * its embeddings, code is precompiled early and re-executed in multiple
7348 * threads, or using multiple global objects, or both, for efficiency.
7349 *
7350 * In such cases, naively following ECMA leads to wrongful sharing of RegExp
7351 * objects, which makes for collisions on the lastIndex property (especially
7352 * for global regexps) and on any ad-hoc properties. Also, __proto__ and
7353 * __parent__ refer to the pre-compilation prototype and global objects, a
7354 * pigeon-hole problem for instanceof tests.
7355 */
7356 uintN
7357 JSCGObjectList::index(JSObjectBox *objbox)
7358 {
7359 JS_ASSERT(!objbox->emitLink);
7360 objbox->emitLink = lastbox;
7361 lastbox = objbox;
7362 return length++;
7363 }
7365 void
7366 JSCGObjectList::finish(JSObjectArray *array)
7367 {
7368 JSObject **cursor;
7369 JSObjectBox *objbox;
7371 JS_ASSERT(length <= INDEX_LIMIT);
7372 JS_ASSERT(length == array->length);
7374 cursor = array->vector + array->length;
7375 objbox = lastbox;
7376 do {
7377 --cursor;
7378 JS_ASSERT(!*cursor);
7379 *cursor = objbox->object;
7380 } while ((objbox = objbox->emitLink) != NULL);
7381 JS_ASSERT(cursor == array->vector);
7382 }
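/*
 * js_FinishTakingTryNotes and JSCGObjectList::finish above share one
 * pattern: entries accumulate during emission in a prev-linked list,
 * newest first, and are copied into a fixed-size array back to front.
 * A minimal standalone sketch of that reverse fill, using a
 * hypothetical node type:
 */
struct SketchNode {
    int value;
    SketchNode *prev;   /* entry added just before this one */
};

static void
SketchFinish(SketchNode *last, int *vector, unsigned length)
{
    int *cursor = vector + length;      /* start one past the end */
    for (SketchNode *node = last; node; node = node->prev)
        *--cursor = node->value;        /* newest entry lands last */
    /* cursor == vector once every entry has been copied */
}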