Bug 506786 - JSScope::trace method. r=brendan.
[mozilla-central.git] / js / src / jsemit.cpp
blob5105d8bba4986851d78e43917919799566320e68
1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=8 sw=4 et tw=99:
4 * ***** BEGIN LICENSE BLOCK *****
5 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
7 * The contents of this file are subject to the Mozilla Public License Version
8 * 1.1 (the "License"); you may not use this file except in compliance with
9 * the License. You may obtain a copy of the License at
10 * http://www.mozilla.org/MPL/
12 * Software distributed under the License is distributed on an "AS IS" basis,
13 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 * for the specific language governing rights and limitations under the
15 * License.
17 * The Original Code is Mozilla Communicator client code, released
18 * March 31, 1998.
20 * The Initial Developer of the Original Code is
21 * Netscape Communications Corporation.
22 * Portions created by the Initial Developer are Copyright (C) 1998
23 * the Initial Developer. All Rights Reserved.
25 * Contributor(s):
27 * Alternatively, the contents of this file may be used under the terms of
28 * either of the GNU General Public License Version 2 or later (the "GPL"),
29 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
30 * in which case the provisions of the GPL or the LGPL are applicable instead
31 * of those above. If you wish to allow use of your version of this file only
32 * under the terms of either the GPL or the LGPL, and not to allow others to
33 * use your version of this file under the terms of the MPL, indicate your
34 * decision by deleting the provisions above and replace them with the notice
35 * and other provisions required by the GPL or the LGPL. If you do not delete
36 * the provisions above, a recipient may use your version of this file under
37 * the terms of any one of the MPL, the GPL or the LGPL.
39 * ***** END LICENSE BLOCK ***** */
42 * JS bytecode generation.
44 #ifdef HAVE_MEMORY_H
45 #include <memory.h>
46 #endif
47 #include <new>
48 #include <string.h>
49 #include "jstypes.h"
50 #include "jsstdint.h"
51 #include "jsarena.h" /* Added by JSIFY */
52 #include "jsutil.h" /* Added by JSIFY */
53 #include "jsbit.h"
54 #include "jsprf.h"
55 #include "jsapi.h"
56 #include "jsatom.h"
57 #include "jsbool.h"
58 #include "jscntxt.h"
59 #include "jsversion.h"
60 #include "jsemit.h"
61 #include "jsfun.h"
62 #include "jsnum.h"
63 #include "jsopcode.h"
64 #include "jsparse.h"
65 #include "jsregexp.h"
66 #include "jsscan.h"
67 #include "jsscope.h"
68 #include "jsscript.h"
69 #include "jsautooplen.h"
70 #include "jsstaticcheck.h"
72 /* Allocation chunk counts, must be powers of two in general. */
73 #define BYTECODE_CHUNK 256 /* code allocation increment */
74 #define SRCNOTE_CHUNK 64 /* initial srcnote allocation increment */
75 #define TRYNOTE_CHUNK 64 /* trynote allocation increment */
77 /* Macros to compute byte sizes from typed element counts. */
78 #define BYTECODE_SIZE(n) ((n) * sizeof(jsbytecode))
79 #define SRCNOTE_SIZE(n) ((n) * sizeof(jssrcnote))
80 #define TRYNOTE_SIZE(n) ((n) * sizeof(JSTryNote))
82 static JSBool
83 NewTryNote(JSContext *cx, JSCodeGenerator *cg, JSTryNoteKind kind,
84 uintN stackDepth, size_t start, size_t end);
86 JSCodeGenerator::JSCodeGenerator(JSCompiler *jsc,
87 JSArenaPool *cpool, JSArenaPool *npool,
88 uintN lineno)
89 : JSTreeContext(jsc),
90 codePool(cpool), notePool(npool),
91 codeMark(JS_ARENA_MARK(cpool)), noteMark(JS_ARENA_MARK(npool)),
92 stackDepth(0), maxStackDepth(0),
93 ntrynotes(0), lastTryNode(NULL),
94 spanDeps(NULL), jumpTargets(NULL), jtFreeList(NULL),
95 numSpanDeps(0), numJumpTargets(0), spanDepTodo(0),
96 arrayCompDepth(0),
97 emitLevel(0)
99 flags = TCF_COMPILING;
100 memset(&prolog, 0, sizeof prolog);
101 memset(&main, 0, sizeof main);
102 current = &main;
103 firstLine = prolog.currentLine = main.currentLine = lineno;
104 prolog.noteMask = main.noteMask = SRCNOTE_CHUNK - 1;
105 memset(&upvarMap, 0, sizeof upvarMap);
108 JSCodeGenerator::~JSCodeGenerator()
110 JS_ARENA_RELEASE(codePool, codeMark);
111 JS_ARENA_RELEASE(notePool, noteMark);
113 /* NB: non-null only after OOM. */
114 if (spanDeps)
115 compiler->context->free(spanDeps);
117 if (upvarMap.vector)
118 compiler->context->free(upvarMap.vector);
121 static ptrdiff_t
122 EmitCheck(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t delta)
124 jsbytecode *base, *limit, *next;
125 ptrdiff_t offset, length;
126 size_t incr, size;
128 base = CG_BASE(cg);
129 next = CG_NEXT(cg);
130 limit = CG_LIMIT(cg);
131 offset = next - base;
132 if (next + delta > limit) {
133 length = offset + delta;
134 length = (length <= BYTECODE_CHUNK)
135 ? BYTECODE_CHUNK
136 : JS_BIT(JS_CeilingLog2(length));
137 incr = BYTECODE_SIZE(length);
138 if (!base) {
139 JS_ARENA_ALLOCATE_CAST(base, jsbytecode *, cg->codePool, incr);
140 } else {
141 size = BYTECODE_SIZE(limit - base);
142 incr -= size;
143 JS_ARENA_GROW_CAST(base, jsbytecode *, cg->codePool, size, incr);
145 if (!base) {
146 js_ReportOutOfScriptQuota(cx);
147 return -1;
149 CG_BASE(cg) = base;
150 CG_LIMIT(cg) = base + length;
151 CG_NEXT(cg) = base + offset;
153 return offset;
156 static void
157 UpdateDepth(JSContext *cx, JSCodeGenerator *cg, ptrdiff_t target)
159 jsbytecode *pc;
160 JSOp op;
161 const JSCodeSpec *cs;
162 uintN extra, depth, nuses;
163 intN ndefs;
165 pc = CG_CODE(cg, target);
166 op = (JSOp) *pc;
167 cs = &js_CodeSpec[op];
168 #ifdef JS_TRACER
169 extern uint8 js_opcode2extra[];
170 extra = js_opcode2extra[op];
171 #else
172 extra = 0;
173 #endif
174 if ((cs->format & JOF_TMPSLOT_MASK) || extra) {
175 depth = (uintN) cg->stackDepth +
176 ((cs->format & JOF_TMPSLOT_MASK) >> JOF_TMPSLOT_SHIFT) +
177 extra;
178 if (depth > cg->maxStackDepth)
179 cg->maxStackDepth = depth;
182 nuses = js_GetStackUses(cs, op, pc);
183 cg->stackDepth -= nuses;
184 JS_ASSERT(cg->stackDepth >= 0);
185 if (cg->stackDepth < 0) {
186 char numBuf[12];
187 JSTokenStream *ts;
189 JS_snprintf(numBuf, sizeof numBuf, "%d", target);
190 ts = &cg->compiler->tokenStream;
191 JS_ReportErrorFlagsAndNumber(cx, JSREPORT_WARNING,
192 js_GetErrorMessage, NULL,
193 JSMSG_STACK_UNDERFLOW,
194 ts->filename ? ts->filename : "stdin",
195 numBuf);
197 ndefs = cs->ndefs;
198 if (ndefs < 0) {
199 JSObject *blockObj;
201 /* We just executed IndexParsedObject */
202 JS_ASSERT(op == JSOP_ENTERBLOCK);
203 JS_ASSERT(nuses == 0);
204 blockObj = cg->objectList.lastbox->object;
205 JS_ASSERT(STOBJ_GET_CLASS(blockObj) == &js_BlockClass);
206 JS_ASSERT(JSVAL_IS_VOID(blockObj->fslots[JSSLOT_BLOCK_DEPTH]));
208 OBJ_SET_BLOCK_DEPTH(cx, blockObj, cg->stackDepth);
209 ndefs = OBJ_BLOCK_COUNT(cx, blockObj);
211 cg->stackDepth += ndefs;
212 if ((uintN)cg->stackDepth > cg->maxStackDepth)
213 cg->maxStackDepth = cg->stackDepth;
216 ptrdiff_t
217 js_Emit1(JSContext *cx, JSCodeGenerator *cg, JSOp op)
219 ptrdiff_t offset = EmitCheck(cx, cg, op, 1);
221 if (offset >= 0) {
222 *CG_NEXT(cg)++ = (jsbytecode)op;
223 UpdateDepth(cx, cg, offset);
225 return offset;
228 ptrdiff_t
229 js_Emit2(JSContext *cx, JSCodeGenerator *cg, JSOp op, jsbytecode op1)
231 ptrdiff_t offset = EmitCheck(cx, cg, op, 2);
233 if (offset >= 0) {
234 jsbytecode *next = CG_NEXT(cg);
235 next[0] = (jsbytecode)op;
236 next[1] = op1;
237 CG_NEXT(cg) = next + 2;
238 UpdateDepth(cx, cg, offset);
240 return offset;
243 ptrdiff_t
244 js_Emit3(JSContext *cx, JSCodeGenerator *cg, JSOp op, jsbytecode op1,
245 jsbytecode op2)
247 ptrdiff_t offset = EmitCheck(cx, cg, op, 3);
249 if (offset >= 0) {
250 jsbytecode *next = CG_NEXT(cg);
251 next[0] = (jsbytecode)op;
252 next[1] = op1;
253 next[2] = op2;
254 CG_NEXT(cg) = next + 3;
255 UpdateDepth(cx, cg, offset);
257 return offset;
260 ptrdiff_t
261 js_EmitN(JSContext *cx, JSCodeGenerator *cg, JSOp op, size_t extra)
263 ptrdiff_t length = 1 + (ptrdiff_t)extra;
264 ptrdiff_t offset = EmitCheck(cx, cg, op, length);
266 if (offset >= 0) {
267 jsbytecode *next = CG_NEXT(cg);
268 *next = (jsbytecode)op;
269 memset(next + 1, 0, BYTECODE_SIZE(extra));
270 CG_NEXT(cg) = next + length;
273 * Don't UpdateDepth if op's use-count comes from the immediate
274 * operand yet to be stored in the extra bytes after op.
276 if (js_CodeSpec[op].nuses >= 0)
277 UpdateDepth(cx, cg, offset);
279 return offset;
282 /* XXX too many "... statement" L10N gaffes below -- fix via js.msg! */
283 const char js_with_statement_str[] = "with statement";
284 const char js_finally_block_str[] = "finally block";
285 const char js_script_str[] = "script";
287 static const char *statementName[] = {
288 "label statement", /* LABEL */
289 "if statement", /* IF */
290 "else statement", /* ELSE */
291 "destructuring body", /* BODY */
292 "switch statement", /* SWITCH */
293 "block", /* BLOCK */
294 js_with_statement_str, /* WITH */
295 "catch block", /* CATCH */
296 "try block", /* TRY */
297 js_finally_block_str, /* FINALLY */
298 js_finally_block_str, /* SUBROUTINE */
299 "do loop", /* DO_LOOP */
300 "for loop", /* FOR_LOOP */
301 "for/in loop", /* FOR_IN_LOOP */
302 "while loop", /* WHILE_LOOP */
305 JS_STATIC_ASSERT(JS_ARRAY_LENGTH(statementName) == STMT_LIMIT);
307 static const char *
308 StatementName(JSCodeGenerator *cg)
310 if (!cg->topStmt)
311 return js_script_str;
312 return statementName[cg->topStmt->type];
315 static void
316 ReportStatementTooLarge(JSContext *cx, JSCodeGenerator *cg)
318 JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_NEED_DIET,
319 StatementName(cg));
323 Span-dependent instructions in JS bytecode consist of the jump (JOF_JUMP)
324 and switch (JOF_LOOKUPSWITCH, JOF_TABLESWITCH) format opcodes, subdivided
325 into unconditional (gotos and gosubs), and conditional jumps or branches
326 (which pop a value, test it, and jump depending on its value). Most jumps
327 have just one immediate operand, a signed offset from the jump opcode's pc
328 to the target bytecode. The lookup and table switch opcodes may contain
329 many jump offsets.
331 Mozilla bug #80981 (http://bugzilla.mozilla.org/show_bug.cgi?id=80981) was
332 fixed by adding extended "X" counterparts to the opcodes/formats (NB: X is
333 suffixed to prefer JSOP_ORX thereby avoiding a JSOP_XOR name collision for
334 the extended form of the JSOP_OR branch opcode). The unextended or short
335 formats have 16-bit signed immediate offset operands, the extended or long
336 formats have 32-bit signed immediates. The span-dependency problem consists
337 of selecting as few long instructions as possible, or about as few -- since
338 jumps can span other jumps, extending one jump may cause another to need to
339 be extended.
341 Most JS scripts are short, so need no extended jumps. We optimize for this
342 case by generating short jumps until we know a long jump is needed. After
343 that point, we keep generating short jumps, but each jump's 16-bit immediate
344 offset operand is actually an unsigned index into cg->spanDeps, an array of
345 JSSpanDep structs. Each struct tells the top offset in the script of the
346 opcode, the "before" offset of the jump (which will be the same as top for
347 simplex jumps, but which will index further into the bytecode array for a
348 non-initial jump offset in a lookup or table switch), the after "offset"
349 adjusted during span-dependent instruction selection (initially the same
350 value as the "before" offset), and the jump target (more below).
352 Since we generate cg->spanDeps lazily, from within js_SetJumpOffset, we must
353 ensure that all bytecode generated so far can be inspected to discover where
354 the jump offset immediate operands lie within CG_CODE(cg). But the bonus is
355 that we generate span-dependency records sorted by their offsets, so we can
356 binary-search when trying to find a JSSpanDep for a given bytecode offset,
357 or the nearest JSSpanDep at or above a given pc.
359 To avoid limiting scripts to 64K jumps, if the cg->spanDeps index overflows
360 65534, we store SPANDEP_INDEX_HUGE in the jump's immediate operand. This
361 tells us that we need to binary-search for the cg->spanDeps entry by the
362 jump opcode's bytecode offset (sd->before).
364 Jump targets need to be maintained in a data structure that lets us look
365 up an already-known target by its address (jumps may have a common target),
366 and that also lets us update the addresses (script-relative, a.k.a. absolute
367 offsets) of targets that come after a jump target (for when a jump below
368 that target needs to be extended). We use an AVL tree, implemented using
369 recursion, but with some tricky optimizations to its height-balancing code
370 (see http://www.cmcrossroads.com/bradapp/ftp/src/libs/C++/AvlTrees.html).
372 A final wrinkle: backpatch chains are linked by jump-to-jump offsets with
373 positive sign, even though they link "backward" (i.e., toward lower bytecode
374 address). We don't want to waste space and search time in the AVL tree for
375 such temporary backpatch deltas, so we use a single-bit wildcard scheme to
376 tag true JSJumpTarget pointers and encode untagged, signed (positive) deltas
377 in JSSpanDep.target pointers, depending on whether the JSSpanDep has a known
378 target, or is still awaiting backpatching.
380 Note that backpatch chains would present a problem for BuildSpanDepTable,
381 which inspects bytecode to build cg->spanDeps on demand, when the first
382 short jump offset overflows. To solve this temporary problem, we emit a
383 proxy bytecode (JSOP_BACKPATCH; JSOP_BACKPATCH_POP for branch ops) whose
384 nuses/ndefs counts help keep the stack balanced, but whose opcode format
385 distinguishes its backpatch delta immediate operand from a normal jump
386 offset.
388 static int
389 BalanceJumpTargets(JSJumpTarget **jtp)
391 JSJumpTarget *jt, *jt2, *root;
392 int dir, otherDir, heightChanged;
393 JSBool doubleRotate;
395 jt = *jtp;
396 JS_ASSERT(jt->balance != 0);
398 if (jt->balance < -1) {
399 dir = JT_RIGHT;
400 doubleRotate = (jt->kids[JT_LEFT]->balance > 0);
401 } else if (jt->balance > 1) {
402 dir = JT_LEFT;
403 doubleRotate = (jt->kids[JT_RIGHT]->balance < 0);
404 } else {
405 return 0;
408 otherDir = JT_OTHER_DIR(dir);
409 if (doubleRotate) {
410 jt2 = jt->kids[otherDir];
411 *jtp = root = jt2->kids[dir];
413 jt->kids[otherDir] = root->kids[dir];
414 root->kids[dir] = jt;
416 jt2->kids[dir] = root->kids[otherDir];
417 root->kids[otherDir] = jt2;
419 heightChanged = 1;
420 root->kids[JT_LEFT]->balance = -JS_MAX(root->balance, 0);
421 root->kids[JT_RIGHT]->balance = -JS_MIN(root->balance, 0);
422 root->balance = 0;
423 } else {
424 *jtp = root = jt->kids[otherDir];
425 jt->kids[otherDir] = root->kids[dir];
426 root->kids[dir] = jt;
428 heightChanged = (root->balance != 0);
429 jt->balance = -((dir == JT_LEFT) ? --root->balance : ++root->balance);
432 return heightChanged;
435 typedef struct AddJumpTargetArgs {
436 JSContext *cx;
437 JSCodeGenerator *cg;
438 ptrdiff_t offset;
439 JSJumpTarget *node;
440 } AddJumpTargetArgs;
442 static int
443 AddJumpTarget(AddJumpTargetArgs *args, JSJumpTarget **jtp)
445 JSJumpTarget *jt;
446 int balanceDelta;
448 jt = *jtp;
449 if (!jt) {
450 JSCodeGenerator *cg = args->cg;
452 jt = cg->jtFreeList;
453 if (jt) {
454 cg->jtFreeList = jt->kids[JT_LEFT];
455 } else {
456 JS_ARENA_ALLOCATE_CAST(jt, JSJumpTarget *, &args->cx->tempPool,
457 sizeof *jt);
458 if (!jt) {
459 js_ReportOutOfScriptQuota(args->cx);
460 return 0;
463 jt->offset = args->offset;
464 jt->balance = 0;
465 jt->kids[JT_LEFT] = jt->kids[JT_RIGHT] = NULL;
466 cg->numJumpTargets++;
467 args->node = jt;
468 *jtp = jt;
469 return 1;
472 if (jt->offset == args->offset) {
473 args->node = jt;
474 return 0;
477 if (args->offset < jt->offset)
478 balanceDelta = -AddJumpTarget(args, &jt->kids[JT_LEFT]);
479 else
480 balanceDelta = AddJumpTarget(args, &jt->kids[JT_RIGHT]);
481 if (!args->node)
482 return 0;
484 jt->balance += balanceDelta;
485 return (balanceDelta && jt->balance)
486 ? 1 - BalanceJumpTargets(jtp)
487 : 0;
#ifdef DEBUG_brendan
/*
 * Debug-only invariant check: verify every node's balance factor matches
 * the actual subtree height difference. Returns the subtree height.
 */
static int AVLCheck(JSJumpTarget *jt)
{
    int lh, rh;

    if (!jt) return 0;
    JS_ASSERT(-1 <= jt->balance && jt->balance <= 1);
    lh = AVLCheck(jt->kids[JT_LEFT]);
    rh = AVLCheck(jt->kids[JT_RIGHT]);
    JS_ASSERT(jt->balance == rh - lh);
    return 1 + JS_MAX(lh, rh);
}
#endif
504 static JSBool
505 SetSpanDepTarget(JSContext *cx, JSCodeGenerator *cg, JSSpanDep *sd,
506 ptrdiff_t off)
508 AddJumpTargetArgs args;
510 if (off < JUMPX_OFFSET_MIN || JUMPX_OFFSET_MAX < off) {
511 ReportStatementTooLarge(cx, cg);
512 return JS_FALSE;
515 args.cx = cx;
516 args.cg = cg;
517 args.offset = sd->top + off;
518 args.node = NULL;
519 AddJumpTarget(&args, &cg->jumpTargets);
520 if (!args.node)
521 return JS_FALSE;
523 #ifdef DEBUG_brendan
524 AVLCheck(cg->jumpTargets);
525 #endif
527 SD_SET_TARGET(sd, args.node);
528 return JS_TRUE;
531 #define SPANDEPS_MIN 256
532 #define SPANDEPS_SIZE(n) ((n) * sizeof(JSSpanDep))
533 #define SPANDEPS_SIZE_MIN SPANDEPS_SIZE(SPANDEPS_MIN)
535 static JSBool
536 AddSpanDep(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc, jsbytecode *pc2,
537 ptrdiff_t off)
539 uintN index;
540 JSSpanDep *sdbase, *sd;
541 size_t size;
543 index = cg->numSpanDeps;
544 if (index + 1 == 0) {
545 ReportStatementTooLarge(cx, cg);
546 return JS_FALSE;
549 if ((index & (index - 1)) == 0 &&
550 (!(sdbase = cg->spanDeps) || index >= SPANDEPS_MIN)) {
551 size = sdbase ? SPANDEPS_SIZE(index) : SPANDEPS_SIZE_MIN / 2;
552 sdbase = (JSSpanDep *) cx->realloc(sdbase, size + size);
553 if (!sdbase)
554 return JS_FALSE;
555 cg->spanDeps = sdbase;
558 cg->numSpanDeps = index + 1;
559 sd = cg->spanDeps + index;
560 sd->top = pc - CG_BASE(cg);
561 sd->offset = sd->before = pc2 - CG_BASE(cg);
563 if (js_CodeSpec[*pc].format & JOF_BACKPATCH) {
564 /* Jump offset will be backpatched if off is a non-zero "bpdelta". */
565 if (off != 0) {
566 JS_ASSERT(off >= 1 + JUMP_OFFSET_LEN);
567 if (off > BPDELTA_MAX) {
568 ReportStatementTooLarge(cx, cg);
569 return JS_FALSE;
572 SD_SET_BPDELTA(sd, off);
573 } else if (off == 0) {
574 /* Jump offset will be patched directly, without backpatch chaining. */
575 SD_SET_TARGET(sd, 0);
576 } else {
577 /* The jump offset in off is non-zero, therefore it's already known. */
578 if (!SetSpanDepTarget(cx, cg, sd, off))
579 return JS_FALSE;
582 if (index > SPANDEP_INDEX_MAX)
583 index = SPANDEP_INDEX_HUGE;
584 SET_SPANDEP_INDEX(pc2, index);
585 return JS_TRUE;
588 static jsbytecode *
589 AddSwitchSpanDeps(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc)
591 JSOp op;
592 jsbytecode *pc2;
593 ptrdiff_t off;
594 jsint low, high;
595 uintN njumps, indexlen;
597 op = (JSOp) *pc;
598 JS_ASSERT(op == JSOP_TABLESWITCH || op == JSOP_LOOKUPSWITCH);
599 pc2 = pc;
600 off = GET_JUMP_OFFSET(pc2);
601 if (!AddSpanDep(cx, cg, pc, pc2, off))
602 return NULL;
603 pc2 += JUMP_OFFSET_LEN;
604 if (op == JSOP_TABLESWITCH) {
605 low = GET_JUMP_OFFSET(pc2);
606 pc2 += JUMP_OFFSET_LEN;
607 high = GET_JUMP_OFFSET(pc2);
608 pc2 += JUMP_OFFSET_LEN;
609 njumps = (uintN) (high - low + 1);
610 indexlen = 0;
611 } else {
612 njumps = GET_UINT16(pc2);
613 pc2 += UINT16_LEN;
614 indexlen = INDEX_LEN;
616 while (njumps) {
617 --njumps;
618 pc2 += indexlen;
619 off = GET_JUMP_OFFSET(pc2);
620 if (!AddSpanDep(cx, cg, pc, pc2, off))
621 return NULL;
622 pc2 += JUMP_OFFSET_LEN;
624 return 1 + pc2;
627 static JSBool
628 BuildSpanDepTable(JSContext *cx, JSCodeGenerator *cg)
630 jsbytecode *pc, *end;
631 JSOp op;
632 const JSCodeSpec *cs;
633 ptrdiff_t off;
635 pc = CG_BASE(cg) + cg->spanDepTodo;
636 end = CG_NEXT(cg);
637 while (pc != end) {
638 JS_ASSERT(pc < end);
639 op = (JSOp)*pc;
640 cs = &js_CodeSpec[op];
642 switch (JOF_TYPE(cs->format)) {
643 case JOF_TABLESWITCH:
644 case JOF_LOOKUPSWITCH:
645 pc = AddSwitchSpanDeps(cx, cg, pc);
646 if (!pc)
647 return JS_FALSE;
648 break;
650 case JOF_JUMP:
651 off = GET_JUMP_OFFSET(pc);
652 if (!AddSpanDep(cx, cg, pc, pc, off))
653 return JS_FALSE;
654 /* FALL THROUGH */
655 default:
656 pc += cs->length;
657 break;
661 return JS_TRUE;
664 static JSSpanDep *
665 GetSpanDep(JSCodeGenerator *cg, jsbytecode *pc)
667 uintN index;
668 ptrdiff_t offset;
669 int lo, hi, mid;
670 JSSpanDep *sd;
672 index = GET_SPANDEP_INDEX(pc);
673 if (index != SPANDEP_INDEX_HUGE)
674 return cg->spanDeps + index;
676 offset = pc - CG_BASE(cg);
677 lo = 0;
678 hi = cg->numSpanDeps - 1;
679 while (lo <= hi) {
680 mid = (lo + hi) / 2;
681 sd = cg->spanDeps + mid;
682 if (sd->before == offset)
683 return sd;
684 if (sd->before < offset)
685 lo = mid + 1;
686 else
687 hi = mid - 1;
690 JS_ASSERT(0);
691 return NULL;
694 static JSBool
695 SetBackPatchDelta(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc,
696 ptrdiff_t delta)
698 JSSpanDep *sd;
700 JS_ASSERT(delta >= 1 + JUMP_OFFSET_LEN);
701 if (!cg->spanDeps && delta < JUMP_OFFSET_MAX) {
702 SET_JUMP_OFFSET(pc, delta);
703 return JS_TRUE;
706 if (delta > BPDELTA_MAX) {
707 ReportStatementTooLarge(cx, cg);
708 return JS_FALSE;
711 if (!cg->spanDeps && !BuildSpanDepTable(cx, cg))
712 return JS_FALSE;
714 sd = GetSpanDep(cg, pc);
715 JS_ASSERT(SD_GET_BPDELTA(sd) == 0);
716 SD_SET_BPDELTA(sd, delta);
717 return JS_TRUE;
720 static void
721 UpdateJumpTargets(JSJumpTarget *jt, ptrdiff_t pivot, ptrdiff_t delta)
723 if (jt->offset > pivot) {
724 jt->offset += delta;
725 if (jt->kids[JT_LEFT])
726 UpdateJumpTargets(jt->kids[JT_LEFT], pivot, delta);
728 if (jt->kids[JT_RIGHT])
729 UpdateJumpTargets(jt->kids[JT_RIGHT], pivot, delta);
732 static JSSpanDep *
733 FindNearestSpanDep(JSCodeGenerator *cg, ptrdiff_t offset, int lo,
734 JSSpanDep *guard)
736 int num, hi, mid;
737 JSSpanDep *sdbase, *sd;
739 num = cg->numSpanDeps;
740 JS_ASSERT(num > 0);
741 hi = num - 1;
742 sdbase = cg->spanDeps;
743 while (lo <= hi) {
744 mid = (lo + hi) / 2;
745 sd = sdbase + mid;
746 if (sd->before == offset)
747 return sd;
748 if (sd->before < offset)
749 lo = mid + 1;
750 else
751 hi = mid - 1;
753 if (lo == num)
754 return guard;
755 sd = sdbase + lo;
756 JS_ASSERT(sd->before >= offset && (lo == 0 || sd[-1].before < offset));
757 return sd;
760 static void
761 FreeJumpTargets(JSCodeGenerator *cg, JSJumpTarget *jt)
763 if (jt->kids[JT_LEFT])
764 FreeJumpTargets(cg, jt->kids[JT_LEFT]);
765 if (jt->kids[JT_RIGHT])
766 FreeJumpTargets(cg, jt->kids[JT_RIGHT]);
767 jt->kids[JT_LEFT] = cg->jtFreeList;
768 cg->jtFreeList = jt;
771 static JSBool
772 OptimizeSpanDeps(JSContext *cx, JSCodeGenerator *cg)
774 jsbytecode *pc, *oldpc, *base, *limit, *next;
775 JSSpanDep *sd, *sd2, *sdbase, *sdlimit, *sdtop, guard;
776 ptrdiff_t offset, growth, delta, top, pivot, span, length, target;
777 JSBool done;
778 JSOp op;
779 uint32 type;
780 size_t size, incr;
781 jssrcnote *sn, *snlimit;
782 JSSrcNoteSpec *spec;
783 uintN i, n, noteIndex;
784 JSTryNode *tryNode;
785 #ifdef DEBUG_brendan
786 int passes = 0;
787 #endif
789 base = CG_BASE(cg);
790 sdbase = cg->spanDeps;
791 sdlimit = sdbase + cg->numSpanDeps;
792 offset = CG_OFFSET(cg);
793 growth = 0;
795 do {
796 done = JS_TRUE;
797 delta = 0;
798 top = pivot = -1;
799 sdtop = NULL;
800 pc = NULL;
801 op = JSOP_NOP;
802 type = 0;
803 #ifdef DEBUG_brendan
804 passes++;
805 #endif
807 for (sd = sdbase; sd < sdlimit; sd++) {
808 JS_ASSERT(JT_HAS_TAG(sd->target));
809 sd->offset += delta;
811 if (sd->top != top) {
812 sdtop = sd;
813 top = sd->top;
814 JS_ASSERT(top == sd->before);
815 pivot = sd->offset;
816 pc = base + top;
817 op = (JSOp) *pc;
818 type = JOF_OPTYPE(op);
819 if (JOF_TYPE_IS_EXTENDED_JUMP(type)) {
821 * We already extended all the jump offset operands for
822 * the opcode at sd->top. Jumps and branches have only
823 * one jump offset operand, but switches have many, all
824 * of which are adjacent in cg->spanDeps.
826 continue;
829 JS_ASSERT(type == JOF_JUMP ||
830 type == JOF_TABLESWITCH ||
831 type == JOF_LOOKUPSWITCH);
834 if (!JOF_TYPE_IS_EXTENDED_JUMP(type)) {
835 span = SD_SPAN(sd, pivot);
836 if (span < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < span) {
837 ptrdiff_t deltaFromTop = 0;
839 done = JS_FALSE;
841 switch (op) {
842 case JSOP_GOTO: op = JSOP_GOTOX; break;
843 case JSOP_IFEQ: op = JSOP_IFEQX; break;
844 case JSOP_IFNE: op = JSOP_IFNEX; break;
845 case JSOP_OR: op = JSOP_ORX; break;
846 case JSOP_AND: op = JSOP_ANDX; break;
847 case JSOP_GOSUB: op = JSOP_GOSUBX; break;
848 case JSOP_CASE: op = JSOP_CASEX; break;
849 case JSOP_DEFAULT: op = JSOP_DEFAULTX; break;
850 case JSOP_TABLESWITCH: op = JSOP_TABLESWITCHX; break;
851 case JSOP_LOOKUPSWITCH: op = JSOP_LOOKUPSWITCHX; break;
852 default:
853 ReportStatementTooLarge(cx, cg);
854 return JS_FALSE;
856 *pc = (jsbytecode) op;
858 for (sd2 = sdtop; sd2 < sdlimit && sd2->top == top; sd2++) {
859 if (sd2 <= sd) {
861 * sd2->offset already includes delta as it stood
862 * before we entered this loop, but it must also
863 * include the delta relative to top due to all the
864 * extended jump offset immediates for the opcode
865 * starting at top, which we extend in this loop.
867 * If there is only one extended jump offset, then
868 * sd2->offset won't change and this for loop will
869 * iterate once only.
871 sd2->offset += deltaFromTop;
872 deltaFromTop += JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN;
873 } else {
875 * sd2 comes after sd, and won't be revisited by
876 * the outer for loop, so we have to increase its
877 * offset by delta, not merely by deltaFromTop.
879 sd2->offset += delta;
882 delta += JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN;
883 UpdateJumpTargets(cg->jumpTargets, sd2->offset,
884 JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN);
886 sd = sd2 - 1;
891 growth += delta;
892 } while (!done);
894 if (growth) {
895 #ifdef DEBUG_brendan
896 JSTokenStream *ts = &cg->compiler->tokenStream;
898 printf("%s:%u: %u/%u jumps extended in %d passes (%d=%d+%d)\n",
899 ts->filename ? ts->filename : "stdin", cg->firstLine,
900 growth / (JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN), cg->numSpanDeps,
901 passes, offset + growth, offset, growth);
902 #endif
905 * Ensure that we have room for the extended jumps, but don't round up
906 * to a power of two -- we're done generating code, so we cut to fit.
908 limit = CG_LIMIT(cg);
909 length = offset + growth;
910 next = base + length;
911 if (next > limit) {
912 JS_ASSERT(length > BYTECODE_CHUNK);
913 size = BYTECODE_SIZE(limit - base);
914 incr = BYTECODE_SIZE(length) - size;
915 JS_ARENA_GROW_CAST(base, jsbytecode *, cg->codePool, size, incr);
916 if (!base) {
917 js_ReportOutOfScriptQuota(cx);
918 return JS_FALSE;
920 CG_BASE(cg) = base;
921 CG_LIMIT(cg) = next = base + length;
923 CG_NEXT(cg) = next;
926 * Set up a fake span dependency record to guard the end of the code
927 * being generated. This guard record is returned as a fencepost by
928 * FindNearestSpanDep if there is no real spandep at or above a given
929 * unextended code offset.
931 guard.top = -1;
932 guard.offset = offset + growth;
933 guard.before = offset;
934 guard.target = NULL;
938 * Now work backwards through the span dependencies, copying chunks of
939 * bytecode between each extended jump toward the end of the grown code
940 * space, and restoring immediate offset operands for all jump bytecodes.
941 * The first chunk of bytecodes, starting at base and ending at the first
942 * extended jump offset (NB: this chunk includes the operation bytecode
943 * just before that immediate jump offset), doesn't need to be copied.
945 JS_ASSERT(sd == sdlimit);
946 top = -1;
947 while (--sd >= sdbase) {
948 if (sd->top != top) {
949 top = sd->top;
950 op = (JSOp) base[top];
951 type = JOF_OPTYPE(op);
953 for (sd2 = sd - 1; sd2 >= sdbase && sd2->top == top; sd2--)
954 continue;
955 sd2++;
956 pivot = sd2->offset;
957 JS_ASSERT(top == sd2->before);
960 oldpc = base + sd->before;
961 span = SD_SPAN(sd, pivot);
964 * If this jump didn't need to be extended, restore its span immediate
965 * offset operand now, overwriting the index of sd within cg->spanDeps
966 * that was stored temporarily after *pc when BuildSpanDepTable ran.
968 * Note that span might fit in 16 bits even for an extended jump op,
969 * if the op has multiple span operands, not all of which overflowed
970 * (e.g. JSOP_LOOKUPSWITCH or JSOP_TABLESWITCH where some cases are in
971 * range for a short jump, but others are not).
973 if (!JOF_TYPE_IS_EXTENDED_JUMP(type)) {
974 JS_ASSERT(JUMP_OFFSET_MIN <= span && span <= JUMP_OFFSET_MAX);
975 SET_JUMP_OFFSET(oldpc, span);
976 continue;
980 * Set up parameters needed to copy the next run of bytecode starting
981 * at offset (which is a cursor into the unextended, original bytecode
982 * vector), down to sd->before (a cursor of the same scale as offset,
983 * it's the index of the original jump pc). Reuse delta to count the
984 * nominal number of bytes to copy.
986 pc = base + sd->offset;
987 delta = offset - sd->before;
988 JS_ASSERT(delta >= 1 + JUMP_OFFSET_LEN);
991 * Don't bother copying the jump offset we're about to reset, but do
992 * copy the bytecode at oldpc (which comes just before its immediate
993 * jump offset operand), on the next iteration through the loop, by
994 * including it in offset's new value.
996 offset = sd->before + 1;
997 size = BYTECODE_SIZE(delta - (1 + JUMP_OFFSET_LEN));
998 if (size) {
999 memmove(pc + 1 + JUMPX_OFFSET_LEN,
1000 oldpc + 1 + JUMP_OFFSET_LEN,
1001 size);
1004 SET_JUMPX_OFFSET(pc, span);
1007 if (growth) {
1009 * Fix source note deltas. Don't hardwire the delta fixup adjustment,
1010 * even though currently it must be JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN
1011 * at each sd that moved. The future may bring different offset sizes
1012 * for span-dependent instruction operands. However, we fix only main
1013 * notes here, not prolog notes -- we know that prolog opcodes are not
1014 * span-dependent, and aren't likely ever to be.
1016 offset = growth = 0;
1017 sd = sdbase;
1018 for (sn = cg->main.notes, snlimit = sn + cg->main.noteCount;
1019 sn < snlimit;
1020 sn = SN_NEXT(sn)) {
1022 * Recall that the offset of a given note includes its delta, and
1023 * tells the offset of the annotated bytecode from the main entry
1024 * point of the script.
1026 offset += SN_DELTA(sn);
1027 while (sd < sdlimit && sd->before < offset) {
1029 * To compute the delta to add to sn, we need to look at the
1030 * spandep after sd, whose offset - (before + growth) tells by
1031 * how many bytes sd's instruction grew.
1033 sd2 = sd + 1;
1034 if (sd2 == sdlimit)
1035 sd2 = &guard;
1036 delta = sd2->offset - (sd2->before + growth);
1037 if (delta > 0) {
1038 JS_ASSERT(delta == JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN);
1039 sn = js_AddToSrcNoteDelta(cx, cg, sn, delta);
1040 if (!sn)
1041 return JS_FALSE;
1042 snlimit = cg->main.notes + cg->main.noteCount;
1043 growth += delta;
1045 sd++;
1049 * If sn has span-dependent offset operands, check whether each
1050 * covers further span-dependencies, and increase those operands
1051 * accordingly. Some source notes measure offset not from the
1052 * annotated pc, but from that pc plus some small bias. NB: we
1053 * assume that spec->offsetBias can't itself span span-dependent
1054 * instructions!
1056 spec = &js_SrcNoteSpec[SN_TYPE(sn)];
1057 if (spec->isSpanDep) {
1058 pivot = offset + spec->offsetBias;
1059 n = spec->arity;
1060 for (i = 0; i < n; i++) {
1061 span = js_GetSrcNoteOffset(sn, i);
1062 if (span == 0)
1063 continue;
1064 target = pivot + span * spec->isSpanDep;
1065 sd2 = FindNearestSpanDep(cg, target,
1066 (target >= pivot)
1067 ? sd - sdbase
1068 : 0,
1069 &guard);
1072 * Increase target by sd2's before-vs-after offset delta,
1073 * which is absolute (i.e., relative to start of script,
1074 * as is target). Recompute the span by subtracting its
1075 * adjusted pivot from target.
1077 target += sd2->offset - sd2->before;
1078 span = target - (pivot + growth);
1079 span *= spec->isSpanDep;
1080 noteIndex = sn - cg->main.notes;
1081 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, i, span))
1082 return JS_FALSE;
1083 sn = cg->main.notes + noteIndex;
1084 snlimit = cg->main.notes + cg->main.noteCount;
1088 cg->main.lastNoteOffset += growth;
1091 * Fix try/catch notes (O(numTryNotes * log2(numSpanDeps)), but it's
1092 * not clear how we can beat that).
1094 for (tryNode = cg->lastTryNode; tryNode; tryNode = tryNode->prev) {
1096 * First, look for the nearest span dependency at/above tn->start.
1097 * There may not be any such spandep, in which case the guard will
1098 * be returned.
1100 offset = tryNode->note.start;
1101 sd = FindNearestSpanDep(cg, offset, 0, &guard);
1102 delta = sd->offset - sd->before;
1103 tryNode->note.start = offset + delta;
1106 * Next, find the nearest spandep at/above tn->start + tn->length.
1107 * Use its delta minus tn->start's delta to increase tn->length.
1109 length = tryNode->note.length;
1110 sd2 = FindNearestSpanDep(cg, offset + length, sd - sdbase, &guard);
1111 if (sd2 != sd) {
1112 tryNode->note.length =
1113 length + sd2->offset - sd2->before - delta;
1118 #ifdef DEBUG_brendan
1120 uintN bigspans = 0;
1121 top = -1;
1122 for (sd = sdbase; sd < sdlimit; sd++) {
1123 offset = sd->offset;
1125 /* NB: sd->top cursors into the original, unextended bytecode vector. */
1126 if (sd->top != top) {
1127 JS_ASSERT(top == -1 ||
1128 !JOF_TYPE_IS_EXTENDED_JUMP(type) ||
1129 bigspans != 0);
1130 bigspans = 0;
1131 top = sd->top;
1132 JS_ASSERT(top == sd->before);
1133 op = (JSOp) base[offset];
1134 type = JOF_OPTYPE(op);
1135 JS_ASSERT(type == JOF_JUMP ||
1136 type == JOF_JUMPX ||
1137 type == JOF_TABLESWITCH ||
1138 type == JOF_TABLESWITCHX ||
1139 type == JOF_LOOKUPSWITCH ||
1140 type == JOF_LOOKUPSWITCHX);
1141 pivot = offset;
1144 pc = base + offset;
1145 if (JOF_TYPE_IS_EXTENDED_JUMP(type)) {
1146 span = GET_JUMPX_OFFSET(pc);
1147 if (span < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < span) {
1148 bigspans++;
1149 } else {
1150 JS_ASSERT(type == JOF_TABLESWITCHX ||
1151 type == JOF_LOOKUPSWITCHX);
1153 } else {
1154 span = GET_JUMP_OFFSET(pc);
1156 JS_ASSERT(SD_SPAN(sd, pivot) == span);
1158 JS_ASSERT(!JOF_TYPE_IS_EXTENDED_JUMP(type) || bigspans != 0);
1160 #endif
1163 * Reset so we optimize at most once -- cg may be used for further code
1164 * generation of successive, independent, top-level statements. No jump
1165 * can span top-level statements, because JS lacks goto.
1167 size = SPANDEPS_SIZE(JS_BIT(JS_CeilingLog2(cg->numSpanDeps)));
1168 cx->free(cg->spanDeps);
1169 cg->spanDeps = NULL;
1170 FreeJumpTargets(cg, cg->jumpTargets);
1171 cg->jumpTargets = NULL;
1172 cg->numSpanDeps = cg->numJumpTargets = 0;
1173 cg->spanDepTodo = CG_OFFSET(cg);
1174 return JS_TRUE;
1177 static ptrdiff_t
1178 EmitJump(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t off)
1180 JSBool extend;
1181 ptrdiff_t jmp;
1182 jsbytecode *pc;
1184 extend = off < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < off;
1185 if (extend && !cg->spanDeps && !BuildSpanDepTable(cx, cg))
1186 return -1;
1188 jmp = js_Emit3(cx, cg, op, JUMP_OFFSET_HI(off), JUMP_OFFSET_LO(off));
1189 if (jmp >= 0 && (extend || cg->spanDeps)) {
1190 pc = CG_CODE(cg, jmp);
1191 if (!AddSpanDep(cx, cg, pc, pc, off))
1192 return -1;
1194 return jmp;
1197 static ptrdiff_t
1198 GetJumpOffset(JSCodeGenerator *cg, jsbytecode *pc)
1200 JSSpanDep *sd;
1201 JSJumpTarget *jt;
1202 ptrdiff_t top;
1204 if (!cg->spanDeps)
1205 return GET_JUMP_OFFSET(pc);
1207 sd = GetSpanDep(cg, pc);
1208 jt = sd->target;
1209 if (!JT_HAS_TAG(jt))
1210 return JT_TO_BPDELTA(jt);
1212 top = sd->top;
1213 while (--sd >= cg->spanDeps && sd->top == top)
1214 continue;
1215 sd++;
1216 return JT_CLR_TAG(jt)->offset - sd->offset;
1219 JSBool
1220 js_SetJumpOffset(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc,
1221 ptrdiff_t off)
1223 if (!cg->spanDeps) {
1224 if (JUMP_OFFSET_MIN <= off && off <= JUMP_OFFSET_MAX) {
1225 SET_JUMP_OFFSET(pc, off);
1226 return JS_TRUE;
1229 if (!BuildSpanDepTable(cx, cg))
1230 return JS_FALSE;
1233 return SetSpanDepTarget(cx, cg, GetSpanDep(cg, pc), off);
1236 bool
1237 JSTreeContext::inStatement(JSStmtType type)
1239 for (JSStmtInfo *stmt = topStmt; stmt; stmt = stmt->down) {
1240 if (stmt->type == type)
1241 return true;
1243 return false;
1246 void
1247 js_PushStatement(JSTreeContext *tc, JSStmtInfo *stmt, JSStmtType type,
1248 ptrdiff_t top)
1250 stmt->type = type;
1251 stmt->flags = 0;
1252 stmt->blockid = tc->blockid();
1253 SET_STATEMENT_TOP(stmt, top);
1254 stmt->label = NULL;
1255 JS_ASSERT(!stmt->blockObj);
1256 stmt->down = tc->topStmt;
1257 tc->topStmt = stmt;
1258 if (STMT_LINKS_SCOPE(stmt)) {
1259 stmt->downScope = tc->topScopeStmt;
1260 tc->topScopeStmt = stmt;
1261 } else {
1262 stmt->downScope = NULL;
1266 void
1267 js_PushBlockScope(JSTreeContext *tc, JSStmtInfo *stmt, JSObject *blockObj,
1268 ptrdiff_t top)
1270 js_PushStatement(tc, stmt, STMT_BLOCK, top);
1271 stmt->flags |= SIF_SCOPE;
1272 STOBJ_SET_PARENT(blockObj, tc->blockChain);
1273 stmt->downScope = tc->topScopeStmt;
1274 tc->topScopeStmt = stmt;
1275 tc->blockChain = blockObj;
1276 stmt->blockObj = blockObj;
1280 * Emit a backpatch op with offset pointing to the previous jump of this type,
1281 * so that we can walk back up the chain fixing up the op and jump offset.
1283 static ptrdiff_t
1284 EmitBackPatchOp(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t *lastp)
1286 ptrdiff_t offset, delta;
1288 offset = CG_OFFSET(cg);
1289 delta = offset - *lastp;
1290 *lastp = offset;
1291 JS_ASSERT(delta > 0);
1292 return EmitJump(cx, cg, op, delta);
/*
 * Macro to emit a bytecode followed by a uint16 immediate operand stored in
 * big-endian order, used for arg and var numbers as well as for atomIndexes.
 * NB: We use cx and cg from our caller's lexical environment, and return
 * false on error.
 */
#define EMIT_UINT16_IMM_OP(op, i)                                             \
    JS_BEGIN_MACRO                                                            \
        if (js_Emit3(cx, cg, op, UINT16_HI(i), UINT16_LO(i)) < 0)             \
            return JS_FALSE;                                                  \
    JS_END_MACRO
1307 static JSBool
1308 FlushPops(JSContext *cx, JSCodeGenerator *cg, intN *npops)
1310 JS_ASSERT(*npops != 0);
1311 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
1312 return JS_FALSE;
1313 EMIT_UINT16_IMM_OP(JSOP_POPN, *npops);
1314 *npops = 0;
1315 return JS_TRUE;
1319 * Emit additional bytecode(s) for non-local jumps.
1321 static JSBool
1322 EmitNonLocalJumpFixup(JSContext *cx, JSCodeGenerator *cg, JSStmtInfo *toStmt)
1324 intN depth, npops;
1325 JSStmtInfo *stmt;
1328 * The non-local jump fixup we emit will unbalance cg->stackDepth, because
1329 * the fixup replicates balanced code such as JSOP_LEAVEWITH emitted at the
1330 * end of a with statement, so we save cg->stackDepth here and restore it
1331 * just before a successful return.
1333 depth = cg->stackDepth;
1334 npops = 0;
1336 #define FLUSH_POPS() if (npops && !FlushPops(cx, cg, &npops)) return JS_FALSE
1338 for (stmt = cg->topStmt; stmt != toStmt; stmt = stmt->down) {
1339 switch (stmt->type) {
1340 case STMT_FINALLY:
1341 FLUSH_POPS();
1342 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
1343 return JS_FALSE;
1344 if (EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &GOSUBS(*stmt)) < 0)
1345 return JS_FALSE;
1346 break;
1348 case STMT_WITH:
1349 /* There's a With object on the stack that we need to pop. */
1350 FLUSH_POPS();
1351 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
1352 return JS_FALSE;
1353 if (js_Emit1(cx, cg, JSOP_LEAVEWITH) < 0)
1354 return JS_FALSE;
1355 break;
1357 case STMT_FOR_IN_LOOP:
1359 * The iterator and the object being iterated need to be popped.
1361 FLUSH_POPS();
1362 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
1363 return JS_FALSE;
1364 if (js_Emit1(cx, cg, JSOP_ENDITER) < 0)
1365 return JS_FALSE;
1366 break;
1368 case STMT_SUBROUTINE:
1370 * There's a [exception or hole, retsub pc-index] pair on the
1371 * stack that we need to pop.
1373 npops += 2;
1374 break;
1376 default:;
1379 if (stmt->flags & SIF_SCOPE) {
1380 uintN i;
1382 /* There is a Block object with locals on the stack to pop. */
1383 FLUSH_POPS();
1384 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
1385 return JS_FALSE;
1386 i = OBJ_BLOCK_COUNT(cx, stmt->blockObj);
1387 EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK, i);
1391 FLUSH_POPS();
1392 cg->stackDepth = depth;
1393 return JS_TRUE;
1395 #undef FLUSH_POPS
1398 static ptrdiff_t
1399 EmitGoto(JSContext *cx, JSCodeGenerator *cg, JSStmtInfo *toStmt,
1400 ptrdiff_t *lastp, JSAtomListElement *label, JSSrcNoteType noteType)
1402 intN index;
1404 if (!EmitNonLocalJumpFixup(cx, cg, toStmt))
1405 return -1;
1407 if (label)
1408 index = js_NewSrcNote2(cx, cg, noteType, (ptrdiff_t) ALE_INDEX(label));
1409 else if (noteType != SRC_NULL)
1410 index = js_NewSrcNote(cx, cg, noteType);
1411 else
1412 index = 0;
1413 if (index < 0)
1414 return -1;
1416 return EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, lastp);
1419 static JSBool
1420 BackPatch(JSContext *cx, JSCodeGenerator *cg, ptrdiff_t last,
1421 jsbytecode *target, jsbytecode op)
1423 jsbytecode *pc, *stop;
1424 ptrdiff_t delta, span;
1426 pc = CG_CODE(cg, last);
1427 stop = CG_CODE(cg, -1);
1428 while (pc != stop) {
1429 delta = GetJumpOffset(cg, pc);
1430 span = target - pc;
1431 CHECK_AND_SET_JUMP_OFFSET(cx, cg, pc, span);
1434 * Set *pc after jump offset in case bpdelta didn't overflow, but span
1435 * does (if so, CHECK_AND_SET_JUMP_OFFSET might call BuildSpanDepTable
1436 * and need to see the JSOP_BACKPATCH* op at *pc).
1438 *pc = op;
1439 pc -= delta;
1441 return JS_TRUE;
1444 void
1445 js_PopStatement(JSTreeContext *tc)
1447 JSStmtInfo *stmt;
1449 stmt = tc->topStmt;
1450 tc->topStmt = stmt->down;
1451 if (STMT_LINKS_SCOPE(stmt)) {
1452 tc->topScopeStmt = stmt->downScope;
1453 if (stmt->flags & SIF_SCOPE) {
1454 tc->blockChain = STOBJ_GET_PARENT(stmt->blockObj);
1455 JS_SCOPE_DEPTH_METERING(--tc->scopeDepth);
1460 JSBool
1461 js_PopStatementCG(JSContext *cx, JSCodeGenerator *cg)
1463 JSStmtInfo *stmt;
1465 stmt = cg->topStmt;
1466 if (!STMT_IS_TRYING(stmt) &&
1467 (!BackPatch(cx, cg, stmt->breaks, CG_NEXT(cg), JSOP_GOTO) ||
1468 !BackPatch(cx, cg, stmt->continues, CG_CODE(cg, stmt->update),
1469 JSOP_GOTO))) {
1470 return JS_FALSE;
1472 js_PopStatement(cg);
1473 return JS_TRUE;
1476 JSBool
1477 js_DefineCompileTimeConstant(JSContext *cx, JSCodeGenerator *cg, JSAtom *atom,
1478 JSParseNode *pn)
1480 jsdouble dval;
1481 jsint ival;
1482 JSAtom *valueAtom;
1483 jsval v;
1484 JSAtomListElement *ale;
1486 /* XXX just do numbers for now */
1487 if (pn->pn_type == TOK_NUMBER) {
1488 dval = pn->pn_dval;
1489 if (JSDOUBLE_IS_INT(dval, ival) && INT_FITS_IN_JSVAL(ival)) {
1490 v = INT_TO_JSVAL(ival);
1491 } else {
1493 * We atomize double to root a jsdouble instance that we wrap as
1494 * jsval and store in cg->constList. This works because atoms are
1495 * protected from GC during compilation.
1497 valueAtom = js_AtomizeDouble(cx, dval);
1498 if (!valueAtom)
1499 return JS_FALSE;
1500 v = ATOM_KEY(valueAtom);
1502 ale = cg->constList.add(cg->compiler, atom);
1503 if (!ale)
1504 return JS_FALSE;
1505 ALE_SET_VALUE(ale, v);
1507 return JS_TRUE;
1510 JSStmtInfo *
1511 js_LexicalLookup(JSTreeContext *tc, JSAtom *atom, jsint *slotp, JSStmtInfo *stmt)
1513 JSObject *obj;
1514 JSScope *scope;
1515 JSScopeProperty *sprop;
1517 if (!stmt)
1518 stmt = tc->topScopeStmt;
1519 for (; stmt; stmt = stmt->downScope) {
1520 if (stmt->type == STMT_WITH)
1521 break;
1523 /* Skip "maybe scope" statements that don't contain let bindings. */
1524 if (!(stmt->flags & SIF_SCOPE))
1525 continue;
1527 obj = stmt->blockObj;
1528 JS_ASSERT(LOCKED_OBJ_GET_CLASS(obj) == &js_BlockClass);
1529 scope = OBJ_SCOPE(obj);
1530 sprop = scope->lookup(ATOM_TO_JSID(atom));
1531 if (sprop) {
1532 JS_ASSERT(sprop->flags & SPROP_HAS_SHORTID);
1534 if (slotp) {
1535 JS_ASSERT(JSVAL_IS_INT(obj->fslots[JSSLOT_BLOCK_DEPTH]));
1536 *slotp = JSVAL_TO_INT(obj->fslots[JSSLOT_BLOCK_DEPTH]) +
1537 sprop->shortid;
1539 return stmt;
1543 if (slotp)
1544 *slotp = -1;
1545 return stmt;
/*
 * Check if the attributes describe a property holding a compile-time
 * constant: a permanent, read-only property without a getter.
 */
#define IS_CONSTANT_PROPERTY(attrs)                                           \
    (((attrs) & (JSPROP_READONLY | JSPROP_PERMANENT | JSPROP_GETTER)) ==      \
     (JSPROP_READONLY | JSPROP_PERMANENT))
1557 * The function sets vp to JSVAL_HOLE when the atom does not corresponds to a
1558 * name defining a constant.
1560 static JSBool
1561 LookupCompileTimeConstant(JSContext *cx, JSCodeGenerator *cg, JSAtom *atom,
1562 jsval *vp)
1564 JSBool ok;
1565 JSStmtInfo *stmt;
1566 JSAtomListElement *ale;
1567 JSObject *obj, *objbox;
1568 JSProperty *prop;
1569 uintN attrs;
1572 * Chase down the cg stack, but only until we reach the outermost cg.
1573 * This enables propagating consts from top-level into switch cases in a
1574 * function compiled along with the top-level script.
1576 *vp = JSVAL_HOLE;
1577 do {
1578 if (cg->flags & (TCF_IN_FUNCTION | TCF_COMPILE_N_GO)) {
1579 /* XXX this will need revising if 'const' becomes block-scoped. */
1580 stmt = js_LexicalLookup(cg, atom, NULL);
1581 if (stmt)
1582 return JS_TRUE;
1584 ale = cg->constList.lookup(atom);
1585 if (ale) {
1586 JS_ASSERT(ALE_VALUE(ale) != JSVAL_HOLE);
1587 *vp = ALE_VALUE(ale);
1588 return JS_TRUE;
1592 * Try looking in the variable object for a direct property that
1593 * is readonly and permanent. We know such a property can't be
1594 * shadowed by another property on obj's prototype chain, or a
1595 * with object or catch variable; nor can prop's value be changed,
1596 * nor can prop be deleted.
1598 if (cg->flags & TCF_IN_FUNCTION) {
1599 if (js_LookupLocal(cx, cg->fun, atom, NULL) != JSLOCAL_NONE)
1600 break;
1601 } else {
1602 JS_ASSERT(cg->flags & TCF_COMPILE_N_GO);
1603 obj = cg->scopeChain;
1604 ok = OBJ_LOOKUP_PROPERTY(cx, obj, ATOM_TO_JSID(atom), &objbox,
1605 &prop);
1606 if (!ok)
1607 return JS_FALSE;
1608 if (objbox == obj) {
1610 * We're compiling code that will be executed immediately,
1611 * not re-executed against a different scope chain and/or
1612 * variable object. Therefore we can get constant values
1613 * from our variable object here.
1615 ok = OBJ_GET_ATTRIBUTES(cx, obj, ATOM_TO_JSID(atom), prop,
1616 &attrs);
1617 if (ok && IS_CONSTANT_PROPERTY(attrs)) {
1618 ok = OBJ_GET_PROPERTY(cx, obj, ATOM_TO_JSID(atom), vp);
1619 JS_ASSERT_IF(ok, *vp != JSVAL_HOLE);
1622 if (prop)
1623 OBJ_DROP_PROPERTY(cx, objbox, prop);
1624 if (!ok)
1625 return JS_FALSE;
1626 if (prop)
1627 break;
1630 } while ((cg = (JSCodeGenerator *) cg->parent) != NULL);
1631 return JS_TRUE;
1635 * Return JSOP_NOP to indicate that index fits 2 bytes and no index segment
1636 * reset instruction is necessary, JSOP_FALSE to indicate an error or either
1637 * JSOP_RESETBASE0 or JSOP_RESETBASE1 to indicate the reset bytecode to issue
1638 * after the main bytecode sequence.
1640 static JSOp
1641 EmitBigIndexPrefix(JSContext *cx, JSCodeGenerator *cg, uintN index)
1643 uintN indexBase;
1646 * We have max 3 bytes for indexes and check for INDEX_LIMIT overflow only
1647 * for big indexes.
1649 JS_STATIC_ASSERT(INDEX_LIMIT <= JS_BIT(24));
1650 JS_STATIC_ASSERT(INDEX_LIMIT >=
1651 (JSOP_INDEXBASE3 - JSOP_INDEXBASE1 + 2) << 16);
1653 if (index < JS_BIT(16))
1654 return JSOP_NOP;
1655 indexBase = index >> 16;
1656 if (indexBase <= JSOP_INDEXBASE3 - JSOP_INDEXBASE1 + 1) {
1657 if (js_Emit1(cx, cg, (JSOp)(JSOP_INDEXBASE1 + indexBase - 1)) < 0)
1658 return JSOP_FALSE;
1659 return JSOP_RESETBASE0;
1662 if (index >= INDEX_LIMIT) {
1663 JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL,
1664 JSMSG_TOO_MANY_LITERALS);
1665 return JSOP_FALSE;
1668 if (js_Emit2(cx, cg, JSOP_INDEXBASE, (JSOp)indexBase) < 0)
1669 return JSOP_FALSE;
1670 return JSOP_RESETBASE;
1674 * Emit a bytecode and its 2-byte constant index immediate operand. If the
1675 * index requires more than 2 bytes, emit a prefix op whose 8-bit immediate
1676 * operand effectively extends the 16-bit immediate of the prefixed opcode,
1677 * by changing index "segment" (see jsinterp.c). We optimize segments 1-3
1678 * with single-byte JSOP_INDEXBASE[123] codes.
1680 * Such prefixing currently requires a suffix to restore the "zero segment"
1681 * register setting, but this could be optimized further.
1683 static JSBool
1684 EmitIndexOp(JSContext *cx, JSOp op, uintN index, JSCodeGenerator *cg)
1686 JSOp bigSuffix;
1688 bigSuffix = EmitBigIndexPrefix(cx, cg, index);
1689 if (bigSuffix == JSOP_FALSE)
1690 return JS_FALSE;
1691 EMIT_UINT16_IMM_OP(op, index);
1692 return bigSuffix == JSOP_NOP || js_Emit1(cx, cg, bigSuffix) >= 0;
/*
 * Slight sugar for EmitIndexOp, again accessing cx and cg from the macro
 * caller's lexical environment, and embedding a false return on error.
 */
#define EMIT_INDEX_OP(op, index)                                              \
    JS_BEGIN_MACRO                                                            \
        if (!EmitIndexOp(cx, op, index, cg))                                  \
            return JS_FALSE;                                                  \
    JS_END_MACRO
1705 static JSBool
1706 EmitAtomOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
1708 JSAtomListElement *ale;
1710 JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
1711 if (op == JSOP_GETPROP &&
1712 pn->pn_atom == cx->runtime->atomState.lengthAtom) {
1713 return js_Emit1(cx, cg, JSOP_LENGTH) >= 0;
1715 ale = cg->atomList.add(cg->compiler, pn->pn_atom);
1716 if (!ale)
1717 return JS_FALSE;
1718 return EmitIndexOp(cx, op, ALE_INDEX(ale), cg);
1721 static JSBool
1722 EmitObjectOp(JSContext *cx, JSObjectBox *objbox, JSOp op,
1723 JSCodeGenerator *cg)
1725 JS_ASSERT(JOF_OPTYPE(op) == JOF_OBJECT);
1726 return EmitIndexOp(cx, op, cg->objectList.index(objbox), cg);
1730 * What good are ARGNO_LEN and SLOTNO_LEN, you ask? The answer is that, apart
1731 * from EmitSlotIndexOp, they abstract out the detail that both are 2, and in
1732 * other parts of the code there's no necessary relationship between the two.
1733 * The abstraction cracks here in order to share EmitSlotIndexOp code among
1734 * the JSOP_DEFLOCALFUN and JSOP_GET{ARG,VAR,LOCAL}PROP cases.
1736 JS_STATIC_ASSERT(ARGNO_LEN == 2);
1737 JS_STATIC_ASSERT(SLOTNO_LEN == 2);
1739 static JSBool
1740 EmitSlotIndexOp(JSContext *cx, JSOp op, uintN slot, uintN index,
1741 JSCodeGenerator *cg)
1743 JSOp bigSuffix;
1744 ptrdiff_t off;
1745 jsbytecode *pc;
1747 JS_ASSERT(JOF_OPTYPE(op) == JOF_SLOTATOM ||
1748 JOF_OPTYPE(op) == JOF_SLOTOBJECT);
1749 bigSuffix = EmitBigIndexPrefix(cx, cg, index);
1750 if (bigSuffix == JSOP_FALSE)
1751 return JS_FALSE;
1753 /* Emit [op, slot, index]. */
1754 off = js_EmitN(cx, cg, op, 2 + INDEX_LEN);
1755 if (off < 0)
1756 return JS_FALSE;
1757 pc = CG_CODE(cg, off);
1758 SET_UINT16(pc, slot);
1759 pc += 2;
1760 SET_INDEX(pc, index);
1761 return bigSuffix == JSOP_NOP || js_Emit1(cx, cg, bigSuffix) >= 0;
1765 * Adjust the slot for a block local to account for the number of variables
1766 * that share the same index space with locals. Due to the incremental code
1767 * generation for top-level script, we do the adjustment via code patching in
1768 * JSCompiler::compileScript; see comments there.
1770 * The function returns -1 on failures.
1772 static jsint
1773 AdjustBlockSlot(JSContext *cx, JSCodeGenerator *cg, jsint slot)
1775 JS_ASSERT((jsuint) slot < cg->maxStackDepth);
1776 if (cg->flags & TCF_IN_FUNCTION) {
1777 slot += cg->fun->u.i.nvars;
1778 if ((uintN) slot >= SLOTNO_LIMIT) {
1779 js_ReportCompileErrorNumber(cx, CG_TS(cg), NULL,
1780 JSREPORT_ERROR,
1781 JSMSG_TOO_MANY_LOCALS);
1782 slot = -1;
1785 return slot;
1788 static bool
1789 EmitEnterBlock(JSContext *cx, JSParseNode *pn, JSCodeGenerator *cg)
1791 JS_ASSERT(PN_TYPE(pn) == TOK_LEXICALSCOPE);
1792 if (!EmitObjectOp(cx, pn->pn_objbox, JSOP_ENTERBLOCK, cg))
1793 return false;
1795 JSObject *blockObj = pn->pn_objbox->object;
1796 jsint depth = AdjustBlockSlot(cx, cg, OBJ_BLOCK_DEPTH(cx, blockObj));
1797 if (depth < 0)
1798 return false;
1800 for (uintN slot = JSSLOT_FREE(&js_BlockClass),
1801 limit = slot + OBJ_BLOCK_COUNT(cx, blockObj);
1802 slot < limit; slot++) {
1803 jsval v = STOBJ_GET_SLOT(blockObj, slot);
1805 /* Beware the empty destructuring dummy. */
1806 if (JSVAL_IS_VOID(v)) {
1807 JS_ASSERT(slot + 1 <= limit);
1808 continue;
1811 JSDefinition *dn = (JSDefinition *) JSVAL_TO_PRIVATE(v);
1812 JS_ASSERT(dn->pn_defn);
1813 JS_ASSERT(uintN(dn->frameSlot() + depth) < JS_BIT(16));
1814 dn->pn_cookie += depth;
1815 #ifdef DEBUG
1816 for (JSParseNode *pnu = dn->dn_uses; pnu; pnu = pnu->pn_link) {
1817 JS_ASSERT(pnu->pn_lexdef == dn);
1818 JS_ASSERT(!(pnu->pn_dflags & PND_BOUND));
1819 JS_ASSERT(pnu->pn_cookie == FREE_UPVAR_COOKIE);
1821 #endif
1824 OBJ_SCOPE(blockObj)->freeslot = JSSLOT_FREE(&js_BlockClass);
1825 return js_GrowSlots(cx, blockObj, JSSLOT_FREE(&js_BlockClass));
1829 * When eval is called from a function, the eval code or function code it
1830 * compiles may reference upvars that live in the eval-calling function. The
1831 * eval-invoked compiler does not have explicit definitions for these upvars
1832 * and we do not attempt to create them a-priori (by inspecting the function's
1833 * args and vars) -- we could, but we'd take an avoidable penalty for each
1834 * function local not referenced by any upvar. Instead, we map such upvars
1835 * lazily, growing upvarMap.vector by powers of two.
1837 * This function knows that it is called with pn pointing to a PN_NAME-arity
1838 * node, and cg->compiler->callerFrame having a non-null fun member, and the
1839 * static level of cg at least one greater than the eval-calling function's
1840 * static level.
1842 static bool
1843 MakeUpvarForEval(JSParseNode *pn, JSCodeGenerator *cg)
1845 JSContext *cx = cg->compiler->context;
1846 JSFunction *fun = cg->compiler->callerFrame->fun;
1847 uintN upvarLevel = fun->u.i.script->staticLevel;
1849 JSFunctionBox *funbox = cg->funbox;
1850 if (funbox) {
1852 * Treat top-level function definitions as escaping (i.e., as funargs),
1853 * required since we compile each such top level function or statement
1854 * and throw away the AST, so we can't yet see all funarg uses of this
1855 * function being compiled (cg->funbox->object). See bug 493177.
1857 if (funbox->level == fun->u.i.script->staticLevel + 1U &&
1858 !(((JSFunction *) funbox->object)->flags & JSFUN_LAMBDA)) {
1859 JS_ASSERT_IF(cx->options & JSOPTION_ANONFUNFIX,
1860 ((JSFunction *) funbox->object)->atom);
1861 return true;
1864 while (funbox->level >= upvarLevel) {
1865 if (funbox->node->pn_dflags & PND_FUNARG)
1866 return true;
1867 funbox = funbox->parent;
1868 if (!funbox)
1869 break;
1873 JSAtom *atom = pn->pn_atom;
1875 uintN index;
1876 JSLocalKind localKind = js_LookupLocal(cx, fun, atom, &index);
1877 if (localKind == JSLOCAL_NONE)
1878 return true;
1880 JS_ASSERT(cg->staticLevel > upvarLevel);
1881 if (cg->staticLevel >= JS_DISPLAY_SIZE || upvarLevel >= JS_DISPLAY_SIZE)
1882 return true;
1884 JSAtomListElement *ale = cg->upvarList.lookup(atom);
1885 if (!ale) {
1886 if ((cg->flags & TCF_IN_FUNCTION) &&
1887 !js_AddLocal(cx, cg->fun, atom, JSLOCAL_UPVAR)) {
1888 return false;
1891 ale = cg->upvarList.add(cg->compiler, atom);
1892 if (!ale)
1893 return false;
1894 JS_ASSERT(ALE_INDEX(ale) == cg->upvarList.count - 1);
1896 uint32 *vector = cg->upvarMap.vector;
1897 uint32 length = cg->upvarMap.length;
1899 JS_ASSERT(ALE_INDEX(ale) <= length);
1900 if (ALE_INDEX(ale) == length) {
1901 length = 2 * JS_MAX(2, length);
1902 vector = (uint32 *) cx->realloc(vector, length * sizeof *vector);
1903 if (!vector)
1904 return false;
1905 cg->upvarMap.vector = vector;
1906 cg->upvarMap.length = length;
1909 if (localKind != JSLOCAL_ARG)
1910 index += fun->nargs;
1911 JS_ASSERT(index < JS_BIT(16));
1913 uintN skip = cg->staticLevel - upvarLevel;
1914 vector[ALE_INDEX(ale)] = MAKE_UPVAR_COOKIE(skip, index);
1917 pn->pn_op = JSOP_GETUPVAR;
1918 pn->pn_cookie = MAKE_UPVAR_COOKIE(cg->staticLevel, ALE_INDEX(ale));
1919 pn->pn_dflags |= PND_BOUND;
1920 return true;
1924 * BindNameToSlot attempts to optimize name gets and sets to stack slot loads
1925 * and stores, given the compile-time information in cg and a TOK_NAME node pn.
1926 * It returns false on error, true on success.
1928 * The caller can inspect pn->pn_cookie for FREE_UPVAR_COOKIE to tell whether
1929 * optimization occurred, in which case BindNameToSlot also updated pn->pn_op.
1930 * If pn->pn_cookie is still FREE_UPVAR_COOKIE on return, pn->pn_op still may
1931 * have been optimized, e.g., from JSOP_NAME to JSOP_CALLEE. Whether or not
1932 * pn->pn_op was modified, if this function finds an argument or local variable
1933 * name, PND_CONST will be set in pn_dflags for read-only properties after a
1934 * successful return.
1936 * NB: if you add more opcodes specialized from JSOP_NAME, etc., don't forget
1937 * to update the TOK_FOR (for-in) and TOK_ASSIGN (op=, e.g. +=) special cases
1938 * in js_EmitTree.
1940 static JSBool
1941 BindNameToSlot(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
1943 JSDefinition *dn;
1944 JSOp op;
1945 JSAtom *atom;
1946 uint32 cookie;
1947 JSDefinition::Kind dn_kind;
1948 JSAtomListElement *ale;
1949 uintN index;
1951 JS_ASSERT(pn->pn_type == TOK_NAME);
1953 /* Idempotency tests come first, since we may be called more than once. */
1954 if (pn->pn_dflags & PND_BOUND)
1955 return JS_TRUE;
1957 /* No cookie initialized for these two, they're pre-bound by definition. */
1958 JS_ASSERT(pn->pn_op != JSOP_ARGUMENTS && pn->pn_op != JSOP_CALLEE);
1961 * The parser linked all uses (including forward references) to their
1962 * definitions, unless a with statement or direct eval intervened.
1964 if (pn->pn_used) {
1965 JS_ASSERT(pn->pn_cookie == FREE_UPVAR_COOKIE);
1966 dn = pn->pn_lexdef;
1967 JS_ASSERT(dn->pn_defn);
1968 pn->pn_dflags |= (dn->pn_dflags & PND_CONST);
1969 } else {
1970 if (!pn->pn_defn)
1971 return JS_TRUE;
1972 dn = (JSDefinition *) pn;
1975 op = PN_OP(pn);
1976 if (op == JSOP_NOP)
1977 return JS_TRUE;
1979 JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
1980 atom = pn->pn_atom;
1981 cookie = dn->pn_cookie;
1982 dn_kind = dn->kind();
1985 * Turn attempts to mutate const-declared bindings into get ops (for
1986 * pre-increment and pre-decrement ops, our caller will have to emit
1987 * JSOP_POS, JSOP_ONE, and JSOP_ADD as well).
1989 * Turn JSOP_DELNAME into JSOP_FALSE if dn is known, as all declared
1990 * bindings visible to the compiler are permanent in JS unless the
1991 * declaration originates in eval code. We detect eval code by testing
1992 * cg->compiler->callerFrame, which is set only by eval or a debugger
1993 * equivalent.
1995 * Note that this callerFrame non-null test must be qualified by testing
1996 * !cg->funbox to exclude function code nested in eval code, which is not
1997 * subject to the deletable binding exception.
1999 switch (op) {
2000 case JSOP_NAME:
2001 case JSOP_SETCONST:
2002 break;
2003 case JSOP_DELNAME:
2004 if (dn_kind != JSDefinition::UNKNOWN) {
2005 if (cg->compiler->callerFrame && !cg->funbox)
2006 JS_ASSERT(cg->flags & TCF_COMPILE_N_GO);
2007 else
2008 pn->pn_op = JSOP_FALSE;
2009 pn->pn_dflags |= PND_BOUND;
2010 return JS_TRUE;
2012 break;
2013 default:
2014 if (pn->isConst())
2015 pn->pn_op = op = JSOP_NAME;
2018 if (cookie == FREE_UPVAR_COOKIE) {
2019 JSStackFrame *caller = cg->compiler->callerFrame;
2020 if (caller) {
2021 JS_ASSERT(cg->flags & TCF_COMPILE_N_GO);
2024 * Don't generate upvars on the left side of a for loop. See
2025 * bug 470758.
2027 if (cg->flags & TCF_IN_FOR_INIT)
2028 return JS_TRUE;
2030 JS_ASSERT(caller->script);
2031 if (!caller->fun)
2032 return JS_TRUE;
2035 * Make sure the variable object used by the compiler to initialize
2036 * parent links matches the caller's varobj. Compile-n-go compiler-
2037 * created function objects have the top-level cg's scopeChain set
2038 * as their parent by JSCompiler::newFunction.
2040 JSObject *scopeobj = (cg->flags & TCF_IN_FUNCTION)
2041 ? STOBJ_GET_PARENT(FUN_OBJECT(cg->fun))
2042 : cg->scopeChain;
2043 if (scopeobj != caller->varobj)
2044 return JS_TRUE;
2047 * We are compiling eval or debug script inside a function frame
2048 * and the scope chain matches the function's variable object.
2049 * Optimize access to function's arguments and variable and the
2050 * arguments object.
2052 if (op != JSOP_NAME)
2053 return JS_TRUE;
2055 return MakeUpvarForEval(pn, cg);
2057 return JS_TRUE;
2060 if (dn->pn_dflags & PND_GVAR) {
2062 * If this is a global reference from within a function, leave pn_op as
2063 * JSOP_NAME, etc. We could emit JSOP_*GVAR ops within function code if
2064 * only we could depend on the global frame's slots being valid for all
2065 * calls to the function.
2067 if (cg->flags & TCF_IN_FUNCTION)
2068 return JS_TRUE;
2071 * We are optimizing global variables and there may be no pre-existing
2072 * global property named atom when this global script runs. If atom was
2073 * declared via const or var, optimize pn to access fp->vars using the
2074 * appropriate JSOP_*GVAR op.
2076 * FIXME: should be able to optimize global function access too.
2078 JS_ASSERT(dn_kind == JSDefinition::VAR || dn_kind == JSDefinition::CONST);
2080 switch (op) {
2081 case JSOP_NAME: op = JSOP_GETGVAR; break;
2082 case JSOP_SETNAME: op = JSOP_SETGVAR; break;
2083 case JSOP_SETCONST: /* NB: no change */ break;
2084 case JSOP_INCNAME: op = JSOP_INCGVAR; break;
2085 case JSOP_NAMEINC: op = JSOP_GVARINC; break;
2086 case JSOP_DECNAME: op = JSOP_DECGVAR; break;
2087 case JSOP_NAMEDEC: op = JSOP_GVARDEC; break;
2088 case JSOP_FORNAME: /* NB: no change */ break;
2089 case JSOP_DELNAME: /* NB: no change */ break;
2090 default: JS_NOT_REACHED("gvar");
2092 pn->pn_op = op;
2093 pn->pn_cookie = cookie;
2094 pn->pn_dflags |= PND_BOUND;
2095 return JS_TRUE;
2098 uintN level = UPVAR_FRAME_SKIP(cookie);
2099 JS_ASSERT(cg->staticLevel >= level);
2102 * A JSDefinition witnessed as a declaration by the parser cannot be an
2103 * upvar, unless it is the degenerate kind of upvar selected above (in the
2104 * code before the PND_GVAR test) for the special case of compile-and-go
2105 * code generated from eval called from a function, where the eval code
2106 * uses local vars defined in the function. We detect this upvar-for-eval
2107 * case by checking dn's op.
2109 if (PN_OP(dn) == JSOP_GETUPVAR) {
2110 JS_ASSERT(cg->staticLevel >= level);
2111 if (op != JSOP_NAME)
2112 return JS_TRUE;
2114 #ifdef DEBUG
2115 JSStackFrame *caller = cg->compiler->callerFrame;
2116 JS_ASSERT(caller);
2118 JSTreeContext *tc = cg;
2119 while (tc->staticLevel != level)
2120 tc = tc->parent;
2121 JS_ASSERT(tc->flags & TCF_COMPILING);
2123 JSCodeGenerator *evalcg = (JSCodeGenerator *) tc;
2124 JS_ASSERT(evalcg->flags & TCF_COMPILE_N_GO);
2125 JS_ASSERT(!(evalcg->flags & TCF_IN_FOR_INIT));
2126 JS_ASSERT(caller->script);
2127 JS_ASSERT(caller->fun && caller->varobj == evalcg->scopeChain);
2128 #endif
2130 if (cg->staticLevel == level) {
2131 pn->pn_op = JSOP_GETUPVAR;
2132 pn->pn_cookie = cookie;
2133 pn->pn_dflags |= PND_BOUND;
2134 return JS_TRUE;
2137 return MakeUpvarForEval(pn, cg);
2140 uintN skip = cg->staticLevel - level;
2141 if (skip != 0) {
2142 JS_ASSERT(cg->flags & TCF_IN_FUNCTION);
2143 JS_ASSERT(cg->lexdeps.lookup(atom));
2144 JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
2145 JS_ASSERT(cg->fun->u.i.skipmin <= skip);
2148 * If op is a mutating opcode, this upvar's static level is too big to
2149 * index into the display, or the function is heavyweight, we fall back
2150 * on JSOP_*NAME*.
2152 if (op != JSOP_NAME)
2153 return JS_TRUE;
2154 if (level >= JS_DISPLAY_SIZE)
2155 return JS_TRUE;
2156 if (cg->flags & TCF_FUN_HEAVYWEIGHT)
2157 return JS_TRUE;
2159 if (FUN_FLAT_CLOSURE(cg->fun)) {
2160 op = JSOP_GETDSLOT;
2161 } else {
2163 * The function we're compiling may not be heavyweight, but if it
2164 * escapes as a funarg, we can't use JSOP_GETUPVAR/JSOP_CALLUPVAR.
2165 * JSCompiler::analyzeFunctions has arranged for this function's
2166 * enclosing functions to be heavyweight, so we can safely stick
2167 * with JSOP_NAME/JSOP_CALLNAME.
2169 if (cg->funbox->node->pn_dflags & PND_FUNARG)
2170 return JS_TRUE;
2173 * Generator functions may be resumed from any call stack, which
2174 * defeats the display optimization to static link searching used
2175 * by JSOP_{GET,CALL}UPVAR.
2177 if (cg->flags & TCF_FUN_IS_GENERATOR)
2178 return JS_TRUE;
2180 op = JSOP_GETUPVAR;
2183 ale = cg->upvarList.lookup(atom);
2184 if (ale) {
2185 index = ALE_INDEX(ale);
2186 } else {
2187 if (!js_AddLocal(cx, cg->fun, atom, JSLOCAL_UPVAR))
2188 return JS_FALSE;
2190 ale = cg->upvarList.add(cg->compiler, atom);
2191 if (!ale)
2192 return JS_FALSE;
2193 index = ALE_INDEX(ale);
2194 JS_ASSERT(index == cg->upvarList.count - 1);
2196 uint32 *vector = cg->upvarMap.vector;
2197 if (!vector) {
2198 uint32 length = cg->lexdeps.count;
2200 vector = (uint32 *) js_calloc(length * sizeof *vector);
2201 if (!vector) {
2202 JS_ReportOutOfMemory(cx);
2203 return JS_FALSE;
2205 cg->upvarMap.vector = vector;
2206 cg->upvarMap.length = length;
2209 uintN slot = UPVAR_FRAME_SLOT(cookie);
2210 if (dn_kind != JSDefinition::ARG) {
2211 JSTreeContext *tc = cg;
2212 do {
2213 tc = tc->parent;
2214 } while (tc->staticLevel != level);
2215 if (tc->flags & TCF_IN_FUNCTION)
2216 slot += tc->fun->nargs;
2219 vector[index] = MAKE_UPVAR_COOKIE(skip, slot);
2222 pn->pn_op = op;
2223 pn->pn_cookie = index;
2224 pn->pn_dflags |= PND_BOUND;
2225 return JS_TRUE;
2229 * We are compiling a function body and may be able to optimize name
2230 * to stack slot. Look for an argument or variable in the function and
2231 * rewrite pn_op and update pn accordingly.
2233 switch (dn_kind) {
2234 case JSDefinition::UNKNOWN:
2235 return JS_TRUE;
2237 case JSDefinition::LET:
2238 switch (op) {
2239 case JSOP_NAME: op = JSOP_GETLOCAL; break;
2240 case JSOP_SETNAME: op = JSOP_SETLOCAL; break;
2241 case JSOP_INCNAME: op = JSOP_INCLOCAL; break;
2242 case JSOP_NAMEINC: op = JSOP_LOCALINC; break;
2243 case JSOP_DECNAME: op = JSOP_DECLOCAL; break;
2244 case JSOP_NAMEDEC: op = JSOP_LOCALDEC; break;
2245 case JSOP_FORNAME: op = JSOP_FORLOCAL; break;
2246 default: JS_NOT_REACHED("let");
2248 break;
2250 case JSDefinition::ARG:
2251 switch (op) {
2252 case JSOP_NAME: op = JSOP_GETARG; break;
2253 case JSOP_SETNAME: op = JSOP_SETARG; break;
2254 case JSOP_INCNAME: op = JSOP_INCARG; break;
2255 case JSOP_NAMEINC: op = JSOP_ARGINC; break;
2256 case JSOP_DECNAME: op = JSOP_DECARG; break;
2257 case JSOP_NAMEDEC: op = JSOP_ARGDEC; break;
2258 case JSOP_FORNAME: op = JSOP_FORARG; break;
2259 default: JS_NOT_REACHED("arg");
2261 JS_ASSERT(!pn->isConst());
2262 break;
2264 case JSDefinition::VAR:
2265 if (PN_OP(dn) == JSOP_CALLEE) {
2266 JS_ASSERT(op != JSOP_CALLEE);
2267 JS_ASSERT((cg->fun->flags & JSFUN_LAMBDA) && atom == cg->fun->atom);
2269 switch (op) {
2270 default:
2272 * Leave pn->pn_op == JSOP_NAME if cg->fun is heavyweight, as
2273 * we cannot be sure cg->fun is not something of the form:
2275 * var ff = (function f(s) { eval(s); return f; });
2277 * where a caller invokes ff("var f = 42"). The result returned
2278 * for such an invocation must be 42, since the callee name is
2279 * lexically bound in an outer declarative environment from the
2280 * function's activation. See jsfun.cpp:call_resolve.
2282 JS_ASSERT(op != JSOP_DELNAME);
2283 if (!(cg->flags & TCF_FUN_HEAVYWEIGHT)) {
2284 op = JSOP_CALLEE;
2285 pn->pn_dflags |= PND_CONST;
2287 break;
2289 pn->pn_op = op;
2290 pn->pn_dflags |= PND_BOUND;
2291 return JS_TRUE;
2293 /* FALL THROUGH */
2295 default:
2296 JS_ASSERT_IF(dn_kind != JSDefinition::FUNCTION,
2297 dn_kind == JSDefinition::VAR ||
2298 dn_kind == JSDefinition::CONST);
2299 switch (op) {
2300 case JSOP_NAME: op = JSOP_GETLOCAL; break;
2301 case JSOP_SETNAME: op = JSOP_SETLOCAL; break;
2302 case JSOP_SETCONST: op = JSOP_SETLOCAL; break;
2303 case JSOP_INCNAME: op = JSOP_INCLOCAL; break;
2304 case JSOP_NAMEINC: op = JSOP_LOCALINC; break;
2305 case JSOP_DECNAME: op = JSOP_DECLOCAL; break;
2306 case JSOP_NAMEDEC: op = JSOP_LOCALDEC; break;
2307 case JSOP_FORNAME: op = JSOP_FORLOCAL; break;
2308 default: JS_NOT_REACHED("local");
2310 JS_ASSERT_IF(dn_kind == JSDefinition::CONST, pn->pn_dflags & PND_CONST);
2311 break;
2314 JS_ASSERT(op != PN_OP(pn));
2315 pn->pn_op = op;
2316 pn->pn_cookie = UPVAR_FRAME_SLOT(cookie);
2317 pn->pn_dflags |= PND_BOUND;
2318 return JS_TRUE;
2322 * If pn contains a useful expression, return true with *answer set to true.
2323 * If pn contains a useless expression, return true with *answer set to false.
2324 * Return false on error.
2326 * The caller should initialize *answer to false and invoke this function on
2327 * an expression statement or similar subtree to decide whether the tree could
2328 * produce code that has any side effects. For an expression statement, we
2329 * define useless code as code with no side effects, because the main effect,
2330 * the value left on the stack after the code executes, will be discarded by a
2331 * pop bytecode.
2333 static JSBool
2334 CheckSideEffects(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
2335 JSBool *answer)
2337 JSBool ok;
2338 JSParseNode *pn2;
2340 ok = JS_TRUE;
2341 if (!pn || *answer)
2342 return ok;
2344 switch (pn->pn_arity) {
2345 case PN_FUNC:
2347 * A named function, contrary to ES3, is no longer useful, because we
2348 * bind its name lexically (using JSOP_CALLEE) instead of creating an
2349 * Object instance and binding a readonly, permanent property in it
2350 * (the object and binding can be detected and hijacked or captured).
2351 * This is a bug fix to ES3; it is fixed in ES3.1 drafts.
2353 *answer = JS_FALSE;
2354 break;
2356 case PN_LIST:
2357 if (pn->pn_op == JSOP_NOP ||
2358 pn->pn_op == JSOP_OR || pn->pn_op == JSOP_AND ||
2359 pn->pn_op == JSOP_STRICTEQ || pn->pn_op == JSOP_STRICTNE) {
2361 * Non-operators along with ||, &&, ===, and !== never invoke
2362 * toString or valueOf.
2364 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next)
2365 ok &= CheckSideEffects(cx, cg, pn2, answer);
2366 } else {
2368 * All invocation operations (construct: TOK_NEW, call: TOK_LP)
2369 * are presumed to be useful, because they may have side effects
2370 * even if their main effect (their return value) is discarded.
2372 * TOK_LB binary trees of 3 or more nodes are flattened into lists
2373 * to avoid too much recursion. All such lists must be presumed
2374 * to be useful because each index operation could invoke a getter
2375 * (the JSOP_ARGUMENTS special case below, in the PN_BINARY case,
2376 * does not apply here: arguments[i][j] might invoke a getter).
2378 * Likewise, array and object initialisers may call prototype
2379 * setters (the __defineSetter__ built-in, and writable __proto__
2380 * on Array.prototype create this hazard). Initialiser list nodes
2381 * have JSOP_NEWINIT in their pn_op.
2383 *answer = JS_TRUE;
2385 break;
2387 case PN_TERNARY:
2388 ok = CheckSideEffects(cx, cg, pn->pn_kid1, answer) &&
2389 CheckSideEffects(cx, cg, pn->pn_kid2, answer) &&
2390 CheckSideEffects(cx, cg, pn->pn_kid3, answer);
2391 break;
2393 case PN_BINARY:
2394 if (pn->pn_type == TOK_ASSIGN) {
2396 * Assignment is presumed to be useful, even if the next operation
2397 * is another assignment overwriting this one's ostensible effect,
2398 * because the left operand may be a property with a setter that
2399 * has side effects.
2401 * The only exception is assignment of a useless value to a const
2402 * declared in the function currently being compiled.
2404 pn2 = pn->pn_left;
2405 if (pn2->pn_type != TOK_NAME) {
2406 *answer = JS_TRUE;
2407 } else {
2408 if (!BindNameToSlot(cx, cg, pn2))
2409 return JS_FALSE;
2410 if (!CheckSideEffects(cx, cg, pn->pn_right, answer))
2411 return JS_FALSE;
2412 if (!*answer && (pn->pn_op != JSOP_NOP || !pn2->isConst()))
2413 *answer = JS_TRUE;
2415 } else {
2416 if (pn->pn_op == JSOP_OR || pn->pn_op == JSOP_AND ||
2417 pn->pn_op == JSOP_STRICTEQ || pn->pn_op == JSOP_STRICTNE) {
2419 * ||, &&, ===, and !== do not convert their operands via
2420 * toString or valueOf method calls.
2422 ok = CheckSideEffects(cx, cg, pn->pn_left, answer) &&
2423 CheckSideEffects(cx, cg, pn->pn_right, answer);
2424 } else {
2426 * We can't easily prove that neither operand ever denotes an
2427 * object with a toString or valueOf method.
2429 *answer = JS_TRUE;
2432 break;
2434 case PN_UNARY:
2435 switch (pn->pn_type) {
2436 case TOK_RP:
2437 ok = CheckSideEffects(cx, cg, pn->pn_kid, answer);
2438 break;
2440 case TOK_DELETE:
2441 pn2 = pn->pn_kid;
2442 switch (pn2->pn_type) {
2443 case TOK_NAME:
2444 if (!BindNameToSlot(cx, cg, pn2))
2445 return JS_FALSE;
2446 if (pn2->isConst()) {
2447 *answer = JS_FALSE;
2448 break;
2450 /* FALL THROUGH */
2451 case TOK_DOT:
2452 #if JS_HAS_XML_SUPPORT
2453 case TOK_DBLDOT:
2454 #endif
2455 #if JS_HAS_LVALUE_RETURN
2456 case TOK_LP:
2457 #endif
2458 case TOK_LB:
2459 /* All these delete addressing modes have effects too. */
2460 *answer = JS_TRUE;
2461 break;
2462 default:
2463 ok = CheckSideEffects(cx, cg, pn2, answer);
2464 break;
2466 break;
2468 case TOK_UNARYOP:
2469 if (pn->pn_op == JSOP_NOT) {
2470 /* ! does not convert its operand via toString or valueOf. */
2471 ok = CheckSideEffects(cx, cg, pn->pn_kid, answer);
2472 break;
2474 /* FALL THROUGH */
2476 default:
2478 * All of TOK_INC, TOK_DEC, TOK_THROW, TOK_YIELD, and TOK_DEFSHARP
2479 * have direct effects. Of the remaining unary-arity node types,
2480 * we can't easily prove that the operand never denotes an object
2481 * with a toString or valueOf method.
2483 *answer = JS_TRUE;
2484 break;
2486 break;
2488 case PN_NAME:
2490 * Take care to avoid trying to bind a label name (labels, both for
2491 * statements and property values in object initialisers, have pn_op
2492 * defaulted to JSOP_NOP).
2494 if (pn->pn_type == TOK_NAME && pn->pn_op != JSOP_NOP) {
2495 if (!BindNameToSlot(cx, cg, pn))
2496 return JS_FALSE;
2497 if (pn->pn_op != JSOP_ARGUMENTS && pn->pn_op != JSOP_CALLEE &&
2498 pn->pn_cookie == FREE_UPVAR_COOKIE) {
2500 * Not an argument or local variable use, and not a use of a
2501 * unshadowed named function expression's given name, so this
2502 * expression could invoke a getter that has side effects.
2504 *answer = JS_TRUE;
2507 pn2 = pn->maybeExpr();
2508 if (pn->pn_type == TOK_DOT) {
2509 if (pn2->pn_type == TOK_NAME && !BindNameToSlot(cx, cg, pn2))
2510 return JS_FALSE;
2511 if (!(pn2->pn_op == JSOP_ARGUMENTS &&
2512 pn->pn_atom == cx->runtime->atomState.lengthAtom)) {
2514 * Any dotted property reference could call a getter, except
2515 * for arguments.length where arguments is unambiguous.
2517 *answer = JS_TRUE;
2520 ok = CheckSideEffects(cx, cg, pn2, answer);
2521 break;
2523 case PN_NAMESET:
2524 ok = CheckSideEffects(cx, cg, pn->pn_tree, answer);
2525 break;
2527 case PN_NULLARY:
2528 if (pn->pn_type == TOK_DEBUGGER)
2529 *answer = JS_TRUE;
2530 break;
2532 return ok;
2535 static JSBool
2536 EmitNameOp(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
2537 JSBool callContext)
2539 JSOp op;
2541 if (!BindNameToSlot(cx, cg, pn))
2542 return JS_FALSE;
2543 op = PN_OP(pn);
2545 if (callContext) {
2546 switch (op) {
2547 case JSOP_NAME:
2548 op = JSOP_CALLNAME;
2549 break;
2550 case JSOP_GETGVAR:
2551 op = JSOP_CALLGVAR;
2552 break;
2553 case JSOP_GETARG:
2554 op = JSOP_CALLARG;
2555 break;
2556 case JSOP_GETLOCAL:
2557 op = JSOP_CALLLOCAL;
2558 break;
2559 case JSOP_GETUPVAR:
2560 op = JSOP_CALLUPVAR;
2561 break;
2562 case JSOP_GETDSLOT:
2563 op = JSOP_CALLDSLOT;
2564 break;
2565 default:
2566 JS_ASSERT(op == JSOP_ARGUMENTS || op == JSOP_CALLEE);
2567 break;
2571 if (op == JSOP_ARGUMENTS || op == JSOP_CALLEE) {
2572 if (js_Emit1(cx, cg, op) < 0)
2573 return JS_FALSE;
2574 if (callContext && js_Emit1(cx, cg, JSOP_NULL) < 0)
2575 return JS_FALSE;
2576 } else {
2577 if (pn->pn_cookie != FREE_UPVAR_COOKIE) {
2578 EMIT_UINT16_IMM_OP(op, pn->pn_cookie);
2579 } else {
2580 if (!EmitAtomOp(cx, pn, op, cg))
2581 return JS_FALSE;
2585 return JS_TRUE;
#if JS_HAS_XML_SUPPORT
/*
 * Emit an E4X computed XML name (JSOP_XMLNAME or JSOP_CALLXMLNAME): evaluate
 * the name expression with TCF_IN_FOR_INIT temporarily cleared (so the kid is
 * not parenthesization-restricted as a for-loop head would be), annotate the
 * pc range with SRC_PCBASE for the decompiler, then emit op itself.
 */
static JSBool
EmitXMLName(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
{
    JSParseNode *pn2;
    uintN oldflags;

    JS_ASSERT(pn->pn_type == TOK_UNARYOP);
    JS_ASSERT(pn->pn_op == JSOP_XMLNAME);
    JS_ASSERT(op == JSOP_XMLNAME || op == JSOP_CALLXMLNAME);

    pn2 = pn->pn_kid;
    oldflags = cg->flags;
    cg->flags &= ~TCF_IN_FOR_INIT;
    if (!js_EmitTree(cx, cg, pn2))
        return JS_FALSE;
    /* Restore the caller's TCF_IN_FOR_INIT bit. */
    cg->flags |= oldflags & TCF_IN_FOR_INIT;
    if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
                       CG_OFFSET(cg) - pn2->pn_offset) < 0) {
        return JS_FALSE;
    }

    return js_Emit1(cx, cg, op) >= 0;
}
#endif
2614 static JSBool
2615 EmitSpecialPropOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
2618 * Special case for obj.__proto__, obj.__parent__, obj.__count__ to
2619 * deoptimize away from fast paths in the interpreter and trace recorder,
2620 * which skip dense array instances by going up to Array.prototype before
2621 * looking up the property name.
2623 JSAtomListElement *ale = cg->atomList.add(cg->compiler, pn->pn_atom);
2624 if (!ale)
2625 return JS_FALSE;
2626 if (!EmitIndexOp(cx, JSOP_QNAMEPART, ALE_INDEX(ale), cg))
2627 return JS_FALSE;
2628 if (js_Emit1(cx, cg, op) < 0)
2629 return JS_FALSE;
2630 return JS_TRUE;
2633 static JSBool
2634 EmitPropOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg,
2635 JSBool callContext)
2637 JSParseNode *pn2, *pndot, *pnup, *pndown;
2638 ptrdiff_t top;
2640 JS_ASSERT(pn->pn_arity == PN_NAME);
2641 pn2 = pn->maybeExpr();
2643 /* Special case deoptimization on __proto__, __count__ and __parent__. */
2644 if ((op == JSOP_GETPROP || op == JSOP_CALLPROP) &&
2645 (pn->pn_atom == cx->runtime->atomState.protoAtom ||
2646 pn->pn_atom == cx->runtime->atomState.parentAtom ||
2647 pn->pn_atom == cx->runtime->atomState.countAtom)) {
2648 if (pn2 && !js_EmitTree(cx, cg, pn2))
2649 return JS_FALSE;
2650 return EmitSpecialPropOp(cx, pn, callContext ? JSOP_CALLELEM : JSOP_GETELEM, cg);
2653 if (callContext) {
2654 JS_ASSERT(pn->pn_type == TOK_DOT);
2655 JS_ASSERT(op == JSOP_GETPROP);
2656 op = JSOP_CALLPROP;
2657 } else if (op == JSOP_GETPROP && pn->pn_type == TOK_DOT) {
2658 if (pn2->pn_op == JSOP_THIS) {
2659 if (pn->pn_atom != cx->runtime->atomState.lengthAtom) {
2660 /* Fast path for gets of |this.foo|. */
2661 return EmitAtomOp(cx, pn, JSOP_GETTHISPROP, cg);
2663 } else if (pn2->pn_type == TOK_NAME) {
2665 * Try to optimize:
2666 * - arguments.length into JSOP_ARGCNT
2667 * - argname.prop into JSOP_GETARGPROP
2668 * - localname.prop into JSOP_GETLOCALPROP
2669 * but don't do this if the property is 'length' -- prefer to emit
2670 * JSOP_GETARG, etc., and then JSOP_LENGTH.
2672 if (!BindNameToSlot(cx, cg, pn2))
2673 return JS_FALSE;
2674 if (pn->pn_atom == cx->runtime->atomState.lengthAtom) {
2675 if (pn2->pn_op == JSOP_ARGUMENTS)
2676 return js_Emit1(cx, cg, JSOP_ARGCNT) >= 0;
2677 } else {
2678 switch (pn2->pn_op) {
2679 case JSOP_GETARG:
2680 op = JSOP_GETARGPROP;
2681 goto do_indexconst;
2682 case JSOP_GETLOCAL:
2683 op = JSOP_GETLOCALPROP;
2684 do_indexconst: {
2685 JSAtomListElement *ale;
2686 jsatomid atomIndex;
2688 ale = cg->atomList.add(cg->compiler, pn->pn_atom);
2689 if (!ale)
2690 return JS_FALSE;
2691 atomIndex = ALE_INDEX(ale);
2692 return EmitSlotIndexOp(cx, op, pn2->pn_cookie, atomIndex, cg);
2695 default:;
2702 * If the object operand is also a dotted property reference, reverse the
2703 * list linked via pn_expr temporarily so we can iterate over it from the
2704 * bottom up (reversing again as we go), to avoid excessive recursion.
2706 if (pn2->pn_type == TOK_DOT) {
2707 pndot = pn2;
2708 pnup = NULL;
2709 top = CG_OFFSET(cg);
2710 for (;;) {
2711 /* Reverse pndot->pn_expr to point up, not down. */
2712 pndot->pn_offset = top;
2713 JS_ASSERT(!pndot->pn_used);
2714 pndown = pndot->pn_expr;
2715 pndot->pn_expr = pnup;
2716 if (pndown->pn_type != TOK_DOT)
2717 break;
2718 pnup = pndot;
2719 pndot = pndown;
2722 /* pndown is a primary expression, not a dotted property reference. */
2723 if (!js_EmitTree(cx, cg, pndown))
2724 return JS_FALSE;
2726 do {
2727 /* Walk back up the list, emitting annotated name ops. */
2728 if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
2729 CG_OFFSET(cg) - pndown->pn_offset) < 0) {
2730 return JS_FALSE;
2734 * Special case deoptimization on __proto__, __count__ and
2735 * __parent__, as above.
2737 if (pndot->pn_arity == PN_NAME &&
2738 (pndot->pn_atom == cx->runtime->atomState.protoAtom ||
2739 pndot->pn_atom == cx->runtime->atomState.parentAtom ||
2740 pndot->pn_atom == cx->runtime->atomState.countAtom)) {
2741 if (!EmitSpecialPropOp(cx, pndot, JSOP_GETELEM, cg))
2742 return JS_FALSE;
2743 } else if (!EmitAtomOp(cx, pndot, PN_OP(pndot), cg)) {
2744 return JS_FALSE;
2747 /* Reverse the pn_expr link again. */
2748 pnup = pndot->pn_expr;
2749 pndot->pn_expr = pndown;
2750 pndown = pndot;
2751 } while ((pndot = pnup) != NULL);
2752 } else {
2753 if (!js_EmitTree(cx, cg, pn2))
2754 return JS_FALSE;
2757 if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
2758 CG_OFFSET(cg) - pn2->pn_offset) < 0) {
2759 return JS_FALSE;
2762 return EmitAtomOp(cx, pn, op, cg);
2765 static JSBool
2766 EmitElemOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
2768 ptrdiff_t top;
2769 JSParseNode *left, *right, *next, ltmp, rtmp;
2770 jsint slot;
2772 top = CG_OFFSET(cg);
2773 if (pn->pn_arity == PN_LIST) {
2774 /* Left-associative operator chain to avoid too much recursion. */
2775 JS_ASSERT(pn->pn_op == JSOP_GETELEM);
2776 JS_ASSERT(pn->pn_count >= 3);
2777 left = pn->pn_head;
2778 right = pn->last();
2779 next = left->pn_next;
2780 JS_ASSERT(next != right);
2783 * Try to optimize arguments[0][j]... into JSOP_ARGSUB<0> followed by
2784 * one or more index expression and JSOP_GETELEM op pairs.
2786 if (left->pn_type == TOK_NAME && next->pn_type == TOK_NUMBER) {
2787 if (!BindNameToSlot(cx, cg, left))
2788 return JS_FALSE;
2789 if (left->pn_op == JSOP_ARGUMENTS &&
2790 JSDOUBLE_IS_INT(next->pn_dval, slot) &&
2791 (jsuint)slot < JS_BIT(16)) {
2793 * arguments[i]() requires arguments object as "this".
2794 * Check that we never generates list for that usage.
2796 JS_ASSERT(op != JSOP_CALLELEM || next->pn_next);
2797 left->pn_offset = next->pn_offset = top;
2798 EMIT_UINT16_IMM_OP(JSOP_ARGSUB, (jsatomid)slot);
2799 left = next;
2800 next = left->pn_next;
2805 * Check whether we generated JSOP_ARGSUB, just above, and have only
2806 * one more index expression to emit. Given arguments[0][j], we must
2807 * skip the while loop altogether, falling through to emit code for j
2808 * (in the subtree referenced by right), followed by the annotated op,
2809 * at the bottom of this function.
2811 JS_ASSERT(next != right || pn->pn_count == 3);
2812 if (left == pn->pn_head) {
2813 if (!js_EmitTree(cx, cg, left))
2814 return JS_FALSE;
2816 while (next != right) {
2817 if (!js_EmitTree(cx, cg, next))
2818 return JS_FALSE;
2819 if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
2820 return JS_FALSE;
2821 if (js_Emit1(cx, cg, JSOP_GETELEM) < 0)
2822 return JS_FALSE;
2823 next = next->pn_next;
2825 } else {
2826 if (pn->pn_arity == PN_NAME) {
2828 * Set left and right so pn appears to be a TOK_LB node, instead
2829 * of a TOK_DOT node. See the TOK_FOR/IN case in js_EmitTree, and
2830 * EmitDestructuringOps nearer below. In the destructuring case,
2831 * the base expression (pn_expr) of the name may be null, which
2832 * means we have to emit a JSOP_BINDNAME.
2834 left = pn->maybeExpr();
2835 if (!left) {
2836 left = &ltmp;
2837 left->pn_type = TOK_STRING;
2838 left->pn_op = JSOP_BINDNAME;
2839 left->pn_arity = PN_NULLARY;
2840 left->pn_pos = pn->pn_pos;
2841 left->pn_atom = pn->pn_atom;
2843 right = &rtmp;
2844 right->pn_type = TOK_STRING;
2845 JS_ASSERT(ATOM_IS_STRING(pn->pn_atom));
2846 right->pn_op = js_IsIdentifier(ATOM_TO_STRING(pn->pn_atom))
2847 ? JSOP_QNAMEPART
2848 : JSOP_STRING;
2849 right->pn_arity = PN_NULLARY;
2850 right->pn_pos = pn->pn_pos;
2851 right->pn_atom = pn->pn_atom;
2852 } else {
2853 JS_ASSERT(pn->pn_arity == PN_BINARY);
2854 left = pn->pn_left;
2855 right = pn->pn_right;
2858 /* Try to optimize arguments[0] (e.g.) into JSOP_ARGSUB<0>. */
2859 if (op == JSOP_GETELEM &&
2860 left->pn_type == TOK_NAME &&
2861 right->pn_type == TOK_NUMBER) {
2862 if (!BindNameToSlot(cx, cg, left))
2863 return JS_FALSE;
2864 if (left->pn_op == JSOP_ARGUMENTS &&
2865 JSDOUBLE_IS_INT(right->pn_dval, slot) &&
2866 (jsuint)slot < JS_BIT(16)) {
2867 left->pn_offset = right->pn_offset = top;
2868 EMIT_UINT16_IMM_OP(JSOP_ARGSUB, (jsatomid)slot);
2869 return JS_TRUE;
2873 if (!js_EmitTree(cx, cg, left))
2874 return JS_FALSE;
2877 /* The right side of the descendant operator is implicitly quoted. */
2878 JS_ASSERT(op != JSOP_DESCENDANTS || right->pn_type != TOK_STRING ||
2879 right->pn_op == JSOP_QNAMEPART);
2880 if (!js_EmitTree(cx, cg, right))
2881 return JS_FALSE;
2882 if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
2883 return JS_FALSE;
2884 return js_Emit1(cx, cg, op) >= 0;
2887 static JSBool
2888 EmitNumberOp(JSContext *cx, jsdouble dval, JSCodeGenerator *cg)
2890 jsint ival;
2891 uint32 u;
2892 ptrdiff_t off;
2893 jsbytecode *pc;
2894 JSAtom *atom;
2895 JSAtomListElement *ale;
2897 if (JSDOUBLE_IS_INT(dval, ival) && INT_FITS_IN_JSVAL(ival)) {
2898 if (ival == 0)
2899 return js_Emit1(cx, cg, JSOP_ZERO) >= 0;
2900 if (ival == 1)
2901 return js_Emit1(cx, cg, JSOP_ONE) >= 0;
2902 if ((jsint)(int8)ival == ival)
2903 return js_Emit2(cx, cg, JSOP_INT8, (jsbytecode)(int8)ival) >= 0;
2905 u = (uint32)ival;
2906 if (u < JS_BIT(16)) {
2907 EMIT_UINT16_IMM_OP(JSOP_UINT16, u);
2908 } else if (u < JS_BIT(24)) {
2909 off = js_EmitN(cx, cg, JSOP_UINT24, 3);
2910 if (off < 0)
2911 return JS_FALSE;
2912 pc = CG_CODE(cg, off);
2913 SET_UINT24(pc, u);
2914 } else {
2915 off = js_EmitN(cx, cg, JSOP_INT32, 4);
2916 if (off < 0)
2917 return JS_FALSE;
2918 pc = CG_CODE(cg, off);
2919 SET_INT32(pc, ival);
2921 return JS_TRUE;
2924 atom = js_AtomizeDouble(cx, dval);
2925 if (!atom)
2926 return JS_FALSE;
2928 ale = cg->atomList.add(cg->compiler, atom);
2929 if (!ale)
2930 return JS_FALSE;
2931 return EmitIndexOp(cx, JSOP_DOUBLE, ALE_INDEX(ale), cg);
2934 static JSBool
2935 EmitSwitch(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
2936 JSStmtInfo *stmtInfo)
2938 JSOp switchOp;
2939 JSBool ok, hasDefault, constPropagated;
2940 ptrdiff_t top, off, defaultOffset;
2941 JSParseNode *pn2, *pn3, *pn4;
2942 uint32 caseCount, tableLength;
2943 JSParseNode **table;
2944 jsdouble d;
2945 jsint i, low, high;
2946 jsval v;
2947 JSAtom *atom;
2948 JSAtomListElement *ale;
2949 intN noteIndex;
2950 size_t switchSize, tableSize;
2951 jsbytecode *pc, *savepc;
2952 #if JS_HAS_BLOCK_SCOPE
2953 jsint count;
2954 #endif
2956 /* Try for most optimal, fall back if not dense ints, and per ECMAv2. */
2957 switchOp = JSOP_TABLESWITCH;
2958 ok = JS_TRUE;
2959 hasDefault = constPropagated = JS_FALSE;
2960 defaultOffset = -1;
2963 * If the switch contains let variables scoped by its body, model the
2964 * resulting block on the stack first, before emitting the discriminant's
2965 * bytecode (in case the discriminant contains a stack-model dependency
2966 * such as a let expression).
2968 pn2 = pn->pn_right;
2969 #if JS_HAS_BLOCK_SCOPE
2970 if (pn2->pn_type == TOK_LEXICALSCOPE) {
2972 * Push the body's block scope before discriminant code-gen for proper
2973 * static block scope linkage in case the discriminant contains a let
2974 * expression. The block's locals must lie under the discriminant on
2975 * the stack so that case-dispatch bytecodes can find the discriminant
2976 * on top of stack.
2978 count = OBJ_BLOCK_COUNT(cx, pn2->pn_objbox->object);
2979 js_PushBlockScope(cg, stmtInfo, pn2->pn_objbox->object, -1);
2980 stmtInfo->type = STMT_SWITCH;
2982 /* Emit JSOP_ENTERBLOCK before code to evaluate the discriminant. */
2983 if (!EmitEnterBlock(cx, pn2, cg))
2984 return JS_FALSE;
2987 * Pop the switch's statement info around discriminant code-gen. Note
2988 * how this leaves cg->blockChain referencing the switch's
2989 * block scope object, which is necessary for correct block parenting
2990 * in the case where the discriminant contains a let expression.
2992 cg->topStmt = stmtInfo->down;
2993 cg->topScopeStmt = stmtInfo->downScope;
2995 #ifdef __GNUC__
2996 else {
2997 count = 0;
2999 #endif
3000 #endif
3003 * Emit code for the discriminant first (or nearly first, in the case of a
3004 * switch whose body is a block scope).
3006 if (!js_EmitTree(cx, cg, pn->pn_left))
3007 return JS_FALSE;
3009 /* Switch bytecodes run from here till end of final case. */
3010 top = CG_OFFSET(cg);
3011 #if !JS_HAS_BLOCK_SCOPE
3012 js_PushStatement(cg, stmtInfo, STMT_SWITCH, top);
3013 #else
3014 if (pn2->pn_type == TOK_LC) {
3015 js_PushStatement(cg, stmtInfo, STMT_SWITCH, top);
3016 } else {
3017 /* Re-push the switch's statement info record. */
3018 cg->topStmt = cg->topScopeStmt = stmtInfo;
3020 /* Set the statement info record's idea of top. */
3021 stmtInfo->update = top;
3023 /* Advance pn2 to refer to the switch case list. */
3024 pn2 = pn2->expr();
3026 #endif
3028 caseCount = pn2->pn_count;
3029 tableLength = 0;
3030 table = NULL;
3032 if (caseCount == 0 ||
3033 (caseCount == 1 &&
3034 (hasDefault = (pn2->pn_head->pn_type == TOK_DEFAULT)))) {
3035 caseCount = 0;
3036 low = 0;
3037 high = -1;
3038 } else {
3039 #define INTMAP_LENGTH 256
3040 jsbitmap intmap_space[INTMAP_LENGTH];
3041 jsbitmap *intmap = NULL;
3042 int32 intmap_bitlen = 0;
3044 low = JSVAL_INT_MAX;
3045 high = JSVAL_INT_MIN;
3047 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3048 if (pn3->pn_type == TOK_DEFAULT) {
3049 hasDefault = JS_TRUE;
3050 caseCount--; /* one of the "cases" was the default */
3051 continue;
3054 JS_ASSERT(pn3->pn_type == TOK_CASE);
3055 if (switchOp == JSOP_CONDSWITCH)
3056 continue;
3058 pn4 = pn3->pn_left;
3059 while (pn4->pn_type == TOK_RP)
3060 pn4 = pn4->pn_kid;
3061 switch (pn4->pn_type) {
3062 case TOK_NUMBER:
3063 d = pn4->pn_dval;
3064 if (JSDOUBLE_IS_INT(d, i) && INT_FITS_IN_JSVAL(i)) {
3065 pn3->pn_val = INT_TO_JSVAL(i);
3066 } else {
3067 atom = js_AtomizeDouble(cx, d);
3068 if (!atom) {
3069 ok = JS_FALSE;
3070 goto release;
3072 pn3->pn_val = ATOM_KEY(atom);
3074 break;
3075 case TOK_STRING:
3076 pn3->pn_val = ATOM_KEY(pn4->pn_atom);
3077 break;
3078 case TOK_NAME:
3079 if (!pn4->maybeExpr()) {
3080 ok = LookupCompileTimeConstant(cx, cg, pn4->pn_atom, &v);
3081 if (!ok)
3082 goto release;
3083 if (v != JSVAL_HOLE) {
3084 if (!JSVAL_IS_PRIMITIVE(v)) {
3086 * XXX JSOP_LOOKUPSWITCH does not support const-
3087 * propagated object values, see bug 407186.
3089 switchOp = JSOP_CONDSWITCH;
3090 continue;
3092 pn3->pn_val = v;
3093 constPropagated = JS_TRUE;
3094 break;
3097 /* FALL THROUGH */
3098 case TOK_PRIMARY:
3099 if (pn4->pn_op == JSOP_TRUE) {
3100 pn3->pn_val = JSVAL_TRUE;
3101 break;
3103 if (pn4->pn_op == JSOP_FALSE) {
3104 pn3->pn_val = JSVAL_FALSE;
3105 break;
3107 /* FALL THROUGH */
3108 default:
3109 switchOp = JSOP_CONDSWITCH;
3110 continue;
3113 JS_ASSERT(JSVAL_IS_PRIMITIVE(pn3->pn_val));
3115 if (switchOp != JSOP_TABLESWITCH)
3116 continue;
3117 if (!JSVAL_IS_INT(pn3->pn_val)) {
3118 switchOp = JSOP_LOOKUPSWITCH;
3119 continue;
3121 i = JSVAL_TO_INT(pn3->pn_val);
3122 if ((jsuint)(i + (jsint)JS_BIT(15)) >= (jsuint)JS_BIT(16)) {
3123 switchOp = JSOP_LOOKUPSWITCH;
3124 continue;
3126 if (i < low)
3127 low = i;
3128 if (high < i)
3129 high = i;
3132 * Check for duplicates, which require a JSOP_LOOKUPSWITCH.
3133 * We bias i by 65536 if it's negative, and hope that's a rare
3134 * case (because it requires a malloc'd bitmap).
3136 if (i < 0)
3137 i += JS_BIT(16);
3138 if (i >= intmap_bitlen) {
3139 if (!intmap &&
3140 i < (INTMAP_LENGTH << JS_BITS_PER_WORD_LOG2)) {
3141 intmap = intmap_space;
3142 intmap_bitlen = INTMAP_LENGTH << JS_BITS_PER_WORD_LOG2;
3143 } else {
3144 /* Just grab 8K for the worst-case bitmap. */
3145 intmap_bitlen = JS_BIT(16);
3146 intmap = (jsbitmap *)
3147 cx->malloc((JS_BIT(16) >> JS_BITS_PER_WORD_LOG2)
3148 * sizeof(jsbitmap));
3149 if (!intmap) {
3150 JS_ReportOutOfMemory(cx);
3151 return JS_FALSE;
3154 memset(intmap, 0, intmap_bitlen >> JS_BITS_PER_BYTE_LOG2);
3156 if (JS_TEST_BIT(intmap, i)) {
3157 switchOp = JSOP_LOOKUPSWITCH;
3158 continue;
3160 JS_SET_BIT(intmap, i);
3163 release:
3164 if (intmap && intmap != intmap_space)
3165 cx->free(intmap);
3166 if (!ok)
3167 return JS_FALSE;
3170 * Compute table length and select lookup instead if overlarge or
3171 * more than half-sparse.
3173 if (switchOp == JSOP_TABLESWITCH) {
3174 tableLength = (uint32)(high - low + 1);
3175 if (tableLength >= JS_BIT(16) || tableLength > 2 * caseCount)
3176 switchOp = JSOP_LOOKUPSWITCH;
3177 } else if (switchOp == JSOP_LOOKUPSWITCH) {
3179 * Lookup switch supports only atom indexes below 64K limit.
3180 * Conservatively estimate the maximum possible index during
3181 * switch generation and use conditional switch if it exceeds
3182 * the limit.
3184 if (caseCount + cg->atomList.count > JS_BIT(16))
3185 switchOp = JSOP_CONDSWITCH;
3190 * Emit a note with two offsets: first tells total switch code length,
3191 * second tells offset to first JSOP_CASE if condswitch.
3193 noteIndex = js_NewSrcNote3(cx, cg, SRC_SWITCH, 0, 0);
3194 if (noteIndex < 0)
3195 return JS_FALSE;
3197 if (switchOp == JSOP_CONDSWITCH) {
3199 * 0 bytes of immediate for unoptimized ECMAv2 switch.
3201 switchSize = 0;
3202 } else if (switchOp == JSOP_TABLESWITCH) {
3204 * 3 offsets (len, low, high) before the table, 1 per entry.
3206 switchSize = (size_t)(JUMP_OFFSET_LEN * (3 + tableLength));
3207 } else {
3209 * JSOP_LOOKUPSWITCH:
3210 * 1 offset (len) and 1 atom index (npairs) before the table,
3211 * 1 atom index and 1 jump offset per entry.
3213 switchSize = (size_t)(JUMP_OFFSET_LEN + INDEX_LEN +
3214 (INDEX_LEN + JUMP_OFFSET_LEN) * caseCount);
3218 * Emit switchOp followed by switchSize bytes of jump or lookup table.
3220 * If switchOp is JSOP_LOOKUPSWITCH or JSOP_TABLESWITCH, it is crucial
3221 * to emit the immediate operand(s) by which bytecode readers such as
3222 * BuildSpanDepTable discover the length of the switch opcode *before*
3223 * calling js_SetJumpOffset (which may call BuildSpanDepTable). It's
3224 * also important to zero all unknown jump offset immediate operands,
3225 * so they can be converted to span dependencies with null targets to
3226 * be computed later (js_EmitN zeros switchSize bytes after switchOp).
3228 if (js_EmitN(cx, cg, switchOp, switchSize) < 0)
3229 return JS_FALSE;
3231 off = -1;
3232 if (switchOp == JSOP_CONDSWITCH) {
3233 intN caseNoteIndex = -1;
3234 JSBool beforeCases = JS_TRUE;
3236 /* Emit code for evaluating cases and jumping to case statements. */
3237 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3238 pn4 = pn3->pn_left;
3239 if (pn4 && !js_EmitTree(cx, cg, pn4))
3240 return JS_FALSE;
3241 if (caseNoteIndex >= 0) {
3242 /* off is the previous JSOP_CASE's bytecode offset. */
3243 if (!js_SetSrcNoteOffset(cx, cg, (uintN)caseNoteIndex, 0,
3244 CG_OFFSET(cg) - off)) {
3245 return JS_FALSE;
3248 if (!pn4) {
3249 JS_ASSERT(pn3->pn_type == TOK_DEFAULT);
3250 continue;
3252 caseNoteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
3253 if (caseNoteIndex < 0)
3254 return JS_FALSE;
3255 off = EmitJump(cx, cg, JSOP_CASE, 0);
3256 if (off < 0)
3257 return JS_FALSE;
3258 pn3->pn_offset = off;
3259 if (beforeCases) {
3260 uintN noteCount, noteCountDelta;
3262 /* Switch note's second offset is to first JSOP_CASE. */
3263 noteCount = CG_NOTE_COUNT(cg);
3264 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 1,
3265 off - top)) {
3266 return JS_FALSE;
3268 noteCountDelta = CG_NOTE_COUNT(cg) - noteCount;
3269 if (noteCountDelta != 0)
3270 caseNoteIndex += noteCountDelta;
3271 beforeCases = JS_FALSE;
3276 * If we didn't have an explicit default (which could fall in between
3277 * cases, preventing us from fusing this js_SetSrcNoteOffset with the
3278 * call in the loop above), link the last case to the implicit default
3279 * for the decompiler.
3281 if (!hasDefault &&
3282 caseNoteIndex >= 0 &&
3283 !js_SetSrcNoteOffset(cx, cg, (uintN)caseNoteIndex, 0,
3284 CG_OFFSET(cg) - off)) {
3285 return JS_FALSE;
3288 /* Emit default even if no explicit default statement. */
3289 defaultOffset = EmitJump(cx, cg, JSOP_DEFAULT, 0);
3290 if (defaultOffset < 0)
3291 return JS_FALSE;
3292 } else {
3293 pc = CG_CODE(cg, top + JUMP_OFFSET_LEN);
3295 if (switchOp == JSOP_TABLESWITCH) {
3296 /* Fill in switch bounds, which we know fit in 16-bit offsets. */
3297 SET_JUMP_OFFSET(pc, low);
3298 pc += JUMP_OFFSET_LEN;
3299 SET_JUMP_OFFSET(pc, high);
3300 pc += JUMP_OFFSET_LEN;
3303 * Use malloc to avoid arena bloat for programs with many switches.
3304 * We free table if non-null at label out, so all control flow must
3305 * exit this function through goto out or goto bad.
3307 if (tableLength != 0) {
3308 tableSize = (size_t)tableLength * sizeof *table;
3309 table = (JSParseNode **) cx->malloc(tableSize);
3310 if (!table)
3311 return JS_FALSE;
3312 memset(table, 0, tableSize);
3313 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3314 if (pn3->pn_type == TOK_DEFAULT)
3315 continue;
3316 i = JSVAL_TO_INT(pn3->pn_val);
3317 i -= low;
3318 JS_ASSERT((uint32)i < tableLength);
3319 table[i] = pn3;
3322 } else {
3323 JS_ASSERT(switchOp == JSOP_LOOKUPSWITCH);
3325 /* Fill in the number of cases. */
3326 SET_INDEX(pc, caseCount);
3327 pc += INDEX_LEN;
3331 * After this point, all control flow involving JSOP_TABLESWITCH
3332 * must set ok and goto out to exit this function. To keep things
3333 * simple, all switchOp cases exit that way.
3335 MUST_FLOW_THROUGH("out");
3336 if (cg->spanDeps) {
3338 * We have already generated at least one big jump so we must
3339 * explicitly add span dependencies for the switch jumps. When
3340 * called below, js_SetJumpOffset can only do it when patching
3341 * the first big jump or when cg->spanDeps is null.
3343 if (!AddSwitchSpanDeps(cx, cg, CG_CODE(cg, top)))
3344 goto bad;
3347 if (constPropagated) {
3349 * Skip switchOp, as we are not setting jump offsets in the two
3350 * for loops below. We'll restore CG_NEXT(cg) from savepc after,
3351 * unless there was an error.
3353 savepc = CG_NEXT(cg);
3354 CG_NEXT(cg) = pc + 1;
3355 if (switchOp == JSOP_TABLESWITCH) {
3356 for (i = 0; i < (jsint)tableLength; i++) {
3357 pn3 = table[i];
3358 if (pn3 &&
3359 (pn4 = pn3->pn_left) != NULL &&
3360 pn4->pn_type == TOK_NAME) {
3361 /* Note a propagated constant with the const's name. */
3362 JS_ASSERT(!pn4->maybeExpr());
3363 ale = cg->atomList.add(cg->compiler, pn4->pn_atom);
3364 if (!ale)
3365 goto bad;
3366 CG_NEXT(cg) = pc;
3367 if (js_NewSrcNote2(cx, cg, SRC_LABEL, (ptrdiff_t)
3368 ALE_INDEX(ale)) < 0) {
3369 goto bad;
3372 pc += JUMP_OFFSET_LEN;
3374 } else {
3375 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3376 pn4 = pn3->pn_left;
3377 if (pn4 && pn4->pn_type == TOK_NAME) {
3378 /* Note a propagated constant with the const's name. */
3379 JS_ASSERT(!pn4->maybeExpr());
3380 ale = cg->atomList.add(cg->compiler, pn4->pn_atom);
3381 if (!ale)
3382 goto bad;
3383 CG_NEXT(cg) = pc;
3384 if (js_NewSrcNote2(cx, cg, SRC_LABEL, (ptrdiff_t)
3385 ALE_INDEX(ale)) < 0) {
3386 goto bad;
3389 pc += INDEX_LEN + JUMP_OFFSET_LEN;
3392 CG_NEXT(cg) = savepc;
3396 /* Emit code for each case's statements, copying pn_offset up to pn3. */
3397 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3398 if (switchOp == JSOP_CONDSWITCH && pn3->pn_type != TOK_DEFAULT)
3399 CHECK_AND_SET_JUMP_OFFSET_AT_CUSTOM(cx, cg, pn3->pn_offset, goto bad);
3400 pn4 = pn3->pn_right;
3401 ok = js_EmitTree(cx, cg, pn4);
3402 if (!ok)
3403 goto out;
3404 pn3->pn_offset = pn4->pn_offset;
3405 if (pn3->pn_type == TOK_DEFAULT)
3406 off = pn3->pn_offset - top;
3409 if (!hasDefault) {
3410 /* If no default case, offset for default is to end of switch. */
3411 off = CG_OFFSET(cg) - top;
3414 /* We better have set "off" by now. */
3415 JS_ASSERT(off != -1);
3417 /* Set the default offset (to end of switch if no default). */
3418 if (switchOp == JSOP_CONDSWITCH) {
3419 pc = NULL;
3420 JS_ASSERT(defaultOffset != -1);
3421 ok = js_SetJumpOffset(cx, cg, CG_CODE(cg, defaultOffset),
3422 off - (defaultOffset - top));
3423 if (!ok)
3424 goto out;
3425 } else {
3426 pc = CG_CODE(cg, top);
3427 ok = js_SetJumpOffset(cx, cg, pc, off);
3428 if (!ok)
3429 goto out;
3430 pc += JUMP_OFFSET_LEN;
3433 /* Set the SRC_SWITCH note's offset operand to tell end of switch. */
3434 off = CG_OFFSET(cg) - top;
3435 ok = js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, off);
3436 if (!ok)
3437 goto out;
3439 if (switchOp == JSOP_TABLESWITCH) {
3440 /* Skip over the already-initialized switch bounds. */
3441 pc += 2 * JUMP_OFFSET_LEN;
3443 /* Fill in the jump table, if there is one. */
3444 for (i = 0; i < (jsint)tableLength; i++) {
3445 pn3 = table[i];
3446 off = pn3 ? pn3->pn_offset - top : 0;
3447 ok = js_SetJumpOffset(cx, cg, pc, off);
3448 if (!ok)
3449 goto out;
3450 pc += JUMP_OFFSET_LEN;
3452 } else if (switchOp == JSOP_LOOKUPSWITCH) {
3453 /* Skip over the already-initialized number of cases. */
3454 pc += INDEX_LEN;
3456 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3457 if (pn3->pn_type == TOK_DEFAULT)
3458 continue;
3459 if (!js_AtomizePrimitiveValue(cx, pn3->pn_val, &atom))
3460 goto bad;
3461 ale = cg->atomList.add(cg->compiler, atom);
3462 if (!ale)
3463 goto bad;
3464 SET_INDEX(pc, ALE_INDEX(ale));
3465 pc += INDEX_LEN;
3467 off = pn3->pn_offset - top;
3468 ok = js_SetJumpOffset(cx, cg, pc, off);
3469 if (!ok)
3470 goto out;
3471 pc += JUMP_OFFSET_LEN;
3475 out:
3476 if (table)
3477 cx->free(table);
3478 if (ok) {
3479 ok = js_PopStatementCG(cx, cg);
3481 #if JS_HAS_BLOCK_SCOPE
3482 if (ok && pn->pn_right->pn_type == TOK_LEXICALSCOPE)
3483 EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK, count);
3484 #endif
3486 return ok;
3488 bad:
3489 ok = JS_FALSE;
3490 goto out;
/*
 * Emit bytecode for a complete function body.  For generators, JSOP_GENERATOR
 * is planted as the very first prolog instruction; then the body tree is
 * emitted, terminated by JSOP_STOP, and the result is handed to
 * js_NewScriptFromCG (which presumably builds the JSScript -- defined
 * elsewhere; confirm).  Returns JS_FALSE on any emission failure.
 */
3493 JSBool
3494 js_EmitFunctionScript(JSContext *cx, JSCodeGenerator *cg, JSParseNode *body)
3496 if (cg->flags & TCF_FUN_IS_GENERATOR) {
3497 /* JSOP_GENERATOR must be the first instruction. */
3498 CG_SWITCH_TO_PROLOG(cg);
3499 JS_ASSERT(CG_NEXT(cg) == CG_BASE(cg));
3500 if (js_Emit1(cx, cg, JSOP_GENERATOR) < 0)
3501 return JS_FALSE;
3502 CG_SWITCH_TO_MAIN(cg);
/* Short-circuit: any failed step below yields JS_FALSE. */
3505 return js_EmitTree(cx, cg, body) &&
3506 js_Emit1(cx, cg, JSOP_STOP) >= 0 &&
3507 js_NewScriptFromCG(cx, cg);
/*
 * NB: on a failed note allocation this macro executes 'return JS_FALSE' from
 * the *enclosing* function, so it may be used only where that early return is
 * safe.  See UpdateLineNumberNotes below for the out-of-line wrapper.
 */
3510 /* A macro for inlining at the top of js_EmitTree (whence it came). */
3511 #define UPDATE_LINE_NUMBER_NOTES(cx, cg, line) \
3512 JS_BEGIN_MACRO \
3513 uintN line_ = (line); \
3514 uintN delta_ = line_ - CG_CURRENT_LINE(cg); \
3515 if (delta_ != 0) { \
3516 /* \
3517 * Encode any change in the current source line number by using \
3518 * either several SRC_NEWLINE notes or just one SRC_SETLINE note, \
3519 * whichever consumes less space. \
3521 * NB: We handle backward line number deltas (possible with for \
3522 * loops where the update part is emitted after the body, but its \
3523 * line number is <= any line number in the body) here by letting \
3524 * unsigned delta_ wrap to a very large number, which triggers a \
3525 * SRC_SETLINE. \
3526 */ \
3527 CG_CURRENT_LINE(cg) = line_; \
3528 if (delta_ >= (uintN)(2 + ((line_ > SN_3BYTE_OFFSET_MASK)<<1))) { \
3529 if (js_NewSrcNote2(cx, cg, SRC_SETLINE, (ptrdiff_t)line_) < 0)\
3530 return JS_FALSE; \
3531 } else { \
3532 do { \
3533 if (js_NewSrcNote(cx, cg, SRC_NEWLINE) < 0) \
3534 return JS_FALSE; \
3535 } while (--delta_ != 0); \
3538 JS_END_MACRO
3540 /* A function, so that we avoid macro-bloating all the other callsites. */
/*
 * Wrapper over UPDATE_LINE_NUMBER_NOTES: the macro performs the early
 * 'return JS_FALSE' on failure, so reaching the tail means success.
 */
3541 static JSBool
3542 UpdateLineNumberNotes(JSContext *cx, JSCodeGenerator *cg, uintN line)
3544 UPDATE_LINE_NUMBER_NOTES(cx, cg, line);
3545 return JS_TRUE;
/*
 * If pn is bound to an upvar/frame slot (pn_cookie set), reuse that slot as
 * the index; otherwise intern pn's atom in cg->atomList.  For JOF_ATOM-typed
 * ops emitted at top level or inside a heavyweight function, also emit
 * prologOp with the index into the script prolog so the name is predefined
 * before main code runs.  On success *result (if non-null) receives the
 * chosen index.  Returns JS_FALSE on OOM.
 */
3548 static JSBool
3549 MaybeEmitVarDecl(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3550 JSParseNode *pn, jsatomid *result)
3552 jsatomid atomIndex;
3553 JSAtomListElement *ale;
3555 if (pn->pn_cookie != FREE_UPVAR_COOKIE) {
3556 atomIndex = (jsatomid) UPVAR_FRAME_SLOT(pn->pn_cookie);
3557 } else {
3558 ale = cg->atomList.add(cg->compiler, pn->pn_atom);
3559 if (!ale)
3560 return JS_FALSE;
3561 atomIndex = ALE_INDEX(ale);
/* Prolog declaration only for atom-addressed names outside light functions. */
3564 if (JOF_OPTYPE(pn->pn_op) == JOF_ATOM &&
3565 (!(cg->flags & TCF_IN_FUNCTION) || (cg->flags & TCF_FUN_HEAVYWEIGHT))) {
3566 CG_SWITCH_TO_PROLOG(cg);
3567 if (!UpdateLineNumberNotes(cx, cg, pn->pn_pos.begin.lineno))
3568 return JS_FALSE;
3569 EMIT_INDEX_OP(prologOp, atomIndex);
3570 CG_SWITCH_TO_MAIN(cg);
3573 if (result)
3574 *result = atomIndex;
3575 return JS_TRUE;
3578 #if JS_HAS_DESTRUCTURING
/* Signature shared by EmitDestructuringDecl/EmitDestructuringDecls so the
 * recursive walker below can dispatch on leaf vs. nested pattern nodes. */
3580 typedef JSBool
3581 (*DestructuringDeclEmitter)(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3582 JSParseNode *pn);
/*
 * Leaf case: bind a single TOK_NAME in a destructuring pattern to its slot
 * and emit any needed prolog var declaration (no destructuring ops here).
 */
3584 static JSBool
3585 EmitDestructuringDecl(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3586 JSParseNode *pn)
3588 JS_ASSERT(pn->pn_type == TOK_NAME);
3589 if (!BindNameToSlot(cx, cg, pn))
3590 return JS_FALSE;
3592 JS_ASSERT(PN_OP(pn) != JSOP_ARGUMENTS && PN_OP(pn) != JSOP_CALLEE);
3593 return MaybeEmitVarDecl(cx, cg, prologOp, pn, NULL);
/*
 * Recursively walk an array (TOK_RB) or object (TOK_RC) destructuring
 * pattern, emitting declaration ops for every leaf TOK_NAME via
 * EmitDestructuringDecl and recursing into nested patterns.
 */
3596 static JSBool
3597 EmitDestructuringDecls(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3598 JSParseNode *pn)
3600 JSParseNode *pn2, *pn3;
3601 DestructuringDeclEmitter emitter;
3603 if (pn->pn_type == TOK_RB) {
3604 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
/* TOK_COMMA here is an array hole -- nothing to declare. */
3605 if (pn2->pn_type == TOK_COMMA)
3606 continue;
3607 emitter = (pn2->pn_type == TOK_NAME)
3608 ? EmitDestructuringDecl
3609 : EmitDestructuringDecls;
3610 if (!emitter(cx, cg, prologOp, pn2))
3611 return JS_FALSE;
3613 } else {
3614 JS_ASSERT(pn->pn_type == TOK_RC);
/* Object pattern: the declared name is the right side of each colon. */
3615 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
3616 pn3 = pn2->pn_right;
3617 emitter = (pn3->pn_type == TOK_NAME)
3618 ? EmitDestructuringDecl
3619 : EmitDestructuringDecls;
3620 if (!emitter(cx, cg, prologOp, pn3))
3621 return JS_FALSE;
3624 return JS_TRUE;
3627 static JSBool
3628 EmitDestructuringOpsHelper(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn);
/*
 * Emit the store for one destructuring lvalue, consuming the matched value
 * on top of the stack.  Nested patterns recurse through
 * EmitDestructuringOpsHelper; simple names dispatch on their bound opcode.
 */
3630 static JSBool
3631 EmitDestructuringLHS(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
3633 jsuint slot;
3635 /* Skip any parenthesization. */
3636 while (pn->pn_type == TOK_RP)
3637 pn = pn->pn_kid;
3640 * Now emit the lvalue opcode sequence. If the lvalue is a nested
3641 * destructuring initialiser-form, call ourselves to handle it, then
3642 * pop the matched value. Otherwise emit an lvalue bytecode sequence
3643 * ending with a JSOP_ENUMELEM or equivalent op.
3645 if (pn->pn_type == TOK_RB || pn->pn_type == TOK_RC) {
3646 if (!EmitDestructuringOpsHelper(cx, cg, pn))
3647 return JS_FALSE;
3648 if (js_Emit1(cx, cg, JSOP_POP) < 0)
3649 return JS_FALSE;
3650 } else {
3651 if (pn->pn_type == TOK_NAME) {
3652 if (!BindNameToSlot(cx, cg, pn))
3653 return JS_FALSE;
/* An uninitialized const cannot be assigned: just discard the value. */
3654 if (pn->isConst() && !pn->isInitialized())
3655 return js_Emit1(cx, cg, JSOP_POP) >= 0;
3658 switch (pn->pn_op) {
3659 case JSOP_SETNAME:
3661 * NB: pn is a PN_NAME node, not a PN_BINARY. Nevertheless,
3662 * we want to emit JSOP_ENUMELEM, which has format JOF_ELEM.
3663 * So here and for JSOP_ENUMCONSTELEM, we use EmitElemOp.
3665 if (!EmitElemOp(cx, pn, JSOP_ENUMELEM, cg))
3666 return JS_FALSE;
3667 break;
3669 case JSOP_SETCONST:
3670 if (!EmitElemOp(cx, pn, JSOP_ENUMCONSTELEM, cg))
3671 return JS_FALSE;
3672 break;
3674 case JSOP_SETLOCAL:
/* SETLOCALPOP stores and pops in one op -- no separate JSOP_POP needed. */
3675 slot = (jsuint) pn->pn_cookie;
3676 EMIT_UINT16_IMM_OP(JSOP_SETLOCALPOP, slot);
3677 break;
3679 case JSOP_SETARG:
3680 case JSOP_SETGVAR:
3681 slot = (jsuint) pn->pn_cookie;
3682 EMIT_UINT16_IMM_OP(PN_OP(pn), slot);
3683 if (js_Emit1(cx, cg, JSOP_POP) < 0)
3684 return JS_FALSE;
3685 break;
3687 default:
3688 #if JS_HAS_LVALUE_RETURN || JS_HAS_XML_SUPPORT
/* General lvalue expression: emit it, note the pc base, then ENUMELEM. */
3690 ptrdiff_t top;
3692 top = CG_OFFSET(cg);
3693 if (!js_EmitTree(cx, cg, pn))
3694 return JS_FALSE;
3695 if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
3696 return JS_FALSE;
3697 if (js_Emit1(cx, cg, JSOP_ENUMELEM) < 0)
3698 return JS_FALSE;
3699 break;
3701 #endif
3702 case JSOP_ENUMELEM:
3703 JS_ASSERT(0);
3707 return JS_TRUE;
3711 * Recursive helper for EmitDestructuringOps.
3713 * Given a value to destructure on the stack, walk over an object or array
3714 * initialiser at pn, emitting bytecodes to match property values and store
3715 * them in the lvalues identified by the matched property names.
/*
 * See the block comment above: destructures the stack-top value against the
 * TOK_RB/TOK_RC pattern pn.  Net stack effect is zero; the DEBUG-only
 * stackDepth asserts below check that invariant after each element.
 */
3717 static JSBool
3718 EmitDestructuringOpsHelper(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
3720 jsuint index;
3721 JSParseNode *pn2, *pn3;
3722 JSBool doElemOp;
3724 #ifdef DEBUG
3725 intN stackDepth = cg->stackDepth;
3726 JS_ASSERT(stackDepth != 0);
3727 JS_ASSERT(pn->pn_arity == PN_LIST);
3728 JS_ASSERT(pn->pn_type == TOK_RB || pn->pn_type == TOK_RC);
3729 #endif
3731 if (pn->pn_count == 0) {
3732 /* Emit a DUP;POP sequence for the decompiler. */
3733 return js_Emit1(cx, cg, JSOP_DUP) >= 0 &&
3734 js_Emit1(cx, cg, JSOP_POP) >= 0;
3737 index = 0;
3738 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
3740 * Duplicate the value being destructured to use as a reference base.
3741 * If dup is not the first one, annotate it for the decompiler.
3743 if (pn2 != pn->pn_head && js_NewSrcNote(cx, cg, SRC_CONTINUE) < 0)
3744 return JS_FALSE;
3745 if (js_Emit1(cx, cg, JSOP_DUP) < 0)
3746 return JS_FALSE;
3749 * Now push the property name currently being matched, which is either
3750 * the array initialiser's current index, or the current property name
3751 * "label" on the left of a colon in the object initialiser. Set pn3
3752 * to the lvalue node, which is in the value-initializing position.
3754 doElemOp = JS_TRUE;
3755 if (pn->pn_type == TOK_RB) {
3756 if (!EmitNumberOp(cx, index, cg))
3757 return JS_FALSE;
3758 pn3 = pn2;
3759 } else {
3760 JS_ASSERT(pn->pn_type == TOK_RC);
3761 JS_ASSERT(pn2->pn_type == TOK_COLON);
3762 pn3 = pn2->pn_left;
3763 if (pn3->pn_type == TOK_NUMBER) {
3765 * If we are emitting an object destructuring initialiser,
3766 * annotate the index op with SRC_INITPROP so we know we are
3767 * not decompiling an array initialiser.
3769 if (js_NewSrcNote(cx, cg, SRC_INITPROP) < 0)
3770 return JS_FALSE;
3771 if (!EmitNumberOp(cx, pn3->pn_dval, cg))
3772 return JS_FALSE;
3773 } else {
/* Named property: GETPROP fetches the value directly, so skip GETELEM. */
3774 JS_ASSERT(pn3->pn_type == TOK_STRING ||
3775 pn3->pn_type == TOK_NAME);
3776 if (!EmitAtomOp(cx, pn3, JSOP_GETPROP, cg))
3777 return JS_FALSE;
3778 doElemOp = JS_FALSE;
3780 pn3 = pn2->pn_right;
3783 if (doElemOp) {
3785 * Ok, get the value of the matching property name. This leaves
3786 * that value on top of the value being destructured, so the stack
3787 * is one deeper than when we started.
3789 if (js_Emit1(cx, cg, JSOP_GETELEM) < 0)
3790 return JS_FALSE;
3791 JS_ASSERT(cg->stackDepth == stackDepth + 1);
3794 /* Nullary comma node makes a hole in the array destructurer. */
3795 if (pn3->pn_type == TOK_COMMA && pn3->pn_arity == PN_NULLARY) {
3796 JS_ASSERT(pn->pn_type == TOK_RB);
3797 JS_ASSERT(pn2 == pn3);
3798 if (js_Emit1(cx, cg, JSOP_POP) < 0)
3799 return JS_FALSE;
3800 } else {
3801 if (!EmitDestructuringLHS(cx, cg, pn3))
3802 return JS_FALSE;
3805 JS_ASSERT(cg->stackDepth == stackDepth);
3806 ++index;
3809 return JS_TRUE;
3812 static ptrdiff_t
3813 OpToDeclType(JSOp op)
3815 switch (op) {
3816 case JSOP_NOP:
3817 return SRC_DECL_LET;
3818 case JSOP_DEFCONST:
3819 return SRC_DECL_CONST;
3820 case JSOP_DEFVAR:
3821 return SRC_DECL_VAR;
3822 default:
3823 return SRC_DECL_NONE;
/*
 * Entry point for emitting destructuring assignment code: plants a
 * SRC_DESTRUCT note (typed by the declaring opcode) and then delegates to
 * the recursive helper above.
 */
3827 static JSBool
3828 EmitDestructuringOps(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3829 JSParseNode *pn)
3832 * If we're called from a variable declaration, help the decompiler by
3833 * annotating the first JSOP_DUP that EmitDestructuringOpsHelper emits.
3834 * If the destructuring initialiser is empty, our helper will emit a
3835 * JSOP_DUP followed by a JSOP_POP for the decompiler.
3837 if (js_NewSrcNote2(cx, cg, SRC_DESTRUCT, OpToDeclType(prologOp)) < 0)
3838 return JS_FALSE;
3841 * Call our recursive helper to emit the destructuring assignments and
3842 * related stack manipulations.
3844 return EmitDestructuringOpsHelper(cx, cg, pn);
/*
 * Emit an optimized [a, b] = [x, y] group assignment: push every rhs value,
 * then for each lhs element fetch its stacked value with JSOP_GETLOCAL and
 * store it via EmitDestructuringLHS, finally popping all pushed slots with
 * JSOP_POPN and restoring the recorded stack depth.
 */
3847 static JSBool
3848 EmitGroupAssignment(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3849 JSParseNode *lhs, JSParseNode *rhs)
3851 jsuint depth, limit, i, nslots;
3852 JSParseNode *pn;
3854 depth = limit = (uintN) cg->stackDepth;
3855 for (pn = rhs->pn_head; pn; pn = pn->pn_next) {
/* Stack slot immediates are 16 bits wide; refuse anything larger. */
3856 if (limit == JS_BIT(16)) {
3857 js_ReportCompileErrorNumber(cx, CG_TS(cg), rhs, JSREPORT_ERROR,
3858 JSMSG_ARRAY_INIT_TOO_BIG);
3859 return JS_FALSE;
3862 /* MaybeEmitGroupAssignment won't call us if rhs is holey. */
3863 JS_ASSERT(pn->pn_type != TOK_COMMA);
3864 if (!js_EmitTree(cx, cg, pn))
3865 return JS_FALSE;
3866 ++limit;
3869 if (js_NewSrcNote2(cx, cg, SRC_GROUPASSIGN, OpToDeclType(prologOp)) < 0)
3870 return JS_FALSE;
3872 i = depth;
3873 for (pn = lhs->pn_head; pn; pn = pn->pn_next, ++i) {
3874 /* MaybeEmitGroupAssignment requires lhs->pn_count <= rhs->pn_count. */
3875 JS_ASSERT(i < limit);
3876 jsint slot = AdjustBlockSlot(cx, cg, i);
3877 if (slot < 0)
3878 return JS_FALSE;
3879 EMIT_UINT16_IMM_OP(JSOP_GETLOCAL, slot);
/* A nullary comma (elision) on the lhs just discards the fetched value. */
3881 if (pn->pn_type == TOK_COMMA && pn->pn_arity == PN_NULLARY) {
3882 if (js_Emit1(cx, cg, JSOP_POP) < 0)
3883 return JS_FALSE;
3884 } else {
3885 if (!EmitDestructuringLHS(cx, cg, pn))
3886 return JS_FALSE;
3890 nslots = limit - depth;
3891 EMIT_UINT16_IMM_OP(JSOP_POPN, nslots);
3892 cg->stackDepth = (uintN) depth;
3893 return JS_TRUE;
3897 * Helper called with pop out param initialized to a JSOP_POP* opcode. If we
3898 * can emit a group assignment sequence, which results in 0 stack depth delta,
3899 * we set *pop to JSOP_NOP so callers can veto emitting pn followed by a pop.
/*
 * See the comment above: try the group-assignment fast path for
 * 'array = array' assignments; on success *pop becomes JSOP_NOP so the
 * caller skips its own pop.  Returns JS_FALSE only on emission failure.
 */
3901 static JSBool
3902 MaybeEmitGroupAssignment(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3903 JSParseNode *pn, JSOp *pop)
3905 JSParseNode *lhs, *rhs;
3907 JS_ASSERT(pn->pn_type == TOK_ASSIGN);
3908 JS_ASSERT(*pop == JSOP_POP || *pop == JSOP_POPV);
3909 lhs = pn->pn_left;
3910 rhs = pn->pn_right;
/*
 * Fast path only when both sides are array literals, the rhs has no holes,
 * and the lhs does not need more values than the rhs supplies.
 */
3911 if (lhs->pn_type == TOK_RB && rhs->pn_type == TOK_RB &&
3912 !(rhs->pn_xflags & PNX_HOLEY) &&
3913 lhs->pn_count <= rhs->pn_count) {
3914 if (!EmitGroupAssignment(cx, cg, prologOp, lhs, rhs))
3915 return JS_FALSE;
3916 *pop = JSOP_NOP;
3918 return JS_TRUE;
3921 #endif /* JS_HAS_DESTRUCTURING */
/*
 * Emit code for a var/const/let declaration list pn.  Handles plain names,
 * initialized names, and (under JS_HAS_DESTRUCTURING) destructuring
 * patterns, including the group-assignment fast path.  When inLetHead is
 * true, *headNoteIndex receives the index of a SRC_DECL note emitted on the
 * trailing pop so the let-head caller can patch it; otherwise it stays -1.
 * For for-in heads (PNX_FORINVAR) only binding ops are emitted -- the
 * TOK_FOR/TOK_IN case in js_EmitTree finishes the job (per the comments
 * below).  Returns JS_FALSE on any emission failure.
 */
3923 static JSBool
3924 EmitVariables(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
3925 JSBool inLetHead, ptrdiff_t *headNoteIndex)
3927 bool let, forInVar, first;
3928 #if JS_HAS_BLOCK_SCOPE
3929 bool forInLet, popScope;
3930 JSStmtInfo *stmt, *scopeStmt;
3931 #endif
3932 ptrdiff_t off, noteIndex, tmp;
3933 JSParseNode *pn2, *pn3, *next;
3934 JSOp op;
3935 jsatomid atomIndex;
3936 uintN oldflags;
3938 /* Default in case of JS_HAS_BLOCK_SCOPE early return, below. */
3939 *headNoteIndex = -1;
3942 * Let blocks and expressions have a parenthesized head in which the new
3943 * scope is not yet open. Initializer evaluation uses the parent node's
3944 * lexical scope. If popScope is true below, then we hide the top lexical
3945 * block from any calls to BindNameToSlot hiding in pn2->pn_expr so that
3946 * it won't find any names in the new let block.
3948 * The same goes for let declarations in the head of any kind of for loop.
3949 * Unlike a let declaration 'let x = i' within a block, where x is hoisted
3950 * to the start of the block, a 'for (let x = i...) ...' loop evaluates i
3951 * in the containing scope, and puts x in the loop body's scope.
3953 let = (pn->pn_op == JSOP_NOP);
3954 forInVar = (pn->pn_xflags & PNX_FORINVAR) != 0;
3955 #if JS_HAS_BLOCK_SCOPE
3956 forInLet = let && forInVar;
3957 popScope = (inLetHead || (let && (cg->flags & TCF_IN_FOR_INIT)));
3958 if (popScope) {
3959 stmt = cg->topStmt;
3960 scopeStmt = cg->topScopeStmt;
3962 # ifdef __GNUC__
3963 else stmt = scopeStmt = NULL; /* quell GCC overwarning */
3964 # endif
3965 JS_ASSERT(!popScope || let);
3966 #endif
3968 off = noteIndex = -1;
3969 for (pn2 = pn->pn_head; ; pn2 = next) {
3970 first = pn2 == pn->pn_head;
3971 next = pn2->pn_next;
3973 if (pn2->pn_type != TOK_NAME) {
3974 #if JS_HAS_DESTRUCTURING
3975 if (pn2->pn_type == TOK_RB || pn2->pn_type == TOK_RC) {
3977 * Emit variable binding ops, but not destructuring ops.
3978 * The parser (see Variables, jsparse.c) has ensured that
3979 * our caller will be the TOK_FOR/TOK_IN case in js_EmitTree,
3980 * and that case will emit the destructuring code only after
3981 * emitting an enumerating opcode and a branch that tests
3982 * whether the enumeration ended.
3984 JS_ASSERT(forInVar);
3985 JS_ASSERT(pn->pn_count == 1);
3986 if (!EmitDestructuringDecls(cx, cg, PN_OP(pn), pn2))
3987 return JS_FALSE;
3988 break;
3990 #endif
3993 * A destructuring initialiser assignment preceded by var will
3994 * never occur to the left of 'in' in a for-in loop. As with 'for
3995 * (var x = i in o)...', this will cause the entire 'var [a, b] =
3996 * i' to be hoisted out of the loop.
3998 JS_ASSERT(pn2->pn_type == TOK_ASSIGN);
3999 JS_ASSERT(!forInVar);
4002 * To allow the front end to rewrite var f = x; as f = x; when a
4003 * function f(){} precedes the var, detect simple name assignment
4004 * here and initialize the name.
4006 #if !JS_HAS_DESTRUCTURING
4007 JS_ASSERT(pn2->pn_left->pn_type == TOK_NAME);
4008 #else
4009 if (pn2->pn_left->pn_type == TOK_NAME)
4010 #endif
4012 pn3 = pn2->pn_right;
4013 pn2 = pn2->pn_left;
4014 goto do_name;
4017 #if JS_HAS_DESTRUCTURING
4018 if (pn->pn_count == 1) {
4020 * If this is the only destructuring assignment in the list,
4021 * try to optimize to a group assignment. If we're in a let
4022 * head, pass JSOP_POP rather than the pseudo-prolog JSOP_NOP
4023 * in pn->pn_op, to suppress a second (and misplaced) 'let'.
4025 JS_ASSERT(noteIndex < 0 && !pn2->pn_next);
4026 op = JSOP_POP;
4027 if (!MaybeEmitGroupAssignment(cx, cg,
4028 inLetHead ? JSOP_POP : PN_OP(pn),
4029 pn2, &op)) {
4030 return JS_FALSE;
/* op == JSOP_NOP signals the group-assignment fast path was taken. */
4032 if (op == JSOP_NOP) {
4033 pn->pn_xflags = (pn->pn_xflags & ~PNX_POPVAR) | PNX_GROUPINIT;
4034 break;
4038 pn3 = pn2->pn_left;
4039 if (!EmitDestructuringDecls(cx, cg, PN_OP(pn), pn3))
4040 return JS_FALSE;
4042 if (!js_EmitTree(cx, cg, pn2->pn_right))
4043 return JS_FALSE;
4046 * Veto pn->pn_op if inLetHead to avoid emitting a SRC_DESTRUCT
4047 * that's redundant with respect to the SRC_DECL/SRC_DECL_LET that
4048 * we will emit at the bottom of this function.
4050 if (!EmitDestructuringOps(cx, cg,
4051 inLetHead ? JSOP_POP : PN_OP(pn),
4052 pn3)) {
4053 return JS_FALSE;
4055 goto emit_note_pop;
4056 #endif
4060 * Load initializer early to share code above that jumps to do_name.
4061 * NB: if this var redeclares an existing binding, then pn2 is linked
4062 * on its definition's use-chain and pn_expr has been overlayed with
4063 * pn_lexdef.
4065 pn3 = pn2->maybeExpr();
4067 do_name:
4068 if (!BindNameToSlot(cx, cg, pn2))
4069 return JS_FALSE;
4071 op = PN_OP(pn2);
4072 if (op == JSOP_ARGUMENTS) {
4073 /* JSOP_ARGUMENTS => no initializer */
4074 JS_ASSERT(!pn3 && !let);
4075 pn3 = NULL;
4076 #ifdef __GNUC__
4077 atomIndex = 0; /* quell GCC overwarning */
4078 #endif
4079 } else {
4080 JS_ASSERT(op != JSOP_CALLEE);
4081 JS_ASSERT(pn2->pn_cookie != FREE_UPVAR_COOKIE || !let);
4082 if (!MaybeEmitVarDecl(cx, cg, PN_OP(pn), pn2, &atomIndex))
4083 return JS_FALSE;
4085 if (pn3) {
4086 JS_ASSERT(!forInVar);
/* SETNAME needs its object base pushed first via BINDNAME. */
4087 if (op == JSOP_SETNAME) {
4088 JS_ASSERT(!let);
4089 EMIT_INDEX_OP(JSOP_BINDNAME, atomIndex);
4091 if (pn->pn_op == JSOP_DEFCONST &&
4092 !js_DefineCompileTimeConstant(cx, cg, pn2->pn_atom, pn3)) {
4093 return JS_FALSE;
4096 #if JS_HAS_BLOCK_SCOPE
4097 /* Evaluate expr in the outer lexical scope if requested. */
4098 if (popScope) {
4099 cg->topStmt = stmt->down;
4100 cg->topScopeStmt = scopeStmt->downScope;
4102 #endif
/* Initializers are not in for-init position even if the decl is. */
4104 oldflags = cg->flags;
4105 cg->flags &= ~TCF_IN_FOR_INIT;
4106 if (!js_EmitTree(cx, cg, pn3))
4107 return JS_FALSE;
4108 cg->flags |= oldflags & TCF_IN_FOR_INIT;
4110 #if JS_HAS_BLOCK_SCOPE
4111 if (popScope) {
4112 cg->topStmt = stmt;
4113 cg->topScopeStmt = scopeStmt;
4115 #endif
4120 * The parser rewrites 'for (var x = i in o)' to hoist 'var x = i' --
4121 * likewise 'for (let x = i in o)' becomes 'i; for (let x in o)' using
4122 * a TOK_SEQ node to make the two statements appear as one. Therefore
4123 * if this declaration is part of a for-in loop head, we do not need to
4124 * emit op or any source note. Our caller, the TOK_FOR/TOK_IN case in
4125 * js_EmitTree, will annotate appropriately.
4127 JS_ASSERT_IF(pn2->pn_defn, pn3 == pn2->pn_expr);
4128 if (forInVar) {
4129 JS_ASSERT(pn->pn_count == 1);
4130 JS_ASSERT(!pn3);
4131 break;
/* First name in the list carries the SRC_DECL note, typed by pn->pn_op. */
4134 if (first &&
4135 !inLetHead &&
4136 js_NewSrcNote2(cx, cg, SRC_DECL,
4137 (pn->pn_op == JSOP_DEFCONST)
4138 ? SRC_DECL_CONST
4139 : (pn->pn_op == JSOP_DEFVAR)
4140 ? SRC_DECL_VAR
4141 : SRC_DECL_LET) < 0) {
4142 return JS_FALSE;
4144 if (op == JSOP_ARGUMENTS) {
4145 if (js_Emit1(cx, cg, op) < 0)
4146 return JS_FALSE;
4147 } else if (pn2->pn_cookie != FREE_UPVAR_COOKIE) {
4148 EMIT_UINT16_IMM_OP(op, atomIndex);
4149 } else {
4150 EMIT_INDEX_OP(op, atomIndex);
4153 #if JS_HAS_DESTRUCTURING
4154 emit_note_pop:
4155 #endif
/* Back-patch the previous SRC_PCDELTA note with the distance to here. */
4156 tmp = CG_OFFSET(cg);
4157 if (noteIndex >= 0) {
4158 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, tmp-off))
4159 return JS_FALSE;
4161 if (!next)
4162 break;
4163 off = tmp;
4164 noteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
4165 if (noteIndex < 0 || js_Emit1(cx, cg, JSOP_POP) < 0)
4166 return JS_FALSE;
4169 /* If this is a let head, emit and return a srcnote on the pop. */
4170 if (inLetHead) {
4171 *headNoteIndex = js_NewSrcNote(cx, cg, SRC_DECL);
4172 if (*headNoteIndex < 0)
4173 return JS_FALSE;
4174 if (!(pn->pn_xflags & PNX_POPVAR))
4175 return js_Emit1(cx, cg, JSOP_NOP) >= 0;
4178 return !(pn->pn_xflags & PNX_POPVAR) || js_Emit1(cx, cg, JSOP_POP) >= 0;
4181 #if defined DEBUG_brendan || defined DEBUG_mrbkap
/*
 * Debug-only (DEBUG_brendan/DEBUG_mrbkap builds): scan the source notes,
 * accumulating their pc deltas, and report whether a gettable note lands
 * exactly on the current bytecode offset.
 */
4182 static JSBool
4183 GettableNoteForNextOp(JSCodeGenerator *cg)
4185 ptrdiff_t offset, target;
4186 jssrcnote *sn, *end;
4188 offset = 0;
4189 target = CG_OFFSET(cg);
4190 for (sn = CG_NOTES(cg), end = sn + CG_NOTE_COUNT(cg); sn < end;
4191 sn = SN_NEXT(sn)) {
4192 if (offset == target && SN_IS_GETTABLE(sn))
4193 return JS_TRUE;
4194 offset += SN_DELTA(sn);
4196 return JS_FALSE;
4198 #endif
4200 /* Top-level named functions need a nop for decompilation. */
4201 static JSBool
4202 EmitFunctionDefNop(JSContext *cx, JSCodeGenerator *cg, uintN index)
4204 return js_NewSrcNote2(cx, cg, SRC_FUNCDEF, (ptrdiff_t)index) >= 0 &&
4205 js_Emit1(cx, cg, JSOP_NOP) >= 0;
4208 /* See the SRC_FOR source note offsetBias comments later in this file. */
4209 JS_STATIC_ASSERT(JSOP_NOP_LENGTH == 1);
4210 JS_STATIC_ASSERT(JSOP_POP_LENGTH == 1);
4212 JSBool
4213 js_EmitTree(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
4215 JSBool ok, useful, wantval;
4216 JSStmtInfo *stmt, stmtInfo;
4217 ptrdiff_t top, off, tmp, beq, jmp;
4218 JSParseNode *pn2, *pn3;
4219 JSAtom *atom;
4220 JSAtomListElement *ale;
4221 jsatomid atomIndex;
4222 uintN index;
4223 ptrdiff_t noteIndex;
4224 JSSrcNoteType noteType;
4225 jsbytecode *pc;
4226 JSOp op;
4227 JSTokenType type;
4228 uint32 argc;
4229 #if JS_HAS_SHARP_VARS
4230 jsint sharpnum;
4231 #endif
4233 JS_CHECK_RECURSION(cx, return JS_FALSE);
4235 ok = JS_TRUE;
4236 cg->emitLevel++;
4237 pn->pn_offset = top = CG_OFFSET(cg);
4239 /* Emit notes to tell the current bytecode's source line number. */
4240 UPDATE_LINE_NUMBER_NOTES(cx, cg, pn->pn_pos.begin.lineno);
4242 switch (pn->pn_type) {
4243 case TOK_FUNCTION:
4245 JSFunction *fun;
4246 uintN slot;
4248 #if JS_HAS_XML_SUPPORT
4249 if (pn->pn_arity == PN_NULLARY) {
4250 if (js_Emit1(cx, cg, JSOP_GETFUNNS) < 0)
4251 return JS_FALSE;
4252 break;
4254 #endif
4256 fun = (JSFunction *) pn->pn_funbox->object;
4257 JS_ASSERT(FUN_INTERPRETED(fun));
4258 if (fun->u.i.script) {
4260 * This second pass is needed to emit JSOP_NOP with a source note
4261 * for the already-emitted function definition prolog opcode. See
4262 * comments in the TOK_LC case.
4264 JS_ASSERT(pn->pn_op == JSOP_NOP);
4265 JS_ASSERT(cg->flags & TCF_IN_FUNCTION);
4266 if (!EmitFunctionDefNop(cx, cg, pn->pn_index))
4267 return JS_FALSE;
4268 break;
4271 JS_ASSERT_IF(cx->options & JSOPTION_ANONFUNFIX,
4272 pn->pn_defn ||
4273 (!pn->pn_used && !pn->isTopLevel()) ||
4274 (fun->flags & JSFUN_LAMBDA));
4276 JS_ASSERT_IF(pn->pn_funbox->tcflags & TCF_FUN_HEAVYWEIGHT,
4277 FUN_KIND(fun) == JSFUN_INTERPRETED);
4279 /* Generate code for the function's body. */
4280 void *cg2mark = JS_ARENA_MARK(cg->codePool);
4281 void *cg2space;
4282 JS_ARENA_ALLOCATE_TYPE(cg2space, JSCodeGenerator, cg->codePool);
4283 if (!cg2space) {
4284 js_ReportOutOfScriptQuota(cx);
4285 return JS_FALSE;
4287 JSCodeGenerator *cg2 =
4288 new (cg2space) JSCodeGenerator(cg->compiler,
4289 cg->codePool, cg->notePool,
4290 pn->pn_pos.begin.lineno);
4291 cg2->flags = (uint16) (pn->pn_funbox->tcflags | TCF_IN_FUNCTION);
4292 cg2->fun = fun;
4293 cg2->funbox = pn->pn_funbox;
4294 cg2->parent = cg;
4297 * jsparse.cpp:SetStaticLevel limited static nesting depth to fit in 16
4298 * bits and to reserve the all-ones value, thereby reserving the magic
4299 * FREE_UPVAR_COOKIE value. Note the cg2->staticLevel assignment below.
4301 JS_ASSERT(cg->staticLevel < JS_BITMASK(16) - 1);
4302 cg2->staticLevel = cg->staticLevel + 1;
4304 /* We measured the max scope depth when we parsed the function. */
4305 JS_SCOPE_DEPTH_METERING(cg2->maxScopeDepth = (uintN) -1);
4306 if (!js_EmitFunctionScript(cx, cg2, pn->pn_body))
4307 pn = NULL;
4309 cg2->~JSCodeGenerator();
4310 JS_ARENA_RELEASE(cg->codePool, cg2mark);
4311 cg2 = NULL;
4312 if (!pn)
4313 return JS_FALSE;
4315 /* Make the function object a literal in the outer script's pool. */
4316 index = cg->objectList.index(pn->pn_funbox);
4318 /* Emit a bytecode pointing to the closure object in its immediate. */
4319 op = PN_OP(pn);
4320 if (op != JSOP_NOP) {
4321 if ((pn->pn_funbox->tcflags & TCF_GENEXP_LAMBDA) &&
4322 js_NewSrcNote(cx, cg, SRC_GENEXP) < 0) {
4323 return JS_FALSE;
4325 EMIT_INDEX_OP(op, index);
4326 break;
4330 * For a script we emit the code as we parse. Thus the bytecode for
4331 * top-level functions should go in the prolog to predefine their
4332 * names in the variable object before the already-generated main code
4333 * is executed. This extra work for top-level scripts is not necessary
4334 * when we emit the code for a function. It is fully parsed prior to
4335 * invocation of the emitter and calls to js_EmitTree for function
4336 * definitions can be scheduled before generating the rest of code.
4338 if (!(cg->flags & TCF_IN_FUNCTION)) {
4339 JS_ASSERT(!cg->topStmt);
4340 CG_SWITCH_TO_PROLOG(cg);
4341 op = FUN_FLAT_CLOSURE(fun) ? JSOP_DEFFUN_FC : JSOP_DEFFUN;
4342 EMIT_INDEX_OP(op, index);
4343 CG_SWITCH_TO_MAIN(cg);
4345 /* Emit NOP for the decompiler. */
4346 if (!EmitFunctionDefNop(cx, cg, index))
4347 return JS_FALSE;
4348 } else {
4349 #ifdef DEBUG
4350 JSLocalKind localKind =
4351 #endif
4352 js_LookupLocal(cx, cg->fun, fun->atom, &slot);
4353 JS_ASSERT(localKind == JSLOCAL_VAR || localKind == JSLOCAL_CONST);
4354 JS_ASSERT(index < JS_BIT(20));
4355 pn->pn_index = index;
4356 op = FUN_FLAT_CLOSURE(fun) ? JSOP_DEFLOCALFUN_FC : JSOP_DEFLOCALFUN;
4357 if (!EmitSlotIndexOp(cx, op, slot, index, cg))
4358 return JS_FALSE;
4360 break;
4363 case TOK_ARGSBODY:
4364 ok = js_EmitTree(cx, cg, pn->last());
4365 break;
4367 case TOK_UPVARS:
4368 JS_ASSERT(cg->lexdeps.count == 0);
4369 JS_ASSERT(pn->pn_names.count != 0);
4370 cg->lexdeps = pn->pn_names;
4371 ok = js_EmitTree(cx, cg, pn->pn_tree);
4372 break;
4374 case TOK_IF:
4375 /* Initialize so we can detect else-if chains and avoid recursion. */
4376 stmtInfo.type = STMT_IF;
4377 beq = jmp = -1;
4378 noteIndex = -1;
4380 if_again:
4381 /* Emit code for the condition before pushing stmtInfo. */
4382 if (!js_EmitTree(cx, cg, pn->pn_kid1))
4383 return JS_FALSE;
4384 top = CG_OFFSET(cg);
4385 if (stmtInfo.type == STMT_IF) {
4386 js_PushStatement(cg, &stmtInfo, STMT_IF, top);
4387 } else {
4389 * We came here from the goto further below that detects else-if
4390 * chains, so we must mutate stmtInfo back into a STMT_IF record.
4391 * Also (see below for why) we need a note offset for SRC_IF_ELSE
4392 * to help the decompiler. Actually, we need two offsets, one for
4393 * decompiling any else clause and the second for decompiling an
4394 * else-if chain without bracing, overindenting, or incorrectly
4395 * scoping let declarations.
4397 JS_ASSERT(stmtInfo.type == STMT_ELSE);
4398 stmtInfo.type = STMT_IF;
4399 stmtInfo.update = top;
4400 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, jmp - beq))
4401 return JS_FALSE;
4402 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 1, top - jmp))
4403 return JS_FALSE;
4406 /* Emit an annotated branch-if-false around the then part. */
4407 pn3 = pn->pn_kid3;
4408 noteIndex = js_NewSrcNote(cx, cg, pn3 ? SRC_IF_ELSE : SRC_IF);
4409 if (noteIndex < 0)
4410 return JS_FALSE;
4411 beq = EmitJump(cx, cg, JSOP_IFEQ, 0);
4412 if (beq < 0)
4413 return JS_FALSE;
4415 /* Emit code for the then and optional else parts. */
4416 if (!js_EmitTree(cx, cg, pn->pn_kid2))
4417 return JS_FALSE;
4418 if (pn3) {
4419 /* Modify stmtInfo so we know we're in the else part. */
4420 stmtInfo.type = STMT_ELSE;
4423 * Emit a JSOP_BACKPATCH op to jump from the end of our then part
4424 * around the else part. The js_PopStatementCG call at the bottom
4425 * of this switch case will fix up the backpatch chain linked from
4426 * stmtInfo.breaks.
4428 jmp = EmitGoto(cx, cg, &stmtInfo, &stmtInfo.breaks, NULL, SRC_NULL);
4429 if (jmp < 0)
4430 return JS_FALSE;
4432 /* Ensure the branch-if-false comes here, then emit the else. */
4433 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, beq);
4434 if (pn3->pn_type == TOK_IF) {
4435 pn = pn3;
4436 goto if_again;
4439 if (!js_EmitTree(cx, cg, pn3))
4440 return JS_FALSE;
4443 * Annotate SRC_IF_ELSE with the offset from branch to jump, for
4444 * the decompiler's benefit. We can't just "back up" from the pc
4445 * of the else clause, because we don't know whether an extended
4446 * jump was required to leap from the end of the then clause over
4447 * the else clause.
4449 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, jmp - beq))
4450 return JS_FALSE;
4451 } else {
4452 /* No else part, fixup the branch-if-false to come here. */
4453 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, beq);
4455 ok = js_PopStatementCG(cx, cg);
4456 break;
4458 case TOK_SWITCH:
4459 /* Out of line to avoid bloating js_EmitTree's stack frame size. */
4460 ok = EmitSwitch(cx, cg, pn, &stmtInfo);
4461 break;
4463 case TOK_WHILE:
4465 * Minimize bytecodes issued for one or more iterations by jumping to
4466 * the condition below the body and closing the loop if the condition
4467 * is true with a backward branch. For iteration count i:
4469 * i test at the top test at the bottom
4470 * = =============== ==================
4471 * 0 ifeq-pass goto; ifne-fail
4472 * 1 ifeq-fail; goto; ifne-pass goto; ifne-pass; ifne-fail
4473 * 2 2*(ifeq-fail; goto); ifeq-pass goto; 2*ifne-pass; ifne-fail
4474 * . . .
4475 * N N*(ifeq-fail; goto); ifeq-pass goto; N*ifne-pass; ifne-fail
4477 * SpiderMonkey, pre-mozilla.org, emitted while parsing and so used
4478 * test at the top. When JSParseNode trees were added during the ES3
4479 * work (1998-9), the code generation scheme was not optimized, and
4480 * the decompiler continued to take advantage of the branch and jump
4481 * that bracketed the body. But given the SRC_WHILE note, it is easy
4482 * to support the more efficient scheme.
4484 js_PushStatement(cg, &stmtInfo, STMT_WHILE_LOOP, top);
4485 noteIndex = js_NewSrcNote(cx, cg, SRC_WHILE);
4486 if (noteIndex < 0)
4487 return JS_FALSE;
4488 jmp = EmitJump(cx, cg, JSOP_GOTO, 0);
4489 if (jmp < 0)
4490 return JS_FALSE;
4491 top = CG_OFFSET(cg);
4492 if (!js_Emit1(cx, cg, JSOP_LOOP))
4493 return JS_FALSE;
4494 if (!js_EmitTree(cx, cg, pn->pn_right))
4495 return JS_FALSE;
4496 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
4497 if (!js_EmitTree(cx, cg, pn->pn_left))
4498 return JS_FALSE;
4499 beq = EmitJump(cx, cg, JSOP_IFNE, top - CG_OFFSET(cg));
4500 if (beq < 0)
4501 return JS_FALSE;
4502 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, beq - jmp))
4503 return JS_FALSE;
4504 ok = js_PopStatementCG(cx, cg);
4505 break;
4507 case TOK_DO:
4508 /* Emit an annotated nop so we know to decompile a 'do' keyword. */
4509 noteIndex = js_NewSrcNote(cx, cg, SRC_WHILE);
4510 if (noteIndex < 0 || js_Emit1(cx, cg, JSOP_NOP) < 0)
4511 return JS_FALSE;
4513 /* Compile the loop body. */
4514 top = CG_OFFSET(cg);
4515 if (!js_Emit1(cx, cg, JSOP_LOOP))
4516 return JS_FALSE;
4517 js_PushStatement(cg, &stmtInfo, STMT_DO_LOOP, top);
4518 if (!js_EmitTree(cx, cg, pn->pn_left))
4519 return JS_FALSE;
4521 /* Set loop and enclosing label update offsets, for continue. */
4522 stmt = &stmtInfo;
4523 do {
4524 stmt->update = CG_OFFSET(cg);
4525 } while ((stmt = stmt->down) != NULL && stmt->type == STMT_LABEL);
4527 /* Compile the loop condition, now that continues know where to go. */
4528 if (!js_EmitTree(cx, cg, pn->pn_right))
4529 return JS_FALSE;
4532 * Since we use JSOP_IFNE for other purposes as well as for do-while
4533 * loops, we must store 1 + (beq - top) in the SRC_WHILE note offset,
4534 * and the decompiler must get that delta and decompile recursively.
4536 beq = EmitJump(cx, cg, JSOP_IFNE, top - CG_OFFSET(cg));
4537 if (beq < 0)
4538 return JS_FALSE;
4539 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, 1 + (beq - top)))
4540 return JS_FALSE;
4541 ok = js_PopStatementCG(cx, cg);
4542 break;
4544 case TOK_FOR:
4545 beq = 0; /* suppress gcc warnings */
4546 jmp = -1;
4547 pn2 = pn->pn_left;
4548 js_PushStatement(cg, &stmtInfo, STMT_FOR_LOOP, top);
4550 if (pn2->pn_type == TOK_IN) {
4551 /* Set stmtInfo type for later testing. */
4552 stmtInfo.type = STMT_FOR_IN_LOOP;
4555 * If the left part is 'var x', emit code to define x if necessary
4556 * using a prolog opcode, but do not emit a pop. If the left part
4557 * is 'var x = i', emit prolog code to define x if necessary; then
4558 * emit code to evaluate i, assign the result to x, and pop the
4559 * result off the stack.
4561 * All the logic to do this is implemented in the outer switch's
4562 * TOK_VAR case, conditioned on pn_xflags flags set by the parser.
4564 * In the 'for (var x = i in o) ...' case, the js_EmitTree(...pn3)
4565 * called here will generate the proper note for the assignment
4566 * op that sets x = i, hoisting the initialized var declaration
4567 * out of the loop: 'var x = i; for (x in o) ...'.
4569 * In the 'for (var x in o) ...' case, nothing but the prolog op
4570 * (if needed) should be generated here, we must emit the note
4571 * just before the JSOP_FOR* opcode in the switch on pn3->pn_type
4572 * a bit below, so nothing is hoisted: 'for (var x in o) ...'.
4574 * A 'for (let x = i in o)' loop must not be hoisted, since in
4575 * this form the let variable is scoped by the loop body (but not
4576 * the head). The initializer expression i must be evaluated for
4577 * any side effects. So we hoist only i in the let case.
4579 pn3 = pn2->pn_left;
4580 type = PN_TYPE(pn3);
4581 cg->flags |= TCF_IN_FOR_INIT;
4582 if (TOKEN_TYPE_IS_DECL(type) && !js_EmitTree(cx, cg, pn3))
4583 return JS_FALSE;
4584 cg->flags &= ~TCF_IN_FOR_INIT;
4586 /* Compile the object expression to the right of 'in'. */
4587 if (!js_EmitTree(cx, cg, pn2->pn_right))
4588 return JS_FALSE;
4591 * Emit a bytecode to convert top of stack value to the iterator
4592 * object depending on the loop variant (for-in, for-each-in, or
4593 * destructuring for-in).
4595 JS_ASSERT(pn->pn_op == JSOP_ITER);
4596 if (js_Emit2(cx, cg, JSOP_ITER, (uint8) pn->pn_iflags) < 0)
4597 return JS_FALSE;
4599 /* Annotate so the decompiler can find the loop-closing jump. */
4600 noteIndex = js_NewSrcNote(cx, cg, SRC_FOR_IN);
4601 if (noteIndex < 0)
4602 return JS_FALSE;
4605 * Jump down to the loop condition to minimize overhead assuming at
4606 * least one iteration, as the other loop forms do.
4608 jmp = EmitJump(cx, cg, JSOP_GOTO, 0);
4609 if (jmp < 0)
4610 return JS_FALSE;
4612 top = CG_OFFSET(cg);
4613 SET_STATEMENT_TOP(&stmtInfo, top);
4614 if (!js_Emit1(cx, cg, JSOP_LOOP))
4615 return JS_FALSE;
4617 #ifdef DEBUG
4618 intN loopDepth = cg->stackDepth;
4619 #endif
4622 * Compile a JSOP_FOR* bytecode based on the left hand side.
4624 * Initialize op to JSOP_SETNAME in case of |for ([a, b] in o)...|
4625 * or similar, to signify assignment, rather than declaration, to
4626 * the decompiler. EmitDestructuringOps takes a prolog bytecode
4627 * parameter and emits the appropriate source note, defaulting to
4628 * assignment, so JSOP_SETNAME is not critical here; many similar
4629 * ops could be used -- just not JSOP_NOP (which means 'let').
4631 op = JSOP_SETNAME;
4632 switch (type) {
4633 #if JS_HAS_BLOCK_SCOPE
4634 case TOK_LET:
4635 #endif
4636 case TOK_VAR:
4637 JS_ASSERT(pn3->pn_arity == PN_LIST && pn3->pn_count == 1);
4638 pn3 = pn3->pn_head;
4639 #if JS_HAS_DESTRUCTURING
4640 if (pn3->pn_type == TOK_ASSIGN) {
4641 pn3 = pn3->pn_left;
4642 JS_ASSERT(pn3->pn_type == TOK_RB || pn3->pn_type == TOK_RC);
4644 if (pn3->pn_type == TOK_RB || pn3->pn_type == TOK_RC) {
4645 op = PN_OP(pn2->pn_left);
4646 goto destructuring_for;
4648 #else
4649 JS_ASSERT(pn3->pn_type == TOK_NAME);
4650 #endif
4651 /* FALL THROUGH */
4653 case TOK_NAME:
4655 * Always annotate JSOP_FORLOCAL if given input of the form
4656 * 'for (let x in * o)' -- the decompiler must not hoist the
4657 * 'let x' out of the loop head, or x will be bound in the
4658 * wrong scope. Likewise, but in this case only for the sake
4659 * of higher decompilation fidelity only, do not hoist 'var x'
4660 * when given 'for (var x in o)'.
4662 if ((
4663 #if JS_HAS_BLOCK_SCOPE
4664 type == TOK_LET ||
4665 #endif
4666 (type == TOK_VAR && !pn3->maybeExpr())) &&
4667 js_NewSrcNote2(cx, cg, SRC_DECL,
4668 (type == TOK_VAR)
4669 ? SRC_DECL_VAR
4670 : SRC_DECL_LET) < 0) {
4671 return JS_FALSE;
4673 if (pn3->pn_cookie != FREE_UPVAR_COOKIE) {
4674 op = PN_OP(pn3);
4675 switch (op) {
4676 case JSOP_GETARG: /* FALL THROUGH */
4677 case JSOP_SETARG: op = JSOP_FORARG; break;
4678 case JSOP_GETGVAR: /* FALL THROUGH */
4679 case JSOP_SETGVAR: op = JSOP_FORNAME; break;
4680 case JSOP_GETLOCAL: /* FALL THROUGH */
4681 case JSOP_SETLOCAL: op = JSOP_FORLOCAL; break;
4682 default: JS_ASSERT(0);
4684 } else {
4685 pn3->pn_op = JSOP_FORNAME;
4686 if (!BindNameToSlot(cx, cg, pn3))
4687 return JS_FALSE;
4688 op = PN_OP(pn3);
4690 if (pn3->isConst()) {
4691 js_ReportCompileErrorNumber(cx, CG_TS(cg), pn3, JSREPORT_ERROR,
4692 JSMSG_BAD_FOR_LEFTSIDE);
4693 return JS_FALSE;
4695 if (pn3->pn_cookie != FREE_UPVAR_COOKIE) {
4696 atomIndex = (jsatomid) pn3->pn_cookie;
4697 EMIT_UINT16_IMM_OP(op, atomIndex);
4698 } else {
4699 if (!EmitAtomOp(cx, pn3, op, cg))
4700 return JS_FALSE;
4702 break;
4704 case TOK_DOT:
4706 * 'for (o.p in q)' can use JSOP_FORPROP only if evaluating 'o'
4707 * has no side effects.
4709 useful = JS_FALSE;
4710 if (!CheckSideEffects(cx, cg, pn3->expr(), &useful))
4711 return JS_FALSE;
4712 if (!useful) {
4713 if (!EmitPropOp(cx, pn3, JSOP_FORPROP, cg, JS_FALSE))
4714 return JS_FALSE;
4715 break;
4717 /* FALL THROUGH */
4719 #if JS_HAS_DESTRUCTURING
4720 destructuring_for:
4721 #endif
4722 default:
4723 if (js_Emit1(cx, cg, JSOP_FORELEM) < 0)
4724 return JS_FALSE;
4725 JS_ASSERT(cg->stackDepth >= 3);
4727 #if JS_HAS_DESTRUCTURING
4728 if (pn3->pn_type == TOK_RB || pn3->pn_type == TOK_RC) {
4729 if (!EmitDestructuringOps(cx, cg, op, pn3))
4730 return JS_FALSE;
4731 if (js_Emit1(cx, cg, JSOP_POP) < 0)
4732 return JS_FALSE;
4733 } else
4734 #endif
4735 #if JS_HAS_LVALUE_RETURN
4736 if (pn3->pn_type == TOK_LP) {
4737 JS_ASSERT(pn3->pn_op == JSOP_SETCALL);
4738 if (!js_EmitTree(cx, cg, pn3))
4739 return JS_FALSE;
4740 if (js_Emit1(cx, cg, JSOP_ENUMELEM) < 0)
4741 return JS_FALSE;
4742 } else
4743 #endif
4744 #if JS_HAS_XML_SUPPORT
4745 if (pn3->pn_type == TOK_UNARYOP) {
4746 JS_ASSERT(pn3->pn_op == JSOP_BINDXMLNAME);
4747 if (!js_EmitTree(cx, cg, pn3))
4748 return JS_FALSE;
4749 if (js_Emit1(cx, cg, JSOP_ENUMELEM) < 0)
4750 return JS_FALSE;
4751 } else
4752 #endif
4753 if (!EmitElemOp(cx, pn3, JSOP_ENUMELEM, cg))
4754 return JS_FALSE;
4755 break;
4758 /* The stack should be balanced around the JSOP_FOR* opcode sequence. */
4759 JS_ASSERT(cg->stackDepth == loopDepth);
4761 /* Set the first srcnote offset so we can find the start of the loop body. */
4762 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, CG_OFFSET(cg) - jmp))
4763 return JS_FALSE;
4765 /* Emit code for the loop body. */
4766 if (!js_EmitTree(cx, cg, pn->pn_right))
4767 return JS_FALSE;
4769 /* Set loop and enclosing "update" offsets, for continue. */
4770 stmt = &stmtInfo;
4771 do {
4772 stmt->update = CG_OFFSET(cg);
4773 } while ((stmt = stmt->down) != NULL && stmt->type == STMT_LABEL);
4776 * Fixup the goto that starts the loop to jump down to JSOP_NEXTITER.
4778 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
4779 if (js_Emit1(cx, cg, JSOP_NEXTITER) < 0)
4780 return JS_FALSE;
4781 beq = EmitJump(cx, cg, JSOP_IFNE, top - CG_OFFSET(cg));
4782 if (beq < 0)
4783 return JS_FALSE;
4785 /* Set the second srcnote offset so we can find the closing jump. */
4786 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 1, beq - jmp))
4787 return JS_FALSE;
4788 } else {
4789 /* C-style for (init; cond; update) ... loop. */
4790 op = JSOP_POP;
4791 pn3 = pn2->pn_kid1;
4792 if (!pn3) {
4793 /* No initializer: emit an annotated nop for the decompiler. */
4794 op = JSOP_NOP;
4795 } else {
4796 cg->flags |= TCF_IN_FOR_INIT;
4797 #if JS_HAS_DESTRUCTURING
4798 if (pn3->pn_type == TOK_ASSIGN &&
4799 !MaybeEmitGroupAssignment(cx, cg, op, pn3, &op)) {
4800 return JS_FALSE;
4802 #endif
4803 if (op == JSOP_POP) {
4804 if (!js_EmitTree(cx, cg, pn3))
4805 return JS_FALSE;
4806 if (TOKEN_TYPE_IS_DECL(pn3->pn_type)) {
4808 * Check whether a destructuring-initialized var decl
4809 * was optimized to a group assignment. If so, we do
4810 * not need to emit a pop below, so switch to a nop,
4811 * just for the decompiler.
4813 JS_ASSERT(pn3->pn_arity == PN_LIST);
4814 if (pn3->pn_xflags & PNX_GROUPINIT)
4815 op = JSOP_NOP;
4818 cg->flags &= ~TCF_IN_FOR_INIT;
4822 * NB: the SRC_FOR note has offsetBias 1 (JSOP_{NOP,POP}_LENGTH).
4823 * Use tmp to hold the biased srcnote "top" offset, which differs
4824 * from the top local variable by the length of the JSOP_GOTO{,X}
4825 * emitted in between tmp and top if this loop has a condition.
4827 noteIndex = js_NewSrcNote(cx, cg, SRC_FOR);
4828 if (noteIndex < 0 || js_Emit1(cx, cg, op) < 0)
4829 return JS_FALSE;
4830 tmp = CG_OFFSET(cg);
4832 if (pn2->pn_kid2) {
4833 /* Goto the loop condition, which branches back to iterate. */
4834 jmp = EmitJump(cx, cg, JSOP_GOTO, 0);
4835 if (jmp < 0)
4836 return JS_FALSE;
4839 top = CG_OFFSET(cg);
4840 SET_STATEMENT_TOP(&stmtInfo, top);
4842 /* Emit code for the loop body. */
4843 if (!js_Emit1(cx, cg, JSOP_LOOP))
4844 return JS_FALSE;
4845 if (!js_EmitTree(cx, cg, pn->pn_right))
4846 return JS_FALSE;
4848 /* Set the second note offset so we can find the update part. */
4849 JS_ASSERT(noteIndex != -1);
4850 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 1,
4851 CG_OFFSET(cg) - tmp)) {
4852 return JS_FALSE;
4855 /* Set loop and enclosing "update" offsets, for continue. */
4856 stmt = &stmtInfo;
4857 do {
4858 stmt->update = CG_OFFSET(cg);
4859 } while ((stmt = stmt->down) != NULL && stmt->type == STMT_LABEL);
4861 /* Check for update code to do before the condition (if any). */
4862 pn3 = pn2->pn_kid3;
4863 if (pn3) {
4864 op = JSOP_POP;
4865 #if JS_HAS_DESTRUCTURING
4866 if (pn3->pn_type == TOK_ASSIGN &&
4867 !MaybeEmitGroupAssignment(cx, cg, op, pn3, &op)) {
4868 return JS_FALSE;
4870 #endif
4871 if (op == JSOP_POP && !js_EmitTree(cx, cg, pn3))
4872 return JS_FALSE;
4874 /* Always emit the POP or NOP, to help the decompiler. */
4875 if (js_Emit1(cx, cg, op) < 0)
4876 return JS_FALSE;
4878 /* Restore the absolute line number for source note readers. */
4879 off = (ptrdiff_t) pn->pn_pos.end.lineno;
4880 if (CG_CURRENT_LINE(cg) != (uintN) off) {
4881 if (js_NewSrcNote2(cx, cg, SRC_SETLINE, off) < 0)
4882 return JS_FALSE;
4883 CG_CURRENT_LINE(cg) = (uintN) off;
4887 /* Set the first note offset so we can find the loop condition. */
4888 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0,
4889 CG_OFFSET(cg) - tmp)) {
4890 return JS_FALSE;
4893 if (pn2->pn_kid2) {
4894 /* Fix up the goto from top to target the loop condition. */
4895 JS_ASSERT(jmp >= 0);
4896 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
4898 if (!js_EmitTree(cx, cg, pn2->pn_kid2))
4899 return JS_FALSE;
4902 /* The third note offset helps us find the loop-closing jump. */
4903 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 2,
4904 CG_OFFSET(cg) - tmp)) {
4905 return JS_FALSE;
4908 if (pn2->pn_kid2) {
4909 beq = EmitJump(cx, cg, JSOP_IFNE, top - CG_OFFSET(cg));
4910 if (beq < 0)
4911 return JS_FALSE;
4912 } else {
4913 /* No loop condition -- emit the loop-closing jump. */
4914 jmp = EmitJump(cx, cg, JSOP_GOTO, top - CG_OFFSET(cg));
4915 if (jmp < 0)
4916 return JS_FALSE;
4920 /* Now fixup all breaks and continues (before for/in's JSOP_ENDITER). */
4921 if (!js_PopStatementCG(cx, cg))
4922 return JS_FALSE;
4924 if (pn2->pn_type == TOK_IN) {
4926 * JSOP_ENDITER must have a slot to save an exception thrown from
4927 * the body of for-in loop when closing the iterator object, and
4928 * fortunately it does: the slot that was set by JSOP_NEXTITER to
4929 * the return value of iterator.next().
4931 JS_ASSERT(js_CodeSpec[JSOP_ENDITER].nuses == 2);
4932 if (!NewTryNote(cx, cg, JSTRY_ITER, cg->stackDepth, top, CG_OFFSET(cg)) ||
4933 js_Emit1(cx, cg, JSOP_ENDITER) < 0) {
4934 return JS_FALSE;
4937 break;
4939 case TOK_BREAK:
4940 stmt = cg->topStmt;
4941 atom = pn->pn_atom;
4942 if (atom) {
4943 ale = cg->atomList.add(cg->compiler, atom);
4944 if (!ale)
4945 return JS_FALSE;
4946 while (stmt->type != STMT_LABEL || stmt->label != atom)
4947 stmt = stmt->down;
4948 noteType = SRC_BREAK2LABEL;
4949 } else {
4950 ale = NULL;
4951 while (!STMT_IS_LOOP(stmt) && stmt->type != STMT_SWITCH)
4952 stmt = stmt->down;
4953 noteType = (stmt->type == STMT_SWITCH) ? SRC_NULL : SRC_BREAK;
4956 if (EmitGoto(cx, cg, stmt, &stmt->breaks, ale, noteType) < 0)
4957 return JS_FALSE;
4958 break;
4960 case TOK_CONTINUE:
4961 stmt = cg->topStmt;
4962 atom = pn->pn_atom;
4963 if (atom) {
4964 /* Find the loop statement enclosed by the matching label. */
4965 JSStmtInfo *loop = NULL;
4966 ale = cg->atomList.add(cg->compiler, atom);
4967 if (!ale)
4968 return JS_FALSE;
4969 while (stmt->type != STMT_LABEL || stmt->label != atom) {
4970 if (STMT_IS_LOOP(stmt))
4971 loop = stmt;
4972 stmt = stmt->down;
4974 stmt = loop;
4975 noteType = SRC_CONT2LABEL;
4976 } else {
4977 ale = NULL;
4978 while (!STMT_IS_LOOP(stmt))
4979 stmt = stmt->down;
4980 noteType = SRC_CONTINUE;
4983 if (EmitGoto(cx, cg, stmt, &stmt->continues, ale, noteType) < 0)
4984 return JS_FALSE;
4985 break;
4987 case TOK_WITH:
4988 if (!js_EmitTree(cx, cg, pn->pn_left))
4989 return JS_FALSE;
4990 js_PushStatement(cg, &stmtInfo, STMT_WITH, CG_OFFSET(cg));
4991 if (js_Emit1(cx, cg, JSOP_ENTERWITH) < 0)
4992 return JS_FALSE;
4993 if (!js_EmitTree(cx, cg, pn->pn_right))
4994 return JS_FALSE;
4995 if (js_Emit1(cx, cg, JSOP_LEAVEWITH) < 0)
4996 return JS_FALSE;
4997 ok = js_PopStatementCG(cx, cg);
4998 break;
5000 case TOK_TRY:
5002 ptrdiff_t tryStart, tryEnd, catchJump, finallyStart;
5003 intN depth;
5004 JSParseNode *lastCatch;
5006 catchJump = -1;
5009 * Push stmtInfo to track jumps-over-catches and gosubs-to-finally
5010 * for later fixup.
5012 * When a finally block is active (STMT_FINALLY in our tree context),
5013 * non-local jumps (including jumps-over-catches) result in a GOSUB
5014 * being written into the bytecode stream and fixed-up later (c.f.
5015 * EmitBackPatchOp and BackPatch).
5017 js_PushStatement(cg, &stmtInfo,
5018 pn->pn_kid3 ? STMT_FINALLY : STMT_TRY,
5019 CG_OFFSET(cg));
5022 * Since an exception can be thrown at any place inside the try block,
5023 * we need to restore the stack and the scope chain before we transfer
5024 * the control to the exception handler.
5026 * For that we store in a try note associated with the catch or
5027 * finally block the stack depth upon the try entry. The interpreter
5028 * uses this depth to properly unwind the stack and the scope chain.
5030 depth = cg->stackDepth;
5032 /* Mark try location for decompilation, then emit try block. */
5033 if (js_Emit1(cx, cg, JSOP_TRY) < 0)
5034 return JS_FALSE;
5035 tryStart = CG_OFFSET(cg);
5036 if (!js_EmitTree(cx, cg, pn->pn_kid1))
5037 return JS_FALSE;
5038 JS_ASSERT(depth == cg->stackDepth);
5040 /* GOSUB to finally, if present. */
5041 if (pn->pn_kid3) {
5042 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
5043 return JS_FALSE;
5044 jmp = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &GOSUBS(stmtInfo));
5045 if (jmp < 0)
5046 return JS_FALSE;
5049 /* Emit (hidden) jump over catch and/or finally. */
5050 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
5051 return JS_FALSE;
5052 jmp = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &catchJump);
5053 if (jmp < 0)
5054 return JS_FALSE;
5056 tryEnd = CG_OFFSET(cg);
5058 /* If this try has a catch block, emit it. */
5059 pn2 = pn->pn_kid2;
5060 lastCatch = NULL;
5061 if (pn2) {
5062 jsint count = 0; /* previous catch block's population */
5065 * The emitted code for a catch block looks like:
5067 * [throwing] only if 2nd+ catch block
5068 * [leaveblock] only if 2nd+ catch block
5069 * enterblock with SRC_CATCH
5070 * exception
5071 * [dup] only if catchguard
5072 * setlocalpop <slot> or destructuring code
5073 * [< catchguard code >] if there's a catchguard
5074 * [ifeq <offset to next catch block>] " "
5075 * [pop] only if catchguard
5076 * < catch block contents >
5077 * leaveblock
5078 * goto <end of catch blocks> non-local; finally applies
5080 * If there's no catch block without a catchguard, the last
5081 * <offset to next catch block> points to rethrow code. This
5082 * code will [gosub] to the finally code if appropriate, and is
5083 * also used for the catch-all trynote for capturing exceptions
5084 * thrown from catch{} blocks.
5086 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
5087 ptrdiff_t guardJump, catchNote;
5089 JS_ASSERT(cg->stackDepth == depth);
5090 guardJump = GUARDJUMP(stmtInfo);
5091 if (guardJump != -1) {
5092 /* Fix up and clean up previous catch block. */
5093 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, guardJump);
5096 * Account for JSOP_ENTERBLOCK (whose block object count
5097 * is saved below) and pushed exception object that we
5098 * still have after the jumping from the previous guard.
5100 cg->stackDepth = depth + count + 1;
5103 * Move exception back to cx->exception to prepare for
5104 * the next catch. We hide [throwing] from the decompiler
5105 * since it compensates for the hidden JSOP_DUP at the
5106 * start of the previous guarded catch.
5108 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0 ||
5109 js_Emit1(cx, cg, JSOP_THROWING) < 0) {
5110 return JS_FALSE;
5112 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
5113 return JS_FALSE;
5114 EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK, count);
5115 JS_ASSERT(cg->stackDepth == depth);
5119 * Annotate the JSOP_ENTERBLOCK that's about to be generated
5120 * by the call to js_EmitTree immediately below. Save this
5121 * source note's index in stmtInfo for use by the TOK_CATCH:
5122 * case, where the length of the catch guard is set as the
5123 * note's offset.
5125 catchNote = js_NewSrcNote2(cx, cg, SRC_CATCH, 0);
5126 if (catchNote < 0)
5127 return JS_FALSE;
5128 CATCHNOTE(stmtInfo) = catchNote;
5131 * Emit the lexical scope and catch body. Save the catch's
5132 * block object population via count, for use when targeting
5133 * guardJump at the next catch (the guard mismatch case).
5135 JS_ASSERT(pn3->pn_type == TOK_LEXICALSCOPE);
5136 count = OBJ_BLOCK_COUNT(cx, pn3->pn_objbox->object);
5137 if (!js_EmitTree(cx, cg, pn3))
5138 return JS_FALSE;
5140 /* gosub <finally>, if required */
5141 if (pn->pn_kid3) {
5142 jmp = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH,
5143 &GOSUBS(stmtInfo));
5144 if (jmp < 0)
5145 return JS_FALSE;
5146 JS_ASSERT(cg->stackDepth == depth);
5150 * Jump over the remaining catch blocks. This will get fixed
5151 * up to jump to after catch/finally.
5153 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
5154 return JS_FALSE;
5155 jmp = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &catchJump);
5156 if (jmp < 0)
5157 return JS_FALSE;
5160 * Save a pointer to the last catch node to handle try-finally
5161 * and try-catch(guard)-finally special cases.
5163 lastCatch = pn3->expr();
5168 * Last catch guard jumps to the rethrow code sequence if none of the
5169 * guards match. Target guardJump at the beginning of the rethrow
5170 * sequence, just in case a guard expression throws and leaves the
5171 * stack unbalanced.
5173 if (lastCatch && lastCatch->pn_kid2) {
5174 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, GUARDJUMP(stmtInfo));
5176 /* Sync the stack to take into account pushed exception. */
5177 JS_ASSERT(cg->stackDepth == depth);
5178 cg->stackDepth = depth + 1;
5181 * Rethrow the exception, delegating executing of finally if any
5182 * to the exception handler.
5184 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0 ||
5185 js_Emit1(cx, cg, JSOP_THROW) < 0) {
5186 return JS_FALSE;
5190 JS_ASSERT(cg->stackDepth == depth);
5192 /* Emit finally handler if any. */
5193 finallyStart = 0; /* to quell GCC uninitialized warnings */
5194 if (pn->pn_kid3) {
5196 * Fix up the gosubs that might have been emitted before non-local
5197 * jumps to the finally code.
5199 if (!BackPatch(cx, cg, GOSUBS(stmtInfo), CG_NEXT(cg), JSOP_GOSUB))
5200 return JS_FALSE;
5202 finallyStart = CG_OFFSET(cg);
5204 /* Indicate that we're emitting a subroutine body. */
5205 stmtInfo.type = STMT_SUBROUTINE;
5206 if (!UpdateLineNumberNotes(cx, cg, pn->pn_kid3->pn_pos.begin.lineno))
5207 return JS_FALSE;
5208 if (js_Emit1(cx, cg, JSOP_FINALLY) < 0 ||
5209 !js_EmitTree(cx, cg, pn->pn_kid3) ||
5210 js_Emit1(cx, cg, JSOP_RETSUB) < 0) {
5211 return JS_FALSE;
5213 JS_ASSERT(cg->stackDepth == depth);
5215 if (!js_PopStatementCG(cx, cg))
5216 return JS_FALSE;
5218 if (js_NewSrcNote(cx, cg, SRC_ENDBRACE) < 0 ||
5219 js_Emit1(cx, cg, JSOP_NOP) < 0) {
5220 return JS_FALSE;
5223 /* Fix up the end-of-try/catch jumps to come here. */
5224 if (!BackPatch(cx, cg, catchJump, CG_NEXT(cg), JSOP_GOTO))
5225 return JS_FALSE;
5228 * Add the try note last, to let post-order give us the right ordering
5229 * (first to last for a given nesting level, inner to outer by level).
5231 if (pn->pn_kid2 &&
5232 !NewTryNote(cx, cg, JSTRY_CATCH, depth, tryStart, tryEnd)) {
5233 return JS_FALSE;
5237 * If we've got a finally, mark try+catch region with additional
5238 * trynote to catch exceptions (re)thrown from a catch block or
5239 * for the try{}finally{} case.
5241 if (pn->pn_kid3 &&
5242 !NewTryNote(cx, cg, JSTRY_FINALLY, depth, tryStart, finallyStart)) {
5243 return JS_FALSE;
5245 break;
5248 case TOK_CATCH:
5250 ptrdiff_t catchStart, guardJump;
5251 JSObject *blockObj;
5254 * Morph STMT_BLOCK to STMT_CATCH, note the block entry code offset,
5255 * and save the block object atom.
5257 stmt = cg->topStmt;
5258 JS_ASSERT(stmt->type == STMT_BLOCK && (stmt->flags & SIF_SCOPE));
5259 stmt->type = STMT_CATCH;
5260 catchStart = stmt->update;
5261 blockObj = stmt->blockObj;
5263 /* Go up one statement info record to the TRY or FINALLY record. */
5264 stmt = stmt->down;
5265 JS_ASSERT(stmt->type == STMT_TRY || stmt->type == STMT_FINALLY);
5267 /* Pick up the pending exception and bind it to the catch variable. */
5268 if (js_Emit1(cx, cg, JSOP_EXCEPTION) < 0)
5269 return JS_FALSE;
5272 * Dup the exception object if there is a guard for rethrowing to use
5273 * it later when rethrowing or in other catches.
5275 if (pn->pn_kid2 && js_Emit1(cx, cg, JSOP_DUP) < 0)
5276 return JS_FALSE;
5278 pn2 = pn->pn_kid1;
5279 switch (pn2->pn_type) {
5280 #if JS_HAS_DESTRUCTURING
5281 case TOK_RB:
5282 case TOK_RC:
5283 if (!EmitDestructuringOps(cx, cg, JSOP_NOP, pn2))
5284 return JS_FALSE;
5285 if (js_Emit1(cx, cg, JSOP_POP) < 0)
5286 return JS_FALSE;
5287 break;
5288 #endif
5290 case TOK_NAME:
5291 /* Inline and specialize BindNameToSlot for pn2. */
5292 JS_ASSERT(pn2->pn_cookie != FREE_UPVAR_COOKIE);
5293 EMIT_UINT16_IMM_OP(JSOP_SETLOCALPOP, pn2->pn_cookie);
5294 break;
5296 default:
5297 JS_ASSERT(0);
5300 /* Emit the guard expression, if there is one. */
5301 if (pn->pn_kid2) {
5302 if (!js_EmitTree(cx, cg, pn->pn_kid2))
5303 return JS_FALSE;
5304 if (!js_SetSrcNoteOffset(cx, cg, CATCHNOTE(*stmt), 0,
5305 CG_OFFSET(cg) - catchStart)) {
5306 return JS_FALSE;
5308 /* ifeq <next block> */
5309 guardJump = EmitJump(cx, cg, JSOP_IFEQ, 0);
5310 if (guardJump < 0)
5311 return JS_FALSE;
5312 GUARDJUMP(*stmt) = guardJump;
5314 /* Pop duplicated exception object as we no longer need it. */
5315 if (js_Emit1(cx, cg, JSOP_POP) < 0)
5316 return JS_FALSE;
5319 /* Emit the catch body. */
5320 if (!js_EmitTree(cx, cg, pn->pn_kid3))
5321 return JS_FALSE;
5324 * Annotate the JSOP_LEAVEBLOCK that will be emitted as we unwind via
5325 * our TOK_LEXICALSCOPE parent, so the decompiler knows to pop.
5327 off = cg->stackDepth;
5328 if (js_NewSrcNote2(cx, cg, SRC_CATCH, off) < 0)
5329 return JS_FALSE;
5330 break;
5333 case TOK_VAR:
5334 if (!EmitVariables(cx, cg, pn, JS_FALSE, &noteIndex))
5335 return JS_FALSE;
5336 break;
5338 case TOK_RETURN:
5339 /* Push a return value */
5340 pn2 = pn->pn_kid;
5341 if (pn2) {
5342 if (!js_EmitTree(cx, cg, pn2))
5343 return JS_FALSE;
5344 } else {
5345 if (js_Emit1(cx, cg, JSOP_PUSH) < 0)
5346 return JS_FALSE;
5350 * EmitNonLocalJumpFixup may add fixup bytecode to close open try
5351 * blocks having finally clauses and to exit intermingled let blocks.
5352 * We can't simply transfer control flow to our caller in that case,
5353 * because we must gosub to those finally clauses from inner to outer,
5354 * with the correct stack pointer (i.e., after popping any with,
5355 * for/in, etc., slots nested inside the finally's try).
5357 * In this case we mutate JSOP_RETURN into JSOP_SETRVAL and add an
5358 * extra JSOP_RETRVAL after the fixups.
5360 top = CG_OFFSET(cg);
5361 if (js_Emit1(cx, cg, JSOP_RETURN) < 0)
5362 return JS_FALSE;
5363 if (!EmitNonLocalJumpFixup(cx, cg, NULL))
5364 return JS_FALSE;
5365 if (top + JSOP_RETURN_LENGTH != CG_OFFSET(cg)) {
5366 CG_BASE(cg)[top] = JSOP_SETRVAL;
5367 if (js_Emit1(cx, cg, JSOP_RETRVAL) < 0)
5368 return JS_FALSE;
5370 break;
5372 #if JS_HAS_GENERATORS
5373 case TOK_YIELD:
5374 if (!(cg->flags & TCF_IN_FUNCTION)) {
5375 js_ReportCompileErrorNumber(cx, CG_TS(cg), pn, JSREPORT_ERROR,
5376 JSMSG_BAD_RETURN_OR_YIELD,
5377 js_yield_str);
5378 return JS_FALSE;
5380 if (pn->pn_kid) {
5381 if (!js_EmitTree(cx, cg, pn->pn_kid))
5382 return JS_FALSE;
5383 } else {
5384 if (js_Emit1(cx, cg, JSOP_PUSH) < 0)
5385 return JS_FALSE;
5387 if (pn->pn_hidden && js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
5388 return JS_FALSE;
5389 if (js_Emit1(cx, cg, JSOP_YIELD) < 0)
5390 return JS_FALSE;
5391 break;
5392 #endif
5394 case TOK_LC:
5396 #if JS_HAS_XML_SUPPORT
5397 if (pn->pn_arity == PN_UNARY) {
5398 if (!js_EmitTree(cx, cg, pn->pn_kid))
5399 return JS_FALSE;
5400 if (js_Emit1(cx, cg, PN_OP(pn)) < 0)
5401 return JS_FALSE;
5402 break;
5404 #endif
5406 JS_ASSERT(pn->pn_arity == PN_LIST);
5408 noteIndex = -1;
5409 tmp = CG_OFFSET(cg);
5410 if (pn->pn_xflags & PNX_NEEDBRACES) {
5411 noteIndex = js_NewSrcNote2(cx, cg, SRC_BRACE, 0);
5412 if (noteIndex < 0 || js_Emit1(cx, cg, JSOP_NOP) < 0)
5413 return JS_FALSE;
5416 js_PushStatement(cg, &stmtInfo, STMT_BLOCK, top);
5418 JSParseNode *pnchild = pn->pn_head;
5419 if (pn->pn_xflags & PNX_FUNCDEFS) {
5421 * This block contains top-level function definitions. To ensure
5422 * that we emit the bytecode defining them before the rest of code
5423 * in the block we use a separate pass over functions. During the
5424 * main pass later the emitter will add JSOP_NOP with source notes
5425 * for the function to preserve the original functions position
5426 * when decompiling.
5428 * Currently this is used only for functions, as compile-as-we go
5429 * mode for scripts does not allow separate emitter passes.
5431 JS_ASSERT(cg->flags & TCF_IN_FUNCTION);
5432 if (pn->pn_xflags & PNX_DESTRUCT) {
5434 * Assign the destructuring arguments before defining any
5435 * functions, see bug 419662.
5437 JS_ASSERT(pnchild->pn_type == TOK_SEMI);
5438 JS_ASSERT(pnchild->pn_kid->pn_type == TOK_COMMA);
5439 if (!js_EmitTree(cx, cg, pnchild))
5440 return JS_FALSE;
5441 pnchild = pnchild->pn_next;
5444 for (pn2 = pnchild; pn2; pn2 = pn2->pn_next) {
5445 if (pn2->pn_type == TOK_FUNCTION) {
5446 if (pn2->pn_op == JSOP_NOP) {
5447 if (!js_EmitTree(cx, cg, pn2))
5448 return JS_FALSE;
5449 } else {
5451 * JSOP_DEFFUN in a top-level block with function
5452 * definitions appears, for example, when "if (true)"
5453 * is optimized away from "if (true) function x() {}".
5454 * See bug 428424.
5456 JS_ASSERT(pn2->pn_op == JSOP_DEFFUN);
5461 for (pn2 = pnchild; pn2; pn2 = pn2->pn_next) {
5462 if (!js_EmitTree(cx, cg, pn2))
5463 return JS_FALSE;
5466 if (noteIndex >= 0 &&
5467 !js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0,
5468 CG_OFFSET(cg) - tmp)) {
5469 return JS_FALSE;
5472 ok = js_PopStatementCG(cx, cg);
5473 break;
5476 case TOK_SEQ:
5477 JS_ASSERT(pn->pn_arity == PN_LIST);
5478 js_PushStatement(cg, &stmtInfo, STMT_SEQ, top);
5479 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
5480 if (!js_EmitTree(cx, cg, pn2))
5481 return JS_FALSE;
5483 ok = js_PopStatementCG(cx, cg);
5484 break;
5486 case TOK_SEMI:
5487 pn2 = pn->pn_kid;
5488 if (pn2) {
5490 * Top-level or called-from-a-native JS_Execute/EvaluateScript,
5491 * debugger, and eval frames may need the value of the ultimate
5492 * expression statement as the script's result, despite the fact
5493 * that it appears useless to the compiler.
5495 * API users may also set the JSOPTION_NO_SCRIPT_RVAL option when
5496 * calling JS_Compile* to suppress JSOP_POPV.
5498 useful = wantval = !(cg->flags & (TCF_IN_FUNCTION | TCF_NO_SCRIPT_RVAL));
5499 if (!useful) {
5500 if (!CheckSideEffects(cx, cg, pn2, &useful))
5501 return JS_FALSE;
5505 * Don't eliminate apparently useless expressions if they are
5506 * labeled expression statements. The tc->topStmt->update test
5507 * catches the case where we are nesting in js_EmitTree for a
5508 * labeled compound statement.
5510 if (!useful &&
5511 (!cg->topStmt ||
5512 cg->topStmt->type != STMT_LABEL ||
5513 cg->topStmt->update < CG_OFFSET(cg))) {
5514 CG_CURRENT_LINE(cg) = pn2->pn_pos.begin.lineno;
5515 if (!js_ReportCompileErrorNumber(cx, CG_TS(cg), pn2,
5516 JSREPORT_WARNING |
5517 JSREPORT_STRICT,
5518 JSMSG_USELESS_EXPR)) {
5519 return JS_FALSE;
5521 } else {
5522 op = wantval ? JSOP_POPV : JSOP_POP;
5523 #if JS_HAS_DESTRUCTURING
5524 if (!wantval &&
5525 pn2->pn_type == TOK_ASSIGN &&
5526 !MaybeEmitGroupAssignment(cx, cg, op, pn2, &op)) {
5527 return JS_FALSE;
5529 #endif
5530 if (op != JSOP_NOP) {
5531 if (!js_EmitTree(cx, cg, pn2))
5532 return JS_FALSE;
5533 if (js_Emit1(cx, cg, op) < 0)
5534 return JS_FALSE;
5538 break;
5540 case TOK_COLON:
5541 /* Emit an annotated nop so we know to decompile a label. */
5542 atom = pn->pn_atom;
5543 ale = cg->atomList.add(cg->compiler, atom);
5544 if (!ale)
5545 return JS_FALSE;
5546 pn2 = pn->expr();
5547 noteType = (pn2->pn_type == TOK_LC ||
5548 (pn2->pn_type == TOK_LEXICALSCOPE &&
5549 pn2->expr()->pn_type == TOK_LC))
5550 ? SRC_LABELBRACE
5551 : SRC_LABEL;
5552 noteIndex = js_NewSrcNote2(cx, cg, noteType,
5553 (ptrdiff_t) ALE_INDEX(ale));
5554 if (noteIndex < 0 ||
5555 js_Emit1(cx, cg, JSOP_NOP) < 0) {
5556 return JS_FALSE;
5559 /* Emit code for the labeled statement. */
5560 js_PushStatement(cg, &stmtInfo, STMT_LABEL, CG_OFFSET(cg));
5561 stmtInfo.label = atom;
5562 if (!js_EmitTree(cx, cg, pn2))
5563 return JS_FALSE;
5564 if (!js_PopStatementCG(cx, cg))
5565 return JS_FALSE;
5567 /* If the statement was compound, emit a note for the end brace. */
5568 if (noteType == SRC_LABELBRACE) {
5569 if (js_NewSrcNote(cx, cg, SRC_ENDBRACE) < 0 ||
5570 js_Emit1(cx, cg, JSOP_NOP) < 0) {
5571 return JS_FALSE;
5574 break;
5576 case TOK_COMMA:
5578 * Emit SRC_PCDELTA notes on each JSOP_POP between comma operands.
5579 * These notes help the decompiler bracket the bytecodes generated
5580 * from each sub-expression that follows a comma.
5582 off = noteIndex = -1;
5583 for (pn2 = pn->pn_head; ; pn2 = pn2->pn_next) {
5584 if (!js_EmitTree(cx, cg, pn2))
5585 return JS_FALSE;
5586 tmp = CG_OFFSET(cg);
5587 if (noteIndex >= 0) {
5588 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, tmp-off))
5589 return JS_FALSE;
5591 if (!pn2->pn_next)
5592 break;
5593 off = tmp;
5594 noteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
5595 if (noteIndex < 0 ||
5596 js_Emit1(cx, cg, JSOP_POP) < 0) {
5597 return JS_FALSE;
5600 break;
5602 case TOK_ASSIGN:
5604 * Check left operand type and generate specialized code for it.
5605 * Specialize to avoid ECMA "reference type" values on the operand
5606 * stack, which impose pervasive runtime "GetValue" costs.
5608 pn2 = pn->pn_left;
5609 JS_ASSERT(pn2->pn_type != TOK_RP);
5610 atomIndex = (jsatomid) -1; /* quell GCC overwarning */
5611 switch (pn2->pn_type) {
5612 case TOK_NAME:
5613 if (!BindNameToSlot(cx, cg, pn2))
5614 return JS_FALSE;
5615 if (pn2->pn_cookie != FREE_UPVAR_COOKIE) {
5616 atomIndex = (jsatomid) pn2->pn_cookie;
5617 } else {
5618 ale = cg->atomList.add(cg->compiler, pn2->pn_atom);
5619 if (!ale)
5620 return JS_FALSE;
5621 atomIndex = ALE_INDEX(ale);
5622 if (!pn2->isConst())
5623 EMIT_INDEX_OP(JSOP_BINDNAME, atomIndex);
5625 break;
5626 case TOK_DOT:
5627 if (!js_EmitTree(cx, cg, pn2->expr()))
5628 return JS_FALSE;
5629 ale = cg->atomList.add(cg->compiler, pn2->pn_atom);
5630 if (!ale)
5631 return JS_FALSE;
5632 atomIndex = ALE_INDEX(ale);
5633 break;
5634 case TOK_LB:
5635 JS_ASSERT(pn2->pn_arity == PN_BINARY);
5636 if (!js_EmitTree(cx, cg, pn2->pn_left))
5637 return JS_FALSE;
5638 if (!js_EmitTree(cx, cg, pn2->pn_right))
5639 return JS_FALSE;
5640 break;
5641 #if JS_HAS_DESTRUCTURING
5642 case TOK_RB:
5643 case TOK_RC:
5644 break;
5645 #endif
5646 #if JS_HAS_LVALUE_RETURN
5647 case TOK_LP:
5648 if (!js_EmitTree(cx, cg, pn2))
5649 return JS_FALSE;
5650 break;
5651 #endif
5652 #if JS_HAS_XML_SUPPORT
5653 case TOK_UNARYOP:
5654 JS_ASSERT(pn2->pn_op == JSOP_SETXMLNAME);
5655 if (!js_EmitTree(cx, cg, pn2->pn_kid))
5656 return JS_FALSE;
5657 if (js_Emit1(cx, cg, JSOP_BINDXMLNAME) < 0)
5658 return JS_FALSE;
5659 break;
5660 #endif
5661 default:
5662 JS_ASSERT(0);
5665 op = PN_OP(pn);
5666 #if JS_HAS_GETTER_SETTER
5667 if (op == JSOP_GETTER || op == JSOP_SETTER) {
5668 if (pn2->pn_type == TOK_NAME && PN_OP(pn2) != JSOP_SETNAME) {
5670 * x getter = y where x is a local or let variable is not
5671 * supported.
5673 js_ReportCompileErrorNumber(cx,
5674 TS(cg->compiler),
5675 pn2, JSREPORT_ERROR,
5676 JSMSG_BAD_GETTER_OR_SETTER,
5677 (op == JSOP_GETTER)
5678 ? js_getter_str
5679 : js_setter_str);
5680 return JS_FALSE;
5683 /* We'll emit these prefix bytecodes after emitting the r.h.s. */
5684 } else
5685 #endif
5686 /* If += or similar, dup the left operand and get its value. */
5687 if (op != JSOP_NOP) {
5688 switch (pn2->pn_type) {
5689 case TOK_NAME:
5690 if (pn2->isConst()) {
5691 if (PN_OP(pn2) == JSOP_CALLEE) {
5692 if (js_Emit1(cx, cg, JSOP_CALLEE) < 0)
5693 return JS_FALSE;
5694 } else {
5695 EMIT_INDEX_OP(PN_OP(pn2), atomIndex);
5697 } else if (PN_OP(pn2) == JSOP_SETNAME) {
5698 if (js_Emit1(cx, cg, JSOP_DUP) < 0)
5699 return JS_FALSE;
5700 EMIT_INDEX_OP(JSOP_GETXPROP, atomIndex);
5701 } else {
5702 EMIT_UINT16_IMM_OP((PN_OP(pn2) == JSOP_SETGVAR)
5703 ? JSOP_GETGVAR
5704 : (PN_OP(pn2) == JSOP_GETUPVAR)
5705 ? JSOP_GETUPVAR
5706 : (PN_OP(pn2) == JSOP_SETARG)
5707 ? JSOP_GETARG
5708 : JSOP_GETLOCAL,
5709 atomIndex);
5711 break;
5712 case TOK_DOT:
5713 if (js_Emit1(cx, cg, JSOP_DUP) < 0)
5714 return JS_FALSE;
5715 if (pn2->pn_atom == cx->runtime->atomState.lengthAtom) {
5716 if (js_Emit1(cx, cg, JSOP_LENGTH) < 0)
5717 return JS_FALSE;
5718 } else if (pn2->pn_atom == cx->runtime->atomState.protoAtom) {
5719 if (!EmitIndexOp(cx, JSOP_QNAMEPART, atomIndex, cg))
5720 return JS_FALSE;
5721 if (js_Emit1(cx, cg, JSOP_GETELEM) < 0)
5722 return JS_FALSE;
5723 } else {
5724 EMIT_INDEX_OP(JSOP_GETPROP, atomIndex);
5726 break;
5727 case TOK_LB:
5728 #if JS_HAS_LVALUE_RETURN
5729 case TOK_LP:
5730 #endif
5731 #if JS_HAS_XML_SUPPORT
5732 case TOK_UNARYOP:
5733 #endif
5734 if (js_Emit1(cx, cg, JSOP_DUP2) < 0)
5735 return JS_FALSE;
5736 if (js_Emit1(cx, cg, JSOP_GETELEM) < 0)
5737 return JS_FALSE;
5738 break;
5739 default:;
5743 /* Now emit the right operand (it may affect the namespace). */
5744 if (!js_EmitTree(cx, cg, pn->pn_right))
5745 return JS_FALSE;
5747 /* If += etc., emit the binary operator with a decompiler note. */
5748 if (op != JSOP_NOP) {
5750 * Take care to avoid SRC_ASSIGNOP if the left-hand side is a const
5751 * declared in the current compilation unit, as in this case (just
5752 * a bit further below) we will avoid emitting the assignment op.
5754 if (pn2->pn_type != TOK_NAME || !pn2->isConst()) {
5755 if (js_NewSrcNote(cx, cg, SRC_ASSIGNOP) < 0)
5756 return JS_FALSE;
5758 if (js_Emit1(cx, cg, op) < 0)
5759 return JS_FALSE;
5762 /* Left parts such as a.b.c and a[b].c need a decompiler note. */
5763 if (pn2->pn_type != TOK_NAME &&
5764 #if JS_HAS_DESTRUCTURING
5765 pn2->pn_type != TOK_RB &&
5766 pn2->pn_type != TOK_RC &&
5767 #endif
5768 js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0) {
5769 return JS_FALSE;
5772 /* Finally, emit the specialized assignment bytecode. */
5773 switch (pn2->pn_type) {
5774 case TOK_NAME:
5775 if (pn2->isConst())
5776 break;
5777 /* FALL THROUGH */
5778 case TOK_DOT:
5779 EMIT_INDEX_OP(PN_OP(pn2), atomIndex);
5780 break;
5781 case TOK_LB:
5782 #if JS_HAS_LVALUE_RETURN
5783 case TOK_LP:
5784 #endif
5785 if (js_Emit1(cx, cg, JSOP_SETELEM) < 0)
5786 return JS_FALSE;
5787 break;
5788 #if JS_HAS_DESTRUCTURING
5789 case TOK_RB:
5790 case TOK_RC:
5791 if (!EmitDestructuringOps(cx, cg, JSOP_SETNAME, pn2))
5792 return JS_FALSE;
5793 break;
5794 #endif
5795 #if JS_HAS_XML_SUPPORT
5796 case TOK_UNARYOP:
5797 if (js_Emit1(cx, cg, JSOP_SETXMLNAME) < 0)
5798 return JS_FALSE;
5799 break;
5800 #endif
5801 default:
5802 JS_ASSERT(0);
5804 break;
5806 case TOK_HOOK:
5807 /* Emit the condition, then branch if false to the else part. */
5808 if (!js_EmitTree(cx, cg, pn->pn_kid1))
5809 return JS_FALSE;
5810 noteIndex = js_NewSrcNote(cx, cg, SRC_COND);
5811 if (noteIndex < 0)
5812 return JS_FALSE;
5813 beq = EmitJump(cx, cg, JSOP_IFEQ, 0);
5814 if (beq < 0 || !js_EmitTree(cx, cg, pn->pn_kid2))
5815 return JS_FALSE;
5817 /* Jump around else, fixup the branch, emit else, fixup jump. */
5818 jmp = EmitJump(cx, cg, JSOP_GOTO, 0);
5819 if (jmp < 0)
5820 return JS_FALSE;
5821 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, beq);
5824 * Because each branch pushes a single value, but our stack budgeting
5825 * analysis ignores branches, we now have to adjust cg->stackDepth to
5826 * ignore the value pushed by the first branch. Execution will follow
5827 * only one path, so we must decrement cg->stackDepth.
5829 * Failing to do this will foil code, such as the try/catch/finally
5830 * exception handling code generator, that samples cg->stackDepth for
5831 * use at runtime (JSOP_SETSP), or in let expression and block code
5832 * generation, which must use the stack depth to compute local stack
5833 * indexes correctly.
5835 JS_ASSERT(cg->stackDepth > 0);
5836 cg->stackDepth--;
5837 if (!js_EmitTree(cx, cg, pn->pn_kid3))
5838 return JS_FALSE;
5839 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
5840 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, jmp - beq))
5841 return JS_FALSE;
5842 break;
5844 case TOK_OR:
5845 case TOK_AND:
5847 * JSOP_OR converts the operand on the stack to boolean, and if true,
5848 * leaves the original operand value on the stack and jumps; otherwise
5849 * it pops and falls into the next bytecode, which evaluates the right
5850 * operand. The jump goes around the right operand evaluation.
5852 * JSOP_AND converts the operand on the stack to boolean, and if false,
5853 * leaves the original operand value on the stack and jumps; otherwise
5854 * it pops and falls into the right operand's bytecode.
5856 if (pn->pn_arity == PN_BINARY) {
5857 if (!js_EmitTree(cx, cg, pn->pn_left))
5858 return JS_FALSE;
5859 top = EmitJump(cx, cg, JSOP_BACKPATCH_POP, 0);
5860 if (top < 0)
5861 return JS_FALSE;
5862 if (!js_EmitTree(cx, cg, pn->pn_right))
5863 return JS_FALSE;
5864 off = CG_OFFSET(cg);
5865 pc = CG_CODE(cg, top);
5866 CHECK_AND_SET_JUMP_OFFSET(cx, cg, pc, off - top);
5867 *pc = pn->pn_op;
5868 } else {
5869 JS_ASSERT(pn->pn_arity == PN_LIST);
5870 JS_ASSERT(pn->pn_head->pn_next->pn_next);
5872 /* Left-associative operator chain: avoid too much recursion. */
5873 pn2 = pn->pn_head;
5874 if (!js_EmitTree(cx, cg, pn2))
5875 return JS_FALSE;
5876 top = EmitJump(cx, cg, JSOP_BACKPATCH_POP, 0);
5877 if (top < 0)
5878 return JS_FALSE;
5880 /* Emit nodes between the head and the tail. */
5881 jmp = top;
5882 while ((pn2 = pn2->pn_next)->pn_next) {
5883 if (!js_EmitTree(cx, cg, pn2))
5884 return JS_FALSE;
5885 off = EmitJump(cx, cg, JSOP_BACKPATCH_POP, 0);
5886 if (off < 0)
5887 return JS_FALSE;
5888 if (!SetBackPatchDelta(cx, cg, CG_CODE(cg, jmp), off - jmp))
5889 return JS_FALSE;
5890 jmp = off;
5893 if (!js_EmitTree(cx, cg, pn2))
5894 return JS_FALSE;
5896 pn2 = pn->pn_head;
5897 off = CG_OFFSET(cg);
5898 do {
5899 pc = CG_CODE(cg, top);
5900 tmp = GetJumpOffset(cg, pc);
5901 CHECK_AND_SET_JUMP_OFFSET(cx, cg, pc, off - top);
5902 *pc = pn->pn_op;
5903 top += tmp;
5904 } while ((pn2 = pn2->pn_next)->pn_next);
5906 break;
5908 case TOK_BITOR:
5909 case TOK_BITXOR:
5910 case TOK_BITAND:
5911 case TOK_EQOP:
5912 case TOK_RELOP:
5913 case TOK_IN:
5914 case TOK_INSTANCEOF:
5915 case TOK_SHOP:
5916 case TOK_PLUS:
5917 case TOK_MINUS:
5918 case TOK_STAR:
5919 case TOK_DIVOP:
5920 if (pn->pn_arity == PN_LIST) {
5921 /* Left-associative operator chain: avoid too much recursion. */
5922 pn2 = pn->pn_head;
5923 if (!js_EmitTree(cx, cg, pn2))
5924 return JS_FALSE;
5925 op = PN_OP(pn);
5926 while ((pn2 = pn2->pn_next) != NULL) {
5927 if (!js_EmitTree(cx, cg, pn2))
5928 return JS_FALSE;
5929 if (js_Emit1(cx, cg, op) < 0)
5930 return JS_FALSE;
5932 } else {
5933 #if JS_HAS_XML_SUPPORT
5934 uintN oldflags;
5936 case TOK_DBLCOLON:
5937 if (pn->pn_arity == PN_NAME) {
5938 if (!js_EmitTree(cx, cg, pn->expr()))
5939 return JS_FALSE;
5940 if (!EmitAtomOp(cx, pn, PN_OP(pn), cg))
5941 return JS_FALSE;
5942 break;
5946 * Binary :: has a right operand that brackets arbitrary code,
5947 * possibly including a let (a = b) ... expression. We must clear
5948 * TCF_IN_FOR_INIT to avoid mis-compiling such beasts.
5950 oldflags = cg->flags;
5951 cg->flags &= ~TCF_IN_FOR_INIT;
5952 #endif
5954 /* Binary operators that evaluate both operands unconditionally. */
5955 if (!js_EmitTree(cx, cg, pn->pn_left))
5956 return JS_FALSE;
5957 if (!js_EmitTree(cx, cg, pn->pn_right))
5958 return JS_FALSE;
5959 #if JS_HAS_XML_SUPPORT
5960 cg->flags |= oldflags & TCF_IN_FOR_INIT;
5961 #endif
5962 if (js_Emit1(cx, cg, PN_OP(pn)) < 0)
5963 return JS_FALSE;
5965 break;
5967 case TOK_THROW:
5968 #if JS_HAS_XML_SUPPORT
5969 case TOK_AT:
5970 case TOK_DEFAULT:
5971 JS_ASSERT(pn->pn_arity == PN_UNARY);
5972 /* FALL THROUGH */
5973 #endif
5974 case TOK_UNARYOP:
5976 uintN oldflags;
5978 /* Unary op, including unary +/-. */
5979 op = PN_OP(pn);
5980 #if JS_HAS_XML_SUPPORT
5981 if (op == JSOP_XMLNAME) {
5982 if (!EmitXMLName(cx, pn, op, cg))
5983 return JS_FALSE;
5984 break;
5986 #endif
5987 pn2 = pn->pn_kid;
5988 if (op == JSOP_TYPEOF) {
5989 for (pn3 = pn2; pn3->pn_type == TOK_RP; pn3 = pn3->pn_kid)
5990 continue;
5991 if (pn3->pn_type != TOK_NAME)
5992 op = JSOP_TYPEOFEXPR;
5994 oldflags = cg->flags;
5995 cg->flags &= ~TCF_IN_FOR_INIT;
5996 if (!js_EmitTree(cx, cg, pn2))
5997 return JS_FALSE;
5998 cg->flags |= oldflags & TCF_IN_FOR_INIT;
5999 if (js_Emit1(cx, cg, op) < 0)
6000 return JS_FALSE;
6001 break;
6004 case TOK_INC:
6005 case TOK_DEC:
6006 /* Emit lvalue-specialized code for ++/-- operators. */
6007 pn2 = pn->pn_kid;
6008 JS_ASSERT(pn2->pn_type != TOK_RP);
6009 op = PN_OP(pn);
6010 switch (pn2->pn_type) {
6011 default:
6012 JS_ASSERT(pn2->pn_type == TOK_NAME);
6013 pn2->pn_op = op;
6014 if (!BindNameToSlot(cx, cg, pn2))
6015 return JS_FALSE;
6016 op = PN_OP(pn2);
6017 if (op == JSOP_CALLEE) {
6018 if (js_Emit1(cx, cg, op) < 0)
6019 return JS_FALSE;
6020 } else if (pn2->pn_cookie != FREE_UPVAR_COOKIE) {
6021 atomIndex = (jsatomid) pn2->pn_cookie;
6022 EMIT_UINT16_IMM_OP(op, atomIndex);
6023 } else {
6024 JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
6025 if (!EmitAtomOp(cx, pn2, op, cg))
6026 return JS_FALSE;
6027 break;
6029 if (pn2->isConst()) {
6030 if (js_Emit1(cx, cg, JSOP_POS) < 0)
6031 return JS_FALSE;
6032 op = PN_OP(pn);
6033 if (!(js_CodeSpec[op].format & JOF_POST)) {
6034 if (js_Emit1(cx, cg, JSOP_ONE) < 0)
6035 return JS_FALSE;
6036 op = (js_CodeSpec[op].format & JOF_INC) ? JSOP_ADD : JSOP_SUB;
6037 if (js_Emit1(cx, cg, op) < 0)
6038 return JS_FALSE;
6041 break;
6042 case TOK_DOT:
6043 if (!EmitPropOp(cx, pn2, op, cg, JS_FALSE))
6044 return JS_FALSE;
6045 break;
6046 case TOK_LB:
6047 if (!EmitElemOp(cx, pn2, op, cg))
6048 return JS_FALSE;
6049 break;
6050 #if JS_HAS_LVALUE_RETURN
6051 case TOK_LP:
6052 if (!js_EmitTree(cx, cg, pn2))
6053 return JS_FALSE;
6054 if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
6055 CG_OFFSET(cg) - pn2->pn_offset) < 0) {
6056 return JS_FALSE;
6058 if (js_Emit1(cx, cg, op) < 0)
6059 return JS_FALSE;
6060 break;
6061 #endif
6062 #if JS_HAS_XML_SUPPORT
6063 case TOK_UNARYOP:
6064 JS_ASSERT(pn2->pn_op == JSOP_SETXMLNAME);
6065 if (!js_EmitTree(cx, cg, pn2->pn_kid))
6066 return JS_FALSE;
6067 if (js_Emit1(cx, cg, JSOP_BINDXMLNAME) < 0)
6068 return JS_FALSE;
6069 if (js_Emit1(cx, cg, op) < 0)
6070 return JS_FALSE;
6071 break;
6072 #endif
6074 break;
6076 case TOK_DELETE:
6078 * Under ECMA 3, deleting a non-reference returns true -- but alas we
6079 * must evaluate the operand if it appears it might have side effects.
6081 pn2 = pn->pn_kid;
6082 switch (pn2->pn_type) {
6083 case TOK_NAME:
6084 if (!BindNameToSlot(cx, cg, pn2))
6085 return JS_FALSE;
6086 op = PN_OP(pn2);
6087 if (op == JSOP_FALSE) {
6088 if (js_Emit1(cx, cg, op) < 0)
6089 return JS_FALSE;
6090 } else {
6091 if (!EmitAtomOp(cx, pn2, op, cg))
6092 return JS_FALSE;
6094 break;
6095 case TOK_DOT:
6096 if (!EmitPropOp(cx, pn2, JSOP_DELPROP, cg, JS_FALSE))
6097 return JS_FALSE;
6098 break;
6099 #if JS_HAS_XML_SUPPORT
6100 case TOK_DBLDOT:
6101 if (!EmitElemOp(cx, pn2, JSOP_DELDESC, cg))
6102 return JS_FALSE;
6103 break;
6104 #endif
6105 #if JS_HAS_LVALUE_RETURN
6106 case TOK_LP:
6107 top = CG_OFFSET(cg);
6108 if (!js_EmitTree(cx, cg, pn2))
6109 return JS_FALSE;
6110 if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
6111 return JS_FALSE;
6112 if (js_Emit1(cx, cg, JSOP_DELELEM) < 0)
6113 return JS_FALSE;
6114 break;
6115 #endif
6116 case TOK_LB:
6117 if (!EmitElemOp(cx, pn2, JSOP_DELELEM, cg))
6118 return JS_FALSE;
6119 break;
6120 default:
6122 * If useless, just emit JSOP_TRUE; otherwise convert delete foo()
6123 * to foo(), true (a comma expression, requiring SRC_PCDELTA).
6125 useful = JS_FALSE;
6126 if (!CheckSideEffects(cx, cg, pn2, &useful))
6127 return JS_FALSE;
6128 if (!useful) {
6129 off = noteIndex = -1;
6130 } else {
6131 if (!js_EmitTree(cx, cg, pn2))
6132 return JS_FALSE;
6133 off = CG_OFFSET(cg);
6134 noteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
6135 if (noteIndex < 0 || js_Emit1(cx, cg, JSOP_POP) < 0)
6136 return JS_FALSE;
6138 if (js_Emit1(cx, cg, JSOP_TRUE) < 0)
6139 return JS_FALSE;
6140 if (noteIndex >= 0) {
6141 tmp = CG_OFFSET(cg);
6142 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, tmp-off))
6143 return JS_FALSE;
6146 break;
6148 #if JS_HAS_XML_SUPPORT
6149 case TOK_FILTER:
6150 if (!js_EmitTree(cx, cg, pn->pn_left))
6151 return JS_FALSE;
6152 jmp = js_Emit3(cx, cg, JSOP_FILTER, 0, 0);
6153 if (jmp < 0)
6154 return JS_FALSE;
6155 top = CG_OFFSET(cg);
6156 if (!js_Emit1(cx, cg, JSOP_LOOP))
6157 return JS_FALSE;
6158 if (!js_EmitTree(cx, cg, pn->pn_right))
6159 return JS_FALSE;
6160 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
6161 if (EmitJump(cx, cg, JSOP_ENDFILTER, top - CG_OFFSET(cg)) < 0)
6162 return JS_FALSE;
6163 break;
6164 #endif
6166 case TOK_DOT:
6168 * Pop a stack operand, convert it to object, get a property named by
6169 * this bytecode's immediate-indexed atom operand, and push its value
6170 * (not a reference to it).
6172 ok = EmitPropOp(cx, pn, PN_OP(pn), cg, JS_FALSE);
6173 break;
6175 case TOK_LB:
6176 #if JS_HAS_XML_SUPPORT
6177 case TOK_DBLDOT:
6178 #endif
6180 * Pop two operands, convert the left one to object and the right one
6181 * to property name (atom or tagged int), get the named property, and
6182 * push its value. Set the "obj" register to the result of ToObject
6183 * on the left operand.
6185 ok = EmitElemOp(cx, pn, PN_OP(pn), cg);
6186 break;
6188 case TOK_NEW:
6189 case TOK_LP:
6191 uintN oldflags;
6194 * Emit function call or operator new (constructor call) code.
6195 * First, emit code for the left operand to evaluate the callable or
6196 * constructable object expression.
6198 pn2 = pn->pn_head;
6199 switch (pn2->pn_type) {
6200 case TOK_NAME:
6201 if (!EmitNameOp(cx, cg, pn2, JS_TRUE))
6202 return JS_FALSE;
6203 break;
6204 case TOK_DOT:
6205 if (!EmitPropOp(cx, pn2, PN_OP(pn2), cg, JS_TRUE))
6206 return JS_FALSE;
6207 break;
6208 case TOK_LB:
6209 JS_ASSERT(pn2->pn_op == JSOP_GETELEM);
6210 if (!EmitElemOp(cx, pn2, JSOP_CALLELEM, cg))
6211 return JS_FALSE;
6212 break;
6213 case TOK_UNARYOP:
6214 #if JS_HAS_XML_SUPPORT
6215 if (pn2->pn_op == JSOP_XMLNAME) {
6216 if (!EmitXMLName(cx, pn2, JSOP_CALLXMLNAME, cg))
6217 return JS_FALSE;
6218 break;
6220 #endif
6221 /* FALL THROUGH */
6222 default:
6224 * Push null as a placeholder for the global object, per ECMA-262
6225 * 11.2.3 step 6.
6227 if (!js_EmitTree(cx, cg, pn2))
6228 return JS_FALSE;
6229 if (js_Emit1(cx, cg, JSOP_NULL) < 0)
6230 return JS_FALSE;
6233 /* Remember start of callable-object bytecode for decompilation hint. */
6234 off = top;
6237 * Emit code for each argument in order, then emit the JSOP_*CALL or
6238 * JSOP_NEW bytecode with a two-byte immediate telling how many args
6239 * were pushed on the operand stack.
6241 oldflags = cg->flags;
6242 cg->flags &= ~TCF_IN_FOR_INIT;
6243 for (pn3 = pn2->pn_next; pn3; pn3 = pn3->pn_next) {
6244 if (!js_EmitTree(cx, cg, pn3))
6245 return JS_FALSE;
6247 cg->flags |= oldflags & TCF_IN_FOR_INIT;
6248 if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - off) < 0)
6249 return JS_FALSE;
6251 argc = pn->pn_count - 1;
6252 if (js_Emit3(cx, cg, PN_OP(pn), ARGC_HI(argc), ARGC_LO(argc)) < 0)
6253 return JS_FALSE;
6254 if (PN_OP(pn) == JSOP_EVAL)
6255 EMIT_UINT16_IMM_OP(JSOP_LINENO, pn->pn_pos.begin.lineno);
6256 break;
6259 case TOK_LEXICALSCOPE:
6261 JSObjectBox *objbox;
6262 uintN count;
6264 objbox = pn->pn_objbox;
6265 js_PushBlockScope(cg, &stmtInfo, objbox->object, CG_OFFSET(cg));
6268 * If this lexical scope is not for a catch block, let block or let
6269 * expression, or any kind of for loop (where the scope starts in the
6270 * head after the first part if for (;;), else in the body if for-in);
6271 * and if our container is top-level but not a function body, or else
6272 * a block statement; then emit a SRC_BRACE note. All other container
6273 * statements get braces by default from the decompiler.
6275 noteIndex = -1;
6276 type = PN_TYPE(pn->expr());
6277 if (type != TOK_CATCH && type != TOK_LET && type != TOK_FOR &&
6278 (!(stmt = stmtInfo.down)
6279 ? !(cg->flags & TCF_IN_FUNCTION)
6280 : stmt->type == STMT_BLOCK)) {
6281 #if defined DEBUG_brendan || defined DEBUG_mrbkap
6282 /* There must be no source note already output for the next op. */
6283 JS_ASSERT(CG_NOTE_COUNT(cg) == 0 ||
6284 CG_LAST_NOTE_OFFSET(cg) != CG_OFFSET(cg) ||
6285 !GettableNoteForNextOp(cg));
6286 #endif
6287 noteIndex = js_NewSrcNote2(cx, cg, SRC_BRACE, 0);
6288 if (noteIndex < 0)
6289 return JS_FALSE;
6292 JS_ASSERT(CG_OFFSET(cg) == top);
6293 if (!EmitEnterBlock(cx, pn, cg))
6294 return JS_FALSE;
6296 if (!js_EmitTree(cx, cg, pn->pn_expr))
6297 return JS_FALSE;
6299 op = PN_OP(pn);
6300 if (op == JSOP_LEAVEBLOCKEXPR) {
6301 if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
6302 return JS_FALSE;
6303 } else {
6304 if (noteIndex >= 0 &&
6305 !js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0,
6306 CG_OFFSET(cg) - top)) {
6307 return JS_FALSE;
6311 /* Emit the JSOP_LEAVEBLOCK or JSOP_LEAVEBLOCKEXPR opcode. */
6312 count = OBJ_BLOCK_COUNT(cx, objbox->object);
6313 EMIT_UINT16_IMM_OP(op, count);
6315 ok = js_PopStatementCG(cx, cg);
6316 break;
6319 #if JS_HAS_BLOCK_SCOPE
6320 case TOK_LET:
6321 /* Let statements have their variable declarations on the left. */
6322 if (pn->pn_arity == PN_BINARY) {
6323 pn2 = pn->pn_right;
6324 pn = pn->pn_left;
6325 } else {
6326 pn2 = NULL;
6329 /* Non-null pn2 means that pn is the variable list from a let head. */
6330 JS_ASSERT(pn->pn_arity == PN_LIST);
6331 if (!EmitVariables(cx, cg, pn, pn2 != NULL, &noteIndex))
6332 return JS_FALSE;
6334 /* Thus non-null pn2 is the body of the let block or expression. */
6335 tmp = CG_OFFSET(cg);
6336 if (pn2 && !js_EmitTree(cx, cg, pn2))
6337 return JS_FALSE;
6339 if (noteIndex >= 0 &&
6340 !js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0,
6341 CG_OFFSET(cg) - tmp)) {
6342 return JS_FALSE;
6344 break;
6345 #endif /* JS_HAS_BLOCK_SCOPE */
6347 #if JS_HAS_GENERATORS
6348 case TOK_ARRAYPUSH: {
6349 jsint slot;
6352 * The array object's stack index is in cg->arrayCompDepth. See below
6353 * under the array initialiser code generator for array comprehension
6354 * special casing.
6356 if (!js_EmitTree(cx, cg, pn->pn_kid))
6357 return JS_FALSE;
6358 slot = AdjustBlockSlot(cx, cg, cg->arrayCompDepth);
6359 if (slot < 0)
6360 return JS_FALSE;
6361 EMIT_UINT16_IMM_OP(PN_OP(pn), slot);
6362 break;
6364 #endif
6366 case TOK_RB:
6367 #if JS_HAS_GENERATORS
6368 case TOK_ARRAYCOMP:
6369 #endif
6371 * Emit code for [a, b, c] that is equivalent to constructing a new
6372 * array and in source order evaluating each element value and adding
6373 * it to the array, without invoking latent setters. We use the
6374 * JSOP_NEWINIT and JSOP_INITELEM bytecodes to ignore setters and to
6375 * avoid dup'ing and popping the array as each element is added, as
6376 * JSOP_SETELEM/JSOP_SETPROP would do.
6378 * If no sharp variable is defined, the initializer is not for an array
6379 * comprehension, the initializer is not overlarge, and the initializer
6380 * is not in global code (whose stack growth cannot be precisely modeled
6381 * due to the need to reserve space for global variables and regular
6382 * expressions), use JSOP_NEWARRAY to minimize opcodes and to create the
6383 * array using a fast, all-at-once process rather than a slow, element-
6384 * by-element process.
6386 #if JS_HAS_SHARP_VARS
6387 sharpnum = -1;
6388 do_emit_array:
6389 #endif
6391 op = (JS_LIKELY(pn->pn_count < JS_BIT(16)) && (cg->flags & TCF_IN_FUNCTION))
6392 ? JSOP_NEWARRAY
6393 : JSOP_NEWINIT;
6395 #if JS_HAS_GENERATORS
6396 if (pn->pn_type == TOK_ARRAYCOMP)
6397 op = JSOP_NEWINIT;
6398 #endif
6399 #if JS_HAS_SHARP_VARS
6400 JS_ASSERT_IF(sharpnum >= 0, cg->flags & TCF_HAS_SHARPS);
6401 if (cg->flags & TCF_HAS_SHARPS)
6402 op = JSOP_NEWINIT;
6403 #endif
6405 if (op == JSOP_NEWINIT) {
6406 if (js_Emit2(cx, cg, op, (jsbytecode) JSProto_Array) < 0)
6407 return JS_FALSE;
6408 #if JS_HAS_SHARP_VARS
6409 if (sharpnum >= 0)
6410 EMIT_UINT16_IMM_OP(JSOP_DEFSHARP, (jsatomid) sharpnum);
6411 # endif
6414 #if JS_HAS_GENERATORS
6415 if (pn->pn_type == TOK_ARRAYCOMP) {
6416 uintN saveDepth;
6419 * Pass the new array's stack index to the TOK_ARRAYPUSH case via
6420 * cg->arrayCompDepth, then simply traverse the TOK_FOR node and
6421 * its kids under pn2 to generate this comprehension.
6423 JS_ASSERT(cg->stackDepth > 0);
6424 saveDepth = cg->arrayCompDepth;
6425 cg->arrayCompDepth = (uint32) (cg->stackDepth - 1);
6426 if (!js_EmitTree(cx, cg, pn->pn_head))
6427 return JS_FALSE;
6428 cg->arrayCompDepth = saveDepth;
6430 /* Emit the usual op needed for decompilation. */
6431 if (js_Emit1(cx, cg, JSOP_ENDINIT) < 0)
6432 return JS_FALSE;
6433 break;
6435 #endif /* JS_HAS_GENERATORS */
6437 pn2 = pn->pn_head;
6438 for (atomIndex = 0; pn2; atomIndex++, pn2 = pn2->pn_next) {
6439 if (op == JSOP_NEWINIT && !EmitNumberOp(cx, atomIndex, cg))
6440 return JS_FALSE;
6441 if (pn2->pn_type == TOK_COMMA) {
6442 if (js_Emit1(cx, cg, JSOP_HOLE) < 0)
6443 return JS_FALSE;
6444 } else {
6445 if (!js_EmitTree(cx, cg, pn2))
6446 return JS_FALSE;
6448 if (op == JSOP_NEWINIT && js_Emit1(cx, cg, JSOP_INITELEM) < 0)
6449 return JS_FALSE;
6451 JS_ASSERT(atomIndex == pn->pn_count);
6453 if (pn->pn_xflags & PNX_ENDCOMMA) {
6454 /* Emit a source note so we know to decompile an extra comma. */
6455 if (js_NewSrcNote(cx, cg, SRC_CONTINUE) < 0)
6456 return JS_FALSE;
6459 if (op == JSOP_NEWINIT) {
6461 * Emit an op to finish the array and, secondarily, to aid in sharp
6462 * array cleanup (if JS_HAS_SHARP_VARS) and decompilation.
6464 if (js_Emit1(cx, cg, JSOP_ENDINIT) < 0)
6465 return JS_FALSE;
6466 break;
6469 JS_ASSERT(atomIndex < JS_BIT(16));
6470 EMIT_UINT16_IMM_OP(JSOP_NEWARRAY, atomIndex);
6471 break;
6473 case TOK_RC:
6474 #if JS_HAS_SHARP_VARS
6475 sharpnum = -1;
6476 do_emit_object:
6477 #endif
6478 #if JS_HAS_DESTRUCTURING_SHORTHAND
6479 if (pn->pn_xflags & PNX_DESTRUCT) {
6480 js_ReportCompileErrorNumber(cx, CG_TS(cg), pn, JSREPORT_ERROR,
6481 JSMSG_BAD_OBJECT_INIT);
6482 return JS_FALSE;
6484 #endif
6486 * Emit code for {p:a, '%q':b, 2:c} that is equivalent to constructing
6487 * a new object and in source order evaluating each property value and
6488 * adding the property to the object, without invoking latent setters.
6489 * We use the JSOP_NEWINIT and JSOP_INITELEM/JSOP_INITPROP bytecodes to
6490 * ignore setters and to avoid dup'ing and popping the object as each
6491 * property is added, as JSOP_SETELEM/JSOP_SETPROP would do.
6493 if (js_Emit2(cx, cg, JSOP_NEWINIT, (jsbytecode) JSProto_Object) < 0)
6494 return JS_FALSE;
6496 #if JS_HAS_SHARP_VARS
6497 if (sharpnum >= 0)
6498 EMIT_UINT16_IMM_OP(JSOP_DEFSHARP, (jsatomid) sharpnum);
6499 #endif
6501 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
6502 /* Emit an index for t[2] for later consumption by JSOP_INITELEM. */
6503 pn3 = pn2->pn_left;
6504 if (pn3->pn_type == TOK_NUMBER) {
6505 if (!EmitNumberOp(cx, pn3->pn_dval, cg))
6506 return JS_FALSE;
6509 /* Emit code for the property initializer. */
6510 if (!js_EmitTree(cx, cg, pn2->pn_right))
6511 return JS_FALSE;
6513 #if JS_HAS_GETTER_SETTER
6514 op = PN_OP(pn2);
6515 if (op == JSOP_GETTER || op == JSOP_SETTER) {
6516 if (js_Emit1(cx, cg, op) < 0)
6517 return JS_FALSE;
6519 #endif
6520 /* Annotate JSOP_INITELEM so we decompile 2:c and not just c. */
6521 if (pn3->pn_type == TOK_NUMBER) {
6522 if (js_NewSrcNote(cx, cg, SRC_INITPROP) < 0)
6523 return JS_FALSE;
6524 if (js_Emit1(cx, cg, JSOP_INITELEM) < 0)
6525 return JS_FALSE;
6526 } else {
6527 JS_ASSERT(pn3->pn_type == TOK_NAME ||
6528 pn3->pn_type == TOK_STRING);
6529 ale = cg->atomList.add(cg->compiler, pn3->pn_atom);
6530 if (!ale)
6531 return JS_FALSE;
6532 EMIT_INDEX_OP(JSOP_INITPROP, ALE_INDEX(ale));
6536 /* Emit an op for sharpArray cleanup and decompilation. */
6537 if (js_Emit1(cx, cg, JSOP_ENDINIT) < 0)
6538 return JS_FALSE;
6539 break;
6541 #if JS_HAS_SHARP_VARS
6542 case TOK_DEFSHARP:
6543 JS_ASSERT(cg->flags & TCF_HAS_SHARPS);
6544 sharpnum = pn->pn_num;
6545 pn = pn->pn_kid;
6546 if (pn->pn_type == TOK_RB)
6547 goto do_emit_array;
6548 # if JS_HAS_GENERATORS
6549 if (pn->pn_type == TOK_ARRAYCOMP)
6550 goto do_emit_array;
6551 # endif
6552 if (pn->pn_type == TOK_RC)
6553 goto do_emit_object;
6555 if (!js_EmitTree(cx, cg, pn))
6556 return JS_FALSE;
6557 EMIT_UINT16_IMM_OP(JSOP_DEFSHARP, (jsatomid) sharpnum);
6558 break;
6560 case TOK_USESHARP:
6561 JS_ASSERT(cg->flags & TCF_HAS_SHARPS);
6562 EMIT_UINT16_IMM_OP(JSOP_USESHARP, (jsatomid) pn->pn_num);
6563 break;
6564 #endif /* JS_HAS_SHARP_VARS */
6566 case TOK_RP:
6568 uintN oldflags;
6571 * The node for (e) has e as its kid, enabling users who want to nest
6572 * assignment expressions in conditions to avoid the error correction
6573 * done by Condition (from x = y to x == y) by double-parenthesizing.
6575 oldflags = cg->flags;
6576 cg->flags &= ~TCF_IN_FOR_INIT;
6577 if (!js_EmitTree(cx, cg, pn->pn_kid))
6578 return JS_FALSE;
6579 cg->flags |= oldflags & TCF_IN_FOR_INIT;
6580 break;
6583 case TOK_NAME:
6585 * Cope with a left-over function definition that was replaced by a use
6586 * of a later function definition of the same name. See FunctionDef and
6587 * MakeDefIntoUse in jsparse.cpp.
6589 if (pn->pn_op == JSOP_NOP)
6590 return JS_TRUE;
6591 if (!EmitNameOp(cx, cg, pn, JS_FALSE))
6592 return JS_FALSE;
6593 break;
6595 #if JS_HAS_XML_SUPPORT
6596 case TOK_XMLATTR:
6597 case TOK_XMLSPACE:
6598 case TOK_XMLTEXT:
6599 case TOK_XMLCDATA:
6600 case TOK_XMLCOMMENT:
6601 #endif
6602 case TOK_STRING:
6603 ok = EmitAtomOp(cx, pn, PN_OP(pn), cg);
6604 break;
6606 case TOK_NUMBER:
6607 ok = EmitNumberOp(cx, pn->pn_dval, cg);
6608 break;
6610 case TOK_REGEXP:
6612 * If the regexp's script is one-shot, we can avoid the extra
6613 * fork-on-exec costs of JSOP_REGEXP by selecting JSOP_OBJECT.
6614 * Otherwise, to avoid incorrect proto, parent, and lastIndex
6615 * sharing among threads and sequentially across re-execution,
6616 * select JSOP_REGEXP.
6618 JS_ASSERT(pn->pn_op == JSOP_REGEXP);
6619 if (cg->flags & TCF_COMPILE_N_GO) {
6620 ok = EmitObjectOp(cx, pn->pn_objbox, JSOP_OBJECT, cg);
6621 } else {
6622 ok = EmitIndexOp(cx, JSOP_REGEXP,
6623 cg->regexpList.index(pn->pn_objbox),
6624 cg);
6626 break;
6628 #if JS_HAS_XML_SUPPORT
6629 case TOK_ANYNAME:
6630 #endif
6631 case TOK_PRIMARY:
6632 if (js_Emit1(cx, cg, PN_OP(pn)) < 0)
6633 return JS_FALSE;
6634 break;
6636 #if JS_HAS_DEBUGGER_KEYWORD
6637 case TOK_DEBUGGER:
6638 if (js_Emit1(cx, cg, JSOP_DEBUGGER) < 0)
6639 return JS_FALSE;
6640 break;
6641 #endif /* JS_HAS_DEBUGGER_KEYWORD */
6643 #if JS_HAS_XML_SUPPORT
6644 case TOK_XMLELEM:
6645 case TOK_XMLLIST:
6646 if (pn->pn_op == JSOP_XMLOBJECT) {
6647 ok = EmitObjectOp(cx, pn->pn_objbox, PN_OP(pn), cg);
6648 break;
6651 JS_ASSERT(PN_TYPE(pn) == TOK_XMLLIST || pn->pn_count != 0);
6652 switch (pn->pn_head ? PN_TYPE(pn->pn_head) : TOK_XMLLIST) {
6653 case TOK_XMLETAGO:
6654 JS_ASSERT(0);
6655 /* FALL THROUGH */
6656 case TOK_XMLPTAGC:
6657 case TOK_XMLSTAGO:
6658 break;
6659 default:
6660 if (js_Emit1(cx, cg, JSOP_STARTXML) < 0)
6661 return JS_FALSE;
6664 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
6665 if (pn2->pn_type == TOK_LC &&
6666 js_Emit1(cx, cg, JSOP_STARTXMLEXPR) < 0) {
6667 return JS_FALSE;
6669 if (!js_EmitTree(cx, cg, pn2))
6670 return JS_FALSE;
6671 if (pn2 != pn->pn_head && js_Emit1(cx, cg, JSOP_ADD) < 0)
6672 return JS_FALSE;
6675 if (pn->pn_xflags & PNX_XMLROOT) {
6676 if (pn->pn_count == 0) {
6677 JS_ASSERT(pn->pn_type == TOK_XMLLIST);
6678 atom = cx->runtime->atomState.emptyAtom;
6679 ale = cg->atomList.add(cg->compiler, atom);
6680 if (!ale)
6681 return JS_FALSE;
6682 EMIT_INDEX_OP(JSOP_STRING, ALE_INDEX(ale));
6684 if (js_Emit1(cx, cg, PN_OP(pn)) < 0)
6685 return JS_FALSE;
6687 #ifdef DEBUG
6688 else
6689 JS_ASSERT(pn->pn_count != 0);
6690 #endif
6691 break;
6693 case TOK_XMLPTAGC:
6694 if (pn->pn_op == JSOP_XMLOBJECT) {
6695 ok = EmitObjectOp(cx, pn->pn_objbox, PN_OP(pn), cg);
6696 break;
6698 /* FALL THROUGH */
6700 case TOK_XMLSTAGO:
6701 case TOK_XMLETAGO:
6703 uint32 i;
6705 if (js_Emit1(cx, cg, JSOP_STARTXML) < 0)
6706 return JS_FALSE;
6708 ale = cg->atomList.add(cg->compiler,
6709 (pn->pn_type == TOK_XMLETAGO)
6710 ? cx->runtime->atomState.etagoAtom
6711 : cx->runtime->atomState.stagoAtom);
6712 if (!ale)
6713 return JS_FALSE;
6714 EMIT_INDEX_OP(JSOP_STRING, ALE_INDEX(ale));
6716 JS_ASSERT(pn->pn_count != 0);
6717 pn2 = pn->pn_head;
6718 if (pn2->pn_type == TOK_LC && js_Emit1(cx, cg, JSOP_STARTXMLEXPR) < 0)
6719 return JS_FALSE;
6720 if (!js_EmitTree(cx, cg, pn2))
6721 return JS_FALSE;
6722 if (js_Emit1(cx, cg, JSOP_ADD) < 0)
6723 return JS_FALSE;
6725 for (pn2 = pn2->pn_next, i = 0; pn2; pn2 = pn2->pn_next, i++) {
6726 if (pn2->pn_type == TOK_LC &&
6727 js_Emit1(cx, cg, JSOP_STARTXMLEXPR) < 0) {
6728 return JS_FALSE;
6730 if (!js_EmitTree(cx, cg, pn2))
6731 return JS_FALSE;
6732 if ((i & 1) && pn2->pn_type == TOK_LC) {
6733 if (js_Emit1(cx, cg, JSOP_TOATTRVAL) < 0)
6734 return JS_FALSE;
6736 if (js_Emit1(cx, cg,
6737 (i & 1) ? JSOP_ADDATTRVAL : JSOP_ADDATTRNAME) < 0) {
6738 return JS_FALSE;
6742 ale = cg->atomList.add(cg->compiler,
6743 (pn->pn_type == TOK_XMLPTAGC)
6744 ? cx->runtime->atomState.ptagcAtom
6745 : cx->runtime->atomState.tagcAtom);
6746 if (!ale)
6747 return JS_FALSE;
6748 EMIT_INDEX_OP(JSOP_STRING, ALE_INDEX(ale));
6749 if (js_Emit1(cx, cg, JSOP_ADD) < 0)
6750 return JS_FALSE;
6752 if ((pn->pn_xflags & PNX_XMLROOT) && js_Emit1(cx, cg, PN_OP(pn)) < 0)
6753 return JS_FALSE;
6754 break;
6757 case TOK_XMLNAME:
6758 if (pn->pn_arity == PN_LIST) {
6759 JS_ASSERT(pn->pn_count != 0);
6760 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
6761 if (pn2->pn_type == TOK_LC &&
6762 js_Emit1(cx, cg, JSOP_STARTXMLEXPR) < 0) {
6763 return JS_FALSE;
6765 if (!js_EmitTree(cx, cg, pn2))
6766 return JS_FALSE;
6767 if (pn2 != pn->pn_head && js_Emit1(cx, cg, JSOP_ADD) < 0)
6768 return JS_FALSE;
6770 } else {
6771 JS_ASSERT(pn->pn_arity == PN_NULLARY);
6772 ok = (pn->pn_op == JSOP_OBJECT)
6773 ? EmitObjectOp(cx, pn->pn_objbox, PN_OP(pn), cg)
6774 : EmitAtomOp(cx, pn, PN_OP(pn), cg);
6776 break;
6778 case TOK_XMLPI:
6779 ale = cg->atomList.add(cg->compiler, pn->pn_atom2);
6780 if (!ale)
6781 return JS_FALSE;
6782 if (!EmitIndexOp(cx, JSOP_QNAMEPART, ALE_INDEX(ale), cg))
6783 return JS_FALSE;
6784 if (!EmitAtomOp(cx, pn, JSOP_XMLPI, cg))
6785 return JS_FALSE;
6786 break;
6787 #endif /* JS_HAS_XML_SUPPORT */
6789 default:
6790 JS_ASSERT(0);
6793 if (ok && --cg->emitLevel == 0) {
6794 if (cg->spanDeps)
6795 ok = OptimizeSpanDeps(cx, cg);
6796 if (!UpdateLineNumberNotes(cx, cg, pn->pn_pos.end.lineno))
6797 return JS_FALSE;
6800 return ok;
6804 * We should try to get rid of offsetBias (always 0 or 1, where 1 is
6805 * JSOP_{NOP,POP}_LENGTH), which is used only by SRC_FOR and SRC_DECL.
6807 JS_FRIEND_DATA(JSSrcNoteSpec) js_SrcNoteSpec[] = {
6808 {"null", 0, 0, 0},
6809 {"if", 0, 0, 0},
6810 {"if-else", 2, 0, 1},
6811 {"for", 3, 1, 1},
6812 {"while", 1, 0, 1},
6813 {"continue", 0, 0, 0},
6814 {"decl", 1, 1, 1},
6815 {"pcdelta", 1, 0, 1},
6816 {"assignop", 0, 0, 0},
6817 {"cond", 1, 0, 1},
6818 {"brace", 1, 0, 1},
6819 {"hidden", 0, 0, 0},
6820 {"pcbase", 1, 0, -1},
6821 {"label", 1, 0, 0},
6822 {"labelbrace", 1, 0, 0},
6823 {"endbrace", 0, 0, 0},
6824 {"break2label", 1, 0, 0},
6825 {"cont2label", 1, 0, 0},
6826 {"switch", 2, 0, 1},
6827 {"funcdef", 1, 0, 0},
6828 {"catch", 1, 0, 1},
6829 {"extended", -1, 0, 0},
6830 {"newline", 0, 0, 0},
6831 {"setline", 1, 0, 0},
6832 {"xdelta", 0, 0, 0},
6835 static intN
6836 AllocSrcNote(JSContext *cx, JSCodeGenerator *cg)
6838 intN index;
6839 JSArenaPool *pool;
6840 size_t size;
6842 index = CG_NOTE_COUNT(cg);
6843 if (((uintN)index & CG_NOTE_MASK(cg)) == 0) {
6844 pool = cg->notePool;
6845 size = SRCNOTE_SIZE(CG_NOTE_MASK(cg) + 1);
6846 if (!CG_NOTES(cg)) {
6847 /* Allocate the first note array lazily; leave noteMask alone. */
6848 JS_ARENA_ALLOCATE_CAST(CG_NOTES(cg), jssrcnote *, pool, size);
6849 } else {
6850 /* Grow by doubling note array size; update noteMask on success. */
6851 JS_ARENA_GROW_CAST(CG_NOTES(cg), jssrcnote *, pool, size, size);
6852 if (CG_NOTES(cg))
6853 CG_NOTE_MASK(cg) = (CG_NOTE_MASK(cg) << 1) | 1;
6855 if (!CG_NOTES(cg)) {
6856 js_ReportOutOfScriptQuota(cx);
6857 return -1;
6861 CG_NOTE_COUNT(cg) = index + 1;
6862 return index;
6865 intN
6866 js_NewSrcNote(JSContext *cx, JSCodeGenerator *cg, JSSrcNoteType type)
6868 intN index, n;
6869 jssrcnote *sn;
6870 ptrdiff_t offset, delta, xdelta;
6873 * Claim a note slot in CG_NOTES(cg) by growing it if necessary and then
6874 * incrementing CG_NOTE_COUNT(cg).
6876 index = AllocSrcNote(cx, cg);
6877 if (index < 0)
6878 return -1;
6879 sn = &CG_NOTES(cg)[index];
6882 * Compute delta from the last annotated bytecode's offset. If it's too
6883 * big to fit in sn, allocate one or more xdelta notes and reset sn.
6885 offset = CG_OFFSET(cg);
6886 delta = offset - CG_LAST_NOTE_OFFSET(cg);
6887 CG_LAST_NOTE_OFFSET(cg) = offset;
6888 if (delta >= SN_DELTA_LIMIT) {
6889 do {
6890 xdelta = JS_MIN(delta, SN_XDELTA_MASK);
6891 SN_MAKE_XDELTA(sn, xdelta);
6892 delta -= xdelta;
6893 index = AllocSrcNote(cx, cg);
6894 if (index < 0)
6895 return -1;
6896 sn = &CG_NOTES(cg)[index];
6897 } while (delta >= SN_DELTA_LIMIT);
6901 * Initialize type and delta, then allocate the minimum number of notes
6902 * needed for type's arity. Usually, we won't need more, but if an offset
6903 * does take two bytes, js_SetSrcNoteOffset will grow CG_NOTES(cg).
6905 SN_MAKE_NOTE(sn, type, delta);
6906 for (n = (intN)js_SrcNoteSpec[type].arity; n > 0; n--) {
6907 if (js_NewSrcNote(cx, cg, SRC_NULL) < 0)
6908 return -1;
6910 return index;
6913 intN
6914 js_NewSrcNote2(JSContext *cx, JSCodeGenerator *cg, JSSrcNoteType type,
6915 ptrdiff_t offset)
6917 intN index;
6919 index = js_NewSrcNote(cx, cg, type);
6920 if (index >= 0) {
6921 if (!js_SetSrcNoteOffset(cx, cg, index, 0, offset))
6922 return -1;
6924 return index;
6927 intN
6928 js_NewSrcNote3(JSContext *cx, JSCodeGenerator *cg, JSSrcNoteType type,
6929 ptrdiff_t offset1, ptrdiff_t offset2)
6931 intN index;
6933 index = js_NewSrcNote(cx, cg, type);
6934 if (index >= 0) {
6935 if (!js_SetSrcNoteOffset(cx, cg, index, 0, offset1))
6936 return -1;
6937 if (!js_SetSrcNoteOffset(cx, cg, index, 1, offset2))
6938 return -1;
6940 return index;
6943 static JSBool
6944 GrowSrcNotes(JSContext *cx, JSCodeGenerator *cg)
6946 JSArenaPool *pool;
6947 size_t size;
6949 /* Grow by doubling note array size; update noteMask on success. */
6950 pool = cg->notePool;
6951 size = SRCNOTE_SIZE(CG_NOTE_MASK(cg) + 1);
6952 JS_ARENA_GROW_CAST(CG_NOTES(cg), jssrcnote *, pool, size, size);
6953 if (!CG_NOTES(cg)) {
6954 js_ReportOutOfScriptQuota(cx);
6955 return JS_FALSE;
6957 CG_NOTE_MASK(cg) = (CG_NOTE_MASK(cg) << 1) | 1;
6958 return JS_TRUE;
6961 jssrcnote *
6962 js_AddToSrcNoteDelta(JSContext *cx, JSCodeGenerator *cg, jssrcnote *sn,
6963 ptrdiff_t delta)
6965 ptrdiff_t base, limit, newdelta, diff;
6966 intN index;
6969 * Called only from OptimizeSpanDeps and js_FinishTakingSrcNotes to add to
6970 * main script note deltas, and only by a small positive amount.
6972 JS_ASSERT(cg->current == &cg->main);
6973 JS_ASSERT((unsigned) delta < (unsigned) SN_XDELTA_LIMIT);
6975 base = SN_DELTA(sn);
6976 limit = SN_IS_XDELTA(sn) ? SN_XDELTA_LIMIT : SN_DELTA_LIMIT;
6977 newdelta = base + delta;
6978 if (newdelta < limit) {
6979 SN_SET_DELTA(sn, newdelta);
6980 } else {
6981 index = sn - cg->main.notes;
6982 if ((cg->main.noteCount & cg->main.noteMask) == 0) {
6983 if (!GrowSrcNotes(cx, cg))
6984 return NULL;
6985 sn = cg->main.notes + index;
6987 diff = cg->main.noteCount - index;
6988 cg->main.noteCount++;
6989 memmove(sn + 1, sn, SRCNOTE_SIZE(diff));
6990 SN_MAKE_XDELTA(sn, delta);
6991 sn++;
6993 return sn;
6996 JS_FRIEND_API(uintN)
6997 js_SrcNoteLength(jssrcnote *sn)
6999 uintN arity;
7000 jssrcnote *base;
7002 arity = (intN)js_SrcNoteSpec[SN_TYPE(sn)].arity;
7003 for (base = sn++; arity; sn++, arity--) {
7004 if (*sn & SN_3BYTE_OFFSET_FLAG)
7005 sn += 2;
7007 return sn - base;
7010 JS_FRIEND_API(ptrdiff_t)
7011 js_GetSrcNoteOffset(jssrcnote *sn, uintN which)
7013 /* Find the offset numbered which (i.e., skip exactly which offsets). */
7014 JS_ASSERT(SN_TYPE(sn) != SRC_XDELTA);
7015 JS_ASSERT((intN) which < js_SrcNoteSpec[SN_TYPE(sn)].arity);
7016 for (sn++; which; sn++, which--) {
7017 if (*sn & SN_3BYTE_OFFSET_FLAG)
7018 sn += 2;
7020 if (*sn & SN_3BYTE_OFFSET_FLAG) {
7021 return (ptrdiff_t)(((uint32)(sn[0] & SN_3BYTE_OFFSET_MASK) << 16)
7022 | (sn[1] << 8)
7023 | sn[2]);
7025 return (ptrdiff_t)*sn;
7028 JSBool
7029 js_SetSrcNoteOffset(JSContext *cx, JSCodeGenerator *cg, uintN index,
7030 uintN which, ptrdiff_t offset)
7032 jssrcnote *sn;
7033 ptrdiff_t diff;
7035 if ((jsuword)offset >= (jsuword)((ptrdiff_t)SN_3BYTE_OFFSET_FLAG << 16)) {
7036 ReportStatementTooLarge(cx, cg);
7037 return JS_FALSE;
7040 /* Find the offset numbered which (i.e., skip exactly which offsets). */
7041 sn = &CG_NOTES(cg)[index];
7042 JS_ASSERT(SN_TYPE(sn) != SRC_XDELTA);
7043 JS_ASSERT((intN) which < js_SrcNoteSpec[SN_TYPE(sn)].arity);
7044 for (sn++; which; sn++, which--) {
7045 if (*sn & SN_3BYTE_OFFSET_FLAG)
7046 sn += 2;
7049 /* See if the new offset requires three bytes. */
7050 if (offset > (ptrdiff_t)SN_3BYTE_OFFSET_MASK) {
7051 /* Maybe this offset was already set to a three-byte value. */
7052 if (!(*sn & SN_3BYTE_OFFSET_FLAG)) {
7053 /* Losing, need to insert another two bytes for this offset. */
7054 index = sn - CG_NOTES(cg);
7057 * Simultaneously test to see if the source note array must grow to
7058 * accommodate either the first or second byte of additional storage
7059 * required by this 3-byte offset.
7061 if (((CG_NOTE_COUNT(cg) + 1) & CG_NOTE_MASK(cg)) <= 1) {
7062 if (!GrowSrcNotes(cx, cg))
7063 return JS_FALSE;
7064 sn = CG_NOTES(cg) + index;
7066 CG_NOTE_COUNT(cg) += 2;
7068 diff = CG_NOTE_COUNT(cg) - (index + 3);
7069 JS_ASSERT(diff >= 0);
7070 if (diff > 0)
7071 memmove(sn + 3, sn + 1, SRCNOTE_SIZE(diff));
7073 *sn++ = (jssrcnote)(SN_3BYTE_OFFSET_FLAG | (offset >> 16));
7074 *sn++ = (jssrcnote)(offset >> 8);
7076 *sn = (jssrcnote)offset;
7077 return JS_TRUE;
#ifdef DEBUG_notme
#define DEBUG_srcnotesize
#endif

#ifdef DEBUG_srcnotesize
#define NBINS 10
static uint32 hist[NBINS];

/* Debug-only: append the srcnote-size histogram to /tmp/srcnotes.hist. */
void DumpSrcNoteSizeHist()
{
    static FILE *fp;

    if (!fp) {
        fp = fopen("/tmp/srcnotes.hist", "w");
        if (!fp)
            return;
        setvbuf(fp, NULL, _IONBF, 0);   /* unbuffered so output survives crashes */
    }
    fprintf(fp, "SrcNote size histogram:\n");
    for (int bin = 0; bin < NBINS; bin++) {
        fprintf(fp, "%4u %4u ", JS_BIT(bin), hist[bin]);
        /* One star per ten entries, rounded up. */
        for (int stars = (int) JS_HOWMANY(hist[bin], 10); stars > 0; --stars)
            fputc('*', fp);
        fputc('\n', fp);
    }
    fputc('\n', fp);
}
#endif
7111 * Fill in the storage at notes with prolog and main srcnotes; the space at
7112 * notes was allocated using the CG_COUNT_FINAL_SRCNOTES macro from jsemit.h.
7113 * SO DON'T CHANGE THIS FUNCTION WITHOUT AT LEAST CHECKING WHETHER jsemit.h's
7114 * CG_COUNT_FINAL_SRCNOTES MACRO NEEDS CORRESPONDING CHANGES!
7116 JSBool
7117 js_FinishTakingSrcNotes(JSContext *cx, JSCodeGenerator *cg, jssrcnote *notes)
7119 uintN prologCount, mainCount, totalCount;
7120 ptrdiff_t offset, delta;
7121 jssrcnote *sn;
7123 JS_ASSERT(cg->current == &cg->main);
7125 prologCount = cg->prolog.noteCount;
7126 if (prologCount && cg->prolog.currentLine != cg->firstLine) {
7127 CG_SWITCH_TO_PROLOG(cg);
7128 if (js_NewSrcNote2(cx, cg, SRC_SETLINE, (ptrdiff_t)cg->firstLine) < 0)
7129 return JS_FALSE;
7130 prologCount = cg->prolog.noteCount;
7131 CG_SWITCH_TO_MAIN(cg);
7132 } else {
7134 * Either no prolog srcnotes, or no line number change over prolog.
7135 * We don't need a SRC_SETLINE, but we may need to adjust the offset
7136 * of the first main note, by adding to its delta and possibly even
7137 * prepending SRC_XDELTA notes to it to account for prolog bytecodes
7138 * that came at and after the last annotated bytecode.
7140 offset = CG_PROLOG_OFFSET(cg) - cg->prolog.lastNoteOffset;
7141 JS_ASSERT(offset >= 0);
7142 if (offset > 0 && cg->main.noteCount != 0) {
7143 /* NB: Use as much of the first main note's delta as we can. */
7144 sn = cg->main.notes;
7145 delta = SN_IS_XDELTA(sn)
7146 ? SN_XDELTA_MASK - (*sn & SN_XDELTA_MASK)
7147 : SN_DELTA_MASK - (*sn & SN_DELTA_MASK);
7148 if (offset < delta)
7149 delta = offset;
7150 for (;;) {
7151 if (!js_AddToSrcNoteDelta(cx, cg, sn, delta))
7152 return JS_FALSE;
7153 offset -= delta;
7154 if (offset == 0)
7155 break;
7156 delta = JS_MIN(offset, SN_XDELTA_MASK);
7157 sn = cg->main.notes;
7162 mainCount = cg->main.noteCount;
7163 totalCount = prologCount + mainCount;
7164 if (prologCount)
7165 memcpy(notes, cg->prolog.notes, SRCNOTE_SIZE(prologCount));
7166 memcpy(notes + prologCount, cg->main.notes, SRCNOTE_SIZE(mainCount));
7167 SN_MAKE_TERMINATOR(&notes[totalCount]);
7169 #ifdef DEBUG_notme
7170 { int bin = JS_CeilingLog2(totalCount);
7171 if (bin >= NBINS)
7172 bin = NBINS - 1;
7173 ++hist[bin];
7175 #endif
7176 return JS_TRUE;
7179 static JSBool
7180 NewTryNote(JSContext *cx, JSCodeGenerator *cg, JSTryNoteKind kind,
7181 uintN stackDepth, size_t start, size_t end)
7183 JSTryNode *tryNode;
7185 JS_ASSERT((uintN)(uint16)stackDepth == stackDepth);
7186 JS_ASSERT(start <= end);
7187 JS_ASSERT((size_t)(uint32)start == start);
7188 JS_ASSERT((size_t)(uint32)end == end);
7190 JS_ARENA_ALLOCATE_TYPE(tryNode, JSTryNode, &cx->tempPool);
7191 if (!tryNode) {
7192 js_ReportOutOfScriptQuota(cx);
7193 return JS_FALSE;
7196 tryNode->note.kind = kind;
7197 tryNode->note.stackDepth = (uint16)stackDepth;
7198 tryNode->note.start = (uint32)start;
7199 tryNode->note.length = (uint32)(end - start);
7200 tryNode->prev = cg->lastTryNode;
7201 cg->lastTryNode = tryNode;
7202 cg->ntrynotes++;
7203 return JS_TRUE;
7206 void
7207 js_FinishTakingTryNotes(JSCodeGenerator *cg, JSTryNoteArray *array)
7209 JSTryNode *tryNode;
7210 JSTryNote *tn;
7212 JS_ASSERT(array->length > 0 && array->length == cg->ntrynotes);
7213 tn = array->vector + array->length;
7214 tryNode = cg->lastTryNode;
7215 do {
7216 *--tn = tryNode->note;
7217 } while ((tryNode = tryNode->prev) != NULL);
7218 JS_ASSERT(tn == array->vector);
7222 * Find the index of the given object for code generator.
7224 * Since the emitter refers to each parsed object only once, for the index we
7225 * use the number of already indexes objects. We also add the object to a list
7226 * to convert the list to a fixed-size array when we complete code generation,
7227 * see JSCGObjectList::finish below.
7229 * Most of the objects go to JSCodeGenerator.objectList but for regexp we use a
7230 * separated JSCodeGenerator.regexpList. In this way the emitted index can be
7231 * directly used to store and fetch a reference to a cloned RegExp object that
7232 * shares the same JSRegExp private data created for the object literal in
7233 * objbox. We need a cloned object to hold lastIndex and other direct properties
7234 * that should not be shared among threads sharing a precompiled function or
7235 * script.
7237 * If the code being compiled is function code, allocate a reserved slot in
7238 * the cloned function object that shares its precompiled script with other
7239 * cloned function objects and with the compiler-created clone-parent. There
7240 * are nregexps = JS_SCRIPT_REGEXPS(script)->length such reserved slots in each
7241 * function object cloned from fun->object. NB: during compilation, a funobj
7242 * slots element must never be allocated, because js_AllocSlot could hand out
7243 * one of the slots that should be given to a regexp clone.
7245 * If the code being compiled is global code, the cloned regexp are stored in
7246 * fp->vars slot after cg->ngvars and to protect regexp slots from GC we set
7247 * fp->nvars to ngvars + nregexps.
7249 * The slots initially contain undefined or null. We populate them lazily when
7250 * JSOP_REGEXP is executed for the first time.
7252 * Why clone regexp objects? ECMA specifies that when a regular expression
7253 * literal is scanned, a RegExp object is created. In the spec, compilation
7254 * and execution happen indivisibly, but in this implementation and many of
7255 * its embeddings, code is precompiled early and re-executed in multiple
7256 * threads, or using multiple global objects, or both, for efficiency.
7258 * In such cases, naively following ECMA leads to wrongful sharing of RegExp
7259 * objects, which makes for collisions on the lastIndex property (especially
7260 * for global regexps) and on any ad-hoc properties. Also, __proto__ and
7261 * __parent__ refer to the pre-compilation prototype and global objects, a
7262 * pigeon-hole problem for instanceof tests.
7264 uintN
7265 JSCGObjectList::index(JSObjectBox *objbox)
7267 JS_ASSERT(!objbox->emitLink);
7268 objbox->emitLink = lastbox;
7269 lastbox = objbox;
7270 return length++;
7273 void
7274 JSCGObjectList::finish(JSObjectArray *array)
7276 JSObject **cursor;
7277 JSObjectBox *objbox;
7279 JS_ASSERT(length <= INDEX_LIMIT);
7280 JS_ASSERT(length == array->length);
7282 cursor = array->vector + array->length;
7283 objbox = lastbox;
7284 do {
7285 --cursor;
7286 JS_ASSERT(!*cursor);
7287 *cursor = objbox->object;
7288 } while ((objbox = objbox->emitLink) != NULL);
7289 JS_ASSERT(cursor == array->vector);