Bug 559408: Arena pool macros to methods. (r=gal)
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sw=4 et tw=99:
 *
 * ***** BEGIN LICENSE BLOCK *****
 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
 *
 * The contents of this file are subject to the Mozilla Public License Version
 * 1.1 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * http://www.mozilla.org/MPL/
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
 *
 * The Original Code is Mozilla Communicator client code, released
 * March 31, 1998.
 *
 * The Initial Developer of the Original Code is
 * Netscape Communications Corporation.
 * Portions created by the Initial Developer are Copyright (C) 1998
 * the Initial Developer. All Rights Reserved.
 *
 * Contributor(s):
 *
 * Alternatively, the contents of this file may be used under the terms of
 * either of the GNU General Public License Version 2 or later (the "GPL"),
 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
 * in which case the provisions of the GPL or the LGPL are applicable instead
 * of those above. If you wish to allow use of your version of this file only
 * under the terms of either the GPL or the LGPL, and not to allow others to
 * use your version of this file under the terms of the MPL, indicate your
 * decision by deleting the provisions above and replace them with the notice
 * and other provisions required by the GPL or the LGPL. If you do not delete
 * the provisions above, a recipient may use your version of this file under
 * the terms of any one of the MPL, the GPL or the LGPL.
 *
 * ***** END LICENSE BLOCK ***** */
/*
 * JS bytecode generation.
 */
#ifdef HAVE_MEMORY_H
#include <memory.h>
#endif
#include <new>
#include <string.h>
#include "jstypes.h"
#include "jsstdint.h"
#include "jsarena.h" /* Added by JSIFY */
#include "jsutil.h" /* Added by JSIFY */
#include "jsbit.h"
#include "jsprf.h"
#include "jsapi.h"
#include "jsatom.h"
#include "jsbool.h"
#include "jscntxt.h"
#include "jsversion.h"
#include "jsemit.h"
#include "jsfun.h"
#include "jsnum.h"
#include "jsopcode.h"
#include "jsparse.h"
#include "jsregexp.h"
#include "jsscan.h"
#include "jsscope.h"
#include "jsscript.h"
#include "jsautooplen.h"
#include "jsstaticcheck.h"
/* Allocation chunk counts, must be powers of two in general. */
#define BYTECODE_CHUNK  256     /* code allocation increment */
#define SRCNOTE_CHUNK   64      /* initial srcnote allocation increment */
#define TRYNOTE_CHUNK   64      /* trynote allocation increment */

/* Macros to compute byte sizes from typed element counts. */
#define BYTECODE_SIZE(n)    ((n) * sizeof(jsbytecode))
#define SRCNOTE_SIZE(n)     ((n) * sizeof(jssrcnote))
#define TRYNOTE_SIZE(n)     ((n) * sizeof(JSTryNote))
using namespace js;

static JSBool
NewTryNote(JSContext *cx, JSCodeGenerator *cg, JSTryNoteKind kind,
           uintN stackDepth, size_t start, size_t end);
JSCodeGenerator::JSCodeGenerator(Parser *parser,
                                 JSArenaPool *cpool, JSArenaPool *npool,
                                 uintN lineno)
  : JSTreeContext(parser),
    codePool(cpool), notePool(npool),
    codeMark(cpool->getMark()), noteMark(npool->getMark()),
    stackDepth(0), maxStackDepth(0),
    ntrynotes(0), lastTryNode(NULL),
    spanDeps(NULL), jumpTargets(NULL), jtFreeList(NULL),
    numSpanDeps(0), numJumpTargets(0), spanDepTodo(0),
    arrayCompDepth(0),
    emitLevel(0),
    constMap(parser->context)
{
    flags = TCF_COMPILING;
    memset(&prolog, 0, sizeof prolog);
    memset(&main, 0, sizeof main);
    current = &main;
    firstLine = prolog.currentLine = main.currentLine = lineno;
    prolog.noteMask = main.noteMask = SRCNOTE_CHUNK - 1;
    memset(&upvarMap, 0, sizeof upvarMap);
}
bool JSCodeGenerator::init()
{
    return constMap.init();
}
JSCodeGenerator::~JSCodeGenerator()
{
    codePool->release(codeMark);
    notePool->release(noteMark);

    /* NB: non-null only after OOM. */
    if (spanDeps)
        parser->context->free(spanDeps);

    if (upvarMap.vector)
        parser->context->free(upvarMap.vector);
}
static ptrdiff_t
EmitCheck(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t delta)
{
    jsbytecode *base, *limit, *next;
    ptrdiff_t offset, length;
    size_t incr, size;

    base = CG_BASE(cg);
    next = CG_NEXT(cg);
    limit = CG_LIMIT(cg);
    offset = next - base;
    if (next + delta > limit) {
        length = offset + delta;
        length = (length <= BYTECODE_CHUNK)
                 ? BYTECODE_CHUNK
                 : JS_BIT(JS_CeilingLog2(length));
        incr = BYTECODE_SIZE(length);
        if (!base) {
            cg->codePool->allocateCast<jsbytecode *>(base, incr);
        } else {
            size = BYTECODE_SIZE(limit - base);
            incr -= size;
            cg->codePool->growCast<jsbytecode *>(base, size, incr);
        }
        if (!base) {
            js_ReportOutOfScriptQuota(cx);
            return -1;
        }
        CG_BASE(cg) = base;
        CG_LIMIT(cg) = base + length;
        CG_NEXT(cg) = base + offset;
    }
    return offset;
}
static void
UpdateDepth(JSContext *cx, JSCodeGenerator *cg, ptrdiff_t target)
{
    jsbytecode *pc;
    JSOp op;
    const JSCodeSpec *cs;
    uintN extra, depth, nuses;
    intN ndefs;

    pc = CG_CODE(cg, target);
    op = (JSOp) *pc;
    cs = &js_CodeSpec[op];
#ifdef JS_TRACER
    extern uint8 js_opcode2extra[];
    extra = js_opcode2extra[op];
#else
    extra = 0;
#endif
    if ((cs->format & JOF_TMPSLOT_MASK) || extra) {
        depth = (uintN) cg->stackDepth +
                ((cs->format & JOF_TMPSLOT_MASK) >> JOF_TMPSLOT_SHIFT) +
                extra;
        if (depth > cg->maxStackDepth)
            cg->maxStackDepth = depth;
    }

    nuses = js_GetStackUses(cs, op, pc);
    cg->stackDepth -= nuses;
    JS_ASSERT(cg->stackDepth >= 0);
    if (cg->stackDepth < 0) {
        char numBuf[12];
        TokenStream *ts;

        JS_snprintf(numBuf, sizeof numBuf, "%d", target);
        ts = &cg->parser->tokenStream;
        JS_ReportErrorFlagsAndNumber(cx, JSREPORT_WARNING,
                                     js_GetErrorMessage, NULL,
                                     JSMSG_STACK_UNDERFLOW,
                                     ts->getFilename() ? ts->getFilename() : "stdin",
                                     numBuf);
    }
    ndefs = cs->ndefs;
    if (ndefs < 0) {
        JSObject *blockObj;

        /* We just executed IndexParsedObject */
        JS_ASSERT(op == JSOP_ENTERBLOCK);
        JS_ASSERT(nuses == 0);
        blockObj = cg->objectList.lastbox->object;
        JS_ASSERT(blockObj->getClass() == &js_BlockClass);
        JS_ASSERT(JSVAL_IS_VOID(blockObj->fslots[JSSLOT_BLOCK_DEPTH]));

        OBJ_SET_BLOCK_DEPTH(cx, blockObj, cg->stackDepth);
        ndefs = OBJ_BLOCK_COUNT(cx, blockObj);
    }
    cg->stackDepth += ndefs;
    if ((uintN)cg->stackDepth > cg->maxStackDepth)
        cg->maxStackDepth = cg->stackDepth;
}
ptrdiff_t
js_Emit1(JSContext *cx, JSCodeGenerator *cg, JSOp op)
{
    ptrdiff_t offset = EmitCheck(cx, cg, op, 1);

    if (offset >= 0) {
        *CG_NEXT(cg)++ = (jsbytecode)op;
        UpdateDepth(cx, cg, offset);
    }
    return offset;
}

ptrdiff_t
js_Emit2(JSContext *cx, JSCodeGenerator *cg, JSOp op, jsbytecode op1)
{
    ptrdiff_t offset = EmitCheck(cx, cg, op, 2);

    if (offset >= 0) {
        jsbytecode *next = CG_NEXT(cg);
        next[0] = (jsbytecode)op;
        next[1] = op1;
        CG_NEXT(cg) = next + 2;
        UpdateDepth(cx, cg, offset);
    }
    return offset;
}

ptrdiff_t
js_Emit3(JSContext *cx, JSCodeGenerator *cg, JSOp op, jsbytecode op1,
         jsbytecode op2)
{
    ptrdiff_t offset = EmitCheck(cx, cg, op, 3);

    if (offset >= 0) {
        jsbytecode *next = CG_NEXT(cg);
        next[0] = (jsbytecode)op;
        next[1] = op1;
        next[2] = op2;
        CG_NEXT(cg) = next + 3;
        UpdateDepth(cx, cg, offset);
    }
    return offset;
}

ptrdiff_t
js_EmitN(JSContext *cx, JSCodeGenerator *cg, JSOp op, size_t extra)
{
    ptrdiff_t length = 1 + (ptrdiff_t)extra;
    ptrdiff_t offset = EmitCheck(cx, cg, op, length);

    if (offset >= 0) {
        jsbytecode *next = CG_NEXT(cg);
        *next = (jsbytecode)op;
        memset(next + 1, 0, BYTECODE_SIZE(extra));
        CG_NEXT(cg) = next + length;

        /*
         * Don't UpdateDepth if op's use-count comes from the immediate
         * operand yet to be stored in the extra bytes after op.
         */
        if (js_CodeSpec[op].nuses >= 0)
            UpdateDepth(cx, cg, offset);
    }
    return offset;
}
/* XXX too many "... statement" L10N gaffes below -- fix via js.msg! */
const char js_with_statement_str[] = "with statement";
const char js_finally_block_str[]  = "finally block";
const char js_script_str[]         = "script";

static const char *statementName[] = {
    "label statement",       /* LABEL */
    "if statement",          /* IF */
    "else statement",        /* ELSE */
    "destructuring body",    /* BODY */
    "switch statement",      /* SWITCH */
    "block",                 /* BLOCK */
    js_with_statement_str,   /* WITH */
    "catch block",           /* CATCH */
    "try block",             /* TRY */
    js_finally_block_str,    /* FINALLY */
    js_finally_block_str,    /* SUBROUTINE */
    "do loop",               /* DO_LOOP */
    "for loop",              /* FOR_LOOP */
    "for/in loop",           /* FOR_IN_LOOP */
    "while loop",            /* WHILE_LOOP */
};

JS_STATIC_ASSERT(JS_ARRAY_LENGTH(statementName) == STMT_LIMIT);

static const char *
StatementName(JSCodeGenerator *cg)
{
    if (!cg->topStmt)
        return js_script_str;
    return statementName[cg->topStmt->type];
}

static void
ReportStatementTooLarge(JSContext *cx, JSCodeGenerator *cg)
{
    JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_NEED_DIET,
                         StatementName(cg));
}
/*
 Span-dependent instructions in JS bytecode consist of the jump (JOF_JUMP)
 and switch (JOF_LOOKUPSWITCH, JOF_TABLESWITCH) format opcodes, subdivided
 into unconditional (gotos and gosubs), and conditional jumps or branches
 (which pop a value, test it, and jump depending on its value). Most jumps
 have just one immediate operand, a signed offset from the jump opcode's pc
 to the target bytecode. The lookup and table switch opcodes may contain
 many jump offsets.

 Mozilla bug #80981 (http://bugzilla.mozilla.org/show_bug.cgi?id=80981) was
 fixed by adding extended "X" counterparts to the opcodes/formats (NB: X is
 suffixed to prefer JSOP_ORX thereby avoiding a JSOP_XOR name collision for
 the extended form of the JSOP_OR branch opcode). The unextended or short
 formats have 16-bit signed immediate offset operands, the extended or long
 formats have 32-bit signed immediates. The span-dependency problem consists
 of selecting as few long instructions as possible, or about as few -- since
 jumps can span other jumps, extending one jump may cause another to need to
 be extended.

 Most JS scripts are short, so need no extended jumps. We optimize for this
 case by generating short jumps until we know a long jump is needed. After
 that point, we keep generating short jumps, but each jump's 16-bit immediate
 offset operand is actually an unsigned index into cg->spanDeps, an array of
 JSSpanDep structs. Each struct tells the top offset in the script of the
 opcode, the "before" offset of the jump (which will be the same as top for
 simplex jumps, but which will index further into the bytecode array for a
 non-initial jump offset in a lookup or table switch), the after "offset"
 adjusted during span-dependent instruction selection (initially the same
 value as the "before" offset), and the jump target (more below).

 Since we generate cg->spanDeps lazily, from within js_SetJumpOffset, we must
 ensure that all bytecode generated so far can be inspected to discover where
 the jump offset immediate operands lie within CG_CODE(cg). But the bonus is
 that we generate span-dependency records sorted by their offsets, so we can
 binary-search when trying to find a JSSpanDep for a given bytecode offset,
 or the nearest JSSpanDep at or above a given pc.

 To avoid limiting scripts to 64K jumps, if the cg->spanDeps index overflows
 65534, we store SPANDEP_INDEX_HUGE in the jump's immediate operand. This
 tells us that we need to binary-search for the cg->spanDeps entry by the
 jump opcode's bytecode offset (sd->before).

 Jump targets need to be maintained in a data structure that lets us look
 up an already-known target by its address (jumps may have a common target),
 and that also lets us update the addresses (script-relative, a.k.a. absolute
 offsets) of targets that come after a jump target (for when a jump below
 that target needs to be extended). We use an AVL tree, implemented using
 recursion, but with some tricky optimizations to its height-balancing code
 (see http://www.cmcrossroads.com/bradapp/ftp/src/libs/C++/AvlTrees.html).

 A final wrinkle: backpatch chains are linked by jump-to-jump offsets with
 positive sign, even though they link "backward" (i.e., toward lower bytecode
 address). We don't want to waste space and search time in the AVL tree for
 such temporary backpatch deltas, so we use a single-bit wildcard scheme to
 tag true JSJumpTarget pointers and encode untagged, signed (positive) deltas
 in JSSpanDep.target pointers, depending on whether the JSSpanDep has a known
 target, or is still awaiting backpatching.

 Note that backpatch chains would present a problem for BuildSpanDepTable,
 which inspects bytecode to build cg->spanDeps on demand, when the first
 short jump offset overflows. To solve this temporary problem, we emit a
 proxy bytecode (JSOP_BACKPATCH; JSOP_BACKPATCH_POP for branch ops) whose
 nuses/ndefs counts help keep the stack balanced, but whose opcode format
 distinguishes its backpatch delta immediate operand from a normal jump
 offset.
 */
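/*
 * To make the problem concrete: a JSOP_GOTO whose target lies, say, 40000
 * bytes away cannot encode that span in its 16-bit immediate operand, so it
 * must become JSOP_GOTOX with a 32-bit operand. Each such rewrite grows the
 * bytecode by JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN bytes, which can push
 * other, previously short jumps out of 16-bit range; OptimizeSpanDeps
 * therefore iterates until no jump needs extending.
 *
 * BalanceJumpTargets, below, rebalances the jump-target AVL tree at *jtp
 * after an insertion has driven a node's balance factor outside [-1, 1],
 * using a single or double rotation; it returns nonzero if the subtree
 * height changed, so the caller can propagate the change upward.
 */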
static int
BalanceJumpTargets(JSJumpTarget **jtp)
{
    JSJumpTarget *jt, *jt2, *root;
    int dir, otherDir, heightChanged;
    JSBool doubleRotate;

    jt = *jtp;
    JS_ASSERT(jt->balance != 0);

    if (jt->balance < -1) {
        dir = JT_RIGHT;
        doubleRotate = (jt->kids[JT_LEFT]->balance > 0);
    } else if (jt->balance > 1) {
        dir = JT_LEFT;
        doubleRotate = (jt->kids[JT_RIGHT]->balance < 0);
    } else {
        return 0;
    }

    otherDir = JT_OTHER_DIR(dir);
    if (doubleRotate) {
        jt2 = jt->kids[otherDir];
        *jtp = root = jt2->kids[dir];

        jt->kids[otherDir] = root->kids[dir];
        root->kids[dir] = jt;

        jt2->kids[dir] = root->kids[otherDir];
        root->kids[otherDir] = jt2;

        heightChanged = 1;
        root->kids[JT_LEFT]->balance = -JS_MAX(root->balance, 0);
        root->kids[JT_RIGHT]->balance = -JS_MIN(root->balance, 0);
        root->balance = 0;
    } else {
        *jtp = root = jt->kids[otherDir];
        jt->kids[otherDir] = root->kids[dir];
        root->kids[dir] = jt;

        heightChanged = (root->balance != 0);
        jt->balance = -((dir == JT_LEFT) ? --root->balance : ++root->balance);
    }

    return heightChanged;
}

typedef struct AddJumpTargetArgs {
    JSContext           *cx;
    JSCodeGenerator     *cg;
    ptrdiff_t           offset;
    JSJumpTarget        *node;
} AddJumpTargetArgs;
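/*
 * Insert args->offset into the AVL tree rooted at *jtp, reusing nodes from
 * cg->jtFreeList when possible. On success args->node points at the new or
 * preexisting node for that offset; the return value reports whether the
 * subtree grew in height, so the recursive caller can update its balance.
 */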
static int
AddJumpTarget(AddJumpTargetArgs *args, JSJumpTarget **jtp)
{
    JSJumpTarget *jt;
    int balanceDelta;

    jt = *jtp;
    if (!jt) {
        JSCodeGenerator *cg = args->cg;

        jt = cg->jtFreeList;
        if (jt) {
            cg->jtFreeList = jt->kids[JT_LEFT];
        } else {
            args->cx->tempPool.allocateCast<JSJumpTarget *>(jt, sizeof *jt);
            if (!jt) {
                js_ReportOutOfScriptQuota(args->cx);
                return 0;
            }
        }
        jt->offset = args->offset;
        jt->balance = 0;
        jt->kids[JT_LEFT] = jt->kids[JT_RIGHT] = NULL;
        cg->numJumpTargets++;
        args->node = jt;
        *jtp = jt;
        return 1;
    }

    if (jt->offset == args->offset) {
        args->node = jt;
        return 0;
    }

    if (args->offset < jt->offset)
        balanceDelta = -AddJumpTarget(args, &jt->kids[JT_LEFT]);
    else
        balanceDelta = AddJumpTarget(args, &jt->kids[JT_RIGHT]);
    if (!args->node)
        return 0;

    jt->balance += balanceDelta;
    return (balanceDelta && jt->balance)
           ? 1 - BalanceJumpTargets(jtp)
           : 0;
}

#ifdef DEBUG_brendan
static int AVLCheck(JSJumpTarget *jt)
{
    int lh, rh;

    if (!jt) return 0;
    JS_ASSERT(-1 <= jt->balance && jt->balance <= 1);
    lh = AVLCheck(jt->kids[JT_LEFT]);
    rh = AVLCheck(jt->kids[JT_RIGHT]);
    JS_ASSERT(jt->balance == rh - lh);
    return 1 + JS_MAX(lh, rh);
}
#endif
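/*
 * Resolve span dependency sd to the absolute target sd->top + off: reject
 * spans too large even for the extended 32-bit forms, then enter the target
 * into the jump-target AVL tree (sharing a node if the target is already
 * known) and store the tagged node pointer in sd->target.
 */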
static JSBool
SetSpanDepTarget(JSContext *cx, JSCodeGenerator *cg, JSSpanDep *sd,
                 ptrdiff_t off)
{
    AddJumpTargetArgs args;

    if (off < JUMPX_OFFSET_MIN || JUMPX_OFFSET_MAX < off) {
        ReportStatementTooLarge(cx, cg);
        return JS_FALSE;
    }

    args.cx = cx;
    args.cg = cg;
    args.offset = sd->top + off;
    args.node = NULL;
    AddJumpTarget(&args, &cg->jumpTargets);
    if (!args.node)
        return JS_FALSE;

#ifdef DEBUG_brendan
    AVLCheck(cg->jumpTargets);
#endif

    SD_SET_TARGET(sd, args.node);
    return JS_TRUE;
}

#define SPANDEPS_MIN            256
#define SPANDEPS_SIZE(n)        ((n) * sizeof(JSSpanDep))
#define SPANDEPS_SIZE_MIN       SPANDEPS_SIZE(SPANDEPS_MIN)
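/*
 * Append a span dependency for the jump offset immediate at pc2, belonging
 * to the opcode at pc. The cg->spanDeps vector starts at SPANDEPS_MIN
 * entries and doubles whenever the count reaches a power of two, keeping
 * appends amortized O(1).
 */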
static JSBool
AddSpanDep(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc, jsbytecode *pc2,
           ptrdiff_t off)
{
    uintN index;
    JSSpanDep *sdbase, *sd;
    size_t size;

    index = cg->numSpanDeps;
    if (index + 1 == 0) {
        ReportStatementTooLarge(cx, cg);
        return JS_FALSE;
    }

    if ((index & (index - 1)) == 0 &&
        (!(sdbase = cg->spanDeps) || index >= SPANDEPS_MIN)) {
        size = sdbase ? SPANDEPS_SIZE(index) : SPANDEPS_SIZE_MIN / 2;
        sdbase = (JSSpanDep *) cx->realloc(sdbase, size + size);
        if (!sdbase)
            return JS_FALSE;
        cg->spanDeps = sdbase;
    }

    cg->numSpanDeps = index + 1;
    sd = cg->spanDeps + index;
    sd->top = pc - CG_BASE(cg);
    sd->offset = sd->before = pc2 - CG_BASE(cg);

    if (js_CodeSpec[*pc].format & JOF_BACKPATCH) {
        /* Jump offset will be backpatched if off is a non-zero "bpdelta". */
        if (off != 0) {
            JS_ASSERT(off >= 1 + JUMP_OFFSET_LEN);
            if (off > BPDELTA_MAX) {
                ReportStatementTooLarge(cx, cg);
                return JS_FALSE;
            }
        }
        SD_SET_BPDELTA(sd, off);
    } else if (off == 0) {
        /* Jump offset will be patched directly, without backpatch chaining. */
        SD_SET_TARGET(sd, 0);
    } else {
        /* The jump offset in off is non-zero, therefore it's already known. */
        if (!SetSpanDepTarget(cx, cg, sd, off))
            return JS_FALSE;
    }

    if (index > SPANDEP_INDEX_MAX)
        index = SPANDEP_INDEX_HUGE;
    SET_SPANDEP_INDEX(pc2, index);
    return JS_TRUE;
}
static jsbytecode *
AddSwitchSpanDeps(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc)
{
    JSOp op;
    jsbytecode *pc2;
    ptrdiff_t off;
    jsint low, high;
    uintN njumps, indexlen;

    op = (JSOp) *pc;
    JS_ASSERT(op == JSOP_TABLESWITCH || op == JSOP_LOOKUPSWITCH);
    pc2 = pc;
    off = GET_JUMP_OFFSET(pc2);
    if (!AddSpanDep(cx, cg, pc, pc2, off))
        return NULL;
    pc2 += JUMP_OFFSET_LEN;
    if (op == JSOP_TABLESWITCH) {
        low = GET_JUMP_OFFSET(pc2);
        pc2 += JUMP_OFFSET_LEN;
        high = GET_JUMP_OFFSET(pc2);
        pc2 += JUMP_OFFSET_LEN;
        njumps = (uintN) (high - low + 1);
        indexlen = 0;
    } else {
        njumps = GET_UINT16(pc2);
        pc2 += UINT16_LEN;
        indexlen = INDEX_LEN;
    }
    while (njumps) {
        --njumps;
        pc2 += indexlen;
        off = GET_JUMP_OFFSET(pc2);
        if (!AddSpanDep(cx, cg, pc, pc2, off))
            return NULL;
        pc2 += JUMP_OFFSET_LEN;
    }
    return 1 + pc2;
}
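/*
 * Scan all bytecode generated so far, from cg->spanDepTodo up to
 * CG_NEXT(cg), adding one span dependency per jump offset and per switch
 * case offset. This runs lazily, the first time some jump offset overflows
 * the short 16-bit form.
 */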
static JSBool
BuildSpanDepTable(JSContext *cx, JSCodeGenerator *cg)
{
    jsbytecode *pc, *end;
    JSOp op;
    const JSCodeSpec *cs;
    ptrdiff_t off;

    pc = CG_BASE(cg) + cg->spanDepTodo;
    end = CG_NEXT(cg);
    while (pc != end) {
        JS_ASSERT(pc < end);
        op = (JSOp)*pc;
        cs = &js_CodeSpec[op];

        switch (JOF_TYPE(cs->format)) {
          case JOF_TABLESWITCH:
          case JOF_LOOKUPSWITCH:
            pc = AddSwitchSpanDeps(cx, cg, pc);
            if (!pc)
                return JS_FALSE;
            break;

          case JOF_JUMP:
            off = GET_JUMP_OFFSET(pc);
            if (!AddSpanDep(cx, cg, pc, pc, off))
                return JS_FALSE;
            /* FALL THROUGH */
          default:
            pc += cs->length;
            break;
        }
    }

    return JS_TRUE;
}
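/*
 * Recover the span dependency for the jump at pc. Its 16-bit immediate
 * normally holds the index into cg->spanDeps, but if that index saturated
 * to SPANDEP_INDEX_HUGE we binary-search by bytecode offset instead.
 */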
static JSSpanDep *
GetSpanDep(JSCodeGenerator *cg, jsbytecode *pc)
{
    uintN index;
    ptrdiff_t offset;
    int lo, hi, mid;
    JSSpanDep *sd;

    index = GET_SPANDEP_INDEX(pc);
    if (index != SPANDEP_INDEX_HUGE)
        return cg->spanDeps + index;

    offset = pc - CG_BASE(cg);
    lo = 0;
    hi = cg->numSpanDeps - 1;
    while (lo <= hi) {
        mid = (lo + hi) / 2;
        sd = cg->spanDeps + mid;
        if (sd->before == offset)
            return sd;
        if (sd->before < offset)
            lo = mid + 1;
        else
            hi = mid - 1;
    }

    JS_ASSERT(0);
    return NULL;
}
static JSBool
SetBackPatchDelta(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc,
                  ptrdiff_t delta)
{
    JSSpanDep *sd;

    JS_ASSERT(delta >= 1 + JUMP_OFFSET_LEN);
    if (!cg->spanDeps && delta < JUMP_OFFSET_MAX) {
        SET_JUMP_OFFSET(pc, delta);
        return JS_TRUE;
    }

    if (delta > BPDELTA_MAX) {
        ReportStatementTooLarge(cx, cg);
        return JS_FALSE;
    }

    if (!cg->spanDeps && !BuildSpanDepTable(cx, cg))
        return JS_FALSE;

    sd = GetSpanDep(cg, pc);
    JS_ASSERT(SD_GET_BPDELTA(sd) == 0);
    SD_SET_BPDELTA(sd, delta);
    return JS_TRUE;
}

static void
UpdateJumpTargets(JSJumpTarget *jt, ptrdiff_t pivot, ptrdiff_t delta)
{
    if (jt->offset > pivot) {
        jt->offset += delta;
        if (jt->kids[JT_LEFT])
            UpdateJumpTargets(jt->kids[JT_LEFT], pivot, delta);
    }
    if (jt->kids[JT_RIGHT])
        UpdateJumpTargets(jt->kids[JT_RIGHT], pivot, delta);
}
static JSSpanDep *
FindNearestSpanDep(JSCodeGenerator *cg, ptrdiff_t offset, int lo,
                   JSSpanDep *guard)
{
    int num, hi, mid;
    JSSpanDep *sdbase, *sd;

    num = cg->numSpanDeps;
    JS_ASSERT(num > 0);
    hi = num - 1;
    sdbase = cg->spanDeps;
    while (lo <= hi) {
        mid = (lo + hi) / 2;
        sd = sdbase + mid;
        if (sd->before == offset)
            return sd;
        if (sd->before < offset)
            lo = mid + 1;
        else
            hi = mid - 1;
    }
    if (lo == num)
        return guard;
    sd = sdbase + lo;
    JS_ASSERT(sd->before >= offset && (lo == 0 || sd[-1].before < offset));
    return sd;
}

static void
FreeJumpTargets(JSCodeGenerator *cg, JSJumpTarget *jt)
{
    if (jt->kids[JT_LEFT])
        FreeJumpTargets(cg, jt->kids[JT_LEFT]);
    if (jt->kids[JT_RIGHT])
        FreeJumpTargets(cg, jt->kids[JT_RIGHT]);
    jt->kids[JT_LEFT] = cg->jtFreeList;
    cg->jtFreeList = jt;
}
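/*
 * Extend every jump that cannot be expressed in short form, in stages:
 * (1) sweep cg->spanDeps repeatedly, converting overflowing opcodes to
 * their extended variants and adjusting offsets, until a fixed point is
 * reached; (2) grow the bytecode vector to the exact final size; (3) walk
 * the span deps backwards, sliding chunks of bytecode into their final
 * positions and writing short or extended jump offsets; (4) fix up source
 * note deltas and try-note ranges to match the grown code.
 */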
static JSBool
OptimizeSpanDeps(JSContext *cx, JSCodeGenerator *cg)
{
    jsbytecode *pc, *oldpc, *base, *limit, *next;
    JSSpanDep *sd, *sd2, *sdbase, *sdlimit, *sdtop, guard;
    ptrdiff_t offset, growth, delta, top, pivot, span, length, target;
    JSBool done;
    JSOp op;
    uint32 type;
    size_t size, incr;
    jssrcnote *sn, *snlimit;
    JSSrcNoteSpec *spec;
    uintN i, n, noteIndex;
    JSTryNode *tryNode;
#ifdef DEBUG_brendan
    int passes = 0;
#endif

    base = CG_BASE(cg);
    sdbase = cg->spanDeps;
    sdlimit = sdbase + cg->numSpanDeps;
    offset = CG_OFFSET(cg);
    growth = 0;

    do {
        done = JS_TRUE;
        delta = 0;
        top = pivot = -1;
        sdtop = NULL;
        pc = NULL;
        op = JSOP_NOP;
        type = 0;
#ifdef DEBUG_brendan
        passes++;
#endif

        for (sd = sdbase; sd < sdlimit; sd++) {
            JS_ASSERT(JT_HAS_TAG(sd->target));
            sd->offset += delta;

            if (sd->top != top) {
                sdtop = sd;
                top = sd->top;
                JS_ASSERT(top == sd->before);
                pivot = sd->offset;
                pc = base + top;
                op = (JSOp) *pc;
                type = JOF_OPTYPE(op);
                if (JOF_TYPE_IS_EXTENDED_JUMP(type)) {
                    /*
                     * We already extended all the jump offset operands for
                     * the opcode at sd->top. Jumps and branches have only
                     * one jump offset operand, but switches have many, all
                     * of which are adjacent in cg->spanDeps.
                     */
                    continue;
                }

                JS_ASSERT(type == JOF_JUMP ||
                          type == JOF_TABLESWITCH ||
                          type == JOF_LOOKUPSWITCH);
            }

            if (!JOF_TYPE_IS_EXTENDED_JUMP(type)) {
                span = SD_SPAN(sd, pivot);
                if (span < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < span) {
                    ptrdiff_t deltaFromTop = 0;

                    done = JS_FALSE;

                    switch (op) {
                      case JSOP_GOTO:         op = JSOP_GOTOX; break;
                      case JSOP_IFEQ:         op = JSOP_IFEQX; break;
                      case JSOP_IFNE:         op = JSOP_IFNEX; break;
                      case JSOP_OR:           op = JSOP_ORX; break;
                      case JSOP_AND:          op = JSOP_ANDX; break;
                      case JSOP_GOSUB:        op = JSOP_GOSUBX; break;
                      case JSOP_CASE:         op = JSOP_CASEX; break;
                      case JSOP_DEFAULT:      op = JSOP_DEFAULTX; break;
                      case JSOP_TABLESWITCH:  op = JSOP_TABLESWITCHX; break;
                      case JSOP_LOOKUPSWITCH: op = JSOP_LOOKUPSWITCHX; break;
                      default:
                        ReportStatementTooLarge(cx, cg);
                        return JS_FALSE;
                    }
                    *pc = (jsbytecode) op;

                    for (sd2 = sdtop; sd2 < sdlimit && sd2->top == top; sd2++) {
                        if (sd2 <= sd) {
                            /*
                             * sd2->offset already includes delta as it stood
                             * before we entered this loop, but it must also
                             * include the delta relative to top due to all the
                             * extended jump offset immediates for the opcode
                             * starting at top, which we extend in this loop.
                             *
                             * If there is only one extended jump offset, then
                             * sd2->offset won't change and this for loop will
                             * iterate once only.
                             */
                            sd2->offset += deltaFromTop;
                            deltaFromTop += JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN;
                        } else {
                            /*
                             * sd2 comes after sd, and won't be revisited by
                             * the outer for loop, so we have to increase its
                             * offset by delta, not merely by deltaFromTop.
                             */
                            sd2->offset += delta;
                        }

                        delta += JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN;
                        UpdateJumpTargets(cg->jumpTargets, sd2->offset,
                                          JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN);
                    }
                    sd = sd2 - 1;
                }
            }
        }

        growth += delta;
    } while (!done);

    if (growth) {
#ifdef DEBUG_brendan
        TokenStream *ts = &cg->parser->tokenStream;

        printf("%s:%u: %u/%u jumps extended in %d passes (%d=%d+%d)\n",
               ts->filename ? ts->filename : "stdin", cg->firstLine,
               growth / (JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN), cg->numSpanDeps,
               passes, offset + growth, offset, growth);
#endif

        /*
         * Ensure that we have room for the extended jumps, but don't round up
         * to a power of two -- we're done generating code, so we cut to fit.
         */
        limit = CG_LIMIT(cg);
        length = offset + growth;
        next = base + length;
        if (next > limit) {
            JS_ASSERT(length > BYTECODE_CHUNK);
            size = BYTECODE_SIZE(limit - base);
            incr = BYTECODE_SIZE(length) - size;
            cg->codePool->growCast<jsbytecode *>(base, size, incr);
            if (!base) {
                js_ReportOutOfScriptQuota(cx);
                return JS_FALSE;
            }
            CG_BASE(cg) = base;
            CG_LIMIT(cg) = next = base + length;
        }
        CG_NEXT(cg) = next;

        /*
         * Set up a fake span dependency record to guard the end of the code
         * being generated. This guard record is returned as a fencepost by
         * FindNearestSpanDep if there is no real spandep at or above a given
         * unextended code offset.
         */
        guard.top = -1;
        guard.offset = offset + growth;
        guard.before = offset;
        guard.target = NULL;
    }

    /*
     * Now work backwards through the span dependencies, copying chunks of
     * bytecode between each extended jump toward the end of the grown code
     * space, and restoring immediate offset operands for all jump bytecodes.
     * The first chunk of bytecodes, starting at base and ending at the first
     * extended jump offset (NB: this chunk includes the operation bytecode
     * just before that immediate jump offset), doesn't need to be copied.
     */
    JS_ASSERT(sd == sdlimit);
    top = -1;
    while (--sd >= sdbase) {
        if (sd->top != top) {
            top = sd->top;
            op = (JSOp) base[top];
            type = JOF_OPTYPE(op);

            for (sd2 = sd - 1; sd2 >= sdbase && sd2->top == top; sd2--)
                continue;
            sd2++;
            pivot = sd2->offset;
            JS_ASSERT(top == sd2->before);
        }

        oldpc = base + sd->before;
        span = SD_SPAN(sd, pivot);

        /*
         * If this jump didn't need to be extended, restore its span immediate
         * offset operand now, overwriting the index of sd within cg->spanDeps
         * that was stored temporarily after *pc when BuildSpanDepTable ran.
         *
         * Note that span might fit in 16 bits even for an extended jump op,
         * if the op has multiple span operands, not all of which overflowed
         * (e.g. JSOP_LOOKUPSWITCH or JSOP_TABLESWITCH where some cases are in
         * range for a short jump, but others are not).
         */
        if (!JOF_TYPE_IS_EXTENDED_JUMP(type)) {
            JS_ASSERT(JUMP_OFFSET_MIN <= span && span <= JUMP_OFFSET_MAX);
            SET_JUMP_OFFSET(oldpc, span);
            continue;
        }

        /*
         * Set up parameters needed to copy the next run of bytecode starting
         * at offset (which is a cursor into the unextended, original bytecode
         * vector), down to sd->before (a cursor of the same scale as offset,
         * it's the index of the original jump pc). Reuse delta to count the
         * nominal number of bytes to copy.
         */
        pc = base + sd->offset;
        delta = offset - sd->before;
        JS_ASSERT(delta >= 1 + JUMP_OFFSET_LEN);

        /*
         * Don't bother copying the jump offset we're about to reset, but do
         * copy the bytecode at oldpc (which comes just before its immediate
         * jump offset operand), on the next iteration through the loop, by
         * including it in offset's new value.
         */
        offset = sd->before + 1;
        size = BYTECODE_SIZE(delta - (1 + JUMP_OFFSET_LEN));
        if (size) {
            memmove(pc + 1 + JUMPX_OFFSET_LEN,
                    oldpc + 1 + JUMP_OFFSET_LEN,
                    size);
        }

        SET_JUMPX_OFFSET(pc, span);
    }

    if (growth) {
        /*
         * Fix source note deltas. Don't hardwire the delta fixup adjustment,
         * even though currently it must be JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN
         * at each sd that moved. The future may bring different offset sizes
         * for span-dependent instruction operands. However, we fix only main
         * notes here, not prolog notes -- we know that prolog opcodes are not
         * span-dependent, and aren't likely ever to be.
         */
        offset = growth = 0;
        sd = sdbase;
        for (sn = cg->main.notes, snlimit = sn + cg->main.noteCount;
             sn < snlimit;
             sn = SN_NEXT(sn)) {
            /*
             * Recall that the offset of a given note includes its delta, and
             * tells the offset of the annotated bytecode from the main entry
             * point of the script.
             */
            offset += SN_DELTA(sn);
            while (sd < sdlimit && sd->before < offset) {
                /*
                 * To compute the delta to add to sn, we need to look at the
                 * spandep after sd, whose offset - (before + growth) tells by
                 * how many bytes sd's instruction grew.
                 */
                sd2 = sd + 1;
                if (sd2 == sdlimit)
                    sd2 = &guard;
                delta = sd2->offset - (sd2->before + growth);
                if (delta > 0) {
                    JS_ASSERT(delta == JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN);
                    sn = js_AddToSrcNoteDelta(cx, cg, sn, delta);
                    if (!sn)
                        return JS_FALSE;
                    snlimit = cg->main.notes + cg->main.noteCount;
                    growth += delta;
                }
                sd++;
            }

            /*
             * If sn has span-dependent offset operands, check whether each
             * covers further span-dependencies, and increase those operands
             * accordingly. Some source notes measure offset not from the
             * annotated pc, but from that pc plus some small bias. NB: we
             * assume that spec->offsetBias can't itself span span-dependent
             * instructions!
             */
            spec = &js_SrcNoteSpec[SN_TYPE(sn)];
            if (spec->isSpanDep) {
                pivot = offset + spec->offsetBias;
                n = spec->arity;
                for (i = 0; i < n; i++) {
                    span = js_GetSrcNoteOffset(sn, i);
                    if (span == 0)
                        continue;
                    target = pivot + span * spec->isSpanDep;
                    sd2 = FindNearestSpanDep(cg, target,
                                             (target >= pivot)
                                             ? sd - sdbase
                                             : 0,
                                             &guard);

                    /*
                     * Increase target by sd2's before-vs-after offset delta,
                     * which is absolute (i.e., relative to start of script,
                     * as is target). Recompute the span by subtracting its
                     * adjusted pivot from target.
                     */
                    target += sd2->offset - sd2->before;
                    span = target - (pivot + growth);
                    span *= spec->isSpanDep;
                    noteIndex = sn - cg->main.notes;
                    if (!js_SetSrcNoteOffset(cx, cg, noteIndex, i, span))
                        return JS_FALSE;
                    sn = cg->main.notes + noteIndex;
                    snlimit = cg->main.notes + cg->main.noteCount;
                }
            }
        }
        cg->main.lastNoteOffset += growth;

        /*
         * Fix try/catch notes (O(numTryNotes * log2(numSpanDeps)), but it's
         * not clear how we can beat that).
         */
        for (tryNode = cg->lastTryNode; tryNode; tryNode = tryNode->prev) {
            /*
             * First, look for the nearest span dependency at/above tn->start.
             * There may not be any such spandep, in which case the guard will
             * be returned.
             */
            offset = tryNode->note.start;
            sd = FindNearestSpanDep(cg, offset, 0, &guard);
            delta = sd->offset - sd->before;
            tryNode->note.start = offset + delta;

            /*
             * Next, find the nearest spandep at/above tn->start + tn->length.
             * Use its delta minus tn->start's delta to increase tn->length.
             */
            length = tryNode->note.length;
            sd2 = FindNearestSpanDep(cg, offset + length, sd - sdbase, &guard);
            if (sd2 != sd) {
                tryNode->note.length =
                    length + sd2->offset - sd2->before - delta;
            }
        }
    }

#ifdef DEBUG_brendan
  {
    uintN bigspans = 0;
    top = -1;
    for (sd = sdbase; sd < sdlimit; sd++) {
        offset = sd->offset;

        /* NB: sd->top cursors into the original, unextended bytecode vector. */
        if (sd->top != top) {
            JS_ASSERT(top == -1 ||
                      !JOF_TYPE_IS_EXTENDED_JUMP(type) ||
                      bigspans != 0);
            bigspans = 0;
            top = sd->top;
            JS_ASSERT(top == sd->before);
            op = (JSOp) base[offset];
            type = JOF_OPTYPE(op);
            JS_ASSERT(type == JOF_JUMP ||
                      type == JOF_JUMPX ||
                      type == JOF_TABLESWITCH ||
                      type == JOF_TABLESWITCHX ||
                      type == JOF_LOOKUPSWITCH ||
                      type == JOF_LOOKUPSWITCHX);
            pivot = offset;
        }

        pc = base + offset;
        if (JOF_TYPE_IS_EXTENDED_JUMP(type)) {
            span = GET_JUMPX_OFFSET(pc);
            if (span < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < span) {
                bigspans++;
            } else {
                JS_ASSERT(type == JOF_TABLESWITCHX ||
                          type == JOF_LOOKUPSWITCHX);
            }
        } else {
            span = GET_JUMP_OFFSET(pc);
        }
        JS_ASSERT(SD_SPAN(sd, pivot) == span);
    }
    JS_ASSERT(!JOF_TYPE_IS_EXTENDED_JUMP(type) || bigspans != 0);
  }
#endif

    /*
     * Reset so we optimize at most once -- cg may be used for further code
     * generation of successive, independent, top-level statements. No jump
     * can span top-level statements, because JS lacks goto.
     */
    size = SPANDEPS_SIZE(JS_BIT(JS_CeilingLog2(cg->numSpanDeps)));
    cx->free(cg->spanDeps);
    cg->spanDeps = NULL;
    FreeJumpTargets(cg, cg->jumpTargets);
    cg->jumpTargets = NULL;
    cg->numSpanDeps = cg->numJumpTargets = 0;
    cg->spanDepTodo = CG_OFFSET(cg);
    return JS_TRUE;
}
static ptrdiff_t
EmitJump(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t off)
{
    JSBool extend;
    ptrdiff_t jmp;
    jsbytecode *pc;

    extend = off < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < off;
    if (extend && !cg->spanDeps && !BuildSpanDepTable(cx, cg))
        return -1;

    jmp = js_Emit3(cx, cg, op, JUMP_OFFSET_HI(off), JUMP_OFFSET_LO(off));
    if (jmp >= 0 && (extend || cg->spanDeps)) {
        pc = CG_CODE(cg, jmp);
        if (!AddSpanDep(cx, cg, pc, pc, off))
            return -1;
    }
    return jmp;
}
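/*
 * Read the offset operand of the jump at pc. Once cg->spanDeps exists the
 * immediate no longer holds the span itself: an untagged sd->target encodes
 * a backpatch delta, while a tagged one names a jump-target node whose
 * absolute offset we convert back into a span relative to the jump opcode.
 */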
static ptrdiff_t
GetJumpOffset(JSCodeGenerator *cg, jsbytecode *pc)
{
    JSSpanDep *sd;
    JSJumpTarget *jt;
    ptrdiff_t top;

    if (!cg->spanDeps)
        return GET_JUMP_OFFSET(pc);

    sd = GetSpanDep(cg, pc);
    jt = sd->target;
    if (!JT_HAS_TAG(jt))
        return JT_TO_BPDELTA(jt);

    top = sd->top;
    while (--sd >= cg->spanDeps && sd->top == top)
        continue;
    sd++;
    return JT_CLR_TAG(jt)->offset - sd->offset;
}

JSBool
js_SetJumpOffset(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc,
                 ptrdiff_t off)
{
    if (!cg->spanDeps) {
        if (JUMP_OFFSET_MIN <= off && off <= JUMP_OFFSET_MAX) {
            SET_JUMP_OFFSET(pc, off);
            return JS_TRUE;
        }

        if (!BuildSpanDepTable(cx, cg))
            return JS_FALSE;
    }

    return SetSpanDepTarget(cx, cg, GetSpanDep(cg, pc), off);
}
bool
JSTreeContext::inStatement(JSStmtType type)
{
    for (JSStmtInfo *stmt = topStmt; stmt; stmt = stmt->down) {
        if (stmt->type == type)
            return true;
    }
    return false;
}

bool
JSTreeContext::ensureSharpSlots()
{
#if JS_HAS_SHARP_VARS
    JS_STATIC_ASSERT(SHARP_NSLOTS == 2);

    if (sharpSlotBase >= 0) {
        JS_ASSERT(flags & TCF_HAS_SHARPS);
        return true;
    }

    JS_ASSERT(!(flags & TCF_HAS_SHARPS));
    if (inFunction()) {
        JSContext *cx = parser->context;
        JSAtom *sharpArrayAtom = js_Atomize(cx, "#array", 6, 0);
        JSAtom *sharpDepthAtom = js_Atomize(cx, "#depth", 6, 0);
        if (!sharpArrayAtom || !sharpDepthAtom)
            return false;

        sharpSlotBase = fun->u.i.nvars;
        if (!js_AddLocal(cx, fun, sharpArrayAtom, JSLOCAL_VAR))
            return false;
        if (!js_AddLocal(cx, fun, sharpDepthAtom, JSLOCAL_VAR))
            return false;
    } else {
        /*
         * Compiler::compileScript will rebase immediate operands indexing
         * the sharp slots to come at the end of the global script's |nfixed|
         * slots storage, after gvars and regexps.
         */
        sharpSlotBase = 0;
    }
    flags |= TCF_HAS_SHARPS;
#endif
    return true;
}

bool
JSTreeContext::skipSpansGenerator(unsigned skip)
{
    JSTreeContext *tc = this;
    for (unsigned i = 0; i < skip; ++i, tc = tc->parent) {
        if (!tc)
            return false;
        if (tc->flags & TCF_FUN_IS_GENERATOR)
            return true;
    }
    return false;
}
void
js_PushStatement(JSTreeContext *tc, JSStmtInfo *stmt, JSStmtType type,
                 ptrdiff_t top)
{
    stmt->type = type;
    stmt->flags = 0;
    stmt->blockid = tc->blockid();
    SET_STATEMENT_TOP(stmt, top);
    stmt->label = NULL;
    JS_ASSERT(!stmt->blockObj);
    stmt->down = tc->topStmt;
    tc->topStmt = stmt;
    if (STMT_LINKS_SCOPE(stmt)) {
        stmt->downScope = tc->topScopeStmt;
        tc->topScopeStmt = stmt;
    } else {
        stmt->downScope = NULL;
    }
}

void
js_PushBlockScope(JSTreeContext *tc, JSStmtInfo *stmt, JSObject *blockObj,
                  ptrdiff_t top)
{
    js_PushStatement(tc, stmt, STMT_BLOCK, top);
    stmt->flags |= SIF_SCOPE;
    blockObj->setParent(tc->blockChain);
    stmt->downScope = tc->topScopeStmt;
    tc->topScopeStmt = stmt;
    tc->blockChain = blockObj;
    stmt->blockObj = blockObj;
}
/*
 * Emit a backpatch op with offset pointing to the previous jump of this type,
 * so that we can walk back up the chain fixing up the op and jump offset.
 */
static ptrdiff_t
EmitBackPatchOp(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t *lastp)
{
    ptrdiff_t offset, delta;

    offset = CG_OFFSET(cg);
    delta = offset - *lastp;
    *lastp = offset;
    JS_ASSERT(delta > 0);
    return EmitJump(cx, cg, op, delta);
}

/*
 * Macro to emit a bytecode followed by a uint16 immediate operand stored in
 * big-endian order, used for arg and var numbers as well as for atomIndexes.
 * NB: We use cx and cg from our caller's lexical environment, and return
 * false on error.
 */
#define EMIT_UINT16_IMM_OP(op, i)                                             \
    JS_BEGIN_MACRO                                                            \
        if (js_Emit3(cx, cg, op, UINT16_HI(i), UINT16_LO(i)) < 0)             \
            return JS_FALSE;                                                  \
    JS_END_MACRO
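/*
 * Like EMIT_UINT16_IMM_OP, but for opcodes carrying two uint16 immediates;
 * likewise uses the caller's cx and cg and returns false on error.
 */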
#define EMIT_UINT16PAIR_IMM_OP(op, i, j)                                      \
    JS_BEGIN_MACRO                                                            \
        ptrdiff_t off_ = js_EmitN(cx, cg, op, 2 * UINT16_LEN);                \
        if (off_ < 0)                                                         \
            return JS_FALSE;                                                  \
        jsbytecode *pc_ = CG_CODE(cg, off_);                                  \
        SET_UINT16(pc_, i);                                                   \
        pc_ += UINT16_LEN;                                                    \
        SET_UINT16(pc_, j);                                                   \
    JS_END_MACRO

static JSBool
FlushPops(JSContext *cx, JSCodeGenerator *cg, intN *npops)
{
    JS_ASSERT(*npops != 0);
    if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
        return JS_FALSE;
    EMIT_UINT16_IMM_OP(JSOP_POPN, *npops);
    *npops = 0;
    return JS_TRUE;
}
/*
 * Emit additional bytecode(s) for non-local jumps.
 */
static JSBool
EmitNonLocalJumpFixup(JSContext *cx, JSCodeGenerator *cg, JSStmtInfo *toStmt)
{
    intN depth, npops;
    JSStmtInfo *stmt;

    /*
     * The non-local jump fixup we emit will unbalance cg->stackDepth, because
     * the fixup replicates balanced code such as JSOP_LEAVEWITH emitted at the
     * end of a with statement, so we save cg->stackDepth here and restore it
     * just before a successful return.
     */
    depth = cg->stackDepth;
    npops = 0;

#define FLUSH_POPS() if (npops && !FlushPops(cx, cg, &npops)) return JS_FALSE

    for (stmt = cg->topStmt; stmt != toStmt; stmt = stmt->down) {
        switch (stmt->type) {
          case STMT_FINALLY:
            FLUSH_POPS();
            if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
                return JS_FALSE;
            if (EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &GOSUBS(*stmt)) < 0)
                return JS_FALSE;
            break;

          case STMT_WITH:
            /* There's a With object on the stack that we need to pop. */
            FLUSH_POPS();
            if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
                return JS_FALSE;
            if (js_Emit1(cx, cg, JSOP_LEAVEWITH) < 0)
                return JS_FALSE;
            break;

          case STMT_FOR_IN_LOOP:
            /*
             * The iterator and the object being iterated need to be popped.
             */
            FLUSH_POPS();
            if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
                return JS_FALSE;
            if (js_Emit1(cx, cg, JSOP_ENDITER) < 0)
                return JS_FALSE;
            break;

          case STMT_SUBROUTINE:
            /*
             * There's a [exception or hole, retsub pc-index] pair on the
             * stack that we need to pop.
             */
            npops += 2;
            break;

          default:;
        }

        if (stmt->flags & SIF_SCOPE) {
            uintN i;

            /* There is a Block object with locals on the stack to pop. */
            FLUSH_POPS();
            if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
                return JS_FALSE;
            i = OBJ_BLOCK_COUNT(cx, stmt->blockObj);
            EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK, i);
        }
    }

    FLUSH_POPS();
    cg->stackDepth = depth;
    return JS_TRUE;
}

#undef FLUSH_POPS
static ptrdiff_t
EmitGoto(JSContext *cx, JSCodeGenerator *cg, JSStmtInfo *toStmt,
         ptrdiff_t *lastp, JSAtomListElement *label, JSSrcNoteType noteType)
{
    intN index;

    if (!EmitNonLocalJumpFixup(cx, cg, toStmt))
        return -1;

    if (label)
        index = js_NewSrcNote2(cx, cg, noteType, (ptrdiff_t) ALE_INDEX(label));
    else if (noteType != SRC_NULL)
        index = js_NewSrcNote(cx, cg, noteType);
    else
        index = 0;
    if (index < 0)
        return -1;

    return EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, lastp);
}

static JSBool
BackPatch(JSContext *cx, JSCodeGenerator *cg, ptrdiff_t last,
          jsbytecode *target, jsbytecode op)
{
    jsbytecode *pc, *stop;
    ptrdiff_t delta, span;

    pc = CG_CODE(cg, last);
    stop = CG_CODE(cg, -1);
    while (pc != stop) {
        delta = GetJumpOffset(cg, pc);
        span = target - pc;
        CHECK_AND_SET_JUMP_OFFSET(cx, cg, pc, span);

        /*
         * Set *pc after jump offset in case bpdelta didn't overflow, but span
         * does (if so, CHECK_AND_SET_JUMP_OFFSET might call BuildSpanDepTable
         * and need to see the JSOP_BACKPATCH* op at *pc).
         */
        *pc = op;
        pc -= delta;
    }
    return JS_TRUE;
}
void
js_PopStatement(JSTreeContext *tc)
{
    JSStmtInfo *stmt;

    stmt = tc->topStmt;
    tc->topStmt = stmt->down;
    if (STMT_LINKS_SCOPE(stmt)) {
        tc->topScopeStmt = stmt->downScope;
        if (stmt->flags & SIF_SCOPE) {
            tc->blockChain = stmt->blockObj->getParent();
            JS_SCOPE_DEPTH_METERING(--tc->scopeDepth);
        }
    }
}

JSBool
js_PopStatementCG(JSContext *cx, JSCodeGenerator *cg)
{
    JSStmtInfo *stmt;

    stmt = cg->topStmt;
    if (!STMT_IS_TRYING(stmt) &&
        (!BackPatch(cx, cg, stmt->breaks, CG_NEXT(cg), JSOP_GOTO) ||
         !BackPatch(cx, cg, stmt->continues, CG_CODE(cg, stmt->update),
                    JSOP_GOTO))) {
        return JS_FALSE;
    }
    js_PopStatement(cg);
    return JS_TRUE;
}
JSBool
js_DefineCompileTimeConstant(JSContext *cx, JSCodeGenerator *cg, JSAtom *atom,
                             JSParseNode *pn)
{
    jsdouble dval;
    jsint ival;
    JSAtom *valueAtom;
    jsval v;

    /* XXX just do numbers for now */
    if (pn->pn_type == TOK_NUMBER) {
        dval = pn->pn_dval;
        if (JSDOUBLE_IS_INT(dval, ival) && INT_FITS_IN_JSVAL(ival)) {
            v = INT_TO_JSVAL(ival);
        } else {
            /*
             * We atomize double to root a jsdouble instance that we wrap as
             * jsval and store in cg->constMap. This works because atoms are
             * protected from GC during compilation.
             */
            valueAtom = js_AtomizeDouble(cx, dval);
            if (!valueAtom)
                return JS_FALSE;
            v = ATOM_KEY(valueAtom);
        }
        if (!cg->constMap.put(atom, v))
            return JS_FALSE;
    }
    return JS_TRUE;
}
JSStmtInfo *
js_LexicalLookup(JSTreeContext *tc, JSAtom *atom, jsint *slotp, JSStmtInfo *stmt)
{
    JSObject *obj;
    JSScope *scope;
    JSScopeProperty *sprop;

    if (!stmt)
        stmt = tc->topScopeStmt;
    for (; stmt; stmt = stmt->downScope) {
        if (stmt->type == STMT_WITH)
            break;

        /* Skip "maybe scope" statements that don't contain let bindings. */
        if (!(stmt->flags & SIF_SCOPE))
            continue;

        obj = stmt->blockObj;
        JS_ASSERT(obj->getClass() == &js_BlockClass);
        scope = obj->scope();
        sprop = scope->lookup(ATOM_TO_JSID(atom));
        if (sprop) {
            JS_ASSERT(sprop->hasShortID());

            if (slotp) {
                JS_ASSERT(JSVAL_IS_INT(obj->fslots[JSSLOT_BLOCK_DEPTH]));
                *slotp = JSVAL_TO_INT(obj->fslots[JSSLOT_BLOCK_DEPTH]) +
                         sprop->shortid;
            }
            return stmt;
        }
    }

    if (slotp)
        *slotp = -1;
    return stmt;
}

/*
 * Check if the attributes describe a property holding a compile-time constant
 * or a permanent, read-only property without a getter.
 */
#define IS_CONSTANT_PROPERTY(attrs)                                           \
    (((attrs) & (JSPROP_READONLY | JSPROP_PERMANENT | JSPROP_GETTER)) ==      \
     (JSPROP_READONLY | JSPROP_PERMANENT))

/*
 * The function sets vp to JSVAL_HOLE when the atom does not correspond to a
 * name defining a constant.
 */
static JSBool
LookupCompileTimeConstant(JSContext *cx, JSCodeGenerator *cg, JSAtom *atom,
                          jsval *vp)
{
    JSBool ok;
    JSStmtInfo *stmt;
    JSObject *obj, *objbox;
    JSProperty *prop;
    uintN attrs;

    /*
     * Chase down the cg stack, but only until we reach the outermost cg.
     * This enables propagating consts from top-level into switch cases in a
     * function compiled along with the top-level script.
     */
    *vp = JSVAL_HOLE;
    do {
        if (cg->inFunction() || cg->compileAndGo()) {
            /* XXX this will need revising if 'const' becomes block-scoped. */
            stmt = js_LexicalLookup(cg, atom, NULL);
            if (stmt)
                return JS_TRUE;

            if (JSCodeGenerator::ConstMap::Ptr p = cg->constMap.lookup(atom)) {
                JS_ASSERT(p->value != JSVAL_HOLE);
                *vp = p->value;
                return JS_TRUE;
            }

            /*
             * Try looking in the variable object for a direct property that
             * is readonly and permanent. We know such a property can't be
             * shadowed by another property on obj's prototype chain, or a
             * with object or catch variable; nor can prop's value be changed,
             * nor can prop be deleted.
             */
            if (cg->inFunction()) {
                if (js_LookupLocal(cx, cg->fun, atom, NULL) != JSLOCAL_NONE)
                    break;
            } else {
                JS_ASSERT(cg->compileAndGo());
                obj = cg->scopeChain;
                ok = obj->lookupProperty(cx, ATOM_TO_JSID(atom), &objbox, &prop);
                if (!ok)
                    return JS_FALSE;
                if (objbox == obj) {
                    /*
                     * We're compiling code that will be executed immediately,
                     * not re-executed against a different scope chain and/or
                     * variable object. Therefore we can get constant values
                     * from our variable object here.
                     */
                    ok = obj->getAttributes(cx, ATOM_TO_JSID(atom), prop, &attrs);
                    if (ok && IS_CONSTANT_PROPERTY(attrs)) {
                        ok = obj->getProperty(cx, ATOM_TO_JSID(atom), vp);
                        JS_ASSERT_IF(ok, *vp != JSVAL_HOLE);
                    }
                }
                if (prop)
                    objbox->dropProperty(cx, prop);
                if (!ok)
                    return JS_FALSE;
                if (prop)
                    break;
            }
        }
    } while ((cg = (JSCodeGenerator *) cg->parent) != NULL);
    return JS_TRUE;
}
/*
 * Return JSOP_NOP to indicate that index fits 2 bytes and no index segment
 * reset instruction is necessary, JSOP_FALSE to indicate an error or either
 * JSOP_RESETBASE0 or JSOP_RESETBASE1 to indicate the reset bytecode to issue
 * after the main bytecode sequence.
 */
static JSOp
EmitBigIndexPrefix(JSContext *cx, JSCodeGenerator *cg, uintN index)
{
    uintN indexBase;

    /*
     * We have max 3 bytes for indexes and check for INDEX_LIMIT overflow only
     * for big indexes.
     */
    JS_STATIC_ASSERT(INDEX_LIMIT <= JS_BIT(24));
    JS_STATIC_ASSERT(INDEX_LIMIT >=
                     (JSOP_INDEXBASE3 - JSOP_INDEXBASE1 + 2) << 16);

    if (index < JS_BIT(16))
        return JSOP_NOP;
    indexBase = index >> 16;
    if (indexBase <= JSOP_INDEXBASE3 - JSOP_INDEXBASE1 + 1) {
        if (js_Emit1(cx, cg, (JSOp)(JSOP_INDEXBASE1 + indexBase - 1)) < 0)
            return JSOP_FALSE;
        return JSOP_RESETBASE0;
    }

    if (index >= INDEX_LIMIT) {
        JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL,
                             JSMSG_TOO_MANY_LITERALS);
        return JSOP_FALSE;
    }

    if (js_Emit2(cx, cg, JSOP_INDEXBASE, (JSOp)indexBase) < 0)
        return JSOP_FALSE;
    return JSOP_RESETBASE;
}

/*
 * Emit a bytecode and its 2-byte constant index immediate operand. If the
 * index requires more than 2 bytes, emit a prefix op whose 8-bit immediate
 * operand effectively extends the 16-bit immediate of the prefixed opcode,
 * by changing index "segment" (see jsinterp.c). We optimize segments 1-3
 * with single-byte JSOP_INDEXBASE[123] codes.
 *
 * Such prefixing currently requires a suffix to restore the "zero segment"
 * register setting, but this could be optimized further.
 */
static JSBool
EmitIndexOp(JSContext *cx, JSOp op, uintN index, JSCodeGenerator *cg)
{
    JSOp bigSuffix;

    bigSuffix = EmitBigIndexPrefix(cx, cg, index);
    if (bigSuffix == JSOP_FALSE)
        return JS_FALSE;
    EMIT_UINT16_IMM_OP(op, index);
    return bigSuffix == JSOP_NOP || js_Emit1(cx, cg, bigSuffix) >= 0;
}
/*
 * Slight sugar for EmitIndexOp, again accessing cx and cg from the macro
 * caller's lexical environment, and embedding a false return on error.
 */
#define EMIT_INDEX_OP(op, index)                                              \
    JS_BEGIN_MACRO                                                            \
        if (!EmitIndexOp(cx, op, index, cg))                                  \
            return JS_FALSE;                                                  \
    JS_END_MACRO

static JSBool
EmitAtomOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
{
    JSAtomListElement *ale;

    JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
    if (op == JSOP_GETPROP &&
        pn->pn_atom == cx->runtime->atomState.lengthAtom) {
        return js_Emit1(cx, cg, JSOP_LENGTH) >= 0;
    }
    ale = cg->atomList.add(cg->parser, pn->pn_atom);
    if (!ale)
        return JS_FALSE;
    return EmitIndexOp(cx, op, ALE_INDEX(ale), cg);
}

static JSBool
EmitObjectOp(JSContext *cx, JSObjectBox *objbox, JSOp op,
             JSCodeGenerator *cg)
{
    JS_ASSERT(JOF_OPTYPE(op) == JOF_OBJECT);
    return EmitIndexOp(cx, op, cg->objectList.index(objbox), cg);
}
/*
 * What good are ARGNO_LEN and SLOTNO_LEN, you ask? The answer is that, apart
 * from EmitSlotIndexOp, they abstract out the detail that both are 2, and in
 * other parts of the code there's no necessary relationship between the two.
 * The abstraction cracks here in order to share EmitSlotIndexOp code among
 * the JSOP_DEFLOCALFUN and JSOP_GET{ARG,VAR,LOCAL}PROP cases.
 */
JS_STATIC_ASSERT(ARGNO_LEN == 2);
JS_STATIC_ASSERT(SLOTNO_LEN == 2);

static JSBool
EmitSlotIndexOp(JSContext *cx, JSOp op, uintN slot, uintN index,
                JSCodeGenerator *cg)
{
    JSOp bigSuffix;
    ptrdiff_t off;
    jsbytecode *pc;

    JS_ASSERT(JOF_OPTYPE(op) == JOF_SLOTATOM ||
              JOF_OPTYPE(op) == JOF_SLOTOBJECT);
    bigSuffix = EmitBigIndexPrefix(cx, cg, index);
    if (bigSuffix == JSOP_FALSE)
        return JS_FALSE;

    /* Emit [op, slot, index]. */
    off = js_EmitN(cx, cg, op, 2 + INDEX_LEN);
    if (off < 0)
        return JS_FALSE;
    pc = CG_CODE(cg, off);
    SET_UINT16(pc, slot);
    pc += 2;
    SET_INDEX(pc, index);
    return bigSuffix == JSOP_NOP || js_Emit1(cx, cg, bigSuffix) >= 0;
}

/*
 * Adjust the slot for a block local to account for the number of variables
 * that share the same index space with locals. Due to the incremental code
 * generation for top-level script, we do the adjustment via code patching in
 * Compiler::compileScript; see comments there.
 *
 * The function returns -1 on failures.
 */
static jsint
AdjustBlockSlot(JSContext *cx, JSCodeGenerator *cg, jsint slot)
{
    JS_ASSERT((jsuint) slot < cg->maxStackDepth);
    if (cg->inFunction()) {
        slot += cg->fun->u.i.nvars;
        if ((uintN) slot >= SLOTNO_LIMIT) {
            ReportCompileErrorNumber(cx, CG_TS(cg), NULL, JSREPORT_ERROR, JSMSG_TOO_MANY_LOCALS);
            slot = -1;
        }
    }
    return slot;
}
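/*
 * Emit JSOP_ENTERBLOCK for the lexical scope described by pn, then rebase
 * every definition bound in the block object by the block's stack depth (as
 * adjusted by AdjustBlockSlot), so that uses of those definitions can bind
 * directly to stack slots.
 */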
static bool
EmitEnterBlock(JSContext *cx, JSParseNode *pn, JSCodeGenerator *cg)
{
    JS_ASSERT(PN_TYPE(pn) == TOK_LEXICALSCOPE);
    if (!EmitObjectOp(cx, pn->pn_objbox, JSOP_ENTERBLOCK, cg))
        return false;

    JSObject *blockObj = pn->pn_objbox->object;
    jsint depth = AdjustBlockSlot(cx, cg, OBJ_BLOCK_DEPTH(cx, blockObj));
    if (depth < 0)
        return false;

    for (uintN slot = JSSLOT_FREE(&js_BlockClass),
         limit = slot + OBJ_BLOCK_COUNT(cx, blockObj);
         slot < limit; slot++) {
        jsval v = blockObj->getSlot(slot);

        /* Beware the empty destructuring dummy. */
        if (JSVAL_IS_VOID(v)) {
            JS_ASSERT(slot + 1 <= limit);
            continue;
        }

        JSDefinition *dn = (JSDefinition *) JSVAL_TO_PRIVATE(v);
        JS_ASSERT(dn->pn_defn);
        JS_ASSERT(uintN(dn->frameSlot() + depth) < JS_BIT(16));
        dn->pn_cookie += depth;
#ifdef DEBUG
        for (JSParseNode *pnu = dn->dn_uses; pnu; pnu = pnu->pn_link) {
            JS_ASSERT(pnu->pn_lexdef == dn);
            JS_ASSERT(!(pnu->pn_dflags & PND_BOUND));
            JS_ASSERT(pnu->pn_cookie == FREE_UPVAR_COOKIE);
        }
#endif
    }

    blockObj->scope()->freeslot = JSSLOT_FREE(&js_BlockClass);
    return blockObj->growSlots(cx, JSSLOT_FREE(&js_BlockClass));
}
/*
 * When eval is called from a function, the eval code or function code it
 * compiles may reference upvars that live in the eval-calling function. The
 * eval-invoked compiler does not have explicit definitions for these upvars
 * and we do not attempt to create them a-priori (by inspecting the function's
 * args and vars) -- we could, but we'd take an avoidable penalty for each
 * function local not referenced by any upvar. Instead, we map such upvars
 * lazily, growing upvarMap.vector by powers of two.
 *
 * This function knows that it is called with pn pointing to a PN_NAME-arity
 * node, and cg->parser->callerFrame having a non-null fun member, and the
 * static level of cg at least one greater than the eval-calling function's
 * static level.
 */
static bool
MakeUpvarForEval(JSParseNode *pn, JSCodeGenerator *cg)
{
    JSContext *cx = cg->parser->context;
    JSFunction *fun = cg->parser->callerFrame->fun;
    uintN upvarLevel = fun->u.i.script->staticLevel;

    JSFunctionBox *funbox = cg->funbox;
    if (funbox) {
        /*
         * Treat top-level function definitions as escaping (i.e., as funargs),
         * required since we compile each such top level function or statement
         * and throw away the AST, so we can't yet see all funarg uses of this
         * function being compiled (cg->funbox->object). See bug 493177.
         */
        if (funbox->level == fun->u.i.script->staticLevel + 1U &&
            !(((JSFunction *) funbox->object)->flags & JSFUN_LAMBDA)) {
            JS_ASSERT_IF(cx->options & JSOPTION_ANONFUNFIX,
                         ((JSFunction *) funbox->object)->atom);
            return true;
        }

        while (funbox->level >= upvarLevel) {
            if (funbox->node->pn_dflags & PND_FUNARG)
                return true;
            funbox = funbox->parent;
            if (!funbox)
                break;
        }
    }

    JSAtom *atom = pn->pn_atom;

    uintN index;
    JSLocalKind localKind = js_LookupLocal(cx, fun, atom, &index);
    if (localKind == JSLOCAL_NONE)
        return true;

    JS_ASSERT(cg->staticLevel > upvarLevel);
    if (cg->staticLevel >= JS_DISPLAY_SIZE || upvarLevel >= JS_DISPLAY_SIZE)
        return true;

    JSAtomListElement *ale = cg->upvarList.lookup(atom);
    if (!ale) {
        if (cg->inFunction() &&
            !js_AddLocal(cx, cg->fun, atom, JSLOCAL_UPVAR)) {
            return false;
        }

        ale = cg->upvarList.add(cg->parser, atom);
        if (!ale)
            return false;
        JS_ASSERT(ALE_INDEX(ale) == cg->upvarList.count - 1);

        uint32 *vector = cg->upvarMap.vector;
        uint32 length = cg->upvarMap.length;

        JS_ASSERT(ALE_INDEX(ale) <= length);
        if (ALE_INDEX(ale) == length) {
            length = 2 * JS_MAX(2, length);
            vector = (uint32 *) cx->realloc(vector, length * sizeof *vector);
            if (!vector)
                return false;
            cg->upvarMap.vector = vector;
            cg->upvarMap.length = length;
        }

        if (localKind != JSLOCAL_ARG)
            index += fun->nargs;
        JS_ASSERT(index < JS_BIT(16));

        uintN skip = cg->staticLevel - upvarLevel;
        vector[ALE_INDEX(ale)] = MAKE_UPVAR_COOKIE(skip, index);
    }

    pn->pn_op = JSOP_GETUPVAR;
    pn->pn_cookie = MAKE_UPVAR_COOKIE(cg->staticLevel, ALE_INDEX(ale));
    pn->pn_dflags |= PND_BOUND;
    return true;
}
1983 * BindNameToSlot attempts to optimize name gets and sets to stack slot loads
1984 * and stores, given the compile-time information in cg and a TOK_NAME node pn.
1985 * It returns false on error, true on success.
1987 * The caller can inspect pn->pn_cookie for FREE_UPVAR_COOKIE to tell whether
1988 * optimization occurred, in which case BindNameToSlot also updated pn->pn_op.
1989 * If pn->pn_cookie is still FREE_UPVAR_COOKIE on return, pn->pn_op still may
1990 * have been optimized, e.g., from JSOP_NAME to JSOP_CALLEE. Whether or not
1991 * pn->pn_op was modified, if this function finds an argument or local variable
1992 * name, PND_CONST will be set in pn_dflags for read-only properties after a
1993 * successful return.
1995 * NB: if you add more opcodes specialized from JSOP_NAME, etc., don't forget
1996 * to update the TOK_FOR (for-in) and TOK_ASSIGN (op=, e.g. +=) special cases
1997 * in js_EmitTree.
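/*
 * A minimal sketch of the optimization (hypothetical source, assuming no
 * eval or with in scope):
 *
 *   function f(a) { var v = 1; return a + v; }
 *
 * Here BindNameToSlot rewrites the use of a from JSOP_NAME to JSOP_GETARG
 * and the use of v to JSOP_GETLOCAL, storing the frame slot in pn_cookie
 * and setting PND_BOUND in pn_dflags.
 */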
1999 static JSBool
2000 BindNameToSlot(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
2002 JSDefinition *dn;
2003 JSOp op;
2004 JSAtom *atom;
2005 uint32 cookie;
2006 JSDefinition::Kind dn_kind;
2007 JSAtomListElement *ale;
2008 uintN index;
2010 JS_ASSERT(pn->pn_type == TOK_NAME);
2012 /* Idempotency tests come first, since we may be called more than once. */
2013 if (pn->pn_dflags & PND_BOUND)
2014 return JS_TRUE;
2016 /* No cookie initialized for these two, they're pre-bound by definition. */
2017 JS_ASSERT(pn->pn_op != JSOP_ARGUMENTS && pn->pn_op != JSOP_CALLEE);
2020 * The parser linked all uses (including forward references) to their
2021 * definitions, unless a with statement or direct eval intervened.
2023 if (pn->pn_used) {
2024 JS_ASSERT(pn->pn_cookie == FREE_UPVAR_COOKIE);
2025 dn = pn->pn_lexdef;
2026 JS_ASSERT(dn->pn_defn);
2027 if (pn->isDeoptimized())
2028 return JS_TRUE;
2029 pn->pn_dflags |= (dn->pn_dflags & PND_CONST);
2030 } else {
2031 if (!pn->pn_defn)
2032 return JS_TRUE;
2033 dn = (JSDefinition *) pn;
2036 op = PN_OP(pn);
2037 if (op == JSOP_NOP)
2038 return JS_TRUE;
2040 JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
2041 atom = pn->pn_atom;
2042 cookie = dn->pn_cookie;
2043 dn_kind = dn->kind();
2046 * Turn attempts to mutate const-declared bindings into get ops (for
2047 * pre-increment and pre-decrement ops, our caller will have to emit
2048 * JSOP_POS, JSOP_ONE, and JSOP_ADD as well).
2050 * Turn JSOP_DELNAME into JSOP_FALSE if dn is known, as all declared
2051 * bindings visible to the compiler are permanent in JS unless the
2052 * declaration originates in eval code. We detect eval code by testing
2053 * cg->parser->callerFrame, which is set only by eval or a debugger
2054 * equivalent.
2056 * Note that this callerFrame non-null test must be qualified by testing
2057 * !cg->funbox to exclude function code nested in eval code, which is not
2058 * subject to the deletable binding exception.
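/*
 * For example (hypothetical, non-eval code):
 *
 *   const c = 1; c = 2;   // the store decays to a get of c
 *   var x; delete x;      // emitted as JSOP_FALSE: x is permanent
 */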
2060 switch (op) {
2061 case JSOP_NAME:
2062 case JSOP_SETCONST:
2063 break;
2064 case JSOP_DELNAME:
2065 if (dn_kind != JSDefinition::UNKNOWN) {
2066 if (cg->parser->callerFrame && !cg->funbox)
2067 JS_ASSERT(cg->compileAndGo());
2068 else
2069 pn->pn_op = JSOP_FALSE;
2070 pn->pn_dflags |= PND_BOUND;
2071 return JS_TRUE;
2073 break;
2074 default:
2075 if (pn->isConst())
2076 pn->pn_op = op = JSOP_NAME;
2079 if (cookie == FREE_UPVAR_COOKIE) {
2080 JSStackFrame *caller = cg->parser->callerFrame;
2081 if (caller) {
2082 JS_ASSERT(cg->compileAndGo());
2085 * Don't generate upvars on the left side of a for loop. See
2086 * bug 470758.
2088 if (cg->flags & TCF_IN_FOR_INIT)
2089 return JS_TRUE;
2091 JS_ASSERT(caller->script);
2092 if (!caller->fun)
2093 return JS_TRUE;
2096 * Make sure the variable object used by the compiler to initialize
2097 * parent links matches the caller's varobj. Compile-n-go compiler-
2098 * created function objects have the top-level cg's scopeChain set
2099 * as their parent by Parser::newFunction.
2101 JSObject *scopeobj = cg->inFunction()
2102 ? FUN_OBJECT(cg->fun)->getParent()
2103 : cg->scopeChain;
2104 if (scopeobj != cg->parser->callerVarObj)
2105 return JS_TRUE;
2108 * We are compiling eval or debug script inside a function frame
2109 * and the scope chain matches the function's variable object.
2110 * Optimize access to the function's arguments and variables, and to
2111 * the arguments object.
2113 if (op != JSOP_NAME)
2114 return JS_TRUE;
2117 * Generator functions may be resumed from any call stack, which
2118 * defeats the display optimization to static link searching used
2119 * by JSOP_{GET,CALL}UPVAR.
2121 JSFunction *fun = cg->parser->callerFrame->fun;
2122 JS_ASSERT(cg->staticLevel >= fun->u.i.script->staticLevel);
2123 unsigned skip = cg->staticLevel - fun->u.i.script->staticLevel;
2124 if (cg->skipSpansGenerator(skip))
2125 return JS_TRUE;
2127 return MakeUpvarForEval(pn, cg);
2129 return JS_TRUE;
2132 if (dn->pn_dflags & PND_GVAR) {
2134 * If this is a global reference from within a function, leave pn_op as
2135 * JSOP_NAME, etc. We could emit JSOP_*GVAR ops within function code if
2136 * only we could depend on the global frame's slots being valid for all
2137 * calls to the function, and if we could equate the atom index in the
2138 * function's atom map for every global name with its frame slot.
2140 if (cg->inFunction())
2141 return JS_TRUE;
2144 * We are optimizing global variables and there may be no pre-existing
2145 * global property named atom when this global script runs. If atom was
2146 * declared via const or var, optimize pn to access fp->vars using the
2147 * appropriate JSOP_*GVAR op.
2149 * FIXME: should be able to optimize global function access too.
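/*
 * Sketch of the cases above (hypothetical top-level script):
 *
 *   var g = 1;
 *   g;                          // global code: JSOP_NAME => JSOP_GETGVAR
 *   function f() { return g; }  // function code: g stays JSOP_NAME
 */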
2151 JS_ASSERT(dn_kind == JSDefinition::VAR || dn_kind == JSDefinition::CONST);
2153 switch (op) {
2154 case JSOP_NAME: op = JSOP_GETGVAR; break;
2155 case JSOP_SETNAME: op = JSOP_SETGVAR; break;
2156 case JSOP_SETCONST: /* NB: no change */ break;
2157 case JSOP_INCNAME: op = JSOP_INCGVAR; break;
2158 case JSOP_NAMEINC: op = JSOP_GVARINC; break;
2159 case JSOP_DECNAME: op = JSOP_DECGVAR; break;
2160 case JSOP_NAMEDEC: op = JSOP_GVARDEC; break;
2161 case JSOP_FORNAME: /* NB: no change */ break;
2162 case JSOP_DELNAME: /* NB: no change */ break;
2163 default: JS_NOT_REACHED("gvar");
2165 pn->pn_op = op;
2166 pn->pn_cookie = cookie;
2167 pn->pn_dflags |= PND_BOUND;
2168 return JS_TRUE;
2171 uintN level = UPVAR_FRAME_SKIP(cookie);
2172 JS_ASSERT(cg->staticLevel >= level);
2175 * A JSDefinition witnessed as a declaration by the parser cannot be an
2176 * upvar, unless it is the degenerate kind of upvar selected above (in the
2177 * code before the PND_GVAR test) for the special case of compile-and-go
2178 * code generated from eval called from a function, where the eval code
2179 * uses local vars defined in the function. We detect this upvar-for-eval
2180 * case by checking dn's op.
2182 if (PN_OP(dn) == JSOP_GETUPVAR) {
2183 JS_ASSERT(cg->staticLevel >= level);
2184 if (op != JSOP_NAME)
2185 return JS_TRUE;
2187 #ifdef DEBUG
2188 JSStackFrame *caller = cg->parser->callerFrame;
2189 #endif
2190 JS_ASSERT(caller);
2191 JS_ASSERT(caller->script);
2193 JSTreeContext *tc = cg;
2194 while (tc->staticLevel != level)
2195 tc = tc->parent;
2196 JS_ASSERT(tc->compiling());
2198 JSCodeGenerator *evalcg = (JSCodeGenerator *) tc;
2199 JS_ASSERT(evalcg->compileAndGo());
2200 JS_ASSERT(caller->fun && cg->parser->callerVarObj == evalcg->scopeChain);
2203 * Don't generate upvars on the left side of a for loop. See
2204 * bug 470758 and bug 520513.
2206 if (evalcg->flags & TCF_IN_FOR_INIT)
2207 return JS_TRUE;
2209 if (cg->staticLevel == level) {
2210 pn->pn_op = JSOP_GETUPVAR;
2211 pn->pn_cookie = cookie;
2212 pn->pn_dflags |= PND_BOUND;
2213 return JS_TRUE;
2216 return MakeUpvarForEval(pn, cg);
2219 uintN skip = cg->staticLevel - level;
2220 if (skip != 0) {
2221 JS_ASSERT(cg->inFunction());
2222 JS_ASSERT_IF(UPVAR_FRAME_SLOT(cookie) != CALLEE_UPVAR_SLOT,
2223 cg->lexdeps.lookup(atom));
2224 JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
2225 JS_ASSERT(cg->fun->u.i.skipmin <= skip);
2228 * If op is a mutating opcode, this upvar's static level is too big to
2229 * index into the display, or the function is heavyweight, we fall back
2230 * on JSOP_*NAME*.
2232 if (op != JSOP_NAME)
2233 return JS_TRUE;
2234 if (level >= JS_DISPLAY_SIZE)
2235 return JS_TRUE;
2236 if (cg->flags & TCF_FUN_HEAVYWEIGHT)
2237 return JS_TRUE;
2239 if (FUN_FLAT_CLOSURE(cg->fun)) {
2240 op = JSOP_GETDSLOT;
2241 } else {
2243 * The function we're compiling may not be heavyweight, but if it
2244 * escapes as a funarg, we can't use JSOP_GETUPVAR/JSOP_CALLUPVAR.
2245 * Parser::analyzeFunctions has arranged for this function's
2246 * enclosing functions to be heavyweight, so we can safely stick
2247 * with JSOP_NAME/JSOP_CALLNAME.
2249 if (cg->funbox->node->pn_dflags & PND_FUNARG)
2250 return JS_TRUE;
2253 * Generator functions may be resumed from any call stack, which
2254 * defeats the display optimization to static link searching used
2255 * by JSOP_{GET,CALL}UPVAR.
2257 if (cg->skipSpansGenerator(skip))
2258 return JS_TRUE;
2260 op = JSOP_GETUPVAR;
2263 ale = cg->upvarList.lookup(atom);
2264 if (ale) {
2265 index = ALE_INDEX(ale);
2266 } else {
2267 if (!js_AddLocal(cx, cg->fun, atom, JSLOCAL_UPVAR))
2268 return JS_FALSE;
2270 ale = cg->upvarList.add(cg->parser, atom);
2271 if (!ale)
2272 return JS_FALSE;
2273 index = ALE_INDEX(ale);
2274 JS_ASSERT(index == cg->upvarList.count - 1);
2276 uint32 *vector = cg->upvarMap.vector;
2277 if (!vector) {
2278 uint32 length = cg->lexdeps.count;
2280 vector = (uint32 *) js_calloc(length * sizeof *vector);
2281 if (!vector) {
2282 JS_ReportOutOfMemory(cx);
2283 return JS_FALSE;
2285 cg->upvarMap.vector = vector;
2286 cg->upvarMap.length = length;
2289 uintN slot = UPVAR_FRAME_SLOT(cookie);
2290 if (slot != CALLEE_UPVAR_SLOT && dn_kind != JSDefinition::ARG) {
2291 JSTreeContext *tc = cg;
2292 do {
2293 tc = tc->parent;
2294 } while (tc->staticLevel != level);
2295 if (tc->inFunction())
2296 slot += tc->fun->nargs;
2299 vector[index] = MAKE_UPVAR_COOKIE(skip, slot);
2302 pn->pn_op = op;
2303 pn->pn_cookie = index;
2304 pn->pn_dflags |= PND_BOUND;
2305 return JS_TRUE;
2309 * We are compiling a function body and may be able to optimize a name
2310 * to a stack slot. Look for an argument or variable in the function and
2311 * rewrite pn_op and update pn accordingly.
2313 switch (dn_kind) {
2314 case JSDefinition::UNKNOWN:
2315 return JS_TRUE;
2317 case JSDefinition::LET:
2318 switch (op) {
2319 case JSOP_NAME: op = JSOP_GETLOCAL; break;
2320 case JSOP_SETNAME: op = JSOP_SETLOCAL; break;
2321 case JSOP_INCNAME: op = JSOP_INCLOCAL; break;
2322 case JSOP_NAMEINC: op = JSOP_LOCALINC; break;
2323 case JSOP_DECNAME: op = JSOP_DECLOCAL; break;
2324 case JSOP_NAMEDEC: op = JSOP_LOCALDEC; break;
2325 case JSOP_FORNAME: op = JSOP_FORLOCAL; break;
2326 default: JS_NOT_REACHED("let");
2328 break;
2330 case JSDefinition::ARG:
2331 switch (op) {
2332 case JSOP_NAME: op = JSOP_GETARG; break;
2333 case JSOP_SETNAME: op = JSOP_SETARG; break;
2334 case JSOP_INCNAME: op = JSOP_INCARG; break;
2335 case JSOP_NAMEINC: op = JSOP_ARGINC; break;
2336 case JSOP_DECNAME: op = JSOP_DECARG; break;
2337 case JSOP_NAMEDEC: op = JSOP_ARGDEC; break;
2338 case JSOP_FORNAME: op = JSOP_FORARG; break;
2339 default: JS_NOT_REACHED("arg");
2341 JS_ASSERT(!pn->isConst());
2342 break;
2344 case JSDefinition::VAR:
2345 if (PN_OP(dn) == JSOP_CALLEE) {
2346 JS_ASSERT(op != JSOP_CALLEE);
2347 JS_ASSERT((cg->fun->flags & JSFUN_LAMBDA) && atom == cg->fun->atom);
2350 * Leave pn->pn_op == JSOP_NAME if cg->fun is heavyweight, as we
2351 * cannot be sure cg->fun is not something of the form:
2353 * var ff = (function f(s) { eval(s); return f; });
2355 * where a caller invokes ff("var f = 42"). The result returned for
2356 * such an invocation must be 42, since the callee name is
2357 * lexically bound in an outer declarative environment from the
2358 * function's activation. See jsfun.cpp:call_resolve.
2360 JS_ASSERT(op != JSOP_DELNAME);
2361 if (!(cg->flags & TCF_FUN_HEAVYWEIGHT)) {
2362 op = JSOP_CALLEE;
2363 pn->pn_dflags |= PND_CONST;
2366 pn->pn_op = op;
2367 pn->pn_dflags |= PND_BOUND;
2368 return JS_TRUE;
2370 /* FALL THROUGH */
2372 default:
2373 JS_ASSERT_IF(dn_kind != JSDefinition::FUNCTION,
2374 dn_kind == JSDefinition::VAR ||
2375 dn_kind == JSDefinition::CONST);
2376 switch (op) {
2377 case JSOP_NAME: op = JSOP_GETLOCAL; break;
2378 case JSOP_SETNAME: op = JSOP_SETLOCAL; break;
2379 case JSOP_SETCONST: op = JSOP_SETLOCAL; break;
2380 case JSOP_INCNAME: op = JSOP_INCLOCAL; break;
2381 case JSOP_NAMEINC: op = JSOP_LOCALINC; break;
2382 case JSOP_DECNAME: op = JSOP_DECLOCAL; break;
2383 case JSOP_NAMEDEC: op = JSOP_LOCALDEC; break;
2384 case JSOP_FORNAME: op = JSOP_FORLOCAL; break;
2385 default: JS_NOT_REACHED("local");
2387 JS_ASSERT_IF(dn_kind == JSDefinition::CONST, pn->pn_dflags & PND_CONST);
2388 break;
2391 JS_ASSERT(op != PN_OP(pn));
2392 pn->pn_op = op;
2393 pn->pn_cookie = UPVAR_FRAME_SLOT(cookie);
2394 pn->pn_dflags |= PND_BOUND;
2395 return JS_TRUE;
2399 * If pn contains a useful expression, return true with *answer set to true.
2400 * If pn contains a useless expression, return true with *answer set to false.
2401 * Return false on error.
2403 * The caller should initialize *answer to false and invoke this function on
2404 * an expression statement or similar subtree to decide whether the tree could
2405 * produce code that has any side effects. For an expression statement, we
2406 * define useless code as code with no side effects, because the main effect,
2407 * the value left on the stack after the code executes, will be discarded by a
2408 * pop bytecode.
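/*
 * For instance (hypothetical statements):
 *
 *   v;        // v a bound local: useless, *answer stays false
 *   o.p;      // could run a getter, so *answer is set to true
 *   f();      // calls are always presumed useful
 */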
2410 static JSBool
2411 CheckSideEffects(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
2412 JSBool *answer)
2414 JSBool ok;
2415 JSParseNode *pn2;
2417 ok = JS_TRUE;
2418 if (!pn || *answer)
2419 return ok;
2421 switch (pn->pn_arity) {
2422 case PN_FUNC:
2424 * A named function, contrary to ES3, is no longer useful, because we
2425 * bind its name lexically (using JSOP_CALLEE) instead of creating an
2426 * Object instance and binding a readonly, permanent property in it
2427 * (the object and binding can be detected and hijacked or captured).
2428 * This is a bug fix to ES3; it is fixed in ES3.1 drafts.
2430 *answer = JS_FALSE;
2431 break;
2433 case PN_LIST:
2434 if (pn->pn_op == JSOP_NOP ||
2435 pn->pn_op == JSOP_OR || pn->pn_op == JSOP_AND ||
2436 pn->pn_op == JSOP_STRICTEQ || pn->pn_op == JSOP_STRICTNE) {
2438 * Non-operators along with ||, &&, ===, and !== never invoke
2439 * toString or valueOf.
2441 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next)
2442 ok &= CheckSideEffects(cx, cg, pn2, answer);
2443 } else {
2445 * All invocation operations (construct: TOK_NEW, call: TOK_LP)
2446 * are presumed to be useful, because they may have side effects
2447 * even if their main effect (their return value) is discarded.
2449 * TOK_LB binary trees of 3 or more nodes are flattened into lists
2450 * to avoid too much recursion. All such lists must be presumed
2451 * to be useful because each index operation could invoke a getter
2452 * (the JSOP_ARGUMENTS special case below, in the PN_BINARY case,
2453 * does not apply here: arguments[i][j] might invoke a getter).
2455 * Likewise, array and object initialisers may call prototype
2456 * setters (the __defineSetter__ built-in, and writable __proto__
2457 * on Array.prototype create this hazard). Initialiser list nodes
2458 * have JSOP_NEWINIT in their pn_op.
2460 *answer = JS_TRUE;
2462 break;
2464 case PN_TERNARY:
2465 ok = CheckSideEffects(cx, cg, pn->pn_kid1, answer) &&
2466 CheckSideEffects(cx, cg, pn->pn_kid2, answer) &&
2467 CheckSideEffects(cx, cg, pn->pn_kid3, answer);
2468 break;
2470 case PN_BINARY:
2471 if (pn->pn_type == TOK_ASSIGN) {
2473 * Assignment is presumed to be useful, even if the next operation
2474 * is another assignment overwriting this one's ostensible effect,
2475 * because the left operand may be a property with a setter that
2476 * has side effects.
2478 * The only exception is assignment of a useless value to a const
2479 * declared in the function currently being compiled.
2481 pn2 = pn->pn_left;
2482 if (pn2->pn_type != TOK_NAME) {
2483 *answer = JS_TRUE;
2484 } else {
2485 if (!BindNameToSlot(cx, cg, pn2))
2486 return JS_FALSE;
2487 if (!CheckSideEffects(cx, cg, pn->pn_right, answer))
2488 return JS_FALSE;
2489 if (!*answer && (pn->pn_op != JSOP_NOP || !pn2->isConst()))
2490 *answer = JS_TRUE;
2492 } else {
2493 if (pn->pn_op == JSOP_OR || pn->pn_op == JSOP_AND ||
2494 pn->pn_op == JSOP_STRICTEQ || pn->pn_op == JSOP_STRICTNE) {
2496 * ||, &&, ===, and !== do not convert their operands via
2497 * toString or valueOf method calls.
2499 ok = CheckSideEffects(cx, cg, pn->pn_left, answer) &&
2500 CheckSideEffects(cx, cg, pn->pn_right, answer);
2501 } else {
2503 * We can't easily prove that neither operand ever denotes an
2504 * object with a toString or valueOf method.
2506 *answer = JS_TRUE;
2509 break;
2511 case PN_UNARY:
2512 switch (pn->pn_type) {
2513 case TOK_DELETE:
2514 pn2 = pn->pn_kid;
2515 switch (pn2->pn_type) {
2516 case TOK_NAME:
2517 if (!BindNameToSlot(cx, cg, pn2))
2518 return JS_FALSE;
2519 if (pn2->isConst()) {
2520 *answer = JS_FALSE;
2521 break;
2523 /* FALL THROUGH */
2524 case TOK_DOT:
2525 #if JS_HAS_XML_SUPPORT
2526 case TOK_DBLDOT:
2527 #endif
2528 case TOK_LP:
2529 case TOK_LB:
2530 /* All these delete addressing modes have effects too. */
2531 *answer = JS_TRUE;
2532 break;
2533 default:
2534 ok = CheckSideEffects(cx, cg, pn2, answer);
2535 break;
2537 break;
2539 case TOK_UNARYOP:
2540 if (pn->pn_op == JSOP_NOT) {
2541 /* ! does not convert its operand via toString or valueOf. */
2542 ok = CheckSideEffects(cx, cg, pn->pn_kid, answer);
2543 break;
2545 /* FALL THROUGH */
2547 default:
2549 * All of TOK_INC, TOK_DEC, TOK_THROW, TOK_YIELD, and TOK_DEFSHARP
2550 * have direct effects. Of the remaining unary-arity node types,
2551 * we can't easily prove that the operand never denotes an object
2552 * with a toString or valueOf method.
2554 *answer = JS_TRUE;
2555 break;
2557 break;
2559 case PN_NAME:
2561 * Take care to avoid trying to bind a label name (labels, both for
2562 * statements and property values in object initialisers, have pn_op
2563 * defaulted to JSOP_NOP).
2565 if (pn->pn_type == TOK_NAME && pn->pn_op != JSOP_NOP) {
2566 if (!BindNameToSlot(cx, cg, pn))
2567 return JS_FALSE;
2568 if (pn->pn_op != JSOP_ARGUMENTS && pn->pn_op != JSOP_CALLEE &&
2569 pn->pn_cookie == FREE_UPVAR_COOKIE) {
2571 * Not an argument or local variable use, and not a use of an
2572 * unshadowed named function expression's given name, so this
2573 * expression could invoke a getter that has side effects.
2575 *answer = JS_TRUE;
2578 pn2 = pn->maybeExpr();
2579 if (pn->pn_type == TOK_DOT) {
2580 if (pn2->pn_type == TOK_NAME && !BindNameToSlot(cx, cg, pn2))
2581 return JS_FALSE;
2582 if (!(pn2->pn_op == JSOP_ARGUMENTS &&
2583 pn->pn_atom == cx->runtime->atomState.lengthAtom)) {
2585 * Any dotted property reference could call a getter, except
2586 * for arguments.length where arguments is unambiguous.
2588 *answer = JS_TRUE;
2591 ok = CheckSideEffects(cx, cg, pn2, answer);
2592 break;
2594 case PN_NAMESET:
2595 ok = CheckSideEffects(cx, cg, pn->pn_tree, answer);
2596 break;
2598 case PN_NULLARY:
2599 if (pn->pn_type == TOK_DEBUGGER)
2600 *answer = JS_TRUE;
2601 break;
2603 return ok;
2606 static JSBool
2607 EmitNameOp(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
2608 JSBool callContext)
2610 JSOp op;
2612 if (!BindNameToSlot(cx, cg, pn))
2613 return JS_FALSE;
2614 op = PN_OP(pn);
2616 if (callContext) {
2617 switch (op) {
2618 case JSOP_NAME:
2619 op = JSOP_CALLNAME;
2620 break;
2621 case JSOP_GETGVAR:
2622 JS_ASSERT(!cg->funbox);
2623 op = JSOP_CALLGVAR;
2624 break;
2625 case JSOP_GETARG:
2626 op = JSOP_CALLARG;
2627 break;
2628 case JSOP_GETLOCAL:
2629 op = JSOP_CALLLOCAL;
2630 break;
2631 case JSOP_GETUPVAR:
2632 op = JSOP_CALLUPVAR;
2633 break;
2634 case JSOP_GETDSLOT:
2635 op = JSOP_CALLDSLOT;
2636 break;
2637 default:
2638 JS_ASSERT(op == JSOP_ARGUMENTS || op == JSOP_CALLEE);
2639 break;
2643 if (op == JSOP_ARGUMENTS || op == JSOP_CALLEE) {
2644 if (js_Emit1(cx, cg, op) < 0)
2645 return JS_FALSE;
2646 if (callContext && js_Emit1(cx, cg, JSOP_NULL) < 0)
2647 return JS_FALSE;
2648 } else {
2649 if (pn->pn_cookie != FREE_UPVAR_COOKIE) {
2650 EMIT_UINT16_IMM_OP(op, pn->pn_cookie);
2651 } else {
2652 if (!EmitAtomOp(cx, pn, op, cg))
2653 return JS_FALSE;
2657 return JS_TRUE;
2660 #if JS_HAS_XML_SUPPORT
2661 static JSBool
2662 EmitXMLName(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
2664 JSParseNode *pn2;
2665 uintN oldflags;
2667 JS_ASSERT(pn->pn_type == TOK_UNARYOP);
2668 JS_ASSERT(pn->pn_op == JSOP_XMLNAME);
2669 JS_ASSERT(op == JSOP_XMLNAME || op == JSOP_CALLXMLNAME);
2671 pn2 = pn->pn_kid;
2672 oldflags = cg->flags;
2673 cg->flags &= ~TCF_IN_FOR_INIT;
2674 if (!js_EmitTree(cx, cg, pn2))
2675 return JS_FALSE;
2676 cg->flags |= oldflags & TCF_IN_FOR_INIT;
2677 if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
2678 CG_OFFSET(cg) - pn2->pn_offset) < 0) {
2679 return JS_FALSE;
2682 return js_Emit1(cx, cg, op) >= 0;
2684 #endif
2686 static JSBool
2687 EmitSpecialPropOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
2690 * Special case for obj.__proto__ to deoptimize away from fast paths in the
2691 * interpreter and trace recorder, which skip dense array instances by
2692 * going up to Array.prototype before looking up the property name.
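/*
 * In effect (a sketch): obj.__proto__ is emitted as if it were written
 * obj["__proto__"], i.e., JSOP_QNAMEPART pushing the property name
 * followed by op (JSOP_GETELEM here), rather than as a JSOP_GETPROP
 * that those fast paths would optimize.
 */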
2694 JSAtomListElement *ale = cg->atomList.add(cg->parser, pn->pn_atom);
2695 if (!ale)
2696 return JS_FALSE;
2697 if (!EmitIndexOp(cx, JSOP_QNAMEPART, ALE_INDEX(ale), cg))
2698 return JS_FALSE;
2699 if (js_Emit1(cx, cg, op) < 0)
2700 return JS_FALSE;
2701 return JS_TRUE;
2704 static JSBool
2705 EmitPropOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg,
2706 JSBool callContext)
2708 JSParseNode *pn2, *pndot, *pnup, *pndown;
2709 ptrdiff_t top;
2711 JS_ASSERT(pn->pn_arity == PN_NAME);
2712 pn2 = pn->maybeExpr();
2714 /* Special case deoptimization for __proto__. */
2715 if ((op == JSOP_GETPROP || op == JSOP_CALLPROP) &&
2716 pn->pn_atom == cx->runtime->atomState.protoAtom) {
2717 if (pn2 && !js_EmitTree(cx, cg, pn2))
2718 return JS_FALSE;
2719 return EmitSpecialPropOp(cx, pn, callContext ? JSOP_CALLELEM : JSOP_GETELEM, cg);
2722 if (callContext) {
2723 JS_ASSERT(pn->pn_type == TOK_DOT);
2724 JS_ASSERT(op == JSOP_GETPROP);
2725 op = JSOP_CALLPROP;
2726 } else if (op == JSOP_GETPROP && pn->pn_type == TOK_DOT) {
2727 if (pn2->pn_op == JSOP_THIS) {
2728 if (pn->pn_atom != cx->runtime->atomState.lengthAtom) {
2729 /* Fast path for gets of |this.foo|. */
2730 return EmitAtomOp(cx, pn, JSOP_GETTHISPROP, cg);
2732 } else if (pn2->pn_type == TOK_NAME) {
2734 * Try to optimize:
2735 * - arguments.length into JSOP_ARGCNT
2736 * - argname.prop into JSOP_GETARGPROP
2737 * - localname.prop into JSOP_GETLOCALPROP
2738 * but don't do this if the property is 'length' -- prefer to emit
2739 * JSOP_GETARG, etc., and then JSOP_LENGTH.
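/*
 * A sketch of these rewrites (hypothetical function):
 *
 *   function f(a) { var b = {};
 *     a.x;              // => JSOP_GETARGPROP
 *     b.x;              // => JSOP_GETLOCALPROP
 *     arguments.length; // => JSOP_ARGCNT
 *   }
 */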
2741 if (!BindNameToSlot(cx, cg, pn2))
2742 return JS_FALSE;
2743 if (pn->pn_atom == cx->runtime->atomState.lengthAtom) {
2744 if (pn2->pn_op == JSOP_ARGUMENTS)
2745 return js_Emit1(cx, cg, JSOP_ARGCNT) >= 0;
2746 } else {
2747 switch (pn2->pn_op) {
2748 case JSOP_GETARG:
2749 op = JSOP_GETARGPROP;
2750 goto do_indexconst;
2751 case JSOP_GETLOCAL:
2752 op = JSOP_GETLOCALPROP;
2753 do_indexconst: {
2754 JSAtomListElement *ale;
2755 jsatomid atomIndex;
2757 ale = cg->atomList.add(cg->parser, pn->pn_atom);
2758 if (!ale)
2759 return JS_FALSE;
2760 atomIndex = ALE_INDEX(ale);
2761 return EmitSlotIndexOp(cx, op, pn2->pn_cookie, atomIndex, cg);
2764 default:;
2771 * If the object operand is also a dotted property reference, reverse the
2772 * list linked via pn_expr temporarily so we can iterate over it from the
2773 * bottom up (reversing again as we go), to avoid excessive recursion.
2775 if (pn2->pn_type == TOK_DOT) {
2776 pndot = pn2;
2777 pnup = NULL;
2778 top = CG_OFFSET(cg);
2779 for (;;) {
2780 /* Reverse pndot->pn_expr to point up, not down. */
2781 pndot->pn_offset = top;
2782 JS_ASSERT(!pndot->pn_used);
2783 pndown = pndot->pn_expr;
2784 pndot->pn_expr = pnup;
2785 if (pndown->pn_type != TOK_DOT)
2786 break;
2787 pnup = pndot;
2788 pndot = pndown;
2791 /* pndown is a primary expression, not a dotted property reference. */
2792 if (!js_EmitTree(cx, cg, pndown))
2793 return JS_FALSE;
2795 do {
2796 /* Walk back up the list, emitting annotated name ops. */
2797 if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
2798 CG_OFFSET(cg) - pndown->pn_offset) < 0) {
2799 return JS_FALSE;
2802 /* Special case deoptimization on __proto__, as above. */
2803 if (pndot->pn_arity == PN_NAME && pndot->pn_atom == cx->runtime->atomState.protoAtom) {
2804 if (!EmitSpecialPropOp(cx, pndot, JSOP_GETELEM, cg))
2805 return JS_FALSE;
2806 } else if (!EmitAtomOp(cx, pndot, PN_OP(pndot), cg)) {
2807 return JS_FALSE;
2810 /* Reverse the pn_expr link again. */
2811 pnup = pndot->pn_expr;
2812 pndot->pn_expr = pndown;
2813 pndown = pndot;
2814 } while ((pndot = pnup) != NULL);
2815 } else {
2816 if (!js_EmitTree(cx, cg, pn2))
2817 return JS_FALSE;
2820 if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
2821 CG_OFFSET(cg) - pn2->pn_offset) < 0) {
2822 return JS_FALSE;
2825 return EmitAtomOp(cx, pn, op, cg);
2828 static JSBool
2829 EmitElemOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
2831 ptrdiff_t top;
2832 JSParseNode *left, *right, *next, ltmp, rtmp;
2833 jsint slot;
2835 top = CG_OFFSET(cg);
2836 if (pn->pn_arity == PN_LIST) {
2837 /* Left-associative operator chain to avoid too much recursion. */
2838 JS_ASSERT(pn->pn_op == JSOP_GETELEM);
2839 JS_ASSERT(pn->pn_count >= 3);
2840 left = pn->pn_head;
2841 right = pn->last();
2842 next = left->pn_next;
2843 JS_ASSERT(next != right);
2846 * Try to optimize arguments[0][j]... into JSOP_ARGSUB<0> followed by
2847 * one or more index-expression/JSOP_GETELEM op pairs.
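/*
 * E.g. (a sketch), arguments[0][j] compiles to JSOP_ARGSUB<0>, then the
 * code for j, then an annotated JSOP_GETELEM, skipping the general
 * arguments-object path.
 */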
2849 if (left->pn_type == TOK_NAME && next->pn_type == TOK_NUMBER) {
2850 if (!BindNameToSlot(cx, cg, left))
2851 return JS_FALSE;
2852 if (left->pn_op == JSOP_ARGUMENTS &&
2853 JSDOUBLE_IS_INT(next->pn_dval, slot) &&
2854 (jsuint)slot < JS_BIT(16)) {
2856 * arguments[i]() requires the arguments object as "this".
2857 * Check that we never generate a list for that usage.
2859 JS_ASSERT(op != JSOP_CALLELEM || next->pn_next);
2860 left->pn_offset = next->pn_offset = top;
2861 EMIT_UINT16_IMM_OP(JSOP_ARGSUB, (jsatomid)slot);
2862 left = next;
2863 next = left->pn_next;
2868 * Check whether we generated JSOP_ARGSUB, just above, and have only
2869 * one more index expression to emit. Given arguments[0][j], we must
2870 * skip the while loop altogether, falling through to emit code for j
2871 * (in the subtree referenced by right), followed by the annotated op,
2872 * at the bottom of this function.
2874 JS_ASSERT(next != right || pn->pn_count == 3);
2875 if (left == pn->pn_head) {
2876 if (!js_EmitTree(cx, cg, left))
2877 return JS_FALSE;
2879 while (next != right) {
2880 if (!js_EmitTree(cx, cg, next))
2881 return JS_FALSE;
2882 if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
2883 return JS_FALSE;
2884 if (js_Emit1(cx, cg, JSOP_GETELEM) < 0)
2885 return JS_FALSE;
2886 next = next->pn_next;
2888 } else {
2889 if (pn->pn_arity == PN_NAME) {
2891 * Set left and right so pn appears to be a TOK_LB node, instead
2892 * of a TOK_DOT node. See the TOK_FOR/IN case in js_EmitTree, and
2893 * EmitDestructuringOps nearer below. In the destructuring case,
2894 * the base expression (pn_expr) of the name may be null, which
2895 * means we have to emit a JSOP_BINDNAME.
2897 left = pn->maybeExpr();
2898 if (!left) {
2899 left = &ltmp;
2900 left->pn_type = TOK_STRING;
2901 left->pn_op = JSOP_BINDNAME;
2902 left->pn_arity = PN_NULLARY;
2903 left->pn_pos = pn->pn_pos;
2904 left->pn_atom = pn->pn_atom;
2906 right = &rtmp;
2907 right->pn_type = TOK_STRING;
2908 JS_ASSERT(ATOM_IS_STRING(pn->pn_atom));
2909 right->pn_op = js_IsIdentifier(ATOM_TO_STRING(pn->pn_atom))
2910 ? JSOP_QNAMEPART
2911 : JSOP_STRING;
2912 right->pn_arity = PN_NULLARY;
2913 right->pn_pos = pn->pn_pos;
2914 right->pn_atom = pn->pn_atom;
2915 } else {
2916 JS_ASSERT(pn->pn_arity == PN_BINARY);
2917 left = pn->pn_left;
2918 right = pn->pn_right;
2921 /* Try to optimize arguments[0] (e.g.) into JSOP_ARGSUB<0>. */
2922 if (op == JSOP_GETELEM &&
2923 left->pn_type == TOK_NAME &&
2924 right->pn_type == TOK_NUMBER) {
2925 if (!BindNameToSlot(cx, cg, left))
2926 return JS_FALSE;
2927 if (left->pn_op == JSOP_ARGUMENTS &&
2928 JSDOUBLE_IS_INT(right->pn_dval, slot) &&
2929 (jsuint)slot < JS_BIT(16)) {
2930 left->pn_offset = right->pn_offset = top;
2931 EMIT_UINT16_IMM_OP(JSOP_ARGSUB, (jsatomid)slot);
2932 return JS_TRUE;
2936 if (!js_EmitTree(cx, cg, left))
2937 return JS_FALSE;
2940 /* The right side of the descendant operator is implicitly quoted. */
2941 JS_ASSERT(op != JSOP_DESCENDANTS || right->pn_type != TOK_STRING ||
2942 right->pn_op == JSOP_QNAMEPART);
2943 if (!js_EmitTree(cx, cg, right))
2944 return JS_FALSE;
2945 if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
2946 return JS_FALSE;
2947 return js_Emit1(cx, cg, op) >= 0;
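/*
 * EmitNumberOp picks the smallest encoding for a numeric literal, in
 * order of preference (a sketch): JSOP_ZERO and JSOP_ONE for 0 and 1,
 * JSOP_INT8 for values that fit in a signed byte, JSOP_UINT16 and
 * JSOP_UINT24 for non-negative values that fit, JSOP_INT32 for other
 * integers, and finally an atomized double via JSOP_DOUBLE.
 */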
2950 static JSBool
2951 EmitNumberOp(JSContext *cx, jsdouble dval, JSCodeGenerator *cg)
2953 jsint ival;
2954 uint32 u;
2955 ptrdiff_t off;
2956 jsbytecode *pc;
2957 JSAtom *atom;
2958 JSAtomListElement *ale;
2960 if (JSDOUBLE_IS_INT(dval, ival) && INT_FITS_IN_JSVAL(ival)) {
2961 if (ival == 0)
2962 return js_Emit1(cx, cg, JSOP_ZERO) >= 0;
2963 if (ival == 1)
2964 return js_Emit1(cx, cg, JSOP_ONE) >= 0;
2965 if ((jsint)(int8)ival == ival)
2966 return js_Emit2(cx, cg, JSOP_INT8, (jsbytecode)(int8)ival) >= 0;
2968 u = (uint32)ival;
2969 if (u < JS_BIT(16)) {
2970 EMIT_UINT16_IMM_OP(JSOP_UINT16, u);
2971 } else if (u < JS_BIT(24)) {
2972 off = js_EmitN(cx, cg, JSOP_UINT24, 3);
2973 if (off < 0)
2974 return JS_FALSE;
2975 pc = CG_CODE(cg, off);
2976 SET_UINT24(pc, u);
2977 } else {
2978 off = js_EmitN(cx, cg, JSOP_INT32, 4);
2979 if (off < 0)
2980 return JS_FALSE;
2981 pc = CG_CODE(cg, off);
2982 SET_INT32(pc, ival);
2984 return JS_TRUE;
2987 atom = js_AtomizeDouble(cx, dval);
2988 if (!atom)
2989 return JS_FALSE;
2991 ale = cg->atomList.add(cg->parser, atom);
2992 if (!ale)
2993 return JS_FALSE;
2994 return EmitIndexOp(cx, JSOP_DOUBLE, ALE_INDEX(ale), cg);
2997 static JSBool
2998 EmitSwitch(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
2999 JSStmtInfo *stmtInfo)
3001 JSOp switchOp;
3002 JSBool ok, hasDefault, constPropagated;
3003 ptrdiff_t top, off, defaultOffset;
3004 JSParseNode *pn2, *pn3, *pn4;
3005 uint32 caseCount, tableLength;
3006 JSParseNode **table;
3007 jsdouble d;
3008 jsint i, low, high;
3009 jsval v;
3010 JSAtom *atom;
3011 JSAtomListElement *ale;
3012 intN noteIndex;
3013 size_t switchSize, tableSize;
3014 jsbytecode *pc, *savepc;
3015 #if JS_HAS_BLOCK_SCOPE
3016 jsint count;
3017 #endif
3019 /* Try for most optimal, fall back if not dense ints, and per ECMAv2. */
3020 switchOp = JSOP_TABLESWITCH;
3021 ok = JS_TRUE;
3022 hasDefault = constPropagated = JS_FALSE;
3023 defaultOffset = -1;
3026 * If the switch contains let variables scoped by its body, model the
3027 * resulting block on the stack first, before emitting the discriminant's
3028 * bytecode (in case the discriminant contains a stack-model dependency
3029 * such as a let expression).
3031 pn2 = pn->pn_right;
3032 #if JS_HAS_BLOCK_SCOPE
3033 if (pn2->pn_type == TOK_LEXICALSCOPE) {
3035 * Push the body's block scope before discriminant code-gen for proper
3036 * static block scope linkage in case the discriminant contains a let
3037 * expression. The block's locals must lie under the discriminant on
3038 * the stack so that case-dispatch bytecodes can find the discriminant
3039 * on top of the stack.
3041 count = OBJ_BLOCK_COUNT(cx, pn2->pn_objbox->object);
3042 js_PushBlockScope(cg, stmtInfo, pn2->pn_objbox->object, -1);
3043 stmtInfo->type = STMT_SWITCH;
3045 /* Emit JSOP_ENTERBLOCK before code to evaluate the discriminant. */
3046 if (!EmitEnterBlock(cx, pn2, cg))
3047 return JS_FALSE;
3050 * Pop the switch's statement info around discriminant code-gen. Note
3051 * how this leaves cg->blockChain referencing the switch's
3052 * block scope object, which is necessary for correct block parenting
3053 * in the case where the discriminant contains a let expression.
3055 cg->topStmt = stmtInfo->down;
3056 cg->topScopeStmt = stmtInfo->downScope;
3058 #ifdef __GNUC__
3059 else {
3060 count = 0;
3062 #endif
3063 #endif
3066 * Emit code for the discriminant first (or nearly first, in the case of a
3067 * switch whose body is a block scope).
3069 if (!js_EmitTree(cx, cg, pn->pn_left))
3070 return JS_FALSE;
3072 /* Switch bytecodes run from here till end of final case. */
3073 top = CG_OFFSET(cg);
3074 #if !JS_HAS_BLOCK_SCOPE
3075 js_PushStatement(cg, stmtInfo, STMT_SWITCH, top);
3076 #else
3077 if (pn2->pn_type == TOK_LC) {
3078 js_PushStatement(cg, stmtInfo, STMT_SWITCH, top);
3079 } else {
3080 /* Re-push the switch's statement info record. */
3081 cg->topStmt = cg->topScopeStmt = stmtInfo;
3083 /* Set the statement info record's idea of top. */
3084 stmtInfo->update = top;
3086 /* Advance pn2 to refer to the switch case list. */
3087 pn2 = pn2->expr();
3089 #endif
3091 caseCount = pn2->pn_count;
3092 tableLength = 0;
3093 table = NULL;
3095 if (caseCount == 0 ||
3096 (caseCount == 1 &&
3097 (hasDefault = (pn2->pn_head->pn_type == TOK_DEFAULT)))) {
3098 caseCount = 0;
3099 low = 0;
3100 high = -1;
3101 } else {
3102 #define INTMAP_LENGTH 256
3103 jsbitmap intmap_space[INTMAP_LENGTH];
3104 jsbitmap *intmap = NULL;
3105 int32 intmap_bitlen = 0;
3107 low = JSVAL_INT_MAX;
3108 high = JSVAL_INT_MIN;
3110 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3111 if (pn3->pn_type == TOK_DEFAULT) {
3112 hasDefault = JS_TRUE;
3113 caseCount--; /* one of the "cases" was the default */
3114 continue;
3117 JS_ASSERT(pn3->pn_type == TOK_CASE);
3118 if (switchOp == JSOP_CONDSWITCH)
3119 continue;
3121 pn4 = pn3->pn_left;
3122 while (pn4->pn_type == TOK_RP)
3123 pn4 = pn4->pn_kid;
3124 switch (pn4->pn_type) {
3125 case TOK_NUMBER:
3126 d = pn4->pn_dval;
3127 if (JSDOUBLE_IS_INT(d, i) && INT_FITS_IN_JSVAL(i)) {
3128 pn3->pn_val = INT_TO_JSVAL(i);
3129 } else {
3130 atom = js_AtomizeDouble(cx, d);
3131 if (!atom) {
3132 ok = JS_FALSE;
3133 goto release;
3135 pn3->pn_val = ATOM_KEY(atom);
3137 break;
3138 case TOK_STRING:
3139 pn3->pn_val = ATOM_KEY(pn4->pn_atom);
3140 break;
3141 case TOK_NAME:
3142 if (!pn4->maybeExpr()) {
3143 ok = LookupCompileTimeConstant(cx, cg, pn4->pn_atom, &v);
3144 if (!ok)
3145 goto release;
3146 if (v != JSVAL_HOLE) {
3147 if (!JSVAL_IS_PRIMITIVE(v)) {
3149 * XXX JSOP_LOOKUPSWITCH does not support const-
3150 * propagated object values, see bug 407186.
3152 switchOp = JSOP_CONDSWITCH;
3153 continue;
3155 pn3->pn_val = v;
3156 constPropagated = JS_TRUE;
3157 break;
3160 /* FALL THROUGH */
3161 case TOK_PRIMARY:
3162 if (pn4->pn_op == JSOP_TRUE) {
3163 pn3->pn_val = JSVAL_TRUE;
3164 break;
3166 if (pn4->pn_op == JSOP_FALSE) {
3167 pn3->pn_val = JSVAL_FALSE;
3168 break;
3170 if (pn4->pn_op == JSOP_NULL) {
3171 pn3->pn_val = JSVAL_NULL;
3172 break;
3174 /* FALL THROUGH */
3175 default:
3176 switchOp = JSOP_CONDSWITCH;
3177 continue;
3180 JS_ASSERT(JSVAL_IS_PRIMITIVE(pn3->pn_val));
3182 if (switchOp != JSOP_TABLESWITCH)
3183 continue;
3184 if (!JSVAL_IS_INT(pn3->pn_val)) {
3185 switchOp = JSOP_LOOKUPSWITCH;
3186 continue;
3188 i = JSVAL_TO_INT(pn3->pn_val);
3189 if ((jsuint)(i + (jsint)JS_BIT(15)) >= (jsuint)JS_BIT(16)) {
3190 switchOp = JSOP_LOOKUPSWITCH;
3191 continue;
3193 if (i < low)
3194 low = i;
3195 if (high < i)
3196 high = i;
3199 * Check for duplicates, which require a JSOP_LOOKUPSWITCH.
3200 * We bias i by 65536 if it's negative, and hope that's a rare
3201 * case (because it requires a malloc'd bitmap).
3203 if (i < 0)
3204 i += JS_BIT(16);
3205 if (i >= intmap_bitlen) {
3206 if (!intmap &&
3207 i < (INTMAP_LENGTH << JS_BITS_PER_WORD_LOG2)) {
3208 intmap = intmap_space;
3209 intmap_bitlen = INTMAP_LENGTH << JS_BITS_PER_WORD_LOG2;
3210 } else {
3211 /* Just grab 8K for the worst-case bitmap. */
3212 intmap_bitlen = JS_BIT(16);
3213 intmap = (jsbitmap *)
3214 cx->malloc((JS_BIT(16) >> JS_BITS_PER_WORD_LOG2)
3215 * sizeof(jsbitmap));
3216 if (!intmap) {
3217 JS_ReportOutOfMemory(cx);
3218 return JS_FALSE;
3221 memset(intmap, 0, intmap_bitlen >> JS_BITS_PER_BYTE_LOG2);
3223 if (JS_TEST_BIT(intmap, i)) {
3224 switchOp = JSOP_LOOKUPSWITCH;
3225 continue;
3227 JS_SET_BIT(intmap, i);
3230 release:
3231 if (intmap && intmap != intmap_space)
3232 cx->free(intmap);
3233 if (!ok)
3234 return JS_FALSE;
3237 * Compute the table length and select a lookup switch instead if the
3238 * table would be overlarge or more than half-sparse.
3240 if (switchOp == JSOP_TABLESWITCH) {
3241 tableLength = (uint32)(high - low + 1);
3242 if (tableLength >= JS_BIT(16) || tableLength > 2 * caseCount)
3243 switchOp = JSOP_LOOKUPSWITCH;
3244 } else if (switchOp == JSOP_LOOKUPSWITCH) {
3246 * Lookup switch supports only atom indexes below the 64K limit.
3247 * Conservatively estimate the maximum possible index during
3248 * switch generation and use conditional switch if it exceeds
3249 * the limit.
3251 if (caseCount + cg->atomList.count > JS_BIT(16))
3252 switchOp = JSOP_CONDSWITCH;
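/*
 * To summarize the selection so far (illustrative cases):
 *
 *   switch (x) { case 1: case 2: case 3: ... }  // dense ints:
 *                                               //   JSOP_TABLESWITCH
 *   switch (x) { case "a": ... }                // JSOP_LOOKUPSWITCH
 *   switch (x) { case g(): ... }                // non-constant case:
 *                                               //   JSOP_CONDSWITCH
 */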
3257 * Emit a note with two offsets: the first tells the total switch code
3258 * length, the second the offset to the first JSOP_CASE if condswitch.
3260 noteIndex = js_NewSrcNote3(cx, cg, SRC_SWITCH, 0, 0);
3261 if (noteIndex < 0)
3262 return JS_FALSE;
3264 if (switchOp == JSOP_CONDSWITCH) {
3266 * 0 bytes of immediate for unoptimized ECMAv2 switch.
3268 switchSize = 0;
3269 } else if (switchOp == JSOP_TABLESWITCH) {
3271 * 3 offsets (len, low, high) before the table, 1 per entry.
3273 switchSize = (size_t)(JUMP_OFFSET_LEN * (3 + tableLength));
3274 } else {
3276 * JSOP_LOOKUPSWITCH:
3277 * 1 offset (len) and 1 atom index (npairs) before the table,
3278 * 1 atom index and 1 jump offset per entry.
3280 switchSize = (size_t)(JUMP_OFFSET_LEN + INDEX_LEN +
3281 (INDEX_LEN + JUMP_OFFSET_LEN) * caseCount);
3285 * Emit switchOp followed by switchSize bytes of jump or lookup table.
3287 * If switchOp is JSOP_LOOKUPSWITCH or JSOP_TABLESWITCH, it is crucial
3288 * to emit the immediate operand(s) by which bytecode readers such as
3289 * BuildSpanDepTable discover the length of the switch opcode *before*
3290 * calling js_SetJumpOffset (which may call BuildSpanDepTable). It's
3291 * also important to zero all unknown jump offset immediate operands,
3292 * so they can be converted to span dependencies with null targets to
3293 * be computed later (js_EmitN zeros switchSize bytes after switchOp).
3295 if (js_EmitN(cx, cg, switchOp, switchSize) < 0)
3296 return JS_FALSE;
3298 off = -1;
3299 if (switchOp == JSOP_CONDSWITCH) {
3300 intN caseNoteIndex = -1;
3301 JSBool beforeCases = JS_TRUE;
3303 /* Emit code for evaluating cases and jumping to case statements. */
3304 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3305 pn4 = pn3->pn_left;
3306 if (pn4 && !js_EmitTree(cx, cg, pn4))
3307 return JS_FALSE;
3308 if (caseNoteIndex >= 0) {
3309 /* off is the previous JSOP_CASE's bytecode offset. */
3310 if (!js_SetSrcNoteOffset(cx, cg, (uintN)caseNoteIndex, 0,
3311 CG_OFFSET(cg) - off)) {
3312 return JS_FALSE;
3315 if (!pn4) {
3316 JS_ASSERT(pn3->pn_type == TOK_DEFAULT);
3317 continue;
3319 caseNoteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
3320 if (caseNoteIndex < 0)
3321 return JS_FALSE;
3322 off = EmitJump(cx, cg, JSOP_CASE, 0);
3323 if (off < 0)
3324 return JS_FALSE;
3325 pn3->pn_offset = off;
3326 if (beforeCases) {
3327 uintN noteCount, noteCountDelta;
3329 /* Switch note's second offset is to first JSOP_CASE. */
3330 noteCount = CG_NOTE_COUNT(cg);
3331 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 1,
3332 off - top)) {
3333 return JS_FALSE;
3335 noteCountDelta = CG_NOTE_COUNT(cg) - noteCount;
3336 if (noteCountDelta != 0)
3337 caseNoteIndex += noteCountDelta;
3338 beforeCases = JS_FALSE;
3343 * If we didn't have an explicit default (which could fall in between
3344 * cases, preventing us from fusing this js_SetSrcNoteOffset with the
3345 * call in the loop above), link the last case to the implicit default
3346 * for the decompiler.
3348 if (!hasDefault &&
3349 caseNoteIndex >= 0 &&
3350 !js_SetSrcNoteOffset(cx, cg, (uintN)caseNoteIndex, 0,
3351 CG_OFFSET(cg) - off)) {
3352 return JS_FALSE;
3355 /* Emit default even if no explicit default statement. */
3356 defaultOffset = EmitJump(cx, cg, JSOP_DEFAULT, 0);
3357 if (defaultOffset < 0)
3358 return JS_FALSE;
3359 } else {
3360 pc = CG_CODE(cg, top + JUMP_OFFSET_LEN);
3362 if (switchOp == JSOP_TABLESWITCH) {
3363 /* Fill in switch bounds, which we know fit in 16-bit offsets. */
3364 SET_JUMP_OFFSET(pc, low);
3365 pc += JUMP_OFFSET_LEN;
3366 SET_JUMP_OFFSET(pc, high);
3367 pc += JUMP_OFFSET_LEN;
3370 * Use malloc to avoid arena bloat for programs with many switches.
3371 * We free table if non-null at label out, so all control flow must
3372 * exit this function through goto out or goto bad.
3374 if (tableLength != 0) {
3375 tableSize = (size_t)tableLength * sizeof *table;
3376 table = (JSParseNode **) cx->malloc(tableSize);
3377 if (!table)
3378 return JS_FALSE;
3379 memset(table, 0, tableSize);
3380 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3381 if (pn3->pn_type == TOK_DEFAULT)
3382 continue;
3383 i = JSVAL_TO_INT(pn3->pn_val);
3384 i -= low;
3385 JS_ASSERT((uint32)i < tableLength);
3386 table[i] = pn3;
3389 } else {
3390 JS_ASSERT(switchOp == JSOP_LOOKUPSWITCH);
3392 /* Fill in the number of cases. */
3393 SET_INDEX(pc, caseCount);
3394 pc += INDEX_LEN;
3398 * After this point, all control flow involving JSOP_TABLESWITCH
3399 * must set ok and goto out to exit this function. To keep things
3400 * simple, all switchOp cases exit that way.
3402 MUST_FLOW_THROUGH("out");
3403 if (cg->spanDeps) {
3405 * We have already generated at least one big jump so we must
3406 * explicitly add span dependencies for the switch jumps. When
3407 * called below, js_SetJumpOffset can only do it when patching
3408 * the first big jump or when cg->spanDeps is null.
3410 if (!AddSwitchSpanDeps(cx, cg, CG_CODE(cg, top)))
3411 goto bad;
3414 if (constPropagated) {
3416 * Skip switchOp, as we are not setting jump offsets in the two
3417 * for loops below. We'll restore CG_NEXT(cg) from savepc after,
3418 * unless there was an error.
3420 savepc = CG_NEXT(cg);
3421 CG_NEXT(cg) = pc + 1;
3422 if (switchOp == JSOP_TABLESWITCH) {
3423 for (i = 0; i < (jsint)tableLength; i++) {
3424 pn3 = table[i];
3425 if (pn3 &&
3426 (pn4 = pn3->pn_left) != NULL &&
3427 pn4->pn_type == TOK_NAME) {
3428 /* Note a propagated constant with the const's name. */
3429 JS_ASSERT(!pn4->maybeExpr());
3430 ale = cg->atomList.add(cg->parser, pn4->pn_atom);
3431 if (!ale)
3432 goto bad;
3433 CG_NEXT(cg) = pc;
3434 if (js_NewSrcNote2(cx, cg, SRC_LABEL, (ptrdiff_t)
3435 ALE_INDEX(ale)) < 0) {
3436 goto bad;
3439 pc += JUMP_OFFSET_LEN;
3441 } else {
3442 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3443 pn4 = pn3->pn_left;
3444 if (pn4 && pn4->pn_type == TOK_NAME) {
3445 /* Note a propagated constant with the const's name. */
3446 JS_ASSERT(!pn4->maybeExpr());
3447 ale = cg->atomList.add(cg->parser, pn4->pn_atom);
3448 if (!ale)
3449 goto bad;
3450 CG_NEXT(cg) = pc;
3451 if (js_NewSrcNote2(cx, cg, SRC_LABEL, (ptrdiff_t)
3452 ALE_INDEX(ale)) < 0) {
3453 goto bad;
3456 pc += INDEX_LEN + JUMP_OFFSET_LEN;
3459 CG_NEXT(cg) = savepc;
3463 /* Emit code for each case's statements, copying pn_offset up to pn3. */
3464 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3465 if (switchOp == JSOP_CONDSWITCH && pn3->pn_type != TOK_DEFAULT)
3466 CHECK_AND_SET_JUMP_OFFSET_AT_CUSTOM(cx, cg, pn3->pn_offset, goto bad);
3467 pn4 = pn3->pn_right;
3468 ok = js_EmitTree(cx, cg, pn4);
3469 if (!ok)
3470 goto out;
3471 pn3->pn_offset = pn4->pn_offset;
3472 if (pn3->pn_type == TOK_DEFAULT)
3473 off = pn3->pn_offset - top;
3476 if (!hasDefault) {
3477 /* If no default case, offset for default is to end of switch. */
3478 off = CG_OFFSET(cg) - top;
3481 /* We had better have set "off" by now. */
3482 JS_ASSERT(off != -1);
3484 /* Set the default offset (to end of switch if no default). */
3485 if (switchOp == JSOP_CONDSWITCH) {
3486 pc = NULL;
3487 JS_ASSERT(defaultOffset != -1);
3488 ok = js_SetJumpOffset(cx, cg, CG_CODE(cg, defaultOffset),
3489 off - (defaultOffset - top));
3490 if (!ok)
3491 goto out;
3492 } else {
3493 pc = CG_CODE(cg, top);
3494 ok = js_SetJumpOffset(cx, cg, pc, off);
3495 if (!ok)
3496 goto out;
3497 pc += JUMP_OFFSET_LEN;
3500 /* Set the SRC_SWITCH note's offset operand to tell end of switch. */
3501 off = CG_OFFSET(cg) - top;
3502 ok = js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, off);
3503 if (!ok)
3504 goto out;
3506 if (switchOp == JSOP_TABLESWITCH) {
3507 /* Skip over the already-initialized switch bounds. */
3508 pc += 2 * JUMP_OFFSET_LEN;
3510 /* Fill in the jump table, if there is one. */
3511 for (i = 0; i < (jsint)tableLength; i++) {
3512 pn3 = table[i];
3513 off = pn3 ? pn3->pn_offset - top : 0;
3514 ok = js_SetJumpOffset(cx, cg, pc, off);
3515 if (!ok)
3516 goto out;
3517 pc += JUMP_OFFSET_LEN;
3519 } else if (switchOp == JSOP_LOOKUPSWITCH) {
3520 /* Skip over the already-initialized number of cases. */
3521 pc += INDEX_LEN;
3523 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3524 if (pn3->pn_type == TOK_DEFAULT)
3525 continue;
3526 if (!js_AtomizePrimitiveValue(cx, pn3->pn_val, &atom))
3527 goto bad;
3528 ale = cg->atomList.add(cg->parser, atom);
3529 if (!ale)
3530 goto bad;
3531 SET_INDEX(pc, ALE_INDEX(ale));
3532 pc += INDEX_LEN;
3534 off = pn3->pn_offset - top;
3535 ok = js_SetJumpOffset(cx, cg, pc, off);
3536 if (!ok)
3537 goto out;
3538 pc += JUMP_OFFSET_LEN;
3542 out:
3543 if (table)
3544 cx->free(table);
3545 if (ok) {
3546 ok = js_PopStatementCG(cx, cg);
3548 #if JS_HAS_BLOCK_SCOPE
3549 if (ok && pn->pn_right->pn_type == TOK_LEXICALSCOPE)
3550 EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK, count);
3551 #endif
3553 return ok;
3555 bad:
3556 ok = JS_FALSE;
3557 goto out;
3560 JSBool
3561 js_EmitFunctionScript(JSContext *cx, JSCodeGenerator *cg, JSParseNode *body)
3563 if (cg->flags & TCF_FUN_IS_GENERATOR) {
3564 /* JSOP_GENERATOR must be the first instruction. */
3565 CG_SWITCH_TO_PROLOG(cg);
3566 JS_ASSERT(CG_NEXT(cg) == CG_BASE(cg));
3567 if (js_Emit1(cx, cg, JSOP_GENERATOR) < 0)
3568 return false;
3569 CG_SWITCH_TO_MAIN(cg);
3570 } else {
3572 * Emit a trace hint opcode only if not in a generator: generators are
3573 * not yet traced, and JSOP_GENERATOR and JSOP_TRACE each need to be first.
3575 if (js_Emit1(cx, cg, JSOP_TRACE) < 0)
3576 return false;
3579 if (cg->flags & TCF_FUN_UNBRAND_THIS) {
3580 if (js_Emit1(cx, cg, JSOP_UNBRANDTHIS) < 0)
3581 return false;
3584 return js_EmitTree(cx, cg, body) &&
3585 js_Emit1(cx, cg, JSOP_STOP) >= 0 &&
3586 js_NewScriptFromCG(cx, cg);
3589 /* A macro for inlining at the top of js_EmitTree (whence it came). */
3590 #define UPDATE_LINE_NUMBER_NOTES(cx, cg, line) \
3591 JS_BEGIN_MACRO \
3592 uintN line_ = (line); \
3593 uintN delta_ = line_ - CG_CURRENT_LINE(cg); \
3594 if (delta_ != 0) { \
3595 /* \
3596 * Encode any change in the current source line number by using \
3597 * either several SRC_NEWLINE notes or just one SRC_SETLINE note, \
3598 * whichever consumes less space. \
3600 * NB: We handle backward line number deltas (possible with for \
3601 * loops where the update part is emitted after the body, but its \
3602 * line number is <= any line number in the body) here by letting \
3603 * unsigned delta_ wrap to a very large number, which triggers a \
3604 * SRC_SETLINE. \
3605 */ \
3606 CG_CURRENT_LINE(cg) = line_; \
3607 if (delta_ >= (uintN)(2 + ((line_ > SN_3BYTE_OFFSET_MASK)<<1))) { \
3608 if (js_NewSrcNote2(cx, cg, SRC_SETLINE, (ptrdiff_t)line_) < 0)\
3609 return JS_FALSE; \
3610 } else { \
3611 do { \
3612 if (js_NewSrcNote(cx, cg, SRC_NEWLINE) < 0) \
3613 return JS_FALSE; \
3614 } while (--delta_ != 0); \
3617 JS_END_MACRO
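/*
 * For example (a sketch): advancing by one line emits a single
 * SRC_NEWLINE note, while a large forward jump, or a backward delta
 * that wraps the unsigned subtraction, emits one SRC_SETLINE note
 * carrying the new line number.
 */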
3619 /* A function, so that we avoid macro-bloating all the other callsites. */
3620 static JSBool
3621 UpdateLineNumberNotes(JSContext *cx, JSCodeGenerator *cg, uintN line)
3623 UPDATE_LINE_NUMBER_NOTES(cx, cg, line);
3624 return JS_TRUE;
3627 static JSBool
3628 MaybeEmitVarDecl(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3629 JSParseNode *pn, jsatomid *result)
3631 jsatomid atomIndex;
3632 JSAtomListElement *ale;
3634 if (pn->pn_cookie != FREE_UPVAR_COOKIE) {
3635 atomIndex = (jsatomid) UPVAR_FRAME_SLOT(pn->pn_cookie);
3636 } else {
3637 ale = cg->atomList.add(cg->parser, pn->pn_atom);
3638 if (!ale)
3639 return JS_FALSE;
3640 atomIndex = ALE_INDEX(ale);
3643 if (JOF_OPTYPE(pn->pn_op) == JOF_ATOM &&
3644 (!cg->inFunction() || (cg->flags & TCF_FUN_HEAVYWEIGHT))) {
3645 CG_SWITCH_TO_PROLOG(cg);
3646 if (!UpdateLineNumberNotes(cx, cg, pn->pn_pos.begin.lineno))
3647 return JS_FALSE;
3648 EMIT_INDEX_OP(prologOp, atomIndex);
3649 CG_SWITCH_TO_MAIN(cg);
3652 if (result)
3653 *result = atomIndex;
3654 return JS_TRUE;
3657 #if JS_HAS_DESTRUCTURING
3659 typedef JSBool
3660 (*DestructuringDeclEmitter)(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3661 JSParseNode *pn);
3663 static JSBool
3664 EmitDestructuringDecl(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3665 JSParseNode *pn)
3667 JS_ASSERT(pn->pn_type == TOK_NAME);
3668 if (!BindNameToSlot(cx, cg, pn))
3669 return JS_FALSE;
3671 JS_ASSERT(PN_OP(pn) != JSOP_ARGUMENTS && PN_OP(pn) != JSOP_CALLEE);
3672 return MaybeEmitVarDecl(cx, cg, prologOp, pn, NULL);
3675 static JSBool
3676 EmitDestructuringDecls(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3677 JSParseNode *pn)
3679 JSParseNode *pn2, *pn3;
3680 DestructuringDeclEmitter emitter;
3682 if (pn->pn_type == TOK_RB) {
3683 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
3684 if (pn2->pn_type == TOK_COMMA)
3685 continue;
3686 emitter = (pn2->pn_type == TOK_NAME)
3687 ? EmitDestructuringDecl
3688 : EmitDestructuringDecls;
3689 if (!emitter(cx, cg, prologOp, pn2))
3690 return JS_FALSE;
3692 } else {
3693 JS_ASSERT(pn->pn_type == TOK_RC);
3694 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
3695 pn3 = pn2->pn_right;
3696 emitter = (pn3->pn_type == TOK_NAME)
3697 ? EmitDestructuringDecl
3698 : EmitDestructuringDecls;
3699 if (!emitter(cx, cg, prologOp, pn3))
3700 return JS_FALSE;
3703 return JS_TRUE;
3706 static JSBool
3707 EmitDestructuringOpsHelper(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn);
3709 static JSBool
3710 EmitDestructuringLHS(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
3712 jsuint slot;
3715 * Now emit the lvalue opcode sequence. If the lvalue is a nested
3716 * destructuring initialiser-form, call ourselves to handle it, then
3717 * pop the matched value. Otherwise emit an lvalue bytecode sequence
3718 * ending with a JSOP_ENUMELEM or equivalent op.
3720 if (pn->pn_type == TOK_RB || pn->pn_type == TOK_RC) {
3721 if (!EmitDestructuringOpsHelper(cx, cg, pn))
3722 return JS_FALSE;
3723 if (js_Emit1(cx, cg, JSOP_POP) < 0)
3724 return JS_FALSE;
3725 } else {
3726 if (pn->pn_type == TOK_NAME) {
3727 if (!BindNameToSlot(cx, cg, pn))
3728 return JS_FALSE;
3729 if (pn->isConst() && !pn->isInitialized())
3730 return js_Emit1(cx, cg, JSOP_POP) >= 0;
3733 switch (pn->pn_op) {
3734 case JSOP_SETNAME:
3736 * NB: pn is a PN_NAME node, not a PN_BINARY. Nevertheless,
3737 * we want to emit JSOP_ENUMELEM, which has format JOF_ELEM.
3738 * So here and for JSOP_ENUMCONSTELEM, we use EmitElemOp.
3740 if (!EmitElemOp(cx, pn, JSOP_ENUMELEM, cg))
3741 return JS_FALSE;
3742 break;
3744 case JSOP_SETCONST:
3745 if (!EmitElemOp(cx, pn, JSOP_ENUMCONSTELEM, cg))
3746 return JS_FALSE;
3747 break;
3749 case JSOP_SETLOCAL:
3750 slot = (jsuint) pn->pn_cookie;
3751 EMIT_UINT16_IMM_OP(JSOP_SETLOCALPOP, slot);
3752 break;
3754 case JSOP_SETARG:
3755 case JSOP_SETGVAR:
3756 slot = (jsuint) pn->pn_cookie;
3757 EMIT_UINT16_IMM_OP(PN_OP(pn), slot);
3758 if (js_Emit1(cx, cg, JSOP_POP) < 0)
3759 return JS_FALSE;
3760 break;
3762 default:
3764 ptrdiff_t top;
3766 top = CG_OFFSET(cg);
3767 if (!js_EmitTree(cx, cg, pn))
3768 return JS_FALSE;
3769 if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
3770 return JS_FALSE;
3771 if (js_Emit1(cx, cg, JSOP_ENUMELEM) < 0)
3772 return JS_FALSE;
3773 break;
3776 case JSOP_ENUMELEM:
3777 JS_ASSERT(0);
3781 return JS_TRUE;
3785 * Recursive helper for EmitDestructuringOps.
3787 * Given a value to destructure on the stack, walk over an object or array
3788 * initialiser at pn, emitting bytecodes to match property values and store
3789 * them in the lvalues identified by the matched property names.
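/*
 * For example (hypothetical), with the right-hand side value on the
 * stack, var [a, b] = rhs emits roughly:
 *
 *   JSOP_DUP; push 0; JSOP_GETELEM; store to a
 *   JSOP_DUP; push 1; JSOP_GETELEM; store to b
 */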
3791 static JSBool
3792 EmitDestructuringOpsHelper(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
3794 jsuint index;
3795 JSParseNode *pn2, *pn3;
3796 JSBool doElemOp;
3798 #ifdef DEBUG
3799 intN stackDepth = cg->stackDepth;
3800 JS_ASSERT(stackDepth != 0);
3801 JS_ASSERT(pn->pn_arity == PN_LIST);
3802 JS_ASSERT(pn->pn_type == TOK_RB || pn->pn_type == TOK_RC);
3803 #endif
3805 if (pn->pn_count == 0) {
3806 /* Emit a DUP;POP sequence for the decompiler. */
3807 return js_Emit1(cx, cg, JSOP_DUP) >= 0 &&
3808 js_Emit1(cx, cg, JSOP_POP) >= 0;
3811 index = 0;
3812 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
3814 * Duplicate the value being destructured to use as a reference base.
3815 * If dup is not the first one, annotate it for the decompiler.
3817 if (pn2 != pn->pn_head && js_NewSrcNote(cx, cg, SRC_CONTINUE) < 0)
3818 return JS_FALSE;
3819 if (js_Emit1(cx, cg, JSOP_DUP) < 0)
3820 return JS_FALSE;
3823 * Now push the property name currently being matched, which is either
3824 * the array initialiser's current index, or the current property name
3825 * "label" on the left of a colon in the object initialiser. Set pn3
3826 * to the lvalue node, which is in the value-initializing position.
3828 doElemOp = JS_TRUE;
3829 if (pn->pn_type == TOK_RB) {
3830 if (!EmitNumberOp(cx, index, cg))
3831 return JS_FALSE;
3832 pn3 = pn2;
3833 } else {
3834 JS_ASSERT(pn->pn_type == TOK_RC);
3835 JS_ASSERT(pn2->pn_type == TOK_COLON);
3836 pn3 = pn2->pn_left;
3837 if (pn3->pn_type == TOK_NUMBER) {
3839 * If we are emitting an object destructuring initialiser,
3840 * annotate the index op with SRC_INITPROP so we know we are
3841 * not decompiling an array initialiser.
3843 if (js_NewSrcNote(cx, cg, SRC_INITPROP) < 0)
3844 return JS_FALSE;
3845 if (!EmitNumberOp(cx, pn3->pn_dval, cg))
3846 return JS_FALSE;
3847 } else {
3848 JS_ASSERT(pn3->pn_type == TOK_STRING ||
3849 pn3->pn_type == TOK_NAME);
3850 if (!EmitAtomOp(cx, pn3, JSOP_GETPROP, cg))
3851 return JS_FALSE;
3852 doElemOp = JS_FALSE;
3854 pn3 = pn2->pn_right;
3857 if (doElemOp) {
3859 * Ok, get the value of the matching property name. This leaves
3860 * that value on top of the value being destructured, so the stack
3861 * is one deeper than when we started.
3863 if (js_Emit1(cx, cg, JSOP_GETELEM) < 0)
3864 return JS_FALSE;
3865 JS_ASSERT(cg->stackDepth == stackDepth + 1);
3868 /* Nullary comma node makes a hole in the array destructurer. */
3869 if (pn3->pn_type == TOK_COMMA && pn3->pn_arity == PN_NULLARY) {
3870 JS_ASSERT(pn->pn_type == TOK_RB);
3871 JS_ASSERT(pn2 == pn3);
3872 if (js_Emit1(cx, cg, JSOP_POP) < 0)
3873 return JS_FALSE;
3874 } else {
3875 if (!EmitDestructuringLHS(cx, cg, pn3))
3876 return JS_FALSE;
3879 JS_ASSERT(cg->stackDepth == stackDepth);
3880 ++index;
3883 return JS_TRUE;
3886 static ptrdiff_t
3887 OpToDeclType(JSOp op)
3889 switch (op) {
3890 case JSOP_NOP:
3891 return SRC_DECL_LET;
3892 case JSOP_DEFCONST:
3893 return SRC_DECL_CONST;
3894 case JSOP_DEFVAR:
3895 return SRC_DECL_VAR;
3896 default:
3897 return SRC_DECL_NONE;
3901 static JSBool
3902 EmitDestructuringOps(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3903 JSParseNode *pn)
3906 * If we're called from a variable declaration, help the decompiler by
3907 * annotating the first JSOP_DUP that EmitDestructuringOpsHelper emits.
3908 * If the destructuring initialiser is empty, our helper will emit a
3909 * JSOP_DUP followed by a JSOP_POP for the decompiler.
3911 if (js_NewSrcNote2(cx, cg, SRC_DESTRUCT, OpToDeclType(prologOp)) < 0)
3912 return JS_FALSE;
3915 * Call our recursive helper to emit the destructuring assignments and
3916 * related stack manipulations.
3918 return EmitDestructuringOpsHelper(cx, cg, pn);
3921 static JSBool
3922 EmitGroupAssignment(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3923 JSParseNode *lhs, JSParseNode *rhs)
3925 jsuint depth, limit, i, nslots;
3926 JSParseNode *pn;
3928 depth = limit = (uintN) cg->stackDepth;
3929 for (pn = rhs->pn_head; pn; pn = pn->pn_next) {
3930 if (limit == JS_BIT(16)) {
3931 ReportCompileErrorNumber(cx, CG_TS(cg), rhs, JSREPORT_ERROR, JSMSG_ARRAY_INIT_TOO_BIG);
3932 return JS_FALSE;
3933 }
3935 /* MaybeEmitGroupAssignment won't call us if rhs is holey. */
3936 JS_ASSERT(!(pn->pn_type == TOK_COMMA && pn->pn_arity == PN_NULLARY));
3937 if (!js_EmitTree(cx, cg, pn))
3938 return JS_FALSE;
3939 ++limit;
3940 }
3942 if (js_NewSrcNote2(cx, cg, SRC_GROUPASSIGN, OpToDeclType(prologOp)) < 0)
3943 return JS_FALSE;
3945 i = depth;
3946 for (pn = lhs->pn_head; pn; pn = pn->pn_next, ++i) {
3947 /* MaybeEmitGroupAssignment requires lhs->pn_count <= rhs->pn_count. */
3948 JS_ASSERT(i < limit);
3949 jsint slot = AdjustBlockSlot(cx, cg, i);
3950 if (slot < 0)
3951 return JS_FALSE;
3952 EMIT_UINT16_IMM_OP(JSOP_GETLOCAL, slot);
3954 if (pn->pn_type == TOK_COMMA && pn->pn_arity == PN_NULLARY) {
3955 if (js_Emit1(cx, cg, JSOP_POP) < 0)
3956 return JS_FALSE;
3957 } else {
3958 if (!EmitDestructuringLHS(cx, cg, pn))
3959 return JS_FALSE;
3960 }
3961 }
3963 nslots = limit - depth;
3964 EMIT_UINT16_IMM_OP(JSOP_POPN, nslots);
3965 cg->stackDepth = (uintN) depth;
3966 return JS_TRUE;
3967 }
3969 /*
3970 * Helper called with pop out param initialized to a JSOP_POP* opcode. If we
3971 * can emit a group assignment sequence, which results in 0 stack depth delta,
3972 * we set *pop to JSOP_NOP so callers can veto emitting pn followed by a pop.
3973 */
3974 static JSBool
3975 MaybeEmitGroupAssignment(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3976 JSParseNode *pn, JSOp *pop)
3977 {
3978 JSParseNode *lhs, *rhs;
3980 JS_ASSERT(pn->pn_type == TOK_ASSIGN);
3981 JS_ASSERT(*pop == JSOP_POP || *pop == JSOP_POPV);
3982 lhs = pn->pn_left;
3983 rhs = pn->pn_right;
3984 if (lhs->pn_type == TOK_RB && rhs->pn_type == TOK_RB &&
3985 !(rhs->pn_xflags & PNX_HOLEY) &&
3986 lhs->pn_count <= rhs->pn_count) {
3987 if (!EmitGroupAssignment(cx, cg, prologOp, lhs, rhs))
3988 return JS_FALSE;
3989 *pop = JSOP_NOP;
3990 }
3991 return JS_TRUE;
3992 }
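/*
 * Sketch of the group-assignment fast path guarded above (illustrative):
 * |[a, b] = [b, a]| has TOK_RB on both sides, no holes, and
 * lhs->pn_count <= rhs->pn_count, so EmitGroupAssignment evaluates b and
 * a into fresh block-local slots, reads them back with JSOP_GETLOCAL,
 * and pops the temporaries with JSOP_POPN -- a net stack delta of zero,
 * which is why *pop is changed to JSOP_NOP.
 */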
3994 #endif /* JS_HAS_DESTRUCTURING */
3996 static JSBool
3997 EmitVariables(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
3998 JSBool inLetHead, ptrdiff_t *headNoteIndex)
3999 {
4000 bool let, forInVar, first;
4001 #if JS_HAS_BLOCK_SCOPE
4002 bool forInLet, popScope;
4003 JSStmtInfo *stmt, *scopeStmt;
4004 #endif
4005 ptrdiff_t off, noteIndex, tmp;
4006 JSParseNode *pn2, *pn3, *next;
4007 JSOp op;
4008 jsatomid atomIndex;
4009 uintN oldflags;
4011 /* Default in case of JS_HAS_BLOCK_SCOPE early return, below. */
4012 *headNoteIndex = -1;
4014 /*
4015 * Let blocks and expressions have a parenthesized head in which the new
4016 * scope is not yet open. Initializer evaluation uses the parent node's
4017 * lexical scope. If popScope is true below, then we hide the top lexical
4018 * block from any calls to BindNameToSlot hiding in pn2->pn_expr so that
4019 * it won't find any names in the new let block.
4020 *
4021 * The same goes for let declarations in the head of any kind of for loop.
4022 * Unlike a let declaration 'let x = i' within a block, where x is hoisted
4023 * to the start of the block, a 'for (let x = i...) ...' loop evaluates i
4024 * in the containing scope, and puts x in the loop body's scope.
4025 */
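/*
 * Scoping example for the comment above (source-level, assumed): in
 * |let (x = x) { ... }| the initializer's x must name the enclosing
 * scope's binding, so when popScope is set we temporarily unlink
 * cg->topStmt/cg->topScopeStmt around the js_EmitTree call for the
 * initializer and restore them afterwards.
 */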
4026 let = (pn->pn_op == JSOP_NOP);
4027 forInVar = (pn->pn_xflags & PNX_FORINVAR) != 0;
4028 #if JS_HAS_BLOCK_SCOPE
4029 forInLet = let && forInVar;
4030 popScope = (inLetHead || (let && (cg->flags & TCF_IN_FOR_INIT)));
4031 if (popScope) {
4032 stmt = cg->topStmt;
4033 scopeStmt = cg->topScopeStmt;
4034 }
4035 # ifdef __GNUC__
4036 else stmt = scopeStmt = NULL; /* quell GCC overwarning */
4037 # endif
4038 JS_ASSERT(!popScope || let);
4039 #endif
4041 off = noteIndex = -1;
4042 for (pn2 = pn->pn_head; ; pn2 = next) {
4043 first = pn2 == pn->pn_head;
4044 next = pn2->pn_next;
4046 if (pn2->pn_type != TOK_NAME) {
4047 #if JS_HAS_DESTRUCTURING
4048 if (pn2->pn_type == TOK_RB || pn2->pn_type == TOK_RC) {
4049 /*
4050 * Emit variable binding ops, but not destructuring ops.
4051 * The parser (see Variables, jsparse.c) has ensured that
4052 * our caller will be the TOK_FOR/TOK_IN case in js_EmitTree,
4053 * and that case will emit the destructuring code only after
4054 * emitting an enumerating opcode and a branch that tests
4055 * whether the enumeration ended.
4056 */
4057 JS_ASSERT(forInVar);
4058 JS_ASSERT(pn->pn_count == 1);
4059 if (!EmitDestructuringDecls(cx, cg, PN_OP(pn), pn2))
4060 return JS_FALSE;
4061 break;
4062 }
4063 #endif
4065 /*
4066 * A destructuring initialiser assignment preceded by var will
4067 * never occur to the left of 'in' in a for-in loop. As with 'for
4068 * (var x = i in o)...', this will cause the entire 'var [a, b] =
4069 * i' to be hoisted out of the loop.
4070 */
4071 JS_ASSERT(pn2->pn_type == TOK_ASSIGN);
4072 JS_ASSERT(!forInVar);
4074 /*
4075 * To allow the front end to rewrite var f = x; as f = x; when a
4076 * function f(){} precedes the var, detect simple name assignment
4077 * here and initialize the name.
4078 */
4079 #if !JS_HAS_DESTRUCTURING
4080 JS_ASSERT(pn2->pn_left->pn_type == TOK_NAME);
4081 #else
4082 if (pn2->pn_left->pn_type == TOK_NAME)
4083 #endif
4084 {
4085 pn3 = pn2->pn_right;
4086 pn2 = pn2->pn_left;
4087 goto do_name;
4088 }
4090 #if JS_HAS_DESTRUCTURING
4091 if (pn->pn_count == 1) {
4092 /*
4093 * If this is the only destructuring assignment in the list,
4094 * try to optimize to a group assignment. If we're in a let
4095 * head, pass JSOP_POP rather than the pseudo-prolog JSOP_NOP
4096 * in pn->pn_op, to suppress a second (and misplaced) 'let'.
4097 */
4098 JS_ASSERT(noteIndex < 0 && !pn2->pn_next);
4099 op = JSOP_POP;
4100 if (!MaybeEmitGroupAssignment(cx, cg,
4101 inLetHead ? JSOP_POP : PN_OP(pn),
4102 pn2, &op)) {
4103 return JS_FALSE;
4104 }
4105 if (op == JSOP_NOP) {
4106 pn->pn_xflags = (pn->pn_xflags & ~PNX_POPVAR) | PNX_GROUPINIT;
4107 break;
4108 }
4109 }
4111 pn3 = pn2->pn_left;
4112 if (!EmitDestructuringDecls(cx, cg, PN_OP(pn), pn3))
4113 return JS_FALSE;
4115 if (!js_EmitTree(cx, cg, pn2->pn_right))
4116 return JS_FALSE;
4118 /*
4119 * Veto pn->pn_op if inLetHead to avoid emitting a SRC_DESTRUCT
4120 * that's redundant with respect to the SRC_DECL/SRC_DECL_LET that
4121 * we will emit at the bottom of this function.
4122 */
4123 if (!EmitDestructuringOps(cx, cg,
4124 inLetHead ? JSOP_POP : PN_OP(pn),
4125 pn3)) {
4126 return JS_FALSE;
4127 }
4128 goto emit_note_pop;
4129 #endif
4130 }
4132 /*
4133 * Load initializer early to share code above that jumps to do_name.
4134 * NB: if this var redeclares an existing binding, then pn2 is linked
4135 * on its definition's use-chain and pn_expr has been overlayed with
4136 * pn_lexdef.
4137 */
4138 pn3 = pn2->maybeExpr();
4140 do_name:
4141 if (!BindNameToSlot(cx, cg, pn2))
4142 return JS_FALSE;
4144 op = PN_OP(pn2);
4145 if (op == JSOP_ARGUMENTS) {
4146 /* JSOP_ARGUMENTS => no initializer */
4147 JS_ASSERT(!pn3 && !let);
4148 pn3 = NULL;
4149 #ifdef __GNUC__
4150 atomIndex = 0; /* quell GCC overwarning */
4151 #endif
4152 } else {
4153 JS_ASSERT(op != JSOP_CALLEE);
4154 JS_ASSERT(pn2->pn_cookie != FREE_UPVAR_COOKIE || !let);
4155 if (!MaybeEmitVarDecl(cx, cg, PN_OP(pn), pn2, &atomIndex))
4156 return JS_FALSE;
4158 if (pn3) {
4159 JS_ASSERT(!forInVar);
4160 if (op == JSOP_SETNAME) {
4161 JS_ASSERT(!let);
4162 EMIT_INDEX_OP(JSOP_BINDNAME, atomIndex);
4163 }
4164 if (pn->pn_op == JSOP_DEFCONST &&
4165 !js_DefineCompileTimeConstant(cx, cg, pn2->pn_atom, pn3)) {
4166 return JS_FALSE;
4167 }
4169 #if JS_HAS_BLOCK_SCOPE
4170 /* Evaluate expr in the outer lexical scope if requested. */
4171 if (popScope) {
4172 cg->topStmt = stmt->down;
4173 cg->topScopeStmt = scopeStmt->downScope;
4174 }
4175 #endif
4177 oldflags = cg->flags;
4178 cg->flags &= ~TCF_IN_FOR_INIT;
4179 if (!js_EmitTree(cx, cg, pn3))
4180 return JS_FALSE;
4181 cg->flags |= oldflags & TCF_IN_FOR_INIT;
4183 #if JS_HAS_BLOCK_SCOPE
4184 if (popScope) {
4185 cg->topStmt = stmt;
4186 cg->topScopeStmt = scopeStmt;
4187 }
4188 #endif
4189 }
4190 }
4192 /*
4193 * The parser rewrites 'for (var x = i in o)' to hoist 'var x = i' --
4194 * likewise 'for (let x = i in o)' becomes 'i; for (let x in o)' using
4195 * a TOK_SEQ node to make the two statements appear as one. Therefore
4196 * if this declaration is part of a for-in loop head, we do not need to
4197 * emit op or any source note. Our caller, the TOK_FOR/TOK_IN case in
4198 * js_EmitTree, will annotate appropriately.
4199 */
4200 JS_ASSERT_IF(pn2->pn_defn, pn3 == pn2->pn_expr);
4201 if (forInVar) {
4202 JS_ASSERT(pn->pn_count == 1);
4203 JS_ASSERT(!pn3);
4204 break;
4205 }
4207 if (first &&
4208 !inLetHead &&
4209 js_NewSrcNote2(cx, cg, SRC_DECL,
4210 (pn->pn_op == JSOP_DEFCONST)
4211 ? SRC_DECL_CONST
4212 : (pn->pn_op == JSOP_DEFVAR)
4213 ? SRC_DECL_VAR
4214 : SRC_DECL_LET) < 0) {
4215 return JS_FALSE;
4216 }
4217 if (op == JSOP_ARGUMENTS) {
4218 if (js_Emit1(cx, cg, op) < 0)
4219 return JS_FALSE;
4220 } else if (pn2->pn_cookie != FREE_UPVAR_COOKIE) {
4221 EMIT_UINT16_IMM_OP(op, atomIndex);
4222 } else {
4223 EMIT_INDEX_OP(op, atomIndex);
4224 }
4226 #if JS_HAS_DESTRUCTURING
4227 emit_note_pop:
4228 #endif
4229 tmp = CG_OFFSET(cg);
4230 if (noteIndex >= 0) {
4231 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, tmp-off))
4232 return JS_FALSE;
4233 }
4234 if (!next)
4235 break;
4236 off = tmp;
4237 noteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
4238 if (noteIndex < 0 || js_Emit1(cx, cg, JSOP_POP) < 0)
4239 return JS_FALSE;
4240 }
4242 /* If this is a let head, emit and return a srcnote on the pop. */
4243 if (inLetHead) {
4244 *headNoteIndex = js_NewSrcNote(cx, cg, SRC_DECL);
4245 if (*headNoteIndex < 0)
4246 return JS_FALSE;
4247 if (!(pn->pn_xflags & PNX_POPVAR))
4248 return js_Emit1(cx, cg, JSOP_NOP) >= 0;
4249 }
4251 return !(pn->pn_xflags & PNX_POPVAR) || js_Emit1(cx, cg, JSOP_POP) >= 0;
4252 }
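/*
 * Worked example (rough shapes; the exact ops come from BindNameToSlot):
 * |var a = 1, b = 2;| emits <1; set a; pop; 2; set b; pop>, where the
 * intervening JSOP_POP carries a SRC_PCDELTA note from the loop above so
 * the decompiler can bracket each initializer, and the final pop is
 * emitted only when PNX_POPVAR is set.
 */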
4254 #if defined DEBUG_brendan || defined DEBUG_mrbkap
4255 static JSBool
4256 GettableNoteForNextOp(JSCodeGenerator *cg)
4257 {
4258 ptrdiff_t offset, target;
4259 jssrcnote *sn, *end;
4261 offset = 0;
4262 target = CG_OFFSET(cg);
4263 for (sn = CG_NOTES(cg), end = sn + CG_NOTE_COUNT(cg); sn < end;
4264 sn = SN_NEXT(sn)) {
4265 if (offset == target && SN_IS_GETTABLE(sn))
4266 return JS_TRUE;
4267 offset += SN_DELTA(sn);
4268 }
4269 return JS_FALSE;
4270 }
4271 #endif
4273 /* Top-level named functions need a nop for decompilation. */
4274 static JSBool
4275 EmitFunctionDefNop(JSContext *cx, JSCodeGenerator *cg, uintN index)
4276 {
4277 return js_NewSrcNote2(cx, cg, SRC_FUNCDEF, (ptrdiff_t)index) >= 0 &&
4278 js_Emit1(cx, cg, JSOP_NOP) >= 0;
4279 }
4281 static bool
4282 EmitNewInit(JSContext *cx, JSCodeGenerator *cg, JSProtoKey key, JSParseNode *pn, int sharpnum)
4283 {
4284 if (js_Emit2(cx, cg, JSOP_NEWINIT, (jsbytecode) key) < 0)
4285 return false;
4286 #if JS_HAS_SHARP_VARS
4287 if (cg->hasSharps()) {
4288 if (pn->pn_count != 0)
4289 EMIT_UINT16_IMM_OP(JSOP_SHARPINIT, cg->sharpSlotBase);
4290 if (sharpnum >= 0)
4291 EMIT_UINT16PAIR_IMM_OP(JSOP_DEFSHARP, cg->sharpSlotBase, sharpnum);
4292 } else {
4293 JS_ASSERT(sharpnum < 0);
4294 }
4295 #endif
4296 return true;
4297 }
4299 static bool
4300 EmitEndInit(JSContext *cx, JSCodeGenerator *cg, uint32 count)
4301 {
4302 #if JS_HAS_SHARP_VARS
4303 /* Emit an op for sharp array cleanup and decompilation. */
4304 if (cg->hasSharps() && count != 0)
4305 EMIT_UINT16_IMM_OP(JSOP_SHARPINIT, cg->sharpSlotBase);
4306 #endif
4307 return js_Emit1(cx, cg, JSOP_ENDINIT) >= 0;
4308 }
4310 /* See the SRC_FOR source note offsetBias comments later in this file. */
4311 JS_STATIC_ASSERT(JSOP_NOP_LENGTH == 1);
4312 JS_STATIC_ASSERT(JSOP_POP_LENGTH == 1);
4314 JSBool
4315 js_EmitTree(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
4316 {
4317 JSBool ok, useful, wantval;
4318 JSStmtInfo *stmt, stmtInfo;
4319 ptrdiff_t top, off, tmp, beq, jmp;
4320 JSParseNode *pn2, *pn3;
4321 JSAtom *atom;
4322 JSAtomListElement *ale;
4323 jsatomid atomIndex;
4324 uintN index;
4325 ptrdiff_t noteIndex;
4326 JSSrcNoteType noteType;
4327 jsbytecode *pc;
4328 JSOp op;
4329 TokenKind type;
4330 uint32 argc;
4331 #if JS_HAS_SHARP_VARS
4332 jsint sharpnum;
4333 #endif
4335 JS_CHECK_RECURSION(cx, return JS_FALSE);
4337 ok = JS_TRUE;
4338 cg->emitLevel++;
4339 pn->pn_offset = top = CG_OFFSET(cg);
4341 /* Emit notes to tell the current bytecode's source line number. */
4342 UPDATE_LINE_NUMBER_NOTES(cx, cg, pn->pn_pos.begin.lineno);
4344 switch (pn->pn_type) {
4345 case TOK_FUNCTION:
4346 {
4347 JSFunction *fun;
4348 uintN slot;
4350 #if JS_HAS_XML_SUPPORT
4351 if (pn->pn_arity == PN_NULLARY) {
4352 if (js_Emit1(cx, cg, JSOP_GETFUNNS) < 0)
4353 return JS_FALSE;
4354 break;
4355 }
4356 #endif
4358 fun = (JSFunction *) pn->pn_funbox->object;
4359 JS_ASSERT(FUN_INTERPRETED(fun));
4360 if (fun->u.i.script) {
4361 /*
4362 * This second pass is needed to emit JSOP_NOP with a source note
4363 * for the already-emitted function definition prolog opcode. See
4364 * comments in the TOK_LC case.
4365 */
4366 JS_ASSERT(pn->pn_op == JSOP_NOP);
4367 JS_ASSERT(cg->inFunction());
4368 if (!EmitFunctionDefNop(cx, cg, pn->pn_index))
4369 return JS_FALSE;
4370 break;
4371 }
4373 JS_ASSERT_IF(cx->options & JSOPTION_ANONFUNFIX,
4374 pn->pn_defn ||
4375 (!pn->pn_used && !pn->isTopLevel()) ||
4376 (fun->flags & JSFUN_LAMBDA));
4378 JS_ASSERT_IF(pn->pn_funbox->tcflags & TCF_FUN_HEAVYWEIGHT,
4379 FUN_KIND(fun) == JSFUN_INTERPRETED);
4381 /* Generate code for the function's body. */
4382 void *cg2mark = cg->codePool->getMark();
4383 JSCodeGenerator *cg2space;
4384 cg->codePool->allocateType<JSCodeGenerator>(cg2space);
4385 if (!cg2space) {
4386 js_ReportOutOfScriptQuota(cx);
4387 return JS_FALSE;
4388 }
4389 JSCodeGenerator *cg2 =
4390 new (cg2space) JSCodeGenerator(cg->parser,
4391 cg->codePool, cg->notePool,
4392 pn->pn_pos.begin.lineno);
4394 if (!cg2->init())
4395 return JS_FALSE;
4397 cg2->flags = pn->pn_funbox->tcflags | TCF_IN_FUNCTION;
4398 #if JS_HAS_SHARP_VARS
4399 if (cg2->flags & TCF_HAS_SHARPS) {
4400 cg2->sharpSlotBase = fun->sharpSlotBase(cx);
4401 if (cg2->sharpSlotBase < 0)
4402 return JS_FALSE;
4403 }
4404 #endif
4405 cg2->fun = fun;
4406 cg2->funbox = pn->pn_funbox;
4407 cg2->parent = cg;
4409 /*
4410 * jsparse.cpp:SetStaticLevel limited the static nesting depth to fit in
4411 * 16 bits, reserving the all-ones value for the magic FREE_UPVAR_COOKIE.
4412 * Note the cg2->staticLevel assignment below.
4413 */
4414 JS_ASSERT(cg->staticLevel < JS_BITMASK(16) - 1);
4415 cg2->staticLevel = cg->staticLevel + 1;
4417 /* We measured the max scope depth when we parsed the function. */
4418 JS_SCOPE_DEPTH_METERING(cg2->maxScopeDepth = uint16(-1));
4419 if (!js_EmitFunctionScript(cx, cg2, pn->pn_body))
4420 pn = NULL;
4422 cg2->~JSCodeGenerator();
4423 cg->codePool->release(cg2mark);
4424 cg2 = NULL;
4425 if (!pn)
4426 return JS_FALSE;
4428 /* Make the function object a literal in the outer script's pool. */
4429 index = cg->objectList.index(pn->pn_funbox);
4431 /* Emit a bytecode pointing to the closure object in its immediate. */
4432 op = PN_OP(pn);
4433 if (op != JSOP_NOP) {
4434 if ((pn->pn_funbox->tcflags & TCF_GENEXP_LAMBDA) &&
4435 js_NewSrcNote(cx, cg, SRC_GENEXP) < 0) {
4436 return JS_FALSE;
4437 }
4438 EMIT_INDEX_OP(op, index);
4439 break;
4440 }
4442 /*
4443 * For a script we emit the code as we parse. Thus the bytecode for
4444 * top-level functions should go in the prolog to predefine their
4445 * names in the variable object before the already-generated main code
4446 * is executed. This extra work for top-level scripts is not necessary
4447 * when we emit the code for a function. It is fully parsed prior to
4448 * invocation of the emitter and calls to js_EmitTree for function
4449 * definitions can be scheduled before generating the rest of code.
4450 */
4451 if (!cg->inFunction()) {
4452 JS_ASSERT(!cg->topStmt);
4453 CG_SWITCH_TO_PROLOG(cg);
4454 op = FUN_FLAT_CLOSURE(fun) ? JSOP_DEFFUN_FC : JSOP_DEFFUN;
4455 EMIT_INDEX_OP(op, index);
4456 CG_SWITCH_TO_MAIN(cg);
4458 /* Emit NOP for the decompiler. */
4459 if (!EmitFunctionDefNop(cx, cg, index))
4460 return JS_FALSE;
4461 } else {
4462 #ifdef DEBUG
4463 JSLocalKind localKind =
4464 #endif
4465 js_LookupLocal(cx, cg->fun, fun->atom, &slot);
4466 JS_ASSERT(localKind == JSLOCAL_VAR || localKind == JSLOCAL_CONST);
4467 JS_ASSERT(index < JS_BIT(20));
4468 pn->pn_index = index;
4469 op = FUN_FLAT_CLOSURE(fun) ? JSOP_DEFLOCALFUN_FC : JSOP_DEFLOCALFUN;
4470 if (!EmitSlotIndexOp(cx, op, slot, index, cg))
4471 return JS_FALSE;
4472 }
4473 break;
4474 }
4476 case TOK_ARGSBODY:
4477 ok = js_EmitTree(cx, cg, pn->last());
4478 break;
4480 case TOK_UPVARS:
4481 JS_ASSERT(cg->lexdeps.count == 0);
4482 JS_ASSERT(pn->pn_names.count != 0);
4483 cg->lexdeps = pn->pn_names;
4484 ok = js_EmitTree(cx, cg, pn->pn_tree);
4485 break;
4487 case TOK_IF:
4488 /* Initialize so we can detect else-if chains and avoid recursion. */
4489 stmtInfo.type = STMT_IF;
4490 beq = jmp = -1;
4491 noteIndex = -1;
4493 if_again:
4494 /* Emit code for the condition before pushing stmtInfo. */
4495 if (!js_EmitTree(cx, cg, pn->pn_kid1))
4496 return JS_FALSE;
4497 top = CG_OFFSET(cg);
4498 if (stmtInfo.type == STMT_IF) {
4499 js_PushStatement(cg, &stmtInfo, STMT_IF, top);
4500 } else {
4501 /*
4502 * We came here from the goto further below that detects else-if
4503 * chains, so we must mutate stmtInfo back into a STMT_IF record.
4504 * Also (see below for why) we need a note offset for SRC_IF_ELSE
4505 * to help the decompiler. Actually, we need two offsets, one for
4506 * decompiling any else clause and the second for decompiling an
4507 * else-if chain without bracing, overindenting, or incorrectly
4508 * scoping let declarations.
4509 */
4510 JS_ASSERT(stmtInfo.type == STMT_ELSE);
4511 stmtInfo.type = STMT_IF;
4512 stmtInfo.update = top;
4513 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, jmp - beq))
4514 return JS_FALSE;
4515 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 1, top - beq))
4516 return JS_FALSE;
4517 }
4519 /* Emit an annotated branch-if-false around the then part. */
4520 pn3 = pn->pn_kid3;
4521 noteIndex = js_NewSrcNote(cx, cg, pn3 ? SRC_IF_ELSE : SRC_IF);
4522 if (noteIndex < 0)
4523 return JS_FALSE;
4524 beq = EmitJump(cx, cg, JSOP_IFEQ, 0);
4525 if (beq < 0)
4526 return JS_FALSE;
4528 /* Emit code for the then and optional else parts. */
4529 if (!js_EmitTree(cx, cg, pn->pn_kid2))
4530 return JS_FALSE;
4531 if (pn3) {
4532 /* Modify stmtInfo so we know we're in the else part. */
4533 stmtInfo.type = STMT_ELSE;
4535 /*
4536 * Emit a JSOP_BACKPATCH op to jump from the end of our then part
4537 * around the else part. The js_PopStatementCG call at the bottom
4538 * of this switch case will fix up the backpatch chain linked from
4539 * stmtInfo.breaks.
4540 */
4541 jmp = EmitGoto(cx, cg, &stmtInfo, &stmtInfo.breaks, NULL, SRC_NULL);
4542 if (jmp < 0)
4543 return JS_FALSE;
4545 /* Ensure the branch-if-false comes here, then emit the else. */
4546 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, beq);
4547 if (pn3->pn_type == TOK_IF) {
4548 pn = pn3;
4549 goto if_again;
4550 }
4552 if (!js_EmitTree(cx, cg, pn3))
4553 return JS_FALSE;
4555 /*
4556 * Annotate SRC_IF_ELSE with the offset from branch to jump, for
4557 * the decompiler's benefit. We can't just "back up" from the pc
4558 * of the else clause, because we don't know whether an extended
4559 * jump was required to leap from the end of the then clause over
4560 * the else clause.
4561 */
4562 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, jmp - beq))
4563 return JS_FALSE;
4564 } else {
4565 /* No else part, fixup the branch-if-false to come here. */
4566 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, beq);
4567 }
4568 ok = js_PopStatementCG(cx, cg);
4569 break;
4571 case TOK_SWITCH:
4572 /* Out of line to avoid bloating js_EmitTree's stack frame size. */
4573 ok = EmitSwitch(cx, cg, pn, &stmtInfo);
4574 break;
4576 case TOK_WHILE:
4577 /*
4578 * Minimize bytecodes issued for one or more iterations by jumping to
4579 * the condition below the body and closing the loop if the condition
4580 * is true with a backward branch. For iteration count i:
4581 *
4582 * i            test at the top                 test at the bottom
4583 * =            ===============                 ==================
4584 * 0            ifeq-pass                       goto; ifne-fail
4585 * 1            ifeq-fail; goto; ifne-pass      goto; ifne-pass; ifne-fail
4586 * 2            2*(ifeq-fail; goto); ifeq-pass  goto; 2*ifne-pass; ifne-fail
4587 * . . .
4588 * N            N*(ifeq-fail; goto); ifeq-pass  goto; N*ifne-pass; ifne-fail
4589 *
4590 * SpiderMonkey, pre-mozilla.org, emitted while parsing and so used
4591 * test at the top. When JSParseNode trees were added during the ES3
4592 * work (1998-9), the code generation scheme was not optimized, and
4593 * the decompiler continued to take advantage of the branch and jump
4594 * that bracketed the body. But given the SRC_WHILE note, it is easy
4595 * to support the more efficient scheme.
4596 */
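/*
 * Concrete shape of the bottom-tested scheme (illustrative): for
 * |while (cond) body| the code emitted below is
 *
 *     goto L2              annotated with SRC_WHILE
 *   L1: trace
 *     <body>
 *   L2: <cond>
 *     ifne L1
 *
 * so a zero-iteration loop runs only the goto and one failing ifne.
 */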
4597 js_PushStatement(cg, &stmtInfo, STMT_WHILE_LOOP, top);
4598 noteIndex = js_NewSrcNote(cx, cg, SRC_WHILE);
4599 if (noteIndex < 0)
4600 return JS_FALSE;
4601 jmp = EmitJump(cx, cg, JSOP_GOTO, 0);
4602 if (jmp < 0)
4603 return JS_FALSE;
4604 top = js_Emit1(cx, cg, JSOP_TRACE);
4605 if (top < 0)
4606 return JS_FALSE;
4607 if (!js_EmitTree(cx, cg, pn->pn_right))
4608 return JS_FALSE;
4609 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
4610 if (!js_EmitTree(cx, cg, pn->pn_left))
4611 return JS_FALSE;
4612 beq = EmitJump(cx, cg, JSOP_IFNE, top - CG_OFFSET(cg));
4613 if (beq < 0)
4614 return JS_FALSE;
4615 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, beq - jmp))
4616 return JS_FALSE;
4617 ok = js_PopStatementCG(cx, cg);
4618 break;
4620 case TOK_DO:
4621 /* Emit an annotated nop so we know to decompile a 'do' keyword. */
4622 noteIndex = js_NewSrcNote(cx, cg, SRC_WHILE);
4623 if (noteIndex < 0 || js_Emit1(cx, cg, JSOP_NOP) < 0)
4624 return JS_FALSE;
4626 /* Compile the loop body. */
4627 top = js_Emit1(cx, cg, JSOP_TRACE);
4628 if (top < 0)
4629 return JS_FALSE;
4630 js_PushStatement(cg, &stmtInfo, STMT_DO_LOOP, top);
4631 if (!js_EmitTree(cx, cg, pn->pn_left))
4632 return JS_FALSE;
4634 /* Set loop and enclosing label update offsets, for continue. */
4635 stmt = &stmtInfo;
4636 do {
4637 stmt->update = CG_OFFSET(cg);
4638 } while ((stmt = stmt->down) != NULL && stmt->type == STMT_LABEL);
4640 /* Compile the loop condition, now that continues know where to go. */
4641 if (!js_EmitTree(cx, cg, pn->pn_right))
4642 return JS_FALSE;
4644 /*
4645 * Since we use JSOP_IFNE for other purposes as well as for do-while
4646 * loops, we must store 1 + (beq - top) in the SRC_WHILE note offset,
4647 * and the decompiler must get that delta and decompile recursively.
4648 */
4649 beq = EmitJump(cx, cg, JSOP_IFNE, top - CG_OFFSET(cg));
4650 if (beq < 0)
4651 return JS_FALSE;
4652 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, 1 + (beq - top)))
4653 return JS_FALSE;
4654 ok = js_PopStatementCG(cx, cg);
4655 break;
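/*
 * Illustrative layout for |do body while (cond);| as emitted above:
 *
 *     nop                  annotated with SRC_WHILE
 *   L1: trace
 *     <body>
 *     <cond>
 *     ifne L1
 *
 * The note offset stores 1 + (beq - top) because JSOP_IFNE is shared
 * with other constructs and the decompiler needs the span explicitly.
 */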
4657 case TOK_FOR:
4658 beq = 0; /* suppress gcc warnings */
4659 jmp = -1;
4660 pn2 = pn->pn_left;
4661 js_PushStatement(cg, &stmtInfo, STMT_FOR_LOOP, top);
4663 if (pn2->pn_type == TOK_IN) {
4664 /* Set stmtInfo type for later testing. */
4665 stmtInfo.type = STMT_FOR_IN_LOOP;
4667 /*
4668 * If the left part is 'var x', emit code to define x if necessary
4669 * using a prolog opcode, but do not emit a pop. If the left part
4670 * is 'var x = i', emit prolog code to define x if necessary; then
4671 * emit code to evaluate i, assign the result to x, and pop the
4672 * result off the stack.
4673 *
4674 * All the logic to do this is implemented in the outer switch's
4675 * TOK_VAR case, conditioned on pn_xflags flags set by the parser.
4676 *
4677 * In the 'for (var x = i in o) ...' case, the js_EmitTree(...pn3)
4678 * called here will generate the proper note for the assignment
4679 * op that sets x = i, hoisting the initialized var declaration
4680 * out of the loop: 'var x = i; for (x in o) ...'.
4681 *
4682 * In the 'for (var x in o) ...' case, nothing but the prolog op
4683 * (if needed) should be generated here, we must emit the note
4684 * just before the JSOP_FOR* opcode in the switch on pn3->pn_type
4685 * a bit below, so nothing is hoisted: 'for (var x in o) ...'.
4686 *
4687 * A 'for (let x = i in o)' loop must not be hoisted, since in
4688 * this form the let variable is scoped by the loop body (but not
4689 * the head). The initializer expression i must be evaluated for
4690 * any side effects. So we hoist only i in the let case.
4691 */
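/*
 * Source-level examples of the hoisting rules above (assumed):
 *
 *     for (var x = i in o) ...   =>   var x = i; for (x in o) ...
 *     for (let x = i in o) ...   =>   i; for (let x in o) ...
 *
 * Only the let form keeps x scoped to the loop body; in both forms i is
 * evaluated once, before the loop, for its side effects.
 */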
4692 pn3 = pn2->pn_left;
4693 type = PN_TYPE(pn3);
4694 cg->flags |= TCF_IN_FOR_INIT;
4695 if (TokenKindIsDecl(type) && !js_EmitTree(cx, cg, pn3))
4696 return JS_FALSE;
4697 cg->flags &= ~TCF_IN_FOR_INIT;
4699 /* Compile the object expression to the right of 'in'. */
4700 if (!js_EmitTree(cx, cg, pn2->pn_right))
4701 return JS_FALSE;
4703 /*
4704 * Emit a bytecode to convert top of stack value to the iterator
4705 * object depending on the loop variant (for-in, for-each-in, or
4706 * destructuring for-in).
4707 */
4708 JS_ASSERT(pn->pn_op == JSOP_ITER);
4709 if (js_Emit2(cx, cg, JSOP_ITER, (uint8) pn->pn_iflags) < 0)
4710 return JS_FALSE;
4712 /* Annotate so the decompiler can find the loop-closing jump. */
4713 noteIndex = js_NewSrcNote(cx, cg, SRC_FOR_IN);
4714 if (noteIndex < 0)
4715 return JS_FALSE;
4717 /*
4718 * Jump down to the loop condition to minimize overhead assuming at
4719 * least one iteration, as the other loop forms do.
4720 */
4721 jmp = EmitJump(cx, cg, JSOP_GOTO, 0);
4722 if (jmp < 0)
4723 return JS_FALSE;
4725 top = CG_OFFSET(cg);
4726 SET_STATEMENT_TOP(&stmtInfo, top);
4727 if (js_Emit1(cx, cg, JSOP_TRACE) < 0)
4728 return JS_FALSE;
4730 #ifdef DEBUG
4731 intN loopDepth = cg->stackDepth;
4732 #endif
4734 /*
4735 * Compile a JSOP_FOR* bytecode based on the left hand side.
4736 *
4737 * Initialize op to JSOP_SETNAME in case of |for ([a, b] in o)...|
4738 * or similar, to signify assignment, rather than declaration, to
4739 * the decompiler. EmitDestructuringOps takes a prolog bytecode
4740 * parameter and emits the appropriate source note, defaulting to
4741 * assignment, so JSOP_SETNAME is not critical here; many similar
4742 * ops could be used -- just not JSOP_NOP (which means 'let').
4743 */
4744 op = JSOP_SETNAME;
4745 switch (type) {
4746 #if JS_HAS_BLOCK_SCOPE
4747 case TOK_LET:
4748 #endif
4749 case TOK_VAR:
4750 JS_ASSERT(pn3->pn_arity == PN_LIST && pn3->pn_count == 1);
4751 pn3 = pn3->pn_head;
4752 #if JS_HAS_DESTRUCTURING
4753 if (pn3->pn_type == TOK_ASSIGN) {
4754 pn3 = pn3->pn_left;
4755 JS_ASSERT(pn3->pn_type == TOK_RB || pn3->pn_type == TOK_RC);
4756 }
4757 if (pn3->pn_type == TOK_RB || pn3->pn_type == TOK_RC) {
4758 op = PN_OP(pn2->pn_left);
4759 goto destructuring_for;
4760 }
4761 #else
4762 JS_ASSERT(pn3->pn_type == TOK_NAME);
4763 #endif
4764 /* FALL THROUGH */
4766 case TOK_NAME:
4767 /*
4768 * Always annotate JSOP_FORLOCAL if given input of the form
4769 * 'for (let x in o)' -- the decompiler must not hoist the
4770 * 'let x' out of the loop head, or x will be bound in the
4771 * wrong scope. Likewise, for the sake of higher decompilation
4772 * fidelity only, do not hoist 'var x'
4773 * when given 'for (var x in o)'.
4774 */
4775 if ((
4776 #if JS_HAS_BLOCK_SCOPE
4777 type == TOK_LET ||
4778 #endif
4779 (type == TOK_VAR && !pn3->maybeExpr())) &&
4780 js_NewSrcNote2(cx, cg, SRC_DECL,
4781 (type == TOK_VAR)
4782 ? SRC_DECL_VAR
4783 : SRC_DECL_LET) < 0) {
4784 return JS_FALSE;
4785 }
4786 if (pn3->pn_cookie != FREE_UPVAR_COOKIE) {
4787 op = PN_OP(pn3);
4788 switch (op) {
4789 case JSOP_GETARG: /* FALL THROUGH */
4790 case JSOP_SETARG: op = JSOP_FORARG; break;
4791 case JSOP_GETGVAR: /* FALL THROUGH */
4792 case JSOP_SETGVAR: op = JSOP_FORNAME; break;
4793 case JSOP_GETLOCAL: /* FALL THROUGH */
4794 case JSOP_SETLOCAL: op = JSOP_FORLOCAL; break;
4795 default: JS_ASSERT(0);
4796 }
4797 } else {
4798 pn3->pn_op = JSOP_FORNAME;
4799 if (!BindNameToSlot(cx, cg, pn3))
4800 return JS_FALSE;
4801 op = PN_OP(pn3);
4802 }
4803 if (pn3->isConst()) {
4804 ReportCompileErrorNumber(cx, CG_TS(cg), pn3, JSREPORT_ERROR,
4805 JSMSG_BAD_FOR_LEFTSIDE);
4806 return JS_FALSE;
4807 }
4808 if (pn3->pn_cookie != FREE_UPVAR_COOKIE) {
4809 atomIndex = (jsatomid) pn3->pn_cookie;
4810 EMIT_UINT16_IMM_OP(op, atomIndex);
4811 } else {
4812 if (!EmitAtomOp(cx, pn3, op, cg))
4813 return JS_FALSE;
4814 }
4815 break;
4817 case TOK_DOT:
4818 /*
4819 * 'for (o.p in q)' can use JSOP_FORPROP only if evaluating 'o'
4820 * has no side effects.
4821 */
4822 useful = JS_FALSE;
4823 if (!CheckSideEffects(cx, cg, pn3->expr(), &useful))
4824 return JS_FALSE;
4825 if (!useful) {
4826 if (!EmitPropOp(cx, pn3, JSOP_FORPROP, cg, JS_FALSE))
4827 return JS_FALSE;
4828 break;
4829 }
4830 /* FALL THROUGH */
4832 #if JS_HAS_DESTRUCTURING
4833 destructuring_for:
4834 #endif
4835 default:
4836 if (js_Emit1(cx, cg, JSOP_FORELEM) < 0)
4837 return JS_FALSE;
4838 JS_ASSERT(cg->stackDepth >= 2);
4840 #if JS_HAS_DESTRUCTURING
4841 if (pn3->pn_type == TOK_RB || pn3->pn_type == TOK_RC) {
4842 if (!EmitDestructuringOps(cx, cg, op, pn3))
4843 return JS_FALSE;
4844 if (js_Emit1(cx, cg, JSOP_POP) < 0)
4845 return JS_FALSE;
4846 } else
4847 #endif
4848 if (pn3->pn_type == TOK_LP) {
4849 JS_ASSERT(pn3->pn_op == JSOP_SETCALL);
4850 if (!js_EmitTree(cx, cg, pn3))
4851 return JS_FALSE;
4852 if (js_Emit1(cx, cg, JSOP_ENUMELEM) < 0)
4853 return JS_FALSE;
4854 } else
4855 #if JS_HAS_XML_SUPPORT
4856 if (pn3->pn_type == TOK_UNARYOP) {
4857 JS_ASSERT(pn3->pn_op == JSOP_BINDXMLNAME);
4858 if (!js_EmitTree(cx, cg, pn3))
4859 return JS_FALSE;
4860 if (js_Emit1(cx, cg, JSOP_ENUMELEM) < 0)
4861 return JS_FALSE;
4862 } else
4863 #endif
4864 if (!EmitElemOp(cx, pn3, JSOP_ENUMELEM, cg))
4865 return JS_FALSE;
4866 break;
4867 }
4869 /* The stack should be balanced around the JSOP_FOR* opcode sequence. */
4870 JS_ASSERT(cg->stackDepth == loopDepth);
4872 /* Set the first srcnote offset so we can find the start of the loop body. */
4873 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, CG_OFFSET(cg) - jmp))
4874 return JS_FALSE;
4876 /* Emit code for the loop body. */
4877 if (!js_EmitTree(cx, cg, pn->pn_right))
4878 return JS_FALSE;
4880 /* Set loop and enclosing "update" offsets, for continue. */
4881 stmt = &stmtInfo;
4882 do {
4883 stmt->update = CG_OFFSET(cg);
4884 } while ((stmt = stmt->down) != NULL && stmt->type == STMT_LABEL);
4886 /*
4887 * Fix up the goto that starts the loop to jump down to JSOP_MOREITER.
4888 */
4889 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
4890 if (js_Emit1(cx, cg, JSOP_MOREITER) < 0)
4891 return JS_FALSE;
4892 beq = EmitJump(cx, cg, JSOP_IFNE, top - CG_OFFSET(cg));
4893 if (beq < 0)
4894 return JS_FALSE;
4896 /* Set the second srcnote offset so we can find the closing jump. */
4897 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 1, beq - jmp))
4898 return JS_FALSE;
4899 } else {
4900 /* C-style for (init; cond; update) ... loop. */
4901 op = JSOP_POP;
4902 pn3 = pn2->pn_kid1;
4903 if (!pn3) {
4904 /* No initializer: emit an annotated nop for the decompiler. */
4905 op = JSOP_NOP;
4906 } else {
4907 cg->flags |= TCF_IN_FOR_INIT;
4908 #if JS_HAS_DESTRUCTURING
4909 if (pn3->pn_type == TOK_ASSIGN &&
4910 !MaybeEmitGroupAssignment(cx, cg, op, pn3, &op)) {
4911 return JS_FALSE;
4912 }
4913 #endif
4914 if (op == JSOP_POP) {
4915 if (!js_EmitTree(cx, cg, pn3))
4916 return JS_FALSE;
4917 if (TokenKindIsDecl(PN_TYPE(pn3))) {
4918 /*
4919 * Check whether a destructuring-initialized var decl
4920 * was optimized to a group assignment. If so, we do
4921 * not need to emit a pop below, so switch to a nop,
4922 * just for the decompiler.
4923 */
4924 JS_ASSERT(pn3->pn_arity == PN_LIST);
4925 if (pn3->pn_xflags & PNX_GROUPINIT)
4926 op = JSOP_NOP;
4927 }
4928 }
4929 cg->flags &= ~TCF_IN_FOR_INIT;
4930 }
4932 /*
4933 * NB: the SRC_FOR note has offsetBias 1 (JSOP_{NOP,POP}_LENGTH).
4934 * Use tmp to hold the biased srcnote "top" offset, which differs
4935 * from the top local variable by the length of the JSOP_GOTO{,X}
4936 * emitted in between tmp and top if this loop has a condition.
4937 */
4938 noteIndex = js_NewSrcNote(cx, cg, SRC_FOR);
4939 if (noteIndex < 0 || js_Emit1(cx, cg, op) < 0)
4940 return JS_FALSE;
4941 tmp = CG_OFFSET(cg);
4943 if (pn2->pn_kid2) {
4944 /* Goto the loop condition, which branches back to iterate. */
4945 jmp = EmitJump(cx, cg, JSOP_GOTO, 0);
4946 if (jmp < 0)
4947 return JS_FALSE;
4948 }
4950 top = CG_OFFSET(cg);
4951 SET_STATEMENT_TOP(&stmtInfo, top);
4953 /* Emit code for the loop body. */
4954 if (js_Emit1(cx, cg, JSOP_TRACE) < 0)
4955 return JS_FALSE;
4956 if (!js_EmitTree(cx, cg, pn->pn_right))
4957 return JS_FALSE;
4959 /* Set the second note offset so we can find the update part. */
4960 JS_ASSERT(noteIndex != -1);
4961 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 1,
4962 CG_OFFSET(cg) - tmp)) {
4963 return JS_FALSE;
4964 }
4966 /* Set loop and enclosing "update" offsets, for continue. */
4967 stmt = &stmtInfo;
4968 do {
4969 stmt->update = CG_OFFSET(cg);
4970 } while ((stmt = stmt->down) != NULL && stmt->type == STMT_LABEL);
4972 /* Check for update code to do before the condition (if any). */
4973 pn3 = pn2->pn_kid3;
4974 if (pn3) {
4975 op = JSOP_POP;
4976 #if JS_HAS_DESTRUCTURING
4977 if (pn3->pn_type == TOK_ASSIGN &&
4978 !MaybeEmitGroupAssignment(cx, cg, op, pn3, &op)) {
4979 return JS_FALSE;
4980 }
4981 #endif
4982 if (op == JSOP_POP && !js_EmitTree(cx, cg, pn3))
4983 return JS_FALSE;
4985 /* Always emit the POP or NOP, to help the decompiler. */
4986 if (js_Emit1(cx, cg, op) < 0)
4987 return JS_FALSE;
4989 /* Restore the absolute line number for source note readers. */
4990 off = (ptrdiff_t) pn->pn_pos.end.lineno;
4991 if (CG_CURRENT_LINE(cg) != (uintN) off) {
4992 if (js_NewSrcNote2(cx, cg, SRC_SETLINE, off) < 0)
4993 return JS_FALSE;
4994 CG_CURRENT_LINE(cg) = (uintN) off;
4995 }
4996 }
4998 /* Set the first note offset so we can find the loop condition. */
4999 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0,
5000 CG_OFFSET(cg) - tmp)) {
5001 return JS_FALSE;
5002 }
5004 if (pn2->pn_kid2) {
5005 /* Fix up the goto from top to target the loop condition. */
5006 JS_ASSERT(jmp >= 0);
5007 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
5009 if (!js_EmitTree(cx, cg, pn2->pn_kid2))
5010 return JS_FALSE;
5011 }
5013 /* The third note offset helps us find the loop-closing jump. */
5014 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 2,
5015 CG_OFFSET(cg) - tmp)) {
5016 return JS_FALSE;
5017 }
5019 if (pn2->pn_kid2) {
5020 beq = EmitJump(cx, cg, JSOP_IFNE, top - CG_OFFSET(cg));
5021 if (beq < 0)
5022 return JS_FALSE;
5023 } else {
5024 /* No loop condition -- emit the loop-closing jump. */
5025 jmp = EmitJump(cx, cg, JSOP_GOTO, top - CG_OFFSET(cg));
5026 if (jmp < 0)
5027 return JS_FALSE;
5028 }
5029 }
5031 /* Now fix up all breaks and continues (before for/in's JSOP_ENDITER). */
5032 if (!js_PopStatementCG(cx, cg))
5033 return JS_FALSE;
5035 if (pn2->pn_type == TOK_IN) {
5036 if (!NewTryNote(cx, cg, JSTRY_ITER, cg->stackDepth, top, CG_OFFSET(cg)) ||
5037 js_Emit1(cx, cg, JSOP_ENDITER) < 0) {
5038 return JS_FALSE;
5039 }
5040 }
5041 break;
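/*
 * Overall for-in shape (sketch; the JSOP_FOR* op varies with the LHS):
 *
 *     <o>; iter <flags>    annotated with SRC_FOR_IN
 *     goto L2
 *   L1: trace
 *     <JSOP_FOR* assigns the next value to the left-hand side>
 *     <body>
 *   L2: moreiter
 *     ifne L1
 *     enditer              covered by a JSTRY_ITER try note
 */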
5043 case TOK_BREAK:
5044 stmt = cg->topStmt;
5045 atom = pn->pn_atom;
5046 if (atom) {
5047 ale = cg->atomList.add(cg->parser, atom);
5048 if (!ale)
5049 return JS_FALSE;
5050 while (stmt->type != STMT_LABEL || stmt->label != atom)
5051 stmt = stmt->down;
5052 noteType = SRC_BREAK2LABEL;
5053 } else {
5054 ale = NULL;
5055 while (!STMT_IS_LOOP(stmt) && stmt->type != STMT_SWITCH)
5056 stmt = stmt->down;
5057 noteType = (stmt->type == STMT_SWITCH) ? SRC_NULL : SRC_BREAK;
5058 }
5060 if (EmitGoto(cx, cg, stmt, &stmt->breaks, ale, noteType) < 0)
5061 return JS_FALSE;
5062 break;
5064 case TOK_CONTINUE:
5065 stmt = cg->topStmt;
5066 atom = pn->pn_atom;
5067 if (atom) {
5068 /* Find the loop statement enclosed by the matching label. */
5069 JSStmtInfo *loop = NULL;
5070 ale = cg->atomList.add(cg->parser, atom);
5071 if (!ale)
5072 return JS_FALSE;
5073 while (stmt->type != STMT_LABEL || stmt->label != atom) {
5074 if (STMT_IS_LOOP(stmt))
5075 loop = stmt;
5076 stmt = stmt->down;
5077 }
5078 stmt = loop;
5079 noteType = SRC_CONT2LABEL;
5080 } else {
5081 ale = NULL;
5082 while (!STMT_IS_LOOP(stmt))
5083 stmt = stmt->down;
5084 noteType = SRC_CONTINUE;
5085 }
5087 if (EmitGoto(cx, cg, stmt, &stmt->continues, ale, noteType) < 0)
5088 return JS_FALSE;
5089 break;
5091 case TOK_WITH:
5092 if (!js_EmitTree(cx, cg, pn->pn_left))
5093 return JS_FALSE;
5094 js_PushStatement(cg, &stmtInfo, STMT_WITH, CG_OFFSET(cg));
5095 if (js_Emit1(cx, cg, JSOP_ENTERWITH) < 0)
5096 return JS_FALSE;
5097 if (!js_EmitTree(cx, cg, pn->pn_right))
5098 return JS_FALSE;
5099 if (js_Emit1(cx, cg, JSOP_LEAVEWITH) < 0)
5100 return JS_FALSE;
5101 ok = js_PopStatementCG(cx, cg);
5102 break;
5104 case TOK_TRY:
5105 {
5106 ptrdiff_t tryStart, tryEnd, catchJump, finallyStart;
5107 intN depth;
5108 JSParseNode *lastCatch;
5110 catchJump = -1;
5112 /*
5113 * Push stmtInfo to track jumps-over-catches and gosubs-to-finally
5114 * for later fixup.
5115 *
5116 * When a finally block is active (STMT_FINALLY in our tree context),
5117 * non-local jumps (including jumps-over-catches) result in a GOSUB
5118 * being written into the bytecode stream and fixed-up later (c.f.
5119 * EmitBackPatchOp and BackPatch).
5120 */
5121 js_PushStatement(cg, &stmtInfo,
5122 pn->pn_kid3 ? STMT_FINALLY : STMT_TRY,
5123 CG_OFFSET(cg));
5125 /*
5126 * Since an exception can be thrown at any place inside the try block,
5127 * we need to restore the stack and the scope chain before we transfer
5128 * the control to the exception handler.
5129 *
5130 * For that we store in a try note associated with the catch or
5131 * finally block the stack depth upon the try entry. The interpreter
5132 * uses this depth to properly unwind the stack and the scope chain.
5133 */
5134 depth = cg->stackDepth;
5136 /* Mark try location for decompilation, then emit try block. */
5137 if (js_Emit1(cx, cg, JSOP_TRY) < 0)
5138 return JS_FALSE;
5139 tryStart = CG_OFFSET(cg);
5140 if (!js_EmitTree(cx, cg, pn->pn_kid1))
5141 return JS_FALSE;
5142 JS_ASSERT(depth == cg->stackDepth);
5144 /* GOSUB to finally, if present. */
5145 if (pn->pn_kid3) {
5146 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
5147 return JS_FALSE;
5148 jmp = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &GOSUBS(stmtInfo));
5149 if (jmp < 0)
5150 return JS_FALSE;
5151 }
5153 /* Emit (hidden) jump over catch and/or finally. */
5154 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
5155 return JS_FALSE;
5156 jmp = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &catchJump);
5157 if (jmp < 0)
5158 return JS_FALSE;
5160 tryEnd = CG_OFFSET(cg);
5162 /* If this try has a catch block, emit it. */
5163 pn2 = pn->pn_kid2;
5164 lastCatch = NULL;
5165 if (pn2) {
5166 jsint count = 0; /* previous catch block's population */
5168 /*
5169 * The emitted code for a catch block looks like:
5170 *
5171 * [throwing]                          only if 2nd+ catch block
5172 * [leaveblock]                        only if 2nd+ catch block
5173 * enterblock                          with SRC_CATCH
5174 * exception
5175 * [dup]                               only if catchguard
5176 * setlocalpop <slot>                  or destructuring code
5177 * [< catchguard code >]               if there's a catchguard
5178 * [ifeq <offset to next catch block>]         " "
5179 * [pop]                               only if catchguard
5180 * < catch block contents >
5181 * leaveblock
5182 * goto <end of catch blocks>          non-local; finally applies
5183 *
5184 * If there's no catch block without a catchguard, the last
5185 * <offset to next catch block> points to rethrow code. This
5186 * code will [gosub] to the finally code if appropriate, and is
5187 * also used for the catch-all trynote for capturing exceptions
5188 * thrown from catch{} blocks.
5189 */
5190 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
5191 ptrdiff_t guardJump, catchNote;
5193 JS_ASSERT(cg->stackDepth == depth);
5194 guardJump = GUARDJUMP(stmtInfo);
5195 if (guardJump != -1) {
5196 /* Fix up and clean up previous catch block. */
5197 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, guardJump);
5199 /*
5200 * Account for JSOP_ENTERBLOCK (whose block object count
5201 * is saved below) and the pushed exception object that we
5202 * still have after jumping from the previous guard.
5203 */
5204 cg->stackDepth = depth + count + 1;
5206 /*
5207 * Move exception back to cx->exception to prepare for
5208 * the next catch. We hide [throwing] from the decompiler
5209 * since it compensates for the hidden JSOP_DUP at the
5210 * start of the previous guarded catch.
5211 */
5212 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0 ||
5213 js_Emit1(cx, cg, JSOP_THROWING) < 0) {
5214 return JS_FALSE;
5216 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
5217 return JS_FALSE;
5218 EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK, count);
5219 JS_ASSERT(cg->stackDepth == depth);
5220 }
5222 /*
5223 * Annotate the JSOP_ENTERBLOCK that's about to be generated
5224 * by the call to js_EmitTree immediately below. Save this
5225 * source note's index in stmtInfo for use by the TOK_CATCH:
5226 * case, where the length of the catch guard is set as the
5227 * note's offset.
5228 */
5229 catchNote = js_NewSrcNote2(cx, cg, SRC_CATCH, 0);
5230 if (catchNote < 0)
5231 return JS_FALSE;
5232 CATCHNOTE(stmtInfo) = catchNote;
5234 /*
5235 * Emit the lexical scope and catch body. Save the catch's
5236 * block object population via count, for use when targeting
5237 * guardJump at the next catch (the guard mismatch case).
5238 */
5239 JS_ASSERT(pn3->pn_type == TOK_LEXICALSCOPE);
5240 count = OBJ_BLOCK_COUNT(cx, pn3->pn_objbox->object);
5241 if (!js_EmitTree(cx, cg, pn3))
5242 return JS_FALSE;
5244 /* gosub <finally>, if required */
5245 if (pn->pn_kid3) {
5246 jmp = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH,
5247 &GOSUBS(stmtInfo));
5248 if (jmp < 0)
5249 return JS_FALSE;
5250 JS_ASSERT(cg->stackDepth == depth);
5251 }
5253 /*
5254 * Jump over the remaining catch blocks. This will get fixed
5255 * up to jump to after catch/finally.
5256 */
5257 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
5258 return JS_FALSE;
5259 jmp = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &catchJump);
5260 if (jmp < 0)
5261 return JS_FALSE;
5263 /*
5264 * Save a pointer to the last catch node to handle try-finally
5265 * and try-catch(guard)-finally special cases.
5266 */
5267 lastCatch = pn3->expr();
5268 }
5269 }
5271 /*
5272 * Last catch guard jumps to the rethrow code sequence if none of the
5273 * guards match. Target guardJump at the beginning of the rethrow
5274 * sequence, just in case a guard expression throws and leaves the
5275 * stack unbalanced.
5276 */
5277 if (lastCatch && lastCatch->pn_kid2) {
5278 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, GUARDJUMP(stmtInfo));
5280 /* Sync the stack to take into account pushed exception. */
5281 JS_ASSERT(cg->stackDepth == depth);
5282 cg->stackDepth = depth + 1;
5284 /*
5285 * Rethrow the exception, delegating execution of the finally block,
5286 * if any, to the exception handler.
5287 */
5288 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0 ||
5289 js_Emit1(cx, cg, JSOP_THROW) < 0) {
5290 return JS_FALSE;
5291 }
5292 }
5294 JS_ASSERT(cg->stackDepth == depth);
5296 /* Emit finally handler if any. */
5297 finallyStart = 0; /* to quell GCC uninitialized warnings */
5298 if (pn->pn_kid3) {
5299 /*
5300 * Fix up the gosubs that might have been emitted before non-local
5301 * jumps to the finally code.
5302 */
5303 if (!BackPatch(cx, cg, GOSUBS(stmtInfo), CG_NEXT(cg), JSOP_GOSUB))
5304 return JS_FALSE;
5306 finallyStart = CG_OFFSET(cg);
5308 /* Indicate that we're emitting a subroutine body. */
5309 stmtInfo.type = STMT_SUBROUTINE;
5310 if (!UpdateLineNumberNotes(cx, cg, pn->pn_kid3->pn_pos.begin.lineno))
5311 return JS_FALSE;
5312 if (js_Emit1(cx, cg, JSOP_FINALLY) < 0 ||
5313 !js_EmitTree(cx, cg, pn->pn_kid3) ||
5314 js_Emit1(cx, cg, JSOP_RETSUB) < 0) {
5315 return JS_FALSE;
5316 }
5317 JS_ASSERT(cg->stackDepth == depth);
5318 }
5319 if (!js_PopStatementCG(cx, cg))
5320 return JS_FALSE;
5322 if (js_NewSrcNote(cx, cg, SRC_ENDBRACE) < 0 ||
5323 js_Emit1(cx, cg, JSOP_NOP) < 0) {
5324 return JS_FALSE;
5325 }
5327 /* Fix up the end-of-try/catch jumps to come here. */
5328 if (!BackPatch(cx, cg, catchJump, CG_NEXT(cg), JSOP_GOTO))
5329 return JS_FALSE;
5331 /*
5332 * Add the try note last, to let post-order give us the right ordering
5333 * (first to last for a given nesting level, inner to outer by level).
5334 */
5335 if (pn->pn_kid2 &&
5336 !NewTryNote(cx, cg, JSTRY_CATCH, depth, tryStart, tryEnd)) {
5337 return JS_FALSE;
5338 }
5340 /*
5341 * If we've got a finally, mark try+catch region with additional
5342 * trynote to catch exceptions (re)thrown from a catch block or
5343 * for the try{}finally{} case.
5344 */
5345 if (pn->pn_kid3 &&
5346 !NewTryNote(cx, cg, JSTRY_FINALLY, depth, tryStart, finallyStart)) {
5347 return JS_FALSE;
5348 }
5349 break;
5350 }
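/*
 * Example trynote layout (assumed from the emission above): for
 * |try {A} catch (e) {B} finally {C}| we record JSTRY_CATCH over
 * [tryStart, tryEnd) and JSTRY_FINALLY over [tryStart, finallyStart),
 * so the interpreter finds the catch first and the finally second,
 * matching the inner-to-outer unwind order.
 */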
5352 case TOK_CATCH:
5353 {
5354 ptrdiff_t catchStart, guardJump;
5355 JSObject *blockObj;
5357 /*
5358 * Morph STMT_BLOCK to STMT_CATCH, note the block entry code offset,
5359 * and save the block object atom.
5360 */
5361 stmt = cg->topStmt;
5362 JS_ASSERT(stmt->type == STMT_BLOCK && (stmt->flags & SIF_SCOPE));
5363 stmt->type = STMT_CATCH;
5364 catchStart = stmt->update;
5365 blockObj = stmt->blockObj;
5367 /* Go up one statement info record to the TRY or FINALLY record. */
5368 stmt = stmt->down;
5369 JS_ASSERT(stmt->type == STMT_TRY || stmt->type == STMT_FINALLY);
5371 /* Pick up the pending exception and bind it to the catch variable. */
5372 if (js_Emit1(cx, cg, JSOP_EXCEPTION) < 0)
5373 return JS_FALSE;
5375 /*
5376 * Dup the exception object if there is a guard, so that we can
5377 * rethrow it or reuse it in later catch blocks.
5378 */
5379 if (pn->pn_kid2 && js_Emit1(cx, cg, JSOP_DUP) < 0)
5380 return JS_FALSE;
5382 pn2 = pn->pn_kid1;
5383 switch (pn2->pn_type) {
5384 #if JS_HAS_DESTRUCTURING
5385 case TOK_RB:
5386 case TOK_RC:
5387 if (!EmitDestructuringOps(cx, cg, JSOP_NOP, pn2))
5388 return JS_FALSE;
5389 if (js_Emit1(cx, cg, JSOP_POP) < 0)
5390 return JS_FALSE;
5391 break;
5392 #endif
5394 case TOK_NAME:
5395 /* Inline and specialize BindNameToSlot for pn2. */
5396 JS_ASSERT(pn2->pn_cookie != FREE_UPVAR_COOKIE);
5397 EMIT_UINT16_IMM_OP(JSOP_SETLOCALPOP, pn2->pn_cookie);
5398 break;
5400 default:
5401 JS_ASSERT(0);
5402 }
5404 /* Emit the guard expression, if there is one. */
5405 if (pn->pn_kid2) {
5406 if (!js_EmitTree(cx, cg, pn->pn_kid2))
5407 return JS_FALSE;
5408 if (!js_SetSrcNoteOffset(cx, cg, CATCHNOTE(*stmt), 0,
5409 CG_OFFSET(cg) - catchStart)) {
5410 return JS_FALSE;
5411 }
5412 /* ifeq <next block> */
5413 guardJump = EmitJump(cx, cg, JSOP_IFEQ, 0);
5414 if (guardJump < 0)
5415 return JS_FALSE;
5416 GUARDJUMP(*stmt) = guardJump;
5418 /* Pop duplicated exception object as we no longer need it. */
5419 if (js_Emit1(cx, cg, JSOP_POP) < 0)
5420 return JS_FALSE;
5421 }
5423 /* Emit the catch body. */
5424 if (!js_EmitTree(cx, cg, pn->pn_kid3))
5425 return JS_FALSE;
5427 /*
5428 * Annotate the JSOP_LEAVEBLOCK that will be emitted as we unwind via
5429 * our TOK_LEXICALSCOPE parent, so the decompiler knows to pop.
5430 */
5431 off = cg->stackDepth;
5432 if (js_NewSrcNote2(cx, cg, SRC_CATCH, off) < 0)
5433 return JS_FALSE;
5434 break;
5435 }
5437 case TOK_VAR:
5438 if (!EmitVariables(cx, cg, pn, JS_FALSE, &noteIndex))
5439 return JS_FALSE;
5440 break;
5442 case TOK_RETURN:
5443 /* Push a return value */
5444 pn2 = pn->pn_kid;
5445 if (pn2) {
5446 if (!js_EmitTree(cx, cg, pn2))
5447 return JS_FALSE;
5448 } else {
5449 if (js_Emit1(cx, cg, JSOP_PUSH) < 0)
5450 return JS_FALSE;
5451 }
5453 /*
5454 * EmitNonLocalJumpFixup may add fixup bytecode to close open try
5455 * blocks having finally clauses and to exit intermingled let blocks.
5456 * We can't simply transfer control flow to our caller in that case,
5457 * because we must gosub to those finally clauses from inner to outer,
5458 * with the correct stack pointer (i.e., after popping any with,
5459 * for/in, etc., slots nested inside the finally's try).
5460 *
5461 * In this case we mutate JSOP_RETURN into JSOP_SETRVAL and add an
5462 * extra JSOP_RETRVAL after the fixups.
5463 */
5464 top = CG_OFFSET(cg);
5465 if (js_Emit1(cx, cg, JSOP_RETURN) < 0)
5466 return JS_FALSE;
5467 if (!EmitNonLocalJumpFixup(cx, cg, NULL))
5468 return JS_FALSE;
5469 if (top + JSOP_RETURN_LENGTH != CG_OFFSET(cg)) {
5470 CG_BASE(cg)[top] = JSOP_SETRVAL;
5471 if (js_Emit1(cx, cg, JSOP_RETRVAL) < 0)
5472 return JS_FALSE;
5473 }
5474 break;
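/*
 * Sketch of the rewrite described above: for |try { return v; } finally
 * { f(); }| the JSOP_RETURN first emitted at top is followed by fixup
 * code, so the opcode at top is overwritten with JSOP_SETRVAL and a
 * JSOP_RETRVAL is appended; the saved rval is returned only after the
 * gosub to the finally block has run.
 */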
5476 #if JS_HAS_GENERATORS
5477 case TOK_YIELD:
5478 if (!cg->inFunction()) {
5479 ReportCompileErrorNumber(cx, CG_TS(cg), pn, JSREPORT_ERROR,
5480 JSMSG_BAD_RETURN_OR_YIELD,
5481 js_yield_str);
5482 return JS_FALSE;
5483 }
5484 if (pn->pn_kid) {
5485 if (!js_EmitTree(cx, cg, pn->pn_kid))
5486 return JS_FALSE;
5487 } else {
5488 if (js_Emit1(cx, cg, JSOP_PUSH) < 0)
5489 return JS_FALSE;
5490 }
5491 if (pn->pn_hidden && js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
5492 return JS_FALSE;
5493 if (js_Emit1(cx, cg, JSOP_YIELD) < 0)
5494 return JS_FALSE;
5495 break;
5496 #endif
5498 case TOK_LC:
5499 {
5500 #if JS_HAS_XML_SUPPORT
5501 if (pn->pn_arity == PN_UNARY) {
5502 if (!js_EmitTree(cx, cg, pn->pn_kid))
5503 return JS_FALSE;
5504 if (js_Emit1(cx, cg, PN_OP(pn)) < 0)
5505 return JS_FALSE;
5506 break;
5507 }
5508 #endif
5510 JS_ASSERT(pn->pn_arity == PN_LIST);
5512 noteIndex = -1;
5513 tmp = CG_OFFSET(cg);
5514 if (pn->pn_xflags & PNX_NEEDBRACES) {
5515 noteIndex = js_NewSrcNote2(cx, cg, SRC_BRACE, 0);
5516 if (noteIndex < 0 || js_Emit1(cx, cg, JSOP_NOP) < 0)
5517 return JS_FALSE;
5518 }
5520 js_PushStatement(cg, &stmtInfo, STMT_BLOCK, top);
5522 JSParseNode *pnchild = pn->pn_head;
5523 if (pn->pn_xflags & PNX_FUNCDEFS) {
5524 /*
5525 * This block contains top-level function definitions. To ensure
5526 * that we emit the bytecode defining them before the rest of code
5527 * in the block, we use a separate pass over functions. During the
5528 * main pass later the emitter will add JSOP_NOP with source notes
5529 * for each function to preserve the original function's position
5530 * when decompiling.
5531 *
5532 * Currently this is used only for functions, as compile-as-we-go
5533 * mode for scripts does not allow separate emitter passes.
5534 */
5535 JS_ASSERT(cg->inFunction());
5536 if (pn->pn_xflags & PNX_DESTRUCT) {
5537 /*
5538 * Assign the destructuring arguments before defining any
5539 * functions, see bug 419662.
5540 */
5541 JS_ASSERT(pnchild->pn_type == TOK_SEMI);
5542 JS_ASSERT(pnchild->pn_kid->pn_type == TOK_COMMA);
5543 if (!js_EmitTree(cx, cg, pnchild))
5544 return JS_FALSE;
5545 pnchild = pnchild->pn_next;
5546 }
5548 for (pn2 = pnchild; pn2; pn2 = pn2->pn_next) {
5549 if (pn2->pn_type == TOK_FUNCTION) {
5550 if (pn2->pn_op == JSOP_NOP) {
5551 if (!js_EmitTree(cx, cg, pn2))
5552 return JS_FALSE;
5553 } else {
5554 /*
5555 * JSOP_DEFFUN in a top-level block with function
5556 * definitions appears, for example, when "if (true)"
5557 * is optimized away from "if (true) function x() {}".
5558 * See bug 428424.
5559 */
5560 JS_ASSERT(pn2->pn_op == JSOP_DEFFUN);
5561 }
5562 }
5563 }
5564 }
5565 for (pn2 = pnchild; pn2; pn2 = pn2->pn_next) {
5566 if (!js_EmitTree(cx, cg, pn2))
5567 return JS_FALSE;
5568 }
5570 if (noteIndex >= 0 &&
5571 !js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0,
5572 CG_OFFSET(cg) - tmp)) {
5573 return JS_FALSE;
5574 }
5576 ok = js_PopStatementCG(cx, cg);
5577 break;
5578 }
5580 case TOK_SEQ:
5581 JS_ASSERT(pn->pn_arity == PN_LIST);
5582 js_PushStatement(cg, &stmtInfo, STMT_SEQ, top);
5583 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
5584 if (!js_EmitTree(cx, cg, pn2))
5585 return JS_FALSE;
5586 }
5587 ok = js_PopStatementCG(cx, cg);
5588 break;
5590 case TOK_SEMI:
5591 pn2 = pn->pn_kid;
5592 if (pn2) {
5593 /*
5594 * Top-level or called-from-a-native JS_Execute/EvaluateScript,
5595 * debugger, and eval frames may need the value of the ultimate
5596 * expression statement as the script's result, despite the fact
5597 * that it appears useless to the compiler.
5598 *
5599 * API users may also set the JSOPTION_NO_SCRIPT_RVAL option when
5600 * calling JS_Compile* to suppress JSOP_POPV.
5601 */
5602 useful = wantval = !(cg->flags & (TCF_IN_FUNCTION | TCF_NO_SCRIPT_RVAL));
5603 if (!useful) {
5604 if (!CheckSideEffects(cx, cg, pn2, &useful))
5605 return JS_FALSE;
5606 }
5608 /*
5609 * Don't eliminate apparently useless expressions if they are
5610 * labeled expression statements. The tc->topStmt->update test
5611 * catches the case where we are nesting in js_EmitTree for a
5612 * labeled compound statement.
5613 */
5614 if (!useful &&
5615 (!cg->topStmt ||
5616 cg->topStmt->type != STMT_LABEL ||
5617 cg->topStmt->update < CG_OFFSET(cg))) {
5618 CG_CURRENT_LINE(cg) = pn2->pn_pos.begin.lineno;
5619 if (!ReportCompileErrorNumber(cx, CG_TS(cg), pn2,
5620 JSREPORT_WARNING | JSREPORT_STRICT,
5621 JSMSG_USELESS_EXPR)) {
5622 return JS_FALSE;
5623 }
5624 } else {
5625 op = wantval ? JSOP_POPV : JSOP_POP;
5626 #if JS_HAS_DESTRUCTURING
5627 if (!wantval &&
5628 pn2->pn_type == TOK_ASSIGN &&
5629 !MaybeEmitGroupAssignment(cx, cg, op, pn2, &op)) {
5630 return JS_FALSE;
5632 #endif
5633 if (op != JSOP_NOP) {
5634 /*
5635 * Specialize JSOP_SETPROP to JSOP_SETMETHOD to defer or
5636 * avoid null closure cloning. Do this only for assignment
5637 * statements that are not completion values wanted by a
5638 * script evaluator, to ensure that the joined function
5639 * can't escape directly.
5640 */
5641 if (!wantval &&
5642 PN_TYPE(pn2) == TOK_ASSIGN &&
5643 PN_OP(pn2) == JSOP_NOP &&
5644 PN_OP(pn2->pn_left) == JSOP_SETPROP &&
5645 PN_OP(pn2->pn_right) == JSOP_LAMBDA &&
5646 pn2->pn_right->pn_funbox->joinable()) {
5647 pn2->pn_left->pn_op = JSOP_SETMETHOD;
5648 }
5649 if (!js_EmitTree(cx, cg, pn2))
5650 return JS_FALSE;
5651 if (js_Emit1(cx, cg, op) < 0)
5652 return JS_FALSE;
5653 }
5654 }
5655 }
5656 break;
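/*
 * Behavioral example for the useless-expression check above (assumed):
 * inside a function, a statement like |true;| has no side effects and
 * its value is not wanted, so it draws the JSMSG_USELESS_EXPR strict
 * warning rather than emitting dead code; at the top level wantval is
 * set and the value is kept via JSOP_POPV as the script's result.
 */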
5658 case TOK_COLON:
5659 /* Emit an annotated nop so we know to decompile a label. */
5660 atom = pn->pn_atom;
5661 ale = cg->atomList.add(cg->parser, atom);
5662 if (!ale)
5663 return JS_FALSE;
5664 pn2 = pn->expr();
5665 noteType = (pn2->pn_type == TOK_LC ||
5666 (pn2->pn_type == TOK_LEXICALSCOPE &&
5667 pn2->expr()->pn_type == TOK_LC))
5668 ? SRC_LABELBRACE
5669 : SRC_LABEL;
5670 noteIndex = js_NewSrcNote2(cx, cg, noteType,
5671 (ptrdiff_t) ALE_INDEX(ale));
5672 if (noteIndex < 0 ||
5673 js_Emit1(cx, cg, JSOP_NOP) < 0) {
5674 return JS_FALSE;
5675 }
5677 /* Emit code for the labeled statement. */
5678 js_PushStatement(cg, &stmtInfo, STMT_LABEL, CG_OFFSET(cg));
5679 stmtInfo.label = atom;
5680 if (!js_EmitTree(cx, cg, pn2))
5681 return JS_FALSE;
5682 if (!js_PopStatementCG(cx, cg))
5683 return JS_FALSE;
5685 /* If the statement was compound, emit a note for the end brace. */
5686 if (noteType == SRC_LABELBRACE) {
5687 if (js_NewSrcNote(cx, cg, SRC_ENDBRACE) < 0 ||
5688 js_Emit1(cx, cg, JSOP_NOP) < 0) {
5689 return JS_FALSE;
5690 }
5691 }
5692 break;
5694 case TOK_COMMA:
5695 /*
5696 * Emit SRC_PCDELTA notes on each JSOP_POP between comma operands.
5697 * These notes help the decompiler bracket the bytecodes generated
5698 * from each sub-expression that follows a comma.
5699 */
5700 off = noteIndex = -1;
5701 for (pn2 = pn->pn_head; ; pn2 = pn2->pn_next) {
5702 if (!js_EmitTree(cx, cg, pn2))
5703 return JS_FALSE;
5704 tmp = CG_OFFSET(cg);
5705 if (noteIndex >= 0) {
5706 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, tmp-off))
5707 return JS_FALSE;
5708 }
5709 if (!pn2->pn_next)
5710 break;
5711 off = tmp;
5712 noteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
5713 if (noteIndex < 0 ||
5714 js_Emit1(cx, cg, JSOP_POP) < 0) {
5715 return JS_FALSE;
5716 }
5717 }
5718 break;
5720 case TOK_ASSIGN:
5721 /*
5722 * Check left operand type and generate specialized code for it.
5723 * Specialize to avoid ECMA "reference type" values on the operand
5724 * stack, which impose pervasive runtime "GetValue" costs.
5725 */
5726 pn2 = pn->pn_left;
5727 atomIndex = (jsatomid) -1; /* quell GCC overwarning */
5728 switch (PN_TYPE(pn2)) {
5729 case TOK_NAME:
5730 if (!BindNameToSlot(cx, cg, pn2))
5731 return JS_FALSE;
5732 if (pn2->pn_cookie != FREE_UPVAR_COOKIE) {
5733 atomIndex = (jsatomid) pn2->pn_cookie;
5734 } else {
5735 ale = cg->atomList.add(cg->parser, pn2->pn_atom);
5736 if (!ale)
5737 return JS_FALSE;
5738 atomIndex = ALE_INDEX(ale);
5739 if (!pn2->isConst())
5740 EMIT_INDEX_OP(JSOP_BINDNAME, atomIndex);
5741 }
5742 break;
5743 case TOK_DOT:
5744 if (!js_EmitTree(cx, cg, pn2->expr()))
5745 return JS_FALSE;
5746 ale = cg->atomList.add(cg->parser, pn2->pn_atom);
5747 if (!ale)
5748 return JS_FALSE;
5749 atomIndex = ALE_INDEX(ale);
5750 break;
5751 case TOK_LB:
5752 JS_ASSERT(pn2->pn_arity == PN_BINARY);
5753 if (!js_EmitTree(cx, cg, pn2->pn_left))
5754 return JS_FALSE;
5755 if (!js_EmitTree(cx, cg, pn2->pn_right))
5756 return JS_FALSE;
5757 break;
5758 #if JS_HAS_DESTRUCTURING
5759 case TOK_RB:
5760 case TOK_RC:
5761 break;
5762 #endif
5763 case TOK_LP:
5764 if (!js_EmitTree(cx, cg, pn2))
5765 return JS_FALSE;
5766 break;
5767 #if JS_HAS_XML_SUPPORT
5768 case TOK_UNARYOP:
5769 JS_ASSERT(pn2->pn_op == JSOP_SETXMLNAME);
5770 if (!js_EmitTree(cx, cg, pn2->pn_kid))
5771 return JS_FALSE;
5772 if (js_Emit1(cx, cg, JSOP_BINDXMLNAME) < 0)
5773 return JS_FALSE;
5774 break;
5775 #endif
5776 default:
5777 JS_ASSERT(0);
5778 }
5780 op = PN_OP(pn);
5781 if (op != JSOP_NOP) {
5782 switch (pn2->pn_type) {
5783 case TOK_NAME:
5784 if (pn2->isConst()) {
5785 if (PN_OP(pn2) == JSOP_CALLEE) {
5786 if (js_Emit1(cx, cg, JSOP_CALLEE) < 0)
5787 return JS_FALSE;
5788 } else {
5789 EMIT_INDEX_OP(PN_OP(pn2), atomIndex);
5790 }
5791 } else if (PN_OP(pn2) == JSOP_SETNAME) {
5792 if (js_Emit1(cx, cg, JSOP_DUP) < 0)
5793 return JS_FALSE;
5794 EMIT_INDEX_OP(JSOP_GETXPROP, atomIndex);
5795 } else {
5796 JS_ASSERT(PN_OP(pn2) != JSOP_GETUPVAR);
5797 EMIT_UINT16_IMM_OP((PN_OP(pn2) == JSOP_SETGVAR)
5798 ? JSOP_GETGVAR
5799 : (PN_OP(pn2) == JSOP_SETARG)
5800 ? JSOP_GETARG
5801 : JSOP_GETLOCAL,
5802 atomIndex);
5803 }
5804 break;
5805 case TOK_DOT:
5806 if (js_Emit1(cx, cg, JSOP_DUP) < 0)
5807 return JS_FALSE;
5808 if (pn2->pn_atom == cx->runtime->atomState.lengthAtom) {
5809 if (js_Emit1(cx, cg, JSOP_LENGTH) < 0)
5810 return JS_FALSE;
5811 } else if (pn2->pn_atom == cx->runtime->atomState.protoAtom) {
5812 if (!EmitIndexOp(cx, JSOP_QNAMEPART, atomIndex, cg))
5813 return JS_FALSE;
5814 if (js_Emit1(cx, cg, JSOP_GETELEM) < 0)
5815 return JS_FALSE;
5816 } else {
5817 EMIT_INDEX_OP(JSOP_GETPROP, atomIndex);
5819 break;
5820 case TOK_LB:
5821 case TOK_LP:
5822 #if JS_HAS_XML_SUPPORT
5823 case TOK_UNARYOP:
5824 #endif
5825 if (js_Emit1(cx, cg, JSOP_DUP2) < 0)
5826 return JS_FALSE;
5827 if (js_Emit1(cx, cg, JSOP_GETELEM) < 0)
5828 return JS_FALSE;
5829 break;
5830 default:;
5834 /* Now emit the right operand (it may affect the namespace). */
5835 if (!js_EmitTree(cx, cg, pn->pn_right))
5836 return JS_FALSE;
5838 /* If += etc., emit the binary operator with a decompiler note. */
5839 if (op != JSOP_NOP) {
5840 /*
5841 * Take care to avoid SRC_ASSIGNOP if the left-hand side is a const
5842 * declared in the current compilation unit, as in this case (just
5843 * a bit further below) we will avoid emitting the assignment op.
5844 */
5845 if (pn2->pn_type != TOK_NAME || !pn2->isConst()) {
5846 if (js_NewSrcNote(cx, cg, SRC_ASSIGNOP) < 0)
5847 return JS_FALSE;
5849 if (js_Emit1(cx, cg, op) < 0)
5850 return JS_FALSE;
5853 /* Left parts such as a.b.c and a[b].c need a decompiler note. */
5854 if (pn2->pn_type != TOK_NAME &&
5855 #if JS_HAS_DESTRUCTURING
5856 pn2->pn_type != TOK_RB &&
5857 pn2->pn_type != TOK_RC &&
5858 #endif
5859 js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0) {
5860 return JS_FALSE;
5863 /* Finally, emit the specialized assignment bytecode. */
5864 switch (pn2->pn_type) {
5865 case TOK_NAME:
5866 if (pn2->isConst())
5867 break;
5868 /* FALL THROUGH */
5869 case TOK_DOT:
5870 EMIT_INDEX_OP(PN_OP(pn2), atomIndex);
5871 break;
5872 case TOK_LB:
5873 case TOK_LP:
5874 if (js_Emit1(cx, cg, JSOP_SETELEM) < 0)
5875 return JS_FALSE;
5876 break;
5877 #if JS_HAS_DESTRUCTURING
5878 case TOK_RB:
5879 case TOK_RC:
5880 if (!EmitDestructuringOps(cx, cg, JSOP_SETNAME, pn2))
5881 return JS_FALSE;
5882 break;
5883 #endif
5884 #if JS_HAS_XML_SUPPORT
5885 case TOK_UNARYOP:
5886 if (js_Emit1(cx, cg, JSOP_SETXMLNAME) < 0)
5887 return JS_FALSE;
5888 break;
5889 #endif
5890 default:
5891 JS_ASSERT(0);
5892 }
5893 break;
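/*
 * Worked example (schematic): for a compound property assignment such as
 * |a.b += c|, the TOK_DOT paths above combine to emit
 *
 *   <a>                     evaluate the base object
 *   JSOP_DUP                keep the base for the final store
 *   JSOP_GETPROP "b"        fetch the old value
 *   <c>                     evaluate the right-hand side
 *   JSOP_ADD                annotated with SRC_ASSIGNOP
 *   JSOP_SETPROP "b"        annotated with SRC_PCBASE back to <a>
 */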
5895 case TOK_HOOK:
5896 /* Emit the condition, then branch if false to the else part. */
5897 if (!js_EmitTree(cx, cg, pn->pn_kid1))
5898 return JS_FALSE;
5899 noteIndex = js_NewSrcNote(cx, cg, SRC_COND);
5900 if (noteIndex < 0)
5901 return JS_FALSE;
5902 beq = EmitJump(cx, cg, JSOP_IFEQ, 0);
5903 if (beq < 0 || !js_EmitTree(cx, cg, pn->pn_kid2))
5904 return JS_FALSE;
5906 /* Jump around else, fixup the branch, emit else, fixup jump. */
5907 jmp = EmitJump(cx, cg, JSOP_GOTO, 0);
5908 if (jmp < 0)
5909 return JS_FALSE;
5910 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, beq);
5912 /*
5913 * Because each branch pushes a single value, but our stack budgeting
5914 * analysis ignores branches, we now have to adjust cg->stackDepth to
5915 * ignore the value pushed by the first branch. Execution will follow
5916 * only one path, so we must decrement cg->stackDepth.
5917 *
5918 * Failing to do this will foil code, such as the try/catch/finally
5919 * exception handling code generator, that samples cg->stackDepth for
5920 * use at runtime (JSOP_SETSP), or in let expression and block code
5921 * generation, which must use the stack depth to compute local stack
5922 * indexes correctly.
5923 */
5924 JS_ASSERT(cg->stackDepth > 0);
5925 cg->stackDepth--;
5926 if (!js_EmitTree(cx, cg, pn->pn_kid3))
5927 return JS_FALSE;
5928 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
5929 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, jmp - beq))
5930 return JS_FALSE;
5931 break;
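/*
 * Worked example (schematic): |t ? x : y| becomes
 *
 *   <t>
 *   JSOP_IFEQ  --> L1       SRC_COND note, offset patched to jmp - beq
 *   <x>
 *   JSOP_GOTO  --> L2
 * L1: <y>                   emitted with cg->stackDepth decremented,
 * L2:                       since only one branch's value survives
 */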
5933 case TOK_OR:
5934 case TOK_AND:
5935 /*
5936 * JSOP_OR converts the operand on the stack to boolean, and if true,
5937 * leaves the original operand value on the stack and jumps; otherwise
5938 * it pops and falls into the next bytecode, which evaluates the right
5939 * operand. The jump goes around the right operand evaluation.
5940 *
5941 * JSOP_AND converts the operand on the stack to boolean, and if false,
5942 * leaves the original operand value on the stack and jumps; otherwise
5943 * it pops and falls into the right operand's bytecode.
5944 */
5945 if (pn->pn_arity == PN_BINARY) {
5946 if (!js_EmitTree(cx, cg, pn->pn_left))
5947 return JS_FALSE;
5948 top = EmitJump(cx, cg, JSOP_BACKPATCH_POP, 0);
5949 if (top < 0)
5950 return JS_FALSE;
5951 if (!js_EmitTree(cx, cg, pn->pn_right))
5952 return JS_FALSE;
5953 off = CG_OFFSET(cg);
5954 pc = CG_CODE(cg, top);
5955 CHECK_AND_SET_JUMP_OFFSET(cx, cg, pc, off - top);
5956 *pc = pn->pn_op;
5957 } else {
5958 JS_ASSERT(pn->pn_arity == PN_LIST);
5959 JS_ASSERT(pn->pn_head->pn_next->pn_next);
5961 /* Left-associative operator chain: avoid too much recursion. */
5962 pn2 = pn->pn_head;
5963 if (!js_EmitTree(cx, cg, pn2))
5964 return JS_FALSE;
5965 top = EmitJump(cx, cg, JSOP_BACKPATCH_POP, 0);
5966 if (top < 0)
5967 return JS_FALSE;
5969 /* Emit nodes between the head and the tail. */
5970 jmp = top;
5971 while ((pn2 = pn2->pn_next)->pn_next) {
5972 if (!js_EmitTree(cx, cg, pn2))
5973 return JS_FALSE;
5974 off = EmitJump(cx, cg, JSOP_BACKPATCH_POP, 0);
5975 if (off < 0)
5976 return JS_FALSE;
5977 if (!SetBackPatchDelta(cx, cg, CG_CODE(cg, jmp), off - jmp))
5978 return JS_FALSE;
5979 jmp = off;
5982 if (!js_EmitTree(cx, cg, pn2))
5983 return JS_FALSE;
5985 pn2 = pn->pn_head;
5986 off = CG_OFFSET(cg);
5987 do {
5988 pc = CG_CODE(cg, top);
5989 tmp = GetJumpOffset(cg, pc);
5990 CHECK_AND_SET_JUMP_OFFSET(cx, cg, pc, off - top);
5991 *pc = pn->pn_op;
5992 top += tmp;
5993 } while ((pn2 = pn2->pn_next)->pn_next);
5994 }
5995 break;
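/*
 * Worked example (schematic): the list case compiles |a && b && c| to
 *
 *   <a>
 *   JSOP_BACKPATCH_POP      placeholder, chained to the next one
 *   <b>
 *   JSOP_BACKPATCH_POP      placeholder, end of the chain
 *   <c>
 *
 * The final do/while loop then walks the chain via the stored deltas,
 * rewriting each placeholder to JSOP_AND with its jump targeting the
 * offset just past <c>.
 */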
5997 case TOK_PLUS:
5998 /* For TCF_IN_FUNCTION test, see TOK_RB concerning JSOP_NEWARRAY. */
5999 if (pn->pn_arity == PN_LIST && pn->pn_count < JS_BIT(16) &&
6000 cg->inFunction()) {
6001 /* Emit up to the first string literal conventionally. */
6002 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
6003 if (pn2->pn_type == TOK_STRING)
6004 break;
6005 if (!js_EmitTree(cx, cg, pn2))
6006 return JS_FALSE;
6007 if (pn2 != pn->pn_head && js_Emit1(cx, cg, JSOP_ADD) < 0)
6008 return JS_FALSE;
6011 if (!pn2)
6012 break;
6014 /*
6015 * Having seen a string literal, we know statically that the rest
6016 * of the additions are string concatenation, so we emit them as a
6017 * single concatn. First, do string conversion on the result of the
6018 * preceding zero or more additions so that any side effects of
6019 * string conversion occur before the next operand begins.
6020 */
6021 if (pn2 == pn->pn_head) {
6022 index = 0;
6023 } else {
6024 if (js_Emit1(cx, cg, JSOP_OBJTOSTR) < 0)
6025 return JS_FALSE;
6026 index = 1;
6029 for (; pn2; pn2 = pn2->pn_next, index++) {
6030 if (!js_EmitTree(cx, cg, pn2))
6031 return JS_FALSE;
6032 if (!pn2->isLiteral() && js_Emit1(cx, cg, JSOP_OBJTOSTR) < 0)
6033 return JS_FALSE;
6036 EMIT_UINT16_IMM_OP(JSOP_CONCATN, index);
6037 break;
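/*
 * Worked example (schematic): in a function, |x + "s" + y| takes the
 * JSOP_CONCATN path above and emits
 *
 *   <x>
 *   JSOP_OBJTOSTR           stringify the non-literal prefix eagerly
 *   JSOP_STRING "s"
 *   <y>
 *   JSOP_OBJTOSTR           y is not a literal either
 *   JSOP_CONCATN 3          concatenate all three operands at once
 */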
6039 case TOK_BITOR:
6040 case TOK_BITXOR:
6041 case TOK_BITAND:
6042 case TOK_EQOP:
6043 case TOK_RELOP:
6044 case TOK_IN:
6045 case TOK_INSTANCEOF:
6046 case TOK_SHOP:
6047 case TOK_MINUS:
6048 case TOK_STAR:
6049 case TOK_DIVOP:
6050 if (pn->pn_arity == PN_LIST) {
6051 /* Left-associative operator chain: avoid too much recursion. */
6052 pn2 = pn->pn_head;
6053 if (!js_EmitTree(cx, cg, pn2))
6054 return JS_FALSE;
6055 op = PN_OP(pn);
6056 while ((pn2 = pn2->pn_next) != NULL) {
6057 if (!js_EmitTree(cx, cg, pn2))
6058 return JS_FALSE;
6059 if (js_Emit1(cx, cg, op) < 0)
6060 return JS_FALSE;
6062 } else {
6063 #if JS_HAS_XML_SUPPORT
6064 uintN oldflags;
6066 case TOK_DBLCOLON:
6067 if (pn->pn_arity == PN_NAME) {
6068 if (!js_EmitTree(cx, cg, pn->expr()))
6069 return JS_FALSE;
6070 if (!EmitAtomOp(cx, pn, PN_OP(pn), cg))
6071 return JS_FALSE;
6072 break;
6073 }
6075 /*
6076 * Binary :: has a right operand that brackets arbitrary code,
6077 * possibly including a let (a = b) ... expression. We must clear
6078 * TCF_IN_FOR_INIT to avoid mis-compiling such beasts.
6079 */
6080 oldflags = cg->flags;
6081 cg->flags &= ~TCF_IN_FOR_INIT;
6082 #endif
6084 /* Binary operators that evaluate both operands unconditionally. */
6085 if (!js_EmitTree(cx, cg, pn->pn_left))
6086 return JS_FALSE;
6087 if (!js_EmitTree(cx, cg, pn->pn_right))
6088 return JS_FALSE;
6089 #if JS_HAS_XML_SUPPORT
6090 cg->flags |= oldflags & TCF_IN_FOR_INIT;
6091 #endif
6092 if (js_Emit1(cx, cg, PN_OP(pn)) < 0)
6093 return JS_FALSE;
6095 break;
6097 case TOK_THROW:
6098 #if JS_HAS_XML_SUPPORT
6099 case TOK_AT:
6100 case TOK_DEFAULT:
6101 JS_ASSERT(pn->pn_arity == PN_UNARY);
6102 /* FALL THROUGH */
6103 #endif
6104 case TOK_UNARYOP:
6106 uintN oldflags;
6108 /* Unary op, including unary +/-. */
6109 op = PN_OP(pn);
6110 #if JS_HAS_XML_SUPPORT
6111 if (op == JSOP_XMLNAME) {
6112 if (!EmitXMLName(cx, pn, op, cg))
6113 return JS_FALSE;
6114 break;
6116 #endif
6117 pn2 = pn->pn_kid;
6119 if (op == JSOP_TYPEOF && pn2->pn_type != TOK_NAME)
6120 op = JSOP_TYPEOFEXPR;
6122 oldflags = cg->flags;
6123 cg->flags &= ~TCF_IN_FOR_INIT;
6124 if (!js_EmitTree(cx, cg, pn2))
6125 return JS_FALSE;
6126 cg->flags |= oldflags & TCF_IN_FOR_INIT;
6127 if (js_Emit1(cx, cg, op) < 0)
6128 return JS_FALSE;
6129 break;
6132 case TOK_INC:
6133 case TOK_DEC:
6134 /* Emit lvalue-specialized code for ++/-- operators. */
6135 pn2 = pn->pn_kid;
6136 JS_ASSERT(pn2->pn_type != TOK_RP);
6137 op = PN_OP(pn);
6138 switch (pn2->pn_type) {
6139 default:
6140 JS_ASSERT(pn2->pn_type == TOK_NAME);
6141 pn2->pn_op = op;
6142 if (!BindNameToSlot(cx, cg, pn2))
6143 return JS_FALSE;
6144 op = PN_OP(pn2);
6145 if (op == JSOP_CALLEE) {
6146 if (js_Emit1(cx, cg, op) < 0)
6147 return JS_FALSE;
6148 } else if (pn2->pn_cookie != FREE_UPVAR_COOKIE) {
6149 atomIndex = (jsatomid) pn2->pn_cookie;
6150 EMIT_UINT16_IMM_OP(op, atomIndex);
6151 } else {
6152 JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
6153 if (!EmitAtomOp(cx, pn2, op, cg))
6154 return JS_FALSE;
6155 break;
6157 if (pn2->isConst()) {
6158 if (js_Emit1(cx, cg, JSOP_POS) < 0)
6159 return JS_FALSE;
6160 op = PN_OP(pn);
6161 if (!(js_CodeSpec[op].format & JOF_POST)) {
6162 if (js_Emit1(cx, cg, JSOP_ONE) < 0)
6163 return JS_FALSE;
6164 op = (js_CodeSpec[op].format & JOF_INC) ? JSOP_ADD : JSOP_SUB;
6165 if (js_Emit1(cx, cg, op) < 0)
6166 return JS_FALSE;
6169 break;
6170 case TOK_DOT:
6171 if (!EmitPropOp(cx, pn2, op, cg, JS_FALSE))
6172 return JS_FALSE;
6173 break;
6174 case TOK_LB:
6175 if (!EmitElemOp(cx, pn2, op, cg))
6176 return JS_FALSE;
6177 break;
6178 case TOK_LP:
6179 if (!js_EmitTree(cx, cg, pn2))
6180 return JS_FALSE;
6181 if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
6182 CG_OFFSET(cg) - pn2->pn_offset) < 0) {
6183 return JS_FALSE;
6185 if (js_Emit1(cx, cg, op) < 0)
6186 return JS_FALSE;
6187 break;
6188 #if JS_HAS_XML_SUPPORT
6189 case TOK_UNARYOP:
6190 JS_ASSERT(pn2->pn_op == JSOP_SETXMLNAME);
6191 if (!js_EmitTree(cx, cg, pn2->pn_kid))
6192 return JS_FALSE;
6193 if (js_Emit1(cx, cg, JSOP_BINDXMLNAME) < 0)
6194 return JS_FALSE;
6195 if (js_Emit1(cx, cg, op) < 0)
6196 return JS_FALSE;
6197 break;
6198 #endif
6199 }
6200 break;
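/*
 * Worked example (schematic): when |x| is a const binding, |++x| cannot
 * store a new value, so after the read-only name fetch the code above
 * appends
 *
 *   JSOP_POS                ToNumber, which ++ must still perform
 *   JSOP_ONE
 *   JSOP_ADD                pre-increment result; post forms (x++) stop
 *                           after JSOP_POS
 */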
6202 case TOK_DELETE:
6203 /*
6204 * Under ECMA 3, deleting a non-reference returns true -- but alas we
6205 * must evaluate the operand if it appears it might have side effects.
6206 */
6207 pn2 = pn->pn_kid;
6208 switch (pn2->pn_type) {
6209 case TOK_NAME:
6210 if (!BindNameToSlot(cx, cg, pn2))
6211 return JS_FALSE;
6212 op = PN_OP(pn2);
6213 if (op == JSOP_FALSE) {
6214 if (js_Emit1(cx, cg, op) < 0)
6215 return JS_FALSE;
6216 } else {
6217 if (!EmitAtomOp(cx, pn2, op, cg))
6218 return JS_FALSE;
6220 break;
6221 case TOK_DOT:
6222 if (!EmitPropOp(cx, pn2, JSOP_DELPROP, cg, JS_FALSE))
6223 return JS_FALSE;
6224 break;
6225 #if JS_HAS_XML_SUPPORT
6226 case TOK_DBLDOT:
6227 if (!EmitElemOp(cx, pn2, JSOP_DELDESC, cg))
6228 return JS_FALSE;
6229 break;
6230 #endif
6231 case TOK_LB:
6232 if (!EmitElemOp(cx, pn2, JSOP_DELELEM, cg))
6233 return JS_FALSE;
6234 break;
6235 default:
6236 /*
6237 * If useless, just emit JSOP_TRUE; otherwise convert delete foo()
6238 * to foo(), true (a comma expression, requiring SRC_PCDELTA).
6239 */
6240 useful = JS_FALSE;
6241 if (!CheckSideEffects(cx, cg, pn2, &useful))
6242 return JS_FALSE;
6243 if (!useful) {
6244 off = noteIndex = -1;
6245 } else {
6246 if (pn2->pn_op == JSOP_SETCALL)
6247 pn2->pn_op = JSOP_CALL;
6248 if (!js_EmitTree(cx, cg, pn2))
6249 return JS_FALSE;
6250 off = CG_OFFSET(cg);
6251 noteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
6252 if (noteIndex < 0 || js_Emit1(cx, cg, JSOP_POP) < 0)
6253 return JS_FALSE;
6255 if (js_Emit1(cx, cg, JSOP_TRUE) < 0)
6256 return JS_FALSE;
6257 if (noteIndex >= 0) {
6258 tmp = CG_OFFSET(cg);
6259 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, tmp-off))
6260 return JS_FALSE;
6261 }
6262 }
6263 break;
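/*
 * Worked example (schematic): |delete f()| never yields a reference, so
 * the default case above compiles it like the comma expression
 * |(f(), true)|:
 *
 *   <f()>
 *   JSOP_POP                SRC_PCDELTA note brackets the call
 *   JSOP_TRUE               the result of the delete expression
 */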
6265 #if JS_HAS_XML_SUPPORT
6266 case TOK_FILTER:
6267 if (!js_EmitTree(cx, cg, pn->pn_left))
6268 return JS_FALSE;
6269 jmp = EmitJump(cx, cg, JSOP_FILTER, 0);
6270 if (jmp < 0)
6271 return JS_FALSE;
6272 top = js_Emit1(cx, cg, JSOP_TRACE);
6273 if (top < 0)
6274 return JS_FALSE;
6275 if (!js_EmitTree(cx, cg, pn->pn_right))
6276 return JS_FALSE;
6277 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
6278 if (EmitJump(cx, cg, JSOP_ENDFILTER, top - CG_OFFSET(cg)) < 0)
6279 return JS_FALSE;
6280 break;
6281 #endif
6283 case TOK_DOT:
6284 /*
6285 * Pop a stack operand, convert it to object, get a property named by
6286 * this bytecode's immediate-indexed atom operand, and push its value
6287 * (not a reference to it).
6288 */
6289 ok = EmitPropOp(cx, pn, PN_OP(pn), cg, JS_FALSE);
6290 break;
6292 case TOK_LB:
6293 #if JS_HAS_XML_SUPPORT
6294 case TOK_DBLDOT:
6295 #endif
6296 /*
6297 * Pop two operands, convert the left one to object and the right one
6298 * to property name (atom or tagged int), get the named property, and
6299 * push its value. Set the "obj" register to the result of ToObject
6300 * on the left operand.
6301 */
6302 ok = EmitElemOp(cx, pn, PN_OP(pn), cg);
6303 break;
6305 case TOK_NEW:
6306 case TOK_LP:
6308 bool callop = (PN_TYPE(pn) == TOK_LP);
6310 /*
6311 * Emit callable invocation or operator new (constructor call) code.
6312 * First, emit code for the left operand to evaluate the callable or
6313 * constructable object expression.
6314 *
6315 * For operator new applied to other expressions than E4X ones, we emit
6316 * JSOP_GETPROP instead of JSOP_CALLPROP, etc. This is necessary to
6317 * interpose the lambda-initialized method read barrier -- see the code
6318 * in jsops.cpp for JSOP_LAMBDA followed by JSOP_{SET,INIT}PROP.
6319 *
6320 * Then (or in a call case that has no explicit reference-base object)
6321 * we emit JSOP_NULL as a placeholder local GC root to hold the |this|
6322 * parameter: in the operator new case, the newborn instance; in the
6323 * base-less call case, a cookie meaning "use the global object as the
6324 * |this| value" (or in ES5 strict mode, "use undefined", so we should
6325 * use JSOP_PUSH instead of JSOP_NULL -- see bug 514570).
6326 */
6327 pn2 = pn->pn_head;
6328 switch (pn2->pn_type) {
6329 case TOK_NAME:
6330 if (!EmitNameOp(cx, cg, pn2, callop))
6331 return JS_FALSE;
6332 break;
6333 case TOK_DOT:
6334 if (!EmitPropOp(cx, pn2, PN_OP(pn2), cg, callop))
6335 return JS_FALSE;
6336 break;
6337 case TOK_LB:
6338 JS_ASSERT(pn2->pn_op == JSOP_GETELEM);
6339 if (!EmitElemOp(cx, pn2, callop ? JSOP_CALLELEM : JSOP_GETELEM, cg))
6340 return JS_FALSE;
6341 break;
6342 case TOK_UNARYOP:
6343 #if JS_HAS_XML_SUPPORT
6344 if (pn2->pn_op == JSOP_XMLNAME) {
6345 if (!EmitXMLName(cx, pn2, JSOP_CALLXMLNAME, cg))
6346 return JS_FALSE;
6347 callop = true; /* suppress JSOP_NULL after */
6348 break;
6350 #endif
6351 /* FALL THROUGH */
6352 default:
6354 * Push null as a placeholder for the global object, per ECMA-262
6355 * 11.2.3 step 6.
6357 if (!js_EmitTree(cx, cg, pn2))
6358 return JS_FALSE;
6359 callop = false; /* trigger JSOP_NULL after */
6360 break;
6362 if (!callop && js_Emit1(cx, cg, JSOP_NULL) < 0)
6363 return JS_FALSE;
6365 /* Remember start of callable-object bytecode for decompilation hint. */
6366 off = top;
6368 /*
6369 * Emit code for each argument in order, then emit the JSOP_*CALL or
6370 * JSOP_NEW bytecode with a two-byte immediate telling how many args
6371 * were pushed on the operand stack.
6372 */
6373 uintN oldflags = cg->flags;
6374 cg->flags &= ~TCF_IN_FOR_INIT;
6375 for (pn3 = pn2->pn_next; pn3; pn3 = pn3->pn_next) {
6376 if (!js_EmitTree(cx, cg, pn3))
6377 return JS_FALSE;
6379 cg->flags |= oldflags & TCF_IN_FOR_INIT;
6380 if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - off) < 0)
6381 return JS_FALSE;
6383 argc = pn->pn_count - 1;
6384 if (js_Emit3(cx, cg, PN_OP(pn), ARGC_HI(argc), ARGC_LO(argc)) < 0)
6385 return JS_FALSE;
6386 if (PN_OP(pn) == JSOP_CALL) {
6387 /* Add a trace hint opcode for recursion. */
6388 if (js_Emit1(cx, cg, JSOP_TRACE) < 0)
6389 return JS_FALSE;
6391 if (PN_OP(pn) == JSOP_EVAL)
6392 EMIT_UINT16_IMM_OP(JSOP_LINENO, pn->pn_pos.begin.lineno);
6393 break;
6394 }
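/*
 * Worked example (schematic): a plain call |f(a, b)| where f is a global
 * name typically emits
 *
 *   JSOP_CALLNAME "f"       pushes the callee and its |this|
 *   <a>
 *   <b>
 *   JSOP_CALL 2             SRC_PCBASE note points back at the callee
 *   JSOP_TRACE              recursion trace hint
 *
 * whereas |new f(a, b)| fetches f with JSOP_NAME, pushes JSOP_NULL as the
 * |this| placeholder, and ends with JSOP_NEW 2 (and no trace hint).
 */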
6396 case TOK_LEXICALSCOPE:
6398 JSObjectBox *objbox;
6399 uintN count;
6401 objbox = pn->pn_objbox;
6402 js_PushBlockScope(cg, &stmtInfo, objbox->object, CG_OFFSET(cg));
6404 /*
6405 * If this lexical scope is not for a catch block, let block or let
6406 * expression, or any kind of for loop (where the scope starts in the
6407 * head after the first part if for (;;), else in the body if for-in);
6408 * and if our container is top-level but not a function body, or else
6409 * a block statement; then emit a SRC_BRACE note. All other container
6410 * statements get braces by default from the decompiler.
6411 */
6412 noteIndex = -1;
6413 type = PN_TYPE(pn->expr());
6414 if (type != TOK_CATCH && type != TOK_LET && type != TOK_FOR &&
6415 (!(stmt = stmtInfo.down)
6416 ? !cg->inFunction()
6417 : stmt->type == STMT_BLOCK)) {
6418 #if defined DEBUG_brendan || defined DEBUG_mrbkap
6419 /* There must be no source note already output for the next op. */
6420 JS_ASSERT(CG_NOTE_COUNT(cg) == 0 ||
6421 CG_LAST_NOTE_OFFSET(cg) != CG_OFFSET(cg) ||
6422 !GettableNoteForNextOp(cg));
6423 #endif
6424 noteIndex = js_NewSrcNote2(cx, cg, SRC_BRACE, 0);
6425 if (noteIndex < 0)
6426 return JS_FALSE;
6429 JS_ASSERT(CG_OFFSET(cg) == top);
6430 if (!EmitEnterBlock(cx, pn, cg))
6431 return JS_FALSE;
6433 if (!js_EmitTree(cx, cg, pn->pn_expr))
6434 return JS_FALSE;
6436 op = PN_OP(pn);
6437 if (op == JSOP_LEAVEBLOCKEXPR) {
6438 if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
6439 return JS_FALSE;
6440 } else {
6441 if (noteIndex >= 0 &&
6442 !js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0,
6443 CG_OFFSET(cg) - top)) {
6444 return JS_FALSE;
6448 /* Emit the JSOP_LEAVEBLOCK or JSOP_LEAVEBLOCKEXPR opcode. */
6449 count = OBJ_BLOCK_COUNT(cx, objbox->object);
6450 EMIT_UINT16_IMM_OP(op, count);
6452 ok = js_PopStatementCG(cx, cg);
6453 break;
6454 }
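/*
 * Worked example (schematic): a block such as |{ let u, v; <body> }|
 * compiles to
 *
 *   JSOP_ENTERBLOCK blockobj    reserve the block's two stack slots
 *   <body>
 *   JSOP_LEAVEBLOCK 2           pop the block-local slots
 *
 * with JSOP_LEAVEBLOCKEXPR (plus a SRC_PCBASE note) substituted when this
 * scope is a let expression whose value stays on the stack.
 */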
6456 #if JS_HAS_BLOCK_SCOPE
6457 case TOK_LET:
6458 /* Let statements have their variable declarations on the left. */
6459 if (pn->pn_arity == PN_BINARY) {
6460 pn2 = pn->pn_right;
6461 pn = pn->pn_left;
6462 } else {
6463 pn2 = NULL;
6466 /* Non-null pn2 means that pn is the variable list from a let head. */
6467 JS_ASSERT(pn->pn_arity == PN_LIST);
6468 if (!EmitVariables(cx, cg, pn, pn2 != NULL, &noteIndex))
6469 return JS_FALSE;
6471 /* Thus non-null pn2 is the body of the let block or expression. */
6472 tmp = CG_OFFSET(cg);
6473 if (pn2 && !js_EmitTree(cx, cg, pn2))
6474 return JS_FALSE;
6476 if (noteIndex >= 0 &&
6477 !js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0,
6478 CG_OFFSET(cg) - tmp)) {
6479 return JS_FALSE;
6481 break;
6482 #endif /* JS_HAS_BLOCK_SCOPE */
6484 #if JS_HAS_GENERATORS
6485 case TOK_ARRAYPUSH: {
6486 jsint slot;
6488 /*
6489 * The array object's stack index is in cg->arrayCompDepth. See below
6490 * under the array initialiser code generator for array comprehension
6491 * special casing.
6492 */
6493 if (!js_EmitTree(cx, cg, pn->pn_kid))
6494 return JS_FALSE;
6495 slot = AdjustBlockSlot(cx, cg, cg->arrayCompDepth);
6496 if (slot < 0)
6497 return JS_FALSE;
6498 EMIT_UINT16_IMM_OP(PN_OP(pn), slot);
6499 break;
6501 #endif
6503 case TOK_RB:
6504 #if JS_HAS_GENERATORS
6505 case TOK_ARRAYCOMP:
6506 #endif
6507 /*
6508 * Emit code for [a, b, c] that is equivalent to constructing a new
6509 * array and in source order evaluating each element value and adding
6510 * it to the array, without invoking latent setters. We use the
6511 * JSOP_NEWINIT and JSOP_INITELEM bytecodes to ignore setters and to
6512 * avoid dup'ing and popping the array as each element is added, as
6513 * JSOP_SETELEM/JSOP_SETPROP would do.
6514 *
6515 * If no sharp variable is defined, the initializer is not for an array
6516 * comprehension, the initializer is not overlarge, and the initializer
6517 * is not in global code (whose stack growth cannot be precisely modeled
6518 * due to the need to reserve space for global variables and regular
6519 * expressions), use JSOP_NEWARRAY to minimize opcodes and to create the
6520 * array using a fast, all-at-once process rather than a slow, element-
6521 * by-element process.
6522 */
6523 #if JS_HAS_SHARP_VARS
6524 sharpnum = -1;
6525 do_emit_array:
6526 #endif
6528 op = (JS_LIKELY(pn->pn_count < JS_BIT(16)) && cg->inFunction())
6529 ? JSOP_NEWARRAY
6530 : JSOP_NEWINIT;
6532 #if JS_HAS_GENERATORS
6533 if (pn->pn_type == TOK_ARRAYCOMP)
6534 op = JSOP_NEWINIT;
6535 #endif
6536 #if JS_HAS_SHARP_VARS
6537 JS_ASSERT_IF(sharpnum >= 0, cg->hasSharps());
6538 if (cg->hasSharps())
6539 op = JSOP_NEWINIT;
6540 #endif
6542 if (op == JSOP_NEWINIT && !EmitNewInit(cx, cg, JSProto_Array, pn, sharpnum))
6543 return JS_FALSE;
6545 #if JS_HAS_GENERATORS
6546 if (pn->pn_type == TOK_ARRAYCOMP) {
6547 uintN saveDepth;
6549 /*
6550 * Pass the new array's stack index to the TOK_ARRAYPUSH case via
6551 * cg->arrayCompDepth, then simply traverse the TOK_FOR node and
6552 * its kids under pn2 to generate this comprehension.
6553 */
6554 JS_ASSERT(cg->stackDepth > 0);
6555 saveDepth = cg->arrayCompDepth;
6556 cg->arrayCompDepth = (uint32) (cg->stackDepth - 1);
6557 if (!js_EmitTree(cx, cg, pn->pn_head))
6558 return JS_FALSE;
6559 cg->arrayCompDepth = saveDepth;
6561 /* Emit the usual op needed for decompilation. */
6562 if (!EmitEndInit(cx, cg, 1))
6563 return JS_FALSE;
6564 break;
6566 #endif /* JS_HAS_GENERATORS */
6568 pn2 = pn->pn_head;
6569 for (atomIndex = 0; pn2; atomIndex++, pn2 = pn2->pn_next) {
6570 if (op == JSOP_NEWINIT && !EmitNumberOp(cx, atomIndex, cg))
6571 return JS_FALSE;
6572 if (pn2->pn_type == TOK_COMMA && pn2->pn_arity == PN_NULLARY) {
6573 if (js_Emit1(cx, cg, JSOP_HOLE) < 0)
6574 return JS_FALSE;
6575 } else {
6576 if (!js_EmitTree(cx, cg, pn2))
6577 return JS_FALSE;
6579 if (op == JSOP_NEWINIT && js_Emit1(cx, cg, JSOP_INITELEM) < 0)
6580 return JS_FALSE;
6582 JS_ASSERT(atomIndex == pn->pn_count);
6584 if (pn->pn_xflags & PNX_ENDCOMMA) {
6585 /* Emit a source note so we know to decompile an extra comma. */
6586 if (js_NewSrcNote(cx, cg, SRC_CONTINUE) < 0)
6587 return JS_FALSE;
6590 if (op == JSOP_NEWINIT) {
6591 /*
6592 * Emit an op to finish the array and, secondarily, to aid in sharp
6593 * array cleanup (if JS_HAS_SHARP_VARS) and decompilation.
6594 */
6595 if (!EmitEndInit(cx, cg, atomIndex))
6596 return JS_FALSE;
6597 break;
6600 JS_ASSERT(atomIndex < JS_BIT(16));
6601 EMIT_UINT16_IMM_OP(JSOP_NEWARRAY, atomIndex);
6602 break;
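/*
 * Worked example (schematic): in a function, |[a, , b]| takes the fast
 * path above:
 *
 *   <a>
 *   JSOP_HOLE               the elided element
 *   <b>
 *   JSOP_NEWARRAY 3         build the array from the three stack values
 *
 * In global code the same initialiser uses JSOP_NEWINIT, then for each
 * element pushes its index and value and emits JSOP_INITELEM.
 */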
6604 case TOK_RC: {
6605 #if JS_HAS_SHARP_VARS
6606 sharpnum = -1;
6607 do_emit_object:
6608 #endif
6609 #if JS_HAS_DESTRUCTURING_SHORTHAND
6610 if (pn->pn_xflags & PNX_DESTRUCT) {
6611 ReportCompileErrorNumber(cx, CG_TS(cg), pn, JSREPORT_ERROR, JSMSG_BAD_OBJECT_INIT);
6612 return JS_FALSE;
6614 #endif
6615 /*
6616 * Emit code for {p:a, '%q':b, 2:c} that is equivalent to constructing
6617 * a new object and in source order evaluating each property value and
6618 * adding the property to the object, without invoking latent setters.
6619 * We use the JSOP_NEWINIT and JSOP_INITELEM/JSOP_INITPROP bytecodes to
6620 * ignore setters and to avoid dup'ing and popping the object as each
6621 * property is added, as JSOP_SETELEM/JSOP_SETPROP would do.
6622 */
6623 if (!EmitNewInit(cx, cg, JSProto_Object, pn, sharpnum))
6624 return JS_FALSE;
6626 uintN methodInits = 0, slowMethodInits = 0;
6627 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
6628 /* Emit an index for t[2] for later consumption by JSOP_INITELEM. */
6629 pn3 = pn2->pn_left;
6630 if (pn3->pn_type == TOK_NUMBER) {
6631 if (!EmitNumberOp(cx, pn3->pn_dval, cg))
6632 return JS_FALSE;
6635 /* Emit code for the property initializer. */
6636 if (!js_EmitTree(cx, cg, pn2->pn_right))
6637 return JS_FALSE;
6639 op = PN_OP(pn2);
6640 if (op == JSOP_GETTER || op == JSOP_SETTER) {
6641 if (js_Emit1(cx, cg, op) < 0)
6642 return JS_FALSE;
6645 /* Annotate JSOP_INITELEM so we decompile 2:c and not just c. */
6646 if (pn3->pn_type == TOK_NUMBER) {
6647 if (js_NewSrcNote(cx, cg, SRC_INITPROP) < 0)
6648 return JS_FALSE;
6649 if (js_Emit1(cx, cg, JSOP_INITELEM) < 0)
6650 return JS_FALSE;
6651 } else {
6652 JS_ASSERT(pn3->pn_type == TOK_NAME ||
6653 pn3->pn_type == TOK_STRING);
6654 ale = cg->atomList.add(cg->parser, pn3->pn_atom);
6655 if (!ale)
6656 return JS_FALSE;
6658 /* Check whether we can optimize to JSOP_INITMETHOD. */
6659 JSParseNode *init = pn2->pn_right;
6660 bool lambda = PN_OP(init) == JSOP_LAMBDA;
6661 if (lambda)
6662 ++methodInits;
6663 if (op == JSOP_INITPROP && lambda && init->pn_funbox->joinable())
6664 {
6665 op = JSOP_INITMETHOD;
6666 pn2->pn_op = uint8(op);
6667 } else {
6668 op = JSOP_INITPROP;
6669 if (lambda)
6670 ++slowMethodInits;
6673 EMIT_INDEX_OP(op, ALE_INDEX(ale));
6677 if (cg->funbox && cg->funbox->shouldUnbrand(methodInits, slowMethodInits)) {
6678 if (js_Emit1(cx, cg, JSOP_UNBRAND) < 0)
6679 return JS_FALSE;
6681 if (!EmitEndInit(cx, cg, pn->pn_count))
6682 return JS_FALSE;
6683 break;
6684 }
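/*
 * Worked example (schematic): |{p: a, 2: b}| emits
 *
 *   JSOP_NEWINIT (Object)
 *   <a>
 *   JSOP_INITPROP "p"
 *   <2>                     numeric key pushed first, for JSOP_INITELEM
 *   <b>
 *   JSOP_INITELEM           SRC_INITPROP note keeps |2: b| decompilable
 *
 * followed by the end-of-initialiser op emitted via EmitEndInit.
 */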
6686 #if JS_HAS_SHARP_VARS
6687 case TOK_DEFSHARP:
6688 JS_ASSERT(cg->hasSharps());
6689 sharpnum = pn->pn_num;
6690 pn = pn->pn_kid;
6691 if (pn->pn_type == TOK_RB)
6692 goto do_emit_array;
6693 # if JS_HAS_GENERATORS
6694 if (pn->pn_type == TOK_ARRAYCOMP)
6695 goto do_emit_array;
6696 # endif
6697 if (pn->pn_type == TOK_RC)
6698 goto do_emit_object;
6700 if (!js_EmitTree(cx, cg, pn))
6701 return JS_FALSE;
6702 EMIT_UINT16PAIR_IMM_OP(JSOP_DEFSHARP, cg->sharpSlotBase, (jsatomid) sharpnum);
6703 break;
6705 case TOK_USESHARP:
6706 JS_ASSERT(cg->hasSharps());
6707 EMIT_UINT16PAIR_IMM_OP(JSOP_USESHARP, cg->sharpSlotBase, (jsatomid) pn->pn_num);
6708 break;
6709 #endif /* JS_HAS_SHARP_VARS */
6711 case TOK_NAME:
6712 /*
6713 * Cope with a left-over function definition that was replaced by a use
6714 * of a later function definition of the same name. See FunctionDef and
6715 * MakeDefIntoUse in jsparse.cpp.
6716 */
6717 if (pn->pn_op == JSOP_NOP)
6718 return JS_TRUE;
6719 if (!EmitNameOp(cx, cg, pn, JS_FALSE))
6720 return JS_FALSE;
6721 break;
6723 #if JS_HAS_XML_SUPPORT
6724 case TOK_XMLATTR:
6725 case TOK_XMLSPACE:
6726 case TOK_XMLTEXT:
6727 case TOK_XMLCDATA:
6728 case TOK_XMLCOMMENT:
6729 #endif
6730 case TOK_STRING:
6731 ok = EmitAtomOp(cx, pn, PN_OP(pn), cg);
6732 break;
6734 case TOK_NUMBER:
6735 ok = EmitNumberOp(cx, pn->pn_dval, cg);
6736 break;
6738 case TOK_REGEXP: {
6739 /*
6740 * If the regexp's script is one-shot and the regexp is not used in a
6741 * loop, we can avoid the extra fork-on-exec costs of JSOP_REGEXP by
6742 * selecting JSOP_OBJECT. Otherwise, to avoid incorrect proto, parent,
6743 * and lastIndex sharing, select JSOP_REGEXP.
6744 */
6745 JS_ASSERT(pn->pn_op == JSOP_REGEXP);
6746 bool singleton = !cg->fun && cg->compileAndGo();
6747 if (singleton) {
6748 for (JSStmtInfo *stmt = cg->topStmt; stmt; stmt = stmt->down) {
6749 if (STMT_IS_LOOP(stmt)) {
6750 singleton = false;
6751 break;
6755 if (singleton) {
6756 ok = EmitObjectOp(cx, pn->pn_objbox, JSOP_OBJECT, cg);
6757 } else {
6758 ok = EmitIndexOp(cx, JSOP_REGEXP,
6759 cg->regexpList.index(pn->pn_objbox),
6760 cg);
6762 break;
6765 #if JS_HAS_XML_SUPPORT
6766 case TOK_ANYNAME:
6767 #endif
6768 case TOK_PRIMARY:
6769 if (js_Emit1(cx, cg, PN_OP(pn)) < 0)
6770 return JS_FALSE;
6771 break;
6773 #if JS_HAS_DEBUGGER_KEYWORD
6774 case TOK_DEBUGGER:
6775 if (js_Emit1(cx, cg, JSOP_DEBUGGER) < 0)
6776 return JS_FALSE;
6777 break;
6778 #endif /* JS_HAS_DEBUGGER_KEYWORD */
6780 #if JS_HAS_XML_SUPPORT
6781 case TOK_XMLELEM:
6782 case TOK_XMLLIST:
6783 if (pn->pn_op == JSOP_XMLOBJECT) {
6784 ok = EmitObjectOp(cx, pn->pn_objbox, PN_OP(pn), cg);
6785 break;
6788 JS_ASSERT(PN_TYPE(pn) == TOK_XMLLIST || pn->pn_count != 0);
6789 switch (pn->pn_head ? PN_TYPE(pn->pn_head) : TOK_XMLLIST) {
6790 case TOK_XMLETAGO:
6791 JS_ASSERT(0);
6792 /* FALL THROUGH */
6793 case TOK_XMLPTAGC:
6794 case TOK_XMLSTAGO:
6795 break;
6796 default:
6797 if (js_Emit1(cx, cg, JSOP_STARTXML) < 0)
6798 return JS_FALSE;
6801 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
6802 if (pn2->pn_type == TOK_LC &&
6803 js_Emit1(cx, cg, JSOP_STARTXMLEXPR) < 0) {
6804 return JS_FALSE;
6806 if (!js_EmitTree(cx, cg, pn2))
6807 return JS_FALSE;
6808 if (pn2 != pn->pn_head && js_Emit1(cx, cg, JSOP_ADD) < 0)
6809 return JS_FALSE;
6812 if (pn->pn_xflags & PNX_XMLROOT) {
6813 if (pn->pn_count == 0) {
6814 JS_ASSERT(pn->pn_type == TOK_XMLLIST);
6815 atom = cx->runtime->atomState.emptyAtom;
6816 ale = cg->atomList.add(cg->parser, atom);
6817 if (!ale)
6818 return JS_FALSE;
6819 EMIT_INDEX_OP(JSOP_STRING, ALE_INDEX(ale));
6821 if (js_Emit1(cx, cg, PN_OP(pn)) < 0)
6822 return JS_FALSE;
6824 #ifdef DEBUG
6825 else
6826 JS_ASSERT(pn->pn_count != 0);
6827 #endif
6828 break;
6830 case TOK_XMLPTAGC:
6831 if (pn->pn_op == JSOP_XMLOBJECT) {
6832 ok = EmitObjectOp(cx, pn->pn_objbox, PN_OP(pn), cg);
6833 break;
6835 /* FALL THROUGH */
6837 case TOK_XMLSTAGO:
6838 case TOK_XMLETAGO:
6840 uint32 i;
6842 if (js_Emit1(cx, cg, JSOP_STARTXML) < 0)
6843 return JS_FALSE;
6845 ale = cg->atomList.add(cg->parser,
6846 (pn->pn_type == TOK_XMLETAGO)
6847 ? cx->runtime->atomState.etagoAtom
6848 : cx->runtime->atomState.stagoAtom);
6849 if (!ale)
6850 return JS_FALSE;
6851 EMIT_INDEX_OP(JSOP_STRING, ALE_INDEX(ale));
6853 JS_ASSERT(pn->pn_count != 0);
6854 pn2 = pn->pn_head;
6855 if (pn2->pn_type == TOK_LC && js_Emit1(cx, cg, JSOP_STARTXMLEXPR) < 0)
6856 return JS_FALSE;
6857 if (!js_EmitTree(cx, cg, pn2))
6858 return JS_FALSE;
6859 if (js_Emit1(cx, cg, JSOP_ADD) < 0)
6860 return JS_FALSE;
6862 for (pn2 = pn2->pn_next, i = 0; pn2; pn2 = pn2->pn_next, i++) {
6863 if (pn2->pn_type == TOK_LC &&
6864 js_Emit1(cx, cg, JSOP_STARTXMLEXPR) < 0) {
6865 return JS_FALSE;
6867 if (!js_EmitTree(cx, cg, pn2))
6868 return JS_FALSE;
6869 if ((i & 1) && pn2->pn_type == TOK_LC) {
6870 if (js_Emit1(cx, cg, JSOP_TOATTRVAL) < 0)
6871 return JS_FALSE;
6873 if (js_Emit1(cx, cg,
6874 (i & 1) ? JSOP_ADDATTRVAL : JSOP_ADDATTRNAME) < 0) {
6875 return JS_FALSE;
6879 ale = cg->atomList.add(cg->parser,
6880 (pn->pn_type == TOK_XMLPTAGC)
6881 ? cx->runtime->atomState.ptagcAtom
6882 : cx->runtime->atomState.tagcAtom);
6883 if (!ale)
6884 return JS_FALSE;
6885 EMIT_INDEX_OP(JSOP_STRING, ALE_INDEX(ale));
6886 if (js_Emit1(cx, cg, JSOP_ADD) < 0)
6887 return JS_FALSE;
6889 if ((pn->pn_xflags & PNX_XMLROOT) && js_Emit1(cx, cg, PN_OP(pn)) < 0)
6890 return JS_FALSE;
6891 break;
6894 case TOK_XMLNAME:
6895 if (pn->pn_arity == PN_LIST) {
6896 JS_ASSERT(pn->pn_count != 0);
6897 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
6898 if (pn2->pn_type == TOK_LC &&
6899 js_Emit1(cx, cg, JSOP_STARTXMLEXPR) < 0) {
6900 return JS_FALSE;
6902 if (!js_EmitTree(cx, cg, pn2))
6903 return JS_FALSE;
6904 if (pn2 != pn->pn_head && js_Emit1(cx, cg, JSOP_ADD) < 0)
6905 return JS_FALSE;
6907 } else {
6908 JS_ASSERT(pn->pn_arity == PN_NULLARY);
6909 ok = (pn->pn_op == JSOP_OBJECT)
6910 ? EmitObjectOp(cx, pn->pn_objbox, PN_OP(pn), cg)
6911 : EmitAtomOp(cx, pn, PN_OP(pn), cg);
6913 break;
6915 case TOK_XMLPI:
6916 ale = cg->atomList.add(cg->parser, pn->pn_atom2);
6917 if (!ale)
6918 return JS_FALSE;
6919 if (!EmitIndexOp(cx, JSOP_QNAMEPART, ALE_INDEX(ale), cg))
6920 return JS_FALSE;
6921 if (!EmitAtomOp(cx, pn, JSOP_XMLPI, cg))
6922 return JS_FALSE;
6923 break;
6924 #endif /* JS_HAS_XML_SUPPORT */
6926 default:
6927 JS_ASSERT(0);
6930 if (ok && --cg->emitLevel == 0) {
6931 if (cg->spanDeps)
6932 ok = OptimizeSpanDeps(cx, cg);
6933 if (!UpdateLineNumberNotes(cx, cg, pn->pn_pos.end.lineno))
6934 return JS_FALSE;
6935 }
6937 return ok;
6938 }
6940 /*
6941 * We should try to get rid of offsetBias (always 0 or 1, where 1 is
6942 * JSOP_{NOP,POP}_LENGTH), which is used only by SRC_FOR and SRC_DECL.
6943 */
6944 JS_FRIEND_DATA(JSSrcNoteSpec) js_SrcNoteSpec[] = {
6945 {"null", 0, 0, 0},
6946 {"if", 0, 0, 0},
6947 {"if-else", 2, 0, 1},
6948 {"for", 3, 1, 1},
6949 {"while", 1, 0, 1},
6950 {"continue", 0, 0, 0},
6951 {"decl", 1, 1, 1},
6952 {"pcdelta", 1, 0, 1},
6953 {"assignop", 0, 0, 0},
6954 {"cond", 1, 0, 1},
6955 {"brace", 1, 0, 1},
6956 {"hidden", 0, 0, 0},
6957 {"pcbase", 1, 0, -1},
6958 {"label", 1, 0, 0},
6959 {"labelbrace", 1, 0, 0},
6960 {"endbrace", 0, 0, 0},
6961 {"break2label", 1, 0, 0},
6962 {"cont2label", 1, 0, 0},
6963 {"switch", 2, 0, 1},
6964 {"funcdef", 1, 0, 0},
6965 {"catch", 1, 0, 1},
6966 {"extended", -1, 0, 0},
6967 {"newline", 0, 0, 0},
6968 {"setline", 1, 0, 0},
6969 {"xdelta", 0, 0, 0},
6972 static intN
6973 AllocSrcNote(JSContext *cx, JSCodeGenerator *cg)
6975 intN index;
6976 JSArenaPool *pool;
6977 size_t size;
6979 index = CG_NOTE_COUNT(cg);
6980 if (((uintN)index & CG_NOTE_MASK(cg)) == 0) {
6981 pool = cg->notePool;
6982 size = SRCNOTE_SIZE(CG_NOTE_MASK(cg) + 1);
6983 if (!CG_NOTES(cg)) {
6984 /* Allocate the first note array lazily; leave noteMask alone. */
6985 pool->allocateCast<jssrcnote *>(CG_NOTES(cg), size);
6986 } else {
6987 /* Grow by doubling note array size; update noteMask on success. */
6988 pool->growCast<jssrcnote *>(CG_NOTES(cg), size, size);
6989 if (CG_NOTES(cg))
6990 CG_NOTE_MASK(cg) = (CG_NOTE_MASK(cg) << 1) | 1;
6992 if (!CG_NOTES(cg)) {
6993 js_ReportOutOfScriptQuota(cx);
6994 return -1;
6998 CG_NOTE_COUNT(cg) = index + 1;
6999 return index;
7000 }
7002 intN
7003 js_NewSrcNote(JSContext *cx, JSCodeGenerator *cg, JSSrcNoteType type)
7005 intN index, n;
7006 jssrcnote *sn;
7007 ptrdiff_t offset, delta, xdelta;
7009 /*
7010 * Claim a note slot in CG_NOTES(cg) by growing it if necessary and then
7011 * incrementing CG_NOTE_COUNT(cg).
7012 */
7013 index = AllocSrcNote(cx, cg);
7014 if (index < 0)
7015 return -1;
7016 sn = &CG_NOTES(cg)[index];
7018 /*
7019 * Compute delta from the last annotated bytecode's offset. If it's too
7020 * big to fit in sn, allocate one or more xdelta notes and reset sn.
7021 */
7022 offset = CG_OFFSET(cg);
7023 delta = offset - CG_LAST_NOTE_OFFSET(cg);
7024 CG_LAST_NOTE_OFFSET(cg) = offset;
7025 if (delta >= SN_DELTA_LIMIT) {
7026 do {
7027 xdelta = JS_MIN(delta, SN_XDELTA_MASK);
7028 SN_MAKE_XDELTA(sn, xdelta);
7029 delta -= xdelta;
7030 index = AllocSrcNote(cx, cg);
7031 if (index < 0)
7032 return -1;
7033 sn = &CG_NOTES(cg)[index];
7034 } while (delta >= SN_DELTA_LIMIT);
7035 }
7037 /*
7038 * Initialize type and delta, then allocate the minimum number of notes
7039 * needed for type's arity. Usually, we won't need more, but if an offset
7040 * does take two bytes, js_SetSrcNoteOffset will grow CG_NOTES(cg).
7041 */
7042 SN_MAKE_NOTE(sn, type, delta);
7043 for (n = (intN)js_SrcNoteSpec[type].arity; n > 0; n--) {
7044 if (js_NewSrcNote(cx, cg, SRC_NULL) < 0)
7045 return -1;
7047 return index;
7048 }
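/*
 * Worked example: with the encodings from jsopcode.h (3 delta bits in an
 * ordinary note, 6 in an SRC_XDELTA, so SN_DELTA_LIMIT is 8 and
 * SN_XDELTA_MASK is 63), a gap of 100 bytecodes since the last note is
 * recorded by the loop above as
 *
 *   SRC_XDELTA(63)  SRC_XDELTA(37)  <note with delta 0>
 *
 * two extended deltas that absorb the distance, then the note itself
 * carrying whatever remainder is left (here zero).
 */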
7050 intN
7051 js_NewSrcNote2(JSContext *cx, JSCodeGenerator *cg, JSSrcNoteType type,
7052 ptrdiff_t offset)
7054 intN index;
7056 index = js_NewSrcNote(cx, cg, type);
7057 if (index >= 0) {
7058 if (!js_SetSrcNoteOffset(cx, cg, index, 0, offset))
7059 return -1;
7060 }
7061 return index;
7062 }
7064 intN
7065 js_NewSrcNote3(JSContext *cx, JSCodeGenerator *cg, JSSrcNoteType type,
7066 ptrdiff_t offset1, ptrdiff_t offset2)
7068 intN index;
7070 index = js_NewSrcNote(cx, cg, type);
7071 if (index >= 0) {
7072 if (!js_SetSrcNoteOffset(cx, cg, index, 0, offset1))
7073 return -1;
7074 if (!js_SetSrcNoteOffset(cx, cg, index, 1, offset2))
7075 return -1;
7076 }
7077 return index;
7078 }
7080 static JSBool
7081 GrowSrcNotes(JSContext *cx, JSCodeGenerator *cg)
7083 JSArenaPool *pool;
7084 size_t size;
7086 /* Grow by doubling note array size; update noteMask on success. */
7087 pool = cg->notePool;
7088 size = SRCNOTE_SIZE(CG_NOTE_MASK(cg) + 1);
7089 pool->growCast<jssrcnote *>(CG_NOTES(cg), size, size);
7090 if (!CG_NOTES(cg)) {
7091 js_ReportOutOfScriptQuota(cx);
7092 return JS_FALSE;
7094 CG_NOTE_MASK(cg) = (CG_NOTE_MASK(cg) << 1) | 1;
7095 return JS_TRUE;
7096 }
7098 jssrcnote *
7099 js_AddToSrcNoteDelta(JSContext *cx, JSCodeGenerator *cg, jssrcnote *sn,
7100 ptrdiff_t delta)
7102 ptrdiff_t base, limit, newdelta, diff;
7103 intN index;
7105 /*
7106 * Called only from OptimizeSpanDeps and js_FinishTakingSrcNotes to add to
7107 * main script note deltas, and only by a small positive amount.
7108 */
7109 JS_ASSERT(cg->current == &cg->main);
7110 JS_ASSERT((unsigned) delta < (unsigned) SN_XDELTA_LIMIT);
7112 base = SN_DELTA(sn);
7113 limit = SN_IS_XDELTA(sn) ? SN_XDELTA_LIMIT : SN_DELTA_LIMIT;
7114 newdelta = base + delta;
7115 if (newdelta < limit) {
7116 SN_SET_DELTA(sn, newdelta);
7117 } else {
7118 index = sn - cg->main.notes;
7119 if ((cg->main.noteCount & cg->main.noteMask) == 0) {
7120 if (!GrowSrcNotes(cx, cg))
7121 return NULL;
7122 sn = cg->main.notes + index;
7124 diff = cg->main.noteCount - index;
7125 cg->main.noteCount++;
7126 memmove(sn + 1, sn, SRCNOTE_SIZE(diff));
7127 SN_MAKE_XDELTA(sn, delta);
7128 sn++;
7130 return sn;
7131 }
7133 JS_FRIEND_API(uintN)
7134 js_SrcNoteLength(jssrcnote *sn)
7136 uintN arity;
7137 jssrcnote *base;
7139 arity = (intN)js_SrcNoteSpec[SN_TYPE(sn)].arity;
7140 for (base = sn++; arity; sn++, arity--) {
7141 if (*sn & SN_3BYTE_OFFSET_FLAG)
7142 sn += 2;
7144 return sn - base;
7145 }
7147 JS_FRIEND_API(ptrdiff_t)
7148 js_GetSrcNoteOffset(jssrcnote *sn, uintN which)
7150 /* Find the offset numbered which (i.e., skip exactly which offsets). */
7151 JS_ASSERT(SN_TYPE(sn) != SRC_XDELTA);
7152 JS_ASSERT((intN) which < js_SrcNoteSpec[SN_TYPE(sn)].arity);
7153 for (sn++; which; sn++, which--) {
7154 if (*sn & SN_3BYTE_OFFSET_FLAG)
7155 sn += 2;
7157 if (*sn & SN_3BYTE_OFFSET_FLAG) {
7158 return (ptrdiff_t)(((uint32)(sn[0] & SN_3BYTE_OFFSET_MASK) << 16)
7159 | (sn[1] << 8)
7160 | sn[2]);
7162 return (ptrdiff_t)*sn;
7163 }
7165 JSBool
7166 js_SetSrcNoteOffset(JSContext *cx, JSCodeGenerator *cg, uintN index,
7167 uintN which, ptrdiff_t offset)
7169 jssrcnote *sn;
7170 ptrdiff_t diff;
7172 if ((jsuword)offset >= (jsuword)((ptrdiff_t)SN_3BYTE_OFFSET_FLAG << 16)) {
7173 ReportStatementTooLarge(cx, cg);
7174 return JS_FALSE;
7177 /* Find the offset numbered which (i.e., skip exactly which offsets). */
7178 sn = &CG_NOTES(cg)[index];
7179 JS_ASSERT(SN_TYPE(sn) != SRC_XDELTA);
7180 JS_ASSERT((intN) which < js_SrcNoteSpec[SN_TYPE(sn)].arity);
7181 for (sn++; which; sn++, which--) {
7182 if (*sn & SN_3BYTE_OFFSET_FLAG)
7183 sn += 2;
7186 /* See if the new offset requires three bytes. */
7187 if (offset > (ptrdiff_t)SN_3BYTE_OFFSET_MASK) {
7188 /* Maybe this offset was already set to a three-byte value. */
7189 if (!(*sn & SN_3BYTE_OFFSET_FLAG)) {
7190 /* Losing, need to insert another two bytes for this offset. */
7191 index = sn - CG_NOTES(cg);
7193 /*
7194 * Simultaneously test to see if the source note array must grow to
7195 * accommodate either the first or second byte of additional storage
7196 * required by this 3-byte offset.
7197 */
7198 if (((CG_NOTE_COUNT(cg) + 1) & CG_NOTE_MASK(cg)) <= 1) {
7199 if (!GrowSrcNotes(cx, cg))
7200 return JS_FALSE;
7201 sn = CG_NOTES(cg) + index;
7203 CG_NOTE_COUNT(cg) += 2;
7205 diff = CG_NOTE_COUNT(cg) - (index + 3);
7206 JS_ASSERT(diff >= 0);
7207 if (diff > 0)
7208 memmove(sn + 3, sn + 1, SRCNOTE_SIZE(diff));
7210 *sn++ = (jssrcnote)(SN_3BYTE_OFFSET_FLAG | (offset >> 16));
7211 *sn++ = (jssrcnote)(offset >> 8);
7213 *sn = (jssrcnote)offset;
7214 return JS_TRUE;
7215 }
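/*
 * Worked example: SN_3BYTE_OFFSET_FLAG is the high bit of a srcnote byte
 * and SN_3BYTE_OFFSET_MASK its low seven bits (see jsopcode.h), so an
 * offset of 0x12345 is stored big-endian across three bytes:
 *
 *   sn[0] = SN_3BYTE_OFFSET_FLAG | 0x01,  sn[1] = 0x23,  sn[2] = 0x45
 *
 * which is exactly what js_GetSrcNoteOffset above reassembles.
 */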
7217 #ifdef DEBUG_notme
7218 #define DEBUG_srcnotesize
7219 #endif
7221 #ifdef DEBUG_srcnotesize
7222 #define NBINS 10
7223 static uint32 hist[NBINS];
7225 void DumpSrcNoteSizeHist()
7227 static FILE *fp;
7228 int i, n;
7230 if (!fp) {
7231 fp = fopen("/tmp/srcnotes.hist", "w");
7232 if (!fp)
7233 return;
7234 setvbuf(fp, NULL, _IONBF, 0);
7236 fprintf(fp, "SrcNote size histogram:\n");
7237 for (i = 0; i < NBINS; i++) {
7238 fprintf(fp, "%4u %4u ", JS_BIT(i), hist[i]);
7239 for (n = (int) JS_HOWMANY(hist[i], 10); n > 0; --n)
7240 fputc('*', fp);
7241 fputc('\n', fp);
7243 fputc('\n', fp);
7244 }
7245 #endif
7247 /*
7248 * Fill in the storage at notes with prolog and main srcnotes; the space at
7249 * notes was allocated using the CG_COUNT_FINAL_SRCNOTES macro from jsemit.h.
7250 * SO DON'T CHANGE THIS FUNCTION WITHOUT AT LEAST CHECKING WHETHER jsemit.h's
7251 * CG_COUNT_FINAL_SRCNOTES MACRO NEEDS CORRESPONDING CHANGES!
7252 */
7253 JSBool
7254 js_FinishTakingSrcNotes(JSContext *cx, JSCodeGenerator *cg, jssrcnote *notes)
7256 uintN prologCount, mainCount, totalCount;
7257 ptrdiff_t offset, delta;
7258 jssrcnote *sn;
7260 JS_ASSERT(cg->current == &cg->main);
7262 prologCount = cg->prolog.noteCount;
7263 if (prologCount && cg->prolog.currentLine != cg->firstLine) {
7264 CG_SWITCH_TO_PROLOG(cg);
7265 if (js_NewSrcNote2(cx, cg, SRC_SETLINE, (ptrdiff_t)cg->firstLine) < 0)
7266 return JS_FALSE;
7267 prologCount = cg->prolog.noteCount;
7268 CG_SWITCH_TO_MAIN(cg);
7269 } else {
7270 /*
7271 * Either no prolog srcnotes, or no line number change over prolog.
7272 * We don't need a SRC_SETLINE, but we may need to adjust the offset
7273 * of the first main note, by adding to its delta and possibly even
7274 * prepending SRC_XDELTA notes to it to account for prolog bytecodes
7275 * that came at and after the last annotated bytecode.
7276 */
7277 offset = CG_PROLOG_OFFSET(cg) - cg->prolog.lastNoteOffset;
7278 JS_ASSERT(offset >= 0);
7279 if (offset > 0 && cg->main.noteCount != 0) {
7280 /* NB: Use as much of the first main note's delta as we can. */
7281 sn = cg->main.notes;
7282 delta = SN_IS_XDELTA(sn)
7283 ? SN_XDELTA_MASK - (*sn & SN_XDELTA_MASK)
7284 : SN_DELTA_MASK - (*sn & SN_DELTA_MASK);
7285 if (offset < delta)
7286 delta = offset;
7287 for (;;) {
7288 if (!js_AddToSrcNoteDelta(cx, cg, sn, delta))
7289 return JS_FALSE;
7290 offset -= delta;
7291 if (offset == 0)
7292 break;
7293 delta = JS_MIN(offset, SN_XDELTA_MASK);
7294 sn = cg->main.notes;
7299 mainCount = cg->main.noteCount;
7300 totalCount = prologCount + mainCount;
7301 if (prologCount)
7302 memcpy(notes, cg->prolog.notes, SRCNOTE_SIZE(prologCount));
7303 memcpy(notes + prologCount, cg->main.notes, SRCNOTE_SIZE(mainCount));
7304 SN_MAKE_TERMINATOR(&notes[totalCount]);
7306 #ifdef DEBUG_notme
7307 { int bin = JS_CeilingLog2(totalCount);
7308 if (bin >= NBINS)
7309 bin = NBINS - 1;
7310 ++hist[bin];
7312 #endif
7313 return JS_TRUE;
7314 }
7316 static JSBool
7317 NewTryNote(JSContext *cx, JSCodeGenerator *cg, JSTryNoteKind kind,
7318 uintN stackDepth, size_t start, size_t end)
7320 JSTryNode *tryNode;
7322 JS_ASSERT((uintN)(uint16)stackDepth == stackDepth);
7323 JS_ASSERT(start <= end);
7324 JS_ASSERT((size_t)(uint32)start == start);
7325 JS_ASSERT((size_t)(uint32)end == end);
7327 cx->tempPool.allocateType<JSTryNode>(tryNode);
7328 if (!tryNode) {
7329 js_ReportOutOfScriptQuota(cx);
7330 return JS_FALSE;
7333 tryNode->note.kind = kind;
7334 tryNode->note.stackDepth = (uint16)stackDepth;
7335 tryNode->note.start = (uint32)start;
7336 tryNode->note.length = (uint32)(end - start);
7337 tryNode->prev = cg->lastTryNode;
7338 cg->lastTryNode = tryNode;
7339 cg->ntrynotes++;
7340 return JS_TRUE;
7341 }
7343 void
7344 js_FinishTakingTryNotes(JSCodeGenerator *cg, JSTryNoteArray *array)
7346 JSTryNode *tryNode;
7347 JSTryNote *tn;
7349 JS_ASSERT(array->length > 0 && array->length == cg->ntrynotes);
7350 tn = array->vector + array->length;
7351 tryNode = cg->lastTryNode;
7352 do {
7353 *--tn = tryNode->note;
7354 } while ((tryNode = tryNode->prev) != NULL);
7355 JS_ASSERT(tn == array->vector);
7356 }
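/*
 * Example: after three NewTryNote calls creating T1, T2 and T3, the list
 * is cg->lastTryNode -> T3 -> T2 -> T1. The loop above fills
 * array->vector back to front, so the finished array reads [T1, T2, T3]
 * in creation order without a separate reversal pass.
 */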
7358 /*
7359 * Find the index of the given object for the code generator.
7360 *
7361 * Since the emitter refers to each parsed object only once, for the index we
7362 * use the number of already indexed objects. We also add the object to a list
7363 * so the list can be converted to a fixed-size array when we complete code
7364 * generation; see JSCGObjectList::finish below.
7365 *
7366 * Most of the objects go to JSCodeGenerator.objectList, but for regexps we use
7367 * a separate JSCodeGenerator.regexpList. In this way the emitted index can be
7368 * directly used to store and fetch a reference to a cloned RegExp object that
7369 * shares the same JSRegExp private data created for the object literal in
7370 * objbox. We need a cloned object to hold lastIndex and other direct properties
7371 * that should not be shared among threads sharing a precompiled function or
7372 * script.
7373 *
7374 * If the code being compiled is function code, allocate a reserved slot in
7375 * the cloned function object that shares its precompiled script with other
7376 * cloned function objects and with the compiler-created clone-parent. There
7377 * are nregexps = script->regexps()->length such reserved slots in each
7378 * function object cloned from fun->object. NB: during compilation, a funobj
7379 * slots element must never be allocated, because js_AllocSlot could hand out
7380 * one of the slots that should be given to a regexp clone.
7381 *
7382 * If the code being compiled is global code, the cloned regexps are stored in
7383 * fp->vars slots after cg->ngvars, and to protect regexp slots from GC we set
7384 * fp->nvars to ngvars + nregexps.
7385 *
7386 * The slots initially contain undefined or null. We populate them lazily when
7387 * JSOP_REGEXP is executed for the first time.
7388 *
7389 * Why clone regexp objects? ECMA specifies that when a regular expression
7390 * literal is scanned, a RegExp object is created. In the spec, compilation
7391 * and execution happen indivisibly, but in this implementation and many of
7392 * its embeddings, code is precompiled early and re-executed in multiple
7393 * threads, or using multiple global objects, or both, for efficiency.
7394 *
7395 * In such cases, naively following ECMA leads to wrongful sharing of RegExp
7396 * objects, which makes for collisions on the lastIndex property (especially
7397 * for global regexps) and on any ad-hoc properties. Also, __proto__ refers to
7398 * the pre-compilation prototype, a pigeon-hole problem for instanceof tests.
7399 */
7400 uintN
7401 JSCGObjectList::index(JSObjectBox *objbox)
7403 JS_ASSERT(!objbox->emitLink);
7404 objbox->emitLink = lastbox;
7405 lastbox = objbox;
7406 return length++;
7409 void
7410 JSCGObjectList::finish(JSObjectArray *array)
7412 JSObject **cursor;
7413 JSObjectBox *objbox;
7415 JS_ASSERT(length <= INDEX_LIMIT);
7416 JS_ASSERT(length == array->length);
7418 cursor = array->vector + array->length;
7419 objbox = lastbox;
7420 do {
7421 --cursor;
7422 JS_ASSERT(!*cursor);
7423 *cursor = objbox->object;
7424 } while ((objbox = objbox->emitLink) != NULL);
7425 JS_ASSERT(cursor == array->vector);
7426 }