Rubber-stamped by Brady Eidson.
[webbrowser.git] / JavaScriptCore / bytecompiler / BytecodeGenerator.cpp
blobb0a0877c15ba83b47b4dff470e37e487c99f3d1d
1 /*
2 * Copyright (C) 2008, 2009 Apple Inc. All rights reserved.
3 * Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca>
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions
7 * are met:
9 * 1. Redistributions of source code must retain the above copyright
10 * notice, this list of conditions and the following disclaimer.
11 * 2. Redistributions in binary form must reproduce the above copyright
12 * notice, this list of conditions and the following disclaimer in the
13 * documentation and/or other materials provided with the distribution.
14 * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
15 * its contributors may be used to endorse or promote products derived
16 * from this software without specific prior written permission.
18 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
19 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
20 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
21 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
22 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
23 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
24 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
25 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
26 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
27 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 #include "config.h"
31 #include "BytecodeGenerator.h"
33 #include "BatchedTransitionOptimizer.h"
34 #include "PrototypeFunction.h"
35 #include "JSFunction.h"
36 #include "Interpreter.h"
37 #include "UString.h"
39 using namespace std;
41 namespace JSC {
/*
    The layout of a register frame looks like this:

    For

    function f(x, y) {
        var v1;
        function g() { }
        var v2;
        return (x) * (y);
    }

    assuming (x) and (y) generated temporaries t1 and t2, you would have

    ------------------------------------
    |  x |  y |  g | v2 | v1 | t1 | t2 | <-- value held
    ------------------------------------
    | -5 | -4 | -3 | -2 | -1 | +0 | +1 | <-- register index
    ------------------------------------
    | params->|<-locals      | temps->

    Because temporary registers are allocated in a stack-like fashion, we
    can reclaim them with a simple popping algorithm. The same goes for labels.
    (We never reclaim parameter or local registers, because parameters and
    locals are DontDelete.)

    The register layout before a function call looks like this:

    For

    function f(x, y)
    {
    }

    f(1);

    >                        <------------------------------
    <                        > reserved: call frame  |  1 | <-- value held
    >         >snip<         <------------------------------
    <                        > +0 | +1 | +2 | +3 | +4 | +5 | <-- register index
    >                        <------------------------------
    | params->|<-locals      | temps->

    The call instruction fills in the "call frame" registers. It also pads
    missing arguments at the end of the call:

    >                        <-----------------------------------
    <                        > reserved: call frame  |  1 |  ? | <-- value held ("?" stands for "undefined")
    >         >snip<         <-----------------------------------
    <                        > +0 | +1 | +2 | +3 | +4 | +5 | +6 | <-- register index
    >                        <-----------------------------------
    | params->|<-locals      | temps->

    After filling in missing arguments, the call instruction sets up the new
    stack frame to overlap the end of the old stack frame:

    |---------------------------------->                        <
    | reserved: call frame |  1 |  ?   >                        < <-- value held ("?" stands for "undefined")
    |----------------------------------> >snip<                 <
    | -7 | -6 | -5 | -4 | -3 | -2 | -1 >                        < <-- register index
    |---------------------------------->                        <
    |                        | params->|<-locals       | temps->

    That way, arguments are "copied" into the callee's stack frame for free.

    If the caller supplies too many arguments, this trick doesn't work. The
    extra arguments protrude into space reserved for locals and temporaries.
    In that case, the call instruction makes a real copy of the call frame header,
    along with just the arguments expected by the callee, leaving the original
    call frame header and arguments behind. (The call instruction can't just discard
    extra arguments, because the "arguments" object may access them later.)
    This copying strategy ensures that all named values will be at the indices
    expected by the callee.
*/
#ifndef NDEBUG
// Debug-only switch: when true, generate() dumps the finished bytecode.
// Compiled out entirely in release (NDEBUG) builds.
static bool s_dumpsGeneratedCode = false;
#endif
// Enables or disables dumping of generated bytecode after code generation.
// In release builds the backing flag does not exist, so the parameter is
// deliberately marked unused instead.
void BytecodeGenerator::setDumpsGeneratedCode(bool dumpsGeneratedCode)
{
#ifndef NDEBUG
    s_dumpsGeneratedCode = dumpsGeneratedCode;
#else
    UNUSED_PARAM(dumpsGeneratedCode);
#endif
}
// Returns whether bytecode dumping is enabled. Always false in release
// builds, where the flag is compiled out.
bool BytecodeGenerator::dumpsGeneratedCode()
{
#ifndef NDEBUG
    return s_dumpsGeneratedCode;
#else
    return false;
#endif
}
// Drives bytecode generation for the AST this generator was constructed
// with, then performs post-generation finalization on the code block.
void BytecodeGenerator::generate()
{
    m_codeBlock->setThisRegister(m_thisRegister.index());

    // Walk the AST, emitting bytecode into m_codeBlock.
    m_scopeNode->emitBytecode(*this);

#ifndef NDEBUG
    m_codeBlock->setInstructionCount(m_codeBlock->instructions().size());

    if (s_dumpsGeneratedCode)
        m_codeBlock->dump(m_scopeChain->globalObject()->globalExec());
#endif

    // The symbol table is only needed at runtime when the frame can be
    // inspected dynamically (full scope chain or an arguments object);
    // otherwise drop it to save memory. Eval code always clears it here.
    if ((m_codeType == FunctionCode && !m_codeBlock->needsFullScopeChain() && !m_codeBlock->usesArguments()) || m_codeType == EvalCode)
        symbolTable().clear();

    // Detect the canonical numeric comparator (function (a, b) { return a - b; })
    // by comparing the emitted instruction stream against a known template.
    m_codeBlock->setIsNumericCompareFunction(instructions() == m_globalData->numericCompareFunction(m_scopeChain->globalObject()->globalExec()));

#if !ENABLE(OPCODE_SAMPLING)
    // Exception info can be regenerated on demand, so discard it unless we
    // are in the middle of regenerating it right now.
    if (!m_regeneratingForExceptionInfo && (m_codeType == FunctionCode || m_codeType == EvalCode))
        m_codeBlock->clearExceptionInfo();
#endif

    m_codeBlock->shrinkToFit();
}
166 bool BytecodeGenerator::addVar(const Identifier& ident, bool isConstant, RegisterID*& r0)
168 int index = m_calleeRegisters.size();
169 SymbolTableEntry newEntry(index, isConstant ? ReadOnly : 0);
170 pair<SymbolTable::iterator, bool> result = symbolTable().add(ident.ustring().rep(), newEntry);
172 if (!result.second) {
173 r0 = &registerFor(result.first->second.getIndex());
174 return false;
177 ++m_codeBlock->m_numVars;
178 r0 = newRegister();
179 return true;
182 bool BytecodeGenerator::addGlobalVar(const Identifier& ident, bool isConstant, RegisterID*& r0)
184 int index = m_nextGlobalIndex;
185 SymbolTableEntry newEntry(index, isConstant ? ReadOnly : 0);
186 pair<SymbolTable::iterator, bool> result = symbolTable().add(ident.ustring().rep(), newEntry);
188 if (!result.second)
189 index = result.first->second.getIndex();
190 else {
191 --m_nextGlobalIndex;
192 m_globals.append(index + m_globalVarStorageOffset);
195 r0 = &registerFor(index);
196 return result.second;
199 void BytecodeGenerator::preserveLastVar()
201 if ((m_firstConstantIndex = m_calleeRegisters.size()) != 0)
202 m_lastVar = &m_calleeRegisters.last();
// Constructor for global (program) code. Besides initializing generator
// state, this eagerly declares the program's functions and vars on the
// global object — either as optimized indexed globals (fast path) or as
// ordinary properties when the register file can't hold more globals.
BytecodeGenerator::BytecodeGenerator(ProgramNode* programNode, const Debugger* debugger, const ScopeChain& scopeChain, SymbolTable* symbolTable, ProgramCodeBlock* codeBlock)
    : m_shouldEmitDebugHooks(!!debugger)
    , m_shouldEmitProfileHooks(scopeChain.globalObject()->supportsProfiling())
    , m_scopeChain(&scopeChain)
    , m_symbolTable(symbolTable)
    , m_scopeNode(programNode)
    , m_codeBlock(codeBlock)
    , m_thisRegister(RegisterFile::ProgramCodeThisRegister)
    , m_finallyDepth(0)
    , m_dynamicScopeDepth(0)
    , m_baseScopeDepth(0)
    , m_codeType(GlobalCode)
    , m_nextGlobalIndex(-1)
    , m_nextConstantOffset(0)
    , m_globalConstantIndex(0)
    , m_globalData(&scopeChain.globalObject()->globalExec()->globalData())
    , m_lastOpcodeID(op_end)
    , m_emitNodeDepth(0)
    , m_regeneratingForExceptionInfo(false)
    , m_codeBlockBeingRegeneratedFrom(0)
{
    if (m_shouldEmitDebugHooks)
        m_codeBlock->setNeedsFullScopeChain(true);

    emitOpcode(op_enter);
    codeBlock->setGlobalData(m_globalData);

    // FIXME: Move code that modifies the global object to Interpreter::execute.

    m_codeBlock->m_numParameters = 1; // Allocate space for "this"

    JSGlobalObject* globalObject = scopeChain.globalObject();
    ExecState* exec = globalObject->globalExec();
    RegisterFile* registerFile = &exec->globalData().interpreter->registerFile();

    // Shift register indexes in generated code to elide registers allocated by intermediate stack frames.
    m_globalVarStorageOffset = -RegisterFile::CallFrameHeaderSize - m_codeBlock->m_numParameters - registerFile->size();

    // Add previously defined symbols to bookkeeping.
    m_globals.grow(symbolTable->size());
    SymbolTable::iterator end = symbolTable->end();
    for (SymbolTable::iterator it = symbolTable->begin(); it != end; ++it)
        registerFor(it->second.getIndex()).setIndex(it->second.getIndex() + m_globalVarStorageOffset);

    // Batch the structure transitions caused by the property puts below.
    BatchedTransitionOptimizer optimizer(globalObject);

    const VarStack& varStack = programNode->varStack();
    const FunctionStack& functionStack = programNode->functionStack();
    bool canOptimizeNewGlobals = symbolTable->size() + functionStack.size() + varStack.size() < registerFile->maxGlobals();
    if (canOptimizeNewGlobals) {
        // Shift new symbols so they get stored prior to existing symbols.
        m_nextGlobalIndex -= symbolTable->size();

        for (size_t i = 0; i < functionStack.size(); ++i) {
            FunctionBodyNode* function = functionStack[i];
            globalObject->removeDirect(function->ident()); // Make sure our new function is not shadowed by an old property.
            emitNewFunction(addGlobalVar(function->ident(), false), function);
        }

        Vector<RegisterID*, 32> newVars;
        for (size_t i = 0; i < varStack.size(); ++i)
            if (!globalObject->hasProperty(exec, *varStack[i].first))
                newVars.append(addGlobalVar(*varStack[i].first, varStack[i].second & DeclarationStacks::IsConstant));

        preserveLastVar();

        // Initialize the freshly declared vars to undefined.
        for (size_t i = 0; i < newVars.size(); ++i)
            emitLoad(newVars[i], jsUndefined());
    } else {
        // Slow path: too many globals for indexed storage — install them as
        // regular properties on the global object instead.
        for (size_t i = 0; i < functionStack.size(); ++i) {
            FunctionBodyNode* function = functionStack[i];
            globalObject->putWithAttributes(exec, function->ident(), new (exec) JSFunction(exec, makeFunction(exec, function), scopeChain.node()), DontDelete);
        }
        for (size_t i = 0; i < varStack.size(); ++i) {
            if (globalObject->hasProperty(exec, *varStack[i].first))
                continue;
            int attributes = DontDelete;
            if (varStack[i].second & DeclarationStacks::IsConstant)
                attributes |= ReadOnly;
            globalObject->putWithAttributes(exec, *varStack[i].first, jsUndefined(), attributes);
        }

        preserveLastVar();
    }
}
// Constructor for function code. Sets up the activation/arguments machinery
// when needed, declares nested functions and vars, then lays out parameter
// registers (including the implicit "this") at negative indices.
BytecodeGenerator::BytecodeGenerator(FunctionBodyNode* functionBody, const Debugger* debugger, const ScopeChain& scopeChain, SymbolTable* symbolTable, CodeBlock* codeBlock)
    : m_shouldEmitDebugHooks(!!debugger)
    , m_shouldEmitProfileHooks(scopeChain.globalObject()->supportsProfiling())
    , m_scopeChain(&scopeChain)
    , m_symbolTable(symbolTable)
    , m_scopeNode(functionBody)
    , m_codeBlock(codeBlock)
    , m_finallyDepth(0)
    , m_dynamicScopeDepth(0)
    , m_baseScopeDepth(0)
    , m_codeType(FunctionCode)
    , m_nextConstantOffset(0)
    , m_globalConstantIndex(0)
    , m_globalData(&scopeChain.globalObject()->globalExec()->globalData())
    , m_lastOpcodeID(op_end)
    , m_emitNodeDepth(0)
    , m_regeneratingForExceptionInfo(false)
    , m_codeBlockBeingRegeneratedFrom(0)
{
    if (m_shouldEmitDebugHooks)
        m_codeBlock->setNeedsFullScopeChain(true);

    codeBlock->setGlobalData(m_globalData);

    bool usesArguments = functionBody->usesArguments();
    codeBlock->setUsesArguments(usesArguments);
    if (usesArguments) {
        // Reserve the dedicated slot for the arguments object and declare
        // "arguments" as a local so lookups resolve to it.
        m_argumentsRegister.setIndex(RegisterFile::OptionalCalleeArguments);
        addVar(propertyNames().arguments, false);
    }

    if (m_codeBlock->needsFullScopeChain()) {
        // Allocate a register for the activation object and enter with it.
        ++m_codeBlock->m_numVars;
        m_activationRegisterIndex = newRegister()->index();
        emitOpcode(op_enter_with_activation);
        instructions().append(m_activationRegisterIndex);
    } else
        emitOpcode(op_enter);

    if (usesArguments) {
        emitOpcode(op_init_arguments);

        // The debugger currently retrieves the arguments object from an activation rather than pulling
        // it from a call frame. In the long-term it should stop doing that (<rdar://problem/6911886>),
        // but for now we force eager creation of the arguments object when debugging.
        if (m_shouldEmitDebugHooks)
            emitOpcode(op_create_arguments);
    }

    // Declare nested function declarations; remember their names so that a
    // same-named parameter does not overwrite them (see addParameter).
    const DeclarationStacks::FunctionStack& functionStack = functionBody->functionStack();
    for (size_t i = 0; i < functionStack.size(); ++i) {
        FunctionBodyNode* function = functionStack[i];
        const Identifier& ident = function->ident();
        m_functions.add(ident.ustring().rep());
        emitNewFunction(addVar(ident, false), function);
    }

    const DeclarationStacks::VarStack& varStack = functionBody->varStack();
    for (size_t i = 0; i < varStack.size(); ++i)
        addVar(*varStack[i].first, varStack[i].second & DeclarationStacks::IsConstant);

    // Parameters live at negative indices below the call frame header.
    FunctionParameters& parameters = *functionBody->parameters();
    size_t parameterCount = parameters.size();
    m_nextParameterIndex = -RegisterFile::CallFrameHeaderSize - parameterCount - 1;
    m_parameters.grow(1 + parameterCount); // reserve space for "this"

    // Add "this" as a parameter
    m_thisRegister.setIndex(m_nextParameterIndex);
    ++m_nextParameterIndex;
    ++m_codeBlock->m_numParameters;

    if (functionBody->usesThis() || m_shouldEmitDebugHooks) {
        emitOpcode(op_convert_this);
        instructions().append(m_thisRegister.index());
    }

    for (size_t i = 0; i < parameterCount; ++i)
        addParameter(parameters[i]);

    preserveLastVar();
}
// Constructor for eval code. Function declarations are compiled eagerly and
// variable names are handed to the code block; the interpreter instantiates
// them in the variable environment at execution time.
BytecodeGenerator::BytecodeGenerator(EvalNode* evalNode, const Debugger* debugger, const ScopeChain& scopeChain, SymbolTable* symbolTable, EvalCodeBlock* codeBlock)
    : m_shouldEmitDebugHooks(!!debugger)
    , m_shouldEmitProfileHooks(scopeChain.globalObject()->supportsProfiling())
    , m_scopeChain(&scopeChain)
    , m_symbolTable(symbolTable)
    , m_scopeNode(evalNode)
    , m_codeBlock(codeBlock)
    , m_thisRegister(RegisterFile::ProgramCodeThisRegister)
    , m_finallyDepth(0)
    , m_dynamicScopeDepth(0)
    , m_baseScopeDepth(codeBlock->baseScopeDepth())
    , m_codeType(EvalCode)
    , m_nextConstantOffset(0)
    , m_globalConstantIndex(0)
    , m_globalData(&scopeChain.globalObject()->globalExec()->globalData())
    , m_lastOpcodeID(op_end)
    , m_emitNodeDepth(0)
    , m_regeneratingForExceptionInfo(false)
    , m_codeBlockBeingRegeneratedFrom(0)
{
    // Eval nested inside other scopes (non-zero base scope depth) also needs
    // the full scope chain, as does debugging.
    if (m_shouldEmitDebugHooks || m_baseScopeDepth)
        m_codeBlock->setNeedsFullScopeChain(true);

    emitOpcode(op_enter);
    codeBlock->setGlobalData(m_globalData);
    m_codeBlock->m_numParameters = 1; // Allocate space for "this"

    const DeclarationStacks::FunctionStack& functionStack = evalNode->functionStack();
    for (size_t i = 0; i < functionStack.size(); ++i)
        m_codeBlock->addFunctionDecl(makeFunction(m_globalData, functionStack[i]));

    // Collect declared variable names; the interpreter materializes them.
    const DeclarationStacks::VarStack& varStack = evalNode->varStack();
    unsigned numVariables = varStack.size();
    Vector<Identifier> variables;
    variables.reserveCapacity(numVariables);
    for (size_t i = 0; i < numVariables; ++i)
        variables.append(*varStack[i].first);
    codeBlock->adoptVariables(variables);

    preserveLastVar();
}
415 RegisterID* BytecodeGenerator::addParameter(const Identifier& ident)
417 // Parameters overwrite var declarations, but not function declarations.
418 RegisterID* result = 0;
419 UString::Rep* rep = ident.ustring().rep();
420 if (!m_functions.contains(rep)) {
421 symbolTable().set(rep, m_nextParameterIndex);
422 RegisterID& parameter = registerFor(m_nextParameterIndex);
423 parameter.setIndex(m_nextParameterIndex);
424 result = &parameter;
427 // To maintain the calling convention, we have to allocate unique space for
428 // each parameter, even if the parameter doesn't make it into the symbol table.
429 ++m_nextParameterIndex;
430 ++m_codeBlock->m_numParameters;
431 return result;
434 RegisterID* BytecodeGenerator::registerFor(const Identifier& ident)
436 if (ident == propertyNames().thisIdentifier)
437 return &m_thisRegister;
439 if (!shouldOptimizeLocals())
440 return 0;
442 SymbolTableEntry entry = symbolTable().get(ident.ustring().rep());
443 if (entry.isNull())
444 return 0;
446 if (ident == propertyNames().arguments)
447 createArgumentsIfNecessary();
449 return &registerFor(entry.getIndex());
452 bool BytecodeGenerator::willResolveToArguments(const Identifier& ident)
454 if (ident != propertyNames().arguments)
455 return false;
457 if (!shouldOptimizeLocals())
458 return false;
460 SymbolTableEntry entry = symbolTable().get(ident.ustring().rep());
461 if (entry.isNull())
462 return false;
464 if (m_codeBlock->usesArguments() && m_codeType == FunctionCode)
465 return true;
467 return false;
470 RegisterID* BytecodeGenerator::uncheckedRegisterForArguments()
472 ASSERT(willResolveToArguments(propertyNames().arguments));
474 SymbolTableEntry entry = symbolTable().get(propertyNames().arguments.ustring().rep());
475 ASSERT(!entry.isNull());
476 return &registerFor(entry.getIndex());
479 RegisterID* BytecodeGenerator::constRegisterFor(const Identifier& ident)
481 if (m_codeType == EvalCode)
482 return 0;
484 SymbolTableEntry entry = symbolTable().get(ident.ustring().rep());
485 if (entry.isNull())
486 return 0;
488 return &registerFor(entry.getIndex());
491 bool BytecodeGenerator::isLocal(const Identifier& ident)
493 if (ident == propertyNames().thisIdentifier)
494 return true;
496 return shouldOptimizeLocals() && symbolTable().contains(ident.ustring().rep());
499 bool BytecodeGenerator::isLocalConstant(const Identifier& ident)
501 return symbolTable().get(ident.ustring().rep()).isReadOnly();
504 RegisterID* BytecodeGenerator::newRegister()
506 m_calleeRegisters.append(m_calleeRegisters.size());
507 m_codeBlock->m_numCalleeRegisters = max<int>(m_codeBlock->m_numCalleeRegisters, m_calleeRegisters.size());
508 return &m_calleeRegisters.last();
511 RegisterID* BytecodeGenerator::newTemporary()
513 // Reclaim free register IDs.
514 while (m_calleeRegisters.size() && !m_calleeRegisters.last().refCount())
515 m_calleeRegisters.removeLast();
517 RegisterID* result = newRegister();
518 result->setTemporary();
519 return result;
522 RegisterID* BytecodeGenerator::highestUsedRegister()
524 size_t count = m_codeBlock->m_numCalleeRegisters;
525 while (m_calleeRegisters.size() < count)
526 newRegister();
527 return &m_calleeRegisters.last();
530 PassRefPtr<LabelScope> BytecodeGenerator::newLabelScope(LabelScope::Type type, const Identifier* name)
532 // Reclaim free label scopes.
533 while (m_labelScopes.size() && !m_labelScopes.last().refCount())
534 m_labelScopes.removeLast();
536 // Allocate new label scope.
537 LabelScope scope(type, name, scopeDepth(), newLabel(), type == LabelScope::Loop ? newLabel() : PassRefPtr<Label>()); // Only loops have continue targets.
538 m_labelScopes.append(scope);
539 return &m_labelScopes.last();
542 PassRefPtr<Label> BytecodeGenerator::newLabel()
544 // Reclaim free label IDs.
545 while (m_labels.size() && !m_labels.last().refCount())
546 m_labels.removeLast();
548 // Allocate new label ID.
549 m_labels.append(m_codeBlock);
550 return &m_labels.last();
553 PassRefPtr<Label> BytecodeGenerator::emitLabel(Label* l0)
555 unsigned newLabelIndex = instructions().size();
556 l0->setLocation(newLabelIndex);
558 if (m_codeBlock->numberOfJumpTargets()) {
559 unsigned lastLabelIndex = m_codeBlock->lastJumpTarget();
560 ASSERT(lastLabelIndex <= newLabelIndex);
561 if (newLabelIndex == lastLabelIndex) {
562 // Peephole optimizations have already been disabled by emitting the last label
563 return l0;
567 m_codeBlock->addJumpTarget(newLabelIndex);
569 // This disables peephole optimizations when an instruction is a jump target
570 m_lastOpcodeID = op_end;
571 return l0;
// Appends the interpreter's representation of an opcode to the instruction
// stream and remembers it for peephole optimizations.
void BytecodeGenerator::emitOpcode(OpcodeID opcodeID)
{
    instructions().append(globalData()->interpreter->getOpcode(opcodeID));
    m_lastOpcodeID = opcodeID;
}
580 void BytecodeGenerator::retrieveLastBinaryOp(int& dstIndex, int& src1Index, int& src2Index)
582 ASSERT(instructions().size() >= 4);
583 size_t size = instructions().size();
584 dstIndex = instructions().at(size - 3).u.operand;
585 src1Index = instructions().at(size - 2).u.operand;
586 src2Index = instructions().at(size - 1).u.operand;
589 void BytecodeGenerator::retrieveLastUnaryOp(int& dstIndex, int& srcIndex)
591 ASSERT(instructions().size() >= 3);
592 size_t size = instructions().size();
593 dstIndex = instructions().at(size - 2).u.operand;
594 srcIndex = instructions().at(size - 1).u.operand;
597 void ALWAYS_INLINE BytecodeGenerator::rewindBinaryOp()
599 ASSERT(instructions().size() >= 4);
600 instructions().shrink(instructions().size() - 4);
603 void ALWAYS_INLINE BytecodeGenerator::rewindUnaryOp()
605 ASSERT(instructions().size() >= 3);
606 instructions().shrink(instructions().size() - 3);
609 PassRefPtr<Label> BytecodeGenerator::emitJump(Label* target)
611 size_t begin = instructions().size();
612 emitOpcode(target->isForward() ? op_jmp : op_loop);
613 instructions().append(target->bind(begin, instructions().size()));
614 return target;
// Emits a conditional "jump if true". When the condition register is a
// dead temporary produced by the immediately preceding comparison, the
// comparison and the branch are fused into a single opcode.
PassRefPtr<Label> BytecodeGenerator::emitJumpIfTrue(RegisterID* cond, Label* target)
{
    if (m_lastOpcodeID == op_less) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        // Fuse only if the branch consumes exactly the compare's result and
        // nothing else references it.
        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(target->isForward() ? op_jless : op_loop_if_less);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_lesseq && !target->isForward()) {
        // op_lesseq only has a fused backward (loop) form.
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_loop_if_lesseq);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jeq_null);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jneq_null);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    }

    // Generic form: test the condition register directly.
    size_t begin = instructions().size();

    emitOpcode(target->isForward() ? op_jtrue : op_loop_if_true);
    instructions().append(cond->index());
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}
// Emits a conditional "jump if false". Like emitJumpIfTrue, fuses the branch
// with a preceding compare/not/null-check whose result register is a dead
// temporary, inverting the sense of the fused opcode as needed.
PassRefPtr<Label> BytecodeGenerator::emitJumpIfFalse(RegisterID* cond, Label* target)
{
    if (m_lastOpcodeID == op_less && target->isForward()) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jnless); // jump-if-NOT-less inverts the compare
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_lesseq && target->isForward()) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jnlesseq);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_not) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            // jump-if-false(not x) == jump-if-true(x)
            size_t begin = instructions().size();
            emitOpcode(target->isForward() ? op_jtrue : op_loop_if_true);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jneq_null); // inverted null check
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jeq_null); // inverted null check
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    }

    // Generic form: test the condition register directly.
    size_t begin = instructions().size();
    emitOpcode(target->isForward() ? op_jfalse : op_loop_if_false);
    instructions().append(cond->index());
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}
// Emits a jump taken when cond does not hold the built-in Function.prototype.call.
// The comparison is by raw pointer against the global object's cached callFunction.
PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionCall(RegisterID* cond, Label* target)
{
    size_t begin = instructions().size();

    emitOpcode(op_jneq_ptr);
    instructions().append(cond->index());
    instructions().append(m_scopeChain->globalObject()->d()->callFunction);
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}
// Emits a jump taken when cond does not hold the built-in Function.prototype.apply.
// The comparison is by raw pointer against the global object's cached applyFunction.
PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionApply(RegisterID* cond, Label* target)
{
    size_t begin = instructions().size();

    emitOpcode(op_jneq_ptr);
    instructions().append(cond->index());
    instructions().append(m_scopeChain->globalObject()->d()->applyFunction);
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}
805 unsigned BytecodeGenerator::addConstant(const Identifier& ident)
807 UString::Rep* rep = ident.ustring().rep();
808 pair<IdentifierMap::iterator, bool> result = m_identifierMap.add(rep, m_codeBlock->numberOfIdentifiers());
809 if (result.second) // new entry
810 m_codeBlock->addIdentifier(Identifier(m_globalData, rep));
812 return result.first->second;
815 RegisterID* BytecodeGenerator::addConstantValue(JSValue v)
817 int index = m_nextConstantOffset;
819 pair<JSValueMap::iterator, bool> result = m_jsValueMap.add(JSValue::encode(v), m_nextConstantOffset);
820 if (result.second) {
821 m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset);
822 ++m_nextConstantOffset;
823 m_codeBlock->addConstantRegister(JSValue(v));
824 } else
825 index = result.first->second;
827 return &m_constantPoolRegisters[index];
// Registers a regular expression with the code block; returns its index.
unsigned BytecodeGenerator::addRegExp(RegExp* r)
{
    return m_codeBlock->addRegExp(r);
}
// Emits op_mov: copies src into dst. Operand layout: dst, src.
RegisterID* BytecodeGenerator::emitMove(RegisterID* dst, RegisterID* src)
{
    emitOpcode(op_mov);
    instructions().append(dst->index());
    instructions().append(src->index());
    return dst;
}
// Emits a generic unary op. Operand layout: dst, src.
RegisterID* BytecodeGenerator::emitUnaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src)
{
    emitOpcode(opcodeID);
    instructions().append(dst->index());
    instructions().append(src->index());
    return dst;
}
// Emits op_pre_inc: increments srcDst in place. Single operand: srcDst.
RegisterID* BytecodeGenerator::emitPreInc(RegisterID* srcDst)
{
    emitOpcode(op_pre_inc);
    instructions().append(srcDst->index());
    return srcDst;
}
// Emits op_pre_dec: decrements srcDst in place. Single operand: srcDst.
RegisterID* BytecodeGenerator::emitPreDec(RegisterID* srcDst)
{
    emitOpcode(op_pre_dec);
    instructions().append(srcDst->index());
    return srcDst;
}
// Emits op_post_inc: dst receives the old value, srcDst is incremented.
// Operand layout: dst, srcDst.
RegisterID* BytecodeGenerator::emitPostInc(RegisterID* dst, RegisterID* srcDst)
{
    emitOpcode(op_post_inc);
    instructions().append(dst->index());
    instructions().append(srcDst->index());
    return dst;
}
// Emits op_post_dec: dst receives the old value, srcDst is decremented.
// Operand layout: dst, srcDst.
RegisterID* BytecodeGenerator::emitPostDec(RegisterID* dst, RegisterID* srcDst)
{
    emitOpcode(op_post_dec);
    instructions().append(dst->index());
    instructions().append(srcDst->index());
    return dst;
}
881 RegisterID* BytecodeGenerator::emitBinaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2, OperandTypes types)
883 emitOpcode(opcodeID);
884 instructions().append(dst->index());
885 instructions().append(src1->index());
886 instructions().append(src2->index());
888 if (opcodeID == op_bitor || opcodeID == op_bitand || opcodeID == op_bitxor ||
889 opcodeID == op_add || opcodeID == op_mul || opcodeID == op_sub || opcodeID == op_div)
890 instructions().append(types.toInt());
892 return dst;
895 RegisterID* BytecodeGenerator::emitEqualityOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2)
897 if (m_lastOpcodeID == op_typeof) {
898 int dstIndex;
899 int srcIndex;
901 retrieveLastUnaryOp(dstIndex, srcIndex);
903 if (src1->index() == dstIndex
904 && src1->isTemporary()
905 && m_codeBlock->isConstantRegisterIndex(src2->index())
906 && m_codeBlock->constantRegister(src2->index()).jsValue().isString()) {
907 const UString& value = asString(m_codeBlock->constantRegister(src2->index()).jsValue())->tryGetValue();
908 if (value == "undefined") {
909 rewindUnaryOp();
910 emitOpcode(op_is_undefined);
911 instructions().append(dst->index());
912 instructions().append(srcIndex);
913 return dst;
915 if (value == "boolean") {
916 rewindUnaryOp();
917 emitOpcode(op_is_boolean);
918 instructions().append(dst->index());
919 instructions().append(srcIndex);
920 return dst;
922 if (value == "number") {
923 rewindUnaryOp();
924 emitOpcode(op_is_number);
925 instructions().append(dst->index());
926 instructions().append(srcIndex);
927 return dst;
929 if (value == "string") {
930 rewindUnaryOp();
931 emitOpcode(op_is_string);
932 instructions().append(dst->index());
933 instructions().append(srcIndex);
934 return dst;
936 if (value == "object") {
937 rewindUnaryOp();
938 emitOpcode(op_is_object);
939 instructions().append(dst->index());
940 instructions().append(srcIndex);
941 return dst;
943 if (value == "function") {
944 rewindUnaryOp();
945 emitOpcode(op_is_function);
946 instructions().append(dst->index());
947 instructions().append(srcIndex);
948 return dst;
953 emitOpcode(opcodeID);
954 instructions().append(dst->index());
955 instructions().append(src1->index());
956 instructions().append(src2->index());
957 return dst;
960 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, bool b)
962 return emitLoad(dst, jsBoolean(b));
// Loads a numeric constant, caching the boxed number per distinct double so
// equal literals share one JSValue.
RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, double number)
{
    // FIXME: Our hash tables won't hold infinity, so we make a new JSNumberCell each time.
    // Later we can do the extra work to handle that like the other cases.
    if (number == HashTraits<double>::emptyValue() || HashTraits<double>::isDeletedValue(number))
        return emitLoad(dst, jsNumber(globalData(), number));
    // Cache hit returns the previously boxed value; miss boxes it once.
    JSValue& valueInMap = m_numberMap.add(number, JSValue()).first->second;
    if (!valueInMap)
        valueInMap = jsNumber(globalData(), number);
    return emitLoad(dst, valueInMap);
}
977 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, const Identifier& identifier)
979 JSString*& stringInMap = m_stringMap.add(identifier.ustring().rep(), 0).first->second;
980 if (!stringInMap)
981 stringInMap = jsOwnedString(globalData(), identifier.ustring());
982 return emitLoad(dst, JSValue(stringInMap));
985 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, JSValue v)
987 RegisterID* constantID = addConstantValue(v);
988 if (dst)
989 return emitMove(dst, constantID);
990 return constantID;
// Attempts to statically resolve 'property' against the scope chain.
// Returns true when the lookup can be optimized: 'stackDepth' is the number
// of scopes to skip and 'index' the slot within the resolved variable object
// (or missingSymbolMarker() when only the skip count is known). Returns false
// when a fully dynamic resolve is required. 'globalObject' is set when the
// relevant scope is known to be the last one on the chain.
bool BytecodeGenerator::findScopedProperty(const Identifier& property, int& index, size_t& stackDepth, bool forWriting, JSObject*& globalObject)
{
    // Cases where we cannot statically optimize the lookup.
    if (property == propertyNames().arguments || !canOptimizeNonLocals()) {
        stackDepth = 0;
        index = missingSymbolMarker();

        if (shouldOptimizeLocals() && m_codeType == GlobalCode) {
            // Global code has a one-element scope chain: the global object.
            ScopeChainIterator iter = m_scopeChain->begin();
            globalObject = *iter;
            ASSERT((++iter) == m_scopeChain->end());
        }

        return false;
    }

    size_t depth = 0;

    ScopeChainIterator iter = m_scopeChain->begin();
    ScopeChainIterator end = m_scopeChain->end();
    for (; iter != end; ++iter, ++depth) {
        JSObject* currentScope = *iter;
        if (!currentScope->isVariableObject())
            break;
        JSVariableObject* currentVariableObject = static_cast<JSVariableObject*>(currentScope);
        SymbolTableEntry entry = currentVariableObject->symbolTable().get(property.ustring().rep());

        // Found the property
        if (!entry.isNull()) {
            if (entry.isReadOnly() && forWriting) {
                // Writes to read-only slots must take the slow path.
                stackDepth = 0;
                index = missingSymbolMarker();
                if (++iter == end)
                    globalObject = currentVariableObject;
                return false;
            }
            stackDepth = depth;
            index = entry.getIndex();
            if (++iter == end)
                globalObject = currentVariableObject;
            return true;
        }
        if (currentVariableObject->isDynamicScope())
            break;
    }

    // Can't locate the property but we're able to avoid a few lookups.
    stackDepth = depth;
    index = missingSymbolMarker();
    JSObject* scope = *iter;
    if (++iter == end)
        globalObject = scope;
    return true;
}
1047 RegisterID* BytecodeGenerator::emitInstanceOf(RegisterID* dst, RegisterID* value, RegisterID* base, RegisterID* basePrototype)
1049 emitOpcode(op_instanceof);
1050 instructions().append(dst->index());
1051 instructions().append(value->index());
1052 instructions().append(base->index());
1053 instructions().append(basePrototype->index());
1054 return dst;
// Emits the most specific resolve opcode the static analysis permits:
// op_resolve (fully dynamic), op_resolve_global (known global, with cache
// operand slots), a direct scoped-variable read, or op_resolve_skip.
RegisterID* BytecodeGenerator::emitResolve(RegisterID* dst, const Identifier& property)
{
    size_t depth = 0;
    int index = 0;
    JSObject* globalObject = 0;
    if (!findScopedProperty(property, index, depth, false, globalObject) && !globalObject) {
        // We can't optimise at all :-(
        emitOpcode(op_resolve);
        instructions().append(dst->index());
        instructions().append(addConstant(property));
        return dst;
    }

    if (globalObject) {
        bool forceGlobalResolve = false;
        if (m_regeneratingForExceptionInfo) {
            // When regenerating bytecode for exception info, mirror the resolve
            // shape the original code block used at this bytecode offset.
#if ENABLE(JIT)
            forceGlobalResolve = m_codeBlockBeingRegeneratedFrom->hasGlobalResolveInfoAtBytecodeOffset(instructions().size());
#else
            forceGlobalResolve = m_codeBlockBeingRegeneratedFrom->hasGlobalResolveInstructionAtBytecodeOffset(instructions().size());
#endif
        }

        if (index != missingSymbolMarker() && !forceGlobalResolve) {
            // Directly index the property lookup across multiple scopes.
            return emitGetScopedVar(dst, depth, index, globalObject);
        }

#if ENABLE(JIT)
        m_codeBlock->addGlobalResolveInfo(instructions().size());
#else
        m_codeBlock->addGlobalResolveInstruction(instructions().size());
#endif
        emitOpcode(op_resolve_global);
        instructions().append(dst->index());
        instructions().append(globalObject);
        instructions().append(addConstant(property));
        instructions().append(0); // Placeholder operand.
        instructions().append(0); // Placeholder operand.
        return dst;
    }

    if (index != missingSymbolMarker()) {
        // Directly index the property lookup across multiple scopes.
        return emitGetScopedVar(dst, depth, index, globalObject);
    }

    // In this case we are at least able to drop a few scope chains from the
    // lookup chain, although we still need to hash from then on.
    emitOpcode(op_resolve_skip);
    instructions().append(dst->index());
    instructions().append(addConstant(property));
    instructions().append(depth);
    return dst;
}
1113 RegisterID* BytecodeGenerator::emitGetScopedVar(RegisterID* dst, size_t depth, int index, JSValue globalObject)
1115 if (globalObject) {
1116 emitOpcode(op_get_global_var);
1117 instructions().append(dst->index());
1118 instructions().append(asCell(globalObject));
1119 instructions().append(index);
1120 return dst;
1123 emitOpcode(op_get_scoped_var);
1124 instructions().append(dst->index());
1125 instructions().append(index);
1126 instructions().append(depth);
1127 return dst;
1130 RegisterID* BytecodeGenerator::emitPutScopedVar(size_t depth, int index, RegisterID* value, JSValue globalObject)
1132 if (globalObject) {
1133 emitOpcode(op_put_global_var);
1134 instructions().append(asCell(globalObject));
1135 instructions().append(index);
1136 instructions().append(value->index());
1137 return value;
1139 emitOpcode(op_put_scoped_var);
1140 instructions().append(index);
1141 instructions().append(depth);
1142 instructions().append(value->index());
1143 return value;
1146 RegisterID* BytecodeGenerator::emitResolveBase(RegisterID* dst, const Identifier& property)
1148 size_t depth = 0;
1149 int index = 0;
1150 JSObject* globalObject = 0;
1151 findScopedProperty(property, index, depth, false, globalObject);
1152 if (!globalObject) {
1153 // We can't optimise at all :-(
1154 emitOpcode(op_resolve_base);
1155 instructions().append(dst->index());
1156 instructions().append(addConstant(property));
1157 return dst;
1160 // Global object is the base
1161 return emitLoad(dst, JSValue(globalObject));
// Resolves 'property', producing both its base object (into baseDst) and its
// value (into propDst). Falls back to op_resolve_with_base when the lookup
// cannot be optimized; otherwise the global object is known to be the base.
RegisterID* BytecodeGenerator::emitResolveWithBase(RegisterID* baseDst, RegisterID* propDst, const Identifier& property)
{
    size_t depth = 0;
    int index = 0;
    JSObject* globalObject = 0;
    if (!findScopedProperty(property, index, depth, false, globalObject) || !globalObject) {
        // We can't optimise at all :-(
        emitOpcode(op_resolve_with_base);
        instructions().append(baseDst->index());
        instructions().append(propDst->index());
        instructions().append(addConstant(property));
        return baseDst;
    }

    bool forceGlobalResolve = false;
    if (m_regeneratingForExceptionInfo) {
        // When regenerating bytecode for exception info, mirror the resolve
        // shape the original code block used at this bytecode offset.
#if ENABLE(JIT)
        forceGlobalResolve = m_codeBlockBeingRegeneratedFrom->hasGlobalResolveInfoAtBytecodeOffset(instructions().size());
#else
        forceGlobalResolve = m_codeBlockBeingRegeneratedFrom->hasGlobalResolveInstructionAtBytecodeOffset(instructions().size());
#endif
    }

    // Global object is the base
    emitLoad(baseDst, JSValue(globalObject));

    if (index != missingSymbolMarker() && !forceGlobalResolve) {
        // Directly index the property lookup across multiple scopes.
        emitGetScopedVar(propDst, depth, index, globalObject);
        return baseDst;
    }

#if ENABLE(JIT)
    m_codeBlock->addGlobalResolveInfo(instructions().size());
#else
    m_codeBlock->addGlobalResolveInstruction(instructions().size());
#endif
    emitOpcode(op_resolve_global);
    instructions().append(propDst->index());
    instructions().append(globalObject);
    instructions().append(addConstant(property));
    instructions().append(0); // Placeholder operand.
    instructions().append(0); // Placeholder operand.
    return baseDst;
}
// Emits the op_method_check marker opcode.
void BytecodeGenerator::emitMethodCheck()
{
    emitOpcode(op_method_check);
}
1215 RegisterID* BytecodeGenerator::emitGetById(RegisterID* dst, RegisterID* base, const Identifier& property)
1217 #if ENABLE(JIT)
1218 m_codeBlock->addStructureStubInfo(StructureStubInfo(access_get_by_id));
1219 #else
1220 m_codeBlock->addPropertyAccessInstruction(instructions().size());
1221 #endif
1223 emitOpcode(op_get_by_id);
1224 instructions().append(dst->index());
1225 instructions().append(base->index());
1226 instructions().append(addConstant(property));
1227 instructions().append(0);
1228 instructions().append(0);
1229 instructions().append(0);
1230 instructions().append(0);
1231 return dst;
1234 RegisterID* BytecodeGenerator::emitPutById(RegisterID* base, const Identifier& property, RegisterID* value)
1236 #if ENABLE(JIT)
1237 m_codeBlock->addStructureStubInfo(StructureStubInfo(access_put_by_id));
1238 #else
1239 m_codeBlock->addPropertyAccessInstruction(instructions().size());
1240 #endif
1242 emitOpcode(op_put_by_id);
1243 instructions().append(base->index());
1244 instructions().append(addConstant(property));
1245 instructions().append(value->index());
1246 instructions().append(0);
1247 instructions().append(0);
1248 instructions().append(0);
1249 instructions().append(0);
1250 return value;
1253 RegisterID* BytecodeGenerator::emitPutGetter(RegisterID* base, const Identifier& property, RegisterID* value)
1255 emitOpcode(op_put_getter);
1256 instructions().append(base->index());
1257 instructions().append(addConstant(property));
1258 instructions().append(value->index());
1259 return value;
1262 RegisterID* BytecodeGenerator::emitPutSetter(RegisterID* base, const Identifier& property, RegisterID* value)
1264 emitOpcode(op_put_setter);
1265 instructions().append(base->index());
1266 instructions().append(addConstant(property));
1267 instructions().append(value->index());
1268 return value;
1271 RegisterID* BytecodeGenerator::emitDeleteById(RegisterID* dst, RegisterID* base, const Identifier& property)
1273 emitOpcode(op_del_by_id);
1274 instructions().append(dst->index());
1275 instructions().append(base->index());
1276 instructions().append(addConstant(property));
1277 return dst;
1280 RegisterID* BytecodeGenerator::emitGetByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
1282 for (size_t i = m_forInContextStack.size(); i > 0; i--) {
1283 ForInContext& context = m_forInContextStack[i - 1];
1284 if (context.propertyRegister == property) {
1285 emitOpcode(op_get_by_pname);
1286 instructions().append(dst->index());
1287 instructions().append(base->index());
1288 instructions().append(property->index());
1289 instructions().append(context.expectedSubscriptRegister->index());
1290 instructions().append(context.iterRegister->index());
1291 instructions().append(context.indexRegister->index());
1292 return dst;
1295 emitOpcode(op_get_by_val);
1296 instructions().append(dst->index());
1297 instructions().append(base->index());
1298 instructions().append(property->index());
1299 return dst;
1302 RegisterID* BytecodeGenerator::emitPutByVal(RegisterID* base, RegisterID* property, RegisterID* value)
1304 emitOpcode(op_put_by_val);
1305 instructions().append(base->index());
1306 instructions().append(property->index());
1307 instructions().append(value->index());
1308 return value;
1311 RegisterID* BytecodeGenerator::emitDeleteByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
1313 emitOpcode(op_del_by_val);
1314 instructions().append(dst->index());
1315 instructions().append(base->index());
1316 instructions().append(property->index());
1317 return dst;
1320 RegisterID* BytecodeGenerator::emitPutByIndex(RegisterID* base, unsigned index, RegisterID* value)
1322 emitOpcode(op_put_by_index);
1323 instructions().append(base->index());
1324 instructions().append(index);
1325 instructions().append(value->index());
1326 return value;
1329 RegisterID* BytecodeGenerator::emitNewObject(RegisterID* dst)
1331 emitOpcode(op_new_object);
1332 instructions().append(dst->index());
1333 return dst;
1336 RegisterID* BytecodeGenerator::emitNewArray(RegisterID* dst, ElementNode* elements)
1338 Vector<RefPtr<RegisterID>, 16> argv;
1339 for (ElementNode* n = elements; n; n = n->next()) {
1340 if (n->elision())
1341 break;
1342 argv.append(newTemporary());
1343 // op_new_array requires the initial values to be a sequential range of registers
1344 ASSERT(argv.size() == 1 || argv[argv.size() - 1]->index() == argv[argv.size() - 2]->index() + 1);
1345 emitNode(argv.last().get(), n->value());
1347 emitOpcode(op_new_array);
1348 instructions().append(dst->index());
1349 instructions().append(argv.size() ? argv[0]->index() : 0); // argv
1350 instructions().append(argv.size()); // argc
1351 return dst;
1354 RegisterID* BytecodeGenerator::emitNewFunction(RegisterID* dst, FunctionBodyNode* function)
1356 unsigned index = m_codeBlock->addFunctionDecl(makeFunction(m_globalData, function));
1358 emitOpcode(op_new_func);
1359 instructions().append(dst->index());
1360 instructions().append(index);
1361 return dst;
1364 RegisterID* BytecodeGenerator::emitNewRegExp(RegisterID* dst, RegExp* regExp)
1366 emitOpcode(op_new_regexp);
1367 instructions().append(dst->index());
1368 instructions().append(addRegExp(regExp));
1369 return dst;
1373 RegisterID* BytecodeGenerator::emitNewFunctionExpression(RegisterID* r0, FuncExprNode* n)
1375 FunctionBodyNode* function = n->body();
1376 unsigned index = m_codeBlock->addFunctionExpr(makeFunction(m_globalData, function));
1378 emitOpcode(op_new_func_exp);
1379 instructions().append(r0->index());
1380 instructions().append(index);
1381 return r0;
// Convenience wrapper: a plain function call is op_call.
RegisterID* BytecodeGenerator::emitCall(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, ArgumentsNode* argumentsNode, unsigned divot, unsigned startOffset, unsigned endOffset)
{
    return emitCall(op_call, dst, func, thisRegister, argumentsNode, divot, startOffset, endOffset);
}
1389 void BytecodeGenerator::createArgumentsIfNecessary()
1391 if (m_codeBlock->usesArguments() && m_codeType == FunctionCode)
1392 emitOpcode(op_create_arguments);
1395 RegisterID* BytecodeGenerator::emitCallEval(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, ArgumentsNode* argumentsNode, unsigned divot, unsigned startOffset, unsigned endOffset)
1397 createArgumentsIfNecessary();
1398 return emitCall(op_call_eval, dst, func, thisRegister, argumentsNode, divot, startOffset, endOffset);
// Shared implementation for op_call and op_call_eval: evaluates 'this' and
// the arguments into a sequential register range, reserves call-frame space,
// and emits the call (with profiler hooks when enabled).
RegisterID* BytecodeGenerator::emitCall(OpcodeID opcodeID, RegisterID* dst, RegisterID* func, RegisterID* thisRegister, ArgumentsNode* argumentsNode, unsigned divot, unsigned startOffset, unsigned endOffset)
{
    ASSERT(opcodeID == op_call || opcodeID == op_call_eval);
    ASSERT(func->refCount());
    ASSERT(thisRegister->refCount());

    RegisterID* originalFunc = func;
    if (m_shouldEmitProfileHooks) {
        // If codegen decided to recycle func as this call's destination register,
        // we need to undo that optimization here so that func will still be around
        // for the sake of op_profile_did_call.
        if (dst == func) {
            RefPtr<RegisterID> movedThisRegister = emitMove(newTemporary(), thisRegister);
            RefPtr<RegisterID> movedFunc = emitMove(thisRegister, func);

            // releaseRef() leaks a ref; the matching derefs happen below when
            // dst == originalFunc.
            thisRegister = movedThisRegister.release().releaseRef();
            func = movedFunc.release().releaseRef();
        }
    }

    // Generate code for arguments.
    Vector<RefPtr<RegisterID>, 16> argv;
    argv.append(thisRegister);
    for (ArgumentListNode* n = argumentsNode->m_listNode; n; n = n->m_next) {
        argv.append(newTemporary());
        // op_call requires the arguments to be a sequential range of registers
        ASSERT(argv[argv.size() - 1]->index() == argv[argv.size() - 2]->index() + 1);
        emitNode(argv.last().get(), n);
    }

    // Reserve space for call frame.
    Vector<RefPtr<RegisterID>, RegisterFile::CallFrameHeaderSize> callFrame;
    for (int i = 0; i < RegisterFile::CallFrameHeaderSize; ++i)
        callFrame.append(newTemporary());

    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_will_call);
        instructions().append(func->index());

#if ENABLE(JIT)
        m_codeBlock->addFunctionRegisterInfo(instructions().size(), func->index());
#endif
    }

    // Attribute exceptions raised by the call to the call site.
    emitExpressionInfo(divot, startOffset, endOffset);

#if ENABLE(JIT)
    m_codeBlock->addCallLinkInfo();
#endif

    // Emit call.
    emitOpcode(opcodeID);
    instructions().append(dst->index()); // dst
    instructions().append(func->index()); // func
    instructions().append(argv.size()); // argCount
    instructions().append(argv[0]->index() + argv.size() + RegisterFile::CallFrameHeaderSize); // registerOffset

    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_did_call);
        instructions().append(func->index());

        if (dst == originalFunc) {
            // Balance the refs leaked by releaseRef() above.
            thisRegister->deref();
            func->deref();
        }
    }

    return dst;
}
1471 RegisterID* BytecodeGenerator::emitLoadVarargs(RegisterID* argCountDst, RegisterID* arguments)
1473 ASSERT(argCountDst->index() < arguments->index());
1474 emitOpcode(op_load_varargs);
1475 instructions().append(argCountDst->index());
1476 instructions().append(arguments->index());
1477 return argCountDst;
1480 RegisterID* BytecodeGenerator::emitCallVarargs(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, RegisterID* argCountRegister, unsigned divot, unsigned startOffset, unsigned endOffset)
1482 ASSERT(func->refCount());
1483 ASSERT(thisRegister->refCount());
1484 ASSERT(dst != func);
1485 if (m_shouldEmitProfileHooks) {
1486 emitOpcode(op_profile_will_call);
1487 instructions().append(func->index());
1489 #if ENABLE(JIT)
1490 m_codeBlock->addFunctionRegisterInfo(instructions().size(), func->index());
1491 #endif
1494 emitExpressionInfo(divot, startOffset, endOffset);
1496 // Emit call.
1497 emitOpcode(op_call_varargs);
1498 instructions().append(dst->index()); // dst
1499 instructions().append(func->index()); // func
1500 instructions().append(argCountRegister->index()); // arg count
1501 instructions().append(thisRegister->index() + RegisterFile::CallFrameHeaderSize); // initial registerOffset
1502 if (m_shouldEmitProfileHooks) {
1503 emitOpcode(op_profile_did_call);
1504 instructions().append(func->index());
1506 return dst;
1509 RegisterID* BytecodeGenerator::emitReturn(RegisterID* src)
1511 if (m_codeBlock->needsFullScopeChain()) {
1512 emitOpcode(op_tear_off_activation);
1513 instructions().append(m_activationRegisterIndex);
1514 } else if (m_codeBlock->usesArguments() && m_codeBlock->m_numParameters > 1)
1515 emitOpcode(op_tear_off_arguments);
1517 return emitUnaryNoDstOp(op_ret, src);
1520 RegisterID* BytecodeGenerator::emitUnaryNoDstOp(OpcodeID opcodeID, RegisterID* src)
1522 emitOpcode(opcodeID);
1523 instructions().append(src->index());
1524 return src;
1527 RegisterID* BytecodeGenerator::emitConstruct(RegisterID* dst, RegisterID* func, ArgumentsNode* argumentsNode, unsigned divot, unsigned startOffset, unsigned endOffset)
1529 ASSERT(func->refCount());
1531 RegisterID* originalFunc = func;
1532 if (m_shouldEmitProfileHooks) {
1533 // If codegen decided to recycle func as this call's destination register,
1534 // we need to undo that optimization here so that func will still be around
1535 // for the sake of op_profile_did_call.
1536 if (dst == func) {
1537 RefPtr<RegisterID> movedFunc = emitMove(newTemporary(), func);
1538 func = movedFunc.release().releaseRef();
1542 RefPtr<RegisterID> funcProto = newTemporary();
1544 // Generate code for arguments.
1545 Vector<RefPtr<RegisterID>, 16> argv;
1546 argv.append(newTemporary()); // reserve space for "this"
1547 for (ArgumentListNode* n = argumentsNode ? argumentsNode->m_listNode : 0; n; n = n->m_next) {
1548 argv.append(newTemporary());
1549 // op_construct requires the arguments to be a sequential range of registers
1550 ASSERT(argv[argv.size() - 1]->index() == argv[argv.size() - 2]->index() + 1);
1551 emitNode(argv.last().get(), n);
1554 if (m_shouldEmitProfileHooks) {
1555 emitOpcode(op_profile_will_call);
1556 instructions().append(func->index());
1559 // Load prototype.
1560 emitExpressionInfo(divot, startOffset, endOffset);
1561 emitGetByIdExceptionInfo(op_construct);
1562 emitGetById(funcProto.get(), func, globalData()->propertyNames->prototype);
1564 // Reserve space for call frame.
1565 Vector<RefPtr<RegisterID>, RegisterFile::CallFrameHeaderSize> callFrame;
1566 for (int i = 0; i < RegisterFile::CallFrameHeaderSize; ++i)
1567 callFrame.append(newTemporary());
1569 emitExpressionInfo(divot, startOffset, endOffset);
1571 #if ENABLE(JIT)
1572 m_codeBlock->addCallLinkInfo();
1573 #endif
1575 emitOpcode(op_construct);
1576 instructions().append(dst->index()); // dst
1577 instructions().append(func->index()); // func
1578 instructions().append(argv.size()); // argCount
1579 instructions().append(argv[0]->index() + argv.size() + RegisterFile::CallFrameHeaderSize); // registerOffset
1580 instructions().append(funcProto->index()); // proto
1581 instructions().append(argv[0]->index()); // thisRegister
1583 emitOpcode(op_construct_verify);
1584 instructions().append(dst->index());
1585 instructions().append(argv[0]->index());
1587 if (m_shouldEmitProfileHooks) {
1588 emitOpcode(op_profile_did_call);
1589 instructions().append(func->index());
1591 if (dst == originalFunc)
1592 func->deref();
1595 return dst;
1598 RegisterID* BytecodeGenerator::emitStrcat(RegisterID* dst, RegisterID* src, int count)
1600 emitOpcode(op_strcat);
1601 instructions().append(dst->index());
1602 instructions().append(src->index());
1603 instructions().append(count);
1605 return dst;
1608 void BytecodeGenerator::emitToPrimitive(RegisterID* dst, RegisterID* src)
1610 emitOpcode(op_to_primitive);
1611 instructions().append(dst->index());
1612 instructions().append(src->index());
1615 RegisterID* BytecodeGenerator::emitPushScope(RegisterID* scope)
1617 ASSERT(scope->isTemporary());
1618 ControlFlowContext context;
1619 context.isFinallyBlock = false;
1620 m_scopeContextStack.append(context);
1621 m_dynamicScopeDepth++;
1622 createArgumentsIfNecessary();
1624 return emitUnaryNoDstOp(op_push_scope, scope);
1627 void BytecodeGenerator::emitPopScope()
1629 ASSERT(m_scopeContextStack.size());
1630 ASSERT(!m_scopeContextStack.last().isFinallyBlock);
1632 emitOpcode(op_pop_scope);
1634 m_scopeContextStack.removeLast();
1635 m_dynamicScopeDepth--;
1638 void BytecodeGenerator::emitDebugHook(DebugHookID debugHookID, int firstLine, int lastLine)
1640 if (!m_shouldEmitDebugHooks)
1641 return;
1642 emitOpcode(op_debug);
1643 instructions().append(debugHookID);
1644 instructions().append(firstLine);
1645 instructions().append(lastLine);
1648 void BytecodeGenerator::pushFinallyContext(Label* target, RegisterID* retAddrDst)
1650 ControlFlowContext scope;
1651 scope.isFinallyBlock = true;
1652 FinallyContext context = { target, retAddrDst };
1653 scope.finallyContext = context;
1654 m_scopeContextStack.append(scope);
1655 m_finallyDepth++;
1658 void BytecodeGenerator::popFinallyContext()
1660 ASSERT(m_scopeContextStack.size());
1661 ASSERT(m_scopeContextStack.last().isFinallyBlock);
1662 ASSERT(m_finallyDepth > 0);
1663 m_scopeContextStack.removeLast();
1664 m_finallyDepth--;
// Returns the label scope a 'break' statement (optionally labeled with
// 'name') should target, or 0 if there is no valid target.
LabelScope* BytecodeGenerator::breakTarget(const Identifier& name)
{
    // Reclaim free label scopes.
    //
    // The condition was previously coded as 'm_labelScopes.size() && !m_labelScopes.last().refCount()',
    // however sometimes this appears to lead to GCC going a little haywire and entering the loop with
    // size 0, leading to segfaulty badness. We are yet to identify a valid cause within our code to
    // cause the GCC codegen to misbehave in this fashion, and as such the following refactoring of the
    // loop condition is a workaround.
    while (m_labelScopes.size()) {
        if (m_labelScopes.last().refCount())
            break;
        m_labelScopes.removeLast();
    }

    if (!m_labelScopes.size())
        return 0;

    // We special-case the following, which is a syntax error in Firefox:
    // label:
    //     break;
    if (name.isEmpty()) {
        // An unlabeled break targets the innermost non-label scope (loop or
        // switch).
        for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
            LabelScope* scope = &m_labelScopes[i];
            if (scope->type() != LabelScope::NamedLabel) {
                ASSERT(scope->breakTarget());
                return scope;
            }
        }
        return 0;
    }

    // A labeled break targets the matching named scope.
    for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
        LabelScope* scope = &m_labelScopes[i];
        if (scope->name() && *scope->name() == name) {
            ASSERT(scope->breakTarget());
            return scope;
        }
    }
    return 0;
}
1709 LabelScope* BytecodeGenerator::continueTarget(const Identifier& name)
1711 // Reclaim free label scopes.
1712 while (m_labelScopes.size() && !m_labelScopes.last().refCount())
1713 m_labelScopes.removeLast();
1715 if (!m_labelScopes.size())
1716 return 0;
1718 if (name.isEmpty()) {
1719 for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
1720 LabelScope* scope = &m_labelScopes[i];
1721 if (scope->type() == LabelScope::Loop) {
1722 ASSERT(scope->continueTarget());
1723 return scope;
1726 return 0;
1729 // Continue to the loop nested nearest to the label scope that matches
1730 // 'name'.
1731 LabelScope* result = 0;
1732 for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
1733 LabelScope* scope = &m_labelScopes[i];
1734 if (scope->type() == LabelScope::Loop) {
1735 ASSERT(scope->continueTarget());
1736 result = scope;
1738 if (scope->name() && *scope->name() == name)
1739 return result; // may be 0
1741 return 0;
1744 PassRefPtr<Label> BytecodeGenerator::emitComplexJumpScopes(Label* target, ControlFlowContext* topScope, ControlFlowContext* bottomScope)
1746 while (topScope > bottomScope) {
1747 // First we count the number of dynamic scopes we need to remove to get
1748 // to a finally block.
1749 int nNormalScopes = 0;
1750 while (topScope > bottomScope) {
1751 if (topScope->isFinallyBlock)
1752 break;
1753 ++nNormalScopes;
1754 --topScope;
1757 if (nNormalScopes) {
1758 size_t begin = instructions().size();
1760 // We need to remove a number of dynamic scopes to get to the next
1761 // finally block
1762 emitOpcode(op_jmp_scopes);
1763 instructions().append(nNormalScopes);
1765 // If topScope == bottomScope then there isn't actually a finally block
1766 // left to emit, so make the jmp_scopes jump directly to the target label
1767 if (topScope == bottomScope) {
1768 instructions().append(target->bind(begin, instructions().size()));
1769 return target;
1772 // Otherwise we just use jmp_scopes to pop a group of scopes and go
1773 // to the next instruction
1774 RefPtr<Label> nextInsn = newLabel();
1775 instructions().append(nextInsn->bind(begin, instructions().size()));
1776 emitLabel(nextInsn.get());
1779 while (topScope > bottomScope && topScope->isFinallyBlock) {
1780 emitJumpSubroutine(topScope->finallyContext.retAddrDst, topScope->finallyContext.finallyAddr);
1781 --topScope;
1784 return emitJump(target);
1787 PassRefPtr<Label> BytecodeGenerator::emitJumpScopes(Label* target, int targetScopeDepth)
1789 ASSERT(scopeDepth() - targetScopeDepth >= 0);
1790 ASSERT(target->isForward());
1792 size_t scopeDelta = scopeDepth() - targetScopeDepth;
1793 ASSERT(scopeDelta <= m_scopeContextStack.size());
1794 if (!scopeDelta)
1795 return emitJump(target);
1797 if (m_finallyDepth)
1798 return emitComplexJumpScopes(target, &m_scopeContextStack.last(), &m_scopeContextStack.last() - scopeDelta);
1800 size_t begin = instructions().size();
1802 emitOpcode(op_jmp_scopes);
1803 instructions().append(scopeDelta);
1804 instructions().append(target->bind(begin, instructions().size()));
1805 return target;
1808 RegisterID* BytecodeGenerator::emitGetPropertyNames(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, Label* breakTarget)
1810 size_t begin = instructions().size();
1812 emitOpcode(op_get_pnames);
1813 instructions().append(dst->index());
1814 instructions().append(base->index());
1815 instructions().append(i->index());
1816 instructions().append(size->index());
1817 instructions().append(breakTarget->bind(begin, instructions().size()));
1818 return dst;
1821 RegisterID* BytecodeGenerator::emitNextPropertyName(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, RegisterID* iter, Label* target)
1823 size_t begin = instructions().size();
1825 emitOpcode(op_next_pname);
1826 instructions().append(dst->index());
1827 instructions().append(base->index());
1828 instructions().append(i->index());
1829 instructions().append(size->index());
1830 instructions().append(iter->index());
1831 instructions().append(target->bind(begin, instructions().size()));
1832 return dst;
1835 RegisterID* BytecodeGenerator::emitCatch(RegisterID* targetRegister, Label* start, Label* end)
1837 #if ENABLE(JIT)
1838 HandlerInfo info = { start->bind(0, 0), end->bind(0, 0), instructions().size(), m_dynamicScopeDepth + m_baseScopeDepth, CodeLocationLabel() };
1839 #else
1840 HandlerInfo info = { start->bind(0, 0), end->bind(0, 0), instructions().size(), m_dynamicScopeDepth + m_baseScopeDepth };
1841 #endif
1843 m_codeBlock->addExceptionHandler(info);
1844 emitOpcode(op_catch);
1845 instructions().append(targetRegister->index());
1846 return targetRegister;
1849 RegisterID* BytecodeGenerator::emitNewError(RegisterID* dst, ErrorType type, JSValue message)
1851 emitOpcode(op_new_error);
1852 instructions().append(dst->index());
1853 instructions().append(static_cast<int>(type));
1854 instructions().append(addConstantValue(message)->index());
1855 return dst;
1858 PassRefPtr<Label> BytecodeGenerator::emitJumpSubroutine(RegisterID* retAddrDst, Label* finally)
1860 size_t begin = instructions().size();
1862 emitOpcode(op_jsr);
1863 instructions().append(retAddrDst->index());
1864 instructions().append(finally->bind(begin, instructions().size()));
1865 emitLabel(newLabel().get()); // Record the fact that the next instruction is implicitly labeled, because op_sret will return to it.
1866 return finally;
1869 void BytecodeGenerator::emitSubroutineReturn(RegisterID* retAddrSrc)
1871 emitOpcode(op_sret);
1872 instructions().append(retAddrSrc->index());
1875 void BytecodeGenerator::emitPushNewScope(RegisterID* dst, const Identifier& property, RegisterID* value)
1877 ControlFlowContext context;
1878 context.isFinallyBlock = false;
1879 m_scopeContextStack.append(context);
1880 m_dynamicScopeDepth++;
1882 createArgumentsIfNecessary();
1884 emitOpcode(op_push_new_scope);
1885 instructions().append(dst->index());
1886 instructions().append(addConstant(property));
1887 instructions().append(value->index());
1890 void BytecodeGenerator::beginSwitch(RegisterID* scrutineeRegister, SwitchInfo::SwitchType type)
1892 SwitchInfo info = { instructions().size(), type };
1893 switch (type) {
1894 case SwitchInfo::SwitchImmediate:
1895 emitOpcode(op_switch_imm);
1896 break;
1897 case SwitchInfo::SwitchCharacter:
1898 emitOpcode(op_switch_char);
1899 break;
1900 case SwitchInfo::SwitchString:
1901 emitOpcode(op_switch_string);
1902 break;
1903 default:
1904 ASSERT_NOT_REACHED();
1907 instructions().append(0); // place holder for table index
1908 instructions().append(0); // place holder for default target
1909 instructions().append(scrutineeRegister->index());
1910 m_switchContextStack.append(info);
1913 static int32_t keyForImmediateSwitch(ExpressionNode* node, int32_t min, int32_t max)
1915 UNUSED_PARAM(max);
1916 ASSERT(node->isNumber());
1917 double value = static_cast<NumberNode*>(node)->value();
1918 int32_t key = static_cast<int32_t>(value);
1919 ASSERT(key == value);
1920 ASSERT(key >= min);
1921 ASSERT(key <= max);
1922 return key - min;
1925 static void prepareJumpTableForImmediateSwitch(SimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max)
1927 jumpTable.min = min;
1928 jumpTable.branchOffsets.resize(max - min + 1);
1929 jumpTable.branchOffsets.fill(0);
1930 for (uint32_t i = 0; i < clauseCount; ++i) {
1931 // We're emitting this after the clause labels should have been fixed, so
1932 // the labels should not be "forward" references
1933 ASSERT(!labels[i]->isForward());
1934 jumpTable.add(keyForImmediateSwitch(nodes[i], min, max), labels[i]->bind(switchAddress, switchAddress + 3));
1938 static int32_t keyForCharacterSwitch(ExpressionNode* node, int32_t min, int32_t max)
1940 UNUSED_PARAM(max);
1941 ASSERT(node->isString());
1942 UString::Rep* clause = static_cast<StringNode*>(node)->value().ustring().rep();
1943 ASSERT(clause->size() == 1);
1945 int32_t key = clause->data()[0];
1946 ASSERT(key >= min);
1947 ASSERT(key <= max);
1948 return key - min;
1951 static void prepareJumpTableForCharacterSwitch(SimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max)
1953 jumpTable.min = min;
1954 jumpTable.branchOffsets.resize(max - min + 1);
1955 jumpTable.branchOffsets.fill(0);
1956 for (uint32_t i = 0; i < clauseCount; ++i) {
1957 // We're emitting this after the clause labels should have been fixed, so
1958 // the labels should not be "forward" references
1959 ASSERT(!labels[i]->isForward());
1960 jumpTable.add(keyForCharacterSwitch(nodes[i], min, max), labels[i]->bind(switchAddress, switchAddress + 3));
1964 static void prepareJumpTableForStringSwitch(StringJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes)
1966 for (uint32_t i = 0; i < clauseCount; ++i) {
1967 // We're emitting this after the clause labels should have been fixed, so
1968 // the labels should not be "forward" references
1969 ASSERT(!labels[i]->isForward());
1971 ASSERT(nodes[i]->isString());
1972 UString::Rep* clause = static_cast<StringNode*>(nodes[i])->value().ustring().rep();
1973 OffsetLocation location;
1974 location.branchOffset = labels[i]->bind(switchAddress, switchAddress + 3);
1975 jumpTable.offsetTable.add(clause, location);
// Completes the switch begun by beginSwitch(): backpatches the emitted switch
// opcode's jump-table-index and default-target operands, then builds the jump
// table from the now-bound clause labels.
void BytecodeGenerator::endSwitch(uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, Label* defaultLabel, int32_t min, int32_t max)
{
    SwitchInfo switchInfo = m_switchContextStack.last();
    m_switchContextStack.removeLast();
    if (switchInfo.switchType == SwitchInfo::SwitchImmediate) {
        // Operand 1 becomes the index of the table added just below (the
        // current count is that index); operand 2 becomes the default target.
        instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfImmediateSwitchJumpTables();
        instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);

        SimpleJumpTable& jumpTable = m_codeBlock->addImmediateSwitchJumpTable();
        prepareJumpTableForImmediateSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max);
    } else if (switchInfo.switchType == SwitchInfo::SwitchCharacter) {
        // Same backpatching scheme as above, using the character tables.
        instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfCharacterSwitchJumpTables();
        instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);

        SimpleJumpTable& jumpTable = m_codeBlock->addCharacterSwitchJumpTable();
        prepareJumpTableForCharacterSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max);
    } else {
        ASSERT(switchInfo.switchType == SwitchInfo::SwitchString);
        // String switches use a hash-keyed table, so min/max do not apply.
        instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfStringSwitchJumpTables();
        instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);

        StringJumpTable& jumpTable = m_codeBlock->addStringSwitchJumpTable();
        prepareJumpTableForStringSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes);
    }
}
2005 RegisterID* BytecodeGenerator::emitThrowExpressionTooDeepException()
2007 // It would be nice to do an even better job of identifying exactly where the expression is.
2008 // And we could make the caller pass the node pointer in, if there was some way of getting
2009 // that from an arbitrary node. However, calling emitExpressionInfo without any useful data
2010 // is still good enough to get us an accurate line number.
2011 emitExpressionInfo(0, 0, 0);
2012 RegisterID* exception = emitNewError(newTemporary(), SyntaxError, jsString(globalData(), "Expression too deep"));
2013 emitThrow(exception);
2014 return exception;
2017 } // namespace JSC