/*
 * Copyright (C) 2008, 2009 Apple Inc. All rights reserved.
 * Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1.  Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 * 2.  Redistributions in binary form must reproduce the above copyright
 *     notice, this list of conditions and the following disclaimer in the
 *     documentation and/or other materials provided with the distribution.
 * 3.  Neither the name of Apple Computer, Inc. ("Apple") nor the names of
 *     its contributors may be used to endorse or promote products derived
 *     from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include "config.h"
#include "BytecodeGenerator.h"

#include "BatchedTransitionOptimizer.h"
#include "PrototypeFunction.h"
#include "JSFunction.h"
#include "Interpreter.h"

using namespace std;

namespace JSC {
/*
    The layout of a register frame looks like this:

    For

    function f(x, y) {
        var v1;
        function g() { }
        var v2;
        return (x) * (y);
    }

    assuming (x) and (y) generated temporaries t1 and t2, you would have

    ------------------------------------
    |  x |  y |  g | v2 | v1 | t1 | t2 | <-- value held
    ------------------------------------
    | -5 | -4 | -3 | -2 | -1 | +0 | +1 | <-- register index
    ------------------------------------
    | params->|<-locals      | temps->

    Because temporary registers are allocated in a stack-like fashion, we
    can reclaim them with a simple popping algorithm. The same goes for labels.
    (We never reclaim parameter or local registers, because parameters and
    locals are DontDelete.)

    The register layout before a function call looks like this:

    For

    function f(x, y)
    {
    }

    f(1);

    >                        <------------------------------
    <                        >  reserved: call frame  |  1 | <-- value held
    >         >snip<         <------------------------------
    <                        > +0 | +1 | +2 | +3 | +4 | +5 | <-- register index
    >                        <------------------------------
    | params->|<-locals      | temps->

    The call instruction fills in the "call frame" registers. It also pads
    missing arguments at the end of the call:

    >                        <-----------------------------------
    <                        >  reserved: call frame  |  1 |  ? | <-- value held ("?" stands for "undefined")
    >         >snip<         <-----------------------------------
    <                        > +0 | +1 | +2 | +3 | +4 | +5 | +6 | <-- register index
    >                        <-----------------------------------
    | params->|<-locals      | temps->

    After filling in missing arguments, the call instruction sets up the new
    stack frame to overlap the end of the old stack frame:

                 |---------------------------------->                        <
                 |  reserved: call frame  |  1 |  ? <                        > <-- value held ("?" stands for "undefined")
                 |---------------------------------->         >snip<         <
                 | -7 | -6 | -5 | -4 | -3 | -2 | -1 <                        > <-- register index
                 |---------------------------------->                        <
                 |                        params->|<-locals      | temps->

    That way, arguments are "copied" into the callee's stack frame for free.

    If the caller supplies too many arguments, this trick doesn't work. The
    extra arguments protrude into space reserved for locals and temporaries.
    In that case, the call instruction makes a real copy of the call frame header,
    along with just the arguments expected by the callee, leaving the original
    call frame header and arguments behind. (The call instruction can't just discard
    extra arguments, because the "arguments" object may access them later.)
    This copying strategy ensures that all named values will be at the indices
    expected by the callee.
*/
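/*
    A worked example of the parameter indexing used below; the header size here is
    illustrative only, not taken from RegisterFile. addParameter() starts from

        m_nextParameterIndex = -RegisterFile::CallFrameHeaderSize - parameterCount - 1;

    so with a hypothetical call frame header of 6 registers and a callee taking two
    named parameters, "this" lands at register -9, the parameters at -8 and -7, the
    call frame header occupies -6..-1, and locals/temporaries start at +0, matching
    the "params -> locals -> temps" picture above.
*/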
static bool s_dumpsGeneratedCode = false;

void BytecodeGenerator::setDumpsGeneratedCode(bool dumpsGeneratedCode)
{
#ifndef NDEBUG
    s_dumpsGeneratedCode = dumpsGeneratedCode;
#else
    UNUSED_PARAM(dumpsGeneratedCode);
#endif
}

bool BytecodeGenerator::dumpsGeneratedCode()
{
#ifndef NDEBUG
    return s_dumpsGeneratedCode;
#else
    return false;
#endif
}
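// generate() drives the actual code generation pass: it records the "this" register,
// walks the AST via emitBytecode(), optionally dumps the result, drops the symbol
// table when nothing can observe it, and finally shrinks the code block to fit.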
void BytecodeGenerator::generate()
{
    m_codeBlock->setThisRegister(m_thisRegister.index());

    m_scopeNode->emitBytecode(*this);

    m_codeBlock->setInstructionCount(m_codeBlock->instructions().size());

    if (s_dumpsGeneratedCode)
        m_codeBlock->dump(m_scopeChain->globalObject()->globalExec());

    if ((m_codeType == FunctionCode && !m_codeBlock->needsFullScopeChain() && !m_codeBlock->usesArguments()) || m_codeType == EvalCode)
        symbolTable().clear();

    m_codeBlock->setIsNumericCompareFunction(instructions() == m_globalData->numericCompareFunction(m_scopeChain->globalObject()->globalExec()));

#if !ENABLE(OPCODE_SAMPLING)
    if (!m_regeneratingForExceptionInfo && (m_codeType == FunctionCode || m_codeType == EvalCode))
        m_codeBlock->clearExceptionInfo();
#endif

    m_codeBlock->shrinkToFit();
}
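// addVar() registers a local variable in the symbol table. If the name is already
// present, r0 is pointed at the existing register and false is returned; otherwise
// a fresh register is allocated and true is returned.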
bool BytecodeGenerator::addVar(const Identifier& ident, bool isConstant, RegisterID*& r0)
{
    int index = m_calleeRegisters.size();
    SymbolTableEntry newEntry(index, isConstant ? ReadOnly : 0);
    pair<SymbolTable::iterator, bool> result = symbolTable().add(ident.ustring().rep(), newEntry);

    if (!result.second) {
        r0 = &registerFor(result.first->second.getIndex());
        return false;
    }

    ++m_codeBlock->m_numVars;
    r0 = newRegister();
    return true;
}
bool BytecodeGenerator::addGlobalVar(const Identifier& ident, bool isConstant, RegisterID*& r0)
{
    int index = m_nextGlobalIndex;
    SymbolTableEntry newEntry(index, isConstant ? ReadOnly : 0);
    pair<SymbolTable::iterator, bool> result = symbolTable().add(ident.ustring().rep(), newEntry);

    if (!result.second)
        index = result.first->second.getIndex();
    else {
        --m_nextGlobalIndex;
        m_globals.append(index + m_globalVarStorageOffset);
    }

    r0 = &registerFor(index);
    return result.second;
}
void BytecodeGenerator::preserveLastVar()
{
    if ((m_firstConstantIndex = m_calleeRegisters.size()) != 0)
        m_lastVar = &m_calleeRegisters.last();
}
BytecodeGenerator::BytecodeGenerator(ProgramNode* programNode, const Debugger* debugger, const ScopeChain& scopeChain, SymbolTable* symbolTable, ProgramCodeBlock* codeBlock)
    : m_shouldEmitDebugHooks(!!debugger)
    , m_shouldEmitProfileHooks(scopeChain.globalObject()->supportsProfiling())
    , m_scopeChain(&scopeChain)
    , m_symbolTable(symbolTable)
    , m_scopeNode(programNode)
    , m_codeBlock(codeBlock)
    , m_thisRegister(RegisterFile::ProgramCodeThisRegister)
    , m_finallyDepth(0)
    , m_dynamicScopeDepth(0)
    , m_baseScopeDepth(0)
    , m_codeType(GlobalCode)
    , m_nextGlobalIndex(-1)
    , m_nextConstantOffset(0)
    , m_globalConstantIndex(0)
    , m_globalData(&scopeChain.globalObject()->globalExec()->globalData())
    , m_lastOpcodeID(op_end)
    , m_emitNodeDepth(0)
    , m_regeneratingForExceptionInfo(false)
    , m_codeBlockBeingRegeneratedFrom(0)
{
    if (m_shouldEmitDebugHooks)
        m_codeBlock->setNeedsFullScopeChain(true);

    emitOpcode(op_enter);
    codeBlock->setGlobalData(m_globalData);

    // FIXME: Move code that modifies the global object to Interpreter::execute.

    m_codeBlock->m_numParameters = 1; // Allocate space for "this"

    JSGlobalObject* globalObject = scopeChain.globalObject();
    ExecState* exec = globalObject->globalExec();
    RegisterFile* registerFile = &exec->globalData().interpreter->registerFile();

    // Shift register indexes in generated code to elide registers allocated by intermediate stack frames.
    m_globalVarStorageOffset = -RegisterFile::CallFrameHeaderSize - m_codeBlock->m_numParameters - registerFile->size();

    // Add previously defined symbols to bookkeeping.
    m_globals.grow(symbolTable->size());
    SymbolTable::iterator end = symbolTable->end();
    for (SymbolTable::iterator it = symbolTable->begin(); it != end; ++it)
        registerFor(it->second.getIndex()).setIndex(it->second.getIndex() + m_globalVarStorageOffset);

    BatchedTransitionOptimizer optimizer(globalObject);

    const VarStack& varStack = programNode->varStack();
    const FunctionStack& functionStack = programNode->functionStack();
    bool canOptimizeNewGlobals = symbolTable->size() + functionStack.size() + varStack.size() < registerFile->maxGlobals();
    if (canOptimizeNewGlobals) {
        // Shift new symbols so they get stored prior to existing symbols.
        m_nextGlobalIndex -= symbolTable->size();

        for (size_t i = 0; i < functionStack.size(); ++i) {
            FunctionBodyNode* function = functionStack[i];
            globalObject->removeDirect(function->ident()); // Make sure our new function is not shadowed by an old property.
            emitNewFunction(addGlobalVar(function->ident(), false), function);
        }

        Vector<RegisterID*, 32> newVars;
        for (size_t i = 0; i < varStack.size(); ++i)
            if (!globalObject->hasProperty(exec, *varStack[i].first))
                newVars.append(addGlobalVar(*varStack[i].first, varStack[i].second & DeclarationStacks::IsConstant));

        preserveLastVar();

        for (size_t i = 0; i < newVars.size(); ++i)
            emitLoad(newVars[i], jsUndefined());
    } else {
        for (size_t i = 0; i < functionStack.size(); ++i) {
            FunctionBodyNode* function = functionStack[i];
            globalObject->putWithAttributes(exec, function->ident(), new (exec) JSFunction(exec, makeFunction(exec, function), scopeChain.node()), DontDelete);
        }

        for (size_t i = 0; i < varStack.size(); ++i) {
            if (globalObject->hasProperty(exec, *varStack[i].first))
                continue;
            int attributes = DontDelete;
            if (varStack[i].second & DeclarationStacks::IsConstant)
                attributes |= ReadOnly;
            globalObject->putWithAttributes(exec, *varStack[i].first, jsUndefined(), attributes);
        }

        preserveLastVar();
    }
}
BytecodeGenerator::BytecodeGenerator(FunctionBodyNode* functionBody, const Debugger* debugger, const ScopeChain& scopeChain, SymbolTable* symbolTable, CodeBlock* codeBlock)
    : m_shouldEmitDebugHooks(!!debugger)
    , m_shouldEmitProfileHooks(scopeChain.globalObject()->supportsProfiling())
    , m_scopeChain(&scopeChain)
    , m_symbolTable(symbolTable)
    , m_scopeNode(functionBody)
    , m_codeBlock(codeBlock)
    , m_finallyDepth(0)
    , m_dynamicScopeDepth(0)
    , m_baseScopeDepth(0)
    , m_codeType(FunctionCode)
    , m_nextConstantOffset(0)
    , m_globalConstantIndex(0)
    , m_globalData(&scopeChain.globalObject()->globalExec()->globalData())
    , m_lastOpcodeID(op_end)
    , m_emitNodeDepth(0)
    , m_regeneratingForExceptionInfo(false)
    , m_codeBlockBeingRegeneratedFrom(0)
{
    if (m_shouldEmitDebugHooks)
        m_codeBlock->setNeedsFullScopeChain(true);

    codeBlock->setGlobalData(m_globalData);

    bool usesArguments = functionBody->usesArguments();
    codeBlock->setUsesArguments(usesArguments);
    if (usesArguments) {
        m_argumentsRegister.setIndex(RegisterFile::OptionalCalleeArguments);
        addVar(propertyNames().arguments, false);
    }

    if (m_codeBlock->needsFullScopeChain()) {
        ++m_codeBlock->m_numVars;
        m_activationRegisterIndex = newRegister()->index();
        emitOpcode(op_enter_with_activation);
        instructions().append(m_activationRegisterIndex);
    } else
        emitOpcode(op_enter);

    if (usesArguments) {
        emitOpcode(op_init_arguments);

        // The debugger currently retrieves the arguments object from an activation rather than pulling
        // it from a call frame. In the long-term it should stop doing that (<rdar://problem/6911886>),
        // but for now we force eager creation of the arguments object when debugging.
        if (m_shouldEmitDebugHooks)
            emitOpcode(op_create_arguments);
    }

    const DeclarationStacks::FunctionStack& functionStack = functionBody->functionStack();
    for (size_t i = 0; i < functionStack.size(); ++i) {
        FunctionBodyNode* function = functionStack[i];
        const Identifier& ident = function->ident();
        m_functions.add(ident.ustring().rep());
        emitNewFunction(addVar(ident, false), function);
    }

    const DeclarationStacks::VarStack& varStack = functionBody->varStack();
    for (size_t i = 0; i < varStack.size(); ++i)
        addVar(*varStack[i].first, varStack[i].second & DeclarationStacks::IsConstant);

    FunctionParameters& parameters = *functionBody->parameters();
    size_t parameterCount = parameters.size();
    m_nextParameterIndex = -RegisterFile::CallFrameHeaderSize - parameterCount - 1;
    m_parameters.grow(1 + parameterCount); // reserve space for "this"

    // Add "this" as a parameter
    m_thisRegister.setIndex(m_nextParameterIndex);
    ++m_nextParameterIndex;
    ++m_codeBlock->m_numParameters;

    if (functionBody->usesThis() || m_shouldEmitDebugHooks) {
        emitOpcode(op_convert_this);
        instructions().append(m_thisRegister.index());
    }

    for (size_t i = 0; i < parameterCount; ++i)
        addParameter(parameters[i]);

    preserveLastVar();
}
BytecodeGenerator::BytecodeGenerator(EvalNode* evalNode, const Debugger* debugger, const ScopeChain& scopeChain, SymbolTable* symbolTable, EvalCodeBlock* codeBlock)
    : m_shouldEmitDebugHooks(!!debugger)
    , m_shouldEmitProfileHooks(scopeChain.globalObject()->supportsProfiling())
    , m_scopeChain(&scopeChain)
    , m_symbolTable(symbolTable)
    , m_scopeNode(evalNode)
    , m_codeBlock(codeBlock)
    , m_thisRegister(RegisterFile::ProgramCodeThisRegister)
    , m_finallyDepth(0)
    , m_dynamicScopeDepth(0)
    , m_baseScopeDepth(codeBlock->baseScopeDepth())
    , m_codeType(EvalCode)
    , m_nextConstantOffset(0)
    , m_globalConstantIndex(0)
    , m_globalData(&scopeChain.globalObject()->globalExec()->globalData())
    , m_lastOpcodeID(op_end)
    , m_emitNodeDepth(0)
    , m_regeneratingForExceptionInfo(false)
    , m_codeBlockBeingRegeneratedFrom(0)
{
    if (m_shouldEmitDebugHooks || m_baseScopeDepth)
        m_codeBlock->setNeedsFullScopeChain(true);

    emitOpcode(op_enter);
    codeBlock->setGlobalData(m_globalData);
    m_codeBlock->m_numParameters = 1; // Allocate space for "this"

    const DeclarationStacks::FunctionStack& functionStack = evalNode->functionStack();
    for (size_t i = 0; i < functionStack.size(); ++i)
        m_codeBlock->addFunctionDecl(makeFunction(m_globalData, functionStack[i]));

    const DeclarationStacks::VarStack& varStack = evalNode->varStack();
    unsigned numVariables = varStack.size();
    Vector<Identifier> variables;
    variables.reserveCapacity(numVariables);
    for (size_t i = 0; i < numVariables; ++i)
        variables.append(*varStack[i].first);
    codeBlock->adoptVariables(variables);

    preserveLastVar();
}
RegisterID* BytecodeGenerator::addParameter(const Identifier& ident)
{
    // Parameters overwrite var declarations, but not function declarations.
    RegisterID* result = 0;
    UString::Rep* rep = ident.ustring().rep();
    if (!m_functions.contains(rep)) {
        symbolTable().set(rep, m_nextParameterIndex);
        RegisterID& parameter = registerFor(m_nextParameterIndex);
        parameter.setIndex(m_nextParameterIndex);
        result = &parameter;
    }

    // To maintain the calling convention, we have to allocate unique space for
    // each parameter, even if the parameter doesn't make it into the symbol table.
    ++m_nextParameterIndex;
    ++m_codeBlock->m_numParameters;
    return result;
}
RegisterID* BytecodeGenerator::registerFor(const Identifier& ident)
{
    if (ident == propertyNames().thisIdentifier)
        return &m_thisRegister;

    if (!shouldOptimizeLocals())
        return 0;

    SymbolTableEntry entry = symbolTable().get(ident.ustring().rep());
    if (entry.isNull())
        return 0;

    if (ident == propertyNames().arguments)
        createArgumentsIfNecessary();

    return &registerFor(entry.getIndex());
}

bool BytecodeGenerator::willResolveToArguments(const Identifier& ident)
{
    if (ident != propertyNames().arguments)
        return false;

    if (!shouldOptimizeLocals())
        return false;

    SymbolTableEntry entry = symbolTable().get(ident.ustring().rep());
    if (entry.isNull())
        return false;

    if (m_codeBlock->usesArguments() && m_codeType == FunctionCode)
        return true;

    return false;
}
RegisterID* BytecodeGenerator::uncheckedRegisterForArguments()
{
    ASSERT(willResolveToArguments(propertyNames().arguments));

    SymbolTableEntry entry = symbolTable().get(propertyNames().arguments.ustring().rep());
    ASSERT(!entry.isNull());
    return &registerFor(entry.getIndex());
}
RegisterID* BytecodeGenerator::constRegisterFor(const Identifier& ident)
{
    if (m_codeType == EvalCode)
        return 0;

    SymbolTableEntry entry = symbolTable().get(ident.ustring().rep());
    if (entry.isNull())
        return 0;

    return &registerFor(entry.getIndex());
}

bool BytecodeGenerator::isLocal(const Identifier& ident)
{
    if (ident == propertyNames().thisIdentifier)
        return true;

    return shouldOptimizeLocals() && symbolTable().contains(ident.ustring().rep());
}

bool BytecodeGenerator::isLocalConstant(const Identifier& ident)
{
    return symbolTable().get(ident.ustring().rep()).isReadOnly();
}
RegisterID* BytecodeGenerator::newRegister()
{
    m_calleeRegisters.append(m_calleeRegisters.size());
    m_codeBlock->m_numCalleeRegisters = max<int>(m_codeBlock->m_numCalleeRegisters, m_calleeRegisters.size());
    return &m_calleeRegisters.last();
}

RegisterID* BytecodeGenerator::newTemporary()
{
    // Reclaim free register IDs.
    while (m_calleeRegisters.size() && !m_calleeRegisters.last().refCount())
        m_calleeRegisters.removeLast();

    RegisterID* result = newRegister();
    result->setTemporary();
    return result;
}

RegisterID* BytecodeGenerator::highestUsedRegister()
{
    size_t count = m_codeBlock->m_numCalleeRegisters;
    while (m_calleeRegisters.size() < count)
        newRegister();
    return &m_calleeRegisters.last();
}
PassRefPtr<LabelScope> BytecodeGenerator::newLabelScope(LabelScope::Type type, const Identifier* name)
{
    // Reclaim free label scopes.
    while (m_labelScopes.size() && !m_labelScopes.last().refCount())
        m_labelScopes.removeLast();

    // Allocate new label scope.
    LabelScope scope(type, name, scopeDepth(), newLabel(), type == LabelScope::Loop ? newLabel() : PassRefPtr<Label>()); // Only loops have continue targets.
    m_labelScopes.append(scope);
    return &m_labelScopes.last();
}

PassRefPtr<Label> BytecodeGenerator::newLabel()
{
    // Reclaim free label IDs.
    while (m_labels.size() && !m_labels.last().refCount())
        m_labels.removeLast();

    // Allocate new label ID.
    m_labels.append(m_codeBlock);
    return &m_labels.last();
}
PassRefPtr<Label> BytecodeGenerator::emitLabel(Label* l0)
{
    unsigned newLabelIndex = instructions().size();
    l0->setLocation(newLabelIndex);

    if (m_codeBlock->numberOfJumpTargets()) {
        unsigned lastLabelIndex = m_codeBlock->lastJumpTarget();
        ASSERT(lastLabelIndex <= newLabelIndex);
        if (newLabelIndex == lastLabelIndex) {
            // Peephole optimizations have already been disabled by emitting the last label
            return l0;
        }
    }

    m_codeBlock->addJumpTarget(newLabelIndex);

    // This disables peephole optimizations when an instruction is a jump target
    m_lastOpcodeID = op_end;
    return l0;
}
void BytecodeGenerator::emitOpcode(OpcodeID opcodeID)
{
    instructions().append(globalData()->interpreter->getOpcode(opcodeID));
    m_lastOpcodeID = opcodeID;
}

void BytecodeGenerator::retrieveLastBinaryOp(int& dstIndex, int& src1Index, int& src2Index)
{
    ASSERT(instructions().size() >= 4);
    size_t size = instructions().size();
    dstIndex = instructions().at(size - 3).u.operand;
    src1Index = instructions().at(size - 2).u.operand;
    src2Index = instructions().at(size - 1).u.operand;
}

void BytecodeGenerator::retrieveLastUnaryOp(int& dstIndex, int& srcIndex)
{
    ASSERT(instructions().size() >= 3);
    size_t size = instructions().size();
    dstIndex = instructions().at(size - 2).u.operand;
    srcIndex = instructions().at(size - 1).u.operand;
}

void ALWAYS_INLINE BytecodeGenerator::rewindBinaryOp()
{
    ASSERT(instructions().size() >= 4);
    instructions().shrink(instructions().size() - 4);
}

void ALWAYS_INLINE BytecodeGenerator::rewindUnaryOp()
{
    ASSERT(instructions().size() >= 3);
    instructions().shrink(instructions().size() - 3);
}
PassRefPtr<Label> BytecodeGenerator::emitJump(Label* target)
{
    size_t begin = instructions().size();
    emitOpcode(target->isForward() ? op_jmp : op_loop);
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}
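// emitJumpIfTrue() peephole-optimizes the common case where the condition register is
// a dead temporary produced by the immediately preceding compare or null test: the
// compare is rewound and fused into a single conditional jump (op_jless, op_jlesseq,
// op_jeq_null, op_jneq_null). Otherwise it falls back to op_jtrue / op_loop_if_true.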
PassRefPtr<Label> BytecodeGenerator::emitJumpIfTrue(RegisterID* cond, Label* target)
{
    if (m_lastOpcodeID == op_less) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(target->isForward() ? op_jless : op_loop_if_less);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_lesseq) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(target->isForward() ? op_jlesseq : op_loop_if_lesseq);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jeq_null);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jneq_null);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    }

    size_t begin = instructions().size();

    emitOpcode(target->isForward() ? op_jtrue : op_loop_if_true);
    instructions().append(cond->index());
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}
PassRefPtr<Label> BytecodeGenerator::emitJumpIfFalse(RegisterID* cond, Label* target)
{
    if (m_lastOpcodeID == op_less && target->isForward()) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jnless);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_lesseq && target->isForward()) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jnlesseq);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_not) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(target->isForward() ? op_jtrue : op_loop_if_true);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jneq_null);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jeq_null);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    }

    size_t begin = instructions().size();
    emitOpcode(target->isForward() ? op_jfalse : op_loop_if_false);
    instructions().append(cond->index());
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}
PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionCall(RegisterID* cond, Label* target)
{
    size_t begin = instructions().size();

    emitOpcode(op_jneq_ptr);
    instructions().append(cond->index());
    instructions().append(m_scopeChain->globalObject()->d()->callFunction);
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}

PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionApply(RegisterID* cond, Label* target)
{
    size_t begin = instructions().size();

    emitOpcode(op_jneq_ptr);
    instructions().append(cond->index());
    instructions().append(m_scopeChain->globalObject()->d()->applyFunction);
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}
unsigned BytecodeGenerator::addConstant(const Identifier& ident)
{
    UString::Rep* rep = ident.ustring().rep();
    pair<IdentifierMap::iterator, bool> result = m_identifierMap.add(rep, m_codeBlock->numberOfIdentifiers());
    if (result.second) // new entry
        m_codeBlock->addIdentifier(Identifier(m_globalData, rep));

    return result.first->second;
}
RegisterID* BytecodeGenerator::addConstantValue(JSValue v)
{
    int index = m_nextConstantOffset;

    pair<JSValueMap::iterator, bool> result = m_jsValueMap.add(JSValue::encode(v), m_nextConstantOffset);
    if (result.second) {
        m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset);
        ++m_nextConstantOffset;
        m_codeBlock->addConstantRegister(JSValue(v));
    } else
        index = result.first->second;

    return &m_constantPoolRegisters[index];
}

unsigned BytecodeGenerator::addRegExp(RegExp* r)
{
    return m_codeBlock->addRegExp(r);
}
RegisterID* BytecodeGenerator::emitMove(RegisterID* dst, RegisterID* src)
{
    emitOpcode(op_mov);
    instructions().append(dst->index());
    instructions().append(src->index());
    return dst;
}

RegisterID* BytecodeGenerator::emitUnaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src)
{
    emitOpcode(opcodeID);
    instructions().append(dst->index());
    instructions().append(src->index());
    return dst;
}
RegisterID* BytecodeGenerator::emitPreInc(RegisterID* srcDst)
{
    emitOpcode(op_pre_inc);
    instructions().append(srcDst->index());
    return srcDst;
}

RegisterID* BytecodeGenerator::emitPreDec(RegisterID* srcDst)
{
    emitOpcode(op_pre_dec);
    instructions().append(srcDst->index());
    return srcDst;
}

RegisterID* BytecodeGenerator::emitPostInc(RegisterID* dst, RegisterID* srcDst)
{
    emitOpcode(op_post_inc);
    instructions().append(dst->index());
    instructions().append(srcDst->index());
    return dst;
}

RegisterID* BytecodeGenerator::emitPostDec(RegisterID* dst, RegisterID* srcDst)
{
    emitOpcode(op_post_dec);
    instructions().append(dst->index());
    instructions().append(srcDst->index());
    return dst;
}
RegisterID* BytecodeGenerator::emitBinaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2, OperandTypes types)
{
    emitOpcode(opcodeID);
    instructions().append(dst->index());
    instructions().append(src1->index());
    instructions().append(src2->index());

    if (opcodeID == op_bitor || opcodeID == op_bitand || opcodeID == op_bitxor ||
        opcodeID == op_add || opcodeID == op_mul || opcodeID == op_sub || opcodeID == op_div)
        instructions().append(types.toInt());

    return dst;
}
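// emitEqualityOp() recognizes comparisons of the form (typeof x == "literal") against
// one of the known type names and rewinds the typeof into a dedicated op_is_* opcode,
// avoiding the string comparison at runtime.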
RegisterID* BytecodeGenerator::emitEqualityOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2)
{
    if (m_lastOpcodeID == op_typeof) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (src1->index() == dstIndex
            && src1->isTemporary()
            && m_codeBlock->isConstantRegisterIndex(src2->index())
            && m_codeBlock->constantRegister(src2->index()).jsValue().isString()) {
            const UString& value = asString(m_codeBlock->constantRegister(src2->index()).jsValue())->tryGetValue();
            if (value == "undefined") {
                rewindUnaryOp();
                emitOpcode(op_is_undefined);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "boolean") {
                rewindUnaryOp();
                emitOpcode(op_is_boolean);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "number") {
                rewindUnaryOp();
                emitOpcode(op_is_number);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "string") {
                rewindUnaryOp();
                emitOpcode(op_is_string);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "object") {
                rewindUnaryOp();
                emitOpcode(op_is_object);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "function") {
                rewindUnaryOp();
                emitOpcode(op_is_function);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
        }
    }

    emitOpcode(opcodeID);
    instructions().append(dst->index());
    instructions().append(src1->index());
    instructions().append(src2->index());
    return dst;
}
RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, bool b)
{
    return emitLoad(dst, jsBoolean(b));
}

RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, double number)
{
    // FIXME: Our hash tables won't hold infinity, so we make a new JSNumberCell each time.
    // Later we can do the extra work to handle that like the other cases.
    if (number == HashTraits<double>::emptyValue() || HashTraits<double>::isDeletedValue(number))
        return emitLoad(dst, jsNumber(globalData(), number));
    JSValue& valueInMap = m_numberMap.add(number, JSValue()).first->second;
    if (!valueInMap)
        valueInMap = jsNumber(globalData(), number);
    return emitLoad(dst, valueInMap);
}

RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, const Identifier& identifier)
{
    JSString*& stringInMap = m_stringMap.add(identifier.ustring().rep(), 0).first->second;
    if (!stringInMap)
        stringInMap = jsOwnedString(globalData(), identifier.ustring());
    return emitLoad(dst, JSValue(stringInMap));
}

RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, JSValue v)
{
    RegisterID* constantID = addConstantValue(v);
    if (dst)
        return emitMove(dst, constantID);
    return constantID;
}
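// findScopedProperty() tries to statically resolve 'property' by walking the scope
// chain's symbol tables. On success it reports the register index and the number of
// scopes to skip; requiresDynamicChecks accumulates whether any intervening scope is
// a dynamic scope, and globalObject is filled in when the lookup can target the
// global object's variable storage directly.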
bool BytecodeGenerator::findScopedProperty(const Identifier& property, int& index, size_t& stackDepth, bool forWriting, bool& requiresDynamicChecks, JSObject*& globalObject)
{
    // Cases where we cannot statically optimize the lookup.
    if (property == propertyNames().arguments || !canOptimizeNonLocals()) {
        stackDepth = 0;
        index = missingSymbolMarker();

        if (shouldOptimizeLocals() && m_codeType == GlobalCode) {
            ScopeChainIterator iter = m_scopeChain->begin();
            globalObject = *iter;
            ASSERT((++iter) == m_scopeChain->end());
        }
        return false;
    }

    size_t depth = 0;
    requiresDynamicChecks = false;
    ScopeChainIterator iter = m_scopeChain->begin();
    ScopeChainIterator end = m_scopeChain->end();
    for (; iter != end; ++iter, ++depth) {
        JSObject* currentScope = *iter;
        if (!currentScope->isVariableObject())
            break;
        JSVariableObject* currentVariableObject = static_cast<JSVariableObject*>(currentScope);
        SymbolTableEntry entry = currentVariableObject->symbolTable().get(property.ustring().rep());

        // Found the property
        if (!entry.isNull()) {
            if (entry.isReadOnly() && forWriting) {
                stackDepth = 0;
                index = missingSymbolMarker();
                if (shouldOptimizeLocals() && m_codeType == GlobalCode)
                    globalObject = currentVariableObject;
                return false;
            }
            stackDepth = depth;
            index = entry.getIndex();
            if (shouldOptimizeLocals() && m_codeType == GlobalCode)
                globalObject = currentVariableObject;
            return true;
        }
        bool scopeRequiresDynamicChecks = false;
        if (currentVariableObject->isDynamicScope(scopeRequiresDynamicChecks))
            break;
        requiresDynamicChecks |= scopeRequiresDynamicChecks;
    }
    // Can't locate the property but we're able to avoid a few lookups.
    stackDepth = depth;
    index = missingSymbolMarker();
    JSObject* scope = *iter;
    if (++iter == end)
        globalObject = scope;
    return false;
}
RegisterID* BytecodeGenerator::emitInstanceOf(RegisterID* dst, RegisterID* value, RegisterID* base, RegisterID* basePrototype)
{
    emitOpcode(op_instanceof);
    instructions().append(dst->index());
    instructions().append(value->index());
    instructions().append(base->index());
    instructions().append(basePrototype->index());
    return dst;
}
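// emitResolve() picks the cheapest resolve strategy the lookup allows: a direct scoped
// or global register access when findScopedProperty() succeeded, op_resolve_global /
// op_resolve_global_dynamic when only the global object is known, op_resolve_skip when
// a few scopes can at least be skipped, and plain op_resolve otherwise.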
RegisterID* BytecodeGenerator::emitResolve(RegisterID* dst, const Identifier& property)
{
    size_t depth = 0;
    int index = 0;
    JSObject* globalObject = 0;
    bool requiresDynamicChecks = false;
    if (!findScopedProperty(property, index, depth, false, requiresDynamicChecks, globalObject) && !globalObject) {
        // We can't optimise at all :-(
        emitOpcode(op_resolve);
        instructions().append(dst->index());
        instructions().append(addConstant(property));
        return dst;
    }

    if (globalObject) {
        bool forceGlobalResolve = false;
        if (m_regeneratingForExceptionInfo) {
#if ENABLE(JIT)
            forceGlobalResolve = m_codeBlockBeingRegeneratedFrom->hasGlobalResolveInfoAtBytecodeOffset(instructions().size());
#else
            forceGlobalResolve = m_codeBlockBeingRegeneratedFrom->hasGlobalResolveInstructionAtBytecodeOffset(instructions().size());
#endif
        }

        if (index != missingSymbolMarker() && !forceGlobalResolve && !requiresDynamicChecks) {
            // Directly index the property lookup across multiple scopes.
            return emitGetScopedVar(dst, depth, index, globalObject);
        }

#if ENABLE(JIT)
        m_codeBlock->addGlobalResolveInfo(instructions().size());
#else
        m_codeBlock->addGlobalResolveInstruction(instructions().size());
#endif
        emitOpcode(requiresDynamicChecks ? op_resolve_global_dynamic : op_resolve_global);
        instructions().append(dst->index());
        instructions().append(globalObject);
        instructions().append(addConstant(property));
        instructions().append(0);
        instructions().append(0);
        if (requiresDynamicChecks)
            instructions().append(depth);
        return dst;
    }

    if (requiresDynamicChecks) {
        // If we get here we have eval nested inside a |with| just give up
        emitOpcode(op_resolve);
        instructions().append(dst->index());
        instructions().append(addConstant(property));
        return dst;
    }

    if (index != missingSymbolMarker()) {
        // Directly index the property lookup across multiple scopes.
        return emitGetScopedVar(dst, depth, index, globalObject);
    }

    // In this case we are at least able to drop a few scope chains from the
    // lookup chain, although we still need to hash from then on.
    emitOpcode(op_resolve_skip);
    instructions().append(dst->index());
    instructions().append(addConstant(property));
    instructions().append(depth);
    return dst;
}
RegisterID* BytecodeGenerator::emitGetScopedVar(RegisterID* dst, size_t depth, int index, JSValue globalObject)
{
    if (globalObject) {
        emitOpcode(op_get_global_var);
        instructions().append(dst->index());
        instructions().append(asCell(globalObject));
        instructions().append(index);
        return dst;
    }

    emitOpcode(op_get_scoped_var);
    instructions().append(dst->index());
    instructions().append(index);
    instructions().append(depth);
    return dst;
}

RegisterID* BytecodeGenerator::emitPutScopedVar(size_t depth, int index, RegisterID* value, JSValue globalObject)
{
    if (globalObject) {
        emitOpcode(op_put_global_var);
        instructions().append(asCell(globalObject));
        instructions().append(index);
        instructions().append(value->index());
        return value;
    }
    emitOpcode(op_put_scoped_var);
    instructions().append(index);
    instructions().append(depth);
    instructions().append(value->index());
    return value;
}
RegisterID* BytecodeGenerator::emitResolveBase(RegisterID* dst, const Identifier& property)
{
    size_t depth = 0;
    int index = 0;
    JSObject* globalObject = 0;
    bool requiresDynamicChecks = false;
    findScopedProperty(property, index, depth, false, requiresDynamicChecks, globalObject);
    if (!globalObject || requiresDynamicChecks) {
        // We can't optimise at all :-(
        emitOpcode(op_resolve_base);
        instructions().append(dst->index());
        instructions().append(addConstant(property));
        return dst;
    }

    // Global object is the base
    return emitLoad(dst, JSValue(globalObject));
}
RegisterID* BytecodeGenerator::emitResolveWithBase(RegisterID* baseDst, RegisterID* propDst, const Identifier& property)
{
    size_t depth = 0;
    int index = 0;
    JSObject* globalObject = 0;
    bool requiresDynamicChecks = false;
    if (!findScopedProperty(property, index, depth, false, requiresDynamicChecks, globalObject) || !globalObject || requiresDynamicChecks) {
        // We can't optimise at all :-(
        emitOpcode(op_resolve_with_base);
        instructions().append(baseDst->index());
        instructions().append(propDst->index());
        instructions().append(addConstant(property));
        return baseDst;
    }

    bool forceGlobalResolve = false;
    if (m_regeneratingForExceptionInfo) {
#if ENABLE(JIT)
        forceGlobalResolve = m_codeBlockBeingRegeneratedFrom->hasGlobalResolveInfoAtBytecodeOffset(instructions().size());
#else
        forceGlobalResolve = m_codeBlockBeingRegeneratedFrom->hasGlobalResolveInstructionAtBytecodeOffset(instructions().size());
#endif
    }

    // Global object is the base
    emitLoad(baseDst, JSValue(globalObject));

    if (index != missingSymbolMarker() && !forceGlobalResolve) {
        // Directly index the property lookup across multiple scopes.
        emitGetScopedVar(propDst, depth, index, globalObject);
        return baseDst;
    }

#if ENABLE(JIT)
    m_codeBlock->addGlobalResolveInfo(instructions().size());
#else
    m_codeBlock->addGlobalResolveInstruction(instructions().size());
#endif
    emitOpcode(requiresDynamicChecks ? op_resolve_global_dynamic : op_resolve_global);
    instructions().append(propDst->index());
    instructions().append(globalObject);
    instructions().append(addConstant(property));
    instructions().append(0);
    instructions().append(0);
    if (requiresDynamicChecks)
        instructions().append(depth);
    return baseDst;
}
void BytecodeGenerator::emitMethodCheck()
{
    emitOpcode(op_method_check);
}
RegisterID* BytecodeGenerator::emitGetById(RegisterID* dst, RegisterID* base, const Identifier& property)
{
#if ENABLE(JIT)
    m_codeBlock->addStructureStubInfo(StructureStubInfo(access_get_by_id));
#else
    m_codeBlock->addPropertyAccessInstruction(instructions().size());
#endif

    emitOpcode(op_get_by_id);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(addConstant(property));
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    return dst;
}

RegisterID* BytecodeGenerator::emitPutById(RegisterID* base, const Identifier& property, RegisterID* value)
{
#if ENABLE(JIT)
    m_codeBlock->addStructureStubInfo(StructureStubInfo(access_put_by_id));
#else
    m_codeBlock->addPropertyAccessInstruction(instructions().size());
#endif

    emitOpcode(op_put_by_id);
    instructions().append(base->index());
    instructions().append(addConstant(property));
    instructions().append(value->index());
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    return value;
}

RegisterID* BytecodeGenerator::emitDirectPutById(RegisterID* base, const Identifier& property, RegisterID* value)
{
#if ENABLE(JIT)
    m_codeBlock->addStructureStubInfo(StructureStubInfo(access_put_by_id));
#else
    m_codeBlock->addPropertyAccessInstruction(instructions().size());
#endif

    emitOpcode(op_put_by_id);
    instructions().append(base->index());
    instructions().append(addConstant(property));
    instructions().append(value->index());
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(property != m_globalData->propertyNames->underscoreProto);
    return value;
}
RegisterID* BytecodeGenerator::emitPutGetter(RegisterID* base, const Identifier& property, RegisterID* value)
{
    emitOpcode(op_put_getter);
    instructions().append(base->index());
    instructions().append(addConstant(property));
    instructions().append(value->index());
    return value;
}

RegisterID* BytecodeGenerator::emitPutSetter(RegisterID* base, const Identifier& property, RegisterID* value)
{
    emitOpcode(op_put_setter);
    instructions().append(base->index());
    instructions().append(addConstant(property));
    instructions().append(value->index());
    return value;
}

RegisterID* BytecodeGenerator::emitDeleteById(RegisterID* dst, RegisterID* base, const Identifier& property)
{
    emitOpcode(op_del_by_id);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(addConstant(property));
    return dst;
}
RegisterID* BytecodeGenerator::emitGetByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
{
    for (size_t i = m_forInContextStack.size(); i > 0; i--) {
        ForInContext& context = m_forInContextStack[i - 1];
        if (context.propertyRegister == property) {
            emitOpcode(op_get_by_pname);
            instructions().append(dst->index());
            instructions().append(base->index());
            instructions().append(property->index());
            instructions().append(context.expectedSubscriptRegister->index());
            instructions().append(context.iterRegister->index());
            instructions().append(context.indexRegister->index());
            return dst;
        }
    }
    emitOpcode(op_get_by_val);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(property->index());
    return dst;
}
RegisterID* BytecodeGenerator::emitPutByVal(RegisterID* base, RegisterID* property, RegisterID* value)
{
    emitOpcode(op_put_by_val);
    instructions().append(base->index());
    instructions().append(property->index());
    instructions().append(value->index());
    return value;
}

RegisterID* BytecodeGenerator::emitDeleteByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
{
    emitOpcode(op_del_by_val);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(property->index());
    return dst;
}

RegisterID* BytecodeGenerator::emitPutByIndex(RegisterID* base, unsigned index, RegisterID* value)
{
    emitOpcode(op_put_by_index);
    instructions().append(base->index());
    instructions().append(index);
    instructions().append(value->index());
    return value;
}
RegisterID* BytecodeGenerator::emitNewObject(RegisterID* dst)
{
    emitOpcode(op_new_object);
    instructions().append(dst->index());
    return dst;
}

RegisterID* BytecodeGenerator::emitNewArray(RegisterID* dst, ElementNode* elements)
{
    Vector<RefPtr<RegisterID>, 16> argv;
    for (ElementNode* n = elements; n; n = n->next()) {
        if (n->elision())
            break;
        argv.append(newTemporary());
        // op_new_array requires the initial values to be a sequential range of registers
        ASSERT(argv.size() == 1 || argv[argv.size() - 1]->index() == argv[argv.size() - 2]->index() + 1);
        emitNode(argv.last().get(), n->value());
    }
    emitOpcode(op_new_array);
    instructions().append(dst->index());
    instructions().append(argv.size() ? argv[0]->index() : 0); // argv
    instructions().append(argv.size()); // argc
    return dst;
}
RegisterID* BytecodeGenerator::emitNewFunction(RegisterID* dst, FunctionBodyNode* function)
{
    unsigned index = m_codeBlock->addFunctionDecl(makeFunction(m_globalData, function));

    emitOpcode(op_new_func);
    instructions().append(dst->index());
    instructions().append(index);
    return dst;
}

RegisterID* BytecodeGenerator::emitNewRegExp(RegisterID* dst, RegExp* regExp)
{
    emitOpcode(op_new_regexp);
    instructions().append(dst->index());
    instructions().append(addRegExp(regExp));
    return dst;
}

RegisterID* BytecodeGenerator::emitNewFunctionExpression(RegisterID* r0, FuncExprNode* n)
{
    FunctionBodyNode* function = n->body();
    unsigned index = m_codeBlock->addFunctionExpr(makeFunction(m_globalData, function));

    emitOpcode(op_new_func_exp);
    instructions().append(r0->index());
    instructions().append(index);
    return r0;
}
RegisterID* BytecodeGenerator::emitCall(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, ArgumentsNode* argumentsNode, unsigned divot, unsigned startOffset, unsigned endOffset)
{
    return emitCall(op_call, dst, func, thisRegister, argumentsNode, divot, startOffset, endOffset);
}

void BytecodeGenerator::createArgumentsIfNecessary()
{
    if (m_codeBlock->usesArguments() && m_codeType == FunctionCode)
        emitOpcode(op_create_arguments);
}

RegisterID* BytecodeGenerator::emitCallEval(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, ArgumentsNode* argumentsNode, unsigned divot, unsigned startOffset, unsigned endOffset)
{
    createArgumentsIfNecessary();
    return emitCall(op_call_eval, dst, func, thisRegister, argumentsNode, divot, startOffset, endOffset);
}
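// The full emitCall(): "this" plus the arguments are evaluated into a contiguous run
// of temporaries, CallFrameHeaderSize additional temporaries are reserved for the call
// frame, and the register offset passed to the call opcode is computed from argv[0],
// the argument count and the header size, matching the frame layout described at the
// top of this file.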
RegisterID* BytecodeGenerator::emitCall(OpcodeID opcodeID, RegisterID* dst, RegisterID* func, RegisterID* thisRegister, ArgumentsNode* argumentsNode, unsigned divot, unsigned startOffset, unsigned endOffset)
{
    ASSERT(opcodeID == op_call || opcodeID == op_call_eval);
    ASSERT(func->refCount());
    ASSERT(thisRegister->refCount());

    RegisterID* originalFunc = func;
    if (m_shouldEmitProfileHooks) {
        // If codegen decided to recycle func as this call's destination register,
        // we need to undo that optimization here so that func will still be around
        // for the sake of op_profile_did_call.
        if (dst == func) {
            RefPtr<RegisterID> movedThisRegister = emitMove(newTemporary(), thisRegister);
            RefPtr<RegisterID> movedFunc = emitMove(thisRegister, func);

            thisRegister = movedThisRegister.release().releaseRef();
            func = movedFunc.release().releaseRef();
        }
    }

    // Generate code for arguments.
    Vector<RefPtr<RegisterID>, 16> argv;
    argv.append(thisRegister);
    for (ArgumentListNode* n = argumentsNode->m_listNode; n; n = n->m_next) {
        argv.append(newTemporary());
        // op_call requires the arguments to be a sequential range of registers
        ASSERT(argv[argv.size() - 1]->index() == argv[argv.size() - 2]->index() + 1);
        emitNode(argv.last().get(), n);
    }

    // Reserve space for call frame.
    Vector<RefPtr<RegisterID>, RegisterFile::CallFrameHeaderSize> callFrame;
    for (int i = 0; i < RegisterFile::CallFrameHeaderSize; ++i)
        callFrame.append(newTemporary());

    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_will_call);
        instructions().append(func->index());

#if ENABLE(JIT)
        m_codeBlock->addFunctionRegisterInfo(instructions().size(), func->index());
#endif
    }

    emitExpressionInfo(divot, startOffset, endOffset);

#if ENABLE(JIT)
    m_codeBlock->addCallLinkInfo();
#endif

    emitOpcode(opcodeID);
    instructions().append(dst->index()); // dst
    instructions().append(func->index()); // func
    instructions().append(argv.size()); // argCount
    instructions().append(argv[0]->index() + argv.size() + RegisterFile::CallFrameHeaderSize); // registerOffset

    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_did_call);
        instructions().append(func->index());

        if (dst == originalFunc) {
            thisRegister->deref();
            func->deref();
        }
    }

    return dst;
}
RegisterID* BytecodeGenerator::emitLoadVarargs(RegisterID* argCountDst, RegisterID* arguments)
{
    ASSERT(argCountDst->index() < arguments->index());
    emitOpcode(op_load_varargs);
    instructions().append(argCountDst->index());
    instructions().append(arguments->index());
    return argCountDst;
}

RegisterID* BytecodeGenerator::emitCallVarargs(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, RegisterID* argCountRegister, unsigned divot, unsigned startOffset, unsigned endOffset)
{
    ASSERT(func->refCount());
    ASSERT(thisRegister->refCount());
    ASSERT(dst != func);
    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_will_call);
        instructions().append(func->index());

#if ENABLE(JIT)
        m_codeBlock->addFunctionRegisterInfo(instructions().size(), func->index());
#endif
    }

    emitExpressionInfo(divot, startOffset, endOffset);

    emitOpcode(op_call_varargs);
    instructions().append(dst->index()); // dst
    instructions().append(func->index()); // func
    instructions().append(argCountRegister->index()); // arg count
    instructions().append(thisRegister->index() + RegisterFile::CallFrameHeaderSize); // initial registerOffset
    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_did_call);
        instructions().append(func->index());
    }
    return dst;
}
RegisterID* BytecodeGenerator::emitReturn(RegisterID* src)
{
    if (m_codeBlock->needsFullScopeChain()) {
        emitOpcode(op_tear_off_activation);
        instructions().append(m_activationRegisterIndex);
    } else if (m_codeBlock->usesArguments() && m_codeBlock->m_numParameters > 1)
        emitOpcode(op_tear_off_arguments);

    return emitUnaryNoDstOp(op_ret, src);
}

RegisterID* BytecodeGenerator::emitUnaryNoDstOp(OpcodeID opcodeID, RegisterID* src)
{
    emitOpcode(opcodeID);
    instructions().append(src->index());
    return src;
}
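// emitConstruct() mirrors emitCall(), but additionally loads the callee's "prototype"
// property into a temporary and hands it, together with the reserved "this" slot, to
// op_construct, then emits op_construct_verify on the result.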
RegisterID* BytecodeGenerator::emitConstruct(RegisterID* dst, RegisterID* func, ArgumentsNode* argumentsNode, unsigned divot, unsigned startOffset, unsigned endOffset)
{
    ASSERT(func->refCount());

    RegisterID* originalFunc = func;
    if (m_shouldEmitProfileHooks) {
        // If codegen decided to recycle func as this call's destination register,
        // we need to undo that optimization here so that func will still be around
        // for the sake of op_profile_did_call.
        if (dst == func) {
            RefPtr<RegisterID> movedFunc = emitMove(newTemporary(), func);
            func = movedFunc.release().releaseRef();
        }
    }

    RefPtr<RegisterID> funcProto = newTemporary();

    // Generate code for arguments.
    Vector<RefPtr<RegisterID>, 16> argv;
    argv.append(newTemporary()); // reserve space for "this"
    for (ArgumentListNode* n = argumentsNode ? argumentsNode->m_listNode : 0; n; n = n->m_next) {
        argv.append(newTemporary());
        // op_construct requires the arguments to be a sequential range of registers
        ASSERT(argv[argv.size() - 1]->index() == argv[argv.size() - 2]->index() + 1);
        emitNode(argv.last().get(), n);
    }

    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_will_call);
        instructions().append(func->index());
    }

    // Load prototype.
    emitExpressionInfo(divot, startOffset, endOffset);
    emitGetByIdExceptionInfo(op_construct);
    emitGetById(funcProto.get(), func, globalData()->propertyNames->prototype);

    // Reserve space for call frame.
    Vector<RefPtr<RegisterID>, RegisterFile::CallFrameHeaderSize> callFrame;
    for (int i = 0; i < RegisterFile::CallFrameHeaderSize; ++i)
        callFrame.append(newTemporary());

    emitExpressionInfo(divot, startOffset, endOffset);

#if ENABLE(JIT)
    m_codeBlock->addCallLinkInfo();
#endif

    emitOpcode(op_construct);
    instructions().append(dst->index()); // dst
    instructions().append(func->index()); // func
    instructions().append(argv.size()); // argCount
    instructions().append(argv[0]->index() + argv.size() + RegisterFile::CallFrameHeaderSize); // registerOffset
    instructions().append(funcProto->index()); // proto
    instructions().append(argv[0]->index()); // thisRegister

    emitOpcode(op_construct_verify);
    instructions().append(dst->index());
    instructions().append(argv[0]->index());

    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_did_call);
        instructions().append(func->index());

        if (dst == originalFunc)
            func->deref();
    }

    return dst;
}
RegisterID* BytecodeGenerator::emitStrcat(RegisterID* dst, RegisterID* src, int count)
{
    emitOpcode(op_strcat);
    instructions().append(dst->index());
    instructions().append(src->index());
    instructions().append(count);

    return dst;
}

void BytecodeGenerator::emitToPrimitive(RegisterID* dst, RegisterID* src)
{
    emitOpcode(op_to_primitive);
    instructions().append(dst->index());
    instructions().append(src->index());
}
RegisterID* BytecodeGenerator::emitPushScope(RegisterID* scope)
{
    ASSERT(scope->isTemporary());
    ControlFlowContext context;
    context.isFinallyBlock = false;
    m_scopeContextStack.append(context);
    m_dynamicScopeDepth++;
    createArgumentsIfNecessary();

    return emitUnaryNoDstOp(op_push_scope, scope);
}

void BytecodeGenerator::emitPopScope()
{
    ASSERT(m_scopeContextStack.size());
    ASSERT(!m_scopeContextStack.last().isFinallyBlock);

    emitOpcode(op_pop_scope);

    m_scopeContextStack.removeLast();
    m_dynamicScopeDepth--;
}
void BytecodeGenerator::emitDebugHook(DebugHookID debugHookID, int firstLine, int lastLine)
{
#if ENABLE(DEBUG_WITH_BREAKPOINT)
    if (debugHookID != DidReachBreakpoint)
        return;
#else
    if (!m_shouldEmitDebugHooks)
        return;
#endif
    emitOpcode(op_debug);
    instructions().append(debugHookID);
    instructions().append(firstLine);
    instructions().append(lastLine);
}
void BytecodeGenerator::pushFinallyContext(Label* target, RegisterID* retAddrDst)
{
    ControlFlowContext scope;
    scope.isFinallyBlock = true;
    FinallyContext context = { target, retAddrDst };
    scope.finallyContext = context;
    m_scopeContextStack.append(scope);
    m_finallyDepth++;
}

void BytecodeGenerator::popFinallyContext()
{
    ASSERT(m_scopeContextStack.size());
    ASSERT(m_scopeContextStack.last().isFinallyBlock);
    ASSERT(m_finallyDepth > 0);
    m_scopeContextStack.removeLast();
    m_finallyDepth--;
}
LabelScope* BytecodeGenerator::breakTarget(const Identifier& name)
{
    // Reclaim free label scopes.
    //
    // The condition was previously coded as 'm_labelScopes.size() && !m_labelScopes.last().refCount()',
    // however sometimes this appears to lead to GCC going a little haywire and entering the loop with
    // size 0, leading to segfaulty badness. We are yet to identify a valid cause within our code to
    // cause the GCC codegen to misbehave in this fashion, and as such the following refactoring of the
    // loop condition is a workaround.
    while (m_labelScopes.size()) {
        if (m_labelScopes.last().refCount())
            break;
        m_labelScopes.removeLast();
    }

    if (!m_labelScopes.size())
        return 0;

    // We special-case the following, which is a syntax error in Firefox:
    // label:
    //     break;
    if (name.isEmpty()) {
        for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
            LabelScope* scope = &m_labelScopes[i];
            if (scope->type() != LabelScope::NamedLabel) {
                ASSERT(scope->breakTarget());
                return scope;
            }
        }
        return 0;
    }

    for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
        LabelScope* scope = &m_labelScopes[i];
        if (scope->name() && *scope->name() == name) {
            ASSERT(scope->breakTarget());
            return scope;
        }
    }
    return 0;
}
LabelScope* BytecodeGenerator::continueTarget(const Identifier& name)
{
    // Reclaim free label scopes.
    while (m_labelScopes.size() && !m_labelScopes.last().refCount())
        m_labelScopes.removeLast();

    if (!m_labelScopes.size())
        return 0;

    if (name.isEmpty()) {
        for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
            LabelScope* scope = &m_labelScopes[i];
            if (scope->type() == LabelScope::Loop) {
                ASSERT(scope->continueTarget());
                return scope;
            }
        }
        return 0;
    }

    // Continue to the loop nested nearest to the label scope that matches
    // 'name'.
    LabelScope* result = 0;
    for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
        LabelScope* scope = &m_labelScopes[i];
        if (scope->type() == LabelScope::Loop) {
            ASSERT(scope->continueTarget());
            result = scope;
        }
        if (scope->name() && *scope->name() == name)
            return result; // may be 0
    }
    return 0;
}
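// emitComplexJumpScopes() handles jumps that leave both dynamic scopes and finally
// blocks: op_jmp_scopes pops groups of dynamic scopes, and emitJumpSubroutine() runs
// each intervening finally block, before the final jump to the target label.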

PassRefPtr<Label> BytecodeGenerator::emitComplexJumpScopes(Label* target, ControlFlowContext* topScope, ControlFlowContext* bottomScope)
{
    while (topScope > bottomScope) {
        // First we count the number of dynamic scopes we need to remove to get
        // to a finally block.
        int nNormalScopes = 0;
        while (topScope > bottomScope) {
            if (topScope->isFinallyBlock)
                break;
            ++nNormalScopes;
            --topScope;
        }

        if (nNormalScopes) {
            size_t begin = instructions().size();

            // We need to remove a number of dynamic scopes to get to the next
            // finally block.
            emitOpcode(op_jmp_scopes);
            instructions().append(nNormalScopes);

            // If topScope == bottomScope then there isn't actually a finally block
            // left to emit, so make the jmp_scopes jump directly to the target label.
            if (topScope == bottomScope) {
                instructions().append(target->bind(begin, instructions().size()));
                return target;
            }

            // Otherwise we just use jmp_scopes to pop a group of scopes and go
            // to the next instruction.
            RefPtr<Label> nextInsn = newLabel();
            instructions().append(nextInsn->bind(begin, instructions().size()));
            emitLabel(nextInsn.get());
        }

        while (topScope > bottomScope && topScope->isFinallyBlock) {
            emitJumpSubroutine(topScope->finallyContext.retAddrDst, topScope->finallyContext.finallyAddr);
            --topScope;
        }
    }
    return emitJump(target);
}
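
// A rough sketch of what the loop above produces (illustrative only): for a jump that must leave
// two dynamic scopes (e.g. 'with' blocks) and then run one finally block before reaching 'target',
// the emitted sequence is approximately
//
//     jmp_scopes 2, nextInsn      // pop the two dynamic scopes
//   nextInsn:
//     jsr retAddr, finallyBody    // run the finally block as a subroutine
//     jmp target
//
// with the process repeating if further groups of scopes and finally blocks remain.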

PassRefPtr<Label> BytecodeGenerator::emitJumpScopes(Label* target, int targetScopeDepth)
{
    ASSERT(scopeDepth() - targetScopeDepth >= 0);
    ASSERT(target->isForward());

    size_t scopeDelta = scopeDepth() - targetScopeDepth;
    ASSERT(scopeDelta <= m_scopeContextStack.size());
    if (!scopeDelta)
        return emitJump(target);

    if (m_finallyDepth)
        return emitComplexJumpScopes(target, &m_scopeContextStack.last(), &m_scopeContextStack.last() - scopeDelta);

    size_t begin = instructions().size();

    emitOpcode(op_jmp_scopes);
    instructions().append(scopeDelta);
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}
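
// In other words: a jump that leaves no scopes is a plain jmp; a jump that leaves only dynamic
// scopes is a single jmp_scopes; and when finally blocks are in play we fall back to the
// scope-by-scope sequence built by emitComplexJumpScopes above.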

RegisterID* BytecodeGenerator::emitGetPropertyNames(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, Label* breakTarget)
{
    size_t begin = instructions().size();

    emitOpcode(op_get_pnames);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(i->index());
    instructions().append(size->index());
    instructions().append(breakTarget->bind(begin, instructions().size()));
    return dst;
}

RegisterID* BytecodeGenerator::emitNextPropertyName(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, RegisterID* iter, Label* target)
{
    size_t begin = instructions().size();

    emitOpcode(op_next_pname);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(i->index());
    instructions().append(size->index());
    instructions().append(iter->index());
    instructions().append(target->bind(begin, instructions().size()));
    return dst;
}
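
// Together these two helpers drive for-in enumeration. A rough sketch of the loop shape a caller
// builds with them (illustrative only; register and label names are made up):
//
//     get_pnames iter, base, i, size, done         // snapshot enumerable names; jump to 'done' if there are none
//   next:
//     next_pname prop, base, i, size, iter, body   // load the next name into 'prop' and jump to 'body',
//   done:                                          // or fall through here when enumeration is exhausted
//     ...
//   body:
//     ... use 'prop' ...
//     jmp next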

RegisterID* BytecodeGenerator::emitCatch(RegisterID* targetRegister, Label* start, Label* end)
{
#if ENABLE(JIT)
    HandlerInfo info = { start->bind(0, 0), end->bind(0, 0), instructions().size(), m_dynamicScopeDepth + m_baseScopeDepth, CodeLocationLabel() };
#else
    HandlerInfo info = { start->bind(0, 0), end->bind(0, 0), instructions().size(), m_dynamicScopeDepth + m_baseScopeDepth };
#endif

    m_codeBlock->addExceptionHandler(info);
    emitOpcode(op_catch);
    instructions().append(targetRegister->index());
    return targetRegister;
}

RegisterID* BytecodeGenerator::emitNewError(RegisterID* dst, ErrorType type, JSValue message)
{
    emitOpcode(op_new_error);
    instructions().append(dst->index());
    instructions().append(static_cast<int>(type));
    instructions().append(addConstantValue(message)->index());
    return dst;
}

PassRefPtr<Label> BytecodeGenerator::emitJumpSubroutine(RegisterID* retAddrDst, Label* finally)
{
    size_t begin = instructions().size();

    emitOpcode(op_jsr);
    instructions().append(retAddrDst->index());
    instructions().append(finally->bind(begin, instructions().size()));
    emitLabel(newLabel().get()); // Record the fact that the next instruction is implicitly labeled, because op_sret will return to it.
    return finally;
}

void BytecodeGenerator::emitSubroutineReturn(RegisterID* retAddrSrc)
{
    emitOpcode(op_sret);
    instructions().append(retAddrSrc->index());
}
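
// These two helpers treat a finally block as a bytecode subroutine: emitJumpSubroutine stashes a
// return address in retAddrDst and jumps to the finally body, and the finally body ends with an
// sret on that same register to resume after the jsr. A rough sketch for 'try { ... } finally { ... }'
// (illustrative only):
//
//     ... try body ...
//     jsr retAddr, finallyBody    // run the finally on the normal exit path
//     jmp afterTry
//   finallyBody:
//     ... finally body ...
//     sret retAddr
//   afterTry: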

void BytecodeGenerator::emitPushNewScope(RegisterID* dst, const Identifier& property, RegisterID* value)
{
    ControlFlowContext context;
    context.isFinallyBlock = false;
    m_scopeContextStack.append(context);
    m_dynamicScopeDepth++;

    createArgumentsIfNecessary();

    emitOpcode(op_push_new_scope);
    instructions().append(dst->index());
    instructions().append(addConstant(property));
    instructions().append(value->index());
}

void BytecodeGenerator::beginSwitch(RegisterID* scrutineeRegister, SwitchInfo::SwitchType type)
{
    SwitchInfo info = { instructions().size(), type };
    switch (type) {
    case SwitchInfo::SwitchImmediate:
        emitOpcode(op_switch_imm);
        break;
    case SwitchInfo::SwitchCharacter:
        emitOpcode(op_switch_char);
        break;
    case SwitchInfo::SwitchString:
        emitOpcode(op_switch_string);
        break;
    default:
        ASSERT_NOT_REACHED();
    }

    instructions().append(0); // placeholder for table index
    instructions().append(0); // placeholder for default target
    instructions().append(scrutineeRegister->index());
    m_switchContextStack.append(info);
}
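
// Note that beginSwitch only reserves the two operand slots after the switch opcode; endSwitch
// (below) patches them with the jump table index and the default target once all clause labels
// are known.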

static int32_t keyForImmediateSwitch(ExpressionNode* node, int32_t min, int32_t max)
{
    UNUSED_PARAM(max);
    ASSERT(node->isNumber());
    double value = static_cast<NumberNode*>(node)->value();
    int32_t key = static_cast<int32_t>(value);
    ASSERT(key == value);
    ASSERT(key >= min);
    ASSERT(key <= max);
    return key - min;
}

static void prepareJumpTableForImmediateSwitch(SimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max)
{
    jumpTable.min = min;
    jumpTable.branchOffsets.resize(max - min + 1);
    jumpTable.branchOffsets.fill(0);
    for (uint32_t i = 0; i < clauseCount; ++i) {
        // We're emitting this after the clause labels should have been fixed, so
        // the labels should not be "forward" references.
        ASSERT(!labels[i]->isForward());
        jumpTable.add(keyForImmediateSwitch(nodes[i], min, max), labels[i]->bind(switchAddress, switchAddress + 3));
    }
}
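
// A rough worked example (illustrative only): for 'switch (x) { case 3: ... case 5: ... }',
// min is 3 and max is 5, so branchOffsets holds 3 entries; case 3 maps to index 0 and case 5 to
// index 2, while the unhandled index 1 keeps its fill value of 0, which stands for "no clause
// here, take the default target".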

static int32_t keyForCharacterSwitch(ExpressionNode* node, int32_t min, int32_t max)
{
    UNUSED_PARAM(max);
    ASSERT(node->isString());
    UString::Rep* clause = static_cast<StringNode*>(node)->value().ustring().rep();
    ASSERT(clause->length() == 1);

    int32_t key = clause->characters()[0];
    ASSERT(key >= min);
    ASSERT(key <= max);
    return key - min;
}

static void prepareJumpTableForCharacterSwitch(SimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max)
{
    jumpTable.min = min;
    jumpTable.branchOffsets.resize(max - min + 1);
    jumpTable.branchOffsets.fill(0);
    for (uint32_t i = 0; i < clauseCount; ++i) {
        // We're emitting this after the clause labels should have been fixed, so
        // the labels should not be "forward" references.
        ASSERT(!labels[i]->isForward());
        jumpTable.add(keyForCharacterSwitch(nodes[i], min, max), labels[i]->bind(switchAddress, switchAddress + 3));
    }
}

static void prepareJumpTableForStringSwitch(StringJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes)
{
    for (uint32_t i = 0; i < clauseCount; ++i) {
        // We're emitting this after the clause labels should have been fixed, so
        // the labels should not be "forward" references.
        ASSERT(!labels[i]->isForward());

        ASSERT(nodes[i]->isString());
        UString::Rep* clause = static_cast<StringNode*>(nodes[i])->value().ustring().rep();
        OffsetLocation location;
        location.branchOffset = labels[i]->bind(switchAddress, switchAddress + 3);
        jumpTable.offsetTable.add(clause, location);
    }
}
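
// Unlike the dense immediate and character tables above, the string table is keyed directly by
// each clause's string rep; scrutinee values not present in offsetTable are expected to take the
// default target.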

void BytecodeGenerator::endSwitch(uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, Label* defaultLabel, int32_t min, int32_t max)
{
    SwitchInfo switchInfo = m_switchContextStack.last();
    m_switchContextStack.removeLast();
    if (switchInfo.switchType == SwitchInfo::SwitchImmediate) {
        instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfImmediateSwitchJumpTables();
        instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);

        SimpleJumpTable& jumpTable = m_codeBlock->addImmediateSwitchJumpTable();
        prepareJumpTableForImmediateSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max);
    } else if (switchInfo.switchType == SwitchInfo::SwitchCharacter) {
        instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfCharacterSwitchJumpTables();
        instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);

        SimpleJumpTable& jumpTable = m_codeBlock->addCharacterSwitchJumpTable();
        prepareJumpTableForCharacterSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max);
    } else {
        ASSERT(switchInfo.switchType == SwitchInfo::SwitchString);
        instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfStringSwitchJumpTables();
        instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);

        StringJumpTable& jumpTable = m_codeBlock->addStringSwitchJumpTable();
        prepareJumpTableForStringSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes);
    }
}
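
// Note the ordering above: the table index operand is written with numberOfXSwitchJumpTables()
// *before* the corresponding addXSwitchJumpTable() call, so it names exactly the table that is
// appended and filled in on the following lines.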

RegisterID* BytecodeGenerator::emitThrowExpressionTooDeepException()
{
    // It would be nice to do an even better job of identifying exactly where the expression is.
    // And we could make the caller pass the node pointer in, if there was some way of getting
    // that from an arbitrary node. However, calling emitExpressionInfo without any useful data
    // is still good enough to get us an accurate line number.
    emitExpressionInfo(0, 0, 0);
    RegisterID* exception = emitNewError(newTemporary(), SyntaxError, jsString(globalData(), "Expression too deep"));
    emitThrow(exception);
    return exception;
}