/*
 * Copyright (C) 2008, 2009, 2012 Apple Inc. All rights reserved.
 * Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
 *    its contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include "config.h"
#include "BytecodeGenerator.h"

#include "BatchedTransitionOptimizer.h"
#include "JSFunction.h"
#include "Interpreter.h"
#include "LowLevelInterpreter.h"
#include "ScopeChain.h"
#include "StrongInlines.h"
#include "UString.h"

using namespace std;

namespace JSC {
/*
    The layout of a register frame looks like this:

    For

    function f(x, y) {
        var v1;
        function g() { }
        var v2;
        return (x) * (y);
    }

    assuming (x) and (y) generated temporaries t1 and t2, you would have

    ------------------------------------
    |  x |  y |  g | v2 | v1 | t1 | t2 | <-- value held
    ------------------------------------
    | -5 | -4 | -3 | -2 | -1 | +0 | +1 | <-- register index
    ------------------------------------
    | params->|<-locals      | temps->

    Because temporary registers are allocated in a stack-like fashion, we
    can reclaim them with a simple popping algorithm. The same goes for labels.
    (We never reclaim parameter or local registers, because parameters and
    locals are DontDelete.)

    The register layout before a function call looks like this:

    For

    function f(x, y)
    {
    }

    f(1);

    >                        <------------------------------
    <                        >  reserved: call frame      |  1 | <-- value held
    >         >snip<         <------------------------------
    <                        > +0 | +1 | +2 | +3 | +4 | +5 | <-- register index
    >                        <------------------------------
    | params->|<-locals      | temps->

    The call instruction fills in the "call frame" registers. It also pads
    missing arguments at the end of the call:

    >                        <-----------------------------------
    <                        >  reserved: call frame | 1 | ?     | <-- value held ("?" stands for "undefined")
    >         >snip<         <-----------------------------------
    <                        > +0 | +1 | +2 | +3 | +4 | +5 | +6  | <-- register index
    >                        <-----------------------------------
    | params->|<-locals      | temps->

    After filling in missing arguments, the call instruction sets up the new
    stack frame to overlap the end of the old stack frame:

    |---------------------------------->                        <
    |  reserved: call frame | 1 | ?    <                        > <-- value held ("?" stands for "undefined")
    |---------------------------------->         >snip<         <
    | -7 | -6 | -5 | -4 | -3 | -2 | -1 <                        > <-- register index
    |---------------------------------->                        <
    |                        | params->|<-locals       | temps->

    That way, arguments are "copied" into the callee's stack frame for free.

    If the caller supplies too many arguments, this trick doesn't work. The
    extra arguments protrude into space reserved for locals and temporaries.
    In that case, the call instruction makes a real copy of the call frame header,
    along with just the arguments expected by the callee, leaving the original
    call frame header and arguments behind. (The call instruction can't just discard
    extra arguments, because the "arguments" object may access them later.)
    This copying strategy ensures that all named values will be at the indices
    expected by the callee.
*/
static bool s_dumpsGeneratedCode = false;
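// Forward jumps to a label are emitted before the label's final offset is known;
// Label::setLocation backpatches each recorded unresolved jump with the correct
// relative offset once the label is bound.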
void Label::setLocation(unsigned location)
{
    m_location = location;

    unsigned size = m_unresolvedJumps.size();
    for (unsigned i = 0; i < size; ++i)
        m_generator->m_instructions[m_unresolvedJumps[i].second].u.operand = m_location - m_unresolvedJumps[i].first;
}

void BytecodeGenerator::setDumpsGeneratedCode(bool dumpsGeneratedCode)
{
    s_dumpsGeneratedCode = dumpsGeneratedCode;
}

bool BytecodeGenerator::dumpsGeneratedCode()
{
    return s_dumpsGeneratedCode;
}

JSObject* BytecodeGenerator::generate()
{
    SamplingRegion samplingRegion("Bytecode Generation");

    m_codeBlock->setThisRegister(m_thisRegister.index());

    m_scopeNode->emitBytecode(*this);

    m_codeBlock->instructions() = RefCountedArray<Instruction>(m_instructions);

    if (s_dumpsGeneratedCode)
        m_codeBlock->dump(m_scopeChain->globalObject->globalExec());

    if ((m_codeType == FunctionCode && !m_codeBlock->needsFullScopeChain() && !m_codeBlock->usesArguments()) || m_codeType == EvalCode)
        symbolTable().clear();

    m_codeBlock->shrinkToFit();

    if (m_expressionTooDeep)
        return createOutOfMemoryError(m_scopeChain->globalObject.get());

    return 0;
}
bool BytecodeGenerator::addVar(const Identifier& ident, bool isConstant, RegisterID*& r0)
{
    int index = m_calleeRegisters.size();
    SymbolTableEntry newEntry(index, isConstant ? ReadOnly : 0);
    SymbolTable::AddResult result = symbolTable().add(ident.impl(), newEntry);

    if (!result.isNewEntry) {
        r0 = &registerFor(result.iterator->second.getIndex());
        return false;
    }

    r0 = addVar();
    return true;
}

int BytecodeGenerator::addGlobalVar(const Identifier& ident, bool isConstant)
{
    int index = symbolTable().size();
    SymbolTableEntry newEntry(index, isConstant ? ReadOnly : 0);
    SymbolTable::AddResult result = symbolTable().add(ident.impl(), newEntry);
    if (!result.isNewEntry)
        index = result.iterator->second.getIndex();
    return index;
}

void BytecodeGenerator::preserveLastVar()
{
    if ((m_firstConstantIndex = m_calleeRegisters.size()) != 0)
        m_lastVar = &m_calleeRegisters.last();
}
BytecodeGenerator::BytecodeGenerator(ProgramNode* programNode, ScopeChainNode* scopeChain, SymbolTable* symbolTable, ProgramCodeBlock* codeBlock, CompilationKind compilationKind)
    : m_shouldEmitDebugHooks(scopeChain->globalObject->debugger())
    , m_shouldEmitProfileHooks(scopeChain->globalObject->globalObjectMethodTable()->supportsProfiling(scopeChain->globalObject.get()))
    , m_shouldEmitRichSourceInfo(scopeChain->globalObject->globalObjectMethodTable()->supportsRichSourceInfo(scopeChain->globalObject.get()))
    , m_scopeChain(*scopeChain->globalData, scopeChain)
    , m_symbolTable(symbolTable)
    , m_scopeNode(programNode)
    , m_codeBlock(codeBlock)
    , m_thisRegister(CallFrame::thisArgumentOffset())
    , m_finallyDepth(0)
    , m_dynamicScopeDepth(0)
    , m_baseScopeDepth(0)
    , m_codeType(GlobalCode)
    , m_nextConstantOffset(0)
    , m_globalConstantIndex(0)
    , m_hasCreatedActivation(true)
    , m_firstLazyFunction(0)
    , m_lastLazyFunction(0)
    , m_globalData(scopeChain->globalData)
    , m_lastOpcodeID(op_end)
#ifndef NDEBUG
    , m_lastOpcodePosition(0)
#endif
    , m_stack(wtfThreadData().stack())
    , m_usesExceptions(false)
    , m_expressionTooDeep(false)
{
    m_globalData->startedCompiling(m_codeBlock);
    if (m_shouldEmitDebugHooks)
        m_codeBlock->setNeedsFullScopeChain(true);

    emitOpcode(op_enter);
    codeBlock->setGlobalData(m_globalData);

    // FIXME: Move code that modifies the global object to Interpreter::execute.

    m_codeBlock->setNumParameters(1); // Allocate space for "this"
    codeBlock->m_numCapturedVars = codeBlock->m_numVars;

    if (compilationKind == OptimizingCompilation)
        return;

    JSGlobalObject* globalObject = scopeChain->globalObject.get();
    ExecState* exec = globalObject->globalExec();

    BatchedTransitionOptimizer optimizer(*m_globalData, globalObject);

    const VarStack& varStack = programNode->varStack();
    const FunctionStack& functionStack = programNode->functionStack();

    size_t newGlobals = varStack.size() + functionStack.size();
    if (!newGlobals)
        return;
    globalObject->resizeRegisters(symbolTable->size() + newGlobals);

    for (size_t i = 0; i < functionStack.size(); ++i) {
        FunctionBodyNode* function = functionStack[i];
        globalObject->removeDirect(*m_globalData, function->ident()); // Newly declared functions overwrite existing properties.

        JSValue value = JSFunction::create(exec, makeFunction(exec, function), scopeChain);
        int index = addGlobalVar(function->ident(), false);
        globalObject->registerAt(index).set(*m_globalData, globalObject, value);
    }

    for (size_t i = 0; i < varStack.size(); ++i) {
        if (globalObject->hasProperty(exec, *varStack[i].first))
            continue;
        addGlobalVar(*varStack[i].first, varStack[i].second & DeclarationStacks::IsConstant);
    }
}
BytecodeGenerator::BytecodeGenerator(FunctionBodyNode* functionBody, ScopeChainNode* scopeChain, SymbolTable* symbolTable, CodeBlock* codeBlock, CompilationKind)
    : m_shouldEmitDebugHooks(scopeChain->globalObject->debugger())
    , m_shouldEmitProfileHooks(scopeChain->globalObject->globalObjectMethodTable()->supportsProfiling(scopeChain->globalObject.get()))
    , m_shouldEmitRichSourceInfo(scopeChain->globalObject->globalObjectMethodTable()->supportsRichSourceInfo(scopeChain->globalObject.get()))
    , m_scopeChain(*scopeChain->globalData, scopeChain)
    , m_symbolTable(symbolTable)
    , m_scopeNode(functionBody)
    , m_codeBlock(codeBlock)
    , m_activationRegister(0)
    , m_finallyDepth(0)
    , m_dynamicScopeDepth(0)
    , m_baseScopeDepth(0)
    , m_codeType(FunctionCode)
    , m_nextConstantOffset(0)
    , m_globalConstantIndex(0)
    , m_hasCreatedActivation(false)
    , m_firstLazyFunction(0)
    , m_lastLazyFunction(0)
    , m_globalData(scopeChain->globalData)
    , m_lastOpcodeID(op_end)
#ifndef NDEBUG
    , m_lastOpcodePosition(0)
#endif
    , m_stack(wtfThreadData().stack())
    , m_usesExceptions(false)
    , m_expressionTooDeep(false)
{
    m_globalData->startedCompiling(m_codeBlock);
    if (m_shouldEmitDebugHooks)
        m_codeBlock->setNeedsFullScopeChain(true);

    codeBlock->setGlobalData(m_globalData);

    emitOpcode(op_enter);
    if (m_codeBlock->needsFullScopeChain()) {
        m_activationRegister = addVar();
        emitInitLazyRegister(m_activationRegister);
        m_codeBlock->setActivationRegister(m_activationRegister->index());
    }

    // Both op_tear_off_activation and op_tear_off_arguments tear off the 'arguments'
    // object, if created.
    if (m_codeBlock->needsFullScopeChain() || functionBody->usesArguments()) {
        RegisterID* unmodifiedArgumentsRegister = addVar(); // Anonymous, so it can't be modified by user code.
        RegisterID* argumentsRegister = addVar(propertyNames().arguments, false); // Can be changed by assigning to 'arguments'.

        // We can save a little space by hard-coding the knowledge that the two
        // 'arguments' values are stored in consecutive registers, and storing
        // only the index of the assignable one.
        codeBlock->setArgumentsRegister(argumentsRegister->index());
        ASSERT_UNUSED(unmodifiedArgumentsRegister, unmodifiedArgumentsRegister->index() == JSC::unmodifiedArgumentsRegister(codeBlock->argumentsRegister()));

        emitInitLazyRegister(argumentsRegister);
        emitInitLazyRegister(unmodifiedArgumentsRegister);

        if (m_codeBlock->isStrictMode()) {
            emitOpcode(op_create_arguments);
            instructions().append(argumentsRegister->index());
        }

        // The debugger currently retrieves the arguments object from an activation rather than pulling
        // it from a call frame. In the long-term it should stop doing that (<rdar://problem/6911886>),
        // but for now we force eager creation of the arguments object when debugging.
        if (m_shouldEmitDebugHooks) {
            emitOpcode(op_create_arguments);
            instructions().append(argumentsRegister->index());
        }
    }

    const DeclarationStacks::FunctionStack& functionStack = functionBody->functionStack();
    const DeclarationStacks::VarStack& varStack = functionBody->varStack();

    // Captured variables and functions go first so that activations don't have
    // to step over the non-captured locals to mark them.
    m_hasCreatedActivation = false;
    if (functionBody->hasCapturedVariables()) {
        for (size_t i = 0; i < functionStack.size(); ++i) {
            FunctionBodyNode* function = functionStack[i];
            const Identifier& ident = function->ident();
            if (functionBody->captures(ident)) {
                if (!m_hasCreatedActivation) {
                    m_hasCreatedActivation = true;
                    emitOpcode(op_create_activation);
                    instructions().append(m_activationRegister->index());
                }
                m_functions.add(ident.impl());
                emitNewFunction(addVar(ident, false), function);
            }
        }
        for (size_t i = 0; i < varStack.size(); ++i) {
            const Identifier& ident = *varStack[i].first;
            if (functionBody->captures(ident))
                addVar(ident, varStack[i].second & DeclarationStacks::IsConstant);
        }
    }
    bool canLazilyCreateFunctions = !functionBody->needsActivationForMoreThanVariables() && !m_shouldEmitDebugHooks;
    if (!canLazilyCreateFunctions && !m_hasCreatedActivation) {
        m_hasCreatedActivation = true;
        emitOpcode(op_create_activation);
        instructions().append(m_activationRegister->index());
    }

    codeBlock->m_numCapturedVars = codeBlock->m_numVars;
    m_firstLazyFunction = codeBlock->m_numVars;
    for (size_t i = 0; i < functionStack.size(); ++i) {
        FunctionBodyNode* function = functionStack[i];
        const Identifier& ident = function->ident();
        if (!functionBody->captures(ident)) {
            m_functions.add(ident.impl());
            RefPtr<RegisterID> reg = addVar(ident, false);
            // Don't lazily create functions that override the name 'arguments'
            // as this would complicate lazy instantiation of actual arguments.
            if (!canLazilyCreateFunctions || ident == propertyNames().arguments)
                emitNewFunction(reg.get(), function);
            else {
                emitInitLazyRegister(reg.get());
                m_lazyFunctions.set(reg->index(), function);
            }
        }
    }
    m_lastLazyFunction = canLazilyCreateFunctions ? codeBlock->m_numVars : m_firstLazyFunction;
    for (size_t i = 0; i < varStack.size(); ++i) {
        const Identifier& ident = *varStack[i].first;
        if (!functionBody->captures(ident))
            addVar(ident, varStack[i].second & DeclarationStacks::IsConstant);
    }

    if (m_shouldEmitDebugHooks)
        codeBlock->m_numCapturedVars = codeBlock->m_numVars;

    FunctionParameters& parameters = *functionBody->parameters();
    m_parameters.grow(parameters.size() + 1); // reserve space for "this"

    // Add "this" as a parameter
    int nextParameterIndex = CallFrame::thisArgumentOffset();
    m_thisRegister.setIndex(nextParameterIndex--);
    m_codeBlock->addParameter();

    for (size_t i = 0; i < parameters.size(); ++i)
        addParameter(parameters[i], nextParameterIndex--);

    preserveLastVar();

    if (isConstructor()) {
        RefPtr<RegisterID> func = newTemporary();
        RefPtr<RegisterID> funcProto = newTemporary();

        emitOpcode(op_get_callee);
        instructions().append(func->index());
        // Load prototype.
        emitGetById(funcProto.get(), func.get(), globalData()->propertyNames->prototype);

        emitOpcode(op_create_this);
        instructions().append(m_thisRegister.index());
        instructions().append(funcProto->index());
    } else if (!codeBlock->isStrictMode() && (functionBody->usesThis() || codeBlock->usesEval() || m_shouldEmitDebugHooks)) {
        emitOpcode(op_convert_this);
        instructions().append(m_thisRegister.index());
    }
}
BytecodeGenerator::BytecodeGenerator(EvalNode* evalNode, ScopeChainNode* scopeChain, SymbolTable* symbolTable, EvalCodeBlock* codeBlock, CompilationKind)
    : m_shouldEmitDebugHooks(scopeChain->globalObject->debugger())
    , m_shouldEmitProfileHooks(scopeChain->globalObject->globalObjectMethodTable()->supportsProfiling(scopeChain->globalObject.get()))
    , m_shouldEmitRichSourceInfo(scopeChain->globalObject->globalObjectMethodTable()->supportsRichSourceInfo(scopeChain->globalObject.get()))
    , m_scopeChain(*scopeChain->globalData, scopeChain)
    , m_symbolTable(symbolTable)
    , m_scopeNode(evalNode)
    , m_codeBlock(codeBlock)
    , m_thisRegister(CallFrame::thisArgumentOffset())
    , m_finallyDepth(0)
    , m_dynamicScopeDepth(0)
    , m_baseScopeDepth(codeBlock->baseScopeDepth())
    , m_codeType(EvalCode)
    , m_nextConstantOffset(0)
    , m_globalConstantIndex(0)
    , m_hasCreatedActivation(true)
    , m_firstLazyFunction(0)
    , m_lastLazyFunction(0)
    , m_globalData(scopeChain->globalData)
    , m_lastOpcodeID(op_end)
#ifndef NDEBUG
    , m_lastOpcodePosition(0)
#endif
    , m_stack(wtfThreadData().stack())
    , m_usesExceptions(false)
    , m_expressionTooDeep(false)
{
    m_globalData->startedCompiling(m_codeBlock);
    if (m_shouldEmitDebugHooks || m_baseScopeDepth)
        m_codeBlock->setNeedsFullScopeChain(true);

    emitOpcode(op_enter);
    codeBlock->setGlobalData(m_globalData);
    m_codeBlock->setNumParameters(1);

    const DeclarationStacks::FunctionStack& functionStack = evalNode->functionStack();
    for (size_t i = 0; i < functionStack.size(); ++i)
        m_codeBlock->addFunctionDecl(makeFunction(m_globalData, functionStack[i]));

    const DeclarationStacks::VarStack& varStack = evalNode->varStack();
    unsigned numVariables = varStack.size();
    Vector<Identifier> variables;
    variables.reserveCapacity(numVariables);
    for (size_t i = 0; i < numVariables; ++i)
        variables.append(*varStack[i].first);
    codeBlock->adoptVariables(variables);
    codeBlock->m_numCapturedVars = codeBlock->m_numVars;
    preserveLastVar();
}

BytecodeGenerator::~BytecodeGenerator()
{
    m_globalData->finishedCompiling(m_codeBlock);
}
RegisterID* BytecodeGenerator::emitInitLazyRegister(RegisterID* reg)
{
    emitOpcode(op_init_lazy_reg);
    instructions().append(reg->index());
    return reg;
}

void BytecodeGenerator::addParameter(const Identifier& ident, int parameterIndex)
{
    // Parameters overwrite var declarations, but not function declarations.
    StringImpl* rep = ident.impl();
    if (!m_functions.contains(rep)) {
        symbolTable().set(rep, parameterIndex);
        RegisterID& parameter = registerFor(parameterIndex);
        parameter.setIndex(parameterIndex);
    }

    // To maintain the calling convention, we have to allocate unique space for
    // each parameter, even if the parameter doesn't make it into the symbol table.
    m_codeBlock->addParameter();
}

RegisterID* BytecodeGenerator::registerFor(const Identifier& ident)
{
    if (ident == propertyNames().thisIdentifier)
        return &m_thisRegister;

    if (m_codeType == GlobalCode)
        return 0;

    if (!shouldOptimizeLocals())
        return 0;

    SymbolTableEntry entry = symbolTable().get(ident.impl());
    if (entry.isNull())
        return 0;

    if (ident == propertyNames().arguments)
        createArgumentsIfNecessary();

    return createLazyRegisterIfNecessary(&registerFor(entry.getIndex()));
}

RegisterID* BytecodeGenerator::constRegisterFor(const Identifier& ident)
{
    if (m_codeType == EvalCode)
        return 0;

    if (m_codeType == GlobalCode)
        return 0;

    SymbolTableEntry entry = symbolTable().get(ident.impl());
    if (entry.isNull())
        return 0;

    return createLazyRegisterIfNecessary(&registerFor(entry.getIndex()));
}

bool BytecodeGenerator::willResolveToArguments(const Identifier& ident)
{
    if (ident != propertyNames().arguments)
        return false;

    if (!shouldOptimizeLocals())
        return false;

    SymbolTableEntry entry = symbolTable().get(ident.impl());
    if (entry.isNull())
        return false;

    if (m_codeBlock->usesArguments() && m_codeType == FunctionCode)
        return true;

    return false;
}

RegisterID* BytecodeGenerator::uncheckedRegisterForArguments()
{
    ASSERT(willResolveToArguments(propertyNames().arguments));

    SymbolTableEntry entry = symbolTable().get(propertyNames().arguments.impl());
    ASSERT(!entry.isNull());
    return &registerFor(entry.getIndex());
}
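// Uncaptured function declarations get registers in the range [m_firstLazyFunction,
// m_lastLazyFunction) and are only materialized on first use; createLazyRegisterIfNecessary
// emits the deferred function creation for such a register before handing it out.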
RegisterID* BytecodeGenerator::createLazyRegisterIfNecessary(RegisterID* reg)
{
    if (m_lastLazyFunction <= reg->index() || reg->index() < m_firstLazyFunction)
        return reg;
    emitLazyNewFunction(reg, m_lazyFunctions.get(reg->index()));
    return reg;
}

bool BytecodeGenerator::isLocal(const Identifier& ident)
{
    if (ident == propertyNames().thisIdentifier)
        return true;

    return shouldOptimizeLocals() && symbolTable().contains(ident.impl());
}

bool BytecodeGenerator::isLocalConstant(const Identifier& ident)
{
    return symbolTable().get(ident.impl()).isReadOnly();
}

RegisterID* BytecodeGenerator::newRegister()
{
    m_calleeRegisters.append(m_calleeRegisters.size());
    m_codeBlock->m_numCalleeRegisters = max<int>(m_codeBlock->m_numCalleeRegisters, m_calleeRegisters.size());
    return &m_calleeRegisters.last();
}

RegisterID* BytecodeGenerator::newTemporary()
{
    // Reclaim free register IDs.
    while (m_calleeRegisters.size() && !m_calleeRegisters.last().refCount())
        m_calleeRegisters.removeLast();

    RegisterID* result = newRegister();
    result->setTemporary();
    return result;
}

RegisterID* BytecodeGenerator::highestUsedRegister()
{
    size_t count = m_codeBlock->m_numCalleeRegisters;
    while (m_calleeRegisters.size() < count)
        newRegister();
    return &m_calleeRegisters.last();
}
PassRefPtr<LabelScope> BytecodeGenerator::newLabelScope(LabelScope::Type type, const Identifier* name)
{
    // Reclaim free label scopes.
    while (m_labelScopes.size() && !m_labelScopes.last().refCount())
        m_labelScopes.removeLast();

    // Allocate new label scope.
    LabelScope scope(type, name, scopeDepth(), newLabel(), type == LabelScope::Loop ? newLabel() : PassRefPtr<Label>()); // Only loops have continue targets.
    m_labelScopes.append(scope);
    return &m_labelScopes.last();
}

PassRefPtr<Label> BytecodeGenerator::newLabel()
{
    // Reclaim free label IDs.
    while (m_labels.size() && !m_labels.last().refCount())
        m_labels.removeLast();

    // Allocate new label ID.
    m_labels.append(this);
    return &m_labels.last();
}

PassRefPtr<Label> BytecodeGenerator::emitLabel(Label* l0)
{
    unsigned newLabelIndex = instructions().size();
    l0->setLocation(newLabelIndex);

    if (m_codeBlock->numberOfJumpTargets()) {
        unsigned lastLabelIndex = m_codeBlock->lastJumpTarget();
        ASSERT(lastLabelIndex <= newLabelIndex);
        if (newLabelIndex == lastLabelIndex) {
            // Peephole optimizations have already been disabled by emitting the last label
            return l0;
        }
    }

    m_codeBlock->addJumpTarget(newLabelIndex);

    // This disables peephole optimizations when an instruction is a jump target
    m_lastOpcodeID = op_end;
    return l0;
}
void BytecodeGenerator::emitOpcode(OpcodeID opcodeID)
{
#ifndef NDEBUG
    size_t opcodePosition = instructions().size();
    ASSERT(opcodePosition - m_lastOpcodePosition == opcodeLength(m_lastOpcodeID) || m_lastOpcodeID == op_end);
    m_lastOpcodePosition = opcodePosition;
#endif
    instructions().append(globalData()->interpreter->getOpcode(opcodeID));
    m_lastOpcodeID = opcodeID;
}

ValueProfile* BytecodeGenerator::emitProfiledOpcode(OpcodeID opcodeID)
{
#if ENABLE(VALUE_PROFILER)
    ValueProfile* result = m_codeBlock->addValueProfile(instructions().size());
#else
    ValueProfile* result = 0;
#endif
    emitOpcode(opcodeID);
    return result;
}

void BytecodeGenerator::emitLoopHint()
{
#if ENABLE(DFG_JIT)
    emitOpcode(op_loop_hint);
#endif
}

void BytecodeGenerator::retrieveLastBinaryOp(int& dstIndex, int& src1Index, int& src2Index)
{
    ASSERT(instructions().size() >= 4);
    size_t size = instructions().size();
    dstIndex = instructions().at(size - 3).u.operand;
    src1Index = instructions().at(size - 2).u.operand;
    src2Index = instructions().at(size - 1).u.operand;
}

void BytecodeGenerator::retrieveLastUnaryOp(int& dstIndex, int& srcIndex)
{
    ASSERT(instructions().size() >= 3);
    size_t size = instructions().size();
    dstIndex = instructions().at(size - 2).u.operand;
    srcIndex = instructions().at(size - 1).u.operand;
}

void ALWAYS_INLINE BytecodeGenerator::rewindBinaryOp()
{
    ASSERT(instructions().size() >= 4);
    instructions().shrink(instructions().size() - 4);
    m_lastOpcodeID = op_end;
}

void ALWAYS_INLINE BytecodeGenerator::rewindUnaryOp()
{
    ASSERT(instructions().size() >= 3);
    instructions().shrink(instructions().size() - 3);
    m_lastOpcodeID = op_end;
}

PassRefPtr<Label> BytecodeGenerator::emitJump(Label* target)
{
    size_t begin = instructions().size();
    emitOpcode(target->isForward() ? op_jmp : op_loop);
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}
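// emitJumpIfTrue and emitJumpIfFalse peephole-fuse a preceding comparison (op_less,
// op_lesseq, op_greater, op_greatereq) or null test with the branch: when the condition
// register is a dead temporary produced by the last opcode, that opcode is rewound and a
// single fused jump (e.g. op_jless / op_loop_if_less) is emitted instead.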
PassRefPtr<Label> BytecodeGenerator::emitJumpIfTrue(RegisterID* cond, Label* target)
{
    if (m_lastOpcodeID == op_less) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(target->isForward() ? op_jless : op_loop_if_less);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_lesseq) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(target->isForward() ? op_jlesseq : op_loop_if_lesseq);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_greater) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(target->isForward() ? op_jgreater : op_loop_if_greater);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_greatereq) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(target->isForward() ? op_jgreatereq : op_loop_if_greatereq);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jeq_null);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jneq_null);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    }

    size_t begin = instructions().size();

    emitOpcode(target->isForward() ? op_jtrue : op_loop_if_true);
    instructions().append(cond->index());
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}
PassRefPtr<Label> BytecodeGenerator::emitJumpIfFalse(RegisterID* cond, Label* target)
{
    if (m_lastOpcodeID == op_less && target->isForward()) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jnless);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_lesseq && target->isForward()) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jnlesseq);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_greater && target->isForward()) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jngreater);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_greatereq && target->isForward()) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jngreatereq);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_not) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(target->isForward() ? op_jtrue : op_loop_if_true);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jneq_null);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jeq_null);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    }

    size_t begin = instructions().size();
    emitOpcode(target->isForward() ? op_jfalse : op_loop_if_false);
    instructions().append(cond->index());
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}
PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionCall(RegisterID* cond, Label* target)
{
    size_t begin = instructions().size();

    emitOpcode(op_jneq_ptr);
    instructions().append(cond->index());
    instructions().append(Instruction(*m_globalData, m_codeBlock->ownerExecutable(), m_scopeChain->globalObject->callFunction()));
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}

PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionApply(RegisterID* cond, Label* target)
{
    size_t begin = instructions().size();

    emitOpcode(op_jneq_ptr);
    instructions().append(cond->index());
    instructions().append(Instruction(*m_globalData, m_codeBlock->ownerExecutable(), m_scopeChain->globalObject->applyFunction()));
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}

unsigned BytecodeGenerator::addConstant(const Identifier& ident)
{
    StringImpl* rep = ident.impl();
    IdentifierMap::AddResult result = m_identifierMap.add(rep, m_codeBlock->numberOfIdentifiers());
    if (result.isNewEntry)
        m_codeBlock->addIdentifier(Identifier(m_globalData, rep));

    return result.iterator->second;
}
RegisterID* BytecodeGenerator::addConstantValue(JSValue v)
{
    int index = m_nextConstantOffset;

    JSValueMap::AddResult result = m_jsValueMap.add(JSValue::encode(v), m_nextConstantOffset);
    if (result.isNewEntry) {
        m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset);
        ++m_nextConstantOffset;
        m_codeBlock->addConstant(JSValue(v));
    } else
        index = result.iterator->second;

    return &m_constantPoolRegisters[index];
}

unsigned BytecodeGenerator::addRegExp(RegExp* r)
{
    return m_codeBlock->addRegExp(r);
}

RegisterID* BytecodeGenerator::emitMove(RegisterID* dst, RegisterID* src)
{
    emitOpcode(op_mov);
    instructions().append(dst->index());
    instructions().append(src->index());
    return dst;
}

RegisterID* BytecodeGenerator::emitUnaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src)
{
    emitOpcode(opcodeID);
    instructions().append(dst->index());
    instructions().append(src->index());
    return dst;
}

RegisterID* BytecodeGenerator::emitPreInc(RegisterID* srcDst)
{
    emitOpcode(op_pre_inc);
    instructions().append(srcDst->index());
    return srcDst;
}

RegisterID* BytecodeGenerator::emitPreDec(RegisterID* srcDst)
{
    emitOpcode(op_pre_dec);
    instructions().append(srcDst->index());
    return srcDst;
}

RegisterID* BytecodeGenerator::emitPostInc(RegisterID* dst, RegisterID* srcDst)
{
    emitOpcode(op_post_inc);
    instructions().append(dst->index());
    instructions().append(srcDst->index());
    return dst;
}

RegisterID* BytecodeGenerator::emitPostDec(RegisterID* dst, RegisterID* srcDst)
{
    emitOpcode(op_post_dec);
    instructions().append(dst->index());
    instructions().append(srcDst->index());
    return dst;
}

RegisterID* BytecodeGenerator::emitBinaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2, OperandTypes types)
{
    emitOpcode(opcodeID);
    instructions().append(dst->index());
    instructions().append(src1->index());
    instructions().append(src2->index());

    if (opcodeID == op_bitor || opcodeID == op_bitand || opcodeID == op_bitxor ||
        opcodeID == op_add || opcodeID == op_mul || opcodeID == op_sub || opcodeID == op_div)
        instructions().append(types.toInt());

    return dst;
}
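// emitEqualityOp recognizes "typeof x == <string literal>" when the typeof result is a
// dead temporary and replaces the pair with a dedicated opcode (op_is_undefined,
// op_is_boolean, op_is_number, op_is_string, op_is_object or op_is_function).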
RegisterID* BytecodeGenerator::emitEqualityOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2)
{
    if (m_lastOpcodeID == op_typeof) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (src1->index() == dstIndex
            && src1->isTemporary()
            && m_codeBlock->isConstantRegisterIndex(src2->index())
            && m_codeBlock->constantRegister(src2->index()).get().isString()) {
            const UString& value = asString(m_codeBlock->constantRegister(src2->index()).get())->tryGetValue();
            if (value == "undefined") {
                rewindUnaryOp();
                emitOpcode(op_is_undefined);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "boolean") {
                rewindUnaryOp();
                emitOpcode(op_is_boolean);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "number") {
                rewindUnaryOp();
                emitOpcode(op_is_number);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "string") {
                rewindUnaryOp();
                emitOpcode(op_is_string);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "object") {
                rewindUnaryOp();
                emitOpcode(op_is_object);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "function") {
                rewindUnaryOp();
                emitOpcode(op_is_function);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
        }
    }

    emitOpcode(opcodeID);
    instructions().append(dst->index());
    instructions().append(src1->index());
    instructions().append(src2->index());
    return dst;
}

RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, bool b)
{
    return emitLoad(dst, jsBoolean(b));
}

RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, double number)
{
    // FIXME: Our hash tables won't hold infinity, so we make a new JSValue each time.
    // Later we can do the extra work to handle that like the other cases. They also don't
    // work correctly with NaN as a key.
    if (isnan(number) || number == HashTraits<double>::emptyValue() || HashTraits<double>::isDeletedValue(number))
        return emitLoad(dst, jsNumber(number));
    JSValue& valueInMap = m_numberMap.add(number, JSValue()).iterator->second;
    if (!valueInMap)
        valueInMap = jsNumber(number);
    return emitLoad(dst, valueInMap);
}

RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, const Identifier& identifier)
{
    JSString*& stringInMap = m_stringMap.add(identifier.impl(), 0).iterator->second;
    if (!stringInMap)
        stringInMap = jsOwnedString(globalData(), identifier.ustring());
    return emitLoad(dst, JSValue(stringInMap));
}

RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, JSValue v)
{
    RegisterID* constantID = addConstantValue(v);
    if (dst)
        return emitMove(dst, constantID);
    return constantID;
}
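// findScopedProperty walks the static scope chain looking for 'property'. On success it
// reports the symbol's register index and the scope depth so callers can emit direct
// scoped-variable access, and it records whether any intervening scope (e.g. one created
// by eval or 'with') requires dynamic checks.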
bool BytecodeGenerator::findScopedProperty(const Identifier& property, int& index, size_t& stackDepth, bool forWriting, bool& requiresDynamicChecks, JSObject*& globalObject)
{
    // Cases where we cannot statically optimize the lookup.
    if (property == propertyNames().arguments || !canOptimizeNonLocals()) {
        stackDepth = 0;
        index = missingSymbolMarker();

        if (shouldOptimizeLocals() && m_codeType == GlobalCode) {
            ScopeChainIterator iter = m_scopeChain->begin();
            globalObject = iter->get();
            ASSERT((++iter) == m_scopeChain->end());
        }
        return false;
    }

    size_t depth = 0;
    requiresDynamicChecks = false;
    ScopeChainIterator iter = m_scopeChain->begin();
    ScopeChainIterator end = m_scopeChain->end();
    for (; iter != end; ++iter, ++depth) {
        JSObject* currentScope = iter->get();
        if (!currentScope->isVariableObject())
            break;
        JSVariableObject* currentVariableObject = jsCast<JSVariableObject*>(currentScope);
        SymbolTableEntry entry = currentVariableObject->symbolTable().get(property.impl());

        // Found the property
        if (!entry.isNull()) {
            if (entry.isReadOnly() && forWriting) {
                stackDepth = 0;
                index = missingSymbolMarker();
                if (shouldOptimizeLocals() && m_codeType == GlobalCode)
                    globalObject = currentVariableObject;
                return false;
            }
            stackDepth = depth + m_codeBlock->needsFullScopeChain();
            index = entry.getIndex();
            if (++iter == end)
                globalObject = currentVariableObject;
            return true;
        }
        bool scopeRequiresDynamicChecks = false;
        if (currentVariableObject->isDynamicScope(scopeRequiresDynamicChecks))
            break;
        requiresDynamicChecks |= scopeRequiresDynamicChecks;
    }
    // Can't locate the property but we're able to avoid a few lookups.
    stackDepth = depth + m_codeBlock->needsFullScopeChain();
    index = missingSymbolMarker();
    JSObject* scope = iter->get();
    if (++iter == end)
        globalObject = scope;
    return false;
}

void BytecodeGenerator::emitCheckHasInstance(RegisterID* base)
{
    emitOpcode(op_check_has_instance);
    instructions().append(base->index());
}

RegisterID* BytecodeGenerator::emitInstanceOf(RegisterID* dst, RegisterID* value, RegisterID* base, RegisterID* basePrototype)
{
    emitOpcode(op_instanceof);
    instructions().append(dst->index());
    instructions().append(value->index());
    instructions().append(base->index());
    instructions().append(basePrototype->index());
    return dst;
}

static const unsigned maxGlobalResolves = 128;

bool BytecodeGenerator::shouldAvoidResolveGlobal()
{
    return m_codeBlock->globalResolveInfoCount() > maxGlobalResolves && !m_labelScopes.size();
}
RegisterID* BytecodeGenerator::emitResolve(RegisterID* dst, const Identifier& property)
{
    size_t depth = 0;
    int index = 0;
    JSObject* globalObject = 0;
    bool requiresDynamicChecks = false;
    if (!findScopedProperty(property, index, depth, false, requiresDynamicChecks, globalObject) && !globalObject) {
        // We can't optimise at all :-(
        ValueProfile* profile = emitProfiledOpcode(op_resolve);
        instructions().append(dst->index());
        instructions().append(addConstant(property));
        instructions().append(profile);
        return dst;
    }
    if (shouldAvoidResolveGlobal()) {
        globalObject = 0;
        requiresDynamicChecks = true;
    }

    if (globalObject) {
        if (index != missingSymbolMarker() && !requiresDynamicChecks) {
            // Directly index the property lookup across multiple scopes.
            return emitGetScopedVar(dst, depth, index, globalObject);
        }

#if ENABLE(JIT)
        m_codeBlock->addGlobalResolveInfo(instructions().size());
#endif
#if ENABLE(CLASSIC_INTERPRETER)
        m_codeBlock->addGlobalResolveInstruction(instructions().size());
#endif
        ValueProfile* profile = emitProfiledOpcode(requiresDynamicChecks ? op_resolve_global_dynamic : op_resolve_global);
        instructions().append(dst->index());
        instructions().append(addConstant(property));
        instructions().append(0);
        instructions().append(0);
        if (requiresDynamicChecks)
            instructions().append(depth);
        instructions().append(profile);
        return dst;
    }

    if (requiresDynamicChecks) {
        // If we get here we have eval nested inside a |with| just give up
        ValueProfile* profile = emitProfiledOpcode(op_resolve);
        instructions().append(dst->index());
        instructions().append(addConstant(property));
        instructions().append(profile);
        return dst;
    }

    if (index != missingSymbolMarker()) {
        // Directly index the property lookup across multiple scopes.
        return emitGetScopedVar(dst, depth, index, globalObject);
    }

    // In this case we are at least able to drop a few scope chains from the
    // lookup chain, although we still need to hash from then on.
    ValueProfile* profile = emitProfiledOpcode(op_resolve_skip);
    instructions().append(dst->index());
    instructions().append(addConstant(property));
    instructions().append(depth);
    instructions().append(profile);
    return dst;
}
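// A global-variable get that immediately follows a put of the same register is
// redundant; the peephole below just reuses the value already sitting in dst.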
RegisterID* BytecodeGenerator::emitGetScopedVar(RegisterID* dst, size_t depth, int index, JSValue globalObject)
{
    if (globalObject) {
        if (m_lastOpcodeID == op_put_global_var) {
            int dstIndex;
            int srcIndex;
            retrieveLastUnaryOp(dstIndex, srcIndex);

            if (dstIndex == index && srcIndex == dst->index())
                return dst;
        }

        ValueProfile* profile = emitProfiledOpcode(op_get_global_var);
        instructions().append(dst->index());
        instructions().append(index);
        instructions().append(profile);
        return dst;
    }

    ValueProfile* profile = emitProfiledOpcode(op_get_scoped_var);
    instructions().append(dst->index());
    instructions().append(index);
    instructions().append(depth);
    instructions().append(profile);
    return dst;
}

RegisterID* BytecodeGenerator::emitPutScopedVar(size_t depth, int index, RegisterID* value, JSValue globalObject)
{
    if (globalObject) {
        emitOpcode(op_put_global_var);
        instructions().append(index);
        instructions().append(value->index());
        return value;
    }
    emitOpcode(op_put_scoped_var);
    instructions().append(index);
    instructions().append(depth);
    instructions().append(value->index());
    return value;
}
RegisterID* BytecodeGenerator::emitResolveBase(RegisterID* dst, const Identifier& property)
{
    size_t depth = 0;
    int index = 0;
    JSObject* globalObject = 0;
    bool requiresDynamicChecks = false;
    findScopedProperty(property, index, depth, false, requiresDynamicChecks, globalObject);
    if (!globalObject || requiresDynamicChecks) {
        // We can't optimise at all :-(
        ValueProfile* profile = emitProfiledOpcode(op_resolve_base);
        instructions().append(dst->index());
        instructions().append(addConstant(property));
        instructions().append(false);
        instructions().append(profile);
        return dst;
    }

    // Global object is the base
    return emitLoad(dst, JSValue(globalObject));
}

RegisterID* BytecodeGenerator::emitResolveBaseForPut(RegisterID* dst, const Identifier& property)
{
    if (!m_codeBlock->isStrictMode())
        return emitResolveBase(dst, property);
    size_t depth = 0;
    int index = 0;
    JSObject* globalObject = 0;
    bool requiresDynamicChecks = false;
    findScopedProperty(property, index, depth, false, requiresDynamicChecks, globalObject);
    if (!globalObject || requiresDynamicChecks) {
        // We can't optimise at all :-(
        ValueProfile* profile = emitProfiledOpcode(op_resolve_base);
        instructions().append(dst->index());
        instructions().append(addConstant(property));
        instructions().append(true);
        instructions().append(profile);
        return dst;
    }

    // Global object is the base
    RefPtr<RegisterID> result = emitLoad(dst, JSValue(globalObject));
    emitOpcode(op_ensure_property_exists);
    instructions().append(dst->index());
    instructions().append(addConstant(property));
    return result.get();
}
RegisterID* BytecodeGenerator::emitResolveWithBase(RegisterID* baseDst, RegisterID* propDst, const Identifier& property)
{
    size_t depth = 0;
    int index = 0;
    JSObject* globalObject = 0;
    bool requiresDynamicChecks = false;
    if (!findScopedProperty(property, index, depth, false, requiresDynamicChecks, globalObject) || !globalObject || requiresDynamicChecks) {
        // We can't optimise at all :-(
        ValueProfile* profile = emitProfiledOpcode(op_resolve_with_base);
        instructions().append(baseDst->index());
        instructions().append(propDst->index());
        instructions().append(addConstant(property));
        instructions().append(profile);
        return baseDst;
    }

    bool forceGlobalResolve = false;

    // Global object is the base
    emitLoad(baseDst, JSValue(globalObject));

    if (index != missingSymbolMarker() && !forceGlobalResolve) {
        // Directly index the property lookup across multiple scopes.
        emitGetScopedVar(propDst, depth, index, globalObject);
        return baseDst;
    }
    if (shouldAvoidResolveGlobal()) {
        ValueProfile* profile = emitProfiledOpcode(op_resolve);
        instructions().append(propDst->index());
        instructions().append(addConstant(property));
        instructions().append(profile);
        return baseDst;
    }
#if ENABLE(JIT)
    m_codeBlock->addGlobalResolveInfo(instructions().size());
#endif
#if ENABLE(CLASSIC_INTERPRETER)
    m_codeBlock->addGlobalResolveInstruction(instructions().size());
#endif
    ValueProfile* profile = emitProfiledOpcode(requiresDynamicChecks ? op_resolve_global_dynamic : op_resolve_global);
    instructions().append(propDst->index());
    instructions().append(addConstant(property));
    instructions().append(0);
    instructions().append(0);
    if (requiresDynamicChecks)
        instructions().append(depth);
    instructions().append(profile);
    return baseDst;
}

RegisterID* BytecodeGenerator::emitResolveWithThis(RegisterID* baseDst, RegisterID* propDst, const Identifier& property)
{
    size_t depth = 0;
    int index = 0;
    JSObject* globalObject = 0;
    bool requiresDynamicChecks = false;
    if (!findScopedProperty(property, index, depth, false, requiresDynamicChecks, globalObject) || !globalObject || requiresDynamicChecks) {
        // We can't optimise at all :-(
        ValueProfile* profile = emitProfiledOpcode(op_resolve_with_this);
        instructions().append(baseDst->index());
        instructions().append(propDst->index());
        instructions().append(addConstant(property));
        instructions().append(profile);
        return baseDst;
    }

    bool forceGlobalResolve = false;

    // Global object is the base
    emitLoad(baseDst, jsUndefined());

    if (index != missingSymbolMarker() && !forceGlobalResolve) {
        // Directly index the property lookup across multiple scopes.
        emitGetScopedVar(propDst, depth, index, globalObject);
        return baseDst;
    }
    if (shouldAvoidResolveGlobal()) {
        ValueProfile* profile = emitProfiledOpcode(op_resolve);
        instructions().append(propDst->index());
        instructions().append(addConstant(property));
        instructions().append(profile);
        return baseDst;
    }
#if ENABLE(JIT)
    m_codeBlock->addGlobalResolveInfo(instructions().size());
#endif
#if ENABLE(CLASSIC_INTERPRETER)
    m_codeBlock->addGlobalResolveInstruction(instructions().size());
#endif
    ValueProfile* profile = emitProfiledOpcode(requiresDynamicChecks ? op_resolve_global_dynamic : op_resolve_global);
    instructions().append(propDst->index());
    instructions().append(addConstant(property));
    instructions().append(0);
    instructions().append(0);
    if (requiresDynamicChecks)
        instructions().append(depth);
    instructions().append(profile);
    return baseDst;
}
void BytecodeGenerator::emitMethodCheck()
{
    emitOpcode(op_method_check);
}

RegisterID* BytecodeGenerator::emitGetById(RegisterID* dst, RegisterID* base, const Identifier& property)
{
    m_codeBlock->addPropertyAccessInstruction(instructions().size());

    ValueProfile* profile = emitProfiledOpcode(op_get_by_id);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(addConstant(property));
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(profile);
    return dst;
}

RegisterID* BytecodeGenerator::emitGetArgumentsLength(RegisterID* dst, RegisterID* base)
{
    emitOpcode(op_get_arguments_length);
    instructions().append(dst->index());
    ASSERT(base->index() == m_codeBlock->argumentsRegister());
    instructions().append(base->index());
    instructions().append(addConstant(propertyNames().length));
    return dst;
}

RegisterID* BytecodeGenerator::emitPutById(RegisterID* base, const Identifier& property, RegisterID* value)
{
    m_codeBlock->addPropertyAccessInstruction(instructions().size());

    emitOpcode(op_put_by_id);
    instructions().append(base->index());
    instructions().append(addConstant(property));
    instructions().append(value->index());
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    return value;
}

RegisterID* BytecodeGenerator::emitDirectPutById(RegisterID* base, const Identifier& property, RegisterID* value)
{
    m_codeBlock->addPropertyAccessInstruction(instructions().size());

    emitOpcode(op_put_by_id);
    instructions().append(base->index());
    instructions().append(addConstant(property));
    instructions().append(value->index());
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(property != m_globalData->propertyNames->underscoreProto);
    return value;
}

void BytecodeGenerator::emitPutGetterSetter(RegisterID* base, const Identifier& property, RegisterID* getter, RegisterID* setter)
{
    emitOpcode(op_put_getter_setter);
    instructions().append(base->index());
    instructions().append(addConstant(property));
    instructions().append(getter->index());
    instructions().append(setter->index());
}

RegisterID* BytecodeGenerator::emitDeleteById(RegisterID* dst, RegisterID* base, const Identifier& property)
{
    emitOpcode(op_del_by_id);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(addConstant(property));
    return dst;
}
RegisterID* BytecodeGenerator::emitGetArgumentByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
{
    ValueProfile* profile = emitProfiledOpcode(op_get_argument_by_val);
    instructions().append(dst->index());
    ASSERT(base->index() == m_codeBlock->argumentsRegister());
    instructions().append(base->index());
    instructions().append(property->index());
    instructions().append(profile);
    return dst;
}

RegisterID* BytecodeGenerator::emitGetByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
{
    for (size_t i = m_forInContextStack.size(); i > 0; i--) {
        ForInContext& context = m_forInContextStack[i - 1];
        if (context.propertyRegister == property) {
            emitOpcode(op_get_by_pname);
            instructions().append(dst->index());
            instructions().append(base->index());
            instructions().append(property->index());
            instructions().append(context.expectedSubscriptRegister->index());
            instructions().append(context.iterRegister->index());
            instructions().append(context.indexRegister->index());
            return dst;
        }
    }

    ValueProfile* profile = emitProfiledOpcode(op_get_by_val);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(property->index());
    instructions().append(profile);
    return dst;
}

RegisterID* BytecodeGenerator::emitPutByVal(RegisterID* base, RegisterID* property, RegisterID* value)
{
    emitOpcode(op_put_by_val);
    instructions().append(base->index());
    instructions().append(property->index());
    instructions().append(value->index());
    return value;
}

RegisterID* BytecodeGenerator::emitDeleteByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
{
    emitOpcode(op_del_by_val);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(property->index());
    return dst;
}

RegisterID* BytecodeGenerator::emitPutByIndex(RegisterID* base, unsigned index, RegisterID* value)
{
    emitOpcode(op_put_by_index);
    instructions().append(base->index());
    instructions().append(index);
    instructions().append(value->index());
    return value;
}

RegisterID* BytecodeGenerator::emitNewObject(RegisterID* dst)
{
    emitOpcode(op_new_object);
    instructions().append(dst->index());
    return dst;
}

unsigned BytecodeGenerator::addConstantBuffer(unsigned length)
{
    return m_codeBlock->addConstantBuffer(length);
}

JSString* BytecodeGenerator::addStringConstant(const Identifier& identifier)
{
    JSString*& stringInMap = m_stringMap.add(identifier.impl(), 0).iterator->second;
    if (!stringInMap) {
        stringInMap = jsString(globalData(), identifier.ustring());
        addConstantValue(stringInMap);
    }
    return stringInMap;
}
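// Array literals whose elements are all numeric or string constants are emitted as
// op_new_array_buffer over a constant buffer owned by the CodeBlock; anything else
// falls back to evaluating each element into a sequential run of temporaries for
// op_new_array.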
RegisterID* BytecodeGenerator::emitNewArray(RegisterID* dst, ElementNode* elements, unsigned length)
{
#if !ASSERT_DISABLED
    unsigned checkLength = 0;
#endif
    bool hadVariableExpression = false;
    if (length) {
        for (ElementNode* n = elements; n; n = n->next()) {
            if (!n->value()->isNumber() && !n->value()->isString()) {
                hadVariableExpression = true;
                break;
            }
#if !ASSERT_DISABLED
            checkLength++;
#endif
        }
        if (!hadVariableExpression) {
            ASSERT(length == checkLength);
            unsigned constantBufferIndex = addConstantBuffer(length);
            JSValue* constantBuffer = m_codeBlock->constantBuffer(constantBufferIndex);
            unsigned index = 0;
            for (ElementNode* n = elements; index < length; n = n->next()) {
                if (n->value()->isNumber())
                    constantBuffer[index++] = jsNumber(static_cast<NumberNode*>(n->value())->value());
                else {
                    ASSERT(n->value()->isString());
                    constantBuffer[index++] = addStringConstant(static_cast<StringNode*>(n->value())->value());
                }
            }
            emitOpcode(op_new_array_buffer);
            instructions().append(dst->index());
            instructions().append(constantBufferIndex);
            instructions().append(length);
            return dst;
        }
    }

    Vector<RefPtr<RegisterID>, 16> argv;
    for (ElementNode* n = elements; n; n = n->next()) {
        argv.append(newTemporary());
        // op_new_array requires the initial values to be a sequential range of registers
        ASSERT(argv.size() == 1 || argv[argv.size() - 1]->index() == argv[argv.size() - 2]->index() + 1);
        emitNode(argv.last().get(), n->value());
    }
    emitOpcode(op_new_array);
    instructions().append(dst->index());
    instructions().append(argv.size() ? argv[0]->index() : 0); // argv
    instructions().append(argv.size()); // argc
    return dst;
}
1722 RegisterID
* BytecodeGenerator::emitNewFunction(RegisterID
* dst
, FunctionBodyNode
* function
)
1724 return emitNewFunctionInternal(dst
, m_codeBlock
->addFunctionDecl(makeFunction(m_globalData
, function
)), false);
1727 RegisterID
* BytecodeGenerator::emitLazyNewFunction(RegisterID
* dst
, FunctionBodyNode
* function
)
1729 FunctionOffsetMap::AddResult ptr
= m_functionOffsets
.add(function
, 0);
1731 ptr
.iterator
->second
= m_codeBlock
->addFunctionDecl(makeFunction(m_globalData
, function
));
1732 return emitNewFunctionInternal(dst
, ptr
.iterator
->second
, true);
1735 RegisterID
* BytecodeGenerator::emitNewFunctionInternal(RegisterID
* dst
, unsigned index
, bool doNullCheck
)
1737 createActivationIfNecessary();
1738 emitOpcode(op_new_func
);
1739 instructions().append(dst
->index());
1740 instructions().append(index
);
1741 instructions().append(doNullCheck
);
1745 RegisterID
* BytecodeGenerator::emitNewRegExp(RegisterID
* dst
, RegExp
* regExp
)
1747 emitOpcode(op_new_regexp
);
1748 instructions().append(dst
->index());
1749 instructions().append(addRegExp(regExp
));
1753 RegisterID
* BytecodeGenerator::emitNewFunctionExpression(RegisterID
* r0
, FuncExprNode
* n
)
1755 FunctionBodyNode
* function
= n
->body();
1756 unsigned index
= m_codeBlock
->addFunctionExpr(makeFunction(m_globalData
, function
));
1758 createActivationIfNecessary();
1759 emitOpcode(op_new_func_exp
);
1760 instructions().append(r0
->index());
1761 instructions().append(index
);
RegisterID* BytecodeGenerator::emitCall(RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
{
    return emitCall(op_call, dst, func, callArguments, divot, startOffset, endOffset);
}
void BytecodeGenerator::createArgumentsIfNecessary()
{
    if (m_codeType != FunctionCode)
        return;

    if (!m_codeBlock->usesArguments())
        return;

    // If we're in strict mode we tear off the arguments on function
    // entry, so there's no need to check if we need to create them now.
    if (m_codeBlock->isStrictMode())
        return;

    emitOpcode(op_create_arguments);
    instructions().append(m_codeBlock->argumentsRegister());
}
void BytecodeGenerator::createActivationIfNecessary()
{
    if (m_hasCreatedActivation)
        return;
    if (!m_codeBlock->needsFullScopeChain())
        return;
    emitOpcode(op_create_activation);
    instructions().append(m_activationRegister->index());
}
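
// Hedged note (commentary, not original): both helpers above are lazy.
// op_create_arguments is emitted only for non-strict function code that
// actually uses 'arguments' (strict code tears arguments off at entry), and
// op_create_activation is emitted only when no activation has been created
// yet and the code block needs a full scope chain.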
RegisterID* BytecodeGenerator::emitCallEval(RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
{
    return emitCall(op_call_eval, dst, func, callArguments, divot, startOffset, endOffset);
}
RegisterID* BytecodeGenerator::emitCall(OpcodeID opcodeID, RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
{
    ASSERT(opcodeID == op_call || opcodeID == op_call_eval);
    ASSERT(func->refCount());

    if (m_shouldEmitProfileHooks)
        emitMove(callArguments.profileHookRegister(), func);

    // Generate code for arguments.
    unsigned argument = 0;
    for (ArgumentListNode* n = callArguments.argumentsNode()->m_listNode; n; n = n->m_next)
        emitNode(callArguments.argumentRegister(argument++), n);

    // Reserve space for call frame.
    Vector<RefPtr<RegisterID>, RegisterFile::CallFrameHeaderSize> callFrame;
    for (int i = 0; i < RegisterFile::CallFrameHeaderSize; ++i)
        callFrame.append(newTemporary());

    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_will_call);
        instructions().append(callArguments.profileHookRegister()->index());
    }

    emitExpressionInfo(divot, startOffset, endOffset);

    // Emit call.
    emitOpcode(opcodeID);
    instructions().append(func->index()); // func
    instructions().append(callArguments.argumentCountIncludingThis()); // argCount
    instructions().append(callArguments.registerOffset()); // registerOffset
#if ENABLE(LLINT)
    instructions().append(m_codeBlock->addLLIntCallLinkInfo());
#else
    instructions().append(0);
#endif
    instructions().append(0);
    if (dst != ignoredResult()) {
        ValueProfile* profile = emitProfiledOpcode(op_call_put_result);
        instructions().append(dst->index()); // dst
        instructions().append(profile);
    }

    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_did_call);
        instructions().append(callArguments.profileHookRegister()->index());
    }

    return dst;
}
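
// Hedged note (commentary, not original): the newTemporary() calls above do
// not feed any operand directly; they reserve RegisterFile::CallFrameHeaderSize
// registers between the outgoing arguments and the callee's frame. The operand
// after registerOffset carries LLInt call-link information when the LLInt is
// enabled (a zero placeholder otherwise), and the final zero appears to be a
// placeholder slot consumed by the execution engine rather than written here.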
RegisterID* BytecodeGenerator::emitCallVarargs(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, RegisterID* arguments, RegisterID* firstFreeRegister, RegisterID* profileHookRegister, unsigned divot, unsigned startOffset, unsigned endOffset)
{
    if (m_shouldEmitProfileHooks) {
        emitMove(profileHookRegister, func);
        emitOpcode(op_profile_will_call);
        instructions().append(profileHookRegister->index());
    }

    emitExpressionInfo(divot, startOffset, endOffset);

    // Emit call.
    emitOpcode(op_call_varargs);
    instructions().append(func->index());
    instructions().append(thisRegister->index());
    instructions().append(arguments->index());
    instructions().append(firstFreeRegister->index());
    if (dst != ignoredResult()) {
        ValueProfile* profile = emitProfiledOpcode(op_call_put_result);
        instructions().append(dst->index());
        instructions().append(profile);
    }
    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_did_call);
        instructions().append(profileHookRegister->index());
    }
    return dst;
}
RegisterID* BytecodeGenerator::emitReturn(RegisterID* src)
{
    if (m_codeBlock->needsFullScopeChain()) {
        emitOpcode(op_tear_off_activation);
        instructions().append(m_activationRegister->index());
        instructions().append(m_codeBlock->argumentsRegister());
    } else if (m_codeBlock->usesArguments() && m_codeBlock->numParameters() != 1 && !m_codeBlock->isStrictMode()) {
        emitOpcode(op_tear_off_arguments);
        instructions().append(m_codeBlock->argumentsRegister());
    }

    // Constructors use op_ret_object_or_this to check the result is an
    // object, unless we can trivially determine the check is not
    // necessary (currently, if the return value is 'this').
    if (isConstructor() && (src->index() != m_thisRegister.index())) {
        emitOpcode(op_ret_object_or_this);
        instructions().append(src->index());
        instructions().append(m_thisRegister.index());
        return src;
    }
    return emitUnaryNoDstOp(op_ret, src);
}
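
// Hedged example (not from the original comments): in a constructor such as
// "function Point() { return 42; }", the returned value is not an object, so
// op_ret_object_or_this makes the construct site yield 'this' instead; a
// "return this;" (or the implicit return of 'this') takes the plain op_ret path.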
RegisterID* BytecodeGenerator::emitUnaryNoDstOp(OpcodeID opcodeID, RegisterID* src)
{
    emitOpcode(opcodeID);
    instructions().append(src->index());
    return src;
}
RegisterID* BytecodeGenerator::emitConstruct(RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
{
    ASSERT(func->refCount());

    if (m_shouldEmitProfileHooks)
        emitMove(callArguments.profileHookRegister(), func);

    // Generate code for arguments.
    unsigned argument = 0;
    if (ArgumentsNode* argumentsNode = callArguments.argumentsNode()) {
        for (ArgumentListNode* n = argumentsNode->m_listNode; n; n = n->m_next)
            emitNode(callArguments.argumentRegister(argument++), n);
    }

    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_will_call);
        instructions().append(callArguments.profileHookRegister()->index());
    }

    // Reserve space for call frame.
    Vector<RefPtr<RegisterID>, RegisterFile::CallFrameHeaderSize> callFrame;
    for (int i = 0; i < RegisterFile::CallFrameHeaderSize; ++i)
        callFrame.append(newTemporary());

    emitExpressionInfo(divot, startOffset, endOffset);

    emitOpcode(op_construct);
    instructions().append(func->index()); // func
    instructions().append(callArguments.argumentCountIncludingThis()); // argCount
    instructions().append(callArguments.registerOffset()); // registerOffset
#if ENABLE(LLINT)
    instructions().append(m_codeBlock->addLLIntCallLinkInfo());
#else
    instructions().append(0);
#endif
    instructions().append(0);
    if (dst != ignoredResult()) {
        ValueProfile* profile = emitProfiledOpcode(op_call_put_result);
        instructions().append(dst->index()); // dst
        instructions().append(profile);
    }

    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_did_call);
        instructions().append(callArguments.profileHookRegister()->index());
    }

    return dst;
}
RegisterID* BytecodeGenerator::emitStrcat(RegisterID* dst, RegisterID* src, int count)
{
    emitOpcode(op_strcat);
    instructions().append(dst->index());
    instructions().append(src->index());
    instructions().append(count);

    return dst;
}
void BytecodeGenerator::emitToPrimitive(RegisterID* dst, RegisterID* src)
{
    emitOpcode(op_to_primitive);
    instructions().append(dst->index());
    instructions().append(src->index());
}
RegisterID* BytecodeGenerator::emitPushScope(RegisterID* scope)
{
    ASSERT(scope->isTemporary());
    ControlFlowContext context;
    context.isFinallyBlock = false;
    m_scopeContextStack.append(context);
    m_dynamicScopeDepth++;

    return emitUnaryNoDstOp(op_push_scope, scope);
}
void BytecodeGenerator::emitPopScope()
{
    ASSERT(m_scopeContextStack.size());
    ASSERT(!m_scopeContextStack.last().isFinallyBlock);

    emitOpcode(op_pop_scope);

    m_scopeContextStack.removeLast();
    m_dynamicScopeDepth--;
}
void BytecodeGenerator::emitDebugHook(DebugHookID debugHookID, int firstLine, int lastLine)
{
#if ENABLE(DEBUG_WITH_BREAKPOINT)
    if (debugHookID != DidReachBreakpoint)
        return;
#else
    if (!m_shouldEmitDebugHooks)
        return;
#endif
    emitOpcode(op_debug);
    instructions().append(debugHookID);
    instructions().append(firstLine);
    instructions().append(lastLine);
}
void BytecodeGenerator::pushFinallyContext(StatementNode* finallyBlock)
{
    ControlFlowContext scope;
    scope.isFinallyBlock = true;
    FinallyContext context = {
        finallyBlock,
        m_scopeContextStack.size(),
        m_switchContextStack.size(),
        m_forInContextStack.size(),
        m_labelScopes.size(),
        m_finallyDepth,
        m_dynamicScopeDepth
    };
    scope.finallyContext = context;
    m_scopeContextStack.append(scope);
    m_finallyDepth++;
}
void BytecodeGenerator::popFinallyContext()
{
    ASSERT(m_scopeContextStack.size());
    ASSERT(m_scopeContextStack.last().isFinallyBlock);
    ASSERT(m_finallyDepth > 0);
    m_scopeContextStack.removeLast();
    m_finallyDepth--;
}
LabelScope* BytecodeGenerator::breakTarget(const Identifier& name)
{
    // Reclaim free label scopes.
    //
    // The condition was previously coded as 'm_labelScopes.size() && !m_labelScopes.last().refCount()',
    // however sometimes this appears to lead to GCC going a little haywire and entering the loop with
    // size 0, leading to segfaulty badness. We are yet to identify a valid cause within our code that
    // would make the GCC codegen misbehave in this fashion, so the following refactoring of the loop
    // condition is a workaround.
    while (m_labelScopes.size()) {
        if (m_labelScopes.last().refCount())
            break;
        m_labelScopes.removeLast();
    }

    if (!m_labelScopes.size())
        return 0;

    // We special-case the following, which is a syntax error in Firefox:
    // label:
    //     break;
    if (name.isEmpty()) {
        for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
            LabelScope* scope = &m_labelScopes[i];
            if (scope->type() != LabelScope::NamedLabel) {
                ASSERT(scope->breakTarget());
                return scope;
            }
        }
        return 0;
    }

    for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
        LabelScope* scope = &m_labelScopes[i];
        if (scope->name() && *scope->name() == name) {
            ASSERT(scope->breakTarget());
            return scope;
        }
    }
    return 0;
}
LabelScope* BytecodeGenerator::continueTarget(const Identifier& name)
{
    // Reclaim free label scopes.
    while (m_labelScopes.size() && !m_labelScopes.last().refCount())
        m_labelScopes.removeLast();

    if (!m_labelScopes.size())
        return 0;

    if (name.isEmpty()) {
        for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
            LabelScope* scope = &m_labelScopes[i];
            if (scope->type() == LabelScope::Loop) {
                ASSERT(scope->continueTarget());
                return scope;
            }
        }
        return 0;
    }

    // Continue to the loop nested nearest to the label scope that matches 'name'.
    LabelScope* result = 0;
    for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
        LabelScope* scope = &m_labelScopes[i];
        if (scope->type() == LabelScope::Loop) {
            ASSERT(scope->continueTarget());
            result = scope;
        }
        if (scope->name() && *scope->name() == name)
            return result; // may be 0
    }
    return 0;
}
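
// Hedged example (commentary, not original): given
//     outer: for (;;) { for (;;) { continue outer; } }
// continueTarget("outer") walks the label scopes from innermost outward,
// remembering the nearest enclosing loop it passes, and returns that loop once
// it reaches the scope named "outer". If the matched label does not enclose a
// loop, the result is 0, as the "may be 0" comment above notes.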
PassRefPtr<Label> BytecodeGenerator::emitComplexJumpScopes(Label* target, ControlFlowContext* topScope, ControlFlowContext* bottomScope)
{
    while (topScope > bottomScope) {
        // First we count the number of dynamic scopes we need to remove to get
        // to a finally block.
        int nNormalScopes = 0;
        while (topScope > bottomScope) {
            if (topScope->isFinallyBlock)
                break;
            ++nNormalScopes;
            --topScope;
        }

        if (nNormalScopes) {
            size_t begin = instructions().size();

            // We need to remove a number of dynamic scopes to get to the next
            // finally block.
            emitOpcode(op_jmp_scopes);
            instructions().append(nNormalScopes);

            // If topScope == bottomScope then there isn't actually a finally block
            // left to emit, so make the jmp_scopes jump directly to the target label.
            if (topScope == bottomScope) {
                instructions().append(target->bind(begin, instructions().size()));
                return target;
            }

            // Otherwise we just use jmp_scopes to pop a group of scopes and go
            // to the next instruction.
            RefPtr<Label> nextInsn = newLabel();
            instructions().append(nextInsn->bind(begin, instructions().size()));
            emitLabel(nextInsn.get());
        }

        Vector<ControlFlowContext> savedScopeContextStack;
        Vector<SwitchInfo> savedSwitchContextStack;
        Vector<ForInContext> savedForInContextStack;
        SegmentedVector<LabelScope, 8> savedLabelScopes;
        while (topScope > bottomScope && topScope->isFinallyBlock) {
            // Save the current state of the world while instating the state of the world
            // for the finally block.
            FinallyContext finallyContext = topScope->finallyContext;
            bool flipScopes = finallyContext.scopeContextStackSize != m_scopeContextStack.size();
            bool flipSwitches = finallyContext.switchContextStackSize != m_switchContextStack.size();
            bool flipForIns = finallyContext.forInContextStackSize != m_forInContextStack.size();
            bool flipLabelScopes = finallyContext.labelScopesSize != m_labelScopes.size();
            int topScopeIndex = -1;
            int bottomScopeIndex = -1;
            if (flipScopes) {
                topScopeIndex = topScope - m_scopeContextStack.begin();
                bottomScopeIndex = bottomScope - m_scopeContextStack.begin();
                savedScopeContextStack = m_scopeContextStack;
                m_scopeContextStack.shrink(finallyContext.scopeContextStackSize);
            }
            if (flipSwitches) {
                savedSwitchContextStack = m_switchContextStack;
                m_switchContextStack.shrink(finallyContext.switchContextStackSize);
            }
            if (flipForIns) {
                savedForInContextStack = m_forInContextStack;
                m_forInContextStack.shrink(finallyContext.forInContextStackSize);
            }
            if (flipLabelScopes) {
                savedLabelScopes = m_labelScopes;
                while (m_labelScopes.size() > finallyContext.labelScopesSize)
                    m_labelScopes.removeLast();
            }
            int savedFinallyDepth = m_finallyDepth;
            m_finallyDepth = finallyContext.finallyDepth;
            int savedDynamicScopeDepth = m_dynamicScopeDepth;
            m_dynamicScopeDepth = finallyContext.dynamicScopeDepth;

            // Emit the finally block.
            emitNode(finallyContext.finallyBlock);

            // Restore the state of the world.
            if (flipScopes) {
                m_scopeContextStack = savedScopeContextStack;
                topScope = &m_scopeContextStack[topScopeIndex]; // assert it's within bounds
                bottomScope = m_scopeContextStack.begin() + bottomScopeIndex; // don't assert, since the index might be -1
            }
            if (flipSwitches)
                m_switchContextStack = savedSwitchContextStack;
            if (flipForIns)
                m_forInContextStack = savedForInContextStack;
            if (flipLabelScopes)
                m_labelScopes = savedLabelScopes;
            m_finallyDepth = savedFinallyDepth;
            m_dynamicScopeDepth = savedDynamicScopeDepth;

            --topScope;
        }
    }
    return emitJump(target);
}
PassRefPtr<Label> BytecodeGenerator::emitJumpScopes(Label* target, int targetScopeDepth)
{
    ASSERT(scopeDepth() - targetScopeDepth >= 0);
    ASSERT(target->isForward());

    size_t scopeDelta = scopeDepth() - targetScopeDepth;
    ASSERT(scopeDelta <= m_scopeContextStack.size());
    if (!scopeDelta)
        return emitJump(target);

    if (m_finallyDepth)
        return emitComplexJumpScopes(target, &m_scopeContextStack.last(), &m_scopeContextStack.last() - scopeDelta);

    size_t begin = instructions().size();

    emitOpcode(op_jmp_scopes);
    instructions().append(scopeDelta);
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}
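
// Hedged note (commentary, not original): emitJumpScopes is the cheap path. A
// jump that only needs to pop dynamic scopes (with / catch) becomes a single
// op_jmp_scopes, or a plain jump when scopeDelta is 0. Only when the jump also
// crosses one or more finally blocks does emitComplexJumpScopes take over,
// re-emitting each finally body inline under the generator state it was
// pushed with.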
RegisterID* BytecodeGenerator::emitGetPropertyNames(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, Label* breakTarget)
{
    size_t begin = instructions().size();

    emitOpcode(op_get_pnames);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(i->index());
    instructions().append(size->index());
    instructions().append(breakTarget->bind(begin, instructions().size()));
    return dst;
}
RegisterID* BytecodeGenerator::emitNextPropertyName(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, RegisterID* iter, Label* target)
{
    size_t begin = instructions().size();

    emitOpcode(op_next_pname);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(i->index());
    instructions().append(size->index());
    instructions().append(iter->index());
    instructions().append(target->bind(begin, instructions().size()));
    return dst;
}
RegisterID* BytecodeGenerator::emitCatch(RegisterID* targetRegister, Label* start, Label* end)
{
    m_usesExceptions = true;
#if ENABLE(JIT)
#if ENABLE(LLINT)
    HandlerInfo info = { start->bind(0, 0), end->bind(0, 0), instructions().size(), m_dynamicScopeDepth + m_baseScopeDepth, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(bitwise_cast<void*>(&llint_op_catch))) };
#else
    HandlerInfo info = { start->bind(0, 0), end->bind(0, 0), instructions().size(), m_dynamicScopeDepth + m_baseScopeDepth, CodeLocationLabel() };
#endif
#else
    HandlerInfo info = { start->bind(0, 0), end->bind(0, 0), instructions().size(), m_dynamicScopeDepth + m_baseScopeDepth };
#endif

    m_codeBlock->addExceptionHandler(info);
    emitOpcode(op_catch);
    instructions().append(targetRegister->index());
    return targetRegister;
}
void BytecodeGenerator::emitThrowReferenceError(const UString& message)
{
    emitOpcode(op_throw_reference_error);
    instructions().append(addConstantValue(jsString(globalData(), message))->index());
}
void BytecodeGenerator::emitPushNewScope(RegisterID* dst, const Identifier& property, RegisterID* value)
{
    ControlFlowContext context;
    context.isFinallyBlock = false;
    m_scopeContextStack.append(context);
    m_dynamicScopeDepth++;

    emitOpcode(op_push_new_scope);
    instructions().append(dst->index());
    instructions().append(addConstant(property));
    instructions().append(value->index());
}
void BytecodeGenerator::beginSwitch(RegisterID* scrutineeRegister, SwitchInfo::SwitchType type)
{
    SwitchInfo info = { instructions().size(), type };
    switch (type) {
    case SwitchInfo::SwitchImmediate:
        emitOpcode(op_switch_imm);
        break;
    case SwitchInfo::SwitchCharacter:
        emitOpcode(op_switch_char);
        break;
    case SwitchInfo::SwitchString:
        emitOpcode(op_switch_string);
        break;
    default:
        ASSERT_NOT_REACHED();
    }

    instructions().append(0); // placeholder for table index
    instructions().append(0); // placeholder for default target
    instructions().append(scrutineeRegister->index());
    m_switchContextStack.append(info);
}
static int32_t keyForImmediateSwitch(ExpressionNode* node, int32_t min, int32_t max)
{
    UNUSED_PARAM(max);
    ASSERT(node->isNumber());
    double value = static_cast<NumberNode*>(node)->value();
    int32_t key = static_cast<int32_t>(value);
    ASSERT(key == value);
    ASSERT(key >= min);
    ASSERT(key <= max);
    return key - min;
}
static void prepareJumpTableForImmediateSwitch(SimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max)
{
    jumpTable.min = min;
    jumpTable.branchOffsets.resize(max - min + 1);
    jumpTable.branchOffsets.fill(0);
    for (uint32_t i = 0; i < clauseCount; ++i) {
        // We're emitting this after the clause labels should have been fixed, so
        // the labels should not be "forward" references.
        ASSERT(!labels[i]->isForward());
        jumpTable.add(keyForImmediateSwitch(nodes[i], min, max), labels[i]->bind(switchAddress, switchAddress + 3));
    }
}
static int32_t keyForCharacterSwitch(ExpressionNode* node, int32_t min, int32_t max)
{
    UNUSED_PARAM(max);
    ASSERT(node->isString());
    StringImpl* clause = static_cast<StringNode*>(node)->value().impl();
    ASSERT(clause->length() == 1);

    int32_t key = (*clause)[0];
    ASSERT(key >= min);
    ASSERT(key <= max);
    return key - min;
}
static void prepareJumpTableForCharacterSwitch(SimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max)
{
    jumpTable.min = min;
    jumpTable.branchOffsets.resize(max - min + 1);
    jumpTable.branchOffsets.fill(0);
    for (uint32_t i = 0; i < clauseCount; ++i) {
        // We're emitting this after the clause labels should have been fixed, so
        // the labels should not be "forward" references.
        ASSERT(!labels[i]->isForward());
        jumpTable.add(keyForCharacterSwitch(nodes[i], min, max), labels[i]->bind(switchAddress, switchAddress + 3));
    }
}
static void prepareJumpTableForStringSwitch(StringJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes)
{
    for (uint32_t i = 0; i < clauseCount; ++i) {
        // We're emitting this after the clause labels should have been fixed, so
        // the labels should not be "forward" references.
        ASSERT(!labels[i]->isForward());

        ASSERT(nodes[i]->isString());
        StringImpl* clause = static_cast<StringNode*>(nodes[i])->value().impl();
        OffsetLocation location;
        location.branchOffset = labels[i]->bind(switchAddress, switchAddress + 3);
        jumpTable.offsetTable.add(clause, location);
    }
}
void BytecodeGenerator::endSwitch(uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, Label* defaultLabel, int32_t min, int32_t max)
{
    SwitchInfo switchInfo = m_switchContextStack.last();
    m_switchContextStack.removeLast();
    if (switchInfo.switchType == SwitchInfo::SwitchImmediate) {
        instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfImmediateSwitchJumpTables();
        instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);

        SimpleJumpTable& jumpTable = m_codeBlock->addImmediateSwitchJumpTable();
        prepareJumpTableForImmediateSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max);
    } else if (switchInfo.switchType == SwitchInfo::SwitchCharacter) {
        instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfCharacterSwitchJumpTables();
        instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);

        SimpleJumpTable& jumpTable = m_codeBlock->addCharacterSwitchJumpTable();
        prepareJumpTableForCharacterSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max);
    } else {
        ASSERT(switchInfo.switchType == SwitchInfo::SwitchString);
        instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfStringSwitchJumpTables();
        instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);

        StringJumpTable& jumpTable = m_codeBlock->addStringSwitchJumpTable();
        prepareJumpTableForStringSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes);
    }
}
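
// Hedged example (commentary, not original): for
//     switch (x) { case 1: ...; case 3: ...; default: ... }
// beginSwitch emits op_switch_imm with two zero placeholders, and endSwitch
// patches them with the index of a freshly added SimpleJumpTable and the
// offset of the default target. The table spans min..max (here 1..3), so the
// missing key 2 keeps the zero branch offset from fill(0) above, which this
// sketch assumes the interpreter treats as "jump to the default target".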
RegisterID* BytecodeGenerator::emitThrowExpressionTooDeepException()
{
    // It would be nice to do an even better job of identifying exactly where the expression is.
    // And we could make the caller pass the node pointer in, if there was some way of getting
    // that from an arbitrary node. However, calling emitExpressionInfo without any useful data
    // is still good enough to get us an accurate line number.
    m_expressionTooDeep = true;
    return newTemporary();
}
void BytecodeGenerator::setIsNumericCompareFunction(bool isNumericCompareFunction)
{
    m_codeBlock->setIsNumericCompareFunction(isNumericCompareFunction);
}
bool BytecodeGenerator::isArgumentNumber(const Identifier& ident, int argumentNumber)
{
    RegisterID* registerID = registerFor(ident);
    if (!registerID || registerID->index() >= 0)
        return false;
    return registerID->index() == CallFrame::argumentOffset(argumentNumber);
}

} // namespace JSC