/*
 * Copyright (C) 2008, 2009 Apple Inc. All rights reserved.
 * Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
 *    its contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include "config.h"
#include "BytecodeGenerator.h"

#include "BatchedTransitionOptimizer.h"
#include "JSFunction.h"
#include "Interpreter.h"
#include "ScopeChain.h"

using namespace std;

namespace JSC {
/*
    The layout of a register frame looks like this:

    For

    function f(x, y) {
        var v1;
        function g() { }
        var v2;
        return (x) * (y);
    }

    assuming (x) and (y) generated temporaries t1 and t2, you would have

    ------------------------------------
    |  x |  y |  g | v2 | v1 | t1 | t2 | <-- value held
    ------------------------------------
    | -5 | -4 | -3 | -2 | -1 | +0 | +1 | <-- register index
    ------------------------------------
    | params->|<-locals      | temps->

    Because temporary registers are allocated in a stack-like fashion, we
    can reclaim them with a simple popping algorithm. The same goes for labels.
    (We never reclaim parameter or local registers, because parameters and
    locals are DontDelete.)
    The register layout before a function call looks like this:

    For

    function f(x, y)
    {
    }

    f(1);

    >                        <------------------------------
    <                        >  reserved: call frame  |  1 | <-- value held
    >         >snip<         <------------------------------
    <                        > +0 | +1 | +2 | +3 | +4 | +5 | <-- register index
    >                        <------------------------------
    | params->|<-locals      | temps->
    The call instruction fills in the "call frame" registers. It also pads
    missing arguments at the end of the call:

    >                        <-----------------------------------
    <                        >  reserved: call frame  |  1 |  ? | <-- value held ("?" stands for "undefined")
    >         >snip<         <-----------------------------------
    <                        > +0 | +1 | +2 | +3 | +4 | +5 | +6 | <-- register index
    >                        <-----------------------------------
    | params->|<-locals      | temps->
    After filling in missing arguments, the call instruction sets up the new
    stack frame to overlap the end of the old stack frame:

                             |----------------------------------->                        <
                             |  reserved: call frame  |  1 |  ? <                        > <-- value held ("?" stands for "undefined")
                             |----------------------------------->         >snip<         <
                             | -7 | -6 | -5 | -4 | -3 | -2 | -1 <                        > <-- register index
                             |----------------------------------->                        <
                             |                        | params->|<-locals       | temps->
    That way, arguments are "copied" into the callee's stack frame for free.

    If the caller supplies too many arguments, this trick doesn't work. The
    extra arguments protrude into space reserved for locals and temporaries.
    In that case, the call instruction makes a real copy of the call frame header,
    along with just the arguments expected by the callee, leaving the original
    call frame header and arguments behind. (The call instruction can't just discard
    extra arguments, because the "arguments" object may access them later.)
    This copying strategy ensures that all named values will be at the indices
    expected by the callee.
*/
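/*
    Illustrative example (not part of the original comment): for

        function f(a) { return arguments[1]; }
        f(1, 2);

    the caller passes one more argument than f declares. With the frame-overlap
    trick alone, the extra "2" would sit where f's locals and temporaries belong,
    so the call instruction instead copies the call frame header plus the single
    expected argument to a fresh frame, leaving the original arguments behind
    where the "arguments" object can still reach them.
*/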
#ifndef NDEBUG
static bool s_dumpsGeneratedCode = false;
#endif

void BytecodeGenerator::setDumpsGeneratedCode(bool dumpsGeneratedCode)
{
#ifndef NDEBUG
    s_dumpsGeneratedCode = dumpsGeneratedCode;
#else
    UNUSED_PARAM(dumpsGeneratedCode);
#endif
}

bool BytecodeGenerator::dumpsGeneratedCode()
{
#ifndef NDEBUG
    return s_dumpsGeneratedCode;
#else
    return false;
#endif
}
JSObject* BytecodeGenerator::generate()
{
    m_codeBlock->setThisRegister(m_thisRegister.index());

    m_scopeNode->emitBytecode(*this);

    m_codeBlock->setInstructionCount(m_codeBlock->instructions().size());

#ifndef NDEBUG
    if (s_dumpsGeneratedCode)
        m_codeBlock->dump(m_scopeChain->globalObject->globalExec());
#endif

    if ((m_codeType == FunctionCode && !m_codeBlock->needsFullScopeChain() && !m_codeBlock->usesArguments()) || m_codeType == EvalCode)
        symbolTable().clear();

    m_codeBlock->shrinkToFit();

    if (m_expressionTooDeep)
        return createOutOfMemoryError(m_scopeChain->globalObject.get());
    return 0;
}
bool BytecodeGenerator::addVar(const Identifier& ident, bool isConstant, RegisterID*& r0)
{
    int index = m_calleeRegisters.size();
    SymbolTableEntry newEntry(index, isConstant ? ReadOnly : 0);
    pair<SymbolTable::iterator, bool> result = symbolTable().add(ident.impl(), newEntry);

    if (!result.second) {
        r0 = &registerFor(result.first->second.getIndex());
        return false;
    }

    r0 = addVar();
    return true;
}

bool BytecodeGenerator::addGlobalVar(const Identifier& ident, bool isConstant, RegisterID*& r0)
{
    int index = m_nextGlobalIndex;
    SymbolTableEntry newEntry(index, isConstant ? ReadOnly : 0);
    pair<SymbolTable::iterator, bool> result = symbolTable().add(ident.impl(), newEntry);

    if (!result.second)
        index = result.first->second.getIndex();
    else {
        --m_nextGlobalIndex;
        m_globals.append(index + m_globalVarStorageOffset);
    }

    r0 = &registerFor(index);
    return result.second;
}
void BytecodeGenerator::preserveLastVar()
{
    if ((m_firstConstantIndex = m_calleeRegisters.size()) != 0)
        m_lastVar = &m_calleeRegisters.last();
}
BytecodeGenerator::BytecodeGenerator(ProgramNode* programNode, ScopeChainNode* scopeChain, SymbolTable* symbolTable, ProgramCodeBlock* codeBlock)
    : m_shouldEmitDebugHooks(scopeChain->globalObject->debugger())
    , m_shouldEmitProfileHooks(scopeChain->globalObject->supportsProfiling())
    , m_shouldEmitRichSourceInfo(scopeChain->globalObject->supportsRichSourceInfo())
    , m_scopeChain(*scopeChain->globalData, scopeChain)
    , m_symbolTable(symbolTable)
    , m_scopeNode(programNode)
    , m_codeBlock(codeBlock)
    , m_thisRegister(RegisterFile::ProgramCodeThisRegister)
    , m_dynamicScopeDepth(0)
    , m_baseScopeDepth(0)
    , m_codeType(GlobalCode)
    , m_nextGlobalIndex(-1)
    , m_nextConstantOffset(0)
    , m_globalConstantIndex(0)
    , m_hasCreatedActivation(true)
    , m_firstLazyFunction(0)
    , m_lastLazyFunction(0)
    , m_globalData(scopeChain->globalData)
    , m_lastOpcodeID(op_end)
#ifndef NDEBUG
    , m_lastOpcodePosition(0)
#endif
    , m_stack(m_globalData->stack())
    , m_usesExceptions(false)
    , m_expressionTooDeep(false)
{
    if (m_shouldEmitDebugHooks)
        m_codeBlock->setNeedsFullScopeChain(true);

    emitOpcode(op_enter);
    codeBlock->setGlobalData(m_globalData);

    // FIXME: Move code that modifies the global object to Interpreter::execute.

    m_codeBlock->m_numParameters = 1; // Allocate space for "this"

    JSGlobalObject* globalObject = scopeChain->globalObject.get();
    ExecState* exec = globalObject->globalExec();
    RegisterFile* registerFile = &exec->globalData().interpreter->registerFile();

    // Shift register indexes in generated code to elide registers allocated by intermediate stack frames.
    m_globalVarStorageOffset = -RegisterFile::CallFrameHeaderSize - m_codeBlock->m_numParameters - registerFile->size();

    // Add previously defined symbols to bookkeeping.
    m_globals.grow(symbolTable->size());
    SymbolTable::iterator end = symbolTable->end();
    for (SymbolTable::iterator it = symbolTable->begin(); it != end; ++it)
        registerFor(it->second.getIndex()).setIndex(it->second.getIndex() + m_globalVarStorageOffset);

    BatchedTransitionOptimizer optimizer(*m_globalData, globalObject);

    const VarStack& varStack = programNode->varStack();
    const FunctionStack& functionStack = programNode->functionStack();
    bool canOptimizeNewGlobals = symbolTable->size() + functionStack.size() + varStack.size() < registerFile->maxGlobals();
    if (canOptimizeNewGlobals) {
        // Shift new symbols so they get stored prior to existing symbols.
        m_nextGlobalIndex -= symbolTable->size();

        HashSet<StringImpl*, IdentifierRepHash> newGlobals;
        Vector<std::pair<int, bool>, 16> functionInfo(functionStack.size());
        for (size_t i = 0; i < functionStack.size(); ++i) {
            FunctionBodyNode* function = functionStack[i];
            globalObject->removeDirect(*m_globalData, function->ident()); // Make sure our new function is not shadowed by an old property.
            SymbolTableEntry entry = symbolTable->inlineGet(function->ident().impl());

            if (entry.isNull())
                newGlobals.add(function->ident().impl());
            functionInfo[i] = make_pair(entry.getIndex(), entry.isReadOnly());
        }

        Vector<bool, 16> shouldCreateVar(varStack.size());
        for (size_t i = 0; i < varStack.size(); ++i) {
            if (newGlobals.contains(varStack[i].first->impl()) || globalObject->hasProperty(exec, *varStack[i].first)) {
                shouldCreateVar[i] = false;
                continue;
            }
            shouldCreateVar[i] = true;
            newGlobals.add(varStack[i].first->impl());
        }

        int expectedSize = symbolTable->size() + newGlobals.size();
        globalObject->resizeRegisters(symbolTable->size(), expectedSize);

        for (size_t i = 0; i < functionStack.size(); ++i) {
            FunctionBodyNode* function = functionStack[i];
            if (functionInfo[i].second)
                continue;
            RegisterID* dst = addGlobalVar(function->ident(), false);
            JSValue value = new (exec) JSFunction(exec, makeFunction(exec, function), scopeChain);
            globalObject->registerAt(dst->index() - m_globalVarStorageOffset).set(*m_globalData, globalObject, value);
        }

        for (size_t i = 0; i < varStack.size(); ++i) {
            if (!shouldCreateVar[i])
                continue;
            addGlobalVar(*varStack[i].first, varStack[i].second & DeclarationStacks::IsConstant);
        }
        if (symbolTable->size() != expectedSize)
            CRASH();

        preserveLastVar();
    } else {
        for (size_t i = 0; i < functionStack.size(); ++i) {
            FunctionBodyNode* function = functionStack[i];
            globalObject->putWithAttributes(exec, function->ident(), new (exec) JSFunction(exec, makeFunction(exec, function), scopeChain), DontDelete);
        }
        for (size_t i = 0; i < varStack.size(); ++i) {
            if (globalObject->symbolTableHasProperty(*varStack[i].first) || globalObject->hasProperty(exec, *varStack[i].first))
                continue;
            int attributes = DontDelete;
            if (varStack[i].second & DeclarationStacks::IsConstant)
                attributes |= ReadOnly;
            globalObject->putWithAttributes(exec, *varStack[i].first, jsUndefined(), attributes);
        }

        preserveLastVar();
    }

    codeBlock->m_numCapturedVars = codeBlock->m_numVars;
}
BytecodeGenerator::BytecodeGenerator(FunctionBodyNode* functionBody, ScopeChainNode* scopeChain, SymbolTable* symbolTable, CodeBlock* codeBlock)
    : m_shouldEmitDebugHooks(scopeChain->globalObject->debugger())
    , m_shouldEmitProfileHooks(scopeChain->globalObject->supportsProfiling())
    , m_shouldEmitRichSourceInfo(scopeChain->globalObject->supportsRichSourceInfo())
    , m_scopeChain(*scopeChain->globalData, scopeChain)
    , m_symbolTable(symbolTable)
    , m_scopeNode(functionBody)
    , m_codeBlock(codeBlock)
    , m_activationRegister(0)
    , m_dynamicScopeDepth(0)
    , m_baseScopeDepth(0)
    , m_codeType(FunctionCode)
    , m_nextConstantOffset(0)
    , m_globalConstantIndex(0)
    , m_hasCreatedActivation(false)
    , m_firstLazyFunction(0)
    , m_lastLazyFunction(0)
    , m_globalData(scopeChain->globalData)
    , m_lastOpcodeID(op_end)
#ifndef NDEBUG
    , m_lastOpcodePosition(0)
#endif
    , m_stack(m_globalData->stack())
    , m_usesExceptions(false)
    , m_expressionTooDeep(false)
{
    if (m_shouldEmitDebugHooks)
        m_codeBlock->setNeedsFullScopeChain(true);

    codeBlock->setGlobalData(m_globalData);

    emitOpcode(op_enter);
    if (m_codeBlock->needsFullScopeChain()) {
        m_activationRegister = addVar();
        emitInitLazyRegister(m_activationRegister);
        m_codeBlock->setActivationRegister(m_activationRegister->index());
    }

    // Both op_tear_off_activation and op_tear_off_arguments tear off the 'arguments'
    // object, if created.
    if (m_codeBlock->needsFullScopeChain() || functionBody->usesArguments()) {
        RegisterID* unmodifiedArgumentsRegister = addVar(); // Anonymous, so it can't be modified by user code.
        RegisterID* argumentsRegister = addVar(propertyNames().arguments, false); // Can be changed by assigning to 'arguments'.

        // We can save a little space by hard-coding the knowledge that the two
        // 'arguments' values are stored in consecutive registers, and storing
        // only the index of the assignable one.
        codeBlock->setArgumentsRegister(argumentsRegister->index());
        ASSERT_UNUSED(unmodifiedArgumentsRegister, unmodifiedArgumentsRegister->index() == JSC::unmodifiedArgumentsRegister(codeBlock->argumentsRegister()));

        emitInitLazyRegister(argumentsRegister);
        emitInitLazyRegister(unmodifiedArgumentsRegister);

        if (m_codeBlock->isStrictMode()) {
            emitOpcode(op_create_arguments);
            instructions().append(argumentsRegister->index());
        }

        // The debugger currently retrieves the arguments object from an activation rather than pulling
        // it from a call frame. In the long-term it should stop doing that (<rdar://problem/6911886>),
        // but for now we force eager creation of the arguments object when debugging.
        if (m_shouldEmitDebugHooks) {
            emitOpcode(op_create_arguments);
            instructions().append(argumentsRegister->index());
        }
    }

    const DeclarationStacks::FunctionStack& functionStack = functionBody->functionStack();
    const DeclarationStacks::VarStack& varStack = functionBody->varStack();

    // Captured variables and functions go first so that activations don't have
    // to step over the non-captured locals to mark them.
    m_hasCreatedActivation = false;
    if (functionBody->hasCapturedVariables()) {
        for (size_t i = 0; i < functionStack.size(); ++i) {
            FunctionBodyNode* function = functionStack[i];
            const Identifier& ident = function->ident();
            if (functionBody->captures(ident)) {
                if (!m_hasCreatedActivation) {
                    m_hasCreatedActivation = true;
                    emitOpcode(op_create_activation);
                    instructions().append(m_activationRegister->index());
                }
                m_functions.add(ident.impl());
                emitNewFunction(addVar(ident, false), function);
            }
        }
        for (size_t i = 0; i < varStack.size(); ++i) {
            const Identifier& ident = *varStack[i].first;
            if (functionBody->captures(ident))
                addVar(ident, varStack[i].second & DeclarationStacks::IsConstant);
        }
    }
    bool canLazilyCreateFunctions = !functionBody->needsActivationForMoreThanVariables() && !m_shouldEmitDebugHooks;
    if (!canLazilyCreateFunctions && !m_hasCreatedActivation) {
        m_hasCreatedActivation = true;
        emitOpcode(op_create_activation);
        instructions().append(m_activationRegister->index());
    }

    codeBlock->m_numCapturedVars = codeBlock->m_numVars;
    m_firstLazyFunction = codeBlock->m_numVars;
    for (size_t i = 0; i < functionStack.size(); ++i) {
        FunctionBodyNode* function = functionStack[i];
        const Identifier& ident = function->ident();
        if (!functionBody->captures(ident)) {
            m_functions.add(ident.impl());
            RefPtr<RegisterID> reg = addVar(ident, false);
            // Don't lazily create functions that override the name 'arguments'
            // as this would complicate lazy instantiation of actual arguments.
            if (!canLazilyCreateFunctions || ident == propertyNames().arguments)
                emitNewFunction(reg.get(), function);
            else {
                emitInitLazyRegister(reg.get());
                m_lazyFunctions.set(reg->index(), function);
            }
        }
    }
    m_lastLazyFunction = canLazilyCreateFunctions ? codeBlock->m_numVars : m_firstLazyFunction;
    for (size_t i = 0; i < varStack.size(); ++i) {
        const Identifier& ident = *varStack[i].first;
        if (!functionBody->captures(ident))
            addVar(ident, varStack[i].second & DeclarationStacks::IsConstant);
    }

    if (m_shouldEmitDebugHooks)
        codeBlock->m_numCapturedVars = codeBlock->m_numVars;

    FunctionParameters& parameters = *functionBody->parameters();
    size_t parameterCount = parameters.size();
    int nextParameterIndex = -RegisterFile::CallFrameHeaderSize - parameterCount - 1;
    m_parameters.grow(1 + parameterCount); // reserve space for "this"

    // Add "this" as a parameter
    m_thisRegister.setIndex(nextParameterIndex);
    ++m_codeBlock->m_numParameters;

    for (size_t i = 0; i < parameterCount; ++i)
        addParameter(parameters[i], ++nextParameterIndex);

    preserveLastVar();

    if (isConstructor()) {
        RefPtr<RegisterID> func = newTemporary();
        RefPtr<RegisterID> funcProto = newTemporary();

        emitOpcode(op_get_callee);
        instructions().append(func->index());

        emitGetById(funcProto.get(), func.get(), globalData()->propertyNames->prototype);

        emitOpcode(op_create_this);
        instructions().append(m_thisRegister.index());
        instructions().append(funcProto->index());
    } else if (functionBody->usesThis() || m_shouldEmitDebugHooks) {
        if (codeBlock->isStrictMode())
            emitOpcode(op_convert_this_strict);
        else
            emitOpcode(op_convert_this);
        instructions().append(m_thisRegister.index());
    }
}
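// Illustrative note (not in the original source): when canLazilyCreateFunctions
// is true above, uncaptured function declarations are not instantiated during
// op_enter. Their registers are only marked with op_init_lazy_reg and recorded
// in m_lazyFunctions; the JSFunction is created on first use, when registerFor()
// reaches createLazyRegisterIfNecessary() and emitLazyNewFunction() below.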
BytecodeGenerator::BytecodeGenerator(EvalNode* evalNode, ScopeChainNode* scopeChain, SymbolTable* symbolTable, EvalCodeBlock* codeBlock)
    : m_shouldEmitDebugHooks(scopeChain->globalObject->debugger())
    , m_shouldEmitProfileHooks(scopeChain->globalObject->supportsProfiling())
    , m_shouldEmitRichSourceInfo(scopeChain->globalObject->supportsRichSourceInfo())
    , m_scopeChain(*scopeChain->globalData, scopeChain)
    , m_symbolTable(symbolTable)
    , m_scopeNode(evalNode)
    , m_codeBlock(codeBlock)
    , m_thisRegister(RegisterFile::ProgramCodeThisRegister)
    , m_dynamicScopeDepth(0)
    , m_baseScopeDepth(codeBlock->baseScopeDepth())
    , m_codeType(EvalCode)
    , m_nextConstantOffset(0)
    , m_globalConstantIndex(0)
    , m_hasCreatedActivation(true)
    , m_firstLazyFunction(0)
    , m_lastLazyFunction(0)
    , m_globalData(scopeChain->globalData)
    , m_lastOpcodeID(op_end)
#ifndef NDEBUG
    , m_lastOpcodePosition(0)
#endif
    , m_stack(m_globalData->stack())
    , m_usesExceptions(false)
    , m_expressionTooDeep(false)
{
    if (m_shouldEmitDebugHooks || m_baseScopeDepth)
        m_codeBlock->setNeedsFullScopeChain(true);

    emitOpcode(op_enter);
    codeBlock->setGlobalData(m_globalData);
    m_codeBlock->m_numParameters = 1; // Allocate space for "this"

    const DeclarationStacks::FunctionStack& functionStack = evalNode->functionStack();
    for (size_t i = 0; i < functionStack.size(); ++i)
        m_codeBlock->addFunctionDecl(makeFunction(m_globalData, functionStack[i]));

    const DeclarationStacks::VarStack& varStack = evalNode->varStack();
    unsigned numVariables = varStack.size();
    Vector<Identifier> variables;
    variables.reserveCapacity(numVariables);
    for (size_t i = 0; i < numVariables; ++i)
        variables.append(*varStack[i].first);
    codeBlock->adoptVariables(variables);
    codeBlock->m_numCapturedVars = codeBlock->m_numVars;
    preserveLastVar();
}
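// Illustrative note (not in the original source): eval code gets no statically
// addressable registers for its declared variables; the declared names are
// simply handed to the EvalCodeBlock via adoptVariables() above, and
// constRegisterFor() below refuses to resolve locals for EvalCode.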
RegisterID* BytecodeGenerator::emitInitLazyRegister(RegisterID* reg)
{
    emitOpcode(op_init_lazy_reg);
    instructions().append(reg->index());
    return reg;
}

void BytecodeGenerator::addParameter(const Identifier& ident, int parameterIndex)
{
    // Parameters overwrite var declarations, but not function declarations.
    StringImpl* rep = ident.impl();
    if (!m_functions.contains(rep)) {
        symbolTable().set(rep, parameterIndex);
        RegisterID& parameter = registerFor(parameterIndex);
        parameter.setIndex(parameterIndex);
    }

    // To maintain the calling convention, we have to allocate unique space for
    // each parameter, even if the parameter doesn't make it into the symbol table.
    ++m_codeBlock->m_numParameters;
}
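// Illustrative note (not in the original source): parameter registers live at
// negative indices just below the call frame header. The function-code
// constructor above starts at -RegisterFile::CallFrameHeaderSize - parameterCount - 1
// for "this" and then hands each declared parameter the next higher index via
// addParameter(), matching the register-frame diagram at the top of this file.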
RegisterID* BytecodeGenerator::registerFor(const Identifier& ident)
{
    if (ident == propertyNames().thisIdentifier)
        return &m_thisRegister;

    if (!shouldOptimizeLocals())
        return 0;

    SymbolTableEntry entry = symbolTable().get(ident.impl());
    if (entry.isNull())
        return 0;

    if (ident == propertyNames().arguments)
        createArgumentsIfNecessary();

    return createLazyRegisterIfNecessary(&registerFor(entry.getIndex()));
}

bool BytecodeGenerator::willResolveToArguments(const Identifier& ident)
{
    if (ident != propertyNames().arguments)
        return false;

    if (!shouldOptimizeLocals())
        return false;

    SymbolTableEntry entry = symbolTable().get(ident.impl());
    if (entry.isNull())
        return false;

    if (m_codeBlock->usesArguments() && m_codeType == FunctionCode)
        return true;

    return false;
}

RegisterID* BytecodeGenerator::uncheckedRegisterForArguments()
{
    ASSERT(willResolveToArguments(propertyNames().arguments));

    SymbolTableEntry entry = symbolTable().get(propertyNames().arguments.impl());
    ASSERT(!entry.isNull());
    return &registerFor(entry.getIndex());
}

RegisterID* BytecodeGenerator::createLazyRegisterIfNecessary(RegisterID* reg)
{
    if (m_lastLazyFunction <= reg->index() || reg->index() < m_firstLazyFunction)
        return reg;
    emitLazyNewFunction(reg, m_lazyFunctions.get(reg->index()));
    return reg;
}

RegisterID* BytecodeGenerator::constRegisterFor(const Identifier& ident)
{
    if (m_codeType == EvalCode)
        return 0;

    SymbolTableEntry entry = symbolTable().get(ident.impl());
    if (entry.isNull())
        return 0;

    return createLazyRegisterIfNecessary(&registerFor(entry.getIndex()));
}

bool BytecodeGenerator::isLocal(const Identifier& ident)
{
    if (ident == propertyNames().thisIdentifier)
        return true;

    return shouldOptimizeLocals() && symbolTable().contains(ident.impl());
}

bool BytecodeGenerator::isLocalConstant(const Identifier& ident)
{
    return symbolTable().get(ident.impl()).isReadOnly();
}
RegisterID* BytecodeGenerator::newRegister()
{
    m_calleeRegisters.append(m_calleeRegisters.size());
    m_codeBlock->m_numCalleeRegisters = max<int>(m_codeBlock->m_numCalleeRegisters, m_calleeRegisters.size());
    return &m_calleeRegisters.last();
}

RegisterID* BytecodeGenerator::newTemporary()
{
    // Reclaim free register IDs.
    while (m_calleeRegisters.size() && !m_calleeRegisters.last().refCount())
        m_calleeRegisters.removeLast();

    RegisterID* result = newRegister();
    result->setTemporary();
    return result;
}

RegisterID* BytecodeGenerator::highestUsedRegister()
{
    size_t count = m_codeBlock->m_numCalleeRegisters;
    while (m_calleeRegisters.size() < count)
        newRegister();
    return &m_calleeRegisters.last();
}
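// Illustrative note (not in the original source): because newTemporary() pops
// trailing registers whose refCount has dropped to zero before allocating, a
// short-lived intermediate such as the value of "a + b" in "(a + b) + c" does
// not permanently consume a register slot -- this is the stack-like reclamation
// described in the comment at the top of the file.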
PassRefPtr<LabelScope> BytecodeGenerator::newLabelScope(LabelScope::Type type, const Identifier* name)
{
    // Reclaim free label scopes.
    while (m_labelScopes.size() && !m_labelScopes.last().refCount())
        m_labelScopes.removeLast();

    // Allocate new label scope.
    LabelScope scope(type, name, scopeDepth(), newLabel(), type == LabelScope::Loop ? newLabel() : PassRefPtr<Label>()); // Only loops have continue targets.
    m_labelScopes.append(scope);
    return &m_labelScopes.last();
}

PassRefPtr<Label> BytecodeGenerator::newLabel()
{
    // Reclaim free label IDs.
    while (m_labels.size() && !m_labels.last().refCount())
        m_labels.removeLast();

    // Allocate new label ID.
    m_labels.append(m_codeBlock);
    return &m_labels.last();
}
PassRefPtr<Label> BytecodeGenerator::emitLabel(Label* l0)
{
    unsigned newLabelIndex = instructions().size();
    l0->setLocation(newLabelIndex);

    if (m_codeBlock->numberOfJumpTargets()) {
        unsigned lastLabelIndex = m_codeBlock->lastJumpTarget();
        ASSERT(lastLabelIndex <= newLabelIndex);
        if (newLabelIndex == lastLabelIndex) {
            // Peephole optimizations have already been disabled by emitting the last label
            return l0;
        }
    }

    m_codeBlock->addJumpTarget(newLabelIndex);

    // This disables peephole optimizations when an instruction is a jump target
    m_lastOpcodeID = op_end;
    return l0;
}
void BytecodeGenerator::emitOpcode(OpcodeID opcodeID)
{
#ifndef NDEBUG
    size_t opcodePosition = instructions().size();
    ASSERT(opcodePosition - m_lastOpcodePosition == opcodeLength(m_lastOpcodeID) || m_lastOpcodeID == op_end);
    m_lastOpcodePosition = opcodePosition;
#endif
    instructions().append(globalData()->interpreter->getOpcode(opcodeID));
    m_lastOpcodeID = opcodeID;
}
void BytecodeGenerator::retrieveLastBinaryOp(int& dstIndex, int& src1Index, int& src2Index)
{
    ASSERT(instructions().size() >= 4);
    size_t size = instructions().size();
    dstIndex = instructions().at(size - 3).u.operand;
    src1Index = instructions().at(size - 2).u.operand;
    src2Index = instructions().at(size - 1).u.operand;
}

void BytecodeGenerator::retrieveLastUnaryOp(int& dstIndex, int& srcIndex)
{
    ASSERT(instructions().size() >= 3);
    size_t size = instructions().size();
    dstIndex = instructions().at(size - 2).u.operand;
    srcIndex = instructions().at(size - 1).u.operand;
}

void ALWAYS_INLINE BytecodeGenerator::rewindBinaryOp()
{
    ASSERT(instructions().size() >= 4);
    instructions().shrink(instructions().size() - 4);
    m_lastOpcodeID = op_end;
}

void ALWAYS_INLINE BytecodeGenerator::rewindUnaryOp()
{
    ASSERT(instructions().size() >= 3);
    instructions().shrink(instructions().size() - 3);
    m_lastOpcodeID = op_end;
}
PassRefPtr<Label> BytecodeGenerator::emitJump(Label* target)
{
    size_t begin = instructions().size();
    emitOpcode(target->isForward() ? op_jmp : op_loop);
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}
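// Illustrative note (not in the original source): the two conditional-jump
// emitters below peephole-fuse a preceding compare or null-test with the jump
// whenever the compare's destination is a dead temporary. For example,
// "if (x < y) ..." would otherwise produce roughly
//     less t1, x, y
//     jtrue t1, L
// but the op_less is rewound and the pair is re-emitted as the single
//     jless x, y, L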
PassRefPtr<Label> BytecodeGenerator::emitJumpIfTrue(RegisterID* cond, Label* target)
{
    if (m_lastOpcodeID == op_less) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(target->isForward() ? op_jless : op_loop_if_less);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_lesseq) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(target->isForward() ? op_jlesseq : op_loop_if_lesseq);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jeq_null);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jneq_null);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    }

    size_t begin = instructions().size();

    emitOpcode(target->isForward() ? op_jtrue : op_loop_if_true);
    instructions().append(cond->index());
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}
PassRefPtr<Label> BytecodeGenerator::emitJumpIfFalse(RegisterID* cond, Label* target)
{
    if (m_lastOpcodeID == op_less && target->isForward()) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jnless);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_lesseq && target->isForward()) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jnlesseq);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_not) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(target->isForward() ? op_jtrue : op_loop_if_true);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jneq_null);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jeq_null);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    }

    size_t begin = instructions().size();
    emitOpcode(target->isForward() ? op_jfalse : op_loop_if_false);
    instructions().append(cond->index());
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}
PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionCall(RegisterID* cond, Label* target)
{
    size_t begin = instructions().size();

    emitOpcode(op_jneq_ptr);
    instructions().append(cond->index());
    instructions().append(Instruction(*m_globalData, m_codeBlock->ownerExecutable(), m_scopeChain->globalObject->callFunction()));
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}

PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionApply(RegisterID* cond, Label* target)
{
    size_t begin = instructions().size();

    emitOpcode(op_jneq_ptr);
    instructions().append(cond->index());
    instructions().append(Instruction(*m_globalData, m_codeBlock->ownerExecutable(), m_scopeChain->globalObject->applyFunction()));
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}
unsigned BytecodeGenerator::addConstant(const Identifier& ident)
{
    StringImpl* rep = ident.impl();
    pair<IdentifierMap::iterator, bool> result = m_identifierMap.add(rep, m_codeBlock->numberOfIdentifiers());
    if (result.second) // new entry
        m_codeBlock->addIdentifier(Identifier(m_globalData, rep));

    return result.first->second;
}

RegisterID* BytecodeGenerator::addConstantValue(JSValue v)
{
    int index = m_nextConstantOffset;

    pair<JSValueMap::iterator, bool> result = m_jsValueMap.add(JSValue::encode(v), m_nextConstantOffset);
    if (result.second) {
        m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset);
        ++m_nextConstantOffset;
        m_codeBlock->addConstant(JSValue(v));
    } else
        index = result.first->second;

    return &m_constantPoolRegisters[index];
}

unsigned BytecodeGenerator::addRegExp(RegExp* r)
{
    return m_codeBlock->addRegExp(r);
}
RegisterID* BytecodeGenerator::emitMove(RegisterID* dst, RegisterID* src)
{
    emitOpcode(op_mov);
    instructions().append(dst->index());
    instructions().append(src->index());
    return dst;
}

RegisterID* BytecodeGenerator::emitUnaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src)
{
    emitOpcode(opcodeID);
    instructions().append(dst->index());
    instructions().append(src->index());
    return dst;
}

RegisterID* BytecodeGenerator::emitPreInc(RegisterID* srcDst)
{
    emitOpcode(op_pre_inc);
    instructions().append(srcDst->index());
    return srcDst;
}

RegisterID* BytecodeGenerator::emitPreDec(RegisterID* srcDst)
{
    emitOpcode(op_pre_dec);
    instructions().append(srcDst->index());
    return srcDst;
}

RegisterID* BytecodeGenerator::emitPostInc(RegisterID* dst, RegisterID* srcDst)
{
    emitOpcode(op_post_inc);
    instructions().append(dst->index());
    instructions().append(srcDst->index());
    return dst;
}

RegisterID* BytecodeGenerator::emitPostDec(RegisterID* dst, RegisterID* srcDst)
{
    emitOpcode(op_post_dec);
    instructions().append(dst->index());
    instructions().append(srcDst->index());
    return dst;
}
RegisterID* BytecodeGenerator::emitBinaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2, OperandTypes types)
{
    emitOpcode(opcodeID);
    instructions().append(dst->index());
    instructions().append(src1->index());
    instructions().append(src2->index());

    if (opcodeID == op_bitor || opcodeID == op_bitand || opcodeID == op_bitxor ||
        opcodeID == op_add || opcodeID == op_mul || opcodeID == op_sub || opcodeID == op_div)
        instructions().append(types.toInt());

    return dst;
}
RegisterID* BytecodeGenerator::emitEqualityOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2)
{
    if (m_lastOpcodeID == op_typeof) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (src1->index() == dstIndex
            && src1->isTemporary()
            && m_codeBlock->isConstantRegisterIndex(src2->index())
            && m_codeBlock->constantRegister(src2->index()).get().isString()) {
            const UString& value = asString(m_codeBlock->constantRegister(src2->index()).get())->tryGetValue();
            if (value == "undefined") {
                rewindUnaryOp();
                emitOpcode(op_is_undefined);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "boolean") {
                rewindUnaryOp();
                emitOpcode(op_is_boolean);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "number") {
                rewindUnaryOp();
                emitOpcode(op_is_number);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "string") {
                rewindUnaryOp();
                emitOpcode(op_is_string);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "object") {
                rewindUnaryOp();
                emitOpcode(op_is_object);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "function") {
                rewindUnaryOp();
                emitOpcode(op_is_function);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
        }
    }

    emitOpcode(opcodeID);
    instructions().append(dst->index());
    instructions().append(src1->index());
    instructions().append(src2->index());
    return dst;
}
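// Illustrative example (not in the original source): for 'typeof x == "number"',
// the op_typeof whose result is only consumed by this comparison is rewound and
// the whole test collapses to a single op_is_number on x; the other type names
// handled above collapse to their corresponding op_is_* opcodes in the same way.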
RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, bool b)
{
    return emitLoad(dst, jsBoolean(b));
}

RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, double number)
{
    // FIXME: Our hash tables won't hold infinity, so we make a new JSValue each time.
    // Later we can do the extra work to handle that like the other cases. They also don't
    // work correctly with NaN as a key.
    if (isnan(number) || number == HashTraits<double>::emptyValue() || HashTraits<double>::isDeletedValue(number))
        return emitLoad(dst, jsNumber(number));
    JSValue& valueInMap = m_numberMap.add(number, JSValue()).first->second;
    if (!valueInMap)
        valueInMap = jsNumber(number);
    return emitLoad(dst, valueInMap);
}

RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, const Identifier& identifier)
{
    JSString*& stringInMap = m_stringMap.add(identifier.impl(), 0).first->second;
    if (!stringInMap)
        stringInMap = jsOwnedString(globalData(), identifier.ustring());
    return emitLoad(dst, JSValue(stringInMap));
}

RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, JSValue v)
{
    RegisterID* constantID = addConstantValue(v);
    if (dst)
        return emitMove(dst, constantID);
    return constantID;
}
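// Illustrative note (not in the original source): repeated loads of the same
// number or string literal reuse a single constant-pool register -- m_numberMap
// and m_stringMap above cache the JSValue, and addConstantValue() deduplicates
// by encoded value -- so two uses of the literal 42 in one function share one
// constant register.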
bool BytecodeGenerator::findScopedProperty(const Identifier& property, int& index, size_t& stackDepth, bool forWriting, bool& requiresDynamicChecks, JSObject*& globalObject)
{
    // Cases where we cannot statically optimize the lookup.
    if (property == propertyNames().arguments || !canOptimizeNonLocals()) {
        stackDepth = 0;
        index = missingSymbolMarker();

        if (shouldOptimizeLocals() && m_codeType == GlobalCode) {
            ScopeChainIterator iter = m_scopeChain->begin();
            globalObject = iter->get();
            ASSERT((++iter) == m_scopeChain->end());
        }
        return false;
    }

    size_t depth = 0;
    requiresDynamicChecks = false;
    ScopeChainIterator iter = m_scopeChain->begin();
    ScopeChainIterator end = m_scopeChain->end();
    for (; iter != end; ++iter, ++depth) {
        JSObject* currentScope = iter->get();
        if (!currentScope->isVariableObject())
            break;
        JSVariableObject* currentVariableObject = static_cast<JSVariableObject*>(currentScope);
        SymbolTableEntry entry = currentVariableObject->symbolTable().get(property.impl());

        // Found the property
        if (!entry.isNull()) {
            if (entry.isReadOnly() && forWriting) {
                stackDepth = 0;
                index = missingSymbolMarker();
                if (shouldOptimizeLocals() && m_codeType == GlobalCode)
                    globalObject = currentVariableObject;
                return false;
            }
            stackDepth = depth + m_codeBlock->needsFullScopeChain();
            index = entry.getIndex();
            if (shouldOptimizeLocals() && m_codeType == GlobalCode)
                globalObject = currentVariableObject;
            return true;
        }
        bool scopeRequiresDynamicChecks = false;
        if (currentVariableObject->isDynamicScope(scopeRequiresDynamicChecks))
            break;
        requiresDynamicChecks |= scopeRequiresDynamicChecks;
    }
    // Can't locate the property but we're able to avoid a few lookups.
    stackDepth = depth + m_codeBlock->needsFullScopeChain();
    index = missingSymbolMarker();
    JSObject* scope = iter->get();
    if (++iter == end)
        globalObject = scope;
    return true;
}
void BytecodeGenerator::emitCheckHasInstance(RegisterID* base)
{
    emitOpcode(op_check_has_instance);
    instructions().append(base->index());
}

RegisterID* BytecodeGenerator::emitInstanceOf(RegisterID* dst, RegisterID* value, RegisterID* base, RegisterID* basePrototype)
{
    emitOpcode(op_instanceof);
    instructions().append(dst->index());
    instructions().append(value->index());
    instructions().append(base->index());
    instructions().append(basePrototype->index());
    return dst;
}

static const unsigned maxGlobalResolves = 128;

bool BytecodeGenerator::shouldAvoidResolveGlobal()
{
    return m_codeBlock->globalResolveInfoCount() > maxGlobalResolves && !m_labelScopes.size();
}
RegisterID* BytecodeGenerator::emitResolve(RegisterID* dst, const Identifier& property)
{
    size_t depth = 0;
    int index = 0;
    JSObject* globalObject = 0;
    bool requiresDynamicChecks = false;
    if (!findScopedProperty(property, index, depth, false, requiresDynamicChecks, globalObject) && !globalObject) {
        // We can't optimise at all :-(
        emitOpcode(op_resolve);
        instructions().append(dst->index());
        instructions().append(addConstant(property));
        return dst;
    }
    if (shouldAvoidResolveGlobal()) {
        globalObject = 0;
        requiresDynamicChecks = true;
    }

    if (globalObject) {
        bool forceGlobalResolve = false;

        if (index != missingSymbolMarker() && !forceGlobalResolve && !requiresDynamicChecks) {
            // Directly index the property lookup across multiple scopes.
            return emitGetScopedVar(dst, depth, index, globalObject);
        }

#if ENABLE(JIT)
        m_codeBlock->addGlobalResolveInfo(instructions().size());
#endif
#if ENABLE(INTERPRETER)
        m_codeBlock->addGlobalResolveInstruction(instructions().size());
#endif
        emitOpcode(requiresDynamicChecks ? op_resolve_global_dynamic : op_resolve_global);
        instructions().append(dst->index());
        instructions().append(addConstant(property));
        instructions().append(0);
        instructions().append(0);
        if (requiresDynamicChecks)
            instructions().append(depth);
        return dst;
    }

    if (requiresDynamicChecks) {
        // If we get here we have eval nested inside a |with| just give up
        emitOpcode(op_resolve);
        instructions().append(dst->index());
        instructions().append(addConstant(property));
        return dst;
    }

    if (index != missingSymbolMarker()) {
        // Directly index the property lookup across multiple scopes.
        return emitGetScopedVar(dst, depth, index, globalObject);
    }

    // In this case we are at least able to drop a few scope chains from the
    // lookup chain, although we still need to hash from then on.
    emitOpcode(op_resolve_skip);
    instructions().append(dst->index());
    instructions().append(addConstant(property));
    instructions().append(depth);
    return dst;
}
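// Illustrative summary (not in the original source) of emitResolve above: a
// statically located variable becomes a direct op_get_global_var or
// op_get_scoped_var; a property known to live on the global object but whose
// slot is not known statically becomes op_resolve_global (or the _dynamic
// variant when dynamic scopes such as 'with' intervene); a lookup that can skip
// some scopes becomes op_resolve_skip; and the fully dynamic case falls back to
// plain op_resolve.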
RegisterID* BytecodeGenerator::emitGetScopedVar(RegisterID* dst, size_t depth, int index, JSValue globalObject)
{
    if (globalObject) {
        emitOpcode(op_get_global_var);
        instructions().append(dst->index());
        instructions().append(index);
        return dst;
    }

    emitOpcode(op_get_scoped_var);
    instructions().append(dst->index());
    instructions().append(index);
    instructions().append(depth);
    return dst;
}

RegisterID* BytecodeGenerator::emitPutScopedVar(size_t depth, int index, RegisterID* value, JSValue globalObject)
{
    if (globalObject) {
        emitOpcode(op_put_global_var);
        instructions().append(index);
        instructions().append(value->index());
        return value;
    }
    emitOpcode(op_put_scoped_var);
    instructions().append(index);
    instructions().append(depth);
    instructions().append(value->index());
    return value;
}
RegisterID* BytecodeGenerator::emitResolveBase(RegisterID* dst, const Identifier& property)
{
    size_t depth = 0;
    int index = 0;
    JSObject* globalObject = 0;
    bool requiresDynamicChecks = false;
    findScopedProperty(property, index, depth, false, requiresDynamicChecks, globalObject);
    if (!globalObject || requiresDynamicChecks) {
        // We can't optimise at all :-(
        emitOpcode(op_resolve_base);
        instructions().append(dst->index());
        instructions().append(addConstant(property));
        instructions().append(false);
        return dst;
    }

    // Global object is the base
    return emitLoad(dst, JSValue(globalObject));
}

RegisterID* BytecodeGenerator::emitResolveBaseForPut(RegisterID* dst, const Identifier& property)
{
    if (!m_codeBlock->isStrictMode())
        return emitResolveBase(dst, property);
    size_t depth = 0;
    int index = 0;
    JSObject* globalObject = 0;
    bool requiresDynamicChecks = false;
    findScopedProperty(property, index, depth, false, requiresDynamicChecks, globalObject);
    if (!globalObject || requiresDynamicChecks) {
        // We can't optimise at all :-(
        emitOpcode(op_resolve_base);
        instructions().append(dst->index());
        instructions().append(addConstant(property));
        instructions().append(true);
        return dst;
    }

    // Global object is the base
    RefPtr<RegisterID> result = emitLoad(dst, JSValue(globalObject));
    emitOpcode(op_ensure_property_exists);
    instructions().append(dst->index());
    instructions().append(addConstant(property));
    return result.get();
}
RegisterID* BytecodeGenerator::emitResolveWithBase(RegisterID* baseDst, RegisterID* propDst, const Identifier& property)
{
    size_t depth = 0;
    int index = 0;
    JSObject* globalObject = 0;
    bool requiresDynamicChecks = false;
    if (!findScopedProperty(property, index, depth, false, requiresDynamicChecks, globalObject) || !globalObject || requiresDynamicChecks) {
        // We can't optimise at all :-(
        emitOpcode(op_resolve_with_base);
        instructions().append(baseDst->index());
        instructions().append(propDst->index());
        instructions().append(addConstant(property));
        return baseDst;
    }

    bool forceGlobalResolve = false;

    // Global object is the base
    emitLoad(baseDst, JSValue(globalObject));

    if (index != missingSymbolMarker() && !forceGlobalResolve) {
        // Directly index the property lookup across multiple scopes.
        emitGetScopedVar(propDst, depth, index, globalObject);
        return baseDst;
    }

    if (shouldAvoidResolveGlobal()) {
        emitOpcode(op_resolve);
        instructions().append(propDst->index());
        instructions().append(addConstant(property));
        return baseDst;
    }

#if ENABLE(JIT)
    m_codeBlock->addGlobalResolveInfo(instructions().size());
#endif
#if ENABLE(INTERPRETER)
    m_codeBlock->addGlobalResolveInstruction(instructions().size());
#endif
    emitOpcode(requiresDynamicChecks ? op_resolve_global_dynamic : op_resolve_global);
    instructions().append(propDst->index());
    instructions().append(addConstant(property));
    instructions().append(0);
    instructions().append(0);
    if (requiresDynamicChecks)
        instructions().append(depth);
    return baseDst;
}

void BytecodeGenerator::emitMethodCheck()
{
    emitOpcode(op_method_check);
}
RegisterID* BytecodeGenerator::emitGetById(RegisterID* dst, RegisterID* base, const Identifier& property)
{
#if ENABLE(JIT)
    m_codeBlock->addStructureStubInfo(StructureStubInfo(access_get_by_id));
#endif
#if ENABLE(INTERPRETER)
    m_codeBlock->addPropertyAccessInstruction(instructions().size());
#endif

    emitOpcode(op_get_by_id);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(addConstant(property));
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    return dst;
}

RegisterID* BytecodeGenerator::emitGetArgumentsLength(RegisterID* dst, RegisterID* base)
{
    emitOpcode(op_get_arguments_length);
    instructions().append(dst->index());
    ASSERT(base->index() == m_codeBlock->argumentsRegister());
    instructions().append(base->index());
    instructions().append(addConstant(propertyNames().length));
    return dst;
}
RegisterID* BytecodeGenerator::emitPutById(RegisterID* base, const Identifier& property, RegisterID* value)
{
#if ENABLE(JIT)
    m_codeBlock->addStructureStubInfo(StructureStubInfo(access_put_by_id));
#endif
#if ENABLE(INTERPRETER)
    m_codeBlock->addPropertyAccessInstruction(instructions().size());
#endif

    emitOpcode(op_put_by_id);
    instructions().append(base->index());
    instructions().append(addConstant(property));
    instructions().append(value->index());
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    return value;
}

RegisterID* BytecodeGenerator::emitDirectPutById(RegisterID* base, const Identifier& property, RegisterID* value)
{
#if ENABLE(JIT)
    m_codeBlock->addStructureStubInfo(StructureStubInfo(access_put_by_id));
#endif
#if ENABLE(INTERPRETER)
    m_codeBlock->addPropertyAccessInstruction(instructions().size());
#endif

    emitOpcode(op_put_by_id);
    instructions().append(base->index());
    instructions().append(addConstant(property));
    instructions().append(value->index());
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(property != m_globalData->propertyNames->underscoreProto);
    return value;
}
RegisterID* BytecodeGenerator::emitPutGetter(RegisterID* base, const Identifier& property, RegisterID* value)
{
    emitOpcode(op_put_getter);
    instructions().append(base->index());
    instructions().append(addConstant(property));
    instructions().append(value->index());
    return value;
}

RegisterID* BytecodeGenerator::emitPutSetter(RegisterID* base, const Identifier& property, RegisterID* value)
{
    emitOpcode(op_put_setter);
    instructions().append(base->index());
    instructions().append(addConstant(property));
    instructions().append(value->index());
    return value;
}

RegisterID* BytecodeGenerator::emitDeleteById(RegisterID* dst, RegisterID* base, const Identifier& property)
{
    emitOpcode(op_del_by_id);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(addConstant(property));
    return dst;
}

RegisterID* BytecodeGenerator::emitGetArgumentByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
{
    emitOpcode(op_get_argument_by_val);
    instructions().append(dst->index());
    ASSERT(base->index() == m_codeBlock->argumentsRegister());
    instructions().append(base->index());
    instructions().append(property->index());
    return dst;
}
* BytecodeGenerator::emitGetByVal(RegisterID
* dst
, RegisterID
* base
, RegisterID
* property
)
1516 for (size_t i
= m_forInContextStack
.size(); i
> 0; i
--) {
1517 ForInContext
& context
= m_forInContextStack
[i
- 1];
1518 if (context
.propertyRegister
== property
) {
1519 emitOpcode(op_get_by_pname
);
1520 instructions().append(dst
->index());
1521 instructions().append(base
->index());
1522 instructions().append(property
->index());
1523 instructions().append(context
.expectedSubscriptRegister
->index());
1524 instructions().append(context
.iterRegister
->index());
1525 instructions().append(context
.indexRegister
->index());
1529 emitOpcode(op_get_by_val
);
1530 instructions().append(dst
->index());
1531 instructions().append(base
->index());
1532 instructions().append(property
->index());
1536 RegisterID
* BytecodeGenerator::emitPutByVal(RegisterID
* base
, RegisterID
* property
, RegisterID
* value
)
1538 emitOpcode(op_put_by_val
);
1539 instructions().append(base
->index());
1540 instructions().append(property
->index());
1541 instructions().append(value
->index());
1545 RegisterID
* BytecodeGenerator::emitDeleteByVal(RegisterID
* dst
, RegisterID
* base
, RegisterID
* property
)
1547 emitOpcode(op_del_by_val
);
1548 instructions().append(dst
->index());
1549 instructions().append(base
->index());
1550 instructions().append(property
->index());
1554 RegisterID
* BytecodeGenerator::emitPutByIndex(RegisterID
* base
, unsigned index
, RegisterID
* value
)
1556 emitOpcode(op_put_by_index
);
1557 instructions().append(base
->index());
1558 instructions().append(index
);
1559 instructions().append(value
->index());
1563 RegisterID
* BytecodeGenerator::emitNewObject(RegisterID
* dst
)
1565 emitOpcode(op_new_object
);
1566 instructions().append(dst
->index());
unsigned BytecodeGenerator::addConstantBuffer(unsigned length)
{
    return m_codeBlock->addConstantBuffer(length);
}

JSString* BytecodeGenerator::addStringConstant(const Identifier& identifier)
{
    JSString*& stringInMap = m_stringMap.add(identifier.impl(), 0).first->second;
    if (!stringInMap) {
        stringInMap = jsString(globalData(), identifier.ustring());
        addConstantValue(stringInMap);
    }
    return stringInMap;
}
RegisterID* BytecodeGenerator::emitNewArray(RegisterID* dst, ElementNode* elements, unsigned length)
{
#if !ASSERT_DISABLED
    unsigned checkLength = 0;
#endif
    bool hadVariableExpression = false;
    if (length) {
        for (ElementNode* n = elements; n; n = n->next()) {
            if (!n->value()->isNumber() && !n->value()->isString()) {
                hadVariableExpression = true;
                break;
            }
            if (n->elision())
                break;
#if !ASSERT_DISABLED
            checkLength++;
#endif
        }
        if (!hadVariableExpression) {
            ASSERT(length == checkLength);
            unsigned constantBufferIndex = addConstantBuffer(length);
            JSValue* constantBuffer = m_codeBlock->constantBuffer(constantBufferIndex);
            unsigned index = 0;
            for (ElementNode* n = elements; index < length; n = n->next()) {
                if (n->value()->isNumber())
                    constantBuffer[index++] = jsNumber(static_cast<NumberNode*>(n->value())->value());
                else {
                    ASSERT(n->value()->isString());
                    constantBuffer[index++] = addStringConstant(static_cast<StringNode*>(n->value())->value());
                }
            }
            emitOpcode(op_new_array_buffer);
            instructions().append(dst->index());
            instructions().append(constantBufferIndex);
            instructions().append(length);
            return dst;
        }
    }

    Vector<RefPtr<RegisterID>, 16> argv;
    for (ElementNode* n = elements; n; n = n->next()) {
        if (n->elision())
            break;
        argv.append(newTemporary());
        // op_new_array requires the initial values to be a sequential range of registers
        ASSERT(argv.size() == 1 || argv[argv.size() - 1]->index() == argv[argv.size() - 2]->index() + 1);
        emitNode(argv.last().get(), n->value());
    }
    emitOpcode(op_new_array);
    instructions().append(dst->index());
    instructions().append(argv.size() ? argv[0]->index() : 0); // argv
    instructions().append(argv.size()); // argc
    return dst;
}
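// Illustrative example (not in the original source): a literal like
// [1, 2, "three"], whose elements are all numeric or string constants, is
// emitted as op_new_array_buffer over a pre-filled constant buffer; something
// like [1, x, 3] contains a variable expression, so its elements are instead
// evaluated into consecutive temporaries and op_new_array is used.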
RegisterID* BytecodeGenerator::emitNewFunction(RegisterID* dst, FunctionBodyNode* function)
{
    return emitNewFunctionInternal(dst, m_codeBlock->addFunctionDecl(makeFunction(m_globalData, function)), false);
}

RegisterID* BytecodeGenerator::emitLazyNewFunction(RegisterID* dst, FunctionBodyNode* function)
{
    std::pair<FunctionOffsetMap::iterator, bool> ptr = m_functionOffsets.add(function, 0);
    if (ptr.second)
        ptr.first->second = m_codeBlock->addFunctionDecl(makeFunction(m_globalData, function));
    return emitNewFunctionInternal(dst, ptr.first->second, true);
}

RegisterID* BytecodeGenerator::emitNewFunctionInternal(RegisterID* dst, unsigned index, bool doNullCheck)
{
    createActivationIfNecessary();
    emitOpcode(op_new_func);
    instructions().append(dst->index());
    instructions().append(index);
    instructions().append(doNullCheck);
    return dst;
}

RegisterID* BytecodeGenerator::emitNewRegExp(RegisterID* dst, RegExp* regExp)
{
    emitOpcode(op_new_regexp);
    instructions().append(dst->index());
    instructions().append(addRegExp(regExp));
    return dst;
}

RegisterID* BytecodeGenerator::emitNewFunctionExpression(RegisterID* r0, FuncExprNode* n)
{
    FunctionBodyNode* function = n->body();
    unsigned index = m_codeBlock->addFunctionExpr(makeFunction(m_globalData, function));

    createActivationIfNecessary();
    emitOpcode(op_new_func_exp);
    instructions().append(r0->index());
    instructions().append(index);
    return r0;
}
RegisterID* BytecodeGenerator::emitCall(RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
{
    return emitCall(op_call, dst, func, callArguments, divot, startOffset, endOffset);
}

void BytecodeGenerator::createArgumentsIfNecessary()
{
    if (m_codeType != FunctionCode)
        return;

    if (!m_codeBlock->usesArguments())
        return;

    // If we're in strict mode we tear off the arguments on function
    // entry, so there's no need to check if we need to create them
    // now.
    if (m_codeBlock->isStrictMode())
        return;

    emitOpcode(op_create_arguments);
    instructions().append(m_codeBlock->argumentsRegister());
}

void BytecodeGenerator::createActivationIfNecessary()
{
    if (m_hasCreatedActivation)
        return;
    if (!m_codeBlock->needsFullScopeChain())
        return;
    emitOpcode(op_create_activation);
    instructions().append(m_activationRegister->index());
}

RegisterID* BytecodeGenerator::emitCallEval(RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
{
    return emitCall(op_call_eval, dst, func, callArguments, divot, startOffset, endOffset);
}
RegisterID* BytecodeGenerator::emitCall(OpcodeID opcodeID, RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
{
    ASSERT(opcodeID == op_call || opcodeID == op_call_eval);
    ASSERT(func->refCount());

    if (m_shouldEmitProfileHooks)
        emitMove(callArguments.profileHookRegister(), func);

    // Generate code for arguments.
    unsigned argumentIndex = 0;
    for (ArgumentListNode* n = callArguments.argumentsNode()->m_listNode; n; n = n->m_next)
        emitNode(callArguments.argumentRegister(argumentIndex++), n);

    // Reserve space for call frame.
    Vector<RefPtr<RegisterID>, RegisterFile::CallFrameHeaderSize> callFrame;
    for (int i = 0; i < RegisterFile::CallFrameHeaderSize; ++i)
        callFrame.append(newTemporary());

    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_will_call);
        instructions().append(callArguments.profileHookRegister()->index());
    }

    emitExpressionInfo(divot, startOffset, endOffset);

#if ENABLE(JIT)
    m_codeBlock->addCallLinkInfo();
#endif

    // Emit call.
    emitOpcode(opcodeID);
    instructions().append(func->index()); // func
    instructions().append(callArguments.count()); // argCount
    instructions().append(callArguments.callFrame()); // registerOffset
    if (dst != ignoredResult()) {
        emitOpcode(op_call_put_result);
        instructions().append(dst->index()); // dst
    }

    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_did_call);
        instructions().append(callArguments.profileHookRegister()->index());
    }

    return dst;
}
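// Illustrative note (not in the original source): the CallFrameHeaderSize
// temporaries reserved in 'callFrame' above are not written by the generator;
// they simply hold open the register slots where the callee's call-frame header
// will live, so that the caller and callee frames can overlap as described in
// the diagrams at the top of this file.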
RegisterID* BytecodeGenerator::emitLoadVarargs(RegisterID* argCountDst, RegisterID* thisRegister, RegisterID* arguments)
{
    ASSERT(argCountDst->index() < arguments->index());
    emitOpcode(op_load_varargs);
    instructions().append(argCountDst->index());
    instructions().append(arguments->index());
    instructions().append(thisRegister->index() + RegisterFile::CallFrameHeaderSize); // initial registerOffset
    return argCountDst;
}

RegisterID* BytecodeGenerator::emitCallVarargs(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, RegisterID* argCountRegister, unsigned divot, unsigned startOffset, unsigned endOffset)
{
    ASSERT(func->refCount());
    ASSERT(thisRegister->refCount());
    ASSERT(dst != func);
    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_will_call);
        instructions().append(func->index());
    }

    emitExpressionInfo(divot, startOffset, endOffset);

    // Emit call.
    emitOpcode(op_call_varargs);
    instructions().append(func->index()); // func
    instructions().append(argCountRegister->index()); // arg count
    instructions().append(thisRegister->index() + RegisterFile::CallFrameHeaderSize); // initial registerOffset
    if (dst != ignoredResult()) {
        emitOpcode(op_call_put_result);
        instructions().append(dst->index()); // dst
    }
    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_did_call);
        instructions().append(func->index());
    }
    return dst;
}

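// Informal example (assumed shape): Function.prototype.apply-style calls are compiled as
//     load_varargs     argCountDst, arguments, thisRegister + CallFrameHeaderSize
//     call_varargs     func, argCountRegister, thisRegister + CallFrameHeaderSize
//     call_put_result  dst            ; only when the result is used
// load_varargs spreads the 'arguments' value into registers and records the count,
// which call_varargs then consumes in place of a static argument count.
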
RegisterID* BytecodeGenerator::emitReturn(RegisterID* src)
{
    if (m_codeBlock->needsFullScopeChain()) {
        emitOpcode(op_tear_off_activation);
        instructions().append(m_activationRegister->index());
        instructions().append(m_codeBlock->argumentsRegister());
    } else if (m_codeBlock->usesArguments() && m_codeBlock->m_numParameters > 1
               && !m_codeBlock->isStrictMode()) { // If there are no named parameters, there's nothing to tear off, since extra / unnamed parameters get copied to the arguments object at construct time.
        emitOpcode(op_tear_off_arguments);
        instructions().append(m_codeBlock->argumentsRegister());
    }

    // Constructors use op_ret_object_or_this to check the result is an
    // object, unless we can trivially determine the check is not
    // necessary (currently, if the return value is 'this').
    if (isConstructor() && (src->index() != m_thisRegister.index())) {
        emitOpcode(op_ret_object_or_this);
        instructions().append(src->index());
        instructions().append(m_thisRegister.index());
        return src;
    }
    return emitUnaryNoDstOp(op_ret, src);
}

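// Informal example: in a constructor, 'return 42;' returns a non-object, so the code
// above emits ret_object_or_this with both the expression register and 'this'; at run
// time the construct result falls back to 'this' whenever the returned value is not an
// object. 'return this;' can use a plain ret, since the check is then unnecessary.
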
RegisterID* BytecodeGenerator::emitUnaryNoDstOp(OpcodeID opcodeID, RegisterID* src)
{
    emitOpcode(opcodeID);
    instructions().append(src->index());
    return src;
}

RegisterID* BytecodeGenerator::emitConstruct(RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
{
    ASSERT(func->refCount());

    if (m_shouldEmitProfileHooks)
        emitMove(callArguments.profileHookRegister(), func);

    // Generate code for arguments.
    unsigned argumentIndex = 0;
    if (ArgumentsNode* argumentsNode = callArguments.argumentsNode()) {
        for (ArgumentListNode* n = argumentsNode->m_listNode; n; n = n->m_next)
            emitNode(callArguments.argumentRegister(argumentIndex++), n);
    }

    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_will_call);
        instructions().append(callArguments.profileHookRegister()->index());
    }

    // Reserve space for call frame.
    Vector<RefPtr<RegisterID>, RegisterFile::CallFrameHeaderSize> callFrame;
    for (int i = 0; i < RegisterFile::CallFrameHeaderSize; ++i)
        callFrame.append(newTemporary());

    emitExpressionInfo(divot, startOffset, endOffset);

#if ENABLE(JIT)
    m_codeBlock->addCallLinkInfo();
#endif

    emitOpcode(op_construct);
    instructions().append(func->index()); // func
    instructions().append(callArguments.count()); // argCount
    instructions().append(callArguments.callFrame()); // registerOffset
    if (dst != ignoredResult()) {
        emitOpcode(op_call_put_result);
        instructions().append(dst->index()); // dst
    }

    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_did_call);
        instructions().append(callArguments.profileHookRegister()->index());
    }

    return dst;
}

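// Informal example (assumed shape): 'new F(x)' compiles to roughly
//     construct        func, argCount, registerOffset
//     call_put_result  dst            ; only when the result is used
// mirroring emitCall above, with op_construct responsible for producing the new object.
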
RegisterID* BytecodeGenerator::emitStrcat(RegisterID* dst, RegisterID* src, int count)
{
    emitOpcode(op_strcat);
    instructions().append(dst->index());
    instructions().append(src->index());
    instructions().append(count);

    return dst;
}

void BytecodeGenerator::emitToPrimitive(RegisterID* dst, RegisterID* src)
{
    emitOpcode(op_to_primitive);
    instructions().append(dst->index());
    instructions().append(src->index());
}

RegisterID* BytecodeGenerator::emitPushScope(RegisterID* scope)
{
    ASSERT(scope->isTemporary());
    ControlFlowContext context;
    context.isFinallyBlock = false;
    m_scopeContextStack.append(context);
    m_dynamicScopeDepth++;

    return emitUnaryNoDstOp(op_push_scope, scope);
}

void BytecodeGenerator::emitPopScope()
{
    ASSERT(m_scopeContextStack.size());
    ASSERT(!m_scopeContextStack.last().isFinallyBlock);

    emitOpcode(op_pop_scope);

    m_scopeContextStack.removeLast();
    m_dynamicScopeDepth--;
}

void BytecodeGenerator::emitDebugHook(DebugHookID debugHookID, int firstLine, int lastLine)
{
#if ENABLE(DEBUG_WITH_BREAKPOINT)
    if (debugHookID != DidReachBreakpoint)
        return;
#else
    if (!m_shouldEmitDebugHooks)
        return;
#endif
    emitOpcode(op_debug);
    instructions().append(debugHookID);
    instructions().append(firstLine);
    instructions().append(lastLine);
}

void BytecodeGenerator::pushFinallyContext(Label* target, RegisterID* retAddrDst)
{
    ControlFlowContext scope;
    scope.isFinallyBlock = true;
    FinallyContext context = { target, retAddrDst };
    scope.finallyContext = context;
    m_scopeContextStack.append(scope);
    m_finallyDepth++;
}

void BytecodeGenerator::popFinallyContext()
{
    ASSERT(m_scopeContextStack.size());
    ASSERT(m_scopeContextStack.last().isFinallyBlock);
    ASSERT(m_finallyDepth > 0);
    m_scopeContextStack.removeLast();
    m_finallyDepth--;
}

LabelScope* BytecodeGenerator::breakTarget(const Identifier& name)
{
    // Reclaim free label scopes.
    //
    // The condition was previously coded as 'm_labelScopes.size() && !m_labelScopes.last().refCount()',
    // however sometimes this appears to lead to GCC going a little haywire and entering the loop with
    // size 0, leading to segfaulty badness. We are yet to identify a valid cause within our code to
    // cause the GCC codegen to misbehave in this fashion, and as such the following refactoring of the
    // loop condition is a workaround.
    while (m_labelScopes.size()) {
        if (m_labelScopes.last().refCount())
            break;
        m_labelScopes.removeLast();
    }

    if (!m_labelScopes.size())
        return 0;

    // We special-case the following, which is a syntax error in Firefox:
    // label:
    //     break;
    if (name.isEmpty()) {
        for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
            LabelScope* scope = &m_labelScopes[i];
            if (scope->type() != LabelScope::NamedLabel) {
                ASSERT(scope->breakTarget());
                return scope;
            }
        }
        return 0;
    }

    for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
        LabelScope* scope = &m_labelScopes[i];
        if (scope->name() && *scope->name() == name) {
            ASSERT(scope->breakTarget());
            return scope;
        }
    }
    return 0;
}

LabelScope* BytecodeGenerator::continueTarget(const Identifier& name)
{
    // Reclaim free label scopes.
    while (m_labelScopes.size() && !m_labelScopes.last().refCount())
        m_labelScopes.removeLast();

    if (!m_labelScopes.size())
        return 0;

    if (name.isEmpty()) {
        for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
            LabelScope* scope = &m_labelScopes[i];
            if (scope->type() == LabelScope::Loop) {
                ASSERT(scope->continueTarget());
                return scope;
            }
        }
        return 0;
    }

    // Continue to the loop nested nearest to the label scope that matches
    // 'name'.
    LabelScope* result = 0;
    for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
        LabelScope* scope = &m_labelScopes[i];
        if (scope->type() == LabelScope::Loop) {
            ASSERT(scope->continueTarget());
            result = scope;
        }
        if (scope->name() && *scope->name() == name)
            return result; // may be 0
    }
    return 0;
}

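// Informal example: given
//     outer: for (;;) { for (;;) { continue outer; } }
// the walk above runs from the innermost label scope outward, remembering the loop seen
// most recently, so 'continue outer' resolves to the outer loop -- the loop nested
// nearest to the matching label scope.
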
PassRefPtr<Label> BytecodeGenerator::emitComplexJumpScopes(Label* target, ControlFlowContext* topScope, ControlFlowContext* bottomScope)
{
    while (topScope > bottomScope) {
        // First we count the number of dynamic scopes we need to remove to get
        // to a finally block.
        int nNormalScopes = 0;
        while (topScope > bottomScope) {
            if (topScope->isFinallyBlock)
                break;
            ++nNormalScopes;
            --topScope;
        }

        if (nNormalScopes) {
            size_t begin = instructions().size();

            // We need to remove a number of dynamic scopes to get to the next
            // finally block.
            emitOpcode(op_jmp_scopes);
            instructions().append(nNormalScopes);

            // If topScope == bottomScope then there isn't actually a finally block
            // left to emit, so make the jmp_scopes jump directly to the target label.
            if (topScope == bottomScope) {
                instructions().append(target->bind(begin, instructions().size()));
                return target;
            }

            // Otherwise we just use jmp_scopes to pop a group of scopes and go
            // to the next instruction.
            RefPtr<Label> nextInsn = newLabel();
            instructions().append(nextInsn->bind(begin, instructions().size()));
            emitLabel(nextInsn.get());
        }

        while (topScope > bottomScope && topScope->isFinallyBlock) {
            emitJumpSubroutine(topScope->finallyContext.retAddrDst, topScope->finallyContext.finallyAddr);
            --topScope;
        }
    }
    return emitJump(target);
}

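// Informal sketch: jumping out of nested dynamic scopes and try/finally blocks walks
// the context stack from the innermost scope outward, emitting jmp_scopes to pop each
// run of ordinary dynamic scopes and a subroutine jump (emitJumpSubroutine) for each
// finally block on the way, finishing with a plain jump to the target once nothing
// remains between the jump site and its destination.
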
PassRefPtr<Label> BytecodeGenerator::emitJumpScopes(Label* target, int targetScopeDepth)
{
    ASSERT(scopeDepth() - targetScopeDepth >= 0);
    ASSERT(target->isForward());

    size_t scopeDelta = scopeDepth() - targetScopeDepth;
    ASSERT(scopeDelta <= m_scopeContextStack.size());
    if (!scopeDelta)
        return emitJump(target);

    if (m_finallyDepth)
        return emitComplexJumpScopes(target, &m_scopeContextStack.last(), &m_scopeContextStack.last() - scopeDelta);

    size_t begin = instructions().size();

    emitOpcode(op_jmp_scopes);
    instructions().append(scopeDelta);
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}

RegisterID* BytecodeGenerator::emitGetPropertyNames(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, Label* breakTarget)
{
    size_t begin = instructions().size();

    emitOpcode(op_get_pnames);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(i->index());
    instructions().append(size->index());
    instructions().append(breakTarget->bind(begin, instructions().size()));
    return dst;
}

RegisterID* BytecodeGenerator::emitNextPropertyName(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, RegisterID* iter, Label* target)
{
    size_t begin = instructions().size();

    emitOpcode(op_next_pname);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(i->index());
    instructions().append(size->index());
    instructions().append(iter->index());
    instructions().append(target->bind(begin, instructions().size()));
    return dst;
}

RegisterID* BytecodeGenerator::emitCatch(RegisterID* targetRegister, Label* start, Label* end)
{
    m_usesExceptions = true;
#if ENABLE(JIT)
    HandlerInfo info = { start->bind(0, 0), end->bind(0, 0), instructions().size(), m_dynamicScopeDepth + m_baseScopeDepth, CodeLocationLabel() };
#else
    HandlerInfo info = { start->bind(0, 0), end->bind(0, 0), instructions().size(), m_dynamicScopeDepth + m_baseScopeDepth };
#endif

    m_codeBlock->addExceptionHandler(info);
    emitOpcode(op_catch);
    instructions().append(targetRegister->index());
    return targetRegister;
}

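// Informal note: each handler record stores the bytecode range it protects (start/end),
// the offset of the op_catch just emitted as the handler target, and the dynamic scope
// depth to restore when the handler is entered; the JIT configuration reserves an extra
// slot for the handler's native code location.
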
void BytecodeGenerator::emitThrowReferenceError(const UString& message)
{
    emitOpcode(op_throw_reference_error);
    instructions().append(addConstantValue(jsString(globalData(), message))->index());
}

PassRefPtr<Label> BytecodeGenerator::emitJumpSubroutine(RegisterID* retAddrDst, Label* finally)
{
    size_t begin = instructions().size();

    emitOpcode(op_jsr);
    instructions().append(retAddrDst->index());
    instructions().append(finally->bind(begin, instructions().size()));
    emitLabel(newLabel().get()); // Record the fact that the next instruction is implicitly labeled, because op_sret will return to it.
    return finally;
}

void BytecodeGenerator::emitSubroutineReturn(RegisterID* retAddrSrc)
{
    emitOpcode(op_sret);
    instructions().append(retAddrSrc->index());
}

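// Informal example (assumed shape): a try/finally body reaches its finally clause via
//     jsr   retAddrDst, finallyLabel
// and the finally clause ends with
//     sret  retAddrSrc
// which jumps back to the address recorded by the jsr, i.e. the implicit label emitted
// right after it in emitJumpSubroutine above.
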
void BytecodeGenerator::emitPushNewScope(RegisterID* dst, const Identifier& property, RegisterID* value)
{
    ControlFlowContext context;
    context.isFinallyBlock = false;
    m_scopeContextStack.append(context);
    m_dynamicScopeDepth++;

    emitOpcode(op_push_new_scope);
    instructions().append(dst->index());
    instructions().append(addConstant(property));
    instructions().append(value->index());
}

void BytecodeGenerator::beginSwitch(RegisterID* scrutineeRegister, SwitchInfo::SwitchType type)
{
    SwitchInfo info = { instructions().size(), type };
    switch (type) {
    case SwitchInfo::SwitchImmediate:
        emitOpcode(op_switch_imm);
        break;
    case SwitchInfo::SwitchCharacter:
        emitOpcode(op_switch_char);
        break;
    case SwitchInfo::SwitchString:
        emitOpcode(op_switch_string);
        break;
    default:
        ASSERT_NOT_REACHED();
    }

    instructions().append(0); // placeholder for table index
    instructions().append(0); // placeholder for default target
    instructions().append(scrutineeRegister->index());
    m_switchContextStack.append(info);
}

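// Informal note: the switch opcodes take three operands -- a jump-table index, a
// default-target offset, and the scrutinee register. The first two are written as 0
// here and patched in endSwitch below, once the table has been built and the default
// label is known.
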
static int32_t keyForImmediateSwitch(ExpressionNode* node, int32_t min, int32_t max)
{
    UNUSED_PARAM(max);
    ASSERT(node->isNumber());
    double value = static_cast<NumberNode*>(node)->value();
    int32_t key = static_cast<int32_t>(value);
    ASSERT(key == value);
    ASSERT(key >= min);
    ASSERT(key <= max);
    return key - min;
}

static void prepareJumpTableForImmediateSwitch(SimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max)
{
    jumpTable.min = min;
    jumpTable.branchOffsets.resize(max - min + 1);
    jumpTable.branchOffsets.fill(0);
    for (uint32_t i = 0; i < clauseCount; ++i) {
        // We're emitting this after the clause labels should have been fixed, so
        // the labels should not be "forward" references.
        ASSERT(!labels[i]->isForward());
        jumpTable.add(keyForImmediateSwitch(nodes[i], min, max), labels[i]->bind(switchAddress, switchAddress + 3));
    }
}

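// Informal example (assuming keys are stored relative to 'min', as above): for clauses
// 3, 5 and 7 with min = 3 and max = 7, branchOffsets has five slots; clause 3 lands in
// slot 0, clause 5 in slot 2 and clause 7 in slot 4, while the untouched slots keep the
// fill value 0 and fall back to the default target at dispatch time.
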
static int32_t keyForCharacterSwitch(ExpressionNode* node, int32_t min, int32_t max)
{
    UNUSED_PARAM(max);
    ASSERT(node->isString());
    StringImpl* clause = static_cast<StringNode*>(node)->value().impl();
    ASSERT(clause->length() == 1);

    int32_t key = clause->characters()[0];
    ASSERT(key >= min);
    ASSERT(key <= max);
    return key - min;
}

static void prepareJumpTableForCharacterSwitch(SimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max)
{
    jumpTable.min = min;
    jumpTable.branchOffsets.resize(max - min + 1);
    jumpTable.branchOffsets.fill(0);
    for (uint32_t i = 0; i < clauseCount; ++i) {
        // We're emitting this after the clause labels should have been fixed, so
        // the labels should not be "forward" references.
        ASSERT(!labels[i]->isForward());
        jumpTable.add(keyForCharacterSwitch(nodes[i], min, max), labels[i]->bind(switchAddress, switchAddress + 3));
    }
}

static void prepareJumpTableForStringSwitch(StringJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes)
{
    for (uint32_t i = 0; i < clauseCount; ++i) {
        // We're emitting this after the clause labels should have been fixed, so
        // the labels should not be "forward" references.
        ASSERT(!labels[i]->isForward());

        ASSERT(nodes[i]->isString());
        StringImpl* clause = static_cast<StringNode*>(nodes[i])->value().impl();
        OffsetLocation location;
        location.branchOffset = labels[i]->bind(switchAddress, switchAddress + 3);
        jumpTable.offsetTable.add(clause, location);
    }
}

void BytecodeGenerator::endSwitch(uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, Label* defaultLabel, int32_t min, int32_t max)
{
    SwitchInfo switchInfo = m_switchContextStack.last();
    m_switchContextStack.removeLast();
    if (switchInfo.switchType == SwitchInfo::SwitchImmediate) {
        instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfImmediateSwitchJumpTables();
        instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);

        SimpleJumpTable& jumpTable = m_codeBlock->addImmediateSwitchJumpTable();
        prepareJumpTableForImmediateSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max);
    } else if (switchInfo.switchType == SwitchInfo::SwitchCharacter) {
        instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfCharacterSwitchJumpTables();
        instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);

        SimpleJumpTable& jumpTable = m_codeBlock->addCharacterSwitchJumpTable();
        prepareJumpTableForCharacterSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max);
    } else {
        ASSERT(switchInfo.switchType == SwitchInfo::SwitchString);
        instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfStringSwitchJumpTables();
        instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);

        StringJumpTable& jumpTable = m_codeBlock->addStringSwitchJumpTable();
        prepareJumpTableForStringSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes);
    }
}

RegisterID* BytecodeGenerator::emitThrowExpressionTooDeepException()
{
    // It would be nice to do an even better job of identifying exactly where the expression is.
    // And we could make the caller pass the node pointer in, if there was some way of getting
    // that from an arbitrary node. However, calling emitExpressionInfo without any useful data
    // is still good enough to get us an accurate line number.
    m_expressionTooDeep = true;
    return newTemporary();
}

void BytecodeGenerator::setIsNumericCompareFunction(bool isNumericCompareFunction)
{
    m_codeBlock->setIsNumericCompareFunction(isNumericCompareFunction);
}

int BytecodeGenerator::argumentNumberFor(const Identifier& ident)
{
    int parameterCount = m_parameters.size(); // includes 'this'
    RegisterID* registerID = registerFor(ident);
    if (!registerID)
        return 0;
    int index = registerID->index() + RegisterFile::CallFrameHeaderSize + parameterCount;
    return (index > 0 && index < parameterCount) ? index : 0;
}
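
// Informal note (interpretation of the arithmetic above): parameters live at negative
// register indices just below the call frame header, so adding CallFrameHeaderSize and
// parameterCount appears to map the first named parameter to 1, the second to 2, and so
// on, while 'this' maps to 0 and any non-parameter register falls outside
// (0, parameterCount), making 0 the "not an argument" result.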