/*
 * Copyright (C) 2008, 2009 Apple Inc. All rights reserved.
 * Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1.  Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 * 2.  Redistributions in binary form must reproduce the above copyright
 *     notice, this list of conditions and the following disclaimer in the
 *     documentation and/or other materials provided with the distribution.
 * 3.  Neither the name of Apple Computer, Inc. ("Apple") nor the names of
 *     its contributors may be used to endorse or promote products derived
 *     from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include "config.h"
#include "BytecodeGenerator.h"

#include "BatchedTransitionOptimizer.h"
#include "PrototypeFunction.h"
#include "JSFunction.h"
#include "Interpreter.h"

using namespace std;

namespace JSC {
/*
    The layout of a register frame looks like this:

    For

    function f(x, y) {
        var v1;
        function g() { }
        var v2;
        return (x) * (y);
    }

    assuming (x) and (y) generated temporaries t1 and t2, you would have

    ------------------------------------
    |  x |  y |  g | v2 | v1 | t1 | t2 | <-- value held
    ------------------------------------
    | -5 | -4 | -3 | -2 | -1 | +0 | +1 | <-- register index
    ------------------------------------
    | params->|<-locals      | temps->

    Because temporary registers are allocated in a stack-like fashion, we
    can reclaim them with a simple popping algorithm. The same goes for labels.
    (We never reclaim parameter or local registers, because parameters and
    locals are DontDelete.)

    The register layout before a function call looks like this:

    For

    function f(x, y)
    {
    }

    f(1);

    >                        <------------------------------
    <                        >  reserved: call frame  |  1 | <-- value held
    >         >snip<         <------------------------------
    <                        > +0 | +1 | +2 | +3 | +4 | +5 | <-- register index
    >                        <------------------------------
    | params->|<-locals      | temps->

    The call instruction fills in the "call frame" registers. It also pads
    missing arguments at the end of the call:

    >                        <-----------------------------------
    <                        >  reserved: call frame  |  1 |  ? | <-- value held ("?" stands for "undefined")
    >         >snip<         <-----------------------------------
    <                        > +0 | +1 | +2 | +3 | +4 | +5 | +6 | <-- register index
    >                        <-----------------------------------
    | params->|<-locals      | temps->

    After filling in missing arguments, the call instruction sets up the new
    stack frame to overlap the end of the old stack frame:

    |---------------------------------->                        <
    |  reserved: call frame  |  1 |  ? <                        > <-- value held ("?" stands for "undefined")
    |---------------------------------->         >snip<         <
    | -7 | -6 | -5 | -4 | -3 | -2 | -1 <                        > <-- register index
    |---------------------------------->                        <
    |                        | params->|<-locals       | temps->

    That way, arguments are "copied" into the callee's stack frame for free.

    If the caller supplies too many arguments, this trick doesn't work. The
    extra arguments protrude into space reserved for locals and temporaries.
    In that case, the call instruction makes a real copy of the call frame header,
    along with just the arguments expected by the callee, leaving the original
    call frame header and arguments behind. (The call instruction can't just discard
    extra arguments, because the "arguments" object may access them later.)
    This copying strategy ensures that all named values will be at the indices
    expected by the callee.
*/
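/*
    Illustrative example (not part of the original comment): if a callee declared
    as f(x, y) is invoked as f(1, 2, 3), the extra argument would overlap the
    registers reserved for f's locals and temporaries, so the call instruction
    copies the call frame header plus just the two expected arguments to a fresh
    register range, leaving the original header and all three arguments behind
    for a possible "arguments" object.
*/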
#ifndef NDEBUG
static bool s_dumpsGeneratedCode = false;
#endif

void BytecodeGenerator::setDumpsGeneratedCode(bool dumpsGeneratedCode)
{
#ifndef NDEBUG
    s_dumpsGeneratedCode = dumpsGeneratedCode;
#else
    UNUSED_PARAM(dumpsGeneratedCode);
#endif
}

bool BytecodeGenerator::dumpsGeneratedCode()
{
#ifndef NDEBUG
    return s_dumpsGeneratedCode;
#else
    return false;
#endif
}
void BytecodeGenerator::generate()
{
    m_codeBlock->setThisRegister(m_thisRegister.index());

    m_scopeNode->emitBytecode(*this);

#ifndef NDEBUG
    m_codeBlock->setInstructionCount(m_codeBlock->instructions().size());

    if (s_dumpsGeneratedCode)
        m_codeBlock->dump(m_scopeChain->globalObject()->globalExec());
#endif

    if ((m_codeType == FunctionCode && !m_codeBlock->needsFullScopeChain() && !m_codeBlock->usesArguments()) || m_codeType == EvalCode)
        symbolTable().clear();

    m_codeBlock->setIsNumericCompareFunction(instructions() == m_globalData->numericCompareFunction(m_scopeChain->globalObject()->globalExec()));

#if !ENABLE(OPCODE_SAMPLING)
    if (!m_regeneratingForExceptionInfo && (m_codeType == FunctionCode || m_codeType == EvalCode))
        m_codeBlock->clearExceptionInfo();
#endif

    m_codeBlock->shrinkToFit();
}
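// Note on the helpers below: addVar() and addGlobalVar() return true only when
// the identifier was newly added to the symbol table; either way they hand the
// register the name resolves to back through r0.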
bool BytecodeGenerator::addVar(const Identifier& ident, bool isConstant, RegisterID*& r0)
{
    int index = m_calleeRegisters.size();
    SymbolTableEntry newEntry(index, isConstant ? ReadOnly : 0);
    pair<SymbolTable::iterator, bool> result = symbolTable().add(ident.ustring().rep(), newEntry);

    if (!result.second) {
        r0 = &registerFor(result.first->second.getIndex());
        return false;
    }

    ++m_codeBlock->m_numVars;
    r0 = newRegister();
    return true;
}
bool BytecodeGenerator::addGlobalVar(const Identifier& ident, bool isConstant, RegisterID*& r0)
{
    int index = m_nextGlobalIndex;
    SymbolTableEntry newEntry(index, isConstant ? ReadOnly : 0);
    pair<SymbolTable::iterator, bool> result = symbolTable().add(ident.ustring().rep(), newEntry);

    if (!result.second)
        index = result.first->second.getIndex();
    else {
        --m_nextGlobalIndex;
        m_globals.append(index + m_globalVarStorageOffset);
    }

    r0 = &registerFor(index);
    return result.second;
}
void BytecodeGenerator::preserveLastVar()
{
    if ((m_firstConstantIndex = m_calleeRegisters.size()) != 0)
        m_lastVar = &m_calleeRegisters.last();
}
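// Program (global) code below: when the pre-existing symbol table plus the new
// declarations fit under the register file's maxGlobals() limit, new globals are
// given real global registers up front; otherwise they fall back to ordinary
// properties created with putWithAttributes().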
BytecodeGenerator::BytecodeGenerator(ProgramNode* programNode, const Debugger* debugger, const ScopeChain& scopeChain, SymbolTable* symbolTable, ProgramCodeBlock* codeBlock)
    : m_shouldEmitDebugHooks(!!debugger)
    , m_shouldEmitProfileHooks(scopeChain.globalObject()->supportsProfiling())
    , m_scopeChain(&scopeChain)
    , m_symbolTable(symbolTable)
    , m_scopeNode(programNode)
    , m_codeBlock(codeBlock)
    , m_thisRegister(RegisterFile::ProgramCodeThisRegister)
    , m_finallyDepth(0)
    , m_dynamicScopeDepth(0)
    , m_baseScopeDepth(0)
    , m_codeType(GlobalCode)
    , m_nextGlobalIndex(-1)
    , m_nextConstantOffset(0)
    , m_globalConstantIndex(0)
    , m_globalData(&scopeChain.globalObject()->globalExec()->globalData())
    , m_lastOpcodeID(op_end)
    , m_emitNodeDepth(0)
    , m_regeneratingForExceptionInfo(false)
    , m_codeBlockBeingRegeneratedFrom(0)
{
    if (m_shouldEmitDebugHooks)
        m_codeBlock->setNeedsFullScopeChain(true);

    emitOpcode(op_enter);
    codeBlock->setGlobalData(m_globalData);

    // FIXME: Move code that modifies the global object to Interpreter::execute.

    m_codeBlock->m_numParameters = 1; // Allocate space for "this"

    JSGlobalObject* globalObject = scopeChain.globalObject();
    ExecState* exec = globalObject->globalExec();
    RegisterFile* registerFile = &exec->globalData().interpreter->registerFile();

    // Shift register indexes in generated code to elide registers allocated by intermediate stack frames.
    m_globalVarStorageOffset = -RegisterFile::CallFrameHeaderSize - m_codeBlock->m_numParameters - registerFile->size();

    // Add previously defined symbols to bookkeeping.
    m_globals.grow(symbolTable->size());
    SymbolTable::iterator end = symbolTable->end();
    for (SymbolTable::iterator it = symbolTable->begin(); it != end; ++it)
        registerFor(it->second.getIndex()).setIndex(it->second.getIndex() + m_globalVarStorageOffset);

    BatchedTransitionOptimizer optimizer(globalObject);

    const VarStack& varStack = programNode->varStack();
    const FunctionStack& functionStack = programNode->functionStack();
    bool canOptimizeNewGlobals = symbolTable->size() + functionStack.size() + varStack.size() < registerFile->maxGlobals();
    if (canOptimizeNewGlobals) {
        // Shift new symbols so they get stored prior to existing symbols.
        m_nextGlobalIndex -= symbolTable->size();

        for (size_t i = 0; i < functionStack.size(); ++i) {
            FuncDeclNode* funcDecl = functionStack[i];
            globalObject->removeDirect(funcDecl->m_ident); // Make sure our new function is not shadowed by an old property.
            emitNewFunction(addGlobalVar(funcDecl->m_ident, false), funcDecl);
        }

        Vector<RegisterID*, 32> newVars;
        for (size_t i = 0; i < varStack.size(); ++i)
            if (!globalObject->hasProperty(exec, varStack[i].first))
                newVars.append(addGlobalVar(varStack[i].first, varStack[i].second & DeclarationStacks::IsConstant));

        preserveLastVar();

        for (size_t i = 0; i < newVars.size(); ++i)
            emitLoad(newVars[i], jsUndefined());
    } else {
        for (size_t i = 0; i < functionStack.size(); ++i) {
            FuncDeclNode* funcDecl = functionStack[i];
            globalObject->putWithAttributes(exec, funcDecl->m_ident, funcDecl->makeFunction(exec, scopeChain.node()), DontDelete);
        }

        for (size_t i = 0; i < varStack.size(); ++i) {
            if (globalObject->hasProperty(exec, varStack[i].first))
                continue;
            int attributes = DontDelete;
            if (varStack[i].second & DeclarationStacks::IsConstant)
                attributes |= ReadOnly;
            globalObject->putWithAttributes(exec, varStack[i].first, jsUndefined(), attributes);
        }

        preserveLastVar();
    }
}
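// Function code below: callee registers are handed out in declaration order --
// the optional arguments/activation setup first, then function declarations,
// then vars -- while parameters (including the implicit "this") are assigned
// negative indices just below the call frame header.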
BytecodeGenerator::BytecodeGenerator(FunctionBodyNode* functionBody, const Debugger* debugger, const ScopeChain& scopeChain, SymbolTable* symbolTable, CodeBlock* codeBlock)
    : m_shouldEmitDebugHooks(!!debugger)
    , m_shouldEmitProfileHooks(scopeChain.globalObject()->supportsProfiling())
    , m_scopeChain(&scopeChain)
    , m_symbolTable(symbolTable)
    , m_scopeNode(functionBody)
    , m_codeBlock(codeBlock)
    , m_finallyDepth(0)
    , m_dynamicScopeDepth(0)
    , m_baseScopeDepth(0)
    , m_codeType(FunctionCode)
    , m_nextConstantOffset(0)
    , m_globalConstantIndex(0)
    , m_globalData(&scopeChain.globalObject()->globalExec()->globalData())
    , m_lastOpcodeID(op_end)
    , m_emitNodeDepth(0)
    , m_regeneratingForExceptionInfo(false)
    , m_codeBlockBeingRegeneratedFrom(0)
{
    if (m_shouldEmitDebugHooks)
        m_codeBlock->setNeedsFullScopeChain(true);

    codeBlock->setGlobalData(m_globalData);

    bool usesArguments = functionBody->usesArguments();
    codeBlock->setUsesArguments(usesArguments);
    if (usesArguments) {
        m_argumentsRegister.setIndex(RegisterFile::OptionalCalleeArguments);
        addVar(propertyNames().arguments, false);
    }

    if (m_codeBlock->needsFullScopeChain()) {
        ++m_codeBlock->m_numVars;
        m_activationRegisterIndex = newRegister()->index();
        emitOpcode(op_enter_with_activation);
        instructions().append(m_activationRegisterIndex);
    } else
        emitOpcode(op_enter);

    if (usesArguments) {
        emitOpcode(op_init_arguments);

        // The debugger currently retrieves the arguments object from an activation rather than pulling
        // it from a call frame. In the long-term it should stop doing that (<rdar://problem/6911886>),
        // but for now we force eager creation of the arguments object when debugging.
        if (m_shouldEmitDebugHooks)
            emitOpcode(op_create_arguments);
    }

    const DeclarationStacks::FunctionStack& functionStack = functionBody->functionStack();
    for (size_t i = 0; i < functionStack.size(); ++i) {
        FuncDeclNode* funcDecl = functionStack[i];
        const Identifier& ident = funcDecl->m_ident;
        m_functions.add(ident.ustring().rep());
        emitNewFunction(addVar(ident, false), funcDecl);
    }

    const DeclarationStacks::VarStack& varStack = functionBody->varStack();
    for (size_t i = 0; i < varStack.size(); ++i)
        addVar(varStack[i].first, varStack[i].second & DeclarationStacks::IsConstant);

    const Identifier* parameters = functionBody->parameters();
    size_t parameterCount = functionBody->parameterCount();
    m_nextParameterIndex = -RegisterFile::CallFrameHeaderSize - parameterCount - 1;
    m_parameters.grow(1 + parameterCount); // reserve space for "this"

    // Add "this" as a parameter
    m_thisRegister.setIndex(m_nextParameterIndex);
    ++m_nextParameterIndex;
    ++m_codeBlock->m_numParameters;

    if (functionBody->usesThis() || m_shouldEmitDebugHooks) {
        emitOpcode(op_convert_this);
        instructions().append(m_thisRegister.index());
    }

    for (size_t i = 0; i < parameterCount; ++i)
        addParameter(parameters[i]);

    preserveLastVar();
}
BytecodeGenerator::BytecodeGenerator(EvalNode* evalNode, const Debugger* debugger, const ScopeChain& scopeChain, SymbolTable* symbolTable, EvalCodeBlock* codeBlock)
    : m_shouldEmitDebugHooks(!!debugger)
    , m_shouldEmitProfileHooks(scopeChain.globalObject()->supportsProfiling())
    , m_scopeChain(&scopeChain)
    , m_symbolTable(symbolTable)
    , m_scopeNode(evalNode)
    , m_codeBlock(codeBlock)
    , m_thisRegister(RegisterFile::ProgramCodeThisRegister)
    , m_finallyDepth(0)
    , m_dynamicScopeDepth(0)
    , m_baseScopeDepth(codeBlock->baseScopeDepth())
    , m_codeType(EvalCode)
    , m_nextConstantOffset(0)
    , m_globalConstantIndex(0)
    , m_globalData(&scopeChain.globalObject()->globalExec()->globalData())
    , m_lastOpcodeID(op_end)
    , m_emitNodeDepth(0)
    , m_regeneratingForExceptionInfo(false)
    , m_codeBlockBeingRegeneratedFrom(0)
{
    if (m_shouldEmitDebugHooks || m_baseScopeDepth)
        m_codeBlock->setNeedsFullScopeChain(true);

    emitOpcode(op_enter);
    codeBlock->setGlobalData(m_globalData);
    m_codeBlock->m_numParameters = 1; // Allocate space for "this"

    preserveLastVar();
}
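// Note (illustrative, not from the original comments): duplicate parameter
// names each still consume a register slot, but the symbol table keeps only the
// last one, so in function f(x, x) the body's "x" reads the second argument.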
RegisterID* BytecodeGenerator::addParameter(const Identifier& ident)
{
    // Parameters overwrite var declarations, but not function declarations.
    RegisterID* result = 0;
    UString::Rep* rep = ident.ustring().rep();
    if (!m_functions.contains(rep)) {
        symbolTable().set(rep, m_nextParameterIndex);
        RegisterID& parameter = registerFor(m_nextParameterIndex);
        parameter.setIndex(m_nextParameterIndex);
        result = &parameter;
    }

    // To maintain the calling convention, we have to allocate unique space for
    // each parameter, even if the parameter doesn't make it into the symbol table.
    ++m_nextParameterIndex;
    ++m_codeBlock->m_numParameters;
    return result;
}
RegisterID* BytecodeGenerator::registerFor(const Identifier& ident)
{
    if (ident == propertyNames().thisIdentifier)
        return &m_thisRegister;

    if (!shouldOptimizeLocals())
        return 0;

    SymbolTableEntry entry = symbolTable().get(ident.ustring().rep());
    if (entry.isNull())
        return 0;

    if (ident == propertyNames().arguments)
        createArgumentsIfNecessary();

    return &registerFor(entry.getIndex());
}
bool BytecodeGenerator::willResolveToArguments(const Identifier& ident)
{
    if (ident != propertyNames().arguments)
        return false;

    if (!shouldOptimizeLocals())
        return false;

    SymbolTableEntry entry = symbolTable().get(ident.ustring().rep());
    if (entry.isNull())
        return false;

    if (m_codeBlock->usesArguments() && m_codeType == FunctionCode)
        return true;

    return false;
}
RegisterID* BytecodeGenerator::uncheckedRegisterForArguments()
{
    ASSERT(willResolveToArguments(propertyNames().arguments));

    SymbolTableEntry entry = symbolTable().get(propertyNames().arguments.ustring().rep());
    ASSERT(!entry.isNull());
    return &registerFor(entry.getIndex());
}
RegisterID* BytecodeGenerator::constRegisterFor(const Identifier& ident)
{
    if (m_codeType == EvalCode)
        return 0;

    SymbolTableEntry entry = symbolTable().get(ident.ustring().rep());
    ASSERT(!entry.isNull());

    return &registerFor(entry.getIndex());
}
bool BytecodeGenerator::isLocal(const Identifier& ident)
{
    if (ident == propertyNames().thisIdentifier)
        return true;

    return shouldOptimizeLocals() && symbolTable().contains(ident.ustring().rep());
}
bool BytecodeGenerator::isLocalConstant(const Identifier& ident)
{
    return symbolTable().get(ident.ustring().rep()).isReadOnly();
}
RegisterID* BytecodeGenerator::newRegister()
{
    m_calleeRegisters.append(m_calleeRegisters.size());
    m_codeBlock->m_numCalleeRegisters = max<int>(m_codeBlock->m_numCalleeRegisters, m_calleeRegisters.size());
    return &m_calleeRegisters.last();
}
RegisterID* BytecodeGenerator::newTemporary()
{
    // Reclaim free register IDs.
    while (m_calleeRegisters.size() && !m_calleeRegisters.last().refCount())
        m_calleeRegisters.removeLast();

    RegisterID* result = newRegister();
    result->setTemporary();
    return result;
}
RegisterID* BytecodeGenerator::highestUsedRegister()
{
    size_t count = m_codeBlock->m_numCalleeRegisters;
    while (m_calleeRegisters.size() < count)
        newRegister();
    return &m_calleeRegisters.last();
}
PassRefPtr<LabelScope> BytecodeGenerator::newLabelScope(LabelScope::Type type, const Identifier* name)
{
    // Reclaim free label scopes.
    while (m_labelScopes.size() && !m_labelScopes.last().refCount())
        m_labelScopes.removeLast();

    // Allocate new label scope.
    LabelScope scope(type, name, scopeDepth(), newLabel(), type == LabelScope::Loop ? newLabel() : 0); // Only loops have continue targets.
    m_labelScopes.append(scope);
    return &m_labelScopes.last();
}
PassRefPtr<Label> BytecodeGenerator::newLabel()
{
    // Reclaim free label IDs.
    while (m_labels.size() && !m_labels.last().refCount())
        m_labels.removeLast();

    // Allocate new label ID.
    m_labels.append(m_codeBlock);
    return &m_labels.last();
}
PassRefPtr<Label> BytecodeGenerator::emitLabel(Label* l0)
{
    unsigned newLabelIndex = instructions().size();
    l0->setLocation(newLabelIndex);

    if (m_codeBlock->numberOfJumpTargets()) {
        unsigned lastLabelIndex = m_codeBlock->lastJumpTarget();
        ASSERT(lastLabelIndex <= newLabelIndex);
        if (newLabelIndex == lastLabelIndex) {
            // Peephole optimizations have already been disabled by emitting the last label
            return l0;
        }
    }

    m_codeBlock->addJumpTarget(newLabelIndex);

    // This disables peephole optimizations when an instruction is a jump target
    m_lastOpcodeID = op_end;
    return l0;
}
void BytecodeGenerator::emitOpcode(OpcodeID opcodeID)
{
    instructions().append(globalData()->interpreter->getOpcode(opcodeID));
    m_lastOpcodeID = opcodeID;
}
void BytecodeGenerator::retrieveLastBinaryOp(int& dstIndex, int& src1Index, int& src2Index)
{
    ASSERT(instructions().size() >= 4);
    size_t size = instructions().size();
    dstIndex = instructions().at(size - 3).u.operand;
    src1Index = instructions().at(size - 2).u.operand;
    src2Index = instructions().at(size - 1).u.operand;
}
void BytecodeGenerator::retrieveLastUnaryOp(int& dstIndex, int& srcIndex)
{
    ASSERT(instructions().size() >= 3);
    size_t size = instructions().size();
    dstIndex = instructions().at(size - 2).u.operand;
    srcIndex = instructions().at(size - 1).u.operand;
}
void ALWAYS_INLINE BytecodeGenerator::rewindBinaryOp()
{
    ASSERT(instructions().size() >= 4);
    instructions().shrink(instructions().size() - 4);
}
void ALWAYS_INLINE BytecodeGenerator::rewindUnaryOp()
{
    ASSERT(instructions().size() >= 3);
    instructions().shrink(instructions().size() - 3);
}
PassRefPtr<Label> BytecodeGenerator::emitJump(Label* target)
{
    emitOpcode(target->isForward() ? op_jmp : op_loop);
    instructions().append(target->offsetFrom(instructions().size()));
    return target;
}
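// The conditional-jump emitters below peephole-fuse an immediately preceding
// compare into a single compare-and-branch opcode when the compare result is an
// otherwise unused temporary. Illustrative example: the back edge of
// "while (i < 10)" becomes one op_loop_if_less instead of op_less followed by
// op_loop_if_true.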
PassRefPtr<Label> BytecodeGenerator::emitJumpIfTrue(RegisterID* cond, Label* target)
{
    if (m_lastOpcodeID == op_less && !target->isForward()) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();
            emitOpcode(op_loop_if_less);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->offsetFrom(instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_lesseq && !target->isForward()) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();
            emitOpcode(op_loop_if_lesseq);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->offsetFrom(instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();
            emitOpcode(op_jeq_null);
            instructions().append(srcIndex);
            instructions().append(target->offsetFrom(instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();
            emitOpcode(op_jneq_null);
            instructions().append(srcIndex);
            instructions().append(target->offsetFrom(instructions().size()));
            return target;
        }
    }

    emitOpcode(target->isForward() ? op_jtrue : op_loop_if_true);
    instructions().append(cond->index());
    instructions().append(target->offsetFrom(instructions().size()));
    return target;
}
PassRefPtr<Label> BytecodeGenerator::emitJumpIfFalse(RegisterID* cond, Label* target)
{
    ASSERT(target->isForward());

    if (m_lastOpcodeID == op_less) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();
            emitOpcode(op_jnless);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->offsetFrom(instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_lesseq) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();
            emitOpcode(op_jnlesseq);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->offsetFrom(instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_not) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();
            emitOpcode(op_jtrue);
            instructions().append(srcIndex);
            instructions().append(target->offsetFrom(instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_eq_null) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();
            emitOpcode(op_jneq_null);
            instructions().append(srcIndex);
            instructions().append(target->offsetFrom(instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_neq_null) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();
            emitOpcode(op_jeq_null);
            instructions().append(srcIndex);
            instructions().append(target->offsetFrom(instructions().size()));
            return target;
        }
    }

    emitOpcode(op_jfalse);
    instructions().append(cond->index());
    instructions().append(target->offsetFrom(instructions().size()));
    return target;
}
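// The same fusion applies to forward branches above: for "if (a < b)" whose
// result feeds only the branch, op_less + op_jfalse collapses into a single
// op_jnless that skips the then-block (illustrative example).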
750 PassRefPtr
<Label
> BytecodeGenerator::emitJumpIfNotFunctionCall(RegisterID
* cond
, Label
* target
)
752 emitOpcode(op_jneq_ptr
);
753 instructions().append(cond
->index());
754 instructions().append(m_scopeChain
->globalObject()->d()->callFunction
);
755 instructions().append(target
->offsetFrom(instructions().size()));
759 PassRefPtr
<Label
> BytecodeGenerator::emitJumpIfNotFunctionApply(RegisterID
* cond
, Label
* target
)
761 emitOpcode(op_jneq_ptr
);
762 instructions().append(cond
->index());
763 instructions().append(m_scopeChain
->globalObject()->d()->applyFunction
);
764 instructions().append(target
->offsetFrom(instructions().size()));
768 unsigned BytecodeGenerator::addConstant(FuncDeclNode
* n
)
770 // No need to explicitly unique function body nodes -- they're unique already.
771 return m_codeBlock
->addFunction(n
);
774 unsigned BytecodeGenerator::addConstant(FuncExprNode
* n
)
776 // No need to explicitly unique function expression nodes -- they're unique already.
777 return m_codeBlock
->addFunctionExpression(n
);
780 unsigned BytecodeGenerator::addConstant(const Identifier
& ident
)
782 UString::Rep
* rep
= ident
.ustring().rep();
783 pair
<IdentifierMap::iterator
, bool> result
= m_identifierMap
.add(rep
, m_codeBlock
->numberOfIdentifiers());
784 if (result
.second
) // new entry
785 m_codeBlock
->addIdentifier(Identifier(m_globalData
, rep
));
787 return result
.first
->second
;
790 RegisterID
* BytecodeGenerator::addConstantValue(JSValue v
)
792 int index
= m_nextConstantOffset
;
794 pair
<JSValueMap::iterator
, bool> result
= m_jsValueMap
.add(JSValue::encode(v
), m_nextConstantOffset
);
796 m_constantPoolRegisters
.append(FirstConstantRegisterIndex
+ m_nextConstantOffset
);
797 ++m_nextConstantOffset
;
798 m_codeBlock
->addConstantRegister(JSValue(v
));
800 index
= result
.first
->second
;
802 return &m_constantPoolRegisters
[index
];
805 unsigned BytecodeGenerator::addRegExp(RegExp
* r
)
807 return m_codeBlock
->addRegExp(r
);
RegisterID* BytecodeGenerator::emitMove(RegisterID* dst, RegisterID* src)
{
    emitOpcode(op_mov);
    instructions().append(dst->index());
    instructions().append(src->index());
    return dst;
}
818 RegisterID
* BytecodeGenerator::emitUnaryOp(OpcodeID opcodeID
, RegisterID
* dst
, RegisterID
* src
)
820 emitOpcode(opcodeID
);
821 instructions().append(dst
->index());
822 instructions().append(src
->index());
826 RegisterID
* BytecodeGenerator::emitPreInc(RegisterID
* srcDst
)
828 emitOpcode(op_pre_inc
);
829 instructions().append(srcDst
->index());
833 RegisterID
* BytecodeGenerator::emitPreDec(RegisterID
* srcDst
)
835 emitOpcode(op_pre_dec
);
836 instructions().append(srcDst
->index());
840 RegisterID
* BytecodeGenerator::emitPostInc(RegisterID
* dst
, RegisterID
* srcDst
)
842 emitOpcode(op_post_inc
);
843 instructions().append(dst
->index());
844 instructions().append(srcDst
->index());
848 RegisterID
* BytecodeGenerator::emitPostDec(RegisterID
* dst
, RegisterID
* srcDst
)
850 emitOpcode(op_post_dec
);
851 instructions().append(dst
->index());
852 instructions().append(srcDst
->index());
856 RegisterID
* BytecodeGenerator::emitBinaryOp(OpcodeID opcodeID
, RegisterID
* dst
, RegisterID
* src1
, RegisterID
* src2
, OperandTypes types
)
858 emitOpcode(opcodeID
);
859 instructions().append(dst
->index());
860 instructions().append(src1
->index());
861 instructions().append(src2
->index());
863 if (opcodeID
== op_bitor
|| opcodeID
== op_bitand
|| opcodeID
== op_bitxor
||
864 opcodeID
== op_add
|| opcodeID
== op_mul
|| opcodeID
== op_sub
|| opcodeID
== op_div
)
865 instructions().append(types
.toInt());
RegisterID* BytecodeGenerator::emitEqualityOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2)
{
    if (m_lastOpcodeID == op_typeof) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (src1->index() == dstIndex
            && src1->isTemporary()
            && m_codeBlock->isConstantRegisterIndex(src2->index())
            && m_codeBlock->constantRegister(src2->index()).jsValue().isString()) {
            const UString& value = asString(m_codeBlock->constantRegister(src2->index()).jsValue())->value();
            if (value == "undefined") {
                rewindUnaryOp();
                emitOpcode(op_is_undefined);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "boolean") {
                rewindUnaryOp();
                emitOpcode(op_is_boolean);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "number") {
                rewindUnaryOp();
                emitOpcode(op_is_number);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "string") {
                rewindUnaryOp();
                emitOpcode(op_is_string);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "object") {
                rewindUnaryOp();
                emitOpcode(op_is_object);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "function") {
                rewindUnaryOp();
                emitOpcode(op_is_function);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
        }
    }

    emitOpcode(opcodeID);
    instructions().append(dst->index());
    instructions().append(src1->index());
    instructions().append(src2->index());
    return dst;
}
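// The typeof fusion above means a comparison such as (typeof x == "number") can
// be emitted as a single op_is_number on x, without materializing the typeof
// result string (illustrative example).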
935 RegisterID
* BytecodeGenerator::emitLoad(RegisterID
* dst
, bool b
)
937 return emitLoad(dst
, jsBoolean(b
));
940 RegisterID
* BytecodeGenerator::emitLoad(RegisterID
* dst
, double number
)
942 // FIXME: Our hash tables won't hold infinity, so we make a new JSNumberCell each time.
943 // Later we can do the extra work to handle that like the other cases.
944 if (number
== HashTraits
<double>::emptyValue() || HashTraits
<double>::isDeletedValue(number
))
945 return emitLoad(dst
, jsNumber(globalData(), number
));
946 JSValue
& valueInMap
= m_numberMap
.add(number
, JSValue()).first
->second
;
948 valueInMap
= jsNumber(globalData(), number
);
949 return emitLoad(dst
, valueInMap
);
952 RegisterID
* BytecodeGenerator::emitLoad(RegisterID
* dst
, const Identifier
& identifier
)
954 JSString
*& stringInMap
= m_stringMap
.add(identifier
.ustring().rep(), 0).first
->second
;
956 stringInMap
= jsOwnedString(globalData(), identifier
.ustring());
957 return emitLoad(dst
, JSValue(stringInMap
));
960 RegisterID
* BytecodeGenerator::emitLoad(RegisterID
* dst
, JSValue v
)
962 RegisterID
* constantID
= addConstantValue(v
);
964 return emitMove(dst
, constantID
);
bool BytecodeGenerator::findScopedProperty(const Identifier& property, int& index, size_t& stackDepth, bool forWriting, JSObject*& globalObject)
{
    // Cases where we cannot statically optimize the lookup.
    if (property == propertyNames().arguments || !canOptimizeNonLocals()) {
        stackDepth = 0;
        index = missingSymbolMarker();

        if (shouldOptimizeLocals() && m_codeType == GlobalCode) {
            ScopeChainIterator iter = m_scopeChain->begin();
            globalObject = *iter;
            ASSERT((++iter) == m_scopeChain->end());
        }

        return false;
    }

    size_t depth = 0;

    ScopeChainIterator iter = m_scopeChain->begin();
    ScopeChainIterator end = m_scopeChain->end();
    for (; iter != end; ++iter, ++depth) {
        JSObject* currentScope = *iter;
        if (!currentScope->isVariableObject())
            break;
        JSVariableObject* currentVariableObject = static_cast<JSVariableObject*>(currentScope);
        SymbolTableEntry entry = currentVariableObject->symbolTable().get(property.ustring().rep());

        // Found the property
        if (!entry.isNull()) {
            if (entry.isReadOnly() && forWriting) {
                stackDepth = 0;
                index = missingSymbolMarker();
                if (shouldOptimizeLocals() && m_codeType == GlobalCode)
                    globalObject = currentVariableObject;
                return false;
            }
            stackDepth = depth;
            index = entry.getIndex();
            if (shouldOptimizeLocals() && m_codeType == GlobalCode)
                globalObject = currentVariableObject;
            return true;
        }
        if (currentVariableObject->isDynamicScope())
            break;
    }

    // Can't locate the property but we're able to avoid a few lookups.
    stackDepth = depth;
    index = missingSymbolMarker();
    JSObject* scope = *iter;
    if (++iter == end)
        globalObject = scope;
    return true;
}
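// findScopedProperty() reports through its out-parameters how far down the
// scope chain a statically resolvable variable lives (stackDepth/index), or
// flags it with missingSymbolMarker(); the walk stops at the first dynamic
// scope (e.g. one introduced by "with"), since such scopes defeat static lookup.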
1022 RegisterID
* BytecodeGenerator::emitInstanceOf(RegisterID
* dst
, RegisterID
* value
, RegisterID
* base
, RegisterID
* basePrototype
)
1024 emitOpcode(op_instanceof
);
1025 instructions().append(dst
->index());
1026 instructions().append(value
->index());
1027 instructions().append(base
->index());
1028 instructions().append(basePrototype
->index());
RegisterID* BytecodeGenerator::emitResolve(RegisterID* dst, const Identifier& property)
{
    size_t depth = 0;
    int index = 0;
    JSObject* globalObject = 0;
    if (!findScopedProperty(property, index, depth, false, globalObject) && !globalObject) {
        // We can't optimise at all :-(
        emitOpcode(op_resolve);
        instructions().append(dst->index());
        instructions().append(addConstant(property));
        return dst;
    }

    if (globalObject) {
        bool forceGlobalResolve = false;
        if (m_regeneratingForExceptionInfo) {
#if ENABLE(JIT)
            forceGlobalResolve = m_codeBlockBeingRegeneratedFrom->hasGlobalResolveInfoAtBytecodeOffset(instructions().size());
#else
            forceGlobalResolve = m_codeBlockBeingRegeneratedFrom->hasGlobalResolveInstructionAtBytecodeOffset(instructions().size());
#endif
        }

        if (index != missingSymbolMarker() && !forceGlobalResolve) {
            // Directly index the property lookup across multiple scopes.
            return emitGetScopedVar(dst, depth, index, globalObject);
        }

#if ENABLE(JIT)
        m_codeBlock->addGlobalResolveInfo(instructions().size());
#else
        m_codeBlock->addGlobalResolveInstruction(instructions().size());
#endif
        emitOpcode(op_resolve_global);
        instructions().append(dst->index());
        instructions().append(globalObject);
        instructions().append(addConstant(property));
        instructions().append(0);
        instructions().append(0);
        return dst;
    }

    if (index != missingSymbolMarker()) {
        // Directly index the property lookup across multiple scopes.
        return emitGetScopedVar(dst, depth, index, globalObject);
    }

    // In this case we are at least able to drop a few scope chains from the
    // lookup chain, although we still need to hash from then on.
    emitOpcode(op_resolve_skip);
    instructions().append(dst->index());
    instructions().append(addConstant(property));
    instructions().append(depth);
    return dst;
}
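// Note (descriptive, not from the original comments): the two trailing zero
// operands appended to op_resolve_global above appear to be placeholders that
// the runtime later fills in with its global-lookup cache.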
1088 RegisterID
* BytecodeGenerator::emitGetScopedVar(RegisterID
* dst
, size_t depth
, int index
, JSValue globalObject
)
1091 emitOpcode(op_get_global_var
);
1092 instructions().append(dst
->index());
1093 instructions().append(asCell(globalObject
));
1094 instructions().append(index
);
1098 emitOpcode(op_get_scoped_var
);
1099 instructions().append(dst
->index());
1100 instructions().append(index
);
1101 instructions().append(depth
);
1105 RegisterID
* BytecodeGenerator::emitPutScopedVar(size_t depth
, int index
, RegisterID
* value
, JSValue globalObject
)
1108 emitOpcode(op_put_global_var
);
1109 instructions().append(asCell(globalObject
));
1110 instructions().append(index
);
1111 instructions().append(value
->index());
1114 emitOpcode(op_put_scoped_var
);
1115 instructions().append(index
);
1116 instructions().append(depth
);
1117 instructions().append(value
->index());
1121 RegisterID
* BytecodeGenerator::emitResolveBase(RegisterID
* dst
, const Identifier
& property
)
1125 JSObject
* globalObject
= 0;
1126 findScopedProperty(property
, index
, depth
, false, globalObject
);
1127 if (!globalObject
) {
1128 // We can't optimise at all :-(
1129 emitOpcode(op_resolve_base
);
1130 instructions().append(dst
->index());
1131 instructions().append(addConstant(property
));
1135 // Global object is the base
1136 return emitLoad(dst
, JSValue(globalObject
));
1139 RegisterID
* BytecodeGenerator::emitResolveWithBase(RegisterID
* baseDst
, RegisterID
* propDst
, const Identifier
& property
)
1143 JSObject
* globalObject
= 0;
1144 if (!findScopedProperty(property
, index
, depth
, false, globalObject
) || !globalObject
) {
1145 // We can't optimise at all :-(
1146 emitOpcode(op_resolve_with_base
);
1147 instructions().append(baseDst
->index());
1148 instructions().append(propDst
->index());
1149 instructions().append(addConstant(property
));
1153 bool forceGlobalResolve
= false;
1154 if (m_regeneratingForExceptionInfo
) {
1156 forceGlobalResolve
= m_codeBlockBeingRegeneratedFrom
->hasGlobalResolveInfoAtBytecodeOffset(instructions().size());
1158 forceGlobalResolve
= m_codeBlockBeingRegeneratedFrom
->hasGlobalResolveInstructionAtBytecodeOffset(instructions().size());
1162 // Global object is the base
1163 emitLoad(baseDst
, JSValue(globalObject
));
1165 if (index
!= missingSymbolMarker() && !forceGlobalResolve
) {
1166 // Directly index the property lookup across multiple scopes.
1167 emitGetScopedVar(propDst
, depth
, index
, globalObject
);
1172 m_codeBlock
->addGlobalResolveInfo(instructions().size());
1174 m_codeBlock
->addGlobalResolveInstruction(instructions().size());
1176 emitOpcode(op_resolve_global
);
1177 instructions().append(propDst
->index());
1178 instructions().append(globalObject
);
1179 instructions().append(addConstant(property
));
1180 instructions().append(0);
1181 instructions().append(0);
1185 void BytecodeGenerator::emitMethodCheck()
1187 emitOpcode(op_method_check
);
1190 RegisterID
* BytecodeGenerator::emitGetById(RegisterID
* dst
, RegisterID
* base
, const Identifier
& property
)
1193 m_codeBlock
->addStructureStubInfo(StructureStubInfo(op_get_by_id
));
1195 m_codeBlock
->addPropertyAccessInstruction(instructions().size());
1198 emitOpcode(op_get_by_id
);
1199 instructions().append(dst
->index());
1200 instructions().append(base
->index());
1201 instructions().append(addConstant(property
));
1202 instructions().append(0);
1203 instructions().append(0);
1204 instructions().append(0);
1205 instructions().append(0);
1209 RegisterID
* BytecodeGenerator::emitPutById(RegisterID
* base
, const Identifier
& property
, RegisterID
* value
)
1212 m_codeBlock
->addStructureStubInfo(StructureStubInfo(op_put_by_id
));
1214 m_codeBlock
->addPropertyAccessInstruction(instructions().size());
1217 emitOpcode(op_put_by_id
);
1218 instructions().append(base
->index());
1219 instructions().append(addConstant(property
));
1220 instructions().append(value
->index());
1221 instructions().append(0);
1222 instructions().append(0);
1223 instructions().append(0);
1224 instructions().append(0);
1228 RegisterID
* BytecodeGenerator::emitPutGetter(RegisterID
* base
, const Identifier
& property
, RegisterID
* value
)
1230 emitOpcode(op_put_getter
);
1231 instructions().append(base
->index());
1232 instructions().append(addConstant(property
));
1233 instructions().append(value
->index());
1237 RegisterID
* BytecodeGenerator::emitPutSetter(RegisterID
* base
, const Identifier
& property
, RegisterID
* value
)
1239 emitOpcode(op_put_setter
);
1240 instructions().append(base
->index());
1241 instructions().append(addConstant(property
));
1242 instructions().append(value
->index());
1246 RegisterID
* BytecodeGenerator::emitDeleteById(RegisterID
* dst
, RegisterID
* base
, const Identifier
& property
)
1248 emitOpcode(op_del_by_id
);
1249 instructions().append(dst
->index());
1250 instructions().append(base
->index());
1251 instructions().append(addConstant(property
));
1255 RegisterID
* BytecodeGenerator::emitGetByVal(RegisterID
* dst
, RegisterID
* base
, RegisterID
* property
)
1257 emitOpcode(op_get_by_val
);
1258 instructions().append(dst
->index());
1259 instructions().append(base
->index());
1260 instructions().append(property
->index());
1264 RegisterID
* BytecodeGenerator::emitPutByVal(RegisterID
* base
, RegisterID
* property
, RegisterID
* value
)
1266 emitOpcode(op_put_by_val
);
1267 instructions().append(base
->index());
1268 instructions().append(property
->index());
1269 instructions().append(value
->index());
1273 RegisterID
* BytecodeGenerator::emitDeleteByVal(RegisterID
* dst
, RegisterID
* base
, RegisterID
* property
)
1275 emitOpcode(op_del_by_val
);
1276 instructions().append(dst
->index());
1277 instructions().append(base
->index());
1278 instructions().append(property
->index());
1282 RegisterID
* BytecodeGenerator::emitPutByIndex(RegisterID
* base
, unsigned index
, RegisterID
* value
)
1284 emitOpcode(op_put_by_index
);
1285 instructions().append(base
->index());
1286 instructions().append(index
);
1287 instructions().append(value
->index());
1291 RegisterID
* BytecodeGenerator::emitNewObject(RegisterID
* dst
)
1293 emitOpcode(op_new_object
);
1294 instructions().append(dst
->index());
1298 RegisterID
* BytecodeGenerator::emitNewArray(RegisterID
* dst
, ElementNode
* elements
)
1300 Vector
<RefPtr
<RegisterID
>, 16> argv
;
1301 for (ElementNode
* n
= elements
; n
; n
= n
->next()) {
1304 argv
.append(newTemporary());
1305 // op_new_array requires the initial values to be a sequential range of registers
1306 ASSERT(argv
.size() == 1 || argv
[argv
.size() - 1]->index() == argv
[argv
.size() - 2]->index() + 1);
1307 emitNode(argv
.last().get(), n
->value());
1309 emitOpcode(op_new_array
);
1310 instructions().append(dst
->index());
1311 instructions().append(argv
.size() ? argv
[0]->index() : 0); // argv
1312 instructions().append(argv
.size()); // argc
1316 RegisterID
* BytecodeGenerator::emitNewFunction(RegisterID
* dst
, FuncDeclNode
* n
)
1318 emitOpcode(op_new_func
);
1319 instructions().append(dst
->index());
1320 instructions().append(addConstant(n
));
1324 RegisterID
* BytecodeGenerator::emitNewRegExp(RegisterID
* dst
, RegExp
* regExp
)
1326 emitOpcode(op_new_regexp
);
1327 instructions().append(dst
->index());
1328 instructions().append(addRegExp(regExp
));
1333 RegisterID
* BytecodeGenerator::emitNewFunctionExpression(RegisterID
* r0
, FuncExprNode
* n
)
1335 emitOpcode(op_new_func_exp
);
1336 instructions().append(r0
->index());
1337 instructions().append(addConstant(n
));
1341 RegisterID
* BytecodeGenerator::emitCall(RegisterID
* dst
, RegisterID
* func
, RegisterID
* thisRegister
, ArgumentsNode
* argumentsNode
, unsigned divot
, unsigned startOffset
, unsigned endOffset
)
1343 return emitCall(op_call
, dst
, func
, thisRegister
, argumentsNode
, divot
, startOffset
, endOffset
);
1346 void BytecodeGenerator::createArgumentsIfNecessary()
1348 if (m_codeBlock
->usesArguments() && m_codeType
== FunctionCode
)
1349 emitOpcode(op_create_arguments
);
1352 RegisterID
* BytecodeGenerator::emitCallEval(RegisterID
* dst
, RegisterID
* func
, RegisterID
* thisRegister
, ArgumentsNode
* argumentsNode
, unsigned divot
, unsigned startOffset
, unsigned endOffset
)
1354 createArgumentsIfNecessary();
1355 return emitCall(op_call_eval
, dst
, func
, thisRegister
, argumentsNode
, divot
, startOffset
, endOffset
);
RegisterID* BytecodeGenerator::emitCall(OpcodeID opcodeID, RegisterID* dst, RegisterID* func, RegisterID* thisRegister, ArgumentsNode* argumentsNode, unsigned divot, unsigned startOffset, unsigned endOffset)
{
    ASSERT(opcodeID == op_call || opcodeID == op_call_eval);
    ASSERT(func->refCount());
    ASSERT(thisRegister->refCount());

    RegisterID* originalFunc = func;
    if (m_shouldEmitProfileHooks) {
        // If codegen decided to recycle func as this call's destination register,
        // we need to undo that optimization here so that func will still be around
        // for the sake of op_profile_did_call.
        if (dst == func) {
            RefPtr<RegisterID> movedThisRegister = emitMove(newTemporary(), thisRegister);
            RefPtr<RegisterID> movedFunc = emitMove(thisRegister, func);

            thisRegister = movedThisRegister.release().releaseRef();
            func = movedFunc.release().releaseRef();
        }
    }

    // Generate code for arguments.
    Vector<RefPtr<RegisterID>, 16> argv;
    argv.append(thisRegister);
    for (ArgumentListNode* n = argumentsNode->m_listNode; n; n = n->m_next) {
        argv.append(newTemporary());
        // op_call requires the arguments to be a sequential range of registers
        ASSERT(argv[argv.size() - 1]->index() == argv[argv.size() - 2]->index() + 1);
        emitNode(argv.last().get(), n);
    }

    // Reserve space for call frame.
    Vector<RefPtr<RegisterID>, RegisterFile::CallFrameHeaderSize> callFrame;
    for (int i = 0; i < RegisterFile::CallFrameHeaderSize; ++i)
        callFrame.append(newTemporary());

    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_will_call);
        instructions().append(func->index());

#if ENABLE(JIT)
        m_codeBlock->addFunctionRegisterInfo(instructions().size(), func->index());
#endif
    }

    emitExpressionInfo(divot, startOffset, endOffset);

#if ENABLE(JIT)
    m_codeBlock->addCallLinkInfo();
#endif

    // Emit call.
    emitOpcode(opcodeID);
    instructions().append(dst->index()); // dst
    instructions().append(func->index()); // func
    instructions().append(argv.size()); // argCount
    instructions().append(argv[0]->index() + argv.size() + RegisterFile::CallFrameHeaderSize); // registerOffset

    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_did_call);
        instructions().append(func->index());

        if (dst == originalFunc) {
            thisRegister->deref();
            func->deref();
        }
    }

    return dst;
}
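// Layout note for the call above: argv[0] holds "this" and the arguments follow
// it contiguously, so the registerOffset operand (argv[0]'s index + argc +
// CallFrameHeaderSize) is where the callee's call frame will begin.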
1428 RegisterID
* BytecodeGenerator::emitLoadVarargs(RegisterID
* argCountDst
, RegisterID
* arguments
)
1430 ASSERT(argCountDst
->index() < arguments
->index());
1431 emitOpcode(op_load_varargs
);
1432 instructions().append(argCountDst
->index());
1433 instructions().append(arguments
->index());
1437 RegisterID
* BytecodeGenerator::emitCallVarargs(RegisterID
* dst
, RegisterID
* func
, RegisterID
* thisRegister
, RegisterID
* argCountRegister
, unsigned divot
, unsigned startOffset
, unsigned endOffset
)
1439 ASSERT(func
->refCount());
1440 ASSERT(thisRegister
->refCount());
1441 ASSERT(dst
!= func
);
1442 if (m_shouldEmitProfileHooks
) {
1443 emitOpcode(op_profile_will_call
);
1444 instructions().append(func
->index());
1447 m_codeBlock
->addFunctionRegisterInfo(instructions().size(), func
->index());
1451 emitExpressionInfo(divot
, startOffset
, endOffset
);
1454 emitOpcode(op_call_varargs
);
1455 instructions().append(dst
->index()); // dst
1456 instructions().append(func
->index()); // func
1457 instructions().append(argCountRegister
->index()); // arg count
1458 instructions().append(thisRegister
->index() + RegisterFile::CallFrameHeaderSize
); // initial registerOffset
1459 if (m_shouldEmitProfileHooks
) {
1460 emitOpcode(op_profile_did_call
);
1461 instructions().append(func
->index());
1466 RegisterID
* BytecodeGenerator::emitReturn(RegisterID
* src
)
1468 if (m_codeBlock
->needsFullScopeChain()) {
1469 emitOpcode(op_tear_off_activation
);
1470 instructions().append(m_activationRegisterIndex
);
1471 } else if (m_codeBlock
->usesArguments() && m_codeBlock
->m_numParameters
> 1)
1472 emitOpcode(op_tear_off_arguments
);
1474 return emitUnaryNoDstOp(op_ret
, src
);
1477 RegisterID
* BytecodeGenerator::emitUnaryNoDstOp(OpcodeID opcodeID
, RegisterID
* src
)
1479 emitOpcode(opcodeID
);
1480 instructions().append(src
->index());
1484 RegisterID
* BytecodeGenerator::emitConstruct(RegisterID
* dst
, RegisterID
* func
, ArgumentsNode
* argumentsNode
, unsigned divot
, unsigned startOffset
, unsigned endOffset
)
1486 ASSERT(func
->refCount());
1488 RegisterID
* originalFunc
= func
;
1489 if (m_shouldEmitProfileHooks
) {
1490 // If codegen decided to recycle func as this call's destination register,
1491 // we need to undo that optimization here so that func will still be around
1492 // for the sake of op_profile_did_call.
1494 RefPtr
<RegisterID
> movedFunc
= emitMove(newTemporary(), func
);
1495 func
= movedFunc
.release().releaseRef();
1499 RefPtr
<RegisterID
> funcProto
= newTemporary();
1501 // Generate code for arguments.
1502 Vector
<RefPtr
<RegisterID
>, 16> argv
;
1503 argv
.append(newTemporary()); // reserve space for "this"
1504 for (ArgumentListNode
* n
= argumentsNode
? argumentsNode
->m_listNode
: 0; n
; n
= n
->m_next
) {
1505 argv
.append(newTemporary());
1506 // op_construct requires the arguments to be a sequential range of registers
1507 ASSERT(argv
[argv
.size() - 1]->index() == argv
[argv
.size() - 2]->index() + 1);
1508 emitNode(argv
.last().get(), n
);
1511 if (m_shouldEmitProfileHooks
) {
1512 emitOpcode(op_profile_will_call
);
1513 instructions().append(func
->index());
1517 emitExpressionInfo(divot
, startOffset
, endOffset
);
1518 emitGetByIdExceptionInfo(op_construct
);
1519 emitGetById(funcProto
.get(), func
, globalData()->propertyNames
->prototype
);
1521 // Reserve space for call frame.
1522 Vector
<RefPtr
<RegisterID
>, RegisterFile::CallFrameHeaderSize
> callFrame
;
1523 for (int i
= 0; i
< RegisterFile::CallFrameHeaderSize
; ++i
)
1524 callFrame
.append(newTemporary());
1526 emitExpressionInfo(divot
, startOffset
, endOffset
);
1529 m_codeBlock
->addCallLinkInfo();
1532 emitOpcode(op_construct
);
1533 instructions().append(dst
->index()); // dst
1534 instructions().append(func
->index()); // func
1535 instructions().append(argv
.size()); // argCount
1536 instructions().append(argv
[0]->index() + argv
.size() + RegisterFile::CallFrameHeaderSize
); // registerOffset
1537 instructions().append(funcProto
->index()); // proto
1538 instructions().append(argv
[0]->index()); // thisRegister
1540 emitOpcode(op_construct_verify
);
1541 instructions().append(dst
->index());
1542 instructions().append(argv
[0]->index());
1544 if (m_shouldEmitProfileHooks
) {
1545 emitOpcode(op_profile_did_call
);
1546 instructions().append(func
->index());
1548 if (dst
== originalFunc
)
1555 RegisterID
* BytecodeGenerator::emitStrcat(RegisterID
* dst
, RegisterID
* src
, int count
)
1557 emitOpcode(op_strcat
);
1558 instructions().append(dst
->index());
1559 instructions().append(src
->index());
1560 instructions().append(count
);
1565 void BytecodeGenerator::emitToPrimitive(RegisterID
* dst
, RegisterID
* src
)
1567 emitOpcode(op_to_primitive
);
1568 instructions().append(dst
->index());
1569 instructions().append(src
->index());
1572 RegisterID
* BytecodeGenerator::emitPushScope(RegisterID
* scope
)
1574 ASSERT(scope
->isTemporary());
1575 ControlFlowContext context
;
1576 context
.isFinallyBlock
= false;
1577 m_scopeContextStack
.append(context
);
1578 m_dynamicScopeDepth
++;
1579 createArgumentsIfNecessary();
1581 return emitUnaryNoDstOp(op_push_scope
, scope
);
1584 void BytecodeGenerator::emitPopScope()
1586 ASSERT(m_scopeContextStack
.size());
1587 ASSERT(!m_scopeContextStack
.last().isFinallyBlock
);
1589 emitOpcode(op_pop_scope
);
1591 m_scopeContextStack
.removeLast();
1592 m_dynamicScopeDepth
--;
1595 void BytecodeGenerator::emitDebugHook(DebugHookID debugHookID
, int firstLine
, int lastLine
)
1597 if (!m_shouldEmitDebugHooks
)
1599 emitOpcode(op_debug
);
1600 instructions().append(debugHookID
);
1601 instructions().append(firstLine
);
1602 instructions().append(lastLine
);
1605 void BytecodeGenerator::pushFinallyContext(Label
* target
, RegisterID
* retAddrDst
)
1607 ControlFlowContext scope
;
1608 scope
.isFinallyBlock
= true;
1609 FinallyContext context
= { target
, retAddrDst
};
1610 scope
.finallyContext
= context
;
1611 m_scopeContextStack
.append(scope
);
1615 void BytecodeGenerator::popFinallyContext()
1617 ASSERT(m_scopeContextStack
.size());
1618 ASSERT(m_scopeContextStack
.last().isFinallyBlock
);
1619 ASSERT(m_finallyDepth
> 0);
1620 m_scopeContextStack
.removeLast();
LabelScope* BytecodeGenerator::breakTarget(const Identifier& name)
{
    // Reclaim free label scopes.
    //
    // The condition was previously coded as 'm_labelScopes.size() && !m_labelScopes.last().refCount()',
    // however sometimes this appears to lead to GCC going a little haywire and entering the loop with
    // size 0, leading to segfaulty badness. We are yet to identify a valid cause within our code to
    // cause the GCC codegen to misbehave in this fashion, and as such the following refactoring of the
    // loop condition is a workaround.
    while (m_labelScopes.size()) {
        if (m_labelScopes.last().refCount())
            break;
        m_labelScopes.removeLast();
    }

    if (!m_labelScopes.size())
        return 0;

    // We special-case the following, which is a syntax error in Firefox:
    // label:
    //     break;
    if (name.isEmpty()) {
        for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
            LabelScope* scope = &m_labelScopes[i];
            if (scope->type() != LabelScope::NamedLabel) {
                ASSERT(scope->breakTarget());
                return scope;
            }
        }
        return 0;
    }

    for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
        LabelScope* scope = &m_labelScopes[i];
        if (scope->name() && *scope->name() == name) {
            ASSERT(scope->breakTarget());
            return scope;
        }
    }
    return 0;
}
1666 LabelScope
* BytecodeGenerator::continueTarget(const Identifier
& name
)
1668 // Reclaim free label scopes.
1669 while (m_labelScopes
.size() && !m_labelScopes
.last().refCount())
1670 m_labelScopes
.removeLast();
1672 if (!m_labelScopes
.size())
1675 if (name
.isEmpty()) {
1676 for (int i
= m_labelScopes
.size() - 1; i
>= 0; --i
) {
1677 LabelScope
* scope
= &m_labelScopes
[i
];
1678 if (scope
->type() == LabelScope::Loop
) {
1679 ASSERT(scope
->continueTarget());
1686 // Continue to the loop nested nearest to the label scope that matches
1688 LabelScope
* result
= 0;
1689 for (int i
= m_labelScopes
.size() - 1; i
>= 0; --i
) {
1690 LabelScope
* scope
= &m_labelScopes
[i
];
1691 if (scope
->type() == LabelScope::Loop
) {
1692 ASSERT(scope
->continueTarget());
1695 if (scope
->name() && *scope
->name() == name
)
1696 return result
; // may be 0
1701 PassRefPtr
<Label
> BytecodeGenerator::emitComplexJumpScopes(Label
* target
, ControlFlowContext
* topScope
, ControlFlowContext
* bottomScope
)
1703 while (topScope
> bottomScope
) {
1704 // First we count the number of dynamic scopes we need to remove to get
1705 // to a finally block.
1706 int nNormalScopes
= 0;
1707 while (topScope
> bottomScope
) {
1708 if (topScope
->isFinallyBlock
)
1714 if (nNormalScopes
) {
1715 // We need to remove a number of dynamic scopes to get to the next
1717 emitOpcode(op_jmp_scopes
);
1718 instructions().append(nNormalScopes
);
1720 // If topScope == bottomScope then there isn't actually a finally block
1721 // left to emit, so make the jmp_scopes jump directly to the target label
1722 if (topScope
== bottomScope
) {
1723 instructions().append(target
->offsetFrom(instructions().size()));
1727 // Otherwise we just use jmp_scopes to pop a group of scopes and go
1728 // to the next instruction
1729 RefPtr
<Label
> nextInsn
= newLabel();
1730 instructions().append(nextInsn
->offsetFrom(instructions().size()));
1731 emitLabel(nextInsn
.get());
1734 while (topScope
> bottomScope
&& topScope
->isFinallyBlock
) {
1735 emitJumpSubroutine(topScope
->finallyContext
.retAddrDst
, topScope
->finallyContext
.finallyAddr
);
1739 return emitJump(target
);
1742 PassRefPtr
<Label
> BytecodeGenerator::emitJumpScopes(Label
* target
, int targetScopeDepth
)
1744 ASSERT(scopeDepth() - targetScopeDepth
>= 0);
1745 ASSERT(target
->isForward());
1747 size_t scopeDelta
= scopeDepth() - targetScopeDepth
;
1748 ASSERT(scopeDelta
<= m_scopeContextStack
.size());
1750 return emitJump(target
);
1753 return emitComplexJumpScopes(target
, &m_scopeContextStack
.last(), &m_scopeContextStack
.last() - scopeDelta
);
1755 emitOpcode(op_jmp_scopes
);
1756 instructions().append(scopeDelta
);
1757 instructions().append(target
->offsetFrom(instructions().size()));
1761 RegisterID
* BytecodeGenerator::emitNextPropertyName(RegisterID
* dst
, RegisterID
* iter
, Label
* target
)
1763 emitOpcode(op_next_pname
);
1764 instructions().append(dst
->index());
1765 instructions().append(iter
->index());
1766 instructions().append(target
->offsetFrom(instructions().size()));
1770 RegisterID
* BytecodeGenerator::emitCatch(RegisterID
* targetRegister
, Label
* start
, Label
* end
)
1773 HandlerInfo info
= { start
->offsetFrom(0), end
->offsetFrom(0), instructions().size(), m_dynamicScopeDepth
+ m_baseScopeDepth
, CodeLocationLabel() };
1775 HandlerInfo info
= { start
->offsetFrom(0), end
->offsetFrom(0), instructions().size(), m_dynamicScopeDepth
+ m_baseScopeDepth
};
1778 m_codeBlock
->addExceptionHandler(info
);
1779 emitOpcode(op_catch
);
1780 instructions().append(targetRegister
->index());
1781 return targetRegister
;
1784 RegisterID
* BytecodeGenerator::emitNewError(RegisterID
* dst
, ErrorType type
, JSValue message
)
1786 emitOpcode(op_new_error
);
1787 instructions().append(dst
->index());
1788 instructions().append(static_cast<int>(type
));
1789 instructions().append(addConstantValue(message
)->index());
PassRefPtr<Label> BytecodeGenerator::emitJumpSubroutine(RegisterID* retAddrDst, Label* finally)
{
    emitOpcode(op_jsr);
    instructions().append(retAddrDst->index());
    instructions().append(finally->offsetFrom(instructions().size()));
    emitLabel(newLabel().get()); // Record the fact that the next instruction is implicitly labeled, because op_sret will return to it.
    return finally;
}
1802 void BytecodeGenerator::emitSubroutineReturn(RegisterID
* retAddrSrc
)
1804 emitOpcode(op_sret
);
1805 instructions().append(retAddrSrc
->index());
1808 void BytecodeGenerator::emitPushNewScope(RegisterID
* dst
, Identifier
& property
, RegisterID
* value
)
1810 ControlFlowContext context
;
1811 context
.isFinallyBlock
= false;
1812 m_scopeContextStack
.append(context
);
1813 m_dynamicScopeDepth
++;
1815 createArgumentsIfNecessary();
1817 emitOpcode(op_push_new_scope
);
1818 instructions().append(dst
->index());
1819 instructions().append(addConstant(property
));
1820 instructions().append(value
->index());
1823 void BytecodeGenerator::beginSwitch(RegisterID
* scrutineeRegister
, SwitchInfo::SwitchType type
)
1825 SwitchInfo info
= { instructions().size(), type
};
1827 case SwitchInfo::SwitchImmediate
:
1828 emitOpcode(op_switch_imm
);
1830 case SwitchInfo::SwitchCharacter
:
1831 emitOpcode(op_switch_char
);
1833 case SwitchInfo::SwitchString
:
1834 emitOpcode(op_switch_string
);
1837 ASSERT_NOT_REACHED();
1840 instructions().append(0); // place holder for table index
1841 instructions().append(0); // place holder for default target
1842 instructions().append(scrutineeRegister
->index());
1843 m_switchContextStack
.append(info
);
1846 static int32_t keyForImmediateSwitch(ExpressionNode
* node
, int32_t min
, int32_t max
)
1849 ASSERT(node
->isNumber());
1850 double value
= static_cast<NumberNode
*>(node
)->value();
1851 int32_t key
= static_cast<int32_t>(value
);
1852 ASSERT(key
== value
);
1858 static void prepareJumpTableForImmediateSwitch(SimpleJumpTable
& jumpTable
, int32_t switchAddress
, uint32_t clauseCount
, RefPtr
<Label
>* labels
, ExpressionNode
** nodes
, int32_t min
, int32_t max
)
1860 jumpTable
.min
= min
;
1861 jumpTable
.branchOffsets
.resize(max
- min
+ 1);
1862 jumpTable
.branchOffsets
.fill(0);
1863 for (uint32_t i
= 0; i
< clauseCount
; ++i
) {
1864 // We're emitting this after the clause labels should have been fixed, so
1865 // the labels should not be "forward" references
1866 ASSERT(!labels
[i
]->isForward());
1867 jumpTable
.add(keyForImmediateSwitch(nodes
[i
], min
, max
), labels
[i
]->offsetFrom(switchAddress
));
1871 static int32_t keyForCharacterSwitch(ExpressionNode
* node
, int32_t min
, int32_t max
)
1874 ASSERT(node
->isString());
1875 UString::Rep
* clause
= static_cast<StringNode
*>(node
)->value().ustring().rep();
1876 ASSERT(clause
->size() == 1);
1878 int32_t key
= clause
->data()[0];
1884 static void prepareJumpTableForCharacterSwitch(SimpleJumpTable
& jumpTable
, int32_t switchAddress
, uint32_t clauseCount
, RefPtr
<Label
>* labels
, ExpressionNode
** nodes
, int32_t min
, int32_t max
)
1886 jumpTable
.min
= min
;
1887 jumpTable
.branchOffsets
.resize(max
- min
+ 1);
1888 jumpTable
.branchOffsets
.fill(0);
1889 for (uint32_t i
= 0; i
< clauseCount
; ++i
) {
1890 // We're emitting this after the clause labels should have been fixed, so
1891 // the labels should not be "forward" references
1892 ASSERT(!labels
[i
]->isForward());
1893 jumpTable
.add(keyForCharacterSwitch(nodes
[i
], min
, max
), labels
[i
]->offsetFrom(switchAddress
));
1897 static void prepareJumpTableForStringSwitch(StringJumpTable
& jumpTable
, int32_t switchAddress
, uint32_t clauseCount
, RefPtr
<Label
>* labels
, ExpressionNode
** nodes
)
1899 for (uint32_t i
= 0; i
< clauseCount
; ++i
) {
1900 // We're emitting this after the clause labels should have been fixed, so
1901 // the labels should not be "forward" references
1902 ASSERT(!labels
[i
]->isForward());
1904 ASSERT(nodes
[i
]->isString());
1905 UString::Rep
* clause
= static_cast<StringNode
*>(nodes
[i
])->value().ustring().rep();
1906 OffsetLocation location
;
1907 location
.branchOffset
= labels
[i
]->offsetFrom(switchAddress
);
1908 jumpTable
.offsetTable
.add(clause
, location
);
1912 void BytecodeGenerator::endSwitch(uint32_t clauseCount
, RefPtr
<Label
>* labels
, ExpressionNode
** nodes
, Label
* defaultLabel
, int32_t min
, int32_t max
)
1914 SwitchInfo switchInfo
= m_switchContextStack
.last();
1915 m_switchContextStack
.removeLast();
1916 if (switchInfo
.switchType
== SwitchInfo::SwitchImmediate
) {
1917 instructions()[switchInfo
.bytecodeOffset
+ 1] = m_codeBlock
->numberOfImmediateSwitchJumpTables();
1918 instructions()[switchInfo
.bytecodeOffset
+ 2] = defaultLabel
->offsetFrom(switchInfo
.bytecodeOffset
+ 3);
1920 SimpleJumpTable
& jumpTable
= m_codeBlock
->addImmediateSwitchJumpTable();
1921 prepareJumpTableForImmediateSwitch(jumpTable
, switchInfo
.bytecodeOffset
+ 3, clauseCount
, labels
, nodes
, min
, max
);
1922 } else if (switchInfo
.switchType
== SwitchInfo::SwitchCharacter
) {
1923 instructions()[switchInfo
.bytecodeOffset
+ 1] = m_codeBlock
->numberOfCharacterSwitchJumpTables();
1924 instructions()[switchInfo
.bytecodeOffset
+ 2] = defaultLabel
->offsetFrom(switchInfo
.bytecodeOffset
+ 3);
1926 SimpleJumpTable
& jumpTable
= m_codeBlock
->addCharacterSwitchJumpTable();
1927 prepareJumpTableForCharacterSwitch(jumpTable
, switchInfo
.bytecodeOffset
+ 3, clauseCount
, labels
, nodes
, min
, max
);
1929 ASSERT(switchInfo
.switchType
== SwitchInfo::SwitchString
);
1930 instructions()[switchInfo
.bytecodeOffset
+ 1] = m_codeBlock
->numberOfStringSwitchJumpTables();
1931 instructions()[switchInfo
.bytecodeOffset
+ 2] = defaultLabel
->offsetFrom(switchInfo
.bytecodeOffset
+ 3);
1933 StringJumpTable
& jumpTable
= m_codeBlock
->addStringSwitchJumpTable();
1934 prepareJumpTableForStringSwitch(jumpTable
, switchInfo
.bytecodeOffset
+ 3, clauseCount
, labels
, nodes
);
RegisterID* BytecodeGenerator::emitThrowExpressionTooDeepException()
{
    // It would be nice to do an even better job of identifying exactly where the expression is.
    // And we could make the caller pass the node pointer in, if there was some way of getting
    // that from an arbitrary node. However, calling emitExpressionInfo without any useful data
    // is still good enough to get us an accurate line number.
    emitExpressionInfo(0, 0, 0);
    RegisterID* exception = emitNewError(newTemporary(), SyntaxError, jsString(globalData(), "Expression too deep"));
    emitThrow(exception);