2 * Copyright (C) 2008, 2009, 2012, 2013 Apple Inc. All rights reserved.
3 * Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca>
4 * Copyright (C) 2012 Igalia, S.L.
6 * Redistribution and use in source and binary forms, with or without
7 * modification, are permitted provided that the following conditions
10 * 1. Redistributions of source code must retain the above copyright
11 * notice, this list of conditions and the following disclaimer.
12 * 2. Redistributions in binary form must reproduce the above copyright
13 * notice, this list of conditions and the following disclaimer in the
14 * documentation and/or other materials provided with the distribution.
15 * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
16 * its contributors may be used to endorse or promote products derived
17 * from this software without specific prior written permission.
19 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
20 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
21 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
22 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
23 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
24 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
25 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
26 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
28 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
32 #include "BytecodeGenerator.h"
34 #include "BatchedTransitionOptimizer.h"
35 #include "Interpreter.h"
36 #include "JSActivation.h"
37 #include "JSFunction.h"
38 #include "JSNameScope.h"
39 #include "LowLevelInterpreter.h"
40 #include "Operations.h"
42 #include "StrongInlines.h"
43 #include "UnlinkedCodeBlock.h"
44 #include <wtf/text/WTFString.h>
50 void Label::setLocation(unsigned location
)
52 m_location
= location
;
54 unsigned size
= m_unresolvedJumps
.size();
55 for (unsigned i
= 0; i
< size
; ++i
)
56 m_generator
->m_instructions
[m_unresolvedJumps
[i
].second
].u
.operand
= m_location
- m_unresolvedJumps
[i
].first
;
// Debug-only sanity check over the resolve-result type.
// NOTE(review): the extraction dropped most of this function's body (the
// embedded line numbers jump 60 -> 64 -> 75); only one case label and the
// unreachable-default assertion survive. Verify against upstream WebKit.
60 void ResolveResult::checkValidity()
64 case ReadOnlyRegister
:
75 RELEASE_ASSERT_NOT_REACHED();
// Drives bytecode generation: emits bytecode for the AST, converts the
// collected try ranges into exception handlers on the UnlinkedCodeBlock,
// transfers the instruction stream, and reports OutOfMemory if expression
// nesting overflowed during emission.
// NOTE(review): the extraction dropped lines here (embedded line numbers
// jump, e.g. 95 -> 107 and 116 -> 120, which likely removed the end<start
// guard the comment below describes). Verify against upstream WebKit.
80 ParserError
BytecodeGenerator::generate()
82 SamplingRegion
samplingRegion("Bytecode Generation");
84 m_codeBlock
->setThisRegister(m_thisRegister
.index());
86 m_scopeNode
->emitBytecode(*this);
88 m_staticPropertyAnalyzer
.kill();
// Turn every recorded try range into an UnlinkedHandlerInfo entry.
90 for (unsigned i
= 0; i
< m_tryRanges
.size(); ++i
) {
91 TryRange
& range
= m_tryRanges
[i
];
92 int start
= range
.start
->bind();
93 int end
= range
.end
->bind();
95 // This will happen for empty try blocks and for some cases of finally blocks:
107 // The return will pop scopes to execute the outer finally block. But this includes
108 // popping the try context for the inner try. The try context is live in the fall-through
109 // part of the finally block not because we will emit a handler that overlaps the finally,
110 // but because we haven't yet had a chance to plant the catch target. Then when we finish
111 // emitting code for the outer finally block, we repush the try contex, this time with a
112 // new start index. But that means that the start index for the try range corresponding
113 // to the inner-finally-following-the-return (marked as "*HERE*" above) will be greater
114 // than the end index of the try block. This is harmless since end < start handlers will
115 // never get matched in our logic, but we do the runtime a favor and choose to not emit
116 // such handlers at all.
120 ASSERT(range
.tryData
->targetScopeDepth
!= UINT_MAX
);
121 UnlinkedHandlerInfo info
= {
122 static_cast<uint32_t>(start
), static_cast<uint32_t>(end
),
123 static_cast<uint32_t>(range
.tryData
->target
->bind()),
124 range
.tryData
->targetScopeDepth
126 m_codeBlock
->addExceptionHandler(info
);
// Hand the finished instruction stream to the unlinked code block.
129 m_codeBlock
->instructions() = RefCountedArray
<UnlinkedInstruction
>(m_instructions
);
131 m_codeBlock
->shrinkToFit();
133 if (m_expressionTooDeep
)
134 return ParserError(ParserError::OutOfMemory
);
135 return ParserError(ParserError::ErrorNone
);
// Adds |ident| to the symbol table as a var occupying the next callee
// register. If the identifier already exists, hands back its existing
// register through |r0| instead.
// NOTE(review): the extraction dropped this function's tail (the embedded
// line numbers stop at 145); presumably the return statements and the
// new-entry path are missing. Verify against upstream WebKit.
138 bool BytecodeGenerator::addVar(const Identifier
& ident
, bool isConstant
, RegisterID
*& r0
)
140 int index
= m_calleeRegisters
.size();
141 SymbolTableEntry
newEntry(index
, isConstant
? ReadOnly
: 0);
142 SymbolTable::AddResult result
= symbolTable().add(ident
.impl(), newEntry
);
// Existing entry: surface the previously assigned register.
144 if (!result
.isNewEntry
) {
145 r0
= &registerFor(result
.iterator
->value
.getIndex());
153 void BytecodeGenerator::preserveLastVar()
155 if ((m_firstConstantIndex
= m_calleeRegisters
.size()) != 0)
156 m_lastVar
= &m_calleeRegisters
.last();
// Constructor for global (program) code. Registers the program's function
// and variable declarations on the UnlinkedProgramCodeBlock and emits the
// op_enter prologue. Global code never lazily creates an activation, hence
// m_hasCreatedActivation(true).
// NOTE(review): the extraction dropped some initializer lines (embedded
// numbers jump 161 -> 163, 167 -> 169, 176 -> 178, etc.) and the braces.
// Verify against upstream WebKit before editing.
159 BytecodeGenerator::BytecodeGenerator(VM
& vm
, JSScope
*, ProgramNode
* programNode
, UnlinkedProgramCodeBlock
* codeBlock
, DebuggerMode debuggerMode
, ProfilerMode profilerMode
)
160 : m_shouldEmitDebugHooks(debuggerMode
== DebuggerOn
)
161 , m_shouldEmitProfileHooks(profilerMode
== ProfilerOn
)
163 , m_scopeNode(programNode
)
164 , m_codeBlock(vm
, codeBlock
)
165 , m_thisRegister(CallFrame::thisArgumentOffset())
166 , m_emptyValueRegister(0)
167 , m_globalObjectRegister(0)
169 , m_dynamicScopeDepth(0)
170 , m_codeType(GlobalCode
)
171 , m_nextConstantOffset(0)
172 , m_globalConstantIndex(0)
173 , m_hasCreatedActivation(true)
174 , m_firstLazyFunction(0)
175 , m_lastLazyFunction(0)
176 , m_staticPropertyAnalyzer(&m_instructions
)
178 , m_lastOpcodeID(op_end
)
180 , m_lastOpcodePosition(0)
182 , m_stack(wtfThreadData().stack())
183 , m_usesExceptions(false)
184 , m_expressionTooDeep(false)
186 if (m_shouldEmitDebugHooks
)
187 m_codeBlock
->setNeedsFullScopeChain(true);
189 m_codeBlock
->setNumParameters(1); // Allocate space for "this"
191 emitOpcode(op_enter
);
193 const VarStack
& varStack
= programNode
->varStack();
194 const FunctionStack
& functionStack
= programNode
->functionStack();
// Record each top-level function declaration as an unlinked executable.
196 for (size_t i
= 0; i
< functionStack
.size(); ++i
) {
197 FunctionBodyNode
* function
= functionStack
[i
];
198 UnlinkedFunctionExecutable
* unlinkedFunction
= makeFunction(function
);
199 codeBlock
->addFunctionDeclaration(*m_vm
, function
->ident(), unlinkedFunction
);
// Record each top-level var declaration, noting const-ness.
202 for (size_t i
= 0; i
< varStack
.size(); ++i
)
203 codeBlock
->addVariableDeclaration(*varStack
[i
].first
, !!(varStack
[i
].second
& DeclarationStacks::IsConstant
));
// Constructor for function code. Sets up the symbol table, the activation
// register (when a full scope chain is needed), the two consecutive
// 'arguments' registers, captured variables/arguments, lazily-created
// function registers, the parameter registers, the callee's name binding,
// and 'this' creation/conversion.
// NOTE(review): the extraction dropped many lines throughout this
// constructor (closing braces, blank lines, and several statements — the
// embedded line numbers jump repeatedly, e.g. 245 -> 248, 286 -> 288,
// 358 -> 362, 386 -> 388). The text below is preserved byte-for-byte;
// reconcile with upstream WebKit before editing.
207 BytecodeGenerator::BytecodeGenerator(VM
& vm
, JSScope
* scope
, FunctionBodyNode
* functionBody
, UnlinkedFunctionCodeBlock
* codeBlock
, DebuggerMode debuggerMode
, ProfilerMode profilerMode
)
208 : m_shouldEmitDebugHooks(debuggerMode
== DebuggerOn
)
209 , m_shouldEmitProfileHooks(profilerMode
== ProfilerOn
)
210 , m_symbolTable(codeBlock
->symbolTable())
211 , m_scopeNode(functionBody
)
213 , m_codeBlock(vm
, codeBlock
)
214 , m_activationRegister(0)
215 , m_emptyValueRegister(0)
216 , m_globalObjectRegister(0)
218 , m_dynamicScopeDepth(0)
219 , m_codeType(FunctionCode
)
220 , m_nextConstantOffset(0)
221 , m_globalConstantIndex(0)
222 , m_hasCreatedActivation(false)
223 , m_firstLazyFunction(0)
224 , m_lastLazyFunction(0)
225 , m_staticPropertyAnalyzer(&m_instructions
)
227 , m_lastOpcodeID(op_end
)
229 , m_lastOpcodePosition(0)
231 , m_stack(wtfThreadData().stack())
232 , m_usesExceptions(false)
233 , m_expressionTooDeep(false)
235 if (m_shouldEmitDebugHooks
)
236 m_codeBlock
->setNeedsFullScopeChain(true);
238 m_symbolTable
->setUsesNonStrictEval(codeBlock
->usesEval() && !codeBlock
->isStrictMode());
239 m_symbolTable
->setParameterCountIncludingThis(functionBody
->parameters()->size() + 1);
241 emitOpcode(op_enter
);
// Reserve and lazily initialize the activation register when needed.
242 if (m_codeBlock
->needsFullScopeChain()) {
243 m_activationRegister
= addVar();
244 emitInitLazyRegister(m_activationRegister
);
245 m_codeBlock
->setActivationRegister(m_activationRegister
->index());
248 m_symbolTable
->setCaptureStart(m_codeBlock
->m_numVars
);
250 if (functionBody
->usesArguments() || codeBlock
->usesEval() || m_shouldEmitDebugHooks
) { // May reify arguments object.
251 RegisterID
* unmodifiedArgumentsRegister
= addVar(); // Anonymous, so it can't be modified by user code.
252 RegisterID
* argumentsRegister
= addVar(propertyNames().arguments
, false); // Can be changed by assigning to 'arguments'.
254 // We can save a little space by hard-coding the knowledge that the two
255 // 'arguments' values are stored in consecutive registers, and storing
256 // only the index of the assignable one.
257 codeBlock
->setArgumentsRegister(argumentsRegister
->index());
258 ASSERT_UNUSED(unmodifiedArgumentsRegister
, unmodifiedArgumentsRegister
->index() == JSC::unmodifiedArgumentsRegister(codeBlock
->argumentsRegister()));
260 emitInitLazyRegister(argumentsRegister
);
261 emitInitLazyRegister(unmodifiedArgumentsRegister
);
// Strict mode creates the arguments object eagerly.
263 if (m_codeBlock
->isStrictMode()) {
264 emitOpcode(op_create_arguments
);
265 instructions().append(argumentsRegister
->index());
268 // The debugger currently retrieves the arguments object from an activation rather than pulling
269 // it from a call frame. In the long-term it should stop doing that (<rdar://problem/6911886>),
270 // but for now we force eager creation of the arguments object when debugging.
271 if (m_shouldEmitDebugHooks
) {
272 emitOpcode(op_create_arguments
);
273 instructions().append(argumentsRegister
->index());
277 bool shouldCaptureAllTheThings
= m_shouldEmitDebugHooks
|| codeBlock
->usesEval();
279 bool capturesAnyArgumentByName
= false;
280 Vector
<RegisterID
*, 0, UnsafeVectorOverflow
> capturedArguments
;
// Allocate dedicated vars for parameters that are captured by closures.
281 if (functionBody
->hasCapturedVariables() || shouldCaptureAllTheThings
) {
282 FunctionParameters
& parameters
= *functionBody
->parameters();
283 capturedArguments
.resize(parameters
.size());
284 for (size_t i
= 0; i
< parameters
.size(); ++i
) {
285 capturedArguments
[i
] = 0;
286 if (!functionBody
->captures(parameters
.at(i
)) && !shouldCaptureAllTheThings
)
288 capturesAnyArgumentByName
= true;
289 capturedArguments
[i
] = addVar();
// Non-strict code with captured named arguments needs the slow-argument
// map so the reified arguments object can alias captured registers.
293 if (capturesAnyArgumentByName
&& !codeBlock
->isStrictMode()) {
294 size_t parameterCount
= m_symbolTable
->parameterCount();
295 OwnArrayPtr
<SlowArgument
> slowArguments
= adoptArrayPtr(new SlowArgument
[parameterCount
]);
296 for (size_t i
= 0; i
< parameterCount
; ++i
) {
297 if (!capturedArguments
[i
]) {
298 ASSERT(slowArguments
[i
].status
== SlowArgument::Normal
);
299 slowArguments
[i
].index
= CallFrame::argumentOffset(i
);
302 slowArguments
[i
].status
= SlowArgument::Captured
;
303 slowArguments
[i
].index
= capturedArguments
[i
]->index();
305 m_symbolTable
->setSlowArguments(slowArguments
.release());
308 RegisterID
* calleeRegister
= resolveCallee(functionBody
); // May push to the scope chain and/or add a captured var.
310 const DeclarationStacks::FunctionStack
& functionStack
= functionBody
->functionStack();
311 const DeclarationStacks::VarStack
& varStack
= functionBody
->varStack();
313 // Captured variables and functions go first so that activations don't have
314 // to step over the non-captured locals to mark them.
315 m_hasCreatedActivation
= false;
316 if (functionBody
->hasCapturedVariables()) {
317 for (size_t i
= 0; i
< functionStack
.size(); ++i
) {
318 FunctionBodyNode
* function
= functionStack
[i
];
319 const Identifier
& ident
= function
->ident();
320 if (functionBody
->captures(ident
)) {
321 if (!m_hasCreatedActivation
) {
322 m_hasCreatedActivation
= true;
323 emitOpcode(op_create_activation
);
324 instructions().append(m_activationRegister
->index());
326 m_functions
.add(ident
.impl());
327 emitNewFunction(addVar(ident
, false), function
);
330 for (size_t i
= 0; i
< varStack
.size(); ++i
) {
331 const Identifier
& ident
= *varStack
[i
].first
;
332 if (functionBody
->captures(ident
))
333 addVar(ident
, varStack
[i
].second
& DeclarationStacks::IsConstant
);
336 bool canLazilyCreateFunctions
= !functionBody
->needsActivationForMoreThanVariables() && !m_shouldEmitDebugHooks
;
337 if (!canLazilyCreateFunctions
&& !m_hasCreatedActivation
) {
338 m_hasCreatedActivation
= true;
339 emitOpcode(op_create_activation
);
340 instructions().append(m_activationRegister
->index());
343 m_symbolTable
->setCaptureEnd(codeBlock
->m_numVars
);
// Non-captured function declarations: may be instantiated lazily.
345 m_firstLazyFunction
= codeBlock
->m_numVars
;
346 for (size_t i
= 0; i
< functionStack
.size(); ++i
) {
347 FunctionBodyNode
* function
= functionStack
[i
];
348 const Identifier
& ident
= function
->ident();
349 if (!functionBody
->captures(ident
)) {
350 m_functions
.add(ident
.impl());
351 RefPtr
<RegisterID
> reg
= addVar(ident
, false);
352 // Don't lazily create functions that override the name 'arguments'
353 // as this would complicate lazy instantiation of actual arguments.
354 if (!canLazilyCreateFunctions
|| ident
== propertyNames().arguments
)
355 emitNewFunction(reg
.get(), function
);
357 emitInitLazyRegister(reg
.get());
358 m_lazyFunctions
.set(reg
->index(), function
);
362 m_lastLazyFunction
= canLazilyCreateFunctions
? codeBlock
->m_numVars
: m_firstLazyFunction
;
363 for (size_t i
= 0; i
< varStack
.size(); ++i
) {
364 const Identifier
& ident
= *varStack
[i
].first
;
365 if (!functionBody
->captures(ident
))
366 addVar(ident
, varStack
[i
].second
& DeclarationStacks::IsConstant
);
369 if (shouldCaptureAllTheThings
)
370 m_symbolTable
->setCaptureEnd(codeBlock
->m_numVars
);
372 FunctionParameters
& parameters
= *functionBody
->parameters();
373 m_parameters
.grow(parameters
.size() + 1); // reserve space for "this"
375 // Add "this" as a parameter
376 int nextParameterIndex
= CallFrame::thisArgumentOffset();
377 m_thisRegister
.setIndex(nextParameterIndex
--);
378 m_codeBlock
->addParameter();
// Bind each declared parameter; captured parameters are copied from their
// argument slot into their captured register.
380 for (size_t i
= 0; i
< parameters
.size(); ++i
, --nextParameterIndex
) {
381 int index
= nextParameterIndex
;
382 if (capturedArguments
.size() && capturedArguments
[i
]) {
383 ASSERT((functionBody
->hasCapturedVariables() && functionBody
->captures(parameters
.at(i
))) || shouldCaptureAllTheThings
);
384 index
= capturedArguments
[i
]->index();
385 RegisterID
original(nextParameterIndex
);
386 emitMove(capturedArguments
[i
], &original
);
388 addParameter(parameters
.at(i
), index
);
392 // We declare the callee's name last because it should lose to a var, function, and/or parameter declaration.
393 addCallee(functionBody
, calleeRegister
);
395 if (isConstructor()) {
396 emitCreateThis(&m_thisRegister
);
397 } else if (!codeBlock
->isStrictMode() && (functionBody
->usesThis() || codeBlock
->usesEval() || m_shouldEmitDebugHooks
)) {
398 UnlinkedValueProfile profile
= emitProfiledOpcode(op_convert_this
);
399 instructions().append(kill(&m_thisRegister
));
400 instructions().append(profile
);
// Constructor for eval code. Eval always needs a full scope chain; function
// declarations become unlinked function decls and var names are handed to
// the UnlinkedEvalCodeBlock for hoisting at runtime.
// NOTE(review): the extraction dropped lines here (braces, blanks, and some
// initializers — embedded numbers jump 406 -> 408, 413 -> 415, etc.).
// Verify against upstream WebKit before editing.
404 BytecodeGenerator::BytecodeGenerator(VM
& vm
, JSScope
* scope
, EvalNode
* evalNode
, UnlinkedEvalCodeBlock
* codeBlock
, DebuggerMode debuggerMode
, ProfilerMode profilerMode
)
405 : m_shouldEmitDebugHooks(debuggerMode
== DebuggerOn
)
406 , m_shouldEmitProfileHooks(profilerMode
== ProfilerOn
)
407 , m_symbolTable(codeBlock
->symbolTable())
408 , m_scopeNode(evalNode
)
410 , m_codeBlock(vm
, codeBlock
)
411 , m_thisRegister(CallFrame::thisArgumentOffset())
412 , m_emptyValueRegister(0)
413 , m_globalObjectRegister(0)
415 , m_dynamicScopeDepth(0)
416 , m_codeType(EvalCode
)
417 , m_nextConstantOffset(0)
418 , m_globalConstantIndex(0)
419 , m_hasCreatedActivation(true)
420 , m_firstLazyFunction(0)
421 , m_lastLazyFunction(0)
422 , m_staticPropertyAnalyzer(&m_instructions
)
424 , m_lastOpcodeID(op_end
)
426 , m_lastOpcodePosition(0)
428 , m_stack(wtfThreadData().stack())
429 , m_usesExceptions(false)
430 , m_expressionTooDeep(false)
432 m_codeBlock
->setNeedsFullScopeChain(true);
434 m_symbolTable
->setUsesNonStrictEval(codeBlock
->usesEval() && !codeBlock
->isStrictMode());
435 m_codeBlock
->setNumParameters(1);
437 emitOpcode(op_enter
);
439 const DeclarationStacks::FunctionStack
& functionStack
= evalNode
->functionStack();
440 for (size_t i
= 0; i
< functionStack
.size(); ++i
)
441 m_codeBlock
->addFunctionDecl(makeFunction(functionStack
[i
]));
// Collect var names so the runtime can hoist them into the eval scope.
443 const DeclarationStacks::VarStack
& varStack
= evalNode
->varStack();
444 unsigned numVariables
= varStack
.size();
445 Vector
<Identifier
, 0, UnsafeVectorOverflow
> variables
;
446 variables
.reserveCapacity(numVariables
);
447 for (size_t i
= 0; i
< numVariables
; ++i
)
448 variables
.append(*varStack
[i
].first
);
449 codeBlock
->adoptVariables(variables
);
// Destructor. NOTE(review): the body is not visible in this extraction
// (presumably empty); verify against upstream WebKit.
453 BytecodeGenerator::~BytecodeGenerator()
// Plants an op_init_lazy_reg for |reg| so the register reads as "not yet
// materialized" until first use. NOTE(review): the function's tail
// (presumably "return reg;") was dropped by the extraction.
457 RegisterID
* BytecodeGenerator::emitInitLazyRegister(RegisterID
* reg
)
459 emitOpcode(op_init_lazy_reg
);
460 instructions().append(reg
->index());
// Decides where the callee value lives for named function expressions.
// If eval-in-non-strict-mode or debugging is in play, the callee's name is
// published via an op_push_name_scope instead; if the function captures its
// own name, the callee is moved into the captured register area.
// NOTE(review): lines were dropped here (embedded numbers jump 466 -> 469
// and 476 -> 480 — at least one early return and braces are missing).
// Verify against upstream WebKit.
464 RegisterID
* BytecodeGenerator::resolveCallee(FunctionBodyNode
* functionBodyNode
)
466 if (functionBodyNode
->ident().isNull() || !functionBodyNode
->functionNameIsInScope())
469 m_calleeRegister
.setIndex(JSStack::Callee
);
471 // If non-strict eval is in play, we use a separate object in the scope chain for the callee's name.
472 if ((m_codeBlock
->usesEval() && !m_codeBlock
->isStrictMode()) || m_shouldEmitDebugHooks
) {
473 emitOpcode(op_push_name_scope
);
474 instructions().append(addConstant(functionBodyNode
->ident()));
475 instructions().append(m_calleeRegister
.index());
476 instructions().append(ReadOnly
| DontDelete
);
480 if (!functionBodyNode
->captures(functionBodyNode
->ident()))
481 return &m_calleeRegister
;
483 // Move the callee into the captured section of the stack.
484 return emitMove(addVar(), &m_calleeRegister
);
// Binds the callee's own name as a read-only symbol-table entry, unless the
// name is absent/out of scope, or it was already published on a name scope
// (non-strict eval / debugging). NOTE(review): early-return lines were
// dropped by the extraction (embedded numbers jump 489 -> 492 -> 496).
487 void BytecodeGenerator::addCallee(FunctionBodyNode
* functionBodyNode
, RegisterID
* calleeRegister
)
489 if (functionBodyNode
->ident().isNull() || !functionBodyNode
->functionNameIsInScope())
492 // If non-strict eval is in play, we use a separate object in the scope chain for the callee's name.
493 if ((m_codeBlock
->usesEval() && !m_codeBlock
->isStrictMode()) || m_shouldEmitDebugHooks
)
496 ASSERT(calleeRegister
);
497 symbolTable().add(functionBodyNode
->ident().impl(), SymbolTableEntry(calleeRegister
->index(), ReadOnly
));
// Binds a declared parameter name to its argument register. Function
// declarations shadowing the name win, so the symbol table is only updated
// when no same-named function declaration exists; the calling convention
// still reserves a slot either way.
500 void BytecodeGenerator::addParameter(const Identifier
& ident
, int parameterIndex
)
502 // Parameters overwrite var declarations, but not function declarations.
503 StringImpl
* rep
= ident
.impl();
504 if (!m_functions
.contains(rep
)) {
505 symbolTable().set(rep
, parameterIndex
);
506 RegisterID
& parameter
= registerFor(parameterIndex
);
507 parameter
.setIndex(parameterIndex
);
510 // To maintain the calling convention, we have to allocate unique space for
511 // each parameter, even if the parameter doesn't make it into the symbol table.
512 m_codeBlock
->addParameter();
// Reports whether |ident| will resolve to the local 'arguments' object.
// NOTE(review): this extraction dropped the return statements and part of
// the logic (embedded numbers jump 517 -> 520 -> 523 -> 527, and the
// function's tail is missing). Verify against upstream WebKit.
515 bool BytecodeGenerator::willResolveToArguments(const Identifier
& ident
)
517 if (ident
!= propertyNames().arguments
)
520 if (!shouldOptimizeLocals())
523 SymbolTableEntry entry
= symbolTable().get(ident
.impl());
527 if (m_codeBlock
->usesArguments() && m_codeType
== FunctionCode
)
// Returns the register holding 'arguments', assuming willResolveToArguments
// already vouched for it (hence "unchecked").
533 RegisterID
* BytecodeGenerator::uncheckedRegisterForArguments()
535 ASSERT(willResolveToArguments(propertyNames().arguments
));
537 SymbolTableEntry entry
= symbolTable().get(propertyNames().arguments
.impl());
538 ASSERT(!entry
.isNull());
539 return &registerFor(entry
.getIndex());
// Materializes a lazily-created function if |reg| falls inside the lazy
// function register range. NOTE(review): the early-return and final return
// lines were dropped by the extraction (numbers jump 544 -> 546).
542 RegisterID
* BytecodeGenerator::createLazyRegisterIfNecessary(RegisterID
* reg
)
544 if (m_lastLazyFunction
<= reg
->index() || reg
->index() < m_firstLazyFunction
)
546 emitLazyNewFunction(reg
, m_lazyFunctions
.get(reg
->index()));
550 RegisterID
* BytecodeGenerator::newRegister()
552 m_calleeRegisters
.append(m_calleeRegisters
.size());
553 m_codeBlock
->m_numCalleeRegisters
= max
<int>(m_codeBlock
->m_numCalleeRegisters
, m_calleeRegisters
.size());
554 return &m_calleeRegisters
.last();
// Hands out a temporary register, first recycling any unreferenced
// registers at the tail of the register file. NOTE(review): the final
// "return result;" was dropped by the extraction (numbers stop at 564).
557 RegisterID
* BytecodeGenerator::newTemporary()
559 // Reclaim free register IDs.
560 while (m_calleeRegisters
.size() && !m_calleeRegisters
.last().refCount())
561 m_calleeRegisters
.removeLast();
563 RegisterID
* result
= newRegister();
564 result
->setTemporary();
568 LabelScopePtr
BytecodeGenerator::newLabelScope(LabelScope::Type type
, const Identifier
* name
)
570 // Reclaim free label scopes.
571 while (m_labelScopes
.size() && !m_labelScopes
.last().refCount())
572 m_labelScopes
.removeLast();
574 // Allocate new label scope.
575 LabelScope
scope(type
, name
, scopeDepth(), newLabel(), type
== LabelScope::Loop
? newLabel() : PassRefPtr
<Label
>()); // Only loops have continue targets.
576 m_labelScopes
.append(scope
);
577 return LabelScopePtr(&m_labelScopes
, m_labelScopes
.size() - 1);
580 PassRefPtr
<Label
> BytecodeGenerator::newLabel()
582 // Reclaim free label IDs.
583 while (m_labels
.size() && !m_labels
.last().refCount())
584 m_labels
.removeLast();
586 // Allocate new label ID.
587 m_labels
.append(this);
588 return &m_labels
.last();
// Binds |l0| to the current instruction offset, records it as a jump target,
// and disables peephole optimization across the label.
// NOTE(review): lines were dropped here (embedded numbers jump 600 -> 605;
// the early return and closing braces are missing, as is the final
// "return l0;"). Verify against upstream WebKit.
591 PassRefPtr
<Label
> BytecodeGenerator::emitLabel(Label
* l0
)
593 unsigned newLabelIndex
= instructions().size();
594 l0
->setLocation(newLabelIndex
);
596 if (m_codeBlock
->numberOfJumpTargets()) {
597 unsigned lastLabelIndex
= m_codeBlock
->lastJumpTarget();
598 ASSERT(lastLabelIndex
<= newLabelIndex
);
599 if (newLabelIndex
== lastLabelIndex
) {
600 // Peephole optimizations have already been disabled by emitting the last label
605 m_codeBlock
->addJumpTarget(newLabelIndex
);
607 // This disables peephole optimizations when an instruction is a jump target
608 m_lastOpcodeID
= op_end
;
// Appends |opcodeID| to the instruction stream and records it as the last
// emitted opcode for peephole tracking. The position assertion checks that
// the previous opcode occupied exactly its declared length.
// NOTE(review): lines 613-614 and 618 were dropped — likely the opening
// brace and an #ifndef NDEBUG guard around the assertion. Verify upstream.
612 void BytecodeGenerator::emitOpcode(OpcodeID opcodeID
)
615 size_t opcodePosition
= instructions().size();
616 ASSERT(opcodePosition
- m_lastOpcodePosition
== opcodeLength(m_lastOpcodeID
) || m_lastOpcodeID
== op_end
);
617 m_lastOpcodePosition
= opcodePosition
;
619 instructions().append(opcodeID
);
620 m_lastOpcodeID
= opcodeID
;
// Allocates a new array profile slot on the code block.
// NOTE(review): the #else/#endif branch of the ENABLE(VALUE_PROFILER)
// conditional was dropped by the extraction (numbers stop at 626).
623 UnlinkedArrayProfile
BytecodeGenerator::newArrayProfile()
625 #if ENABLE(VALUE_PROFILER)
626 return m_codeBlock
->addArrayProfile();
// Allocates a new array-allocation profile slot on the code block.
// NOTE(review): the #else/#endif branch of the ENABLE(VALUE_PROFILER)
// conditional was dropped by the extraction (numbers stop at 635).
632 UnlinkedArrayAllocationProfile
BytecodeGenerator::newArrayAllocationProfile()
634 #if ENABLE(VALUE_PROFILER)
635 return m_codeBlock
->addArrayAllocationProfile();
641 UnlinkedObjectAllocationProfile
BytecodeGenerator::newObjectAllocationProfile()
643 return m_codeBlock
->addObjectAllocationProfile();
// Emits |opcodeID| and pairs it with a value-profile slot when profiling is
// compiled in (otherwise the profile is 0).
// NOTE(review): the #else/#endif lines of the conditional and the final
// "return result;" were dropped by the extraction (numbers jump 649 -> 651,
// stop at 653). Verify against upstream WebKit.
646 UnlinkedValueProfile
BytecodeGenerator::emitProfiledOpcode(OpcodeID opcodeID
)
648 #if ENABLE(VALUE_PROFILER)
649 UnlinkedValueProfile result
= m_codeBlock
->addValueProfile();
651 UnlinkedValueProfile result
= 0;
653 emitOpcode(opcodeID
);
657 void BytecodeGenerator::emitLoopHint()
659 emitOpcode(op_loop_hint
);
662 void BytecodeGenerator::retrieveLastBinaryOp(int& dstIndex
, int& src1Index
, int& src2Index
)
664 ASSERT(instructions().size() >= 4);
665 size_t size
= instructions().size();
666 dstIndex
= instructions().at(size
- 3).u
.operand
;
667 src1Index
= instructions().at(size
- 2).u
.operand
;
668 src2Index
= instructions().at(size
- 1).u
.operand
;
671 void BytecodeGenerator::retrieveLastUnaryOp(int& dstIndex
, int& srcIndex
)
673 ASSERT(instructions().size() >= 3);
674 size_t size
= instructions().size();
675 dstIndex
= instructions().at(size
- 2).u
.operand
;
676 srcIndex
= instructions().at(size
- 1).u
.operand
;
679 void ALWAYS_INLINE
BytecodeGenerator::rewindBinaryOp()
681 ASSERT(instructions().size() >= 4);
682 instructions().shrink(instructions().size() - 4);
683 m_lastOpcodeID
= op_end
;
686 void ALWAYS_INLINE
BytecodeGenerator::rewindUnaryOp()
688 ASSERT(instructions().size() >= 3);
689 instructions().shrink(instructions().size() - 3);
690 m_lastOpcodeID
= op_end
;
// Emits an unconditional jump to |target|; the operand is bound through the
// label so backward/forward distances resolve correctly.
// NOTE(review): the extraction dropped line 696 (presumably the opcode
// emission) and the trailing "return target;". Verify against upstream.
693 PassRefPtr
<Label
> BytecodeGenerator::emitJump(Label
* target
)
695 size_t begin
= instructions().size();
697 instructions().append(target
->bind(begin
, instructions().size()));
// Emits a conditional jump taken when |cond| is truthy. When the previous
// opcode was a comparison (or eq_null/neq_null for forward jumps) whose
// destination is a dead temporary equal to |cond|, the comparison is fused
// into a single compare-and-jump opcode (op_jless, op_jlesseq, ...).
// NOTE(review): the extraction dropped many lines in each branch (the
// dstIndex/src1Index/src2Index declarations, the rewind calls, returns, and
// closing braces — embedded numbers jump 703 -> 708, 710 -> 713, 717 -> 720,
// etc.). Verify against upstream WebKit before editing.
701 PassRefPtr
<Label
> BytecodeGenerator::emitJumpIfTrue(RegisterID
* cond
, Label
* target
)
703 if (m_lastOpcodeID
== op_less
) {
708 retrieveLastBinaryOp(dstIndex
, src1Index
, src2Index
);
710 if (cond
->index() == dstIndex
&& cond
->isTemporary() && !cond
->refCount()) {
713 size_t begin
= instructions().size();
714 emitOpcode(op_jless
);
715 instructions().append(src1Index
);
716 instructions().append(src2Index
);
717 instructions().append(target
->bind(begin
, instructions().size()));
720 } else if (m_lastOpcodeID
== op_lesseq
) {
725 retrieveLastBinaryOp(dstIndex
, src1Index
, src2Index
);
727 if (cond
->index() == dstIndex
&& cond
->isTemporary() && !cond
->refCount()) {
730 size_t begin
= instructions().size();
731 emitOpcode(op_jlesseq
);
732 instructions().append(src1Index
);
733 instructions().append(src2Index
);
734 instructions().append(target
->bind(begin
, instructions().size()));
737 } else if (m_lastOpcodeID
== op_greater
) {
742 retrieveLastBinaryOp(dstIndex
, src1Index
, src2Index
);
744 if (cond
->index() == dstIndex
&& cond
->isTemporary() && !cond
->refCount()) {
747 size_t begin
= instructions().size();
748 emitOpcode(op_jgreater
);
749 instructions().append(src1Index
);
750 instructions().append(src2Index
);
751 instructions().append(target
->bind(begin
, instructions().size()));
754 } else if (m_lastOpcodeID
== op_greatereq
) {
759 retrieveLastBinaryOp(dstIndex
, src1Index
, src2Index
);
761 if (cond
->index() == dstIndex
&& cond
->isTemporary() && !cond
->refCount()) {
764 size_t begin
= instructions().size();
765 emitOpcode(op_jgreatereq
);
766 instructions().append(src1Index
);
767 instructions().append(src2Index
);
768 instructions().append(target
->bind(begin
, instructions().size()));
771 } else if (m_lastOpcodeID
== op_eq_null
&& target
->isForward()) {
775 retrieveLastUnaryOp(dstIndex
, srcIndex
);
777 if (cond
->index() == dstIndex
&& cond
->isTemporary() && !cond
->refCount()) {
780 size_t begin
= instructions().size();
781 emitOpcode(op_jeq_null
);
782 instructions().append(srcIndex
);
783 instructions().append(target
->bind(begin
, instructions().size()));
786 } else if (m_lastOpcodeID
== op_neq_null
&& target
->isForward()) {
790 retrieveLastUnaryOp(dstIndex
, srcIndex
);
792 if (cond
->index() == dstIndex
&& cond
->isTemporary() && !cond
->refCount()) {
795 size_t begin
= instructions().size();
796 emitOpcode(op_jneq_null
);
797 instructions().append(srcIndex
);
798 instructions().append(target
->bind(begin
, instructions().size()));
// Fallback: plain op_jtrue on the condition register.
803 size_t begin
= instructions().size();
805 emitOpcode(op_jtrue
);
806 instructions().append(cond
->index());
807 instructions().append(target
->bind(begin
, instructions().size()));
// Emits a conditional jump taken when |cond| is falsy. Mirrors
// emitJumpIfTrue: a preceding comparison (forward jumps only) or op_not
// whose destination is a dead temporary equal to |cond| is fused into the
// negated compare-and-jump form (op_jnless, op_jnlesseq, ...); note the
// eq_null/neq_null fusions emit the *opposite* null-jump opcode.
// NOTE(review): the extraction dropped lines in each branch (operand-index
// declarations, rewinds, returns, closing braces — embedded numbers jump
// 813 -> 818, 820 -> 823, 827 -> 830, etc.). Verify against upstream WebKit.
811 PassRefPtr
<Label
> BytecodeGenerator::emitJumpIfFalse(RegisterID
* cond
, Label
* target
)
813 if (m_lastOpcodeID
== op_less
&& target
->isForward()) {
818 retrieveLastBinaryOp(dstIndex
, src1Index
, src2Index
);
820 if (cond
->index() == dstIndex
&& cond
->isTemporary() && !cond
->refCount()) {
823 size_t begin
= instructions().size();
824 emitOpcode(op_jnless
);
825 instructions().append(src1Index
);
826 instructions().append(src2Index
);
827 instructions().append(target
->bind(begin
, instructions().size()));
830 } else if (m_lastOpcodeID
== op_lesseq
&& target
->isForward()) {
835 retrieveLastBinaryOp(dstIndex
, src1Index
, src2Index
);
837 if (cond
->index() == dstIndex
&& cond
->isTemporary() && !cond
->refCount()) {
840 size_t begin
= instructions().size();
841 emitOpcode(op_jnlesseq
);
842 instructions().append(src1Index
);
843 instructions().append(src2Index
);
844 instructions().append(target
->bind(begin
, instructions().size()));
847 } else if (m_lastOpcodeID
== op_greater
&& target
->isForward()) {
852 retrieveLastBinaryOp(dstIndex
, src1Index
, src2Index
);
854 if (cond
->index() == dstIndex
&& cond
->isTemporary() && !cond
->refCount()) {
857 size_t begin
= instructions().size();
858 emitOpcode(op_jngreater
);
859 instructions().append(src1Index
);
860 instructions().append(src2Index
);
861 instructions().append(target
->bind(begin
, instructions().size()));
864 } else if (m_lastOpcodeID
== op_greatereq
&& target
->isForward()) {
869 retrieveLastBinaryOp(dstIndex
, src1Index
, src2Index
);
871 if (cond
->index() == dstIndex
&& cond
->isTemporary() && !cond
->refCount()) {
874 size_t begin
= instructions().size();
875 emitOpcode(op_jngreatereq
);
876 instructions().append(src1Index
);
877 instructions().append(src2Index
);
878 instructions().append(target
->bind(begin
, instructions().size()));
881 } else if (m_lastOpcodeID
== op_not
) {
885 retrieveLastUnaryOp(dstIndex
, srcIndex
);
887 if (cond
->index() == dstIndex
&& cond
->isTemporary() && !cond
->refCount()) {
// jump-if-false of a negation == jump-if-true of the operand.
890 size_t begin
= instructions().size();
891 emitOpcode(op_jtrue
);
892 instructions().append(srcIndex
);
893 instructions().append(target
->bind(begin
, instructions().size()));
896 } else if (m_lastOpcodeID
== op_eq_null
&& target
->isForward()) {
900 retrieveLastUnaryOp(dstIndex
, srcIndex
);
902 if (cond
->index() == dstIndex
&& cond
->isTemporary() && !cond
->refCount()) {
905 size_t begin
= instructions().size();
906 emitOpcode(op_jneq_null
);
907 instructions().append(srcIndex
);
908 instructions().append(target
->bind(begin
, instructions().size()));
911 } else if (m_lastOpcodeID
== op_neq_null
&& target
->isForward()) {
915 retrieveLastUnaryOp(dstIndex
, srcIndex
);
917 if (cond
->index() == dstIndex
&& cond
->isTemporary() && !cond
->refCount()) {
920 size_t begin
= instructions().size();
921 emitOpcode(op_jeq_null
);
922 instructions().append(srcIndex
);
923 instructions().append(target
->bind(begin
, instructions().size()));
// Fallback: plain op_jfalse on the condition register.
928 size_t begin
= instructions().size();
929 emitOpcode(op_jfalse
);
930 instructions().append(cond
->index());
931 instructions().append(target
->bind(begin
, instructions().size()));
// Emits op_jneq_ptr jumping to |target| when |cond| is not the well-known
// Function.prototype.call. NOTE(review): the trailing "return target;" was
// dropped by the extraction (numbers stop at 942).
935 PassRefPtr
<Label
> BytecodeGenerator::emitJumpIfNotFunctionCall(RegisterID
* cond
, Label
* target
)
937 size_t begin
= instructions().size();
939 emitOpcode(op_jneq_ptr
);
940 instructions().append(cond
->index());
941 instructions().append(Special::CallFunction
);
942 instructions().append(target
->bind(begin
, instructions().size()));
// Emits op_jneq_ptr jumping to |target| when |cond| is not the well-known
// Function.prototype.apply. NOTE(review): the trailing "return target;" was
// dropped by the extraction (numbers stop at 953).
946 PassRefPtr
<Label
> BytecodeGenerator::emitJumpIfNotFunctionApply(RegisterID
* cond
, Label
* target
)
948 size_t begin
= instructions().size();
950 emitOpcode(op_jneq_ptr
);
951 instructions().append(cond
->index());
952 instructions().append(Special::ApplyFunction
);
953 instructions().append(target
->bind(begin
, instructions().size()));
957 unsigned BytecodeGenerator::addConstant(const Identifier
& ident
)
959 StringImpl
* rep
= ident
.impl();
960 IdentifierMap::AddResult result
= m_identifierMap
.add(rep
, m_codeBlock
->numberOfIdentifiers());
961 if (result
.isNewEntry
)
962 m_codeBlock
->addIdentifier(Identifier(m_vm
, rep
));
964 return result
.iterator
->value
;
967 // We can't hash JSValue(), so we use a dedicated data member to cache it.
968 RegisterID
* BytecodeGenerator::addConstantEmptyValue()
970 if (!m_emptyValueRegister
) {
971 int index
= m_nextConstantOffset
;
972 m_constantPoolRegisters
.append(FirstConstantRegisterIndex
+ m_nextConstantOffset
);
973 ++m_nextConstantOffset
;
974 m_codeBlock
->addConstant(JSValue());
975 m_emptyValueRegister
= &m_constantPoolRegisters
[index
];
978 return m_emptyValueRegister
;
// Returns the constant-pool register for |v|, deduplicating through
// m_jsValueMap and appending a new pool entry on first sight.
// NOTE(review): the extraction dropped the condition that guards the
// addConstantEmptyValue() early return (embedded numbers jump 981 -> 984;
// presumably "if (!v)"). Verify against upstream WebKit.
981 RegisterID
* BytecodeGenerator::addConstantValue(JSValue v
)
984 return addConstantEmptyValue();
986 int index
= m_nextConstantOffset
;
987 JSValueMap::AddResult result
= m_jsValueMap
.add(JSValue::encode(v
), m_nextConstantOffset
);
988 if (result
.isNewEntry
) {
989 m_constantPoolRegisters
.append(FirstConstantRegisterIndex
+ m_nextConstantOffset
);
990 ++m_nextConstantOffset
;
991 m_codeBlock
->addConstant(v
);
993 index
= result
.iterator
->value
;
994 return &m_constantPoolRegisters
[index
];
997 unsigned BytecodeGenerator::addRegExp(RegExp
* r
)
999 return m_codeBlock
->addRegExp(r
);
// Emits a register-to-register move and informs the static property
// analyzer of the aliasing. NOTE(review): the extraction dropped lines
// 1005-1006 (presumably the op_mov emission) and the final "return dst;".
// Verify against upstream WebKit.
1002 RegisterID
* BytecodeGenerator::emitMove(RegisterID
* dst
, RegisterID
* src
)
1004 m_staticPropertyAnalyzer
.mov(dst
->index(), src
->index());
1007 instructions().append(dst
->index());
1008 instructions().append(src
->index());
// Emits a generic unary op: [opcode, dst, src]. NOTE(review): the trailing
// "return dst;" was dropped by the extraction (numbers stop at 1016).
1012 RegisterID
* BytecodeGenerator::emitUnaryOp(OpcodeID opcodeID
, RegisterID
* dst
, RegisterID
* src
)
1014 emitOpcode(opcodeID
);
1015 instructions().append(dst
->index());
1016 instructions().append(src
->index());
1020 RegisterID
* BytecodeGenerator::emitInc(RegisterID
* srcDst
)
1023 instructions().append(srcDst
->index());
1027 RegisterID
* BytecodeGenerator::emitDec(RegisterID
* srcDst
)
1030 instructions().append(srcDst
->index());
1034 RegisterID
* BytecodeGenerator::emitBinaryOp(OpcodeID opcodeID
, RegisterID
* dst
, RegisterID
* src1
, RegisterID
* src2
, OperandTypes types
)
1036 emitOpcode(opcodeID
);
1037 instructions().append(dst
->index());
1038 instructions().append(src1
->index());
1039 instructions().append(src2
->index());
1041 if (opcodeID
== op_bitor
|| opcodeID
== op_bitand
|| opcodeID
== op_bitxor
||
1042 opcodeID
== op_add
|| opcodeID
== op_mul
|| opcodeID
== op_sub
|| opcodeID
== op_div
)
1043 instructions().append(types
.toInt());
1048 RegisterID
* BytecodeGenerator::emitEqualityOp(OpcodeID opcodeID
, RegisterID
* dst
, RegisterID
* src1
, RegisterID
* src2
)
1050 if (m_lastOpcodeID
== op_typeof
) {
1054 retrieveLastUnaryOp(dstIndex
, srcIndex
);
1056 if (src1
->index() == dstIndex
1057 && src1
->isTemporary()
1058 && m_codeBlock
->isConstantRegisterIndex(src2
->index())
1059 && m_codeBlock
->constantRegister(src2
->index()).get().isString()) {
1060 const String
& value
= asString(m_codeBlock
->constantRegister(src2
->index()).get())->tryGetValue();
1061 if (value
== "undefined") {
1063 emitOpcode(op_is_undefined
);
1064 instructions().append(dst
->index());
1065 instructions().append(srcIndex
);
1068 if (value
== "boolean") {
1070 emitOpcode(op_is_boolean
);
1071 instructions().append(dst
->index());
1072 instructions().append(srcIndex
);
1075 if (value
== "number") {
1077 emitOpcode(op_is_number
);
1078 instructions().append(dst
->index());
1079 instructions().append(srcIndex
);
1082 if (value
== "string") {
1084 emitOpcode(op_is_string
);
1085 instructions().append(dst
->index());
1086 instructions().append(srcIndex
);
1089 if (value
== "object") {
1091 emitOpcode(op_is_object
);
1092 instructions().append(dst
->index());
1093 instructions().append(srcIndex
);
1096 if (value
== "function") {
1098 emitOpcode(op_is_function
);
1099 instructions().append(dst
->index());
1100 instructions().append(srcIndex
);
1106 emitOpcode(opcodeID
);
1107 instructions().append(dst
->index());
1108 instructions().append(src1
->index());
1109 instructions().append(src2
->index());
1113 RegisterID
* BytecodeGenerator::emitLoad(RegisterID
* dst
, bool b
)
1115 return emitLoad(dst
, jsBoolean(b
));
1118 RegisterID
* BytecodeGenerator::emitLoad(RegisterID
* dst
, double number
)
1120 // FIXME: Our hash tables won't hold infinity, so we make a new JSValue each time.
1121 // Later we can do the extra work to handle that like the other cases. They also don't
1122 // work correctly with NaN as a key.
1123 if (std::isnan(number
) || number
== HashTraits
<double>::emptyValue() || HashTraits
<double>::isDeletedValue(number
))
1124 return emitLoad(dst
, jsNumber(number
));
1125 JSValue
& valueInMap
= m_numberMap
.add(number
, JSValue()).iterator
->value
;
1127 valueInMap
= jsNumber(number
);
1128 return emitLoad(dst
, valueInMap
);
1131 RegisterID
* BytecodeGenerator::emitLoad(RegisterID
* dst
, const Identifier
& identifier
)
1133 JSString
*& stringInMap
= m_stringMap
.add(identifier
.impl(), 0).iterator
->value
;
1135 stringInMap
= jsOwnedString(vm(), identifier
.string());
1136 return emitLoad(dst
, JSValue(stringInMap
));
1139 RegisterID
* BytecodeGenerator::emitLoad(RegisterID
* dst
, JSValue v
)
1141 RegisterID
* constantID
= addConstantValue(v
);
1143 return emitMove(dst
, constantID
);
1147 RegisterID
* BytecodeGenerator::emitLoadGlobalObject(RegisterID
* dst
)
1149 if (!m_globalObjectRegister
) {
1150 int index
= m_nextConstantOffset
;
1151 m_constantPoolRegisters
.append(FirstConstantRegisterIndex
+ m_nextConstantOffset
);
1152 ++m_nextConstantOffset
;
1153 m_codeBlock
->addConstant(JSValue());
1154 m_globalObjectRegister
= &m_constantPoolRegisters
[index
];
1155 m_codeBlock
->setGlobalObjectRegister(index
);
1158 emitMove(dst
, m_globalObjectRegister
);
1159 return m_globalObjectRegister
;
// Statically resolves `property` against the lexical environment when
// possible: `this`, register-allocated locals, then a walk of the static
// scope chain; falls back to a fully dynamic resolve otherwise.
// NOTE(review): this text was mechanically mangled during extraction — some
// physical lines (braces, at least one interior guard) are missing; the
// statements below are preserved byte-for-byte. Verify against upstream
// before rebuilding.
ResolveResult
BytecodeGenerator::resolve(const Identifier
& property
)
1164 if (property
== propertyNames().thisIdentifier
)
1165 return ResolveResult::registerResolve(thisRegister(), ResolveResult::ReadOnlyFlag
);
1167 // Check if the property should be allocated in a register.
1168 if (m_codeType
!= GlobalCode
&& shouldOptimizeLocals() && m_symbolTable
) {
1169 SymbolTableEntry entry
= symbolTable().get(property
.impl());
1170 if (!entry
.isNull()) {
// Touching `arguments` forces materialization of the arguments object.
1171 if (property
== propertyNames().arguments
)
1172 createArgumentsIfNecessary();
1173 unsigned flags
= entry
.isReadOnly() ? ResolveResult::ReadOnlyFlag
: 0;
// NOTE(review): "®isterFor" below is a mis-encoding of "&registerFor".
1174 RegisterID
* local
= createLazyRegisterIfNecessary(®isterFor(entry
.getIndex()));
1175 return ResolveResult::registerResolve(local
, flags
);
1178 // Cases where we cannot statically optimize the lookup.
1179 if (property
== propertyNames().arguments
|| !canOptimizeNonLocals())
1180 return ResolveResult::dynamicResolve();
1182 if (!m_scope
|| m_codeType
!= FunctionCode
|| m_shouldEmitDebugHooks
)
1183 return ResolveResult::dynamicResolve();
// Walk the scope chain outward; depth starts past the activation when the
// code block needs a full scope chain.
1185 ScopeChainIterator iter
= m_scope
->begin();
1186 ScopeChainIterator end
= m_scope
->end();
1187 size_t depth
= m_codeBlock
->needsFullScopeChain();
1189 for (; iter
!= end
; ++iter
, ++depth
) {
1190 JSObject
* currentScope
= iter
.get();
// Any non-static scope (e.g. `with`) defeats static resolution.
1191 if (!currentScope
->isStaticScopeObject())
1192 return ResolveResult::dynamicResolve();
1194 JSSymbolTableObject
* currentVariableObject
= jsCast
<JSSymbolTableObject
*>(currentScope
);
1195 SymbolTableEntry entry
= currentVariableObject
->symbolTable()->get(property
.impl());
1197 // Found the property
1198 if (!entry
.isNull()) {
1199 if (entry
.isReadOnly())
1200 flags
|= ResolveResult::ReadOnlyFlag
;
// NOTE(review): the condition guarding this early dynamic bail-out (original
// line 1201) was lost in extraction — confirm against upstream.
1202 return ResolveResult::dynamicResolve();
1203 #if !ASSERT_DISABLED
1204 if (JSActivation
* activation
= jsDynamicCast
<JSActivation
*>(currentVariableObject
))
1205 ASSERT(activation
->isValid(entry
));
1207 return ResolveResult::lexicalResolve(entry
.getIndex(), depth
, flags
);
// Not found here: a dynamic scope terminates the walk; a scope that merely
// requires dynamic checks taints subsequent results with DynamicFlag.
1209 bool scopeRequiresDynamicChecks
= false;
1210 if (currentVariableObject
->isDynamicScope(scopeRequiresDynamicChecks
))
1212 if (scopeRequiresDynamicChecks
)
1213 flags
|= ResolveResult::DynamicFlag
;
1216 return ResolveResult::dynamicResolve();
1219 ResolveResult
BytecodeGenerator::resolveConstDecl(const Identifier
& property
)
1221 // Register-allocated const declarations.
1222 if (m_codeType
== FunctionCode
&& m_symbolTable
) {
1223 SymbolTableEntry entry
= symbolTable().get(property
.impl());
1224 if (!entry
.isNull()) {
1225 unsigned flags
= entry
.isReadOnly() ? ResolveResult::ReadOnlyFlag
: 0;
1226 RegisterID
* local
= createLazyRegisterIfNecessary(®isterFor(entry
.getIndex()));
1227 return ResolveResult::registerResolve(local
, flags
);
1231 return ResolveResult::dynamicResolve();
1234 void BytecodeGenerator::emitCheckHasInstance(RegisterID
* dst
, RegisterID
* value
, RegisterID
* base
, Label
* target
)
1236 size_t begin
= instructions().size();
1237 emitOpcode(op_check_has_instance
);
1238 instructions().append(dst
->index());
1239 instructions().append(value
->index());
1240 instructions().append(base
->index());
1241 instructions().append(target
->bind(begin
, instructions().size()));
1244 RegisterID
* BytecodeGenerator::emitInstanceOf(RegisterID
* dst
, RegisterID
* value
, RegisterID
* basePrototype
)
1246 emitOpcode(op_instanceof
);
1247 instructions().append(dst
->index());
1248 instructions().append(value
->index());
1249 instructions().append(basePrototype
->index());
1253 bool BytecodeGenerator::shouldAvoidResolveGlobal()
1255 return !m_labelScopes
.size();
1258 RegisterID
* BytecodeGenerator::emitResolve(RegisterID
* dst
, const ResolveResult
& resolveResult
, const Identifier
& property
)
1261 if (resolveResult
.isStatic())
1262 return emitGetStaticVar(dst
, resolveResult
, property
);
1264 UnlinkedValueProfile profile
= emitProfiledOpcode(op_resolve
);
1265 instructions().append(kill(dst
));
1266 instructions().append(addConstant(property
));
1267 instructions().append(getResolveOperations(property
));
1268 instructions().append(profile
);
1272 RegisterID
* BytecodeGenerator::emitResolveBase(RegisterID
* dst
, const ResolveResult
& resolveResult
, const Identifier
& property
)
1274 if (!resolveResult
.isDynamic()) {
1275 // Global object is the base
1276 return emitLoadGlobalObject(dst
);
1279 // We can't optimise at all :-(
1280 UnlinkedValueProfile profile
= emitProfiledOpcode(op_resolve_base
);
1281 instructions().append(kill(dst
));
1282 instructions().append(addConstant(property
));
1283 instructions().append(false);
1284 instructions().append(getResolveBaseOperations(property
));
1285 instructions().append(0);
1286 instructions().append(profile
);
1290 RegisterID
* BytecodeGenerator::emitResolveBaseForPut(RegisterID
* dst
, const ResolveResult
&, const Identifier
& property
, NonlocalResolveInfo
& verifier
)
1292 // We can't optimise at all :-(
1293 UnlinkedValueProfile profile
= emitProfiledOpcode(op_resolve_base
);
1294 instructions().append(kill(dst
));
1295 instructions().append(addConstant(property
));
1296 instructions().append(m_codeBlock
->isStrictMode());
1297 uint32_t putToBaseIndex
= 0;
1298 instructions().append(getResolveBaseForPutOperations(property
, putToBaseIndex
));
1299 verifier
.resolved(putToBaseIndex
);
1300 instructions().append(putToBaseIndex
);
1301 instructions().append(profile
);
1305 RegisterID
* BytecodeGenerator::emitResolveWithBaseForPut(RegisterID
* baseDst
, RegisterID
* propDst
, const ResolveResult
& resolveResult
, const Identifier
& property
, NonlocalResolveInfo
& verifier
)
1307 ASSERT_UNUSED(resolveResult
, !resolveResult
.isStatic());
1308 UnlinkedValueProfile profile
= emitProfiledOpcode(op_resolve_with_base
);
1309 instructions().append(kill(baseDst
));
1310 instructions().append(propDst
->index());
1311 instructions().append(addConstant(property
));
1312 uint32_t putToBaseIndex
= 0;
1313 instructions().append(getResolveWithBaseForPutOperations(property
, putToBaseIndex
));
1314 verifier
.resolved(putToBaseIndex
);
1315 instructions().append(putToBaseIndex
);
1316 instructions().append(profile
);
1320 RegisterID
* BytecodeGenerator::emitResolveWithThis(RegisterID
* baseDst
, RegisterID
* propDst
, const ResolveResult
& resolveResult
, const Identifier
& property
)
1322 if (resolveResult
.isStatic()) {
1323 emitLoad(baseDst
, jsUndefined());
1324 emitGetStaticVar(propDst
, resolveResult
, property
);
1328 UnlinkedValueProfile profile
= emitProfiledOpcode(op_resolve_with_this
);
1329 instructions().append(kill(baseDst
));
1330 instructions().append(propDst
->index());
1331 instructions().append(addConstant(property
));
1332 instructions().append(getResolveWithThisOperations(property
));
1333 instructions().append(profile
);
1337 RegisterID
* BytecodeGenerator::emitGetStaticVar(RegisterID
* dst
, const ResolveResult
& resolveResult
, const Identifier
&)
1339 ASSERT(m_codeType
== FunctionCode
);
1340 switch (resolveResult
.type()) {
1341 case ResolveResult::Register
:
1342 case ResolveResult::ReadOnlyRegister
:
1343 if (dst
== ignoredResult())
1345 return moveToDestinationIfNeeded(dst
, resolveResult
.local());
1347 case ResolveResult::Lexical
:
1348 case ResolveResult::ReadOnlyLexical
: {
1349 UnlinkedValueProfile profile
= emitProfiledOpcode(op_get_scoped_var
);
1350 instructions().append(dst
->index());
1351 instructions().append(resolveResult
.index());
1352 instructions().append(resolveResult
.depth());
1353 instructions().append(profile
);
1358 RELEASE_ASSERT_NOT_REACHED();
1363 RegisterID
* BytecodeGenerator::emitInitGlobalConst(const Identifier
& identifier
, RegisterID
* value
)
1365 ASSERT(m_codeType
== GlobalCode
);
1366 emitOpcode(op_init_global_const_nop
);
1367 instructions().append(0);
1368 instructions().append(value
->index());
1369 instructions().append(0);
1370 instructions().append(addConstant(identifier
));
1374 RegisterID
* BytecodeGenerator::emitPutStaticVar(const ResolveResult
& resolveResult
, const Identifier
&, RegisterID
* value
)
1376 ASSERT(m_codeType
== FunctionCode
);
1377 switch (resolveResult
.type()) {
1378 case ResolveResult::Register
:
1379 case ResolveResult::ReadOnlyRegister
:
1380 return moveToDestinationIfNeeded(resolveResult
.local(), value
);
1382 case ResolveResult::Lexical
:
1383 case ResolveResult::ReadOnlyLexical
:
1384 emitOpcode(op_put_scoped_var
);
1385 instructions().append(resolveResult
.index());
1386 instructions().append(resolveResult
.depth());
1387 instructions().append(value
->index());
1391 RELEASE_ASSERT_NOT_REACHED();
1396 RegisterID
* BytecodeGenerator::emitGetById(RegisterID
* dst
, RegisterID
* base
, const Identifier
& property
)
1398 m_codeBlock
->addPropertyAccessInstruction(instructions().size());
1400 UnlinkedValueProfile profile
= emitProfiledOpcode(op_get_by_id
);
1401 instructions().append(kill(dst
));
1402 instructions().append(base
->index());
1403 instructions().append(addConstant(property
));
1404 instructions().append(0);
1405 instructions().append(0);
1406 instructions().append(0);
1407 instructions().append(0);
1408 instructions().append(profile
);
1412 RegisterID
* BytecodeGenerator::emitGetArgumentsLength(RegisterID
* dst
, RegisterID
* base
)
1414 emitOpcode(op_get_arguments_length
);
1415 instructions().append(dst
->index());
1416 ASSERT(base
->index() == m_codeBlock
->argumentsRegister());
1417 instructions().append(base
->index());
1418 instructions().append(addConstant(propertyNames().length
));
1422 RegisterID
* BytecodeGenerator::emitPutById(RegisterID
* base
, const Identifier
& property
, RegisterID
* value
)
1424 unsigned propertyIndex
= addConstant(property
);
1426 m_staticPropertyAnalyzer
.putById(base
->index(), propertyIndex
);
1428 m_codeBlock
->addPropertyAccessInstruction(instructions().size());
1430 emitOpcode(op_put_by_id
);
1431 instructions().append(base
->index());
1432 instructions().append(propertyIndex
);
1433 instructions().append(value
->index());
1434 instructions().append(0);
1435 instructions().append(0);
1436 instructions().append(0);
1437 instructions().append(0);
1438 instructions().append(0);
1442 RegisterID
* BytecodeGenerator::emitPutToBase(RegisterID
* base
, const Identifier
& property
, RegisterID
* value
, NonlocalResolveInfo
& resolveInfo
)
1444 emitOpcode(op_put_to_base
);
1445 instructions().append(base
->index());
1446 instructions().append(addConstant(property
));
1447 instructions().append(value
->index());
1448 instructions().append(resolveInfo
.put());
1452 RegisterID
* BytecodeGenerator::emitDirectPutById(RegisterID
* base
, const Identifier
& property
, RegisterID
* value
)
1454 unsigned propertyIndex
= addConstant(property
);
1456 m_staticPropertyAnalyzer
.putById(base
->index(), propertyIndex
);
1458 m_codeBlock
->addPropertyAccessInstruction(instructions().size());
1460 emitOpcode(op_put_by_id
);
1461 instructions().append(base
->index());
1462 instructions().append(propertyIndex
);
1463 instructions().append(value
->index());
1464 instructions().append(0);
1465 instructions().append(0);
1466 instructions().append(0);
1467 instructions().append(0);
1468 instructions().append(
1469 property
!= m_vm
->propertyNames
->underscoreProto
1470 && PropertyName(property
).asIndex() == PropertyName::NotAnIndex
);
1474 void BytecodeGenerator::emitPutGetterSetter(RegisterID
* base
, const Identifier
& property
, RegisterID
* getter
, RegisterID
* setter
)
1476 unsigned propertyIndex
= addConstant(property
);
1478 m_staticPropertyAnalyzer
.putById(base
->index(), propertyIndex
);
1480 emitOpcode(op_put_getter_setter
);
1481 instructions().append(base
->index());
1482 instructions().append(propertyIndex
);
1483 instructions().append(getter
->index());
1484 instructions().append(setter
->index());
1487 RegisterID
* BytecodeGenerator::emitDeleteById(RegisterID
* dst
, RegisterID
* base
, const Identifier
& property
)
1489 emitOpcode(op_del_by_id
);
1490 instructions().append(dst
->index());
1491 instructions().append(base
->index());
1492 instructions().append(addConstant(property
));
1496 RegisterID
* BytecodeGenerator::emitGetArgumentByVal(RegisterID
* dst
, RegisterID
* base
, RegisterID
* property
)
1498 UnlinkedArrayProfile arrayProfile
= newArrayProfile();
1499 UnlinkedValueProfile profile
= emitProfiledOpcode(op_get_argument_by_val
);
1500 instructions().append(kill(dst
));
1501 ASSERT(base
->index() == m_codeBlock
->argumentsRegister());
1502 instructions().append(base
->index());
1503 instructions().append(property
->index());
1504 instructions().append(arrayProfile
);
1505 instructions().append(profile
);
1509 RegisterID
* BytecodeGenerator::emitGetByVal(RegisterID
* dst
, RegisterID
* base
, RegisterID
* property
)
1511 for (size_t i
= m_forInContextStack
.size(); i
> 0; i
--) {
1512 ForInContext
& context
= m_forInContextStack
[i
- 1];
1513 if (context
.propertyRegister
== property
) {
1514 emitOpcode(op_get_by_pname
);
1515 instructions().append(dst
->index());
1516 instructions().append(base
->index());
1517 instructions().append(property
->index());
1518 instructions().append(context
.expectedSubscriptRegister
->index());
1519 instructions().append(context
.iterRegister
->index());
1520 instructions().append(context
.indexRegister
->index());
1524 UnlinkedArrayProfile arrayProfile
= newArrayProfile();
1525 UnlinkedValueProfile profile
= emitProfiledOpcode(op_get_by_val
);
1526 instructions().append(kill(dst
));
1527 instructions().append(base
->index());
1528 instructions().append(property
->index());
1529 instructions().append(arrayProfile
);
1530 instructions().append(profile
);
1534 RegisterID
* BytecodeGenerator::emitPutByVal(RegisterID
* base
, RegisterID
* property
, RegisterID
* value
)
1536 UnlinkedArrayProfile arrayProfile
= newArrayProfile();
1537 emitOpcode(op_put_by_val
);
1538 instructions().append(base
->index());
1539 instructions().append(property
->index());
1540 instructions().append(value
->index());
1541 instructions().append(arrayProfile
);
1545 RegisterID
* BytecodeGenerator::emitDeleteByVal(RegisterID
* dst
, RegisterID
* base
, RegisterID
* property
)
1547 emitOpcode(op_del_by_val
);
1548 instructions().append(dst
->index());
1549 instructions().append(base
->index());
1550 instructions().append(property
->index());
1554 RegisterID
* BytecodeGenerator::emitPutByIndex(RegisterID
* base
, unsigned index
, RegisterID
* value
)
1556 emitOpcode(op_put_by_index
);
1557 instructions().append(base
->index());
1558 instructions().append(index
);
1559 instructions().append(value
->index());
1563 RegisterID
* BytecodeGenerator::emitCreateThis(RegisterID
* dst
)
1565 RefPtr
<RegisterID
> func
= newTemporary();
1567 UnlinkedValueProfile profile
= emitProfiledOpcode(op_get_callee
);
1568 instructions().append(func
->index());
1569 instructions().append(profile
);
1571 size_t begin
= instructions().size();
1572 m_staticPropertyAnalyzer
.createThis(m_thisRegister
.index(), begin
+ 3);
1574 emitOpcode(op_create_this
);
1575 instructions().append(m_thisRegister
.index());
1576 instructions().append(func
->index());
1577 instructions().append(0);
1581 RegisterID
* BytecodeGenerator::emitNewObject(RegisterID
* dst
)
1583 size_t begin
= instructions().size();
1584 m_staticPropertyAnalyzer
.newObject(dst
->index(), begin
+ 2);
1586 emitOpcode(op_new_object
);
1587 instructions().append(dst
->index());
1588 instructions().append(0);
1589 instructions().append(newObjectAllocationProfile());
1593 unsigned BytecodeGenerator::addConstantBuffer(unsigned length
)
1595 return m_codeBlock
->addConstantBuffer(length
);
1598 JSString
* BytecodeGenerator::addStringConstant(const Identifier
& identifier
)
1600 JSString
*& stringInMap
= m_stringMap
.add(identifier
.impl(), 0).iterator
->value
;
1602 stringInMap
= jsString(vm(), identifier
.string());
1603 addConstantValue(stringInMap
);
// Emits array-literal construction. If every element is a constant the
// values are packed into a constant buffer and op_new_array_buffer is used;
// otherwise elements are evaluated into a sequential run of temporaries and
// op_new_array is emitted.
// NOTE(review): this text was mechanically mangled during extraction —
// several physical lines (braces, loop guards, `#endif`s, an `index`
// declaration) are missing; the statements below are preserved byte-for-byte.
// Verify against upstream before rebuilding.
RegisterID
* BytecodeGenerator::emitNewArray(RegisterID
* dst
, ElementNode
* elements
, unsigned length
)
1610 #if !ASSERT_DISABLED
1611 unsigned checkLength
= 0;
1613 bool hadVariableExpression
= false;
// First pass: detect any non-constant element (and count constants for the
// debug-only length check).
1615 for (ElementNode
* n
= elements
; n
; n
= n
->next()) {
1616 if (!n
->value()->isConstant()) {
1617 hadVariableExpression
= true;
1622 #if !ASSERT_DISABLED
// All-constant fast path: materialize a constant buffer.
1626 if (!hadVariableExpression
) {
1627 ASSERT(length
== checkLength
);
1628 unsigned constantBufferIndex
= addConstantBuffer(length
);
1629 JSValue
* constantBuffer
= m_codeBlock
->constantBuffer(constantBufferIndex
).data();
// NOTE(review): the declaration of `index` (original line 1630) was lost in
// extraction.
1631 for (ElementNode
* n
= elements
; index
< length
; n
= n
->next()) {
1632 ASSERT(n
->value()->isConstant());
1633 constantBuffer
[index
++] = static_cast<ConstantNode
*>(n
->value())->jsValue(*this);
1635 emitOpcode(op_new_array_buffer
);
1636 instructions().append(dst
->index());
1637 instructions().append(constantBufferIndex
);
1638 instructions().append(length
);
1639 instructions().append(newArrayAllocationProfile());
// General path: evaluate each element into consecutive temporaries.
1644 Vector
<RefPtr
<RegisterID
>, 16, UnsafeVectorOverflow
> argv
;
1645 for (ElementNode
* n
= elements
; n
; n
= n
->next()) {
1648 argv
.append(newTemporary());
1649 // op_new_array requires the initial values to be a sequential range of registers
1650 ASSERT(argv
.size() == 1 || argv
[argv
.size() - 1]->index() == argv
[argv
.size() - 2]->index() + 1);
1651 emitNode(argv
.last().get(), n
->value());
1653 emitOpcode(op_new_array
);
1654 instructions().append(dst
->index());
1655 instructions().append(argv
.size() ? argv
[0]->index() : 0); // argv
1656 instructions().append(argv
.size()); // argc
1657 instructions().append(newArrayAllocationProfile());
1661 RegisterID
* BytecodeGenerator::emitNewFunction(RegisterID
* dst
, FunctionBodyNode
* function
)
1663 return emitNewFunctionInternal(dst
, m_codeBlock
->addFunctionDecl(makeFunction(function
)), false);
1666 RegisterID
* BytecodeGenerator::emitLazyNewFunction(RegisterID
* dst
, FunctionBodyNode
* function
)
1668 FunctionOffsetMap::AddResult ptr
= m_functionOffsets
.add(function
, 0);
1670 ptr
.iterator
->value
= m_codeBlock
->addFunctionDecl(makeFunction(function
));
1671 return emitNewFunctionInternal(dst
, ptr
.iterator
->value
, true);
1674 RegisterID
* BytecodeGenerator::emitNewFunctionInternal(RegisterID
* dst
, unsigned index
, bool doNullCheck
)
1676 createActivationIfNecessary();
1677 emitOpcode(op_new_func
);
1678 instructions().append(dst
->index());
1679 instructions().append(index
);
1680 instructions().append(doNullCheck
);
1684 RegisterID
* BytecodeGenerator::emitNewRegExp(RegisterID
* dst
, RegExp
* regExp
)
1686 emitOpcode(op_new_regexp
);
1687 instructions().append(dst
->index());
1688 instructions().append(addRegExp(regExp
));
1692 RegisterID
* BytecodeGenerator::emitNewFunctionExpression(RegisterID
* r0
, FuncExprNode
* n
)
1694 FunctionBodyNode
* function
= n
->body();
1695 unsigned index
= m_codeBlock
->addFunctionExpr(makeFunction(function
));
1697 createActivationIfNecessary();
1698 emitOpcode(op_new_func_exp
);
1699 instructions().append(r0
->index());
1700 instructions().append(index
);
1704 RegisterID
* BytecodeGenerator::emitCall(RegisterID
* dst
, RegisterID
* func
, ExpectedFunction expectedFunction
, CallArguments
& callArguments
, unsigned divot
, unsigned startOffset
, unsigned endOffset
, unsigned line
, unsigned lineStart
)
1706 return emitCall(op_call
, dst
, func
, expectedFunction
, callArguments
, divot
, startOffset
, endOffset
, line
, lineStart
);
1709 void BytecodeGenerator::createArgumentsIfNecessary()
1711 if (m_codeType
!= FunctionCode
)
1714 if (!m_codeBlock
->usesArguments())
1717 // If we're in strict mode we tear off the arguments on function
1718 // entry, so there's no need to check if we need to create them
1720 if (m_codeBlock
->isStrictMode())
1723 emitOpcode(op_create_arguments
);
1724 instructions().append(m_codeBlock
->argumentsRegister());
1727 void BytecodeGenerator::createActivationIfNecessary()
1729 if (m_hasCreatedActivation
)
1731 if (!m_codeBlock
->needsFullScopeChain())
1733 emitOpcode(op_create_activation
);
1734 instructions().append(m_activationRegister
->index());
1737 RegisterID
* BytecodeGenerator::emitCallEval(RegisterID
* dst
, RegisterID
* func
, CallArguments
& callArguments
, unsigned divot
, unsigned startOffset
, unsigned endOffset
, unsigned line
, unsigned lineStart
)
1739 return emitCall(op_call_eval
, dst
, func
, NoExpectedFunction
, callArguments
, divot
, startOffset
, endOffset
, line
, lineStart
);
1742 ExpectedFunction
BytecodeGenerator::expectedFunctionForIdentifier(const Identifier
& identifier
)
1744 if (identifier
== m_vm
->propertyNames
->Object
)
1745 return ExpectObjectConstructor
;
1746 if (identifier
== m_vm
->propertyNames
->Array
)
1747 return ExpectArrayConstructor
;
1748 return NoExpectedFunction
;
1751 ExpectedFunction
BytecodeGenerator::emitExpectedFunctionSnippet(RegisterID
* dst
, RegisterID
* func
, ExpectedFunction expectedFunction
, CallArguments
& callArguments
, Label
* done
)
1753 RefPtr
<Label
> realCall
= newLabel();
1754 switch (expectedFunction
) {
1755 case ExpectObjectConstructor
: {
1756 // If the number of arguments is non-zero, then we can't do anything interesting.
1757 if (callArguments
.argumentCountIncludingThis() >= 2)
1758 return NoExpectedFunction
;
1760 size_t begin
= instructions().size();
1761 emitOpcode(op_jneq_ptr
);
1762 instructions().append(func
->index());
1763 instructions().append(Special::ObjectConstructor
);
1764 instructions().append(realCall
->bind(begin
, instructions().size()));
1766 if (dst
!= ignoredResult())
1771 case ExpectArrayConstructor
: {
1772 // If you're doing anything other than "new Array()" or "new Array(foo)" then we
1773 // don't do inline it, for now. The only reason is that call arguments are in
1774 // the opposite order of what op_new_array expects, so we'd either need to change
1775 // how op_new_array works or we'd need an op_new_array_reverse. Neither of these
1776 // things sounds like it's worth it.
1777 if (callArguments
.argumentCountIncludingThis() > 2)
1778 return NoExpectedFunction
;
1780 size_t begin
= instructions().size();
1781 emitOpcode(op_jneq_ptr
);
1782 instructions().append(func
->index());
1783 instructions().append(Special::ArrayConstructor
);
1784 instructions().append(realCall
->bind(begin
, instructions().size()));
1786 if (dst
!= ignoredResult()) {
1787 if (callArguments
.argumentCountIncludingThis() == 2) {
1788 emitOpcode(op_new_array_with_size
);
1789 instructions().append(dst
->index());
1790 instructions().append(callArguments
.argumentRegister(0)->index());
1791 instructions().append(newArrayAllocationProfile());
1793 ASSERT(callArguments
.argumentCountIncludingThis() == 1);
1794 emitOpcode(op_new_array
);
1795 instructions().append(dst
->index());
1796 instructions().append(0);
1797 instructions().append(0);
1798 instructions().append(newArrayAllocationProfile());
1805 ASSERT(expectedFunction
== NoExpectedFunction
);
1806 return NoExpectedFunction
;
1809 size_t begin
= instructions().size();
1811 instructions().append(done
->bind(begin
, instructions().size()));
1812 emitLabel(realCall
.get());
1814 return expectedFunction
;
1817 RegisterID
* BytecodeGenerator::emitCall(OpcodeID opcodeID
, RegisterID
* dst
, RegisterID
* func
, ExpectedFunction expectedFunction
, CallArguments
& callArguments
, unsigned divot
, unsigned startOffset
, unsigned endOffset
, unsigned line
, unsigned lineStart
)
1819 ASSERT(opcodeID
== op_call
|| opcodeID
== op_call_eval
);
1820 ASSERT(func
->refCount());
1822 if (m_shouldEmitProfileHooks
)
1823 emitMove(callArguments
.profileHookRegister(), func
);
1825 // Generate code for arguments.
1826 unsigned argument
= 0;
1827 for (ArgumentListNode
* n
= callArguments
.argumentsNode()->m_listNode
; n
; n
= n
->m_next
)
1828 emitNode(callArguments
.argumentRegister(argument
++), n
);
1830 // Reserve space for call frame.
1831 Vector
<RefPtr
<RegisterID
>, JSStack::CallFrameHeaderSize
, UnsafeVectorOverflow
> callFrame
;
1832 for (int i
= 0; i
< JSStack::CallFrameHeaderSize
; ++i
)
1833 callFrame
.append(newTemporary());
1835 if (m_shouldEmitProfileHooks
) {
1836 emitOpcode(op_profile_will_call
);
1837 instructions().append(callArguments
.profileHookRegister()->index());
1840 emitExpressionInfo(divot
, startOffset
, endOffset
, line
, lineStart
);
1842 RefPtr
<Label
> done
= newLabel();
1843 expectedFunction
= emitExpectedFunctionSnippet(dst
, func
, expectedFunction
, callArguments
, done
.get());
1846 UnlinkedArrayProfile arrayProfile
= newArrayProfile();
1847 emitOpcode(opcodeID
);
1848 instructions().append(func
->index()); // func
1849 instructions().append(callArguments
.argumentCountIncludingThis()); // argCount
1850 instructions().append(callArguments
.registerOffset()); // registerOffset
1852 instructions().append(m_codeBlock
->addLLIntCallLinkInfo());
1854 instructions().append(0);
1856 instructions().append(arrayProfile
);
1857 if (dst
!= ignoredResult()) {
1858 UnlinkedValueProfile profile
= emitProfiledOpcode(op_call_put_result
);
1859 instructions().append(kill(dst
));
1860 instructions().append(profile
);
1863 if (expectedFunction
!= NoExpectedFunction
)
1864 emitLabel(done
.get());
1866 if (m_shouldEmitProfileHooks
) {
1867 emitOpcode(op_profile_did_call
);
1868 instructions().append(callArguments
.profileHookRegister()->index());
1874 RegisterID
* BytecodeGenerator::emitCallVarargs(RegisterID
* dst
, RegisterID
* func
, RegisterID
* thisRegister
, RegisterID
* arguments
, RegisterID
* firstFreeRegister
, RegisterID
* profileHookRegister
, unsigned divot
, unsigned startOffset
, unsigned endOffset
, unsigned line
, unsigned lineStart
)
1876 if (m_shouldEmitProfileHooks
) {
1877 emitMove(profileHookRegister
, func
);
1878 emitOpcode(op_profile_will_call
);
1879 instructions().append(profileHookRegister
->index());
1882 emitExpressionInfo(divot
, startOffset
, endOffset
, line
, lineStart
);
1885 emitOpcode(op_call_varargs
);
1886 instructions().append(func
->index());
1887 instructions().append(thisRegister
->index());
1888 instructions().append(arguments
->index());
1889 instructions().append(firstFreeRegister
->index());
1890 if (dst
!= ignoredResult()) {
1891 UnlinkedValueProfile profile
= emitProfiledOpcode(op_call_put_result
);
1892 instructions().append(kill(dst
));
1893 instructions().append(profile
);
1895 if (m_shouldEmitProfileHooks
) {
1896 emitOpcode(op_profile_did_call
);
1897 instructions().append(profileHookRegister
->index());
1902 RegisterID
* BytecodeGenerator::emitReturn(RegisterID
* src
)
1904 if (m_codeBlock
->needsFullScopeChain()) {
1905 emitOpcode(op_tear_off_activation
);
1906 instructions().append(m_activationRegister
->index());
1909 if (m_codeBlock
->usesArguments() && m_codeBlock
->numParameters() != 1 && !m_codeBlock
->isStrictMode()) {
1910 emitOpcode(op_tear_off_arguments
);
1911 instructions().append(m_codeBlock
->argumentsRegister());
1912 instructions().append(m_activationRegister
? m_activationRegister
->index() : emitLoad(0, JSValue())->index());
1915 // Constructors use op_ret_object_or_this to check the result is an
1916 // object, unless we can trivially determine the check is not
1917 // necessary (currently, if the return value is 'this').
1918 if (isConstructor() && (src
->index() != m_thisRegister
.index())) {
1919 emitOpcode(op_ret_object_or_this
);
1920 instructions().append(src
->index());
1921 instructions().append(m_thisRegister
.index());
1924 return emitUnaryNoDstOp(op_ret
, src
);
1927 RegisterID
* BytecodeGenerator::emitUnaryNoDstOp(OpcodeID opcodeID
, RegisterID
* src
)
1929 emitOpcode(opcodeID
);
1930 instructions().append(src
->index());
1934 RegisterID
* BytecodeGenerator::emitConstruct(RegisterID
* dst
, RegisterID
* func
, ExpectedFunction expectedFunction
, CallArguments
& callArguments
, unsigned divot
, unsigned startOffset
, unsigned endOffset
, unsigned line
, unsigned lineStart
)
1936 ASSERT(func
->refCount());
1938 if (m_shouldEmitProfileHooks
)
1939 emitMove(callArguments
.profileHookRegister(), func
);
1941 // Generate code for arguments.
1942 unsigned argument
= 0;
1943 if (ArgumentsNode
* argumentsNode
= callArguments
.argumentsNode()) {
1944 for (ArgumentListNode
* n
= argumentsNode
->m_listNode
; n
; n
= n
->m_next
)
1945 emitNode(callArguments
.argumentRegister(argument
++), n
);
1948 if (m_shouldEmitProfileHooks
) {
1949 emitOpcode(op_profile_will_call
);
1950 instructions().append(callArguments
.profileHookRegister()->index());
1953 // Reserve space for call frame.
1954 Vector
<RefPtr
<RegisterID
>, JSStack::CallFrameHeaderSize
, UnsafeVectorOverflow
> callFrame
;
1955 for (int i
= 0; i
< JSStack::CallFrameHeaderSize
; ++i
)
1956 callFrame
.append(newTemporary());
1958 emitExpressionInfo(divot
, startOffset
, endOffset
, line
, lineStart
);
1960 RefPtr
<Label
> done
= newLabel();
1961 expectedFunction
= emitExpectedFunctionSnippet(dst
, func
, expectedFunction
, callArguments
, done
.get());
1963 emitOpcode(op_construct
);
1964 instructions().append(func
->index()); // func
1965 instructions().append(callArguments
.argumentCountIncludingThis()); // argCount
1966 instructions().append(callArguments
.registerOffset()); // registerOffset
1968 instructions().append(m_codeBlock
->addLLIntCallLinkInfo());
1970 instructions().append(0);
1972 instructions().append(0);
1973 if (dst
!= ignoredResult()) {
1974 UnlinkedValueProfile profile
= emitProfiledOpcode(op_call_put_result
);
1975 instructions().append(kill(dst
));
1976 instructions().append(profile
);
1979 if (expectedFunction
!= NoExpectedFunction
)
1980 emitLabel(done
.get());
1982 if (m_shouldEmitProfileHooks
) {
1983 emitOpcode(op_profile_did_call
);
1984 instructions().append(callArguments
.profileHookRegister()->index());
1990 RegisterID
* BytecodeGenerator::emitStrcat(RegisterID
* dst
, RegisterID
* src
, int count
)
1992 emitOpcode(op_strcat
);
1993 instructions().append(dst
->index());
1994 instructions().append(src
->index());
1995 instructions().append(count
);
2000 void BytecodeGenerator::emitToPrimitive(RegisterID
* dst
, RegisterID
* src
)
2002 emitOpcode(op_to_primitive
);
2003 instructions().append(dst
->index());
2004 instructions().append(src
->index());
2007 RegisterID
* BytecodeGenerator::emitPushWithScope(RegisterID
* scope
)
2009 ControlFlowContext context
;
2010 context
.isFinallyBlock
= false;
2011 m_scopeContextStack
.append(context
);
2012 m_dynamicScopeDepth
++;
2014 return emitUnaryNoDstOp(op_push_with_scope
, scope
);
2017 void BytecodeGenerator::emitPopScope()
2019 ASSERT(m_scopeContextStack
.size());
2020 ASSERT(!m_scopeContextStack
.last().isFinallyBlock
);
2022 emitOpcode(op_pop_scope
);
2024 m_scopeContextStack
.removeLast();
2025 m_dynamicScopeDepth
--;
2028 void BytecodeGenerator::emitDebugHook(DebugHookID debugHookID
, unsigned firstLine
, unsigned lastLine
, unsigned charOffset
, unsigned lineStart
)
2030 #if ENABLE(DEBUG_WITH_BREAKPOINT)
2031 if (debugHookID
!= DidReachBreakpoint
)
2034 if (!m_shouldEmitDebugHooks
)
2037 emitExpressionInfo(charOffset
, 0, 0, firstLine
, lineStart
);
2038 unsigned charPosition
= charOffset
- m_scopeNode
->source().startOffset();
2039 emitOpcode(op_debug
);
2040 instructions().append(debugHookID
);
2041 instructions().append(firstLine
);
2042 instructions().append(lastLine
);
2043 instructions().append(charPosition
);
2046 void BytecodeGenerator::pushFinallyContext(StatementNode
* finallyBlock
)
2048 ControlFlowContext scope
;
2049 scope
.isFinallyBlock
= true;
2050 FinallyContext context
= {
2052 static_cast<unsigned>(m_scopeContextStack
.size()),
2053 static_cast<unsigned>(m_switchContextStack
.size()),
2054 static_cast<unsigned>(m_forInContextStack
.size()),
2055 static_cast<unsigned>(m_tryContextStack
.size()),
2056 static_cast<unsigned>(m_labelScopes
.size()),
2060 scope
.finallyContext
= context
;
2061 m_scopeContextStack
.append(scope
);
2065 void BytecodeGenerator::popFinallyContext()
2067 ASSERT(m_scopeContextStack
.size());
2068 ASSERT(m_scopeContextStack
.last().isFinallyBlock
);
2069 ASSERT(m_finallyDepth
> 0);
2070 m_scopeContextStack
.removeLast();
2074 LabelScope
* BytecodeGenerator::breakTarget(const Identifier
& name
)
2076 // Reclaim free label scopes.
2078 // The condition was previously coded as 'm_labelScopes.size() && !m_labelScopes.last().refCount()',
2079 // however sometimes this appears to lead to GCC going a little haywire and entering the loop with
2080 // size 0, leading to segfaulty badness. We are yet to identify a valid cause within our code to
2081 // cause the GCC codegen to misbehave in this fashion, and as such the following refactoring of the
2082 // loop condition is a workaround.
2083 while (m_labelScopes
.size()) {
2084 if (m_labelScopes
.last().refCount())
2086 m_labelScopes
.removeLast();
2089 if (!m_labelScopes
.size())
2092 // We special-case the following, which is a syntax error in Firefox:
2095 if (name
.isEmpty()) {
2096 for (int i
= m_labelScopes
.size() - 1; i
>= 0; --i
) {
2097 LabelScope
* scope
= &m_labelScopes
[i
];
2098 if (scope
->type() != LabelScope::NamedLabel
) {
2099 ASSERT(scope
->breakTarget());
2106 for (int i
= m_labelScopes
.size() - 1; i
>= 0; --i
) {
2107 LabelScope
* scope
= &m_labelScopes
[i
];
2108 if (scope
->name() && *scope
->name() == name
) {
2109 ASSERT(scope
->breakTarget());
2116 LabelScope
* BytecodeGenerator::continueTarget(const Identifier
& name
)
2118 // Reclaim free label scopes.
2119 while (m_labelScopes
.size() && !m_labelScopes
.last().refCount())
2120 m_labelScopes
.removeLast();
2122 if (!m_labelScopes
.size())
2125 if (name
.isEmpty()) {
2126 for (int i
= m_labelScopes
.size() - 1; i
>= 0; --i
) {
2127 LabelScope
* scope
= &m_labelScopes
[i
];
2128 if (scope
->type() == LabelScope::Loop
) {
2129 ASSERT(scope
->continueTarget());
2136 // Continue to the loop nested nearest to the label scope that matches
2138 LabelScope
* result
= 0;
2139 for (int i
= m_labelScopes
.size() - 1; i
>= 0; --i
) {
2140 LabelScope
* scope
= &m_labelScopes
[i
];
2141 if (scope
->type() == LabelScope::Loop
) {
2142 ASSERT(scope
->continueTarget());
2145 if (scope
->name() && *scope
->name() == name
)
2146 return result
; // may be 0
2151 void BytecodeGenerator::emitComplexPopScopes(ControlFlowContext
* topScope
, ControlFlowContext
* bottomScope
)
2153 while (topScope
> bottomScope
) {
2154 // First we count the number of dynamic scopes we need to remove to get
2155 // to a finally block.
2156 int nNormalScopes
= 0;
2157 while (topScope
> bottomScope
) {
2158 if (topScope
->isFinallyBlock
)
2164 if (nNormalScopes
) {
2165 // We need to remove a number of dynamic scopes to get to the next
2167 while (nNormalScopes
--)
2168 emitOpcode(op_pop_scope
);
2170 // If topScope == bottomScope then there isn't a finally block left to emit.
2171 if (topScope
== bottomScope
)
2175 Vector
<ControlFlowContext
> savedScopeContextStack
;
2176 Vector
<SwitchInfo
> savedSwitchContextStack
;
2177 Vector
<ForInContext
> savedForInContextStack
;
2178 Vector
<TryContext
> poppedTryContexts
;
2179 LabelScopeStore savedLabelScopes
;
2180 while (topScope
> bottomScope
&& topScope
->isFinallyBlock
) {
2181 RefPtr
<Label
> beforeFinally
= emitLabel(newLabel().get());
2183 // Save the current state of the world while instating the state of the world
2184 // for the finally block.
2185 FinallyContext finallyContext
= topScope
->finallyContext
;
2186 bool flipScopes
= finallyContext
.scopeContextStackSize
!= m_scopeContextStack
.size();
2187 bool flipSwitches
= finallyContext
.switchContextStackSize
!= m_switchContextStack
.size();
2188 bool flipForIns
= finallyContext
.forInContextStackSize
!= m_forInContextStack
.size();
2189 bool flipTries
= finallyContext
.tryContextStackSize
!= m_tryContextStack
.size();
2190 bool flipLabelScopes
= finallyContext
.labelScopesSize
!= m_labelScopes
.size();
2191 int topScopeIndex
= -1;
2192 int bottomScopeIndex
= -1;
2194 topScopeIndex
= topScope
- m_scopeContextStack
.begin();
2195 bottomScopeIndex
= bottomScope
- m_scopeContextStack
.begin();
2196 savedScopeContextStack
= m_scopeContextStack
;
2197 m_scopeContextStack
.shrink(finallyContext
.scopeContextStackSize
);
2200 savedSwitchContextStack
= m_switchContextStack
;
2201 m_switchContextStack
.shrink(finallyContext
.switchContextStackSize
);
2204 savedForInContextStack
= m_forInContextStack
;
2205 m_forInContextStack
.shrink(finallyContext
.forInContextStackSize
);
2208 while (m_tryContextStack
.size() != finallyContext
.tryContextStackSize
) {
2209 ASSERT(m_tryContextStack
.size() > finallyContext
.tryContextStackSize
);
2210 TryContext context
= m_tryContextStack
.last();
2211 m_tryContextStack
.removeLast();
2213 range
.start
= context
.start
;
2214 range
.end
= beforeFinally
;
2215 range
.tryData
= context
.tryData
;
2216 m_tryRanges
.append(range
);
2217 poppedTryContexts
.append(context
);
2220 if (flipLabelScopes
) {
2221 savedLabelScopes
= m_labelScopes
;
2222 while (m_labelScopes
.size() > finallyContext
.labelScopesSize
)
2223 m_labelScopes
.removeLast();
2225 int savedFinallyDepth
= m_finallyDepth
;
2226 m_finallyDepth
= finallyContext
.finallyDepth
;
2227 int savedDynamicScopeDepth
= m_dynamicScopeDepth
;
2228 m_dynamicScopeDepth
= finallyContext
.dynamicScopeDepth
;
2230 // Emit the finally block.
2231 emitNode(finallyContext
.finallyBlock
);
2233 RefPtr
<Label
> afterFinally
= emitLabel(newLabel().get());
2235 // Restore the state of the world.
2237 m_scopeContextStack
= savedScopeContextStack
;
2238 topScope
= &m_scopeContextStack
[topScopeIndex
]; // assert it's within bounds
2239 bottomScope
= m_scopeContextStack
.begin() + bottomScopeIndex
; // don't assert, since it the index might be -1.
2242 m_switchContextStack
= savedSwitchContextStack
;
2244 m_forInContextStack
= savedForInContextStack
;
2246 ASSERT(m_tryContextStack
.size() == finallyContext
.tryContextStackSize
);
2247 for (unsigned i
= poppedTryContexts
.size(); i
--;) {
2248 TryContext context
= poppedTryContexts
[i
];
2249 context
.start
= afterFinally
;
2250 m_tryContextStack
.append(context
);
2252 poppedTryContexts
.clear();
2254 if (flipLabelScopes
)
2255 m_labelScopes
= savedLabelScopes
;
2256 m_finallyDepth
= savedFinallyDepth
;
2257 m_dynamicScopeDepth
= savedDynamicScopeDepth
;
2264 void BytecodeGenerator::emitPopScopes(int targetScopeDepth
)
2266 ASSERT(scopeDepth() - targetScopeDepth
>= 0);
2268 size_t scopeDelta
= scopeDepth() - targetScopeDepth
;
2269 ASSERT(scopeDelta
<= m_scopeContextStack
.size());
2273 if (!m_finallyDepth
) {
2274 while (scopeDelta
--)
2275 emitOpcode(op_pop_scope
);
2279 emitComplexPopScopes(&m_scopeContextStack
.last(), &m_scopeContextStack
.last() - scopeDelta
);
2282 RegisterID
* BytecodeGenerator::emitGetPropertyNames(RegisterID
* dst
, RegisterID
* base
, RegisterID
* i
, RegisterID
* size
, Label
* breakTarget
)
2284 size_t begin
= instructions().size();
2286 emitOpcode(op_get_pnames
);
2287 instructions().append(dst
->index());
2288 instructions().append(base
->index());
2289 instructions().append(i
->index());
2290 instructions().append(size
->index());
2291 instructions().append(breakTarget
->bind(begin
, instructions().size()));
2295 RegisterID
* BytecodeGenerator::emitNextPropertyName(RegisterID
* dst
, RegisterID
* base
, RegisterID
* i
, RegisterID
* size
, RegisterID
* iter
, Label
* target
)
2297 size_t begin
= instructions().size();
2299 emitOpcode(op_next_pname
);
2300 instructions().append(dst
->index());
2301 instructions().append(base
->index());
2302 instructions().append(i
->index());
2303 instructions().append(size
->index());
2304 instructions().append(iter
->index());
2305 instructions().append(target
->bind(begin
, instructions().size()));
2309 TryData
* BytecodeGenerator::pushTry(Label
* start
)
2312 tryData
.target
= newLabel();
2313 tryData
.targetScopeDepth
= UINT_MAX
;
2314 m_tryData
.append(tryData
);
2315 TryData
* result
= &m_tryData
.last();
2317 TryContext tryContext
;
2318 tryContext
.start
= start
;
2319 tryContext
.tryData
= result
;
2321 m_tryContextStack
.append(tryContext
);
2326 RegisterID
* BytecodeGenerator::popTryAndEmitCatch(TryData
* tryData
, RegisterID
* targetRegister
, Label
* end
)
2328 m_usesExceptions
= true;
2330 ASSERT_UNUSED(tryData
, m_tryContextStack
.last().tryData
== tryData
);
2333 tryRange
.start
= m_tryContextStack
.last().start
;
2335 tryRange
.tryData
= m_tryContextStack
.last().tryData
;
2336 m_tryRanges
.append(tryRange
);
2337 m_tryContextStack
.removeLast();
2339 emitLabel(tryRange
.tryData
->target
.get());
2340 tryRange
.tryData
->targetScopeDepth
= m_dynamicScopeDepth
;
2342 emitOpcode(op_catch
);
2343 instructions().append(targetRegister
->index());
2344 return targetRegister
;
2347 void BytecodeGenerator::emitThrowReferenceError(const String
& message
)
2349 emitOpcode(op_throw_static_error
);
2350 instructions().append(addConstantValue(addStringConstant(Identifier(m_vm
, message
)))->index());
2351 instructions().append(true);
2354 void BytecodeGenerator::emitPushNameScope(const Identifier
& property
, RegisterID
* value
, unsigned attributes
)
2356 ControlFlowContext context
;
2357 context
.isFinallyBlock
= false;
2358 m_scopeContextStack
.append(context
);
2359 m_dynamicScopeDepth
++;
2361 emitOpcode(op_push_name_scope
);
2362 instructions().append(addConstant(property
));
2363 instructions().append(value
->index());
2364 instructions().append(attributes
);
2367 void BytecodeGenerator::beginSwitch(RegisterID
* scrutineeRegister
, SwitchInfo::SwitchType type
)
2369 SwitchInfo info
= { static_cast<uint32_t>(instructions().size()), type
};
2371 case SwitchInfo::SwitchImmediate
:
2372 emitOpcode(op_switch_imm
);
2374 case SwitchInfo::SwitchCharacter
:
2375 emitOpcode(op_switch_char
);
2377 case SwitchInfo::SwitchString
:
2378 emitOpcode(op_switch_string
);
2381 RELEASE_ASSERT_NOT_REACHED();
2384 instructions().append(0); // place holder for table index
2385 instructions().append(0); // place holder for default target
2386 instructions().append(scrutineeRegister
->index());
2387 m_switchContextStack
.append(info
);
2390 static int32_t keyForImmediateSwitch(ExpressionNode
* node
, int32_t min
, int32_t max
)
2393 ASSERT(node
->isNumber());
2394 double value
= static_cast<NumberNode
*>(node
)->value();
2395 int32_t key
= static_cast<int32_t>(value
);
2396 ASSERT(key
== value
);
2402 static void prepareJumpTableForImmediateSwitch(UnlinkedSimpleJumpTable
& jumpTable
, int32_t switchAddress
, uint32_t clauseCount
, RefPtr
<Label
>* labels
, ExpressionNode
** nodes
, int32_t min
, int32_t max
)
2404 jumpTable
.min
= min
;
2405 jumpTable
.branchOffsets
.resize(max
- min
+ 1);
2406 jumpTable
.branchOffsets
.fill(0);
2407 for (uint32_t i
= 0; i
< clauseCount
; ++i
) {
2408 // We're emitting this after the clause labels should have been fixed, so
2409 // the labels should not be "forward" references
2410 ASSERT(!labels
[i
]->isForward());
2411 jumpTable
.add(keyForImmediateSwitch(nodes
[i
], min
, max
), labels
[i
]->bind(switchAddress
, switchAddress
+ 3));
2415 static int32_t keyForCharacterSwitch(ExpressionNode
* node
, int32_t min
, int32_t max
)
2418 ASSERT(node
->isString());
2419 StringImpl
* clause
= static_cast<StringNode
*>(node
)->value().impl();
2420 ASSERT(clause
->length() == 1);
2422 int32_t key
= (*clause
)[0];
2428 static void prepareJumpTableForCharacterSwitch(UnlinkedSimpleJumpTable
& jumpTable
, int32_t switchAddress
, uint32_t clauseCount
, RefPtr
<Label
>* labels
, ExpressionNode
** nodes
, int32_t min
, int32_t max
)
2430 jumpTable
.min
= min
;
2431 jumpTable
.branchOffsets
.resize(max
- min
+ 1);
2432 jumpTable
.branchOffsets
.fill(0);
2433 for (uint32_t i
= 0; i
< clauseCount
; ++i
) {
2434 // We're emitting this after the clause labels should have been fixed, so
2435 // the labels should not be "forward" references
2436 ASSERT(!labels
[i
]->isForward());
2437 jumpTable
.add(keyForCharacterSwitch(nodes
[i
], min
, max
), labels
[i
]->bind(switchAddress
, switchAddress
+ 3));
2441 static void prepareJumpTableForStringSwitch(UnlinkedStringJumpTable
& jumpTable
, int32_t switchAddress
, uint32_t clauseCount
, RefPtr
<Label
>* labels
, ExpressionNode
** nodes
)
2443 for (uint32_t i
= 0; i
< clauseCount
; ++i
) {
2444 // We're emitting this after the clause labels should have been fixed, so
2445 // the labels should not be "forward" references
2446 ASSERT(!labels
[i
]->isForward());
2448 ASSERT(nodes
[i
]->isString());
2449 StringImpl
* clause
= static_cast<StringNode
*>(nodes
[i
])->value().impl();
2450 jumpTable
.offsetTable
.add(clause
, labels
[i
]->bind(switchAddress
, switchAddress
+ 3));
2454 void BytecodeGenerator::endSwitch(uint32_t clauseCount
, RefPtr
<Label
>* labels
, ExpressionNode
** nodes
, Label
* defaultLabel
, int32_t min
, int32_t max
)
2456 SwitchInfo switchInfo
= m_switchContextStack
.last();
2457 m_switchContextStack
.removeLast();
2458 if (switchInfo
.switchType
== SwitchInfo::SwitchImmediate
) {
2459 instructions()[switchInfo
.bytecodeOffset
+ 1] = m_codeBlock
->numberOfImmediateSwitchJumpTables();
2460 instructions()[switchInfo
.bytecodeOffset
+ 2] = defaultLabel
->bind(switchInfo
.bytecodeOffset
, switchInfo
.bytecodeOffset
+ 3);
2462 UnlinkedSimpleJumpTable
& jumpTable
= m_codeBlock
->addImmediateSwitchJumpTable();
2463 prepareJumpTableForImmediateSwitch(jumpTable
, switchInfo
.bytecodeOffset
, clauseCount
, labels
, nodes
, min
, max
);
2464 } else if (switchInfo
.switchType
== SwitchInfo::SwitchCharacter
) {
2465 instructions()[switchInfo
.bytecodeOffset
+ 1] = m_codeBlock
->numberOfCharacterSwitchJumpTables();
2466 instructions()[switchInfo
.bytecodeOffset
+ 2] = defaultLabel
->bind(switchInfo
.bytecodeOffset
, switchInfo
.bytecodeOffset
+ 3);
2468 UnlinkedSimpleJumpTable
& jumpTable
= m_codeBlock
->addCharacterSwitchJumpTable();
2469 prepareJumpTableForCharacterSwitch(jumpTable
, switchInfo
.bytecodeOffset
, clauseCount
, labels
, nodes
, min
, max
);
2471 ASSERT(switchInfo
.switchType
== SwitchInfo::SwitchString
);
2472 instructions()[switchInfo
.bytecodeOffset
+ 1] = m_codeBlock
->numberOfStringSwitchJumpTables();
2473 instructions()[switchInfo
.bytecodeOffset
+ 2] = defaultLabel
->bind(switchInfo
.bytecodeOffset
, switchInfo
.bytecodeOffset
+ 3);
2475 UnlinkedStringJumpTable
& jumpTable
= m_codeBlock
->addStringSwitchJumpTable();
2476 prepareJumpTableForStringSwitch(jumpTable
, switchInfo
.bytecodeOffset
, clauseCount
, labels
, nodes
);
2480 RegisterID
* BytecodeGenerator::emitThrowExpressionTooDeepException()
2482 // It would be nice to do an even better job of identifying exactly where the expression is.
2483 // And we could make the caller pass the node pointer in, if there was some way of getting
2484 // that from an arbitrary node. However, calling emitExpressionInfo without any useful data
2485 // is still good enough to get us an accurate line number.
2486 m_expressionTooDeep
= true;
2487 return newTemporary();
2490 void BytecodeGenerator::setIsNumericCompareFunction(bool isNumericCompareFunction
)
2492 m_codeBlock
->setIsNumericCompareFunction(isNumericCompareFunction
);
2495 bool BytecodeGenerator::isArgumentNumber(const Identifier
& ident
, int argumentNumber
)
2497 RegisterID
* registerID
= resolve(ident
).local();
2498 if (!registerID
|| registerID
->index() >= 0)
2500 return registerID
->index() == CallFrame::argumentOffset(argumentNumber
);
2503 void BytecodeGenerator::emitReadOnlyExceptionIfNeeded()
2505 if (!isStrictMode())
2507 emitOpcode(op_throw_static_error
);
2508 instructions().append(addConstantValue(addStringConstant(Identifier(m_vm
, StrictModeReadonlyPropertyWriteError
)))->index());
2509 instructions().append(false);