/*
 * Copyright (C) 2008, 2009, 2012, 2013, 2014 Apple Inc. All rights reserved.
 * Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca>
 * Copyright (C) 2012 Igalia, S.L.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of Apple Inc. ("Apple") nor the names of
 *    its contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include "config.h"
#include "BytecodeGenerator.h"

#include "Interpreter.h"
#include "JSActivation.h"
#include "JSFunction.h"
#include "JSNameScope.h"
#include "LowLevelInterpreter.h"
#include "JSCInlines.h"
#include "Options.h"
#include "StackAlignment.h"
#include "StrongInlines.h"
#include "UnlinkedCodeBlock.h"
#include "UnlinkedInstructionStream.h"
#include <wtf/StdLibExtras.h>
#include <wtf/text/WTFString.h>

using namespace std;

namespace JSC {
void Label::setLocation(unsigned location)
{
    m_location = location;

    unsigned size = m_unresolvedJumps.size();
    for (unsigned i = 0; i < size; ++i)
        m_generator->m_instructions[m_unresolvedJumps[i].second].u.operand = m_location - m_unresolvedJumps[i].first;
}
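
// generate() asks the parsed AST to emit its bytecode, then finalizes the exception handler
// table and hands the finished instruction stream to the UnlinkedCodeBlock.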
ParserError BytecodeGenerator::generate()
{
    SamplingRegion samplingRegion("Bytecode Generation");

    m_codeBlock->setThisRegister(m_thisRegister.virtualRegister());
    for (size_t i = 0; i < m_deconstructedParameters.size(); i++) {
        auto& entry = m_deconstructedParameters[i];
        entry.second->bindValue(*this, entry.first.get());
    }

    m_scopeNode->emitBytecode(*this);

    m_staticPropertyAnalyzer.kill();

    for (unsigned i = 0; i < m_tryRanges.size(); ++i) {
        TryRange& range = m_tryRanges[i];
        int start = range.start->bind();
        int end = range.end->bind();

        // This will happen for empty try blocks and for some cases of finally blocks:
        //
        //     try {
        //         try {
        //         } finally {
        //             return 42;
        //             // *HERE*
        //         }
        //     } finally {
        //         print("things");
        //     }
        //
        // The return will pop scopes to execute the outer finally block. But this includes
        // popping the try context for the inner try. The try context is live in the fall-through
        // part of the finally block not because we will emit a handler that overlaps the finally,
        // but because we haven't yet had a chance to plant the catch target. Then when we finish
        // emitting code for the outer finally block, we repush the try context, this time with a
        // new start index. But that means that the start index for the try range corresponding
        // to the inner-finally-following-the-return (marked as "*HERE*" above) will be greater
        // than the end index of the try block. This is harmless since end < start handlers will
        // never get matched in our logic, but we do the runtime a favor and choose to not emit
        // such handlers at all.
        if (end <= start)
            continue;

        ASSERT(range.tryData->targetScopeDepth != UINT_MAX);
        UnlinkedHandlerInfo info = {
            static_cast<uint32_t>(start), static_cast<uint32_t>(end),
            static_cast<uint32_t>(range.tryData->target->bind()),
            range.tryData->targetScopeDepth
        };
        m_codeBlock->addExceptionHandler(info);
    }

    m_codeBlock->setInstructions(std::make_unique<UnlinkedInstructionStream>(m_instructions));

    m_codeBlock->shrinkToFit();

    if (m_codeBlock->symbolTable())
        m_codeBlock->setSymbolTable(m_codeBlock->symbolTable()->cloneCapturedNames(*m_codeBlock->vm()));

    if (m_expressionTooDeep)
        return ParserError(ParserError::OutOfMemory);
    return ParserError(ParserError::ErrorNone);
}
bool BytecodeGenerator::addVar(
    const Identifier& ident, ConstantMode constantMode, WatchMode watchMode, RegisterID*& r0)
{
    ASSERT(static_cast<size_t>(m_codeBlock->m_numVars) == m_calleeRegisters.size());

    ConcurrentJITLocker locker(symbolTable().m_lock);
    int index = virtualRegisterForLocal(m_calleeRegisters.size()).offset();
    SymbolTableEntry newEntry(index, constantMode == IsConstant ? ReadOnly : 0);
    SymbolTable::Map::AddResult result = symbolTable().add(locker, ident.impl(), newEntry);

    if (!result.isNewEntry) {
        r0 = &registerFor(result.iterator->value.getIndex());
        return false;
    }

    if (watchMode == IsWatchable) {
        while (m_watchableVariables.size() < static_cast<size_t>(m_codeBlock->m_numVars))
            m_watchableVariables.append(Identifier());
        m_watchableVariables.append(ident);
    }

    r0 = addVar();

    ASSERT(watchMode == NotWatchable || static_cast<size_t>(m_codeBlock->m_numVars) == m_watchableVariables.size());

    return true;
}
void BytecodeGenerator::preserveLastVar()
{
    if ((m_firstConstantIndex = m_calleeRegisters.size()) != 0)
        m_lastVar = &m_calleeRegisters.last();
}
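
// Each of the three constructors below prepares a generator for one kind of code: global
// (program), function, and eval. They differ mainly in how 'this', parameters, 'arguments',
// and captured variables are laid out before the body is emitted.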
BytecodeGenerator::BytecodeGenerator(VM& vm, ProgramNode* programNode, UnlinkedProgramCodeBlock* codeBlock, DebuggerMode debuggerMode, ProfilerMode profilerMode)
    : m_shouldEmitDebugHooks(Options::forceDebuggerBytecodeGeneration() || debuggerMode == DebuggerOn)
    , m_shouldEmitProfileHooks(Options::forceProfilerBytecodeGeneration() || profilerMode == ProfilerOn)
    , m_scopeNode(programNode)
    , m_codeBlock(vm, codeBlock)
    , m_thisRegister(CallFrame::thisArgumentOffset())
    , m_activationRegister(0)
    , m_emptyValueRegister(0)
    , m_globalObjectRegister(0)
    , m_localScopeDepth(0)
    , m_codeType(GlobalCode)
    , m_nextConstantOffset(0)
    , m_globalConstantIndex(0)
    , m_firstLazyFunction(0)
    , m_lastLazyFunction(0)
    , m_staticPropertyAnalyzer(&m_instructions)
    , m_vm(&vm)
    , m_lastOpcodeID(op_end)
#ifndef NDEBUG
    , m_lastOpcodePosition(0)
#endif
    , m_usesExceptions(false)
    , m_expressionTooDeep(false)
    , m_isBuiltinFunction(false)
{
    m_codeBlock->setNumParameters(1); // Allocate space for "this"

    emitOpcode(op_enter);

    const VarStack& varStack = programNode->varStack();
    const FunctionStack& functionStack = programNode->functionStack();

    for (size_t i = 0; i < functionStack.size(); ++i) {
        FunctionBodyNode* function = functionStack[i];
        UnlinkedFunctionExecutable* unlinkedFunction = makeFunction(function);
        codeBlock->addFunctionDeclaration(*m_vm, function->ident(), unlinkedFunction);
    }

    for (size_t i = 0; i < varStack.size(); ++i)
        codeBlock->addVariableDeclaration(varStack[i].first, !!(varStack[i].second & DeclarationStacks::IsConstant));
}
BytecodeGenerator::BytecodeGenerator(VM& vm, FunctionBodyNode* functionBody, UnlinkedFunctionCodeBlock* codeBlock, DebuggerMode debuggerMode, ProfilerMode profilerMode)
    : m_shouldEmitDebugHooks(Options::forceDebuggerBytecodeGeneration() || debuggerMode == DebuggerOn)
    , m_shouldEmitProfileHooks(Options::forceProfilerBytecodeGeneration() || profilerMode == ProfilerOn)
    , m_symbolTable(codeBlock->symbolTable())
    , m_scopeNode(functionBody)
    , m_codeBlock(vm, codeBlock)
    , m_activationRegister(0)
    , m_emptyValueRegister(0)
    , m_globalObjectRegister(0)
    , m_localScopeDepth(0)
    , m_codeType(FunctionCode)
    , m_nextConstantOffset(0)
    , m_globalConstantIndex(0)
    , m_firstLazyFunction(0)
    , m_lastLazyFunction(0)
    , m_staticPropertyAnalyzer(&m_instructions)
    , m_vm(&vm)
    , m_lastOpcodeID(op_end)
#ifndef NDEBUG
    , m_lastOpcodePosition(0)
#endif
    , m_usesExceptions(false)
    , m_expressionTooDeep(false)
    , m_isBuiltinFunction(codeBlock->isBuiltinFunction())
{
    if (m_isBuiltinFunction)
        m_shouldEmitDebugHooks = false;

    m_symbolTable->setUsesNonStrictEval(codeBlock->usesEval() && !codeBlock->isStrictMode());
    Vector<Identifier> boundParameterProperties;
    FunctionParameters& parameters = *functionBody->parameters();
    for (size_t i = 0; i < parameters.size(); i++) {
        auto pattern = parameters.at(i);
        if (pattern->isBindingNode())
            continue;
        pattern->collectBoundIdentifiers(boundParameterProperties);
    }
    m_symbolTable->setParameterCountIncludingThis(functionBody->parameters()->size() + 1);

    emitOpcode(op_enter);
    if (m_codeBlock->needsFullScopeChain() || m_shouldEmitDebugHooks) {
        m_activationRegister = addVar();
        emitInitLazyRegister(m_activationRegister);
        m_codeBlock->setActivationRegister(m_activationRegister->virtualRegister());
    }

    m_symbolTable->setCaptureStart(virtualRegisterForLocal(m_codeBlock->m_numVars).offset());

    if (functionBody->usesArguments() || codeBlock->usesEval()) { // May reify arguments object.
        RegisterID* unmodifiedArgumentsRegister = addVar(); // Anonymous, so it can't be modified by user code.
        RegisterID* argumentsRegister = addVar(propertyNames().arguments, IsVariable, NotWatchable); // Can be changed by assigning to 'arguments'.

        // We can save a little space by hard-coding the knowledge that the two
        // 'arguments' values are stored in consecutive registers, and storing
        // only the index of the assignable one.
        codeBlock->setArgumentsRegister(argumentsRegister->virtualRegister());
        ASSERT_UNUSED(unmodifiedArgumentsRegister, unmodifiedArgumentsRegister->virtualRegister() == JSC::unmodifiedArgumentsRegister(codeBlock->argumentsRegister()));

        emitInitLazyRegister(argumentsRegister);
        emitInitLazyRegister(unmodifiedArgumentsRegister);
        if (shouldTearOffArgumentsEagerly()) {
            emitOpcode(op_create_arguments);
            instructions().append(argumentsRegister->index());
        }
    }

    bool shouldCaptureAllTheThings = m_shouldEmitDebugHooks || codeBlock->usesEval();

    bool capturesAnyArgumentByName = false;
    Vector<RegisterID*, 0, UnsafeVectorOverflow> capturedArguments;
    if (functionBody->hasCapturedVariables() || shouldCaptureAllTheThings) {
        FunctionParameters& parameters = *functionBody->parameters();
        capturedArguments.resize(parameters.size());
        for (size_t i = 0; i < parameters.size(); ++i) {
            capturedArguments[i] = 0;
            auto pattern = parameters.at(i);
            if (!pattern->isBindingNode())
                continue;
            const Identifier& ident = static_cast<const BindingNode*>(pattern)->boundProperty();
            if (!functionBody->captures(ident) && !shouldCaptureAllTheThings)
                continue;
            capturesAnyArgumentByName = true;
            capturedArguments[i] = addVar();
        }
    }
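
    // If any named parameter was captured above, the arguments object can no longer read every
    // argument straight out of the call frame, so record a SlowArgument entry per parameter
    // mapping it to either its normal call-frame offset or the captured local that now holds it.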
    if (capturesAnyArgumentByName && !shouldTearOffArgumentsEagerly()) {
        size_t parameterCount = m_symbolTable->parameterCount();
        auto slowArguments = std::make_unique<SlowArgument[]>(parameterCount);
        for (size_t i = 0; i < parameterCount; ++i) {
            if (!capturedArguments[i]) {
                ASSERT(slowArguments[i].status == SlowArgument::Normal);
                slowArguments[i].index = CallFrame::argumentOffset(i);
                continue;
            }
            slowArguments[i].status = SlowArgument::Captured;
            slowArguments[i].index = capturedArguments[i]->index();
        }
        m_symbolTable->setSlowArguments(WTF::move(slowArguments));
    }
    RegisterID* calleeRegister = resolveCallee(functionBody); // May push to the scope chain and/or add a captured var.

    const DeclarationStacks::FunctionStack& functionStack = functionBody->functionStack();
    const DeclarationStacks::VarStack& varStack = functionBody->varStack();

    // Captured variables and functions go first so that activations don't have
    // to step over the non-captured locals to mark them.
    if (functionBody->hasCapturedVariables()) {
        for (size_t i = 0; i < boundParameterProperties.size(); i++) {
            const Identifier& ident = boundParameterProperties[i];
            if (functionBody->captures(ident))
                addVar(ident, IsVariable, IsWatchable);
        }
        for (size_t i = 0; i < functionStack.size(); ++i) {
            FunctionBodyNode* function = functionStack[i];
            const Identifier& ident = function->ident();
            if (functionBody->captures(ident)) {
                m_functions.add(ident.impl());
                emitNewFunction(addVar(ident, IsVariable, IsWatchable), IsCaptured, function);
            }
        }
        for (size_t i = 0; i < varStack.size(); ++i) {
            const Identifier& ident = varStack[i].first;
            if (functionBody->captures(ident))
                addVar(ident, (varStack[i].second & DeclarationStacks::IsConstant) ? IsConstant : IsVariable, IsWatchable);
        }
    }

    m_symbolTable->setCaptureEnd(virtualRegisterForLocal(codeBlock->m_numVars).offset());
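
    // Uncaptured function declarations come next. Locals in [m_firstLazyFunction, m_lastLazyFunction)
    // only get their JSFunction created on first use; see createLazyRegisterIfNecessary().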
    bool canLazilyCreateFunctions = !functionBody->needsActivationForMoreThanVariables() && !m_shouldEmitDebugHooks;
    m_firstLazyFunction = codeBlock->m_numVars;
    for (size_t i = 0; i < functionStack.size(); ++i) {
        FunctionBodyNode* function = functionStack[i];
        const Identifier& ident = function->ident();
        if (!functionBody->captures(ident)) {
            m_functions.add(ident.impl());
            RefPtr<RegisterID> reg = addVar(ident, IsVariable, NotWatchable);
            // Don't lazily create functions that override the name 'arguments'
            // as this would complicate lazy instantiation of actual arguments.
            if (!canLazilyCreateFunctions || ident == propertyNames().arguments)
                emitNewFunction(reg.get(), NotCaptured, function);
            else {
                emitInitLazyRegister(reg.get());
                m_lazyFunctions.set(reg->virtualRegister().toLocal(), function);
            }
        }
    }
    m_lastLazyFunction = canLazilyCreateFunctions ? codeBlock->m_numVars : m_firstLazyFunction;
    for (size_t i = 0; i < boundParameterProperties.size(); i++) {
        const Identifier& ident = boundParameterProperties[i];
        if (!functionBody->captures(ident))
            addVar(ident, IsVariable, IsWatchable);
    }
    for (size_t i = 0; i < varStack.size(); ++i) {
        const Identifier& ident = varStack[i].first;
        if (!functionBody->captures(ident))
            addVar(ident, (varStack[i].second & DeclarationStacks::IsConstant) ? IsConstant : IsVariable, NotWatchable);
    }

    if (shouldCaptureAllTheThings)
        m_symbolTable->setCaptureEnd(virtualRegisterForLocal(codeBlock->m_numVars).offset());

    if (m_symbolTable->captureCount())
        emitOpcode(op_touch_entry);
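
    // Bind the declared parameter names. Destructuring patterns just get their argument register
    // recorded for later expansion in generate(); a simple name that was captured above has the
    // incoming argument value moved into its captured local, and the name is bound to that local.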
    m_parameters.grow(parameters.size() + 1); // reserve space for "this"

    // Add "this" as a parameter
    int nextParameterIndex = CallFrame::thisArgumentOffset();
    m_thisRegister.setIndex(nextParameterIndex++);
    m_codeBlock->addParameter();
    for (size_t i = 0; i < parameters.size(); ++i, ++nextParameterIndex) {
        int index = nextParameterIndex;
        auto pattern = parameters.at(i);
        if (!pattern->isBindingNode()) {
            m_codeBlock->addParameter();
            RegisterID& parameter = registerFor(index);
            parameter.setIndex(index);
            m_deconstructedParameters.append(std::make_pair(&parameter, pattern));
            continue;
        }
        auto simpleParameter = static_cast<const BindingNode*>(pattern);
        if (capturedArguments.size() && capturedArguments[i]) {
            ASSERT((functionBody->hasCapturedVariables() && functionBody->captures(simpleParameter->boundProperty())) || shouldCaptureAllTheThings);
            index = capturedArguments[i]->index();
            RegisterID original(nextParameterIndex);
            emitMove(capturedArguments[i], &original);
        }
        addParameter(simpleParameter->boundProperty(), index);
    }

    // We declare the callee's name last because it should lose to a var, function, and/or parameter declaration.
    addCallee(functionBody, calleeRegister);

    if (isConstructor()) {
        emitCreateThis(&m_thisRegister);
    } else if (functionBody->usesThis() || codeBlock->usesEval()) {
        m_codeBlock->addPropertyAccessInstruction(instructions().size());
        emitOpcode(op_to_this);
        instructions().append(kill(&m_thisRegister));
        instructions().append(0);
    }
}
BytecodeGenerator::BytecodeGenerator(VM& vm, EvalNode* evalNode, UnlinkedEvalCodeBlock* codeBlock, DebuggerMode debuggerMode, ProfilerMode profilerMode)
    : m_shouldEmitDebugHooks(Options::forceDebuggerBytecodeGeneration() || debuggerMode == DebuggerOn)
    , m_shouldEmitProfileHooks(Options::forceProfilerBytecodeGeneration() || profilerMode == ProfilerOn)
    , m_symbolTable(codeBlock->symbolTable())
    , m_scopeNode(evalNode)
    , m_codeBlock(vm, codeBlock)
    , m_thisRegister(CallFrame::thisArgumentOffset())
    , m_activationRegister(0)
    , m_emptyValueRegister(0)
    , m_globalObjectRegister(0)
    , m_localScopeDepth(0)
    , m_codeType(EvalCode)
    , m_nextConstantOffset(0)
    , m_globalConstantIndex(0)
    , m_firstLazyFunction(0)
    , m_lastLazyFunction(0)
    , m_staticPropertyAnalyzer(&m_instructions)
    , m_vm(&vm)
    , m_lastOpcodeID(op_end)
#ifndef NDEBUG
    , m_lastOpcodePosition(0)
#endif
    , m_usesExceptions(false)
    , m_expressionTooDeep(false)
    , m_isBuiltinFunction(false)
{
    m_symbolTable->setUsesNonStrictEval(codeBlock->usesEval() && !codeBlock->isStrictMode());
    m_codeBlock->setNumParameters(1);

    emitOpcode(op_enter);

    const DeclarationStacks::FunctionStack& functionStack = evalNode->functionStack();
    for (size_t i = 0; i < functionStack.size(); ++i)
        m_codeBlock->addFunctionDecl(makeFunction(functionStack[i]));

    const DeclarationStacks::VarStack& varStack = evalNode->varStack();
    unsigned numVariables = varStack.size();
    Vector<Identifier, 0, UnsafeVectorOverflow> variables;
    variables.reserveCapacity(numVariables);
    for (size_t i = 0; i < numVariables; ++i) {
        ASSERT(varStack[i].first.impl()->isAtomic());
        variables.append(varStack[i].first);
    }
    codeBlock->adoptVariables(variables);
}
BytecodeGenerator::~BytecodeGenerator()
{
}

RegisterID* BytecodeGenerator::emitInitLazyRegister(RegisterID* reg)
{
    emitOpcode(op_init_lazy_reg);
    instructions().append(reg->index());
    ASSERT(!hasWatchableVariable(reg->index()));
    return reg;
}
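
// resolveCallee() decides how a function can refer to itself by name: when the name is statically
// resolvable it aliases the JSStack::Callee slot (or a captured copy of it); otherwise it returns
// 0 and no local binding is created here.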
RegisterID* BytecodeGenerator::resolveCallee(FunctionBodyNode* functionBodyNode)
{
    if (!functionNameIsInScope(functionBodyNode->ident(), functionBodyNode->functionMode()))
        return 0;

    if (functionNameScopeIsDynamic(m_codeBlock->usesEval(), m_codeBlock->isStrictMode()))
        return 0;

    m_calleeRegister.setIndex(JSStack::Callee);
    if (functionBodyNode->captures(functionBodyNode->ident()))
        return emitMove(addVar(), IsCaptured, &m_calleeRegister);

    return &m_calleeRegister;
}

void BytecodeGenerator::addCallee(FunctionBodyNode* functionBodyNode, RegisterID* calleeRegister)
{
    if (!calleeRegister)
        return;

    symbolTable().add(functionBodyNode->ident().impl(), SymbolTableEntry(calleeRegister->index(), ReadOnly));
}
void BytecodeGenerator::addParameter(const Identifier& ident, int parameterIndex)
{
    // Parameters overwrite var declarations, but not function declarations.
    StringImpl* rep = ident.impl();
    if (!m_functions.contains(rep)) {
        symbolTable().set(rep, parameterIndex);
        RegisterID& parameter = registerFor(parameterIndex);
        parameter.setIndex(parameterIndex);
    }

    // To maintain the calling convention, we have to allocate unique space for
    // each parameter, even if the parameter doesn't make it into the symbol table.
    m_codeBlock->addParameter();
}
bool BytecodeGenerator::willResolveToArguments(const Identifier& ident)
{
    if (ident != propertyNames().arguments)
        return false;

    if (!shouldOptimizeLocals())
        return false;

    SymbolTableEntry entry = symbolTable().get(ident.impl());
    if (entry.isNull())
        return false;

    if (m_codeBlock->usesArguments() && m_codeType == FunctionCode)
        return true;

    return false;
}

RegisterID* BytecodeGenerator::uncheckedRegisterForArguments()
{
    ASSERT(willResolveToArguments(propertyNames().arguments));

    SymbolTableEntry entry = symbolTable().get(propertyNames().arguments.impl());
    ASSERT(!entry.isNull());
    return &registerFor(entry.getIndex());
}
RegisterID* BytecodeGenerator::createLazyRegisterIfNecessary(RegisterID* reg)
{
    if (!reg->virtualRegister().isLocal())
        return reg;

    int localVariableNumber = reg->virtualRegister().toLocal();

    if (m_lastLazyFunction <= localVariableNumber || localVariableNumber < m_firstLazyFunction)
        return reg;
    emitLazyNewFunction(reg, m_lazyFunctions.get(localVariableNumber));
    return reg;
}
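
// newRegister() also keeps m_numCalleeRegisters rounded up to the platform's stack alignment so
// the frame size stays aligned as locals and temporaries are appended.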
RegisterID* BytecodeGenerator::newRegister()
{
    m_calleeRegisters.append(virtualRegisterForLocal(m_calleeRegisters.size()));
    int numCalleeRegisters = max<int>(m_codeBlock->m_numCalleeRegisters, m_calleeRegisters.size());
    numCalleeRegisters = WTF::roundUpToMultipleOf(stackAlignmentRegisters(), numCalleeRegisters);
    m_codeBlock->m_numCalleeRegisters = numCalleeRegisters;
    return &m_calleeRegisters.last();
}

RegisterID* BytecodeGenerator::newTemporary()
{
    // Reclaim free register IDs.
    while (m_calleeRegisters.size() && !m_calleeRegisters.last().refCount())
        m_calleeRegisters.removeLast();

    RegisterID* result = newRegister();
    result->setTemporary();
    return result;
}
LabelScopePtr BytecodeGenerator::newLabelScope(LabelScope::Type type, const Identifier* name)
{
    // Reclaim free label scopes.
    while (m_labelScopes.size() && !m_labelScopes.last().refCount())
        m_labelScopes.removeLast();

    // Allocate new label scope.
    LabelScope scope(type, name, scopeDepth(), newLabel(), type == LabelScope::Loop ? newLabel() : PassRefPtr<Label>()); // Only loops have continue targets.
    m_labelScopes.append(scope);
    return LabelScopePtr(m_labelScopes, m_labelScopes.size() - 1);
}

PassRefPtr<Label> BytecodeGenerator::newLabel()
{
    // Reclaim free label IDs.
    while (m_labels.size() && !m_labels.last().refCount())
        m_labels.removeLast();

    // Allocate new label ID.
    m_labels.append(this);
    return &m_labels.last();
}
PassRefPtr<Label> BytecodeGenerator::emitLabel(Label* l0)
{
    unsigned newLabelIndex = instructions().size();
    l0->setLocation(newLabelIndex);

    if (m_codeBlock->numberOfJumpTargets()) {
        unsigned lastLabelIndex = m_codeBlock->lastJumpTarget();
        ASSERT(lastLabelIndex <= newLabelIndex);
        if (newLabelIndex == lastLabelIndex) {
            // Peephole optimizations have already been disabled by emitting the last label
            return l0;
        }
    }

    m_codeBlock->addJumpTarget(newLabelIndex);

    // This disables peephole optimizations when an instruction is a jump target
    m_lastOpcodeID = op_end;
    return l0;
}
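
// emitOpcode() appends the opcode and remembers it in m_lastOpcodeID, which the peephole paths in
// emitJumpIfTrue/False and emitEqualityOp key off; m_lastOpcodeID is reset to op_end wherever
// fusing across an instruction boundary would be unsafe (for example at jump targets).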
void BytecodeGenerator::emitOpcode(OpcodeID opcodeID)
{
#ifndef NDEBUG
    size_t opcodePosition = instructions().size();
    ASSERT(opcodePosition - m_lastOpcodePosition == opcodeLength(m_lastOpcodeID) || m_lastOpcodeID == op_end);
    m_lastOpcodePosition = opcodePosition;
#endif
    instructions().append(opcodeID);
    m_lastOpcodeID = opcodeID;
}
UnlinkedArrayProfile BytecodeGenerator::newArrayProfile()
{
    return m_codeBlock->addArrayProfile();
}

UnlinkedArrayAllocationProfile BytecodeGenerator::newArrayAllocationProfile()
{
    return m_codeBlock->addArrayAllocationProfile();
}

UnlinkedObjectAllocationProfile BytecodeGenerator::newObjectAllocationProfile()
{
    return m_codeBlock->addObjectAllocationProfile();
}

UnlinkedValueProfile BytecodeGenerator::emitProfiledOpcode(OpcodeID opcodeID)
{
    UnlinkedValueProfile result = m_codeBlock->addValueProfile();
    emitOpcode(opcodeID);
    return result;
}

void BytecodeGenerator::emitLoopHint()
{
    emitOpcode(op_loop_hint);
}
void BytecodeGenerator::retrieveLastBinaryOp(int& dstIndex, int& src1Index, int& src2Index)
{
    ASSERT(instructions().size() >= 4);
    size_t size = instructions().size();
    dstIndex = instructions().at(size - 3).u.operand;
    src1Index = instructions().at(size - 2).u.operand;
    src2Index = instructions().at(size - 1).u.operand;
}

void BytecodeGenerator::retrieveLastUnaryOp(int& dstIndex, int& srcIndex)
{
    ASSERT(instructions().size() >= 3);
    size_t size = instructions().size();
    dstIndex = instructions().at(size - 2).u.operand;
    srcIndex = instructions().at(size - 1).u.operand;
}

void ALWAYS_INLINE BytecodeGenerator::rewindBinaryOp()
{
    ASSERT(instructions().size() >= 4);
    instructions().shrink(instructions().size() - 4);
    m_lastOpcodeID = op_end;
}

void ALWAYS_INLINE BytecodeGenerator::rewindUnaryOp()
{
    ASSERT(instructions().size() >= 3);
    instructions().shrink(instructions().size() - 3);
    m_lastOpcodeID = op_end;
}
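
// The jump emitters below try to fuse a preceding compare (or null/typeof-style test) with the
// jump that consumes it: if the condition register is a dead temporary, the just-emitted opcode
// is rewound and replaced with a single compare-and-jump opcode.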
PassRefPtr<Label> BytecodeGenerator::emitJump(Label* target)
{
    size_t begin = instructions().size();
    emitOpcode(op_jmp);
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}
PassRefPtr<Label> BytecodeGenerator::emitJumpIfTrue(RegisterID* cond, Label* target)
{
    if (m_lastOpcodeID == op_less) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jless);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_lesseq) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jlesseq);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_greater) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jgreater);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_greatereq) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jgreatereq);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jeq_null);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jneq_null);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    }

    size_t begin = instructions().size();

    emitOpcode(op_jtrue);
    instructions().append(cond->index());
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}
PassRefPtr<Label> BytecodeGenerator::emitJumpIfFalse(RegisterID* cond, Label* target)
{
    if (m_lastOpcodeID == op_less && target->isForward()) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jnless);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_lesseq && target->isForward()) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jnlesseq);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_greater && target->isForward()) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jngreater);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_greatereq && target->isForward()) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jngreatereq);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_not) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jtrue);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jneq_null);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jeq_null);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    }

    size_t begin = instructions().size();
    emitOpcode(op_jfalse);
    instructions().append(cond->index());
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}
PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionCall(RegisterID* cond, Label* target)
{
    size_t begin = instructions().size();

    emitOpcode(op_jneq_ptr);
    instructions().append(cond->index());
    instructions().append(Special::CallFunction);
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}

PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionApply(RegisterID* cond, Label* target)
{
    size_t begin = instructions().size();

    emitOpcode(op_jneq_ptr);
    instructions().append(cond->index());
    instructions().append(Special::ApplyFunction);
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}
unsigned BytecodeGenerator::addConstant(const Identifier& ident)
{
    StringImpl* rep = ident.impl();
    IdentifierMap::AddResult result = m_identifierMap.add(rep, m_codeBlock->numberOfIdentifiers());
    if (result.isNewEntry)
        m_codeBlock->addIdentifier(ident);

    return result.iterator->value;
}

// We can't hash JSValue(), so we use a dedicated data member to cache it.
RegisterID* BytecodeGenerator::addConstantEmptyValue()
{
    if (!m_emptyValueRegister) {
        int index = m_nextConstantOffset;
        m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset);
        ++m_nextConstantOffset;
        m_codeBlock->addConstant(JSValue());
        m_emptyValueRegister = &m_constantPoolRegisters[index];
    }

    return m_emptyValueRegister;
}
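
// Constants live in a separate register range starting at FirstConstantRegisterIndex.
// addConstantValue() dedupes on the encoded JSValue so repeated literals share one constant slot.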
RegisterID* BytecodeGenerator::addConstantValue(JSValue v)
{
    if (!v)
        return addConstantEmptyValue();

    int index = m_nextConstantOffset;
    JSValueMap::AddResult result = m_jsValueMap.add(JSValue::encode(v), m_nextConstantOffset);
    if (result.isNewEntry) {
        m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset);
        ++m_nextConstantOffset;
        m_codeBlock->addConstant(v);
    } else
        index = result.iterator->value;
    return &m_constantPoolRegisters[index];
}

unsigned BytecodeGenerator::addRegExp(RegExp* r)
{
    return m_codeBlock->addRegExp(r);
}
RegisterID* BytecodeGenerator::emitMove(RegisterID* dst, CaptureMode captureMode, RegisterID* src)
{
    m_staticPropertyAnalyzer.mov(dst->index(), src->index());

    emitOpcode(captureMode == IsCaptured ? op_captured_mov : op_mov);
    instructions().append(dst->index());
    instructions().append(src->index());
    if (captureMode == IsCaptured)
        instructions().append(watchableVariable(dst->index()));
    return dst;
}

RegisterID* BytecodeGenerator::emitMove(RegisterID* dst, RegisterID* src)
{
    return emitMove(dst, captureMode(dst->index()), src);
}

RegisterID* BytecodeGenerator::emitUnaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src)
{
    emitOpcode(opcodeID);
    instructions().append(dst->index());
    instructions().append(src->index());
    return dst;
}

RegisterID* BytecodeGenerator::emitInc(RegisterID* srcDst)
{
    emitOpcode(op_inc);
    instructions().append(srcDst->index());
    return srcDst;
}

RegisterID* BytecodeGenerator::emitDec(RegisterID* srcDst)
{
    emitOpcode(op_dec);
    instructions().append(srcDst->index());
    return srcDst;
}
RegisterID* BytecodeGenerator::emitBinaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2, OperandTypes types)
{
    emitOpcode(opcodeID);
    instructions().append(dst->index());
    instructions().append(src1->index());
    instructions().append(src2->index());

    if (opcodeID == op_bitor || opcodeID == op_bitand || opcodeID == op_bitxor ||
        opcodeID == op_add || opcodeID == op_mul || opcodeID == op_sub || opcodeID == op_div)
        instructions().append(types.toInt());

    return dst;
}
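
// emitEqualityOp() peepholes the common `typeof x == "literal"` pattern: when the left operand is
// the dead result of op_typeof and the right operand is a known string constant, the comparison
// collapses into a single op_is_* query on the original value.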
RegisterID* BytecodeGenerator::emitEqualityOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2)
{
    if (m_lastOpcodeID == op_typeof) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (src1->index() == dstIndex
            && src1->isTemporary()
            && m_codeBlock->isConstantRegisterIndex(src2->index())
            && m_codeBlock->constantRegister(src2->index()).get().isString()) {
            const String& value = asString(m_codeBlock->constantRegister(src2->index()).get())->tryGetValue();
            if (value == "undefined") {
                rewindUnaryOp();
                emitOpcode(op_is_undefined);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "boolean") {
                rewindUnaryOp();
                emitOpcode(op_is_boolean);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "number") {
                rewindUnaryOp();
                emitOpcode(op_is_number);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "string") {
                rewindUnaryOp();
                emitOpcode(op_is_string);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "object") {
                rewindUnaryOp();
                emitOpcode(op_is_object);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "function") {
                rewindUnaryOp();
                emitOpcode(op_is_function);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
        }
    }

    emitOpcode(opcodeID);
    instructions().append(dst->index());
    instructions().append(src1->index());
    instructions().append(src2->index());
    return dst;
}
RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, bool b)
{
    return emitLoad(dst, jsBoolean(b));
}

RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, double number)
{
    // FIXME: Our hash tables won't hold infinity, so we make a new JSValue each time.
    // Later we can do the extra work to handle that like the other cases. They also don't
    // work correctly with NaN as a key.
    if (std::isnan(number) || number == HashTraits<double>::emptyValue() || HashTraits<double>::isDeletedValue(number))
        return emitLoad(dst, jsNumber(number));
    JSValue& valueInMap = m_numberMap.add(number, JSValue()).iterator->value;
    if (!valueInMap)
        valueInMap = jsNumber(number);
    return emitLoad(dst, valueInMap);
}
RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, const Identifier& identifier)
{
    JSString*& stringInMap = m_stringMap.add(identifier.impl(), nullptr).iterator->value;
    if (!stringInMap)
        stringInMap = jsOwnedString(vm(), identifier.string());
    return emitLoad(dst, JSValue(stringInMap));
}

RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, JSValue v)
{
    RegisterID* constantID = addConstantValue(v);
    if (dst)
        return emitMove(dst, constantID);
    return constantID;
}
RegisterID* BytecodeGenerator::emitLoadGlobalObject(RegisterID* dst)
{
    if (!m_globalObjectRegister) {
        int index = m_nextConstantOffset;
        m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset);
        ++m_nextConstantOffset;
        m_codeBlock->addConstant(JSValue());
        m_globalObjectRegister = &m_constantPoolRegisters[index];
        m_codeBlock->setGlobalObjectRegister(VirtualRegister(index));
    }
    if (dst)
        emitMove(dst, m_globalObjectRegister);
    return m_globalObjectRegister;
}

bool BytecodeGenerator::isCaptured(int operand)
{
    return m_symbolTable && m_symbolTable->isCaptured(operand);
}
Local BytecodeGenerator::local(const Identifier& property)
{
    if (property == propertyNames().thisIdentifier)
        return Local(thisRegister(), ReadOnly, NotCaptured);

    if (property == propertyNames().arguments)
        createArgumentsIfNecessary();

    if (!shouldOptimizeLocals())
        return Local();

    SymbolTableEntry entry = symbolTable().get(property.impl());
    if (entry.isNull())
        return Local();

    RegisterID* local = createLazyRegisterIfNecessary(&registerFor(entry.getIndex()));
    return Local(local, entry.getAttributes(), captureMode(local->index()));
}

Local BytecodeGenerator::constLocal(const Identifier& property)
{
    if (m_codeType != FunctionCode)
        return Local();

    SymbolTableEntry entry = symbolTable().get(property.impl());
    if (entry.isNull())
        return Local();

    RegisterID* local = createLazyRegisterIfNecessary(&registerFor(entry.getIndex()));
    return Local(local, entry.getAttributes(), captureMode(local->index()));
}
void BytecodeGenerator::emitCheckHasInstance(RegisterID* dst, RegisterID* value, RegisterID* base, Label* target)
{
    size_t begin = instructions().size();
    emitOpcode(op_check_has_instance);
    instructions().append(dst->index());
    instructions().append(value->index());
    instructions().append(base->index());
    instructions().append(target->bind(begin, instructions().size()));
}
// Indicates the least upper bound of resolve type based on local scope. The bytecode linker
// will start with this ResolveType and compute the least upper bound including intercepting scopes.
ResolveType BytecodeGenerator::resolveType()
{
    if (m_localScopeDepth)
        return Dynamic;
    if (m_symbolTable && m_symbolTable->usesNonStrictEval())
        return GlobalPropertyWithVarInjectionChecks;
    return GlobalProperty;
}
RegisterID* BytecodeGenerator::emitResolveScope(RegisterID* dst, const Identifier& identifier)
{
    m_codeBlock->addPropertyAccessInstruction(instructions().size());

    ASSERT(!m_symbolTable || !m_symbolTable->contains(identifier.impl()) || resolveType() == Dynamic);

    // resolve_scope dst, id, ResolveType, depth
    emitOpcode(op_resolve_scope);
    instructions().append(kill(dst));
    instructions().append(addConstant(identifier));
    instructions().append(resolveType());
    instructions().append(0);
    instructions().append(0);
    return dst;
}

RegisterID* BytecodeGenerator::emitGetFromScope(RegisterID* dst, RegisterID* scope, const Identifier& identifier, ResolveMode resolveMode)
{
    m_codeBlock->addPropertyAccessInstruction(instructions().size());

    // get_from_scope dst, scope, id, ResolveModeAndType, Structure, Operand
    UnlinkedValueProfile profile = emitProfiledOpcode(op_get_from_scope);
    instructions().append(kill(dst));
    instructions().append(scope->index());
    instructions().append(addConstant(identifier));
    instructions().append(ResolveModeAndType(resolveMode, resolveType()).operand());
    instructions().append(0);
    instructions().append(0);
    instructions().append(profile);
    return dst;
}

RegisterID* BytecodeGenerator::emitPutToScope(RegisterID* scope, const Identifier& identifier, RegisterID* value, ResolveMode resolveMode)
{
    m_codeBlock->addPropertyAccessInstruction(instructions().size());

    // put_to_scope scope, id, value, ResolveModeAndType, Structure, Operand
    emitOpcode(op_put_to_scope);
    instructions().append(scope->index());
    instructions().append(addConstant(identifier));
    instructions().append(value->index());
    instructions().append(ResolveModeAndType(resolveMode, resolveType()).operand());
    instructions().append(0);
    instructions().append(0);
    return value;
}
RegisterID* BytecodeGenerator::emitInstanceOf(RegisterID* dst, RegisterID* value, RegisterID* basePrototype)
{
    emitOpcode(op_instanceof);
    instructions().append(dst->index());
    instructions().append(value->index());
    instructions().append(basePrototype->index());
    return dst;
}

RegisterID* BytecodeGenerator::emitInitGlobalConst(const Identifier& identifier, RegisterID* value)
{
    ASSERT(m_codeType == GlobalCode);
    emitOpcode(op_init_global_const_nop);
    instructions().append(0);
    instructions().append(value->index());
    instructions().append(0);
    instructions().append(addConstant(identifier));
    return value;
}
RegisterID* BytecodeGenerator::emitGetById(RegisterID* dst, RegisterID* base, const Identifier& property)
{
    m_codeBlock->addPropertyAccessInstruction(instructions().size());

    UnlinkedValueProfile profile = emitProfiledOpcode(op_get_by_id);
    instructions().append(kill(dst));
    instructions().append(base->index());
    instructions().append(addConstant(property));
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(profile);
    return dst;
}

RegisterID* BytecodeGenerator::emitGetArgumentsLength(RegisterID* dst, RegisterID* base)
{
    emitOpcode(op_get_arguments_length);
    instructions().append(dst->index());
    ASSERT(base->virtualRegister() == m_codeBlock->argumentsRegister());
    instructions().append(base->index());
    instructions().append(addConstant(propertyNames().length));
    return dst;
}
RegisterID* BytecodeGenerator::emitPutById(RegisterID* base, const Identifier& property, RegisterID* value)
{
    unsigned propertyIndex = addConstant(property);

    m_staticPropertyAnalyzer.putById(base->index(), propertyIndex);

    m_codeBlock->addPropertyAccessInstruction(instructions().size());

    emitOpcode(op_put_by_id);
    instructions().append(base->index());
    instructions().append(propertyIndex);
    instructions().append(value->index());
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    return value;
}

RegisterID* BytecodeGenerator::emitDirectPutById(RegisterID* base, const Identifier& property, RegisterID* value)
{
    unsigned propertyIndex = addConstant(property);

    m_staticPropertyAnalyzer.putById(base->index(), propertyIndex);

    m_codeBlock->addPropertyAccessInstruction(instructions().size());

    emitOpcode(op_put_by_id);
    instructions().append(base->index());
    instructions().append(propertyIndex);
    instructions().append(value->index());
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(
        property != m_vm->propertyNames->underscoreProto
        && PropertyName(property).asIndex() == PropertyName::NotAnIndex);
    return value;
}
void BytecodeGenerator::emitPutGetterSetter(RegisterID* base, const Identifier& property, RegisterID* getter, RegisterID* setter)
{
    unsigned propertyIndex = addConstant(property);

    m_staticPropertyAnalyzer.putById(base->index(), propertyIndex);

    emitOpcode(op_put_getter_setter);
    instructions().append(base->index());
    instructions().append(propertyIndex);
    instructions().append(getter->index());
    instructions().append(setter->index());
}

RegisterID* BytecodeGenerator::emitDeleteById(RegisterID* dst, RegisterID* base, const Identifier& property)
{
    emitOpcode(op_del_by_id);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(addConstant(property));
    return dst;
}
RegisterID* BytecodeGenerator::emitGetArgumentByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
{
    UnlinkedArrayProfile arrayProfile = newArrayProfile();
    UnlinkedValueProfile profile = emitProfiledOpcode(op_get_argument_by_val);
    instructions().append(kill(dst));
    ASSERT(base->virtualRegister() == m_codeBlock->argumentsRegister());
    instructions().append(base->index());
    instructions().append(property->index());
    instructions().append(arrayProfile);
    instructions().append(profile);
    return dst;
}

RegisterID* BytecodeGenerator::emitGetByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
{
    for (size_t i = m_forInContextStack.size(); i > 0; i--) {
        ForInContext& context = m_forInContextStack[i - 1];
        if (context.propertyRegister == property) {
            emitOpcode(op_get_by_pname);
            instructions().append(dst->index());
            instructions().append(base->index());
            instructions().append(property->index());
            instructions().append(context.expectedSubscriptRegister->index());
            instructions().append(context.iterRegister->index());
            instructions().append(context.indexRegister->index());
            return dst;
        }
    }

    UnlinkedArrayProfile arrayProfile = newArrayProfile();
    UnlinkedValueProfile profile = emitProfiledOpcode(op_get_by_val);
    instructions().append(kill(dst));
    instructions().append(base->index());
    instructions().append(property->index());
    instructions().append(arrayProfile);
    instructions().append(profile);
    return dst;
}
RegisterID* BytecodeGenerator::emitPutByVal(RegisterID* base, RegisterID* property, RegisterID* value)
{
    UnlinkedArrayProfile arrayProfile = newArrayProfile();
    if (m_isBuiltinFunction)
        emitOpcode(op_put_by_val_direct);
    else
        emitOpcode(op_put_by_val);
    instructions().append(base->index());
    instructions().append(property->index());
    instructions().append(value->index());
    instructions().append(arrayProfile);
    return value;
}

RegisterID* BytecodeGenerator::emitDirectPutByVal(RegisterID* base, RegisterID* property, RegisterID* value)
{
    UnlinkedArrayProfile arrayProfile = newArrayProfile();
    emitOpcode(op_put_by_val_direct);
    instructions().append(base->index());
    instructions().append(property->index());
    instructions().append(value->index());
    instructions().append(arrayProfile);
    return value;
}

RegisterID* BytecodeGenerator::emitDeleteByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
{
    emitOpcode(op_del_by_val);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(property->index());
    return dst;
}

RegisterID* BytecodeGenerator::emitPutByIndex(RegisterID* base, unsigned index, RegisterID* value)
{
    emitOpcode(op_put_by_index);
    instructions().append(base->index());
    instructions().append(index);
    instructions().append(value->index());
    return value;
}
RegisterID* BytecodeGenerator::emitCreateThis(RegisterID* dst)
{
    RefPtr<RegisterID> func = newTemporary();

    m_codeBlock->addPropertyAccessInstruction(instructions().size());
    emitOpcode(op_get_callee);
    instructions().append(func->index());
    instructions().append(0);

    size_t begin = instructions().size();
    m_staticPropertyAnalyzer.createThis(m_thisRegister.index(), begin + 3);

    emitOpcode(op_create_this);
    instructions().append(m_thisRegister.index());
    instructions().append(func->index());
    instructions().append(0);
    return dst;
}

RegisterID* BytecodeGenerator::emitNewObject(RegisterID* dst)
{
    size_t begin = instructions().size();
    m_staticPropertyAnalyzer.newObject(dst->index(), begin + 2);

    emitOpcode(op_new_object);
    instructions().append(dst->index());
    instructions().append(0);
    instructions().append(newObjectAllocationProfile());
    return dst;
}
unsigned BytecodeGenerator::addConstantBuffer(unsigned length)
{
    return m_codeBlock->addConstantBuffer(length);
}

JSString* BytecodeGenerator::addStringConstant(const Identifier& identifier)
{
    JSString*& stringInMap = m_stringMap.add(identifier.impl(), nullptr).iterator->value;
    if (!stringInMap) {
        stringInMap = jsString(vm(), identifier.string());
        addConstantValue(stringInMap);
    }
    return stringInMap;
}
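
// Array literals whose elements are all constants are emitted as op_new_array_buffer over a
// shared constant buffer; a literal with any variable element falls back to op_new_array over a
// sequential run of temporaries.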
RegisterID* BytecodeGenerator::emitNewArray(RegisterID* dst, ElementNode* elements, unsigned length)
{
#if !ASSERT_DISABLED
    unsigned checkLength = 0;
#endif
    bool hadVariableExpression = false;
    if (length) {
        for (ElementNode* n = elements; n; n = n->next()) {
            if (!n->value()->isConstant()) {
                hadVariableExpression = true;
                break;
            }
#if !ASSERT_DISABLED
            checkLength++;
#endif
        }
        if (!hadVariableExpression) {
            ASSERT(length == checkLength);
            unsigned constantBufferIndex = addConstantBuffer(length);
            JSValue* constantBuffer = m_codeBlock->constantBuffer(constantBufferIndex).data();
            unsigned index = 0;
            for (ElementNode* n = elements; index < length; n = n->next()) {
                ASSERT(n->value()->isConstant());
                constantBuffer[index++] = static_cast<ConstantNode*>(n->value())->jsValue(*this);
            }
            emitOpcode(op_new_array_buffer);
            instructions().append(dst->index());
            instructions().append(constantBufferIndex);
            instructions().append(length);
            instructions().append(newArrayAllocationProfile());
            return dst;
        }
    }

    Vector<RefPtr<RegisterID>, 16, UnsafeVectorOverflow> argv;
    for (ElementNode* n = elements; n; n = n->next()) {
        ASSERT(!n->value()->isSpreadExpression());
        argv.append(newTemporary());
        // op_new_array requires the initial values to be a sequential range of registers
        ASSERT(argv.size() == 1 || argv[argv.size() - 1]->index() == argv[argv.size() - 2]->index() - 1);
        emitNode(argv.last().get(), n->value());
    }

    emitOpcode(op_new_array);
    instructions().append(dst->index());
    instructions().append(argv.size() ? argv[0]->index() : 0); // argv
    instructions().append(argv.size()); // argc
    instructions().append(newArrayAllocationProfile());
    return dst;
}
RegisterID* BytecodeGenerator::emitNewFunction(RegisterID* dst, CaptureMode captureMode, FunctionBodyNode* function)
{
    return emitNewFunctionInternal(dst, captureMode, m_codeBlock->addFunctionDecl(makeFunction(function)), false);
}

RegisterID* BytecodeGenerator::emitLazyNewFunction(RegisterID* dst, FunctionBodyNode* function)
{
    FunctionOffsetMap::AddResult ptr = m_functionOffsets.add(function, 0);
    if (ptr.isNewEntry)
        ptr.iterator->value = m_codeBlock->addFunctionDecl(makeFunction(function));
    return emitNewFunctionInternal(dst, NotCaptured, ptr.iterator->value, true);
}

RegisterID* BytecodeGenerator::emitNewFunctionInternal(RegisterID* dst, CaptureMode captureMode, unsigned index, bool doNullCheck)
{
    createActivationIfNecessary();
    emitOpcode(captureMode == IsCaptured ? op_new_captured_func : op_new_func);
    instructions().append(dst->index());
    instructions().append(index);
    if (captureMode == IsCaptured) {
        ASSERT(!doNullCheck);
        instructions().append(watchableVariable(dst->index()));
    } else
        instructions().append(doNullCheck);
    return dst;
}
RegisterID* BytecodeGenerator::emitNewRegExp(RegisterID* dst, RegExp* regExp)
{
    emitOpcode(op_new_regexp);
    instructions().append(dst->index());
    instructions().append(addRegExp(regExp));
    return dst;
}

RegisterID* BytecodeGenerator::emitNewFunctionExpression(RegisterID* r0, FuncExprNode* n)
{
    FunctionBodyNode* function = n->body();
    unsigned index = m_codeBlock->addFunctionExpr(makeFunction(function));

    createActivationIfNecessary();
    emitOpcode(op_new_func_exp);
    instructions().append(r0->index());
    instructions().append(index);
    return r0;
}
RegisterID* BytecodeGenerator::emitCall(RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd)
{
    return emitCall(op_call, dst, func, expectedFunction, callArguments, divot, divotStart, divotEnd);
}
void BytecodeGenerator::createArgumentsIfNecessary()
{
    if (m_codeType != FunctionCode)
        return;

    if (!m_codeBlock->usesArguments())
        return;

    if (shouldTearOffArgumentsEagerly())
        return;

    emitOpcode(op_create_arguments);
    instructions().append(m_codeBlock->argumentsRegister().offset());
    ASSERT(!hasWatchableVariable(m_codeBlock->argumentsRegister().offset()));
}

void BytecodeGenerator::createActivationIfNecessary()
{
    if (!m_activationRegister)
        return;
    emitOpcode(op_create_activation);
    instructions().append(m_activationRegister->index());
}
RegisterID* BytecodeGenerator::emitCallEval(RegisterID* dst, RegisterID* func, CallArguments& callArguments, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd)
{
    createActivationIfNecessary();
    return emitCall(op_call_eval, dst, func, NoExpectedFunction, callArguments, divot, divotStart, divotEnd);
}

ExpectedFunction BytecodeGenerator::expectedFunctionForIdentifier(const Identifier& identifier)
{
    if (identifier == m_vm->propertyNames->Object)
        return ExpectObjectConstructor;
    if (identifier == m_vm->propertyNames->Array)
        return ExpectArrayConstructor;
    return NoExpectedFunction;
}
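
// When the callee is statically expected to be the Object or Array constructor, emit a guarded
// fast path: op_jneq_ptr jumps to the real call if the callee isn't the expected constructor and
// otherwise falls through to an inline allocation.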
ExpectedFunction BytecodeGenerator::emitExpectedFunctionSnippet(RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, Label* done)
{
    RefPtr<Label> realCall = newLabel();
    switch (expectedFunction) {
    case ExpectObjectConstructor: {
        // If the number of arguments is non-zero, then we can't do anything interesting.
        if (callArguments.argumentCountIncludingThis() >= 2)
            return NoExpectedFunction;

        size_t begin = instructions().size();
        emitOpcode(op_jneq_ptr);
        instructions().append(func->index());
        instructions().append(Special::ObjectConstructor);
        instructions().append(realCall->bind(begin, instructions().size()));

        if (dst != ignoredResult())
            emitNewObject(dst);
        break;
    }

    case ExpectArrayConstructor: {
        // If you're doing anything other than "new Array()" or "new Array(foo)" then we
        // don't inline it, for now. The only reason is that call arguments are in
        // the opposite order of what op_new_array expects, so we'd either need to change
        // how op_new_array works or we'd need an op_new_array_reverse. Neither of these
        // things sounds like it's worth it.
        if (callArguments.argumentCountIncludingThis() > 2)
            return NoExpectedFunction;

        size_t begin = instructions().size();
        emitOpcode(op_jneq_ptr);
        instructions().append(func->index());
        instructions().append(Special::ArrayConstructor);
        instructions().append(realCall->bind(begin, instructions().size()));

        if (dst != ignoredResult()) {
            if (callArguments.argumentCountIncludingThis() == 2) {
                emitOpcode(op_new_array_with_size);
                instructions().append(dst->index());
                instructions().append(callArguments.argumentRegister(0)->index());
                instructions().append(newArrayAllocationProfile());
            } else {
                ASSERT(callArguments.argumentCountIncludingThis() == 1);
                emitOpcode(op_new_array);
                instructions().append(dst->index());
                instructions().append(0);
                instructions().append(0);
                instructions().append(newArrayAllocationProfile());
            }
        }
        break;
    }

    default:
        ASSERT(expectedFunction == NoExpectedFunction);
        return NoExpectedFunction;
    }

    size_t begin = instructions().size();
    emitOpcode(op_jmp);
    instructions().append(done->bind(begin, instructions().size()));
    emitLabel(realCall.get());

    return expectedFunction;
}

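// General call emission. A lone spread argument (including a spread of a
// non-escaping 'arguments' object) is rerouted through emitCallVarargs;
// otherwise the arguments are evaluated into consecutive registers, header
// space for the callee frame is reserved, and op_call / op_call_eval is
// emitted along with its profiling slots.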
RegisterID* BytecodeGenerator::emitCall(OpcodeID opcodeID, RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd)
{
    ASSERT(opcodeID == op_call || opcodeID == op_call_eval);
    ASSERT(func->refCount());

    if (m_shouldEmitProfileHooks)
        emitMove(callArguments.profileHookRegister(), func);

    // Generate code for arguments.
    unsigned argument = 0;
    if (callArguments.argumentsNode()) {
        ArgumentListNode* n = callArguments.argumentsNode()->m_listNode;
        if (n && n->m_expr->isSpreadExpression()) {
            RELEASE_ASSERT(!n->m_next);
            auto expression = static_cast<SpreadExpressionNode*>(n->m_expr)->expression();
            RefPtr<RegisterID> argumentRegister;
            if (expression->isResolveNode() && willResolveToArguments(static_cast<ResolveNode*>(expression)->identifier()) && !symbolTable().slowArguments())
                argumentRegister = uncheckedRegisterForArguments();
            else
                argumentRegister = expression->emitBytecode(*this, callArguments.argumentRegister(0));
            RefPtr<RegisterID> thisRegister = emitMove(newTemporary(), callArguments.thisRegister());
            return emitCallVarargs(dst, func, callArguments.thisRegister(), argumentRegister.get(), newTemporary(), 0, callArguments.profileHookRegister(), divot, divotStart, divotEnd);
        }
        for (; n; n = n->m_next)
            emitNode(callArguments.argumentRegister(argument++), n);
    }

    // Reserve space for call frame.
    Vector<RefPtr<RegisterID>, JSStack::CallFrameHeaderSize, UnsafeVectorOverflow> callFrame;
    for (int i = 0; i < JSStack::CallFrameHeaderSize; ++i)
        callFrame.append(newTemporary());

    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_will_call);
        instructions().append(callArguments.profileHookRegister()->index());
    }

    emitExpressionInfo(divot, divotStart, divotEnd);

    RefPtr<Label> done = newLabel();
    expectedFunction = emitExpectedFunctionSnippet(dst, func, expectedFunction, callArguments, done.get());

    // Emit call.
    UnlinkedArrayProfile arrayProfile = newArrayProfile();
    UnlinkedValueProfile profile = emitProfiledOpcode(opcodeID);
    ASSERT(dst != ignoredResult());
    instructions().append(dst->index());
    instructions().append(func->index());
    instructions().append(callArguments.argumentCountIncludingThis());
    instructions().append(callArguments.stackOffset());
    instructions().append(m_codeBlock->addLLIntCallLinkInfo());
    instructions().append(0);
    instructions().append(arrayProfile);
    instructions().append(profile);

    if (expectedFunction != NoExpectedFunction)
        emitLabel(done.get());

    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_did_call);
        instructions().append(callArguments.profileHookRegister()->index());
    }

    return dst;
}

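// The next two overloads just pick the varargs opcode: op_call_varargs for
// ordinary calls and op_construct_varargs for 'new' expressions, which pass 0
// in place of a 'this' register.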
RegisterID* BytecodeGenerator::emitCallVarargs(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, RegisterID* arguments, RegisterID* firstFreeRegister, int32_t firstVarArgOffset, RegisterID* profileHookRegister, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd)
{
    return emitCallVarargs(op_call_varargs, dst, func, thisRegister, arguments, firstFreeRegister, firstVarArgOffset, profileHookRegister, divot, divotStart, divotEnd);
}

RegisterID* BytecodeGenerator::emitConstructVarargs(RegisterID* dst, RegisterID* func, RegisterID* arguments, RegisterID* firstFreeRegister, int32_t firstVarArgOffset, RegisterID* profileHookRegister, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd)
{
    return emitCallVarargs(op_construct_varargs, dst, func, 0, arguments, firstFreeRegister, firstVarArgOffset, profileHookRegister, divot, divotStart, divotEnd);
}

RegisterID* BytecodeGenerator::emitCallVarargs(OpcodeID opcode, RegisterID* dst, RegisterID* func, RegisterID* thisRegister, RegisterID* arguments, RegisterID* firstFreeRegister, int32_t firstVarArgOffset, RegisterID* profileHookRegister, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd)
{
    if (m_shouldEmitProfileHooks) {
        emitMove(profileHookRegister, func);
        emitOpcode(op_profile_will_call);
        instructions().append(profileHookRegister->index());
    }

    emitExpressionInfo(divot, divotStart, divotEnd);

    // Emit call.
    UnlinkedArrayProfile arrayProfile = newArrayProfile();
    UnlinkedValueProfile profile = emitProfiledOpcode(opcode);
    ASSERT(dst != ignoredResult());
    instructions().append(dst->index());
    instructions().append(func->index());
    instructions().append(thisRegister ? thisRegister->index() : 0);
    instructions().append(arguments->index());
    instructions().append(firstFreeRegister->index());
    instructions().append(firstVarArgOffset);
    instructions().append(arrayProfile);
    instructions().append(profile);
    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_did_call);
        instructions().append(profileHookRegister->index());
    }
    return dst;
}

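// Returning from a function tears off the activation and (in sloppy mode) the
// arguments object so they can outlive the frame. Constructors return through
// op_ret_object_or_this, which substitutes 'this' when the result is not an
// object; returning 'this' itself needs no check.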
RegisterID* BytecodeGenerator::emitReturn(RegisterID* src)
{
    if (m_activationRegister) {
        emitOpcode(op_tear_off_activation);
        instructions().append(m_activationRegister->index());
    }

    if (m_codeBlock->usesArguments() && m_codeBlock->numParameters() != 1 && !isStrictMode()) {
        emitOpcode(op_tear_off_arguments);
        instructions().append(m_codeBlock->argumentsRegister().offset());
        instructions().append(m_activationRegister ? m_activationRegister->index() : emitLoad(0, JSValue())->index());
    }

    // Constructors use op_ret_object_or_this to check the result is an
    // object, unless we can trivially determine the check is not
    // necessary (currently, if the return value is 'this').
    if (isConstructor() && (src->index() != m_thisRegister.index())) {
        emitOpcode(op_ret_object_or_this);
        instructions().append(src->index());
        instructions().append(m_thisRegister.index());
        return src;
    }
    return emitUnaryNoDstOp(op_ret, src);
}

RegisterID* BytecodeGenerator::emitUnaryNoDstOp(OpcodeID opcodeID, RegisterID* src)
{
    emitOpcode(opcodeID);
    instructions().append(src->index());
    return src;
}

RegisterID* BytecodeGenerator::emitConstruct(RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd)
{
    ASSERT(func->refCount());

    if (m_shouldEmitProfileHooks)
        emitMove(callArguments.profileHookRegister(), func);

    // Generate code for arguments.
    unsigned argument = 0;
    if (ArgumentsNode* argumentsNode = callArguments.argumentsNode()) {
        ArgumentListNode* n = callArguments.argumentsNode()->m_listNode;
        if (n && n->m_expr->isSpreadExpression()) {
            RELEASE_ASSERT(!n->m_next);
            auto expression = static_cast<SpreadExpressionNode*>(n->m_expr)->expression();
            RefPtr<RegisterID> argumentRegister;
            if (expression->isResolveNode() && willResolveToArguments(static_cast<ResolveNode*>(expression)->identifier()) && !symbolTable().slowArguments())
                argumentRegister = uncheckedRegisterForArguments();
            else
                argumentRegister = expression->emitBytecode(*this, callArguments.argumentRegister(0));
            return emitConstructVarargs(dst, func, argumentRegister.get(), newTemporary(), 0, callArguments.profileHookRegister(), divot, divotStart, divotEnd);
        }

        for (ArgumentListNode* n = argumentsNode->m_listNode; n; n = n->m_next)
            emitNode(callArguments.argumentRegister(argument++), n);
    }

    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_will_call);
        instructions().append(callArguments.profileHookRegister()->index());
    }

    // Reserve space for call frame.
    Vector<RefPtr<RegisterID>, JSStack::CallFrameHeaderSize, UnsafeVectorOverflow> callFrame;
    for (int i = 0; i < JSStack::CallFrameHeaderSize; ++i)
        callFrame.append(newTemporary());

    emitExpressionInfo(divot, divotStart, divotEnd);

    RefPtr<Label> done = newLabel();
    expectedFunction = emitExpectedFunctionSnippet(dst, func, expectedFunction, callArguments, done.get());

    UnlinkedValueProfile profile = emitProfiledOpcode(op_construct);
    ASSERT(dst != ignoredResult());
    instructions().append(dst->index());
    instructions().append(func->index());
    instructions().append(callArguments.argumentCountIncludingThis());
    instructions().append(callArguments.stackOffset());
    instructions().append(m_codeBlock->addLLIntCallLinkInfo());
    instructions().append(0);
    instructions().append(0);
    instructions().append(profile);

    if (expectedFunction != NoExpectedFunction)
        emitLabel(done.get());

    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_did_call);
        instructions().append(callArguments.profileHookRegister()->index());
    }

    return dst;
}

RegisterID* BytecodeGenerator::emitStrcat(RegisterID* dst, RegisterID* src, int count)
{
    emitOpcode(op_strcat);
    instructions().append(dst->index());
    instructions().append(src->index());
    instructions().append(count);

    return dst;
}

void BytecodeGenerator::emitToPrimitive(RegisterID* dst, RegisterID* src)
{
    emitOpcode(op_to_primitive);
    instructions().append(dst->index());
    instructions().append(src->index());
}

RegisterID* BytecodeGenerator::emitPushWithScope(RegisterID* scope)
{
    ControlFlowContext context;
    context.isFinallyBlock = false;
    m_scopeContextStack.append(context);
    m_localScopeDepth++;

    createActivationIfNecessary();
    return emitUnaryNoDstOp(op_push_with_scope, scope);
}

void BytecodeGenerator::emitPopScope()
{
    ASSERT(m_scopeContextStack.size());
    ASSERT(!m_scopeContextStack.last().isFinallyBlock);

    emitOpcode(op_pop_scope);

    m_scopeContextStack.removeLast();
    m_localScopeDepth--;
}

void BytecodeGenerator::emitDebugHook(DebugHookID debugHookID, unsigned line, unsigned charOffset, unsigned lineStart)
{
#if ENABLE(DEBUG_WITH_BREAKPOINT)
    if (debugHookID != DidReachBreakpoint)
        return;
#else
    if (!m_shouldEmitDebugHooks)
        return;
#endif
    JSTextPosition divot(line, charOffset, lineStart);
    emitExpressionInfo(divot, divot, divot);
    emitOpcode(op_debug);
    instructions().append(debugHookID);
    instructions().append(false);
}

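// Entering a finally block pushes a ControlFlowContext whose FinallyContext
// snapshots the finally body and the current sizes/depths of the generator's
// bookkeeping stacks, so emitComplexPopScopes can later re-emit the body under
// the same state.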
void BytecodeGenerator::pushFinallyContext(StatementNode* finallyBlock)
{
    // Reclaim free label scopes.
    while (m_labelScopes.size() && !m_labelScopes.last().refCount())
        m_labelScopes.removeLast();

    ControlFlowContext scope;
    scope.isFinallyBlock = true;
    FinallyContext context = {
        finallyBlock,
        static_cast<unsigned>(m_scopeContextStack.size()),
        static_cast<unsigned>(m_switchContextStack.size()),
        static_cast<unsigned>(m_forInContextStack.size()),
        static_cast<unsigned>(m_tryContextStack.size()),
        static_cast<unsigned>(m_labelScopes.size()),
        m_finallyDepth,
        m_localScopeDepth
    };
    scope.finallyContext = context;
    m_scopeContextStack.append(scope);
    m_finallyDepth++;
}

void BytecodeGenerator::popFinallyContext()
{
    ASSERT(m_scopeContextStack.size());
    ASSERT(m_scopeContextStack.last().isFinallyBlock);
    ASSERT(m_finallyDepth > 0);
    m_scopeContextStack.removeLast();
    m_finallyDepth--;
}

LabelScopePtr BytecodeGenerator::breakTarget(const Identifier& name)
{
    // Reclaim free label scopes.
    //
    // The condition was previously coded as 'm_labelScopes.size() && !m_labelScopes.last().refCount()',
    // however sometimes this appears to lead to GCC going a little haywire and entering the loop with
    // size 0, leading to segfaulty badness. We are yet to identify a valid cause within our code to
    // cause the GCC codegen to misbehave in this fashion, and as such the following refactoring of the
    // loop condition is a workaround.
    while (m_labelScopes.size()) {
        if (m_labelScopes.last().refCount())
            break;
        m_labelScopes.removeLast();
    }

    if (!m_labelScopes.size())
        return LabelScopePtr::null();

    // We special-case the following, which is a syntax error in Firefox:
    // label:
    //     break;
    if (name.isEmpty()) {
        for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
            LabelScope* scope = &m_labelScopes[i];
            if (scope->type() != LabelScope::NamedLabel) {
                ASSERT(scope->breakTarget());
                return LabelScopePtr(m_labelScopes, i);
            }
        }
        return LabelScopePtr::null();
    }

    for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
        LabelScope* scope = &m_labelScopes[i];
        if (scope->name() && *scope->name() == name) {
            ASSERT(scope->breakTarget());
            return LabelScopePtr(m_labelScopes, i);
        }
    }
    return LabelScopePtr::null();
}

LabelScopePtr BytecodeGenerator::continueTarget(const Identifier& name)
{
    // Reclaim free label scopes.
    while (m_labelScopes.size() && !m_labelScopes.last().refCount())
        m_labelScopes.removeLast();

    if (!m_labelScopes.size())
        return LabelScopePtr::null();

    if (name.isEmpty()) {
        for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
            LabelScope* scope = &m_labelScopes[i];
            if (scope->type() == LabelScope::Loop) {
                ASSERT(scope->continueTarget());
                return LabelScopePtr(m_labelScopes, i);
            }
        }
        return LabelScopePtr::null();
    }

    // Continue to the loop nested nearest to the label scope that matches
    // 'name'.
    LabelScopePtr result = LabelScopePtr::null();
    for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
        LabelScope* scope = &m_labelScopes[i];
        if (scope->type() == LabelScope::Loop) {
            ASSERT(scope->continueTarget());
            result = LabelScopePtr(m_labelScopes, i);
        }
        if (scope->name() && *scope->name() == name)
            return result; // may be null.
    }
    return LabelScopePtr::null();
}

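// Jumping out of scopes that include finally blocks (break, continue, return)
// requires inlining each finally body at the jump site. While a body is being
// re-emitted, the generator's scope/switch/for-in/try/label stacks are rolled
// back to their state at the time the finally was pushed, enclosing try ranges
// are split around the inlined code, and everything is restored afterwards.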
void BytecodeGenerator::emitComplexPopScopes(ControlFlowContext* topScope, ControlFlowContext* bottomScope)
{
    while (topScope > bottomScope) {
        // First we count the number of dynamic scopes we need to remove to get
        // to a finally block.
        int nNormalScopes = 0;
        while (topScope > bottomScope) {
            if (topScope->isFinallyBlock)
                break;
            ++nNormalScopes;
            --topScope;
        }

        if (nNormalScopes) {
            // We need to remove a number of dynamic scopes to get to the next
            // finally block.
            while (nNormalScopes--)
                emitOpcode(op_pop_scope);

            // If topScope == bottomScope then there isn't a finally block left to emit.
            if (topScope == bottomScope)
                return;
        }

        Vector<ControlFlowContext> savedScopeContextStack;
        Vector<SwitchInfo> savedSwitchContextStack;
        Vector<ForInContext> savedForInContextStack;
        Vector<TryContext> poppedTryContexts;
        LabelScopeStore savedLabelScopes;
        while (topScope > bottomScope && topScope->isFinallyBlock) {
            RefPtr<Label> beforeFinally = emitLabel(newLabel().get());

            // Save the current state of the world while instating the state of the world
            // for the finally block.
            FinallyContext finallyContext = topScope->finallyContext;
            bool flipScopes = finallyContext.scopeContextStackSize != m_scopeContextStack.size();
            bool flipSwitches = finallyContext.switchContextStackSize != m_switchContextStack.size();
            bool flipForIns = finallyContext.forInContextStackSize != m_forInContextStack.size();
            bool flipTries = finallyContext.tryContextStackSize != m_tryContextStack.size();
            bool flipLabelScopes = finallyContext.labelScopesSize != m_labelScopes.size();
            int topScopeIndex = -1;
            int bottomScopeIndex = -1;
            if (flipScopes) {
                topScopeIndex = topScope - m_scopeContextStack.begin();
                bottomScopeIndex = bottomScope - m_scopeContextStack.begin();
                savedScopeContextStack = m_scopeContextStack;
                m_scopeContextStack.shrink(finallyContext.scopeContextStackSize);
            }
            if (flipSwitches) {
                savedSwitchContextStack = m_switchContextStack;
                m_switchContextStack.shrink(finallyContext.switchContextStackSize);
            }
            if (flipForIns) {
                savedForInContextStack = m_forInContextStack;
                m_forInContextStack.shrink(finallyContext.forInContextStackSize);
            }
            if (flipTries) {
                while (m_tryContextStack.size() != finallyContext.tryContextStackSize) {
                    ASSERT(m_tryContextStack.size() > finallyContext.tryContextStackSize);
                    TryContext context = m_tryContextStack.last();
                    m_tryContextStack.removeLast();
                    TryRange range;
                    range.start = context.start;
                    range.end = beforeFinally;
                    range.tryData = context.tryData;
                    m_tryRanges.append(range);
                    poppedTryContexts.append(context);
                }
            }
            if (flipLabelScopes) {
                savedLabelScopes = m_labelScopes;
                while (m_labelScopes.size() > finallyContext.labelScopesSize)
                    m_labelScopes.removeLast();
            }
            int savedFinallyDepth = m_finallyDepth;
            m_finallyDepth = finallyContext.finallyDepth;
            int savedDynamicScopeDepth = m_localScopeDepth;
            m_localScopeDepth = finallyContext.dynamicScopeDepth;

            // Emit the finally block.
            emitNode(finallyContext.finallyBlock);

            RefPtr<Label> afterFinally = emitLabel(newLabel().get());

            // Restore the state of the world.
            if (flipScopes) {
                m_scopeContextStack = savedScopeContextStack;
                topScope = &m_scopeContextStack[topScopeIndex]; // assert it's within bounds
                bottomScope = m_scopeContextStack.begin() + bottomScopeIndex; // don't assert, since the index might be -1.
            }
            if (flipSwitches)
                m_switchContextStack = savedSwitchContextStack;
            if (flipForIns)
                m_forInContextStack = savedForInContextStack;
            if (flipTries) {
                ASSERT(m_tryContextStack.size() == finallyContext.tryContextStackSize);
                for (unsigned i = poppedTryContexts.size(); i--;) {
                    TryContext context = poppedTryContexts[i];
                    context.start = afterFinally;
                    m_tryContextStack.append(context);
                }
                poppedTryContexts.clear();
            }
            if (flipLabelScopes)
                m_labelScopes = savedLabelScopes;
            m_finallyDepth = savedFinallyDepth;
            m_localScopeDepth = savedDynamicScopeDepth;

            --topScope;
        }
    }
}

void BytecodeGenerator::emitPopScopes(int targetScopeDepth)
{
    ASSERT(scopeDepth() - targetScopeDepth >= 0);

    size_t scopeDelta = scopeDepth() - targetScopeDepth;
    ASSERT(scopeDelta <= m_scopeContextStack.size());
    if (!scopeDelta)
        return;

    if (!m_finallyDepth) {
        while (scopeDelta--)
            emitOpcode(op_pop_scope);
        return;
    }

    emitComplexPopScopes(&m_scopeContextStack.last(), &m_scopeContextStack.last() - scopeDelta);
}

RegisterID* BytecodeGenerator::emitGetPropertyNames(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, Label* breakTarget)
{
    size_t begin = instructions().size();

    emitOpcode(op_get_pnames);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(i->index());
    instructions().append(size->index());
    instructions().append(breakTarget->bind(begin, instructions().size()));
    return dst;
}

RegisterID* BytecodeGenerator::emitNextPropertyName(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, RegisterID* iter, Label* target)
{
    size_t begin = instructions().size();

    emitOpcode(op_next_pname);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(i->index());
    instructions().append(size->index());
    instructions().append(iter->index());
    instructions().append(target->bind(begin, instructions().size()));
    return dst;
}

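// pushTry records where a protected region starts; popTryAndEmitCatch closes
// the region, appends it to m_tryRanges (the source of the exception handler
// table), and emits op_catch at the handler label so the thrown value is
// delivered into targetRegister.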
TryData* BytecodeGenerator::pushTry(Label* start)
{
    TryData tryData;
    tryData.target = newLabel();
    tryData.targetScopeDepth = UINT_MAX;
    m_tryData.append(tryData);
    TryData* result = &m_tryData.last();

    TryContext tryContext;
    tryContext.start = start;
    tryContext.tryData = result;

    m_tryContextStack.append(tryContext);

    return result;
}

RegisterID* BytecodeGenerator::popTryAndEmitCatch(TryData* tryData, RegisterID* targetRegister, Label* end)
{
    m_usesExceptions = true;

    ASSERT_UNUSED(tryData, m_tryContextStack.last().tryData == tryData);

    TryRange tryRange;
    tryRange.start = m_tryContextStack.last().start;
    tryRange.end = end;
    tryRange.tryData = m_tryContextStack.last().tryData;
    m_tryRanges.append(tryRange);
    m_tryContextStack.removeLast();

    emitLabel(tryRange.tryData->target.get());
    tryRange.tryData->targetScopeDepth = m_localScopeDepth;

    emitOpcode(op_catch);
    instructions().append(targetRegister->index());
    return targetRegister;
}

void BytecodeGenerator::emitThrowReferenceError(const String& message)
{
    emitOpcode(op_throw_static_error);
    instructions().append(addConstantValue(addStringConstant(Identifier(m_vm, message)))->index());
    instructions().append(true);
}

void BytecodeGenerator::emitPushFunctionNameScope(const Identifier& property, RegisterID* value, unsigned attributes)
{
    emitOpcode(op_push_name_scope);
    instructions().append(addConstant(property));
    instructions().append(value->index());
    instructions().append(attributes);
}

void BytecodeGenerator::emitPushCatchScope(const Identifier& property, RegisterID* value, unsigned attributes)
{
    createActivationIfNecessary();

    ControlFlowContext context;
    context.isFinallyBlock = false;
    m_scopeContextStack.append(context);
    m_localScopeDepth++;

    emitOpcode(op_push_name_scope);
    instructions().append(addConstant(property));
    instructions().append(value->index());
    instructions().append(attributes);
}

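// beginSwitch emits the switch opcode with placeholder operands; the jump
// table index and the default target are patched in endSwitch once every
// clause label has been resolved.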
void BytecodeGenerator::beginSwitch(RegisterID* scrutineeRegister, SwitchInfo::SwitchType type)
{
    SwitchInfo info = { static_cast<uint32_t>(instructions().size()), type };
    switch (type) {
    case SwitchInfo::SwitchImmediate:
        emitOpcode(op_switch_imm);
        break;
    case SwitchInfo::SwitchCharacter:
        emitOpcode(op_switch_char);
        break;
    case SwitchInfo::SwitchString:
        emitOpcode(op_switch_string);
        break;
    default:
        RELEASE_ASSERT_NOT_REACHED();
    }

    instructions().append(0); // place holder for table index
    instructions().append(0); // place holder for default target
    instructions().append(scrutineeRegister->index());
    m_switchContextStack.append(info);
}

static int32_t keyForImmediateSwitch(ExpressionNode* node, int32_t min, int32_t max)
{
    UNUSED_PARAM(max);
    ASSERT(node->isNumber());
    double value = static_cast<NumberNode*>(node)->value();
    int32_t key = static_cast<int32_t>(value);
    ASSERT(key == value);
    ASSERT(key >= min);
    ASSERT(key <= max);
    return key - min;
}

static int32_t keyForCharacterSwitch(ExpressionNode* node, int32_t min, int32_t max)
{
    UNUSED_PARAM(max);
    ASSERT(node->isString());
    StringImpl* clause = static_cast<StringNode*>(node)->value().impl();
    ASSERT(clause->length() == 1);

    int32_t key = (*clause)[0];
    ASSERT(key >= min);
    ASSERT(key <= max);
    return key - min;
}

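// The key getters above reduce each clause constant to an index into the jump
// table (relative to 'min'); the helpers below fill the tables with branch
// offsets, which appear to be measured from the switch opcode itself.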
static void prepareJumpTableForSwitch(
    UnlinkedSimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount,
    RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max,
    int32_t (*keyGetter)(ExpressionNode*, int32_t min, int32_t max))
{
    jumpTable.min = min;
    jumpTable.branchOffsets.resize(max - min + 1);
    jumpTable.branchOffsets.fill(0);
    for (uint32_t i = 0; i < clauseCount; ++i) {
        // We're emitting this after the clause labels should have been fixed, so
        // the labels should not be "forward" references
        ASSERT(!labels[i]->isForward());
        jumpTable.add(keyGetter(nodes[i], min, max), labels[i]->bind(switchAddress, switchAddress + 3));
    }
}

static void prepareJumpTableForStringSwitch(UnlinkedStringJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes)
{
    for (uint32_t i = 0; i < clauseCount; ++i) {
        // We're emitting this after the clause labels should have been fixed, so
        // the labels should not be "forward" references
        ASSERT(!labels[i]->isForward());

        ASSERT(nodes[i]->isString());
        StringImpl* clause = static_cast<StringNode*>(nodes[i])->value().impl();
        jumpTable.offsetTable.add(clause, labels[i]->bind(switchAddress, switchAddress + 3));
    }
}

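// endSwitch pops the SwitchInfo recorded by beginSwitch, patches the opcode's
// table-index and default-target operands, and builds the matching jump table
// on the unlinked code block.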
void BytecodeGenerator::endSwitch(uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, Label* defaultLabel, int32_t min, int32_t max)
{
    SwitchInfo switchInfo = m_switchContextStack.last();
    m_switchContextStack.removeLast();

    switch (switchInfo.switchType) {
    case SwitchInfo::SwitchImmediate:
    case SwitchInfo::SwitchCharacter: {
        instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfSwitchJumpTables();
        instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);

        UnlinkedSimpleJumpTable& jumpTable = m_codeBlock->addSwitchJumpTable();
        prepareJumpTableForSwitch(
            jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max,
            switchInfo.switchType == SwitchInfo::SwitchImmediate
                ? keyForImmediateSwitch
                : keyForCharacterSwitch);
        break;
    }

    case SwitchInfo::SwitchString: {
        instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfStringSwitchJumpTables();
        instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);

        UnlinkedStringJumpTable& jumpTable = m_codeBlock->addStringSwitchJumpTable();
        prepareJumpTableForStringSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes);
        break;
    }

    default:
        RELEASE_ASSERT_NOT_REACHED();
        break;
    }
}

RegisterID* BytecodeGenerator::emitThrowExpressionTooDeepException()
{
    // It would be nice to do an even better job of identifying exactly where the expression is.
    // And we could make the caller pass the node pointer in, if there was some way of getting
    // that from an arbitrary node. However, calling emitExpressionInfo without any useful data
    // is still good enough to get us an accurate line number.
    m_expressionTooDeep = true;
    return newTemporary();
}

void BytecodeGenerator::setIsNumericCompareFunction(bool isNumericCompareFunction)
{
    m_codeBlock->setIsNumericCompareFunction(isNumericCompareFunction);
}

bool BytecodeGenerator::isArgumentNumber(const Identifier& ident, int argumentNumber)
{
    RegisterID* registerID = local(ident).get();
    if (!registerID || registerID->index() >= 0)
        return false;
    return registerID->index() == CallFrame::argumentOffset(argumentNumber);
}

void BytecodeGenerator::emitReadOnlyExceptionIfNeeded()
{
    if (!isStrictMode())
        return;
    emitOpcode(op_throw_static_error);
    instructions().append(addConstantValue(addStringConstant(Identifier(m_vm, StrictModeReadonlyPropertyWriteError)))->index());
    instructions().append(false);
}

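// Drives iterator-style enumeration for the given subject expression. A fast
// path walks a non-escaping 'arguments' object by index; the general path
// fetches the subject's private @iterator, calls it, and then repeatedly calls
// the iterator's next function, handing each value to 'callBack' until the
// VM's iteration terminator sentinel comes back.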
void BytecodeGenerator::emitEnumeration(ThrowableExpressionData* node, ExpressionNode* subjectNode, const std::function<void(BytecodeGenerator&, RegisterID*)>& callBack)
{
    if (subjectNode->isResolveNode()
        && willResolveToArguments(static_cast<ResolveNode*>(subjectNode)->identifier())
        && !symbolTable().slowArguments()) {
        RefPtr<RegisterID> index = emitLoad(newTemporary(), jsNumber(0));

        LabelScopePtr scope = newLabelScope(LabelScope::Loop);
        RefPtr<RegisterID> value = emitLoad(newTemporary(), jsUndefined());

        RefPtr<Label> loopCondition = newLabel();
        RefPtr<Label> loopStart = newLabel();
        emitJump(loopCondition.get());
        emitLabel(loopStart.get());

        emitGetArgumentByVal(value.get(), uncheckedRegisterForArguments(), index.get());
        callBack(*this, value.get());

        emitLabel(scope->continueTarget());
        emitInc(index.get());
        emitLabel(loopCondition.get());
        RefPtr<RegisterID> length = emitGetArgumentsLength(newTemporary(), uncheckedRegisterForArguments());
        emitJumpIfTrue(emitEqualityOp(op_less, newTemporary(), index.get(), length.get()), loopStart.get());
        emitLabel(scope->breakTarget());
        return;
    }

    LabelScopePtr scope = newLabelScope(LabelScope::Loop);
    RefPtr<RegisterID> subject = newTemporary();
    emitNode(subject.get(), subjectNode);
    RefPtr<RegisterID> iterator = emitGetById(newTemporary(), subject.get(), propertyNames().iteratorPrivateName);
    {
        CallArguments args(*this, 0);
        emitMove(args.thisRegister(), subject.get());
        emitCall(iterator.get(), iterator.get(), NoExpectedFunction, args, node->divot(), node->divotStart(), node->divotEnd());
    }

    RefPtr<RegisterID> iteratorNext = emitGetById(newTemporary(), iterator.get(), propertyNames().iteratorNextPrivateName);
    RefPtr<RegisterID> value = newTemporary();
    emitLoad(value.get(), jsUndefined());

    emitJump(scope->continueTarget());

    RefPtr<Label> loopStart = newLabel();
    emitLabel(loopStart.get());
    callBack(*this, value.get());
    emitLabel(scope->continueTarget());
    CallArguments nextArguments(*this, 0, 1);
    emitMove(nextArguments.thisRegister(), iterator.get());
    emitMove(nextArguments.argumentRegister(0), value.get());
    emitCall(value.get(), iteratorNext.get(), NoExpectedFunction, nextArguments, node->divot(), node->divotStart(), node->divotEnd());
    RefPtr<RegisterID> result = newTemporary();
    emitJumpIfFalse(emitEqualityOp(op_stricteq, result.get(), value.get(), emitLoad(0, JSValue(vm()->iterationTerminator.get()))), loopStart.get());
    emitLabel(scope->breakTarget());
}