apple/javascriptcore.git / bytecompiler / BytecodeGenerator.cpp (JavaScriptCore-7600.1.4.15.12)
1 /*
2 * Copyright (C) 2008, 2009, 2012, 2013, 2014 Apple Inc. All rights reserved.
3 * Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca>
4 * Copyright (C) 2012 Igalia, S.L.
5 *
6 * Redistribution and use in source and binary forms, with or without
7 * modification, are permitted provided that the following conditions
8 * are met:
9 *
10 * 1. Redistributions of source code must retain the above copyright
11 * notice, this list of conditions and the following disclaimer.
12 * 2. Redistributions in binary form must reproduce the above copyright
13 * notice, this list of conditions and the following disclaimer in the
14 * documentation and/or other materials provided with the distribution.
15 * 3. Neither the name of Apple Inc. ("Apple") nor the names of
16 * its contributors may be used to endorse or promote products derived
17 * from this software without specific prior written permission.
18 *
19 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
20 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
21 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
22 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
23 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
24 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
25 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
26 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
28 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29 */
30
31 #include "config.h"
32 #include "BytecodeGenerator.h"
33
34 #include "Interpreter.h"
35 #include "JSActivation.h"
36 #include "JSFunction.h"
37 #include "JSNameScope.h"
38 #include "LowLevelInterpreter.h"
39 #include "JSCInlines.h"
40 #include "Options.h"
41 #include "StackAlignment.h"
42 #include "StrongInlines.h"
43 #include "UnlinkedCodeBlock.h"
44 #include "UnlinkedInstructionStream.h"
45 #include <wtf/StdLibExtras.h>
46 #include <wtf/text/WTFString.h>
47
48 using namespace std;
49
50 namespace JSC {
51
52 void Label::setLocation(unsigned location)
53 {
54 m_location = location;
55
56 unsigned size = m_unresolvedJumps.size();
57 for (unsigned i = 0; i < size; ++i)
58 m_generator->m_instructions[m_unresolvedJumps[i].second].u.operand = m_location - m_unresolvedJumps[i].first;
59 }
60
61 ParserError BytecodeGenerator::generate()
62 {
63 SamplingRegion samplingRegion("Bytecode Generation");
64
65 m_codeBlock->setThisRegister(m_thisRegister.virtualRegister());
66 for (size_t i = 0; i < m_deconstructedParameters.size(); i++) {
67 auto& entry = m_deconstructedParameters[i];
68 entry.second->bindValue(*this, entry.first.get());
69 }
70
71 m_scopeNode->emitBytecode(*this);
72
73 m_staticPropertyAnalyzer.kill();
74
75 for (unsigned i = 0; i < m_tryRanges.size(); ++i) {
76 TryRange& range = m_tryRanges[i];
77 int start = range.start->bind();
78 int end = range.end->bind();
79
80 // This will happen for empty try blocks and for some cases of finally blocks:
81 //
82 // try {
83 // try {
84 // } finally {
85 // return 42;
86 // // *HERE*
87 // }
88 // } finally {
89 // print("things");
90 // }
91 //
92 // The return will pop scopes to execute the outer finally block. But this includes
93 // popping the try context for the inner try. The try context is live in the fall-through
94 // part of the finally block not because we will emit a handler that overlaps the finally,
95 // but because we haven't yet had a chance to plant the catch target. Then when we finish
96 // emitting code for the outer finally block, we re-push the try context, this time with a
97 // new start index. But that means that the start index for the try range corresponding
98 // to the inner-finally-following-the-return (marked as "*HERE*" above) will be greater
99 // than the end index of the try block. This is harmless since end < start handlers will
100 // never get matched in our logic, but we do the runtime a favor and choose to not emit
101 // such handlers at all.
102 if (end <= start)
103 continue;
104
105 ASSERT(range.tryData->targetScopeDepth != UINT_MAX);
106 UnlinkedHandlerInfo info = {
107 static_cast<uint32_t>(start), static_cast<uint32_t>(end),
108 static_cast<uint32_t>(range.tryData->target->bind()),
109 range.tryData->targetScopeDepth
110 };
111 m_codeBlock->addExceptionHandler(info);
112 }
113
114 m_codeBlock->setInstructions(std::make_unique<UnlinkedInstructionStream>(m_instructions));
115
116 m_codeBlock->shrinkToFit();
117
118 if (m_codeBlock->symbolTable())
119 m_codeBlock->setSymbolTable(m_codeBlock->symbolTable()->cloneCapturedNames(*m_codeBlock->vm()));
120
121 if (m_expressionTooDeep)
122 return ParserError(ParserError::OutOfMemory);
123 return ParserError(ParserError::ErrorNone);
124 }
125
126 bool BytecodeGenerator::addVar(
127 const Identifier& ident, ConstantMode constantMode, WatchMode watchMode, RegisterID*& r0)
128 {
129 ASSERT(static_cast<size_t>(m_codeBlock->m_numVars) == m_calleeRegisters.size());
130
131 ConcurrentJITLocker locker(symbolTable().m_lock);
132 int index = virtualRegisterForLocal(m_calleeRegisters.size()).offset();
133 SymbolTableEntry newEntry(index, constantMode == IsConstant ? ReadOnly : 0);
134 SymbolTable::Map::AddResult result = symbolTable().add(locker, ident.impl(), newEntry);
135
136 if (!result.isNewEntry) {
137 r0 = &registerFor(result.iterator->value.getIndex());
138 return false;
139 }
140
141 if (watchMode == IsWatchable) {
142 while (m_watchableVariables.size() < static_cast<size_t>(m_codeBlock->m_numVars))
143 m_watchableVariables.append(Identifier());
144 m_watchableVariables.append(ident);
145 }
146
147 r0 = addVar();
148
149 ASSERT(watchMode == NotWatchable || static_cast<size_t>(m_codeBlock->m_numVars) == m_watchableVariables.size());
150
151 return true;
152 }
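// Illustrative note (added commentary, not from the original source): the
// isNewEntry check above means redeclaring a variable reuses its register.
// For source such as
//     var x = 1; var x = 2;
// the second addVar() call finds the existing symbol table entry, returns
// false, and hands back the register already allocated for 'x', so both
// declarations share a single local slot.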
153
154 void BytecodeGenerator::preserveLastVar()
155 {
156 if ((m_firstConstantIndex = m_calleeRegisters.size()) != 0)
157 m_lastVar = &m_calleeRegisters.last();
158 }
159
160 BytecodeGenerator::BytecodeGenerator(VM& vm, ProgramNode* programNode, UnlinkedProgramCodeBlock* codeBlock, DebuggerMode debuggerMode, ProfilerMode profilerMode)
161 : m_shouldEmitDebugHooks(Options::forceDebuggerBytecodeGeneration() || debuggerMode == DebuggerOn)
162 , m_shouldEmitProfileHooks(Options::forceProfilerBytecodeGeneration() || profilerMode == ProfilerOn)
163 , m_symbolTable(0)
164 , m_scopeNode(programNode)
165 , m_codeBlock(vm, codeBlock)
166 , m_thisRegister(CallFrame::thisArgumentOffset())
167 , m_activationRegister(0)
168 , m_emptyValueRegister(0)
169 , m_globalObjectRegister(0)
170 , m_finallyDepth(0)
171 , m_localScopeDepth(0)
172 , m_codeType(GlobalCode)
173 , m_nextConstantOffset(0)
174 , m_globalConstantIndex(0)
175 , m_firstLazyFunction(0)
176 , m_lastLazyFunction(0)
177 , m_staticPropertyAnalyzer(&m_instructions)
178 , m_vm(&vm)
179 , m_lastOpcodeID(op_end)
180 #ifndef NDEBUG
181 , m_lastOpcodePosition(0)
182 #endif
183 , m_usesExceptions(false)
184 , m_expressionTooDeep(false)
185 , m_isBuiltinFunction(false)
186 {
187 m_codeBlock->setNumParameters(1); // Allocate space for "this"
188
189 emitOpcode(op_enter);
190
191 const VarStack& varStack = programNode->varStack();
192 const FunctionStack& functionStack = programNode->functionStack();
193
194 for (size_t i = 0; i < functionStack.size(); ++i) {
195 FunctionBodyNode* function = functionStack[i];
196 UnlinkedFunctionExecutable* unlinkedFunction = makeFunction(function);
197 codeBlock->addFunctionDeclaration(*m_vm, function->ident(), unlinkedFunction);
198 }
199
200 for (size_t i = 0; i < varStack.size(); ++i)
201 codeBlock->addVariableDeclaration(varStack[i].first, !!(varStack[i].second & DeclarationStacks::IsConstant));
202
203 }
204
205 BytecodeGenerator::BytecodeGenerator(VM& vm, FunctionBodyNode* functionBody, UnlinkedFunctionCodeBlock* codeBlock, DebuggerMode debuggerMode, ProfilerMode profilerMode)
206 : m_shouldEmitDebugHooks(Options::forceDebuggerBytecodeGeneration() || debuggerMode == DebuggerOn)
207 , m_shouldEmitProfileHooks(Options::forceProfilerBytecodeGeneration() || profilerMode == ProfilerOn)
208 , m_symbolTable(codeBlock->symbolTable())
209 , m_scopeNode(functionBody)
210 , m_codeBlock(vm, codeBlock)
211 , m_activationRegister(0)
212 , m_emptyValueRegister(0)
213 , m_globalObjectRegister(0)
214 , m_finallyDepth(0)
215 , m_localScopeDepth(0)
216 , m_codeType(FunctionCode)
217 , m_nextConstantOffset(0)
218 , m_globalConstantIndex(0)
219 , m_firstLazyFunction(0)
220 , m_lastLazyFunction(0)
221 , m_staticPropertyAnalyzer(&m_instructions)
222 , m_vm(&vm)
223 , m_lastOpcodeID(op_end)
224 #ifndef NDEBUG
225 , m_lastOpcodePosition(0)
226 #endif
227 , m_usesExceptions(false)
228 , m_expressionTooDeep(false)
229 , m_isBuiltinFunction(codeBlock->isBuiltinFunction())
230 {
231 if (m_isBuiltinFunction)
232 m_shouldEmitDebugHooks = false;
233
234 m_symbolTable->setUsesNonStrictEval(codeBlock->usesEval() && !codeBlock->isStrictMode());
235 Vector<Identifier> boundParameterProperties;
236 FunctionParameters& parameters = *functionBody->parameters();
237 for (size_t i = 0; i < parameters.size(); i++) {
238 auto pattern = parameters.at(i);
239 if (pattern->isBindingNode())
240 continue;
241 pattern->collectBoundIdentifiers(boundParameterProperties);
242 continue;
243 }
244 m_symbolTable->setParameterCountIncludingThis(functionBody->parameters()->size() + 1);
245
246 emitOpcode(op_enter);
247 if (m_codeBlock->needsFullScopeChain() || m_shouldEmitDebugHooks) {
248 m_activationRegister = addVar();
249 emitInitLazyRegister(m_activationRegister);
250 m_codeBlock->setActivationRegister(m_activationRegister->virtualRegister());
251 }
252
253 m_symbolTable->setCaptureStart(virtualRegisterForLocal(m_codeBlock->m_numVars).offset());
254
255 if (functionBody->usesArguments() || codeBlock->usesEval()) { // May reify arguments object.
256 RegisterID* unmodifiedArgumentsRegister = addVar(); // Anonymous, so it can't be modified by user code.
257 RegisterID* argumentsRegister = addVar(propertyNames().arguments, IsVariable, NotWatchable); // Can be changed by assigning to 'arguments'.
258
259 // We can save a little space by hard-coding the knowledge that the two
260 // 'arguments' values are stored in consecutive registers, and storing
261 // only the index of the assignable one.
262 codeBlock->setArgumentsRegister(argumentsRegister->virtualRegister());
263 ASSERT_UNUSED(unmodifiedArgumentsRegister, unmodifiedArgumentsRegister->virtualRegister() == JSC::unmodifiedArgumentsRegister(codeBlock->argumentsRegister()));
264
265 emitInitLazyRegister(argumentsRegister);
266 emitInitLazyRegister(unmodifiedArgumentsRegister);
267
268 if (shouldTearOffArgumentsEagerly()) {
269 emitOpcode(op_create_arguments);
270 instructions().append(argumentsRegister->index());
271 }
272 }
273
274 bool shouldCaptureAllTheThings = m_shouldEmitDebugHooks || codeBlock->usesEval();
275
276 bool capturesAnyArgumentByName = false;
277 Vector<RegisterID*, 0, UnsafeVectorOverflow> capturedArguments;
278 if (functionBody->hasCapturedVariables() || shouldCaptureAllTheThings) {
279 FunctionParameters& parameters = *functionBody->parameters();
280 capturedArguments.resize(parameters.size());
281 for (size_t i = 0; i < parameters.size(); ++i) {
282 capturedArguments[i] = 0;
283 auto pattern = parameters.at(i);
284 if (!pattern->isBindingNode())
285 continue;
286 const Identifier& ident = static_cast<const BindingNode*>(pattern)->boundProperty();
287 if (!functionBody->captures(ident) && !shouldCaptureAllTheThings)
288 continue;
289 capturesAnyArgumentByName = true;
290 capturedArguments[i] = addVar();
291 }
292 }
293
294 if (capturesAnyArgumentByName && !shouldTearOffArgumentsEagerly()) {
295 size_t parameterCount = m_symbolTable->parameterCount();
296 auto slowArguments = std::make_unique<SlowArgument[]>(parameterCount);
297 for (size_t i = 0; i < parameterCount; ++i) {
298 if (!capturedArguments[i]) {
299 ASSERT(slowArguments[i].status == SlowArgument::Normal);
300 slowArguments[i].index = CallFrame::argumentOffset(i);
301 continue;
302 }
303 slowArguments[i].status = SlowArgument::Captured;
304 slowArguments[i].index = capturedArguments[i]->index();
305 }
306 m_symbolTable->setSlowArguments(WTF::move(slowArguments));
307 }
308
309 RegisterID* calleeRegister = resolveCallee(functionBody); // May push to the scope chain and/or add a captured var.
310
311 const DeclarationStacks::FunctionStack& functionStack = functionBody->functionStack();
312 const DeclarationStacks::VarStack& varStack = functionBody->varStack();
313 IdentifierSet test;
314
315 // Captured variables and functions go first so that activations don't have
316 // to step over the non-captured locals to mark them.
317 if (functionBody->hasCapturedVariables()) {
318 for (size_t i = 0; i < boundParameterProperties.size(); i++) {
319 const Identifier& ident = boundParameterProperties[i];
320 if (functionBody->captures(ident))
321 addVar(ident, IsVariable, IsWatchable);
322 }
323 for (size_t i = 0; i < functionStack.size(); ++i) {
324 FunctionBodyNode* function = functionStack[i];
325 const Identifier& ident = function->ident();
326 if (functionBody->captures(ident)) {
327 m_functions.add(ident.impl());
328 emitNewFunction(addVar(ident, IsVariable, IsWatchable), IsCaptured, function);
329 }
330 }
331 for (size_t i = 0; i < varStack.size(); ++i) {
332 const Identifier& ident = varStack[i].first;
333 if (functionBody->captures(ident))
334 addVar(ident, (varStack[i].second & DeclarationStacks::IsConstant) ? IsConstant : IsVariable, IsWatchable);
335 }
336 }
337
338 m_symbolTable->setCaptureEnd(virtualRegisterForLocal(codeBlock->m_numVars).offset());
339
340 bool canLazilyCreateFunctions = !functionBody->needsActivationForMoreThanVariables() && !m_shouldEmitDebugHooks;
341 m_firstLazyFunction = codeBlock->m_numVars;
342 for (size_t i = 0; i < functionStack.size(); ++i) {
343 FunctionBodyNode* function = functionStack[i];
344 const Identifier& ident = function->ident();
345 if (!functionBody->captures(ident)) {
346 m_functions.add(ident.impl());
347 RefPtr<RegisterID> reg = addVar(ident, IsVariable, NotWatchable);
348 // Don't lazily create functions that override the name 'arguments'
349 // as this would complicate lazy instantiation of actual arguments.
350 if (!canLazilyCreateFunctions || ident == propertyNames().arguments)
351 emitNewFunction(reg.get(), NotCaptured, function);
352 else {
353 emitInitLazyRegister(reg.get());
354 m_lazyFunctions.set(reg->virtualRegister().toLocal(), function);
355 }
356 }
357 }
358 m_lastLazyFunction = canLazilyCreateFunctions ? codeBlock->m_numVars : m_firstLazyFunction;
359 for (size_t i = 0; i < boundParameterProperties.size(); i++) {
360 const Identifier& ident = boundParameterProperties[i];
361 if (!functionBody->captures(ident))
362 addVar(ident, IsVariable, IsWatchable);
363 }
364 for (size_t i = 0; i < varStack.size(); ++i) {
365 const Identifier& ident = varStack[i].first;
366 if (!functionBody->captures(ident))
367 addVar(ident, (varStack[i].second & DeclarationStacks::IsConstant) ? IsConstant : IsVariable, NotWatchable);
368 }
369
370 if (shouldCaptureAllTheThings)
371 m_symbolTable->setCaptureEnd(virtualRegisterForLocal(codeBlock->m_numVars).offset());
372
373 if (m_symbolTable->captureCount())
374 emitOpcode(op_touch_entry);
375
376 m_parameters.grow(parameters.size() + 1); // reserve space for "this"
377
378 // Add "this" as a parameter
379 int nextParameterIndex = CallFrame::thisArgumentOffset();
380 m_thisRegister.setIndex(nextParameterIndex++);
381 m_codeBlock->addParameter();
382 for (size_t i = 0; i < parameters.size(); ++i, ++nextParameterIndex) {
383 int index = nextParameterIndex;
384 auto pattern = parameters.at(i);
385 if (!pattern->isBindingNode()) {
386 m_codeBlock->addParameter();
387 RegisterID& parameter = registerFor(index);
388 parameter.setIndex(index);
389 m_deconstructedParameters.append(std::make_pair(&parameter, pattern));
390 continue;
391 }
392 auto simpleParameter = static_cast<const BindingNode*>(pattern);
393 if (capturedArguments.size() && capturedArguments[i]) {
394 ASSERT((functionBody->hasCapturedVariables() && functionBody->captures(simpleParameter->boundProperty())) || shouldCaptureAllTheThings);
395 index = capturedArguments[i]->index();
396 RegisterID original(nextParameterIndex);
397 emitMove(capturedArguments[i], &original);
398 }
399 addParameter(simpleParameter->boundProperty(), index);
400 }
401 preserveLastVar();
402
403 // We declare the callee's name last because it should lose to a var, function, and/or parameter declaration.
404 addCallee(functionBody, calleeRegister);
405
406 if (isConstructor()) {
407 emitCreateThis(&m_thisRegister);
408 } else if (functionBody->usesThis() || codeBlock->usesEval()) {
409 m_codeBlock->addPropertyAccessInstruction(instructions().size());
410 emitOpcode(op_to_this);
411 instructions().append(kill(&m_thisRegister));
412 instructions().append(0);
413 }
414 }
415
416 BytecodeGenerator::BytecodeGenerator(VM& vm, EvalNode* evalNode, UnlinkedEvalCodeBlock* codeBlock, DebuggerMode debuggerMode, ProfilerMode profilerMode)
417 : m_shouldEmitDebugHooks(Options::forceDebuggerBytecodeGeneration() || debuggerMode == DebuggerOn)
418 , m_shouldEmitProfileHooks(Options::forceProfilerBytecodeGeneration() || profilerMode == ProfilerOn)
419 , m_symbolTable(codeBlock->symbolTable())
420 , m_scopeNode(evalNode)
421 , m_codeBlock(vm, codeBlock)
422 , m_thisRegister(CallFrame::thisArgumentOffset())
423 , m_activationRegister(0)
424 , m_emptyValueRegister(0)
425 , m_globalObjectRegister(0)
426 , m_finallyDepth(0)
427 , m_localScopeDepth(0)
428 , m_codeType(EvalCode)
429 , m_nextConstantOffset(0)
430 , m_globalConstantIndex(0)
431 , m_firstLazyFunction(0)
432 , m_lastLazyFunction(0)
433 , m_staticPropertyAnalyzer(&m_instructions)
434 , m_vm(&vm)
435 , m_lastOpcodeID(op_end)
436 #ifndef NDEBUG
437 , m_lastOpcodePosition(0)
438 #endif
439 , m_usesExceptions(false)
440 , m_expressionTooDeep(false)
441 , m_isBuiltinFunction(false)
442 {
443 m_symbolTable->setUsesNonStrictEval(codeBlock->usesEval() && !codeBlock->isStrictMode());
444 m_codeBlock->setNumParameters(1);
445
446 emitOpcode(op_enter);
447
448 const DeclarationStacks::FunctionStack& functionStack = evalNode->functionStack();
449 for (size_t i = 0; i < functionStack.size(); ++i)
450 m_codeBlock->addFunctionDecl(makeFunction(functionStack[i]));
451
452 const DeclarationStacks::VarStack& varStack = evalNode->varStack();
453 unsigned numVariables = varStack.size();
454 Vector<Identifier, 0, UnsafeVectorOverflow> variables;
455 variables.reserveCapacity(numVariables);
456 for (size_t i = 0; i < numVariables; ++i) {
457 ASSERT(varStack[i].first.impl()->isAtomic());
458 variables.append(varStack[i].first);
459 }
460 codeBlock->adoptVariables(variables);
461 preserveLastVar();
462 }
463
464 BytecodeGenerator::~BytecodeGenerator()
465 {
466 }
467
468 RegisterID* BytecodeGenerator::emitInitLazyRegister(RegisterID* reg)
469 {
470 emitOpcode(op_init_lazy_reg);
471 instructions().append(reg->index());
472 ASSERT(!hasWatchableVariable(reg->index()));
473 return reg;
474 }
475
476 RegisterID* BytecodeGenerator::resolveCallee(FunctionBodyNode* functionBodyNode)
477 {
478 if (!functionNameIsInScope(functionBodyNode->ident(), functionBodyNode->functionMode()))
479 return 0;
480
481 if (functionNameScopeIsDynamic(m_codeBlock->usesEval(), m_codeBlock->isStrictMode()))
482 return 0;
483
484 m_calleeRegister.setIndex(JSStack::Callee);
485 if (functionBodyNode->captures(functionBodyNode->ident()))
486 return emitMove(addVar(), IsCaptured, &m_calleeRegister);
487
488 return &m_calleeRegister;
489 }
490
491 void BytecodeGenerator::addCallee(FunctionBodyNode* functionBodyNode, RegisterID* calleeRegister)
492 {
493 if (!calleeRegister)
494 return;
495
496 symbolTable().add(functionBodyNode->ident().impl(), SymbolTableEntry(calleeRegister->index(), ReadOnly));
497 }
498
499 void BytecodeGenerator::addParameter(const Identifier& ident, int parameterIndex)
500 {
501 // Parameters overwrite var declarations, but not function declarations.
502 StringImpl* rep = ident.impl();
503 if (!m_functions.contains(rep)) {
504 symbolTable().set(rep, parameterIndex);
505 RegisterID& parameter = registerFor(parameterIndex);
506 parameter.setIndex(parameterIndex);
507 }
508
509 // To maintain the calling convention, we have to allocate unique space for
510 // each parameter, even if the parameter doesn't make it into the symbol table.
511 m_codeBlock->addParameter();
512 }
513
514 bool BytecodeGenerator::willResolveToArguments(const Identifier& ident)
515 {
516 if (ident != propertyNames().arguments)
517 return false;
518
519 if (!shouldOptimizeLocals())
520 return false;
521
522 SymbolTableEntry entry = symbolTable().get(ident.impl());
523 if (entry.isNull())
524 return false;
525
526 if (m_codeBlock->usesArguments() && m_codeType == FunctionCode)
527 return true;
528
529 return false;
530 }
531
532 RegisterID* BytecodeGenerator::uncheckedRegisterForArguments()
533 {
534 ASSERT(willResolveToArguments(propertyNames().arguments));
535
536 SymbolTableEntry entry = symbolTable().get(propertyNames().arguments.impl());
537 ASSERT(!entry.isNull());
538 return &registerFor(entry.getIndex());
539 }
540
541 RegisterID* BytecodeGenerator::createLazyRegisterIfNecessary(RegisterID* reg)
542 {
543 if (!reg->virtualRegister().isLocal())
544 return reg;
545
546 int localVariableNumber = reg->virtualRegister().toLocal();
547
548 if (m_lastLazyFunction <= localVariableNumber || localVariableNumber < m_firstLazyFunction)
549 return reg;
550 emitLazyNewFunction(reg, m_lazyFunctions.get(localVariableNumber));
551 return reg;
552 }
553
554 RegisterID* BytecodeGenerator::newRegister()
555 {
556 m_calleeRegisters.append(virtualRegisterForLocal(m_calleeRegisters.size()));
557 int numCalleeRegisters = max<int>(m_codeBlock->m_numCalleeRegisters, m_calleeRegisters.size());
558 numCalleeRegisters = WTF::roundUpToMultipleOf(stackAlignmentRegisters(), numCalleeRegisters);
559 m_codeBlock->m_numCalleeRegisters = numCalleeRegisters;
560 return &m_calleeRegisters.last();
561 }
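// Worked example (added for clarity; the concrete alignment value is an
// assumption): newRegister() rounds the register high-water mark up to a
// multiple of stackAlignmentRegisters(). Assuming that returns 2 (16-byte
// stack alignment with 8-byte registers), allocating a 5th callee register
// leaves m_calleeRegisters.size() == 5 but sets m_numCalleeRegisters to 6.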
562
563 RegisterID* BytecodeGenerator::newTemporary()
564 {
565 // Reclaim free register IDs.
566 while (m_calleeRegisters.size() && !m_calleeRegisters.last().refCount())
567 m_calleeRegisters.removeLast();
568
569 RegisterID* result = newRegister();
570 result->setTemporary();
571 return result;
572 }
573
574 LabelScopePtr BytecodeGenerator::newLabelScope(LabelScope::Type type, const Identifier* name)
575 {
576 // Reclaim free label scopes.
577 while (m_labelScopes.size() && !m_labelScopes.last().refCount())
578 m_labelScopes.removeLast();
579
580 // Allocate new label scope.
581 LabelScope scope(type, name, scopeDepth(), newLabel(), type == LabelScope::Loop ? newLabel() : PassRefPtr<Label>()); // Only loops have continue targets.
582 m_labelScopes.append(scope);
583 return LabelScopePtr(m_labelScopes, m_labelScopes.size() - 1);
584 }
585
586 PassRefPtr<Label> BytecodeGenerator::newLabel()
587 {
588 // Reclaim free label IDs.
589 while (m_labels.size() && !m_labels.last().refCount())
590 m_labels.removeLast();
591
592 // Allocate new label ID.
593 m_labels.append(this);
594 return &m_labels.last();
595 }
596
597 PassRefPtr<Label> BytecodeGenerator::emitLabel(Label* l0)
598 {
599 unsigned newLabelIndex = instructions().size();
600 l0->setLocation(newLabelIndex);
601
602 if (m_codeBlock->numberOfJumpTargets()) {
603 unsigned lastLabelIndex = m_codeBlock->lastJumpTarget();
604 ASSERT(lastLabelIndex <= newLabelIndex);
605 if (newLabelIndex == lastLabelIndex) {
606 // Peephole optimizations have already been disabled by emitting the last label
607 return l0;
608 }
609 }
610
611 m_codeBlock->addJumpTarget(newLabelIndex);
612
613 // This disables peephole optimizations when an instruction is a jump target
614 m_lastOpcodeID = op_end;
615 return l0;
616 }
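// Added note: setting m_lastOpcodeID to op_end here is what keeps the
// peephole rewrites in emitJumpIfTrue/emitJumpIfFalse and emitEqualityOp
// from fusing across a jump target. For example, if a label lands between
// an op_less and the op_jtrue that consumes it, the pair is left as two
// instructions instead of being rewound into a single op_jless.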
617
618 void BytecodeGenerator::emitOpcode(OpcodeID opcodeID)
619 {
620 #ifndef NDEBUG
621 size_t opcodePosition = instructions().size();
622 ASSERT(opcodePosition - m_lastOpcodePosition == opcodeLength(m_lastOpcodeID) || m_lastOpcodeID == op_end);
623 m_lastOpcodePosition = opcodePosition;
624 #endif
625 instructions().append(opcodeID);
626 m_lastOpcodeID = opcodeID;
627 }
628
629 UnlinkedArrayProfile BytecodeGenerator::newArrayProfile()
630 {
631 return m_codeBlock->addArrayProfile();
632 }
633
634 UnlinkedArrayAllocationProfile BytecodeGenerator::newArrayAllocationProfile()
635 {
636 return m_codeBlock->addArrayAllocationProfile();
637 }
638
639 UnlinkedObjectAllocationProfile BytecodeGenerator::newObjectAllocationProfile()
640 {
641 return m_codeBlock->addObjectAllocationProfile();
642 }
643
644 UnlinkedValueProfile BytecodeGenerator::emitProfiledOpcode(OpcodeID opcodeID)
645 {
646 UnlinkedValueProfile result = m_codeBlock->addValueProfile();
647 emitOpcode(opcodeID);
648 return result;
649 }
650
651 void BytecodeGenerator::emitLoopHint()
652 {
653 emitOpcode(op_loop_hint);
654 }
655
656 void BytecodeGenerator::retrieveLastBinaryOp(int& dstIndex, int& src1Index, int& src2Index)
657 {
658 ASSERT(instructions().size() >= 4);
659 size_t size = instructions().size();
660 dstIndex = instructions().at(size - 3).u.operand;
661 src1Index = instructions().at(size - 2).u.operand;
662 src2Index = instructions().at(size - 1).u.operand;
663 }
664
665 void BytecodeGenerator::retrieveLastUnaryOp(int& dstIndex, int& srcIndex)
666 {
667 ASSERT(instructions().size() >= 3);
668 size_t size = instructions().size();
669 dstIndex = instructions().at(size - 2).u.operand;
670 srcIndex = instructions().at(size - 1).u.operand;
671 }
672
673 void ALWAYS_INLINE BytecodeGenerator::rewindBinaryOp()
674 {
675 ASSERT(instructions().size() >= 4);
676 instructions().shrink(instructions().size() - 4);
677 m_lastOpcodeID = op_end;
678 }
679
680 void ALWAYS_INLINE BytecodeGenerator::rewindUnaryOp()
681 {
682 ASSERT(instructions().size() >= 3);
683 instructions().shrink(instructions().size() - 3);
684 m_lastOpcodeID = op_end;
685 }
686
687 PassRefPtr<Label> BytecodeGenerator::emitJump(Label* target)
688 {
689 size_t begin = instructions().size();
690 emitOpcode(op_jmp);
691 instructions().append(target->bind(begin, instructions().size()));
692 return target;
693 }
694
695 PassRefPtr<Label> BytecodeGenerator::emitJumpIfTrue(RegisterID* cond, Label* target)
696 {
697 if (m_lastOpcodeID == op_less) {
698 int dstIndex;
699 int src1Index;
700 int src2Index;
701
702 retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
703
704 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
705 rewindBinaryOp();
706
707 size_t begin = instructions().size();
708 emitOpcode(op_jless);
709 instructions().append(src1Index);
710 instructions().append(src2Index);
711 instructions().append(target->bind(begin, instructions().size()));
712 return target;
713 }
714 } else if (m_lastOpcodeID == op_lesseq) {
715 int dstIndex;
716 int src1Index;
717 int src2Index;
718
719 retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
720
721 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
722 rewindBinaryOp();
723
724 size_t begin = instructions().size();
725 emitOpcode(op_jlesseq);
726 instructions().append(src1Index);
727 instructions().append(src2Index);
728 instructions().append(target->bind(begin, instructions().size()));
729 return target;
730 }
731 } else if (m_lastOpcodeID == op_greater) {
732 int dstIndex;
733 int src1Index;
734 int src2Index;
735
736 retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
737
738 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
739 rewindBinaryOp();
740
741 size_t begin = instructions().size();
742 emitOpcode(op_jgreater);
743 instructions().append(src1Index);
744 instructions().append(src2Index);
745 instructions().append(target->bind(begin, instructions().size()));
746 return target;
747 }
748 } else if (m_lastOpcodeID == op_greatereq) {
749 int dstIndex;
750 int src1Index;
751 int src2Index;
752
753 retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
754
755 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
756 rewindBinaryOp();
757
758 size_t begin = instructions().size();
759 emitOpcode(op_jgreatereq);
760 instructions().append(src1Index);
761 instructions().append(src2Index);
762 instructions().append(target->bind(begin, instructions().size()));
763 return target;
764 }
765 } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
766 int dstIndex;
767 int srcIndex;
768
769 retrieveLastUnaryOp(dstIndex, srcIndex);
770
771 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
772 rewindUnaryOp();
773
774 size_t begin = instructions().size();
775 emitOpcode(op_jeq_null);
776 instructions().append(srcIndex);
777 instructions().append(target->bind(begin, instructions().size()));
778 return target;
779 }
780 } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
781 int dstIndex;
782 int srcIndex;
783
784 retrieveLastUnaryOp(dstIndex, srcIndex);
785
786 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
787 rewindUnaryOp();
788
789 size_t begin = instructions().size();
790 emitOpcode(op_jneq_null);
791 instructions().append(srcIndex);
792 instructions().append(target->bind(begin, instructions().size()));
793 return target;
794 }
795 }
796
797 size_t begin = instructions().size();
798
799 emitOpcode(op_jtrue);
800 instructions().append(cond->index());
801 instructions().append(target->bind(begin, instructions().size()));
802 return target;
803 }
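// Illustrative lowering (added commentary; operand order follows the appends
// above): a branch taken on the result of a fresh comparison, e.g. the
// back-edge test of a do...while (a < b) loop, would naively be
//     op_less   temp, a, b
//     op_jtrue  temp, target
// When the comparison result is a dead temporary, the code above rewinds the
// op_less and emits the fused form instead:
//     op_jless  a, b, target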
804
805 PassRefPtr<Label> BytecodeGenerator::emitJumpIfFalse(RegisterID* cond, Label* target)
806 {
807 if (m_lastOpcodeID == op_less && target->isForward()) {
808 int dstIndex;
809 int src1Index;
810 int src2Index;
811
812 retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
813
814 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
815 rewindBinaryOp();
816
817 size_t begin = instructions().size();
818 emitOpcode(op_jnless);
819 instructions().append(src1Index);
820 instructions().append(src2Index);
821 instructions().append(target->bind(begin, instructions().size()));
822 return target;
823 }
824 } else if (m_lastOpcodeID == op_lesseq && target->isForward()) {
825 int dstIndex;
826 int src1Index;
827 int src2Index;
828
829 retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
830
831 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
832 rewindBinaryOp();
833
834 size_t begin = instructions().size();
835 emitOpcode(op_jnlesseq);
836 instructions().append(src1Index);
837 instructions().append(src2Index);
838 instructions().append(target->bind(begin, instructions().size()));
839 return target;
840 }
841 } else if (m_lastOpcodeID == op_greater && target->isForward()) {
842 int dstIndex;
843 int src1Index;
844 int src2Index;
845
846 retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
847
848 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
849 rewindBinaryOp();
850
851 size_t begin = instructions().size();
852 emitOpcode(op_jngreater);
853 instructions().append(src1Index);
854 instructions().append(src2Index);
855 instructions().append(target->bind(begin, instructions().size()));
856 return target;
857 }
858 } else if (m_lastOpcodeID == op_greatereq && target->isForward()) {
859 int dstIndex;
860 int src1Index;
861 int src2Index;
862
863 retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
864
865 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
866 rewindBinaryOp();
867
868 size_t begin = instructions().size();
869 emitOpcode(op_jngreatereq);
870 instructions().append(src1Index);
871 instructions().append(src2Index);
872 instructions().append(target->bind(begin, instructions().size()));
873 return target;
874 }
875 } else if (m_lastOpcodeID == op_not) {
876 int dstIndex;
877 int srcIndex;
878
879 retrieveLastUnaryOp(dstIndex, srcIndex);
880
881 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
882 rewindUnaryOp();
883
884 size_t begin = instructions().size();
885 emitOpcode(op_jtrue);
886 instructions().append(srcIndex);
887 instructions().append(target->bind(begin, instructions().size()));
888 return target;
889 }
890 } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
891 int dstIndex;
892 int srcIndex;
893
894 retrieveLastUnaryOp(dstIndex, srcIndex);
895
896 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
897 rewindUnaryOp();
898
899 size_t begin = instructions().size();
900 emitOpcode(op_jneq_null);
901 instructions().append(srcIndex);
902 instructions().append(target->bind(begin, instructions().size()));
903 return target;
904 }
905 } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
906 int dstIndex;
907 int srcIndex;
908
909 retrieveLastUnaryOp(dstIndex, srcIndex);
910
911 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
912 rewindUnaryOp();
913
914 size_t begin = instructions().size();
915 emitOpcode(op_jeq_null);
916 instructions().append(srcIndex);
917 instructions().append(target->bind(begin, instructions().size()));
918 return target;
919 }
920 }
921
922 size_t begin = instructions().size();
923 emitOpcode(op_jfalse);
924 instructions().append(cond->index());
925 instructions().append(target->bind(begin, instructions().size()));
926 return target;
927 }
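// Added note: emitJumpIfFalse applies the mirror-image peephole using the
// inverted jump forms (op_jnless, op_jnlesseq, ...). The op_not case above
// also means that a test such as
//     if (!x) { ... }
// branches with a single op_jtrue on x rather than materializing op_not
// followed by op_jfalse.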
928
929 PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionCall(RegisterID* cond, Label* target)
930 {
931 size_t begin = instructions().size();
932
933 emitOpcode(op_jneq_ptr);
934 instructions().append(cond->index());
935 instructions().append(Special::CallFunction);
936 instructions().append(target->bind(begin, instructions().size()));
937 return target;
938 }
939
940 PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionApply(RegisterID* cond, Label* target)
941 {
942 size_t begin = instructions().size();
943
944 emitOpcode(op_jneq_ptr);
945 instructions().append(cond->index());
946 instructions().append(Special::ApplyFunction);
947 instructions().append(target->bind(begin, instructions().size()));
948 return target;
949 }
950
951 unsigned BytecodeGenerator::addConstant(const Identifier& ident)
952 {
953 StringImpl* rep = ident.impl();
954 IdentifierMap::AddResult result = m_identifierMap.add(rep, m_codeBlock->numberOfIdentifiers());
955 if (result.isNewEntry)
956 m_codeBlock->addIdentifier(ident);
957
958 return result.iterator->value;
959 }
960
961 // We can't hash JSValue(), so we use a dedicated data member to cache it.
962 RegisterID* BytecodeGenerator::addConstantEmptyValue()
963 {
964 if (!m_emptyValueRegister) {
965 int index = m_nextConstantOffset;
966 m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset);
967 ++m_nextConstantOffset;
968 m_codeBlock->addConstant(JSValue());
969 m_emptyValueRegister = &m_constantPoolRegisters[index];
970 }
971
972 return m_emptyValueRegister;
973 }
974
975 RegisterID* BytecodeGenerator::addConstantValue(JSValue v)
976 {
977 if (!v)
978 return addConstantEmptyValue();
979
980 int index = m_nextConstantOffset;
981 JSValueMap::AddResult result = m_jsValueMap.add(JSValue::encode(v), m_nextConstantOffset);
982 if (result.isNewEntry) {
983 m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset);
984 ++m_nextConstantOffset;
985 m_codeBlock->addConstant(v);
986 } else
987 index = result.iterator->value;
988 return &m_constantPoolRegisters[index];
989 }
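// Added example: constants are de-duplicated through m_jsValueMap, so two
// separate loads of the same value, e.g.
//     emitLoad(dst1, jsNumber(42));
//     emitLoad(dst2, jsNumber(42));
// both resolve to the same constant-pool register; only the first call grows
// the constant pool.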
990
991 unsigned BytecodeGenerator::addRegExp(RegExp* r)
992 {
993 return m_codeBlock->addRegExp(r);
994 }
995
996 RegisterID* BytecodeGenerator::emitMove(RegisterID* dst, CaptureMode captureMode, RegisterID* src)
997 {
998 m_staticPropertyAnalyzer.mov(dst->index(), src->index());
999
1000 emitOpcode(captureMode == IsCaptured ? op_captured_mov : op_mov);
1001 instructions().append(dst->index());
1002 instructions().append(src->index());
1003 if (captureMode == IsCaptured)
1004 instructions().append(watchableVariable(dst->index()));
1005 return dst;
1006 }
1007
1008 RegisterID* BytecodeGenerator::emitMove(RegisterID* dst, RegisterID* src)
1009 {
1010 return emitMove(dst, captureMode(dst->index()), src);
1011 }
1012
1013 RegisterID* BytecodeGenerator::emitUnaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src)
1014 {
1015 emitOpcode(opcodeID);
1016 instructions().append(dst->index());
1017 instructions().append(src->index());
1018 return dst;
1019 }
1020
1021 RegisterID* BytecodeGenerator::emitInc(RegisterID* srcDst)
1022 {
1023 emitOpcode(op_inc);
1024 instructions().append(srcDst->index());
1025 return srcDst;
1026 }
1027
1028 RegisterID* BytecodeGenerator::emitDec(RegisterID* srcDst)
1029 {
1030 emitOpcode(op_dec);
1031 instructions().append(srcDst->index());
1032 return srcDst;
1033 }
1034
1035 RegisterID* BytecodeGenerator::emitBinaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2, OperandTypes types)
1036 {
1037 emitOpcode(opcodeID);
1038 instructions().append(dst->index());
1039 instructions().append(src1->index());
1040 instructions().append(src2->index());
1041
1042 if (opcodeID == op_bitor || opcodeID == op_bitand || opcodeID == op_bitxor ||
1043 opcodeID == op_add || opcodeID == op_mul || opcodeID == op_sub || opcodeID == op_div)
1044 instructions().append(types.toInt());
1045
1046 return dst;
1047 }
1048
1049 RegisterID* BytecodeGenerator::emitEqualityOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2)
1050 {
1051 if (m_lastOpcodeID == op_typeof) {
1052 int dstIndex;
1053 int srcIndex;
1054
1055 retrieveLastUnaryOp(dstIndex, srcIndex);
1056
1057 if (src1->index() == dstIndex
1058 && src1->isTemporary()
1059 && m_codeBlock->isConstantRegisterIndex(src2->index())
1060 && m_codeBlock->constantRegister(src2->index()).get().isString()) {
1061 const String& value = asString(m_codeBlock->constantRegister(src2->index()).get())->tryGetValue();
1062 if (value == "undefined") {
1063 rewindUnaryOp();
1064 emitOpcode(op_is_undefined);
1065 instructions().append(dst->index());
1066 instructions().append(srcIndex);
1067 return dst;
1068 }
1069 if (value == "boolean") {
1070 rewindUnaryOp();
1071 emitOpcode(op_is_boolean);
1072 instructions().append(dst->index());
1073 instructions().append(srcIndex);
1074 return dst;
1075 }
1076 if (value == "number") {
1077 rewindUnaryOp();
1078 emitOpcode(op_is_number);
1079 instructions().append(dst->index());
1080 instructions().append(srcIndex);
1081 return dst;
1082 }
1083 if (value == "string") {
1084 rewindUnaryOp();
1085 emitOpcode(op_is_string);
1086 instructions().append(dst->index());
1087 instructions().append(srcIndex);
1088 return dst;
1089 }
1090 if (value == "object") {
1091 rewindUnaryOp();
1092 emitOpcode(op_is_object);
1093 instructions().append(dst->index());
1094 instructions().append(srcIndex);
1095 return dst;
1096 }
1097 if (value == "function") {
1098 rewindUnaryOp();
1099 emitOpcode(op_is_function);
1100 instructions().append(dst->index());
1101 instructions().append(srcIndex);
1102 return dst;
1103 }
1104 }
1105 }
1106
1107 emitOpcode(opcodeID);
1108 instructions().append(dst->index());
1109 instructions().append(src1->index());
1110 instructions().append(src2->index());
1111 return dst;
1112 }
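// Illustrative lowering (added commentary): a typeof comparison against a
// constant string, such as
//     typeof x == "undefined"
// would naively be op_typeof followed by the equality opcode against the
// string constant. The peephole above instead rewinds the op_typeof and emits
//     op_is_undefined  dst, x
// with op_is_boolean / op_is_number / op_is_string / op_is_object /
// op_is_function covering the other recognized strings.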
1113
1114 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, bool b)
1115 {
1116 return emitLoad(dst, jsBoolean(b));
1117 }
1118
1119 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, double number)
1120 {
1121 // FIXME: Our hash tables won't hold infinity, so we make a new JSValue each time.
1122 // Later we can do the extra work to handle that like the other cases. They also don't
1123 // work correctly with NaN as a key.
1124 if (std::isnan(number) || number == HashTraits<double>::emptyValue() || HashTraits<double>::isDeletedValue(number))
1125 return emitLoad(dst, jsNumber(number));
1126 JSValue& valueInMap = m_numberMap.add(number, JSValue()).iterator->value;
1127 if (!valueInMap)
1128 valueInMap = jsNumber(number);
1129 return emitLoad(dst, valueInMap);
1130 }
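// Added note: finite, hashable numbers are cached in m_numberMap, so repeated
// loads of the same literal (say 3.14) reuse one JSValue. NaN, plus the values
// HashTraits<double> reserves as its empty/deleted markers, bypass that cache
// and go straight to emitLoad(dst, jsNumber(number)), as the FIXME above
// explains.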
1131
1132 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, const Identifier& identifier)
1133 {
1134 JSString*& stringInMap = m_stringMap.add(identifier.impl(), nullptr).iterator->value;
1135 if (!stringInMap)
1136 stringInMap = jsOwnedString(vm(), identifier.string());
1137 return emitLoad(dst, JSValue(stringInMap));
1138 }
1139
1140 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, JSValue v)
1141 {
1142 RegisterID* constantID = addConstantValue(v);
1143 if (dst)
1144 return emitMove(dst, constantID);
1145 return constantID;
1146 }
1147
1148 RegisterID* BytecodeGenerator::emitLoadGlobalObject(RegisterID* dst)
1149 {
1150 if (!m_globalObjectRegister) {
1151 int index = m_nextConstantOffset;
1152 m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset);
1153 ++m_nextConstantOffset;
1154 m_codeBlock->addConstant(JSValue());
1155 m_globalObjectRegister = &m_constantPoolRegisters[index];
1156 m_codeBlock->setGlobalObjectRegister(VirtualRegister(index));
1157 }
1158 if (dst)
1159 emitMove(dst, m_globalObjectRegister);
1160 return m_globalObjectRegister;
1161 }
1162
1163 bool BytecodeGenerator::isCaptured(int operand)
1164 {
1165 return m_symbolTable && m_symbolTable->isCaptured(operand);
1166 }
1167
1168 Local BytecodeGenerator::local(const Identifier& property)
1169 {
1170 if (property == propertyNames().thisIdentifier)
1171 return Local(thisRegister(), ReadOnly, NotCaptured);
1172
1173 if (property == propertyNames().arguments)
1174 createArgumentsIfNecessary();
1175
1176 if (!shouldOptimizeLocals())
1177 return Local();
1178
1179 SymbolTableEntry entry = symbolTable().get(property.impl());
1180 if (entry.isNull())
1181 return Local();
1182
1183 RegisterID* local = createLazyRegisterIfNecessary(&registerFor(entry.getIndex()));
1184 return Local(local, entry.getAttributes(), captureMode(local->index()));
1185 }
1186
1187 Local BytecodeGenerator::constLocal(const Identifier& property)
1188 {
1189 if (m_codeType != FunctionCode)
1190 return Local();
1191
1192 SymbolTableEntry entry = symbolTable().get(property.impl());
1193 if (entry.isNull())
1194 return Local();
1195
1196 RegisterID* local = createLazyRegisterIfNecessary(&registerFor(entry.getIndex()));
1197 return Local(local, entry.getAttributes(), captureMode(local->index()));
1198 }
1199
1200 void BytecodeGenerator::emitCheckHasInstance(RegisterID* dst, RegisterID* value, RegisterID* base, Label* target)
1201 {
1202 size_t begin = instructions().size();
1203 emitOpcode(op_check_has_instance);
1204 instructions().append(dst->index());
1205 instructions().append(value->index());
1206 instructions().append(base->index());
1207 instructions().append(target->bind(begin, instructions().size()));
1208 }
1209
1210 // Indicates the least upper bound of resolve type based on local scope. The bytecode linker
1211 // will start with this ResolveType and compute the least upper bound including intercepting scopes.
1212 ResolveType BytecodeGenerator::resolveType()
1213 {
1214 if (m_localScopeDepth)
1215 return Dynamic;
1216 if (m_symbolTable && m_symbolTable->usesNonStrictEval())
1217 return GlobalPropertyWithVarInjectionChecks;
1218 return GlobalProperty;
1219 }
1220
1221 RegisterID* BytecodeGenerator::emitResolveScope(RegisterID* dst, const Identifier& identifier)
1222 {
1223 m_codeBlock->addPropertyAccessInstruction(instructions().size());
1224
1225 ASSERT(!m_symbolTable || !m_symbolTable->contains(identifier.impl()) || resolveType() == Dynamic);
1226
1227 // resolve_scope dst, id, ResolveType, depth
1228 emitOpcode(op_resolve_scope);
1229 instructions().append(kill(dst));
1230 instructions().append(addConstant(identifier));
1231 instructions().append(resolveType());
1232 instructions().append(0);
1233 instructions().append(0);
1234 return dst;
1235 }
1236
1237 RegisterID* BytecodeGenerator::emitGetFromScope(RegisterID* dst, RegisterID* scope, const Identifier& identifier, ResolveMode resolveMode)
1238 {
1239 m_codeBlock->addPropertyAccessInstruction(instructions().size());
1240
1241 // get_from_scope dst, scope, id, ResolveModeAndType, Structure, Operand
1242 UnlinkedValueProfile profile = emitProfiledOpcode(op_get_from_scope);
1243 instructions().append(kill(dst));
1244 instructions().append(scope->index());
1245 instructions().append(addConstant(identifier));
1246 instructions().append(ResolveModeAndType(resolveMode, resolveType()).operand());
1247 instructions().append(0);
1248 instructions().append(0);
1249 instructions().append(profile);
1250 return dst;
1251 }
1252
1253 RegisterID* BytecodeGenerator::emitPutToScope(RegisterID* scope, const Identifier& identifier, RegisterID* value, ResolveMode resolveMode)
1254 {
1255 m_codeBlock->addPropertyAccessInstruction(instructions().size());
1256
1257 // put_to_scope scope, id, value, ResolveModeAndType, Structure, Operand
1258 emitOpcode(op_put_to_scope);
1259 instructions().append(scope->index());
1260 instructions().append(addConstant(identifier));
1261 instructions().append(value->index());
1262 instructions().append(ResolveModeAndType(resolveMode, resolveType()).operand());
1263 instructions().append(0);
1264 instructions().append(0);
1265 return value;
1266 }
1267
1268 RegisterID* BytecodeGenerator::emitInstanceOf(RegisterID* dst, RegisterID* value, RegisterID* basePrototype)
1269 {
1270 emitOpcode(op_instanceof);
1271 instructions().append(dst->index());
1272 instructions().append(value->index());
1273 instructions().append(basePrototype->index());
1274 return dst;
1275 }
1276
1277 RegisterID* BytecodeGenerator::emitInitGlobalConst(const Identifier& identifier, RegisterID* value)
1278 {
1279 ASSERT(m_codeType == GlobalCode);
1280 emitOpcode(op_init_global_const_nop);
1281 instructions().append(0);
1282 instructions().append(value->index());
1283 instructions().append(0);
1284 instructions().append(addConstant(identifier));
1285 return value;
1286 }
1287
1288 RegisterID* BytecodeGenerator::emitGetById(RegisterID* dst, RegisterID* base, const Identifier& property)
1289 {
1290 m_codeBlock->addPropertyAccessInstruction(instructions().size());
1291
1292 UnlinkedValueProfile profile = emitProfiledOpcode(op_get_by_id);
1293 instructions().append(kill(dst));
1294 instructions().append(base->index());
1295 instructions().append(addConstant(property));
1296 instructions().append(0);
1297 instructions().append(0);
1298 instructions().append(0);
1299 instructions().append(0);
1300 instructions().append(profile);
1301 return dst;
1302 }
1303
1304 RegisterID* BytecodeGenerator::emitGetArgumentsLength(RegisterID* dst, RegisterID* base)
1305 {
1306 emitOpcode(op_get_arguments_length);
1307 instructions().append(dst->index());
1308 ASSERT(base->virtualRegister() == m_codeBlock->argumentsRegister());
1309 instructions().append(base->index());
1310 instructions().append(addConstant(propertyNames().length));
1311 return dst;
1312 }
1313
1314 RegisterID* BytecodeGenerator::emitPutById(RegisterID* base, const Identifier& property, RegisterID* value)
1315 {
1316 unsigned propertyIndex = addConstant(property);
1317
1318 m_staticPropertyAnalyzer.putById(base->index(), propertyIndex);
1319
1320 m_codeBlock->addPropertyAccessInstruction(instructions().size());
1321
1322 emitOpcode(op_put_by_id);
1323 instructions().append(base->index());
1324 instructions().append(propertyIndex);
1325 instructions().append(value->index());
1326 instructions().append(0);
1327 instructions().append(0);
1328 instructions().append(0);
1329 instructions().append(0);
1330 instructions().append(0);
1331 return value;
1332 }
1333
1334 RegisterID* BytecodeGenerator::emitDirectPutById(RegisterID* base, const Identifier& property, RegisterID* value)
1335 {
1336 unsigned propertyIndex = addConstant(property);
1337
1338 m_staticPropertyAnalyzer.putById(base->index(), propertyIndex);
1339
1340 m_codeBlock->addPropertyAccessInstruction(instructions().size());
1341
1342 emitOpcode(op_put_by_id);
1343 instructions().append(base->index());
1344 instructions().append(propertyIndex);
1345 instructions().append(value->index());
1346 instructions().append(0);
1347 instructions().append(0);
1348 instructions().append(0);
1349 instructions().append(0);
1350 instructions().append(
1351 property != m_vm->propertyNames->underscoreProto
1352 && PropertyName(property).asIndex() == PropertyName::NotAnIndex);
1353 return value;
1354 }
1355
1356 void BytecodeGenerator::emitPutGetterSetter(RegisterID* base, const Identifier& property, RegisterID* getter, RegisterID* setter)
1357 {
1358 unsigned propertyIndex = addConstant(property);
1359
1360 m_staticPropertyAnalyzer.putById(base->index(), propertyIndex);
1361
1362 emitOpcode(op_put_getter_setter);
1363 instructions().append(base->index());
1364 instructions().append(propertyIndex);
1365 instructions().append(getter->index());
1366 instructions().append(setter->index());
1367 }
1368
1369 RegisterID* BytecodeGenerator::emitDeleteById(RegisterID* dst, RegisterID* base, const Identifier& property)
1370 {
1371 emitOpcode(op_del_by_id);
1372 instructions().append(dst->index());
1373 instructions().append(base->index());
1374 instructions().append(addConstant(property));
1375 return dst;
1376 }
1377
1378 RegisterID* BytecodeGenerator::emitGetArgumentByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
1379 {
1380 UnlinkedArrayProfile arrayProfile = newArrayProfile();
1381 UnlinkedValueProfile profile = emitProfiledOpcode(op_get_argument_by_val);
1382 instructions().append(kill(dst));
1383 ASSERT(base->virtualRegister() == m_codeBlock->argumentsRegister());
1384 instructions().append(base->index());
1385 instructions().append(property->index());
1386 instructions().append(arrayProfile);
1387 instructions().append(profile);
1388 return dst;
1389 }
1390
1391 RegisterID* BytecodeGenerator::emitGetByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
1392 {
1393 for (size_t i = m_forInContextStack.size(); i > 0; i--) {
1394 ForInContext& context = m_forInContextStack[i - 1];
1395 if (context.propertyRegister == property) {
1396 emitOpcode(op_get_by_pname);
1397 instructions().append(dst->index());
1398 instructions().append(base->index());
1399 instructions().append(property->index());
1400 instructions().append(context.expectedSubscriptRegister->index());
1401 instructions().append(context.iterRegister->index());
1402 instructions().append(context.indexRegister->index());
1403 return dst;
1404 }
1405 }
1406 UnlinkedArrayProfile arrayProfile = newArrayProfile();
1407 UnlinkedValueProfile profile = emitProfiledOpcode(op_get_by_val);
1408 instructions().append(kill(dst));
1409 instructions().append(base->index());
1410 instructions().append(property->index());
1411 instructions().append(arrayProfile);
1412 instructions().append(profile);
1413 return dst;
1414 }
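// Illustrative case (added commentary): inside an active for-in context,
// indexing the enumerated object with the loop's own property register, e.g.
//     for (var p in o) { use(o[p]); }
// matches context.propertyRegister above, so the subscript load is emitted as
// op_get_by_pname with the cached iterator/index registers rather than a
// generic op_get_by_val.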
1415
1416 RegisterID* BytecodeGenerator::emitPutByVal(RegisterID* base, RegisterID* property, RegisterID* value)
1417 {
1418 UnlinkedArrayProfile arrayProfile = newArrayProfile();
1419 if (m_isBuiltinFunction)
1420 emitOpcode(op_put_by_val_direct);
1421 else
1422 emitOpcode(op_put_by_val);
1423 instructions().append(base->index());
1424 instructions().append(property->index());
1425 instructions().append(value->index());
1426 instructions().append(arrayProfile);
1427 return value;
1428 }
1429
1430 RegisterID* BytecodeGenerator::emitDirectPutByVal(RegisterID* base, RegisterID* property, RegisterID* value)
1431 {
1432 UnlinkedArrayProfile arrayProfile = newArrayProfile();
1433 emitOpcode(op_put_by_val_direct);
1434 instructions().append(base->index());
1435 instructions().append(property->index());
1436 instructions().append(value->index());
1437 instructions().append(arrayProfile);
1438 return value;
1439 }
1440
1441 RegisterID* BytecodeGenerator::emitDeleteByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
1442 {
1443 emitOpcode(op_del_by_val);
1444 instructions().append(dst->index());
1445 instructions().append(base->index());
1446 instructions().append(property->index());
1447 return dst;
1448 }
1449
1450 RegisterID* BytecodeGenerator::emitPutByIndex(RegisterID* base, unsigned index, RegisterID* value)
1451 {
1452 emitOpcode(op_put_by_index);
1453 instructions().append(base->index());
1454 instructions().append(index);
1455 instructions().append(value->index());
1456 return value;
1457 }
1458
1459 RegisterID* BytecodeGenerator::emitCreateThis(RegisterID* dst)
1460 {
1461 RefPtr<RegisterID> func = newTemporary();
1462
1463 m_codeBlock->addPropertyAccessInstruction(instructions().size());
1464 emitOpcode(op_get_callee);
1465 instructions().append(func->index());
1466 instructions().append(0);
1467
1468 size_t begin = instructions().size();
1469 m_staticPropertyAnalyzer.createThis(m_thisRegister.index(), begin + 3);
1470
1471 emitOpcode(op_create_this);
1472 instructions().append(m_thisRegister.index());
1473 instructions().append(func->index());
1474 instructions().append(0);
1475 return dst;
1476 }
1477
1478 RegisterID* BytecodeGenerator::emitNewObject(RegisterID* dst)
1479 {
1480 size_t begin = instructions().size();
1481 m_staticPropertyAnalyzer.newObject(dst->index(), begin + 2);
1482
1483 emitOpcode(op_new_object);
1484 instructions().append(dst->index());
1485 instructions().append(0);
1486 instructions().append(newObjectAllocationProfile());
1487 return dst;
1488 }
1489
1490 unsigned BytecodeGenerator::addConstantBuffer(unsigned length)
1491 {
1492 return m_codeBlock->addConstantBuffer(length);
1493 }
1494
1495 JSString* BytecodeGenerator::addStringConstant(const Identifier& identifier)
1496 {
1497 JSString*& stringInMap = m_stringMap.add(identifier.impl(), nullptr).iterator->value;
1498 if (!stringInMap) {
1499 stringInMap = jsString(vm(), identifier.string());
1500 addConstantValue(stringInMap);
1501 }
1502 return stringInMap;
1503 }
1504
1505 RegisterID* BytecodeGenerator::emitNewArray(RegisterID* dst, ElementNode* elements, unsigned length)
1506 {
1507 #if !ASSERT_DISABLED
1508 unsigned checkLength = 0;
1509 #endif
1510 bool hadVariableExpression = false;
1511 if (length) {
1512 for (ElementNode* n = elements; n; n = n->next()) {
1513 if (!n->value()->isConstant()) {
1514 hadVariableExpression = true;
1515 break;
1516 }
1517 if (n->elision())
1518 break;
1519 #if !ASSERT_DISABLED
1520 checkLength++;
1521 #endif
1522 }
1523 if (!hadVariableExpression) {
1524 ASSERT(length == checkLength);
1525 unsigned constantBufferIndex = addConstantBuffer(length);
1526 JSValue* constantBuffer = m_codeBlock->constantBuffer(constantBufferIndex).data();
1527 unsigned index = 0;
1528 for (ElementNode* n = elements; index < length; n = n->next()) {
1529 ASSERT(n->value()->isConstant());
1530 constantBuffer[index++] = static_cast<ConstantNode*>(n->value())->jsValue(*this);
1531 }
1532 emitOpcode(op_new_array_buffer);
1533 instructions().append(dst->index());
1534 instructions().append(constantBufferIndex);
1535 instructions().append(length);
1536 instructions().append(newArrayAllocationProfile());
1537 return dst;
1538 }
1539 }
1540
1541 Vector<RefPtr<RegisterID>, 16, UnsafeVectorOverflow> argv;
1542 for (ElementNode* n = elements; n; n = n->next()) {
1543 if (!length)
1544 break;
1545 length--;
1546 ASSERT(!n->value()->isSpreadExpression());
1547 argv.append(newTemporary());
1548 // op_new_array requires the initial values to be a sequential range of registers
1549 ASSERT(argv.size() == 1 || argv[argv.size() - 1]->index() == argv[argv.size() - 2]->index() - 1);
1550 emitNode(argv.last().get(), n->value());
1551 }
1552 ASSERT(!length);
1553 emitOpcode(op_new_array);
1554 instructions().append(dst->index());
1555 instructions().append(argv.size() ? argv[0]->index() : 0); // argv
1556 instructions().append(argv.size()); // argc
1557 instructions().append(newArrayAllocationProfile());
1558 return dst;
1559 }
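// Illustrative split (added commentary): an array literal whose elements are
// all constants, e.g.
//     [1, 2, 3]
// is emitted as op_new_array_buffer over a constant buffer, while a literal
// containing a variable expression, e.g.
//     [1, x, 3]
// falls through to op_new_array with the elements evaluated into a sequential
// run of temporaries.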
1560
1561 RegisterID* BytecodeGenerator::emitNewFunction(RegisterID* dst, CaptureMode captureMode, FunctionBodyNode* function)
1562 {
1563 return emitNewFunctionInternal(dst, captureMode, m_codeBlock->addFunctionDecl(makeFunction(function)), false);
1564 }
1565
1566 RegisterID* BytecodeGenerator::emitLazyNewFunction(RegisterID* dst, FunctionBodyNode* function)
1567 {
1568 FunctionOffsetMap::AddResult ptr = m_functionOffsets.add(function, 0);
1569 if (ptr.isNewEntry)
1570 ptr.iterator->value = m_codeBlock->addFunctionDecl(makeFunction(function));
1571 return emitNewFunctionInternal(dst, NotCaptured, ptr.iterator->value, true);
1572 }
1573
1574 RegisterID* BytecodeGenerator::emitNewFunctionInternal(RegisterID* dst, CaptureMode captureMode, unsigned index, bool doNullCheck)
1575 {
1576 createActivationIfNecessary();
1577 emitOpcode(captureMode == IsCaptured ? op_new_captured_func : op_new_func);
1578 instructions().append(dst->index());
1579 instructions().append(index);
1580 if (captureMode == IsCaptured) {
1581 ASSERT(!doNullCheck);
1582 instructions().append(watchableVariable(dst->index()));
1583 } else
1584 instructions().append(doNullCheck);
1585 return dst;
1586 }
1587
1588 RegisterID* BytecodeGenerator::emitNewRegExp(RegisterID* dst, RegExp* regExp)
1589 {
1590 emitOpcode(op_new_regexp);
1591 instructions().append(dst->index());
1592 instructions().append(addRegExp(regExp));
1593 return dst;
1594 }
1595
1596 RegisterID* BytecodeGenerator::emitNewFunctionExpression(RegisterID* r0, FuncExprNode* n)
1597 {
1598 FunctionBodyNode* function = n->body();
1599 unsigned index = m_codeBlock->addFunctionExpr(makeFunction(function));
1600
1601 createActivationIfNecessary();
1602 emitOpcode(op_new_func_exp);
1603 instructions().append(r0->index());
1604 instructions().append(index);
1605 return r0;
1606 }
1607
1608 RegisterID* BytecodeGenerator::emitCall(RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd)
1609 {
1610 return emitCall(op_call, dst, func, expectedFunction, callArguments, divot, divotStart, divotEnd);
1611 }
1612
1613 void BytecodeGenerator::createArgumentsIfNecessary()
1614 {
1615 if (m_codeType != FunctionCode)
1616 return;
1617
1618 if (!m_codeBlock->usesArguments())
1619 return;
1620
1621 if (shouldTearOffArgumentsEagerly())
1622 return;
1623
1624 emitOpcode(op_create_arguments);
1625 instructions().append(m_codeBlock->argumentsRegister().offset());
1626 ASSERT(!hasWatchableVariable(m_codeBlock->argumentsRegister().offset()));
1627 }
1628
1629 void BytecodeGenerator::createActivationIfNecessary()
1630 {
1631 if (!m_activationRegister)
1632 return;
1633 emitOpcode(op_create_activation);
1634 instructions().append(m_activationRegister->index());
1635 }
1636
1637 RegisterID* BytecodeGenerator::emitCallEval(RegisterID* dst, RegisterID* func, CallArguments& callArguments, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd)
1638 {
1639 createActivationIfNecessary();
1640 return emitCall(op_call_eval, dst, func, NoExpectedFunction, callArguments, divot, divotStart, divotEnd);
1641 }
1642
1643 ExpectedFunction BytecodeGenerator::expectedFunctionForIdentifier(const Identifier& identifier)
1644 {
1645 if (identifier == m_vm->propertyNames->Object)
1646 return ExpectObjectConstructor;
1647 if (identifier == m_vm->propertyNames->Array)
1648 return ExpectArrayConstructor;
1649 return NoExpectedFunction;
1650 }
1651
1652 ExpectedFunction BytecodeGenerator::emitExpectedFunctionSnippet(RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, Label* done)
1653 {
1654 RefPtr<Label> realCall = newLabel();
1655 switch (expectedFunction) {
1656 case ExpectObjectConstructor: {
1657 // If the number of arguments is non-zero, then we can't do anything interesting.
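    // (So the only inlined form is 'new Object()': we guard with op_jneq_ptr against
    // the real Object constructor and emit op_new_object; 'new Object(x)' takes the
    // ordinary call path.)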
1658 if (callArguments.argumentCountIncludingThis() >= 2)
1659 return NoExpectedFunction;
1660
1661 size_t begin = instructions().size();
1662 emitOpcode(op_jneq_ptr);
1663 instructions().append(func->index());
1664 instructions().append(Special::ObjectConstructor);
1665 instructions().append(realCall->bind(begin, instructions().size()));
1666
1667 if (dst != ignoredResult())
1668 emitNewObject(dst);
1669 break;
1670 }
1671
1672 case ExpectArrayConstructor: {
1673 // If you're doing anything other than "new Array()" or "new Array(foo)" then we
1674 // don't inline it, for now. The only reason is that call arguments are in
1675 // the opposite order of what op_new_array expects, so we'd either need to change
1676 // how op_new_array works or we'd need an op_new_array_reverse. Neither of these
1677 // things sounds like it's worth it.
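    // In other words, a sketch of what gets inlined here (guarded by op_jneq_ptr
    // against the real Array constructor):
    //     new Array()     -> op_new_array with zero elements
    //     new Array(n)    -> op_new_array_with_size
    //     new Array(a, b) -> not inlined; falls through to the ordinary call path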
1678 if (callArguments.argumentCountIncludingThis() > 2)
1679 return NoExpectedFunction;
1680
1681 size_t begin = instructions().size();
1682 emitOpcode(op_jneq_ptr);
1683 instructions().append(func->index());
1684 instructions().append(Special::ArrayConstructor);
1685 instructions().append(realCall->bind(begin, instructions().size()));
1686
1687 if (dst != ignoredResult()) {
1688 if (callArguments.argumentCountIncludingThis() == 2) {
1689 emitOpcode(op_new_array_with_size);
1690 instructions().append(dst->index());
1691 instructions().append(callArguments.argumentRegister(0)->index());
1692 instructions().append(newArrayAllocationProfile());
1693 } else {
1694 ASSERT(callArguments.argumentCountIncludingThis() == 1);
1695 emitOpcode(op_new_array);
1696 instructions().append(dst->index());
1697 instructions().append(0);
1698 instructions().append(0);
1699 instructions().append(newArrayAllocationProfile());
1700 }
1701 }
1702 break;
1703 }
1704
1705 default:
1706 ASSERT(expectedFunction == NoExpectedFunction);
1707 return NoExpectedFunction;
1708 }
1709
1710 size_t begin = instructions().size();
1711 emitOpcode(op_jmp);
1712 instructions().append(done->bind(begin, instructions().size()));
1713 emitLabel(realCall.get());
1714
1715 return expectedFunction;
1716 }
1717
1718 RegisterID* BytecodeGenerator::emitCall(OpcodeID opcodeID, RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd)
1719 {
1720 ASSERT(opcodeID == op_call || opcodeID == op_call_eval);
1721 ASSERT(func->refCount());
1722
1723 if (m_shouldEmitProfileHooks)
1724 emitMove(callArguments.profileHookRegister(), func);
1725
1726 // Generate code for arguments.
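    // A lone spread argument, e.g. 'f(...xs)', is routed to op_call_varargs below;
    // when 'xs' resolves to the (non-slow) 'arguments' object, the arguments register
    // is passed directly rather than evaluating the spread into a temporary.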
1727 unsigned argument = 0;
1728 if (callArguments.argumentsNode()) {
1729 ArgumentListNode* n = callArguments.argumentsNode()->m_listNode;
1730 if (n && n->m_expr->isSpreadExpression()) {
1731 RELEASE_ASSERT(!n->m_next);
1732 auto expression = static_cast<SpreadExpressionNode*>(n->m_expr)->expression();
1733 RefPtr<RegisterID> argumentRegister;
1734 if (expression->isResolveNode() && willResolveToArguments(static_cast<ResolveNode*>(expression)->identifier()) && !symbolTable().slowArguments())
1735 argumentRegister = uncheckedRegisterForArguments();
1736 else
1737 argumentRegister = expression->emitBytecode(*this, callArguments.argumentRegister(0));
1738 RefPtr<RegisterID> thisRegister = emitMove(newTemporary(), callArguments.thisRegister());
1739 return emitCallVarargs(dst, func, callArguments.thisRegister(), argumentRegister.get(), newTemporary(), 0, callArguments.profileHookRegister(), divot, divotStart, divotEnd);
1740 }
1741 for (; n; n = n->m_next)
1742 emitNode(callArguments.argumentRegister(argument++), n);
1743 }
1744
1745 // Reserve space for call frame.
1746 Vector<RefPtr<RegisterID>, JSStack::CallFrameHeaderSize, UnsafeVectorOverflow> callFrame;
1747 for (int i = 0; i < JSStack::CallFrameHeaderSize; ++i)
1748 callFrame.append(newTemporary());
1749
1750 if (m_shouldEmitProfileHooks) {
1751 emitOpcode(op_profile_will_call);
1752 instructions().append(callArguments.profileHookRegister()->index());
1753 }
1754
1755 emitExpressionInfo(divot, divotStart, divotEnd);
1756
1757 RefPtr<Label> done = newLabel();
1758 expectedFunction = emitExpectedFunctionSnippet(dst, func, expectedFunction, callArguments, done.get());
1759
1760 // Emit call.
1761 UnlinkedArrayProfile arrayProfile = newArrayProfile();
1762 UnlinkedValueProfile profile = emitProfiledOpcode(opcodeID);
1763 ASSERT(dst);
1764 ASSERT(dst != ignoredResult());
1765 instructions().append(dst->index());
1766 instructions().append(func->index());
1767 instructions().append(callArguments.argumentCountIncludingThis());
1768 instructions().append(callArguments.stackOffset());
1769 instructions().append(m_codeBlock->addLLIntCallLinkInfo());
1770 instructions().append(0);
1771 instructions().append(arrayProfile);
1772 instructions().append(profile);
1773
1774 if (expectedFunction != NoExpectedFunction)
1775 emitLabel(done.get());
1776
1777 if (m_shouldEmitProfileHooks) {
1778 emitOpcode(op_profile_did_call);
1779 instructions().append(callArguments.profileHookRegister()->index());
1780 }
1781
1782 return dst;
1783 }
1784
1785 RegisterID* BytecodeGenerator::emitCallVarargs(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, RegisterID* arguments, RegisterID* firstFreeRegister, int32_t firstVarArgOffset, RegisterID* profileHookRegister, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd)
1786 {
1787 return emitCallVarargs(op_call_varargs, dst, func, thisRegister, arguments, firstFreeRegister, firstVarArgOffset, profileHookRegister, divot, divotStart, divotEnd);
1788 }
1789
1790 RegisterID* BytecodeGenerator::emitConstructVarargs(RegisterID* dst, RegisterID* func, RegisterID* arguments, RegisterID* firstFreeRegister, int32_t firstVarArgOffset, RegisterID* profileHookRegister, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd)
1791 {
1792 return emitCallVarargs(op_construct_varargs, dst, func, 0, arguments, firstFreeRegister, firstVarArgOffset, profileHookRegister, divot, divotStart, divotEnd);
1793 }
1794
1795 RegisterID* BytecodeGenerator::emitCallVarargs(OpcodeID opcode, RegisterID* dst, RegisterID* func, RegisterID* thisRegister, RegisterID* arguments, RegisterID* firstFreeRegister, int32_t firstVarArgOffset, RegisterID* profileHookRegister, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd)
1796 {
1797 if (m_shouldEmitProfileHooks) {
1798 emitMove(profileHookRegister, func);
1799 emitOpcode(op_profile_will_call);
1800 instructions().append(profileHookRegister->index());
1801 }
1802
1803 emitExpressionInfo(divot, divotStart, divotEnd);
1804
1805 // Emit call.
1806 UnlinkedArrayProfile arrayProfile = newArrayProfile();
1807 UnlinkedValueProfile profile = emitProfiledOpcode(opcode);
1808 ASSERT(dst != ignoredResult());
1809 instructions().append(dst->index());
1810 instructions().append(func->index());
1811 instructions().append(thisRegister ? thisRegister->index() : 0);
1812 instructions().append(arguments->index());
1813 instructions().append(firstFreeRegister->index());
1814 instructions().append(firstVarArgOffset);
1815 instructions().append(arrayProfile);
1816 instructions().append(profile);
1817 if (m_shouldEmitProfileHooks) {
1818 emitOpcode(op_profile_did_call);
1819 instructions().append(profileHookRegister->index());
1820 }
1821 return dst;
1822 }
1823
1824 RegisterID* BytecodeGenerator::emitReturn(RegisterID* src)
1825 {
1826 if (m_activationRegister) {
1827 emitOpcode(op_tear_off_activation);
1828 instructions().append(m_activationRegister->index());
1829 }
1830
1831 if (m_codeBlock->usesArguments() && m_codeBlock->numParameters() != 1 && !isStrictMode()) {
1832 emitOpcode(op_tear_off_arguments);
1833 instructions().append(m_codeBlock->argumentsRegister().offset());
1834 instructions().append(m_activationRegister ? m_activationRegister->index() : emitLoad(0, JSValue())->index());
1835 }
1836
1837 // Constructors use op_ret_object_or_this to check that the result is an
1838 // object, unless we can trivially determine the check is not
1839 // necessary (currently, if the return value is 'this').
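    // For example, given
    //     function C() { return 42; }
    // 'new C()' must evaluate to the newly created object, not 42, so the opcode
    // substitutes 'this' when the returned value is not an object.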
1840 if (isConstructor() && (src->index() != m_thisRegister.index())) {
1841 emitOpcode(op_ret_object_or_this);
1842 instructions().append(src->index());
1843 instructions().append(m_thisRegister.index());
1844 return src;
1845 }
1846 return emitUnaryNoDstOp(op_ret, src);
1847 }
1848
1849 RegisterID* BytecodeGenerator::emitUnaryNoDstOp(OpcodeID opcodeID, RegisterID* src)
1850 {
1851 emitOpcode(opcodeID);
1852 instructions().append(src->index());
1853 return src;
1854 }
1855
1856 RegisterID* BytecodeGenerator::emitConstruct(RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd)
1857 {
1858 ASSERT(func->refCount());
1859
1860 if (m_shouldEmitProfileHooks)
1861 emitMove(callArguments.profileHookRegister(), func);
1862
1863 // Generate code for arguments.
1864 unsigned argument = 0;
1865 if (ArgumentsNode* argumentsNode = callArguments.argumentsNode()) {
1866
1867 ArgumentListNode* n = callArguments.argumentsNode()->m_listNode;
1868 if (n && n->m_expr->isSpreadExpression()) {
1869 RELEASE_ASSERT(!n->m_next);
1870 auto expression = static_cast<SpreadExpressionNode*>(n->m_expr)->expression();
1871 RefPtr<RegisterID> argumentRegister;
1872 if (expression->isResolveNode() && willResolveToArguments(static_cast<ResolveNode*>(expression)->identifier()) && !symbolTable().slowArguments())
1873 argumentRegister = uncheckedRegisterForArguments();
1874 else
1875 argumentRegister = expression->emitBytecode(*this, callArguments.argumentRegister(0));
1876 return emitConstructVarargs(dst, func, argumentRegister.get(), newTemporary(), 0, callArguments.profileHookRegister(), divot, divotStart, divotEnd);
1877 }
1878
1879 for (ArgumentListNode* n = argumentsNode->m_listNode; n; n = n->m_next)
1880 emitNode(callArguments.argumentRegister(argument++), n);
1881 }
1882
1883 if (m_shouldEmitProfileHooks) {
1884 emitOpcode(op_profile_will_call);
1885 instructions().append(callArguments.profileHookRegister()->index());
1886 }
1887
1888 // Reserve space for call frame.
1889 Vector<RefPtr<RegisterID>, JSStack::CallFrameHeaderSize, UnsafeVectorOverflow> callFrame;
1890 for (int i = 0; i < JSStack::CallFrameHeaderSize; ++i)
1891 callFrame.append(newTemporary());
1892
1893 emitExpressionInfo(divot, divotStart, divotEnd);
1894
1895 RefPtr<Label> done = newLabel();
1896 expectedFunction = emitExpectedFunctionSnippet(dst, func, expectedFunction, callArguments, done.get());
1897
1898 UnlinkedValueProfile profile = emitProfiledOpcode(op_construct);
1899 ASSERT(dst != ignoredResult());
1900 instructions().append(dst->index());
1901 instructions().append(func->index());
1902 instructions().append(callArguments.argumentCountIncludingThis());
1903 instructions().append(callArguments.stackOffset());
1904 instructions().append(m_codeBlock->addLLIntCallLinkInfo());
1905 instructions().append(0);
1906 instructions().append(0);
1907 instructions().append(profile);
1908
1909 if (expectedFunction != NoExpectedFunction)
1910 emitLabel(done.get());
1911
1912 if (m_shouldEmitProfileHooks) {
1913 emitOpcode(op_profile_did_call);
1914 instructions().append(callArguments.profileHookRegister()->index());
1915 }
1916
1917 return dst;
1918 }
1919
1920 RegisterID* BytecodeGenerator::emitStrcat(RegisterID* dst, RegisterID* src, int count)
1921 {
1922 emitOpcode(op_strcat);
1923 instructions().append(dst->index());
1924 instructions().append(src->index());
1925 instructions().append(count);
1926
1927 return dst;
1928 }
1929
1930 void BytecodeGenerator::emitToPrimitive(RegisterID* dst, RegisterID* src)
1931 {
1932 emitOpcode(op_to_primitive);
1933 instructions().append(dst->index());
1934 instructions().append(src->index());
1935 }
1936
1937 RegisterID* BytecodeGenerator::emitPushWithScope(RegisterID* scope)
1938 {
1939 ControlFlowContext context;
1940 context.isFinallyBlock = false;
1941 m_scopeContextStack.append(context);
1942 m_localScopeDepth++;
1943
1944 createActivationIfNecessary();
1945 return emitUnaryNoDstOp(op_push_with_scope, scope);
1946 }
1947
1948 void BytecodeGenerator::emitPopScope()
1949 {
1950 ASSERT(m_scopeContextStack.size());
1951 ASSERT(!m_scopeContextStack.last().isFinallyBlock);
1952
1953 emitOpcode(op_pop_scope);
1954
1955 m_scopeContextStack.removeLast();
1956 m_localScopeDepth--;
1957 }
1958
1959 void BytecodeGenerator::emitDebugHook(DebugHookID debugHookID, unsigned line, unsigned charOffset, unsigned lineStart)
1960 {
1961 #if ENABLE(DEBUG_WITH_BREAKPOINT)
1962 if (debugHookID != DidReachBreakpoint)
1963 return;
1964 #else
1965 if (!m_shouldEmitDebugHooks)
1966 return;
1967 #endif
1968 JSTextPosition divot(line, charOffset, lineStart);
1969 emitExpressionInfo(divot, divot, divot);
1970 emitOpcode(op_debug);
1971 instructions().append(debugHookID);
1972 instructions().append(false);
1973 }
1974
1975 void BytecodeGenerator::pushFinallyContext(StatementNode* finallyBlock)
1976 {
1977 // Reclaim free label scopes.
1978 while (m_labelScopes.size() && !m_labelScopes.last().refCount())
1979 m_labelScopes.removeLast();
1980
1981 ControlFlowContext scope;
1982 scope.isFinallyBlock = true;
1983 FinallyContext context = {
1984 finallyBlock,
1985 static_cast<unsigned>(m_scopeContextStack.size()),
1986 static_cast<unsigned>(m_switchContextStack.size()),
1987 static_cast<unsigned>(m_forInContextStack.size()),
1988 static_cast<unsigned>(m_tryContextStack.size()),
1989 static_cast<unsigned>(m_labelScopes.size()),
1990 m_finallyDepth,
1991 m_localScopeDepth
1992 };
1993 scope.finallyContext = context;
1994 m_scopeContextStack.append(scope);
1995 m_finallyDepth++;
1996 }
1997
1998 void BytecodeGenerator::popFinallyContext()
1999 {
2000 ASSERT(m_scopeContextStack.size());
2001 ASSERT(m_scopeContextStack.last().isFinallyBlock);
2002 ASSERT(m_finallyDepth > 0);
2003 m_scopeContextStack.removeLast();
2004 m_finallyDepth--;
2005 }
2006
2007 LabelScopePtr BytecodeGenerator::breakTarget(const Identifier& name)
2008 {
2009 // Reclaim free label scopes.
2010 //
2011 // The condition was previously coded as 'm_labelScopes.size() && !m_labelScopes.last().refCount()',
2012 // however sometimes this appears to lead to GCC going a little haywire and entering the loop with
2013 // size 0, leading to segfaulty badness. We have yet to identify anything in our code that would
2014 // cause the GCC codegen to misbehave in this fashion, so the following refactoring of the loop
2015 // condition is a workaround.
2016 while (m_labelScopes.size()) {
2017 if (m_labelScopes.last().refCount())
2018 break;
2019 m_labelScopes.removeLast();
2020 }
2021
2022 if (!m_labelScopes.size())
2023 return LabelScopePtr::null();
2024
2025 // We special-case the following, which is a syntax error in Firefox:
2026 // label:
2027 // break;
2028 if (name.isEmpty()) {
2029 for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
2030 LabelScope* scope = &m_labelScopes[i];
2031 if (scope->type() != LabelScope::NamedLabel) {
2032 ASSERT(scope->breakTarget());
2033 return LabelScopePtr(m_labelScopes, i);
2034 }
2035 }
2036 return LabelScopePtr::null();
2037 }
2038
2039 for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
2040 LabelScope* scope = &m_labelScopes[i];
2041 if (scope->name() && *scope->name() == name) {
2042 ASSERT(scope->breakTarget());
2043 return LabelScopePtr(m_labelScopes, i);
2044 }
2045 }
2046 return LabelScopePtr::null();
2047 }
2048
2049 LabelScopePtr BytecodeGenerator::continueTarget(const Identifier& name)
2050 {
2051 // Reclaim free label scopes.
2052 while (m_labelScopes.size() && !m_labelScopes.last().refCount())
2053 m_labelScopes.removeLast();
2054
2055 if (!m_labelScopes.size())
2056 return LabelScopePtr::null();
2057
2058 if (name.isEmpty()) {
2059 for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
2060 LabelScope* scope = &m_labelScopes[i];
2061 if (scope->type() == LabelScope::Loop) {
2062 ASSERT(scope->continueTarget());
2063 return LabelScopePtr(m_labelScopes, i);
2064 }
2065 }
2066 return LabelScopePtr::null();
2067 }
2068
2069 // Continue to the loop nested nearest to the label scope that matches
2070 // 'name'.
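    // For example:
    //     outer: for (;;) {
    //         for (;;)
    //             continue outer; // resumes the iteration of the loop labeled 'outer'
    //     }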
2071 LabelScopePtr result = LabelScopePtr::null();
2072 for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
2073 LabelScope* scope = &m_labelScopes[i];
2074 if (scope->type() == LabelScope::Loop) {
2075 ASSERT(scope->continueTarget());
2076 result = LabelScopePtr(m_labelScopes, i);
2077 }
2078 if (scope->name() && *scope->name() == name)
2079 return result; // may be null.
2080 }
2081 return LabelScopePtr::null();
2082 }
2083
2084 void BytecodeGenerator::emitComplexPopScopes(ControlFlowContext* topScope, ControlFlowContext* bottomScope)
2085 {
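    // A sketch of what this handles: a jump that leaves nested dynamic scopes which
    // include finally blocks, e.g.
    //
    //     outer: with (o) {
    //         try { break outer; } finally { cleanup(); }
    //     }
    //
    // At the 'break' we must inline the finally body ('cleanup()') and emit
    // op_pop_scope for the enclosing 'with', saving and restoring the generator's
    // context stacks around each inlined finally block.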
2086 while (topScope > bottomScope) {
2087 // First we count the number of dynamic scopes we need to remove to get
2088 // to a finally block.
2089 int nNormalScopes = 0;
2090 while (topScope > bottomScope) {
2091 if (topScope->isFinallyBlock)
2092 break;
2093 ++nNormalScopes;
2094 --topScope;
2095 }
2096
2097 if (nNormalScopes) {
2098 // We need to remove a number of dynamic scopes to get to the next
2099 // finally block.
2100 while (nNormalScopes--)
2101 emitOpcode(op_pop_scope);
2102
2103 // If topScope == bottomScope then there isn't a finally block left to emit.
2104 if (topScope == bottomScope)
2105 return;
2106 }
2107
2108 Vector<ControlFlowContext> savedScopeContextStack;
2109 Vector<SwitchInfo> savedSwitchContextStack;
2110 Vector<ForInContext> savedForInContextStack;
2111 Vector<TryContext> poppedTryContexts;
2112 LabelScopeStore savedLabelScopes;
2113 while (topScope > bottomScope && topScope->isFinallyBlock) {
2114 RefPtr<Label> beforeFinally = emitLabel(newLabel().get());
2115
2116 // Save the current state of the world, then install the state the finally
2117 // block expects to run in.
2118 FinallyContext finallyContext = topScope->finallyContext;
2119 bool flipScopes = finallyContext.scopeContextStackSize != m_scopeContextStack.size();
2120 bool flipSwitches = finallyContext.switchContextStackSize != m_switchContextStack.size();
2121 bool flipForIns = finallyContext.forInContextStackSize != m_forInContextStack.size();
2122 bool flipTries = finallyContext.tryContextStackSize != m_tryContextStack.size();
2123 bool flipLabelScopes = finallyContext.labelScopesSize != m_labelScopes.size();
2124 int topScopeIndex = -1;
2125 int bottomScopeIndex = -1;
2126 if (flipScopes) {
2127 topScopeIndex = topScope - m_scopeContextStack.begin();
2128 bottomScopeIndex = bottomScope - m_scopeContextStack.begin();
2129 savedScopeContextStack = m_scopeContextStack;
2130 m_scopeContextStack.shrink(finallyContext.scopeContextStackSize);
2131 }
2132 if (flipSwitches) {
2133 savedSwitchContextStack = m_switchContextStack;
2134 m_switchContextStack.shrink(finallyContext.switchContextStackSize);
2135 }
2136 if (flipForIns) {
2137 savedForInContextStack = m_forInContextStack;
2138 m_forInContextStack.shrink(finallyContext.forInContextStackSize);
2139 }
2140 if (flipTries) {
2141 while (m_tryContextStack.size() != finallyContext.tryContextStackSize) {
2142 ASSERT(m_tryContextStack.size() > finallyContext.tryContextStackSize);
2143 TryContext context = m_tryContextStack.last();
2144 m_tryContextStack.removeLast();
2145 TryRange range;
2146 range.start = context.start;
2147 range.end = beforeFinally;
2148 range.tryData = context.tryData;
2149 m_tryRanges.append(range);
2150 poppedTryContexts.append(context);
2151 }
2152 }
2153 if (flipLabelScopes) {
2154 savedLabelScopes = m_labelScopes;
2155 while (m_labelScopes.size() > finallyContext.labelScopesSize)
2156 m_labelScopes.removeLast();
2157 }
2158 int savedFinallyDepth = m_finallyDepth;
2159 m_finallyDepth = finallyContext.finallyDepth;
2160 int savedDynamicScopeDepth = m_localScopeDepth;
2161 m_localScopeDepth = finallyContext.dynamicScopeDepth;
2162
2163 // Emit the finally block.
2164 emitNode(finallyContext.finallyBlock);
2165
2166 RefPtr<Label> afterFinally = emitLabel(newLabel().get());
2167
2168 // Restore the state of the world.
2169 if (flipScopes) {
2170 m_scopeContextStack = savedScopeContextStack;
2171 topScope = &m_scopeContextStack[topScopeIndex]; // assert it's within bounds
2172 bottomScope = m_scopeContextStack.begin() + bottomScopeIndex; // don't assert, since the index might be -1.
2173 }
2174 if (flipSwitches)
2175 m_switchContextStack = savedSwitchContextStack;
2176 if (flipForIns)
2177 m_forInContextStack = savedForInContextStack;
2178 if (flipTries) {
2179 ASSERT(m_tryContextStack.size() == finallyContext.tryContextStackSize);
2180 for (unsigned i = poppedTryContexts.size(); i--;) {
2181 TryContext context = poppedTryContexts[i];
2182 context.start = afterFinally;
2183 m_tryContextStack.append(context);
2184 }
2185 poppedTryContexts.clear();
2186 }
2187 if (flipLabelScopes)
2188 m_labelScopes = savedLabelScopes;
2189 m_finallyDepth = savedFinallyDepth;
2190 m_localScopeDepth = savedDynamicScopeDepth;
2191
2192 --topScope;
2193 }
2194 }
2195 }
2196
2197 void BytecodeGenerator::emitPopScopes(int targetScopeDepth)
2198 {
2199 ASSERT(scopeDepth() - targetScopeDepth >= 0);
2200
2201 size_t scopeDelta = scopeDepth() - targetScopeDepth;
2202 ASSERT(scopeDelta <= m_scopeContextStack.size());
2203 if (!scopeDelta)
2204 return;
2205
2206 if (!m_finallyDepth) {
2207 while (scopeDelta--)
2208 emitOpcode(op_pop_scope);
2209 return;
2210 }
2211
2212 emitComplexPopScopes(&m_scopeContextStack.last(), &m_scopeContextStack.last() - scopeDelta);
2213 }
2214
2215 RegisterID* BytecodeGenerator::emitGetPropertyNames(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, Label* breakTarget)
2216 {
2217 size_t begin = instructions().size();
2218
2219 emitOpcode(op_get_pnames);
2220 instructions().append(dst->index());
2221 instructions().append(base->index());
2222 instructions().append(i->index());
2223 instructions().append(size->index());
2224 instructions().append(breakTarget->bind(begin, instructions().size()));
2225 return dst;
2226 }
2227
2228 RegisterID* BytecodeGenerator::emitNextPropertyName(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, RegisterID* iter, Label* target)
2229 {
2230 size_t begin = instructions().size();
2231
2232 emitOpcode(op_next_pname);
2233 instructions().append(dst->index());
2234 instructions().append(base->index());
2235 instructions().append(i->index());
2236 instructions().append(size->index());
2237 instructions().append(iter->index());
2238 instructions().append(target->bind(begin, instructions().size()));
2239 return dst;
2240 }
2241
2242 TryData* BytecodeGenerator::pushTry(Label* start)
2243 {
2244 TryData tryData;
2245 tryData.target = newLabel();
2246 tryData.targetScopeDepth = UINT_MAX;
2247 m_tryData.append(tryData);
2248 TryData* result = &m_tryData.last();
2249
2250 TryContext tryContext;
2251 tryContext.start = start;
2252 tryContext.tryData = result;
2253
2254 m_tryContextStack.append(tryContext);
2255
2256 return result;
2257 }
2258
2259 RegisterID* BytecodeGenerator::popTryAndEmitCatch(TryData* tryData, RegisterID* targetRegister, Label* end)
2260 {
2261 m_usesExceptions = true;
2262
2263 ASSERT_UNUSED(tryData, m_tryContextStack.last().tryData == tryData);
2264
2265 TryRange tryRange;
2266 tryRange.start = m_tryContextStack.last().start;
2267 tryRange.end = end;
2268 tryRange.tryData = m_tryContextStack.last().tryData;
2269 m_tryRanges.append(tryRange);
2270 m_tryContextStack.removeLast();
2271
2272 emitLabel(tryRange.tryData->target.get());
2273 tryRange.tryData->targetScopeDepth = m_localScopeDepth;
2274
2275 emitOpcode(op_catch);
2276 instructions().append(targetRegister->index());
2277 return targetRegister;
2278 }
2279
2280 void BytecodeGenerator::emitThrowReferenceError(const String& message)
2281 {
2282 emitOpcode(op_throw_static_error);
2283 instructions().append(addConstantValue(addStringConstant(Identifier(m_vm, message)))->index());
2284 instructions().append(true);
2285 }
2286
2287 void BytecodeGenerator::emitPushFunctionNameScope(const Identifier& property, RegisterID* value, unsigned attributes)
2288 {
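    // A sketch of the main use: making the name of a named function expression
    // visible inside its own body, e.g.
    //     var fac = function f(n) { return n < 2 ? 1 : n * f(n - 1); };
    // where 'f' resolves through the pushed name scope.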
2289 emitOpcode(op_push_name_scope);
2290 instructions().append(addConstant(property));
2291 instructions().append(value->index());
2292 instructions().append(attributes);
2293 }
2294
2295 void BytecodeGenerator::emitPushCatchScope(const Identifier& property, RegisterID* value, unsigned attributes)
2296 {
2297 createActivationIfNecessary();
2298
2299 ControlFlowContext context;
2300 context.isFinallyBlock = false;
2301 m_scopeContextStack.append(context);
2302 m_localScopeDepth++;
2303
2304 emitOpcode(op_push_name_scope);
2305 instructions().append(addConstant(property));
2306 instructions().append(value->index());
2307 instructions().append(attributes);
2308 }
2309
2310 void BytecodeGenerator::beginSwitch(RegisterID* scrutineeRegister, SwitchInfo::SwitchType type)
2311 {
2312 SwitchInfo info = { static_cast<uint32_t>(instructions().size()), type };
2313 switch (type) {
2314 case SwitchInfo::SwitchImmediate:
2315 emitOpcode(op_switch_imm);
2316 break;
2317 case SwitchInfo::SwitchCharacter:
2318 emitOpcode(op_switch_char);
2319 break;
2320 case SwitchInfo::SwitchString:
2321 emitOpcode(op_switch_string);
2322 break;
2323 default:
2324 RELEASE_ASSERT_NOT_REACHED();
2325 }
2326
2327 instructions().append(0); // placeholder for table index
2328 instructions().append(0); // placeholder for default target
2329 instructions().append(scrutineeRegister->index());
2330 m_switchContextStack.append(info);
2331 }
2332
2333 static int32_t keyForImmediateSwitch(ExpressionNode* node, int32_t min, int32_t max)
2334 {
2335 UNUSED_PARAM(max);
2336 ASSERT(node->isNumber());
2337 double value = static_cast<NumberNode*>(node)->value();
2338 int32_t key = static_cast<int32_t>(value);
2339 ASSERT(key == value);
2340 ASSERT(key >= min);
2341 ASSERT(key <= max);
2342 return key - min;
2343 }
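    // For example, with 'switch (x) { case 3: ... case 7: ... }', min is 3 and max is 7,
    // so the clause for 7 contributes key 4 (7 - 3) to the jump table filled in by
    // prepareJumpTableForSwitch below.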
2344
2345 static int32_t keyForCharacterSwitch(ExpressionNode* node, int32_t min, int32_t max)
2346 {
2347 UNUSED_PARAM(max);
2348 ASSERT(node->isString());
2349 StringImpl* clause = static_cast<StringNode*>(node)->value().impl();
2350 ASSERT(clause->length() == 1);
2351
2352 int32_t key = (*clause)[0];
2353 ASSERT(key >= min);
2354 ASSERT(key <= max);
2355 return key - min;
2356 }
2357
2358 static void prepareJumpTableForSwitch(
2359 UnlinkedSimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount,
2360 RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max,
2361 int32_t (*keyGetter)(ExpressionNode*, int32_t min, int32_t max))
2362 {
2363 jumpTable.min = min;
2364 jumpTable.branchOffsets.resize(max - min + 1);
2365 jumpTable.branchOffsets.fill(0);
2366 for (uint32_t i = 0; i < clauseCount; ++i) {
2367 // We're emitting this after the clause labels have been fixed, so
2368 // the labels should not be "forward" references.
2369 ASSERT(!labels[i]->isForward());
2370 jumpTable.add(keyGetter(nodes[i], min, max), labels[i]->bind(switchAddress, switchAddress + 3));
2371 }
2372 }
2373
2374 static void prepareJumpTableForStringSwitch(UnlinkedStringJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes)
2375 {
2376 for (uint32_t i = 0; i < clauseCount; ++i) {
2377 // We're emitting this after the clause labels have been fixed, so
2378 // the labels should not be "forward" references.
2379 ASSERT(!labels[i]->isForward());
2380
2381 ASSERT(nodes[i]->isString());
2382 StringImpl* clause = static_cast<StringNode*>(nodes[i])->value().impl();
2383 jumpTable.offsetTable.add(clause, labels[i]->bind(switchAddress, switchAddress + 3));
2384 }
2385 }
2386
2387 void BytecodeGenerator::endSwitch(uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, Label* defaultLabel, int32_t min, int32_t max)
2388 {
2389 SwitchInfo switchInfo = m_switchContextStack.last();
2390 m_switchContextStack.removeLast();
2391
2392 switch (switchInfo.switchType) {
2393 case SwitchInfo::SwitchImmediate:
2394 case SwitchInfo::SwitchCharacter: {
2395 instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfSwitchJumpTables();
2396 instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);
2397
2398 UnlinkedSimpleJumpTable& jumpTable = m_codeBlock->addSwitchJumpTable();
2399 prepareJumpTableForSwitch(
2400 jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max,
2401 switchInfo.switchType == SwitchInfo::SwitchImmediate
2402 ? keyForImmediateSwitch
2403 : keyForCharacterSwitch);
2404 break;
2405 }
2406
2407 case SwitchInfo::SwitchString: {
2408 instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfStringSwitchJumpTables();
2409 instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);
2410
2411 UnlinkedStringJumpTable& jumpTable = m_codeBlock->addStringSwitchJumpTable();
2412 prepareJumpTableForStringSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes);
2413 break;
2414 }
2415
2416 default:
2417 RELEASE_ASSERT_NOT_REACHED();
2418 break;
2419 }
2420 }
2421
2422 RegisterID* BytecodeGenerator::emitThrowExpressionTooDeepException()
2423 {
2424 // It would be nice to do an even better job of identifying exactly where the expression is.
2425 // And we could make the caller pass the node pointer in, if there was some way of getting
2426 // that from an arbitrary node. However, calling emitExpressionInfo without any useful data
2427 // is still good enough to get us an accurate line number.
2428 m_expressionTooDeep = true;
2429 return newTemporary();
2430 }
2431
2432 void BytecodeGenerator::setIsNumericCompareFunction(bool isNumericCompareFunction)
2433 {
2434 m_codeBlock->setIsNumericCompareFunction(isNumericCompareFunction);
2435 }
2436
2437 bool BytecodeGenerator::isArgumentNumber(const Identifier& ident, int argumentNumber)
2438 {
2439 RegisterID* registerID = local(ident).get();
2440 if (!registerID || registerID->index() >= 0)
2441 return false;
2442 return registerID->index() == CallFrame::argumentOffset(argumentNumber);
2443 }
2444
2445 void BytecodeGenerator::emitReadOnlyExceptionIfNeeded()
2446 {
2447 if (!isStrictMode())
2448 return;
2449 emitOpcode(op_throw_static_error);
2450 instructions().append(addConstantValue(addStringConstant(Identifier(m_vm, StrictModeReadonlyPropertyWriteError)))->index());
2451 instructions().append(false);
2452 }
2453
2454 void BytecodeGenerator::emitEnumeration(ThrowableExpressionData* node, ExpressionNode* subjectNode, const std::function<void(BytecodeGenerator&, RegisterID*)>& callBack)
2455 {
2456 if (subjectNode->isResolveNode()
2457 && willResolveToArguments(static_cast<ResolveNode*>(subjectNode)->identifier())
2458 && !symbolTable().slowArguments()) {
2459 RefPtr<RegisterID> index = emitLoad(newTemporary(), jsNumber(0));
2460
2461 LabelScopePtr scope = newLabelScope(LabelScope::Loop);
2462 RefPtr<RegisterID> value = emitLoad(newTemporary(), jsUndefined());
2463
2464 RefPtr<Label> loopCondition = newLabel();
2465 RefPtr<Label> loopStart = newLabel();
2466 emitJump(loopCondition.get());
2467 emitLabel(loopStart.get());
2468 emitLoopHint();
2469 emitGetArgumentByVal(value.get(), uncheckedRegisterForArguments(), index.get());
2470 callBack(*this, value.get());
2471
2472 emitLabel(scope->continueTarget());
2473 emitInc(index.get());
2474 emitLabel(loopCondition.get());
2475 RefPtr<RegisterID> length = emitGetArgumentsLength(newTemporary(), uncheckedRegisterForArguments());
2476 emitJumpIfTrue(emitEqualityOp(op_less, newTemporary(), index.get(), length.get()), loopStart.get());
2477 emitLabel(scope->breakTarget());
2478 return;
2479 }
2480
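    // Generic path: drive the iterator protocol directly. Roughly a sketch of the
    // emitted bytecode, writing the private names as @-names:
    //
    //     var iterator = subject.@iterator.call(subject);
    //     var next = iterator.@iteratorNext;
    //     var value = undefined;
    //     while ((value = next.call(iterator, value)) !== @iterationTerminator)
    //         callBack(value);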
2481 LabelScopePtr scope = newLabelScope(LabelScope::Loop);
2482 RefPtr<RegisterID> subject = newTemporary();
2483 emitNode(subject.get(), subjectNode);
2484 RefPtr<RegisterID> iterator = emitGetById(newTemporary(), subject.get(), propertyNames().iteratorPrivateName);
2485 {
2486 CallArguments args(*this, 0);
2487 emitMove(args.thisRegister(), subject.get());
2488 emitCall(iterator.get(), iterator.get(), NoExpectedFunction, args, node->divot(), node->divotStart(), node->divotEnd());
2489 }
2490 RefPtr<RegisterID> iteratorNext = emitGetById(newTemporary(), iterator.get(), propertyNames().iteratorNextPrivateName);
2491 RefPtr<RegisterID> value = newTemporary();
2492 emitLoad(value.get(), jsUndefined());
2493
2494 emitJump(scope->continueTarget());
2495
2496 RefPtr<Label> loopStart = newLabel();
2497 emitLabel(loopStart.get());
2498 emitLoopHint();
2499 callBack(*this, value.get());
2500 emitLabel(scope->continueTarget());
2501 CallArguments nextArguments(*this, 0, 1);
2502 emitMove(nextArguments.thisRegister(), iterator.get());
2503 emitMove(nextArguments.argumentRegister(0), value.get());
2504 emitCall(value.get(), iteratorNext.get(), NoExpectedFunction, nextArguments, node->divot(), node->divotStart(), node->divotEnd());
2505 RefPtr<RegisterID> result = newTemporary();
2506 emitJumpIfFalse(emitEqualityOp(op_stricteq, result.get(), value.get(), emitLoad(0, JSValue(vm()->iterationTerminator.get()))), loopStart.get());
2507 emitLabel(scope->breakTarget());
2508 }
2509
2510 } // namespace JSC