]> git.saurik.com Git - apple/javascriptcore.git/blame_incremental - bytecompiler/BytecodeGenerator.cpp
JavaScriptCore-1097.3.3.tar.gz
[apple/javascriptcore.git] / bytecompiler / BytecodeGenerator.cpp
... / ...
CommitLineData
1/*
2 * Copyright (C) 2008, 2009, 2012 Apple Inc. All rights reserved.
3 * Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca>
4 *
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions
7 * are met:
8 *
9 * 1. Redistributions of source code must retain the above copyright
10 * notice, this list of conditions and the following disclaimer.
11 * 2. Redistributions in binary form must reproduce the above copyright
12 * notice, this list of conditions and the following disclaimer in the
13 * documentation and/or other materials provided with the distribution.
14 * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
15 * its contributors may be used to endorse or promote products derived
16 * from this software without specific prior written permission.
17 *
18 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
19 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
20 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
21 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
22 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
23 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
24 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
25 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
26 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
27 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28 */
29
30#include "config.h"
31#include "BytecodeGenerator.h"
32
33#include "BatchedTransitionOptimizer.h"
34#include "JSFunction.h"
35#include "Interpreter.h"
36#include "LowLevelInterpreter.h"
37#include "ScopeChain.h"
38#include "StrongInlines.h"
39#include "UString.h"
40
41using namespace std;
42
43namespace JSC {
44
45/*
46 The layout of a register frame looks like this:
47
48 For
49
50 function f(x, y) {
51 var v1;
52 function g() { }
53 var v2;
54 return (x) * (y);
55 }
56
57 assuming (x) and (y) generated temporaries t1 and t2, you would have
58
59 ------------------------------------
60 | x | y | g | v2 | v1 | t1 | t2 | <-- value held
61 ------------------------------------
62 | -5 | -4 | -3 | -2 | -1 | +0 | +1 | <-- register index
63 ------------------------------------
64 | params->|<-locals | temps->
65
66 Because temporary registers are allocated in a stack-like fashion, we
67 can reclaim them with a simple popping algorithm. The same goes for labels.
68 (We never reclaim parameter or local registers, because parameters and
69 locals are DontDelete.)
70
71 The register layout before a function call looks like this:
72
73 For
74
75 function f(x, y)
76 {
77 }
78
79 f(1);
80
81 > <------------------------------
82 < > reserved: call frame | 1 | <-- value held
83 > >snip< <------------------------------
84 < > +0 | +1 | +2 | +3 | +4 | +5 | <-- register index
85 > <------------------------------
86 | params->|<-locals | temps->
87
88 The call instruction fills in the "call frame" registers. It also pads
89 missing arguments at the end of the call:
90
91 > <-----------------------------------
92 < > reserved: call frame | 1 | ? | <-- value held ("?" stands for "undefined")
93 > >snip< <-----------------------------------
94 < > +0 | +1 | +2 | +3 | +4 | +5 | +6 | <-- register index
95 > <-----------------------------------
96 | params->|<-locals | temps->
97
98 After filling in missing arguments, the call instruction sets up the new
99 stack frame to overlap the end of the old stack frame:
100
101 |----------------------------------> <
102 | reserved: call frame | 1 | ? < > <-- value held ("?" stands for "undefined")
103 |----------------------------------> >snip< <
104 | -7 | -6 | -5 | -4 | -3 | -2 | -1 < > <-- register index
105 |----------------------------------> <
106 | | params->|<-locals | temps->
107
108 That way, arguments are "copied" into the callee's stack frame for free.
109
110 If the caller supplies too many arguments, this trick doesn't work. The
111 extra arguments protrude into space reserved for locals and temporaries.
112 In that case, the call instruction makes a real copy of the call frame header,
113 along with just the arguments expected by the callee, leaving the original
114 call frame header and arguments behind. (The call instruction can't just discard
115 extra arguments, because the "arguments" object may access them later.)
116 This copying strategy ensures that all named values will be at the indices
117 expected by the callee.
118*/
119
120static bool s_dumpsGeneratedCode = false;
121
122void Label::setLocation(unsigned location)
123{
124 m_location = location;
125
126 unsigned size = m_unresolvedJumps.size();
127 for (unsigned i = 0; i < size; ++i)
128 m_generator->m_instructions[m_unresolvedJumps[i].second].u.operand = m_location - m_unresolvedJumps[i].first;
129}
130
// Sets the process-wide flag (s_dumpsGeneratedCode) that makes generate()
// dump each finished code block's bytecode.
void BytecodeGenerator::setDumpsGeneratedCode(bool dumpsGeneratedCode)
{
    s_dumpsGeneratedCode = dumpsGeneratedCode;
}
135
// Returns whether generated bytecode dumping is currently enabled.
bool BytecodeGenerator::dumpsGeneratedCode()
{
    return s_dumpsGeneratedCode;
}
140
// Drives bytecode generation for the AST node this generator was constructed
// with, transfers the finished instruction stream into the code block, and
// returns 0 on success or an out-of-memory error object if expression
// nesting got too deep during emission.
JSObject* BytecodeGenerator::generate()
{
    SamplingRegion samplingRegion("Bytecode Generation");

    m_codeBlock->setThisRegister(m_thisRegister.index());

    // Walk the AST; nodes call back into the emit* helpers to append opcodes.
    m_scopeNode->emitBytecode(*this);

    // Hand the accumulated instruction vector over to the code block.
    m_codeBlock->instructions() = RefCountedArray<Instruction>(m_instructions);

    if (s_dumpsGeneratedCode)
        m_codeBlock->dump(m_scopeChain->globalObject->globalExec());

    // The symbol table is dropped after compilation unless this is function
    // code that keeps a full scope chain or uses 'arguments'.
    if ((m_codeType == FunctionCode && !m_codeBlock->needsFullScopeChain() && !m_codeBlock->usesArguments()) || m_codeType == EvalCode)
        symbolTable().clear();

    m_codeBlock->shrinkToFit();

    if (m_expressionTooDeep)
        return createOutOfMemoryError(m_scopeChain->globalObject.get());
    return 0;
}
163
164bool BytecodeGenerator::addVar(const Identifier& ident, bool isConstant, RegisterID*& r0)
165{
166 int index = m_calleeRegisters.size();
167 SymbolTableEntry newEntry(index, isConstant ? ReadOnly : 0);
168 SymbolTable::AddResult result = symbolTable().add(ident.impl(), newEntry);
169
170 if (!result.isNewEntry) {
171 r0 = &registerFor(result.iterator->second.getIndex());
172 return false;
173 }
174
175 r0 = addVar();
176 return true;
177}
178
179int BytecodeGenerator::addGlobalVar(const Identifier& ident, bool isConstant)
180{
181 int index = symbolTable().size();
182 SymbolTableEntry newEntry(index, isConstant ? ReadOnly : 0);
183 SymbolTable::AddResult result = symbolTable().add(ident.impl(), newEntry);
184 if (!result.isNewEntry)
185 index = result.iterator->second.getIndex();
186 return index;
187}
188
189void BytecodeGenerator::preserveLastVar()
190{
191 if ((m_firstConstantIndex = m_calleeRegisters.size()) != 0)
192 m_lastVar = &m_calleeRegisters.last();
193}
194
// Constructor for whole-program (global) code. Besides initializing the
// generator state, this hoists the program's function and var declarations
// directly onto the global object before any body bytecode is generated.
BytecodeGenerator::BytecodeGenerator(ProgramNode* programNode, ScopeChainNode* scopeChain, SymbolTable* symbolTable, ProgramCodeBlock* codeBlock, CompilationKind compilationKind)
    : m_shouldEmitDebugHooks(scopeChain->globalObject->debugger())
    , m_shouldEmitProfileHooks(scopeChain->globalObject->globalObjectMethodTable()->supportsProfiling(scopeChain->globalObject.get()))
    , m_shouldEmitRichSourceInfo(scopeChain->globalObject->globalObjectMethodTable()->supportsRichSourceInfo(scopeChain->globalObject.get()))
    , m_scopeChain(*scopeChain->globalData, scopeChain)
    , m_symbolTable(symbolTable)
    , m_scopeNode(programNode)
    , m_codeBlock(codeBlock)
    , m_thisRegister(CallFrame::thisArgumentOffset())
    , m_finallyDepth(0)
    , m_dynamicScopeDepth(0)
    , m_baseScopeDepth(0)
    , m_codeType(GlobalCode)
    , m_nextConstantOffset(0)
    , m_globalConstantIndex(0)
    , m_hasCreatedActivation(true) // Global code never creates an activation.
    , m_firstLazyFunction(0)
    , m_lastLazyFunction(0)
    , m_globalData(scopeChain->globalData)
    , m_lastOpcodeID(op_end)
#ifndef NDEBUG
    , m_lastOpcodePosition(0)
#endif
    , m_stack(wtfThreadData().stack())
    , m_usesExceptions(false)
    , m_expressionTooDeep(false)
{
    m_globalData->startedCompiling(m_codeBlock); // Paired with finishedCompiling() in the destructor.
    if (m_shouldEmitDebugHooks)
        m_codeBlock->setNeedsFullScopeChain(true);

    emitOpcode(op_enter);
    codeBlock->setGlobalData(m_globalData);

    // FIXME: Move code that modifies the global object to Interpreter::execute.

    m_codeBlock->setNumParameters(1); // Allocate space for "this"
    codeBlock->m_numCapturedVars = codeBlock->m_numVars;

    // A recompilation for the optimizing JIT must not redo the global-object
    // mutations below; they already happened on the first compile.
    if (compilationKind == OptimizingCompilation)
        return;

    JSGlobalObject* globalObject = scopeChain->globalObject.get();
    ExecState* exec = globalObject->globalExec();

    BatchedTransitionOptimizer optimizer(*m_globalData, globalObject);

    const VarStack& varStack = programNode->varStack();
    const FunctionStack& functionStack = programNode->functionStack();

    size_t newGlobals = varStack.size() + functionStack.size();
    if (!newGlobals)
        return;
    globalObject->resizeRegisters(symbolTable->size() + newGlobals);

    // Hoist function declarations: each gets a global register holding a
    // freshly created JSFunction.
    for (size_t i = 0; i < functionStack.size(); ++i) {
        FunctionBodyNode* function = functionStack[i];
        globalObject->removeDirect(*m_globalData, function->ident()); // Newly declared functions overwrite existing properties.

        JSValue value = JSFunction::create(exec, makeFunction(exec, function), scopeChain);
        int index = addGlobalVar(function->ident(), false);
        globalObject->registerAt(index).set(*m_globalData, globalObject, value);
    }

    // Hoist var declarations, skipping names the global object already has
    // (including the functions installed just above).
    for (size_t i = 0; i < varStack.size(); ++i) {
        if (globalObject->hasProperty(exec, *varStack[i].first))
            continue;
        addGlobalVar(*varStack[i].first, varStack[i].second & DeclarationStacks::IsConstant);
    }
}
265
// Constructor for function code. Sets up the activation and 'arguments'
// registers (if needed), allocates registers for declared functions and
// vars — captured ones first — then parameters, and finally emits the
// 'this' setup required for constructors or non-strict 'this' users.
BytecodeGenerator::BytecodeGenerator(FunctionBodyNode* functionBody, ScopeChainNode* scopeChain, SymbolTable* symbolTable, CodeBlock* codeBlock, CompilationKind)
    : m_shouldEmitDebugHooks(scopeChain->globalObject->debugger())
    , m_shouldEmitProfileHooks(scopeChain->globalObject->globalObjectMethodTable()->supportsProfiling(scopeChain->globalObject.get()))
    , m_shouldEmitRichSourceInfo(scopeChain->globalObject->globalObjectMethodTable()->supportsRichSourceInfo(scopeChain->globalObject.get()))
    , m_scopeChain(*scopeChain->globalData, scopeChain)
    , m_symbolTable(symbolTable)
    , m_scopeNode(functionBody)
    , m_codeBlock(codeBlock)
    , m_activationRegister(0)
    , m_finallyDepth(0)
    , m_dynamicScopeDepth(0)
    , m_baseScopeDepth(0)
    , m_codeType(FunctionCode)
    , m_nextConstantOffset(0)
    , m_globalConstantIndex(0)
    , m_hasCreatedActivation(false)
    , m_firstLazyFunction(0)
    , m_lastLazyFunction(0)
    , m_globalData(scopeChain->globalData)
    , m_lastOpcodeID(op_end)
#ifndef NDEBUG
    , m_lastOpcodePosition(0)
#endif
    , m_stack(wtfThreadData().stack())
    , m_usesExceptions(false)
    , m_expressionTooDeep(false)
{
    m_globalData->startedCompiling(m_codeBlock); // Paired with finishedCompiling() in the destructor.
    if (m_shouldEmitDebugHooks)
        m_codeBlock->setNeedsFullScopeChain(true);

    codeBlock->setGlobalData(m_globalData);

    emitOpcode(op_enter);
    // Functions that need a full scope chain get an anonymous register for
    // their (lazily created) activation.
    if (m_codeBlock->needsFullScopeChain()) {
        m_activationRegister = addVar();
        emitInitLazyRegister(m_activationRegister);
        m_codeBlock->setActivationRegister(m_activationRegister->index());
    }

    // Both op_tear_off_activation and op_tear_off_arguments tear off the 'arguments'
    // object, if created.
    if (m_codeBlock->needsFullScopeChain() || functionBody->usesArguments()) {
        RegisterID* unmodifiedArgumentsRegister = addVar(); // Anonymous, so it can't be modified by user code.
        RegisterID* argumentsRegister = addVar(propertyNames().arguments, false); // Can be changed by assigning to 'arguments'.

        // We can save a little space by hard-coding the knowledge that the two
        // 'arguments' values are stored in consecutive registers, and storing
        // only the index of the assignable one.
        codeBlock->setArgumentsRegister(argumentsRegister->index());
        ASSERT_UNUSED(unmodifiedArgumentsRegister, unmodifiedArgumentsRegister->index() == JSC::unmodifiedArgumentsRegister(codeBlock->argumentsRegister()));

        emitInitLazyRegister(argumentsRegister);
        emitInitLazyRegister(unmodifiedArgumentsRegister);

        // Strict mode forces eager creation of the arguments object.
        if (m_codeBlock->isStrictMode()) {
            emitOpcode(op_create_arguments);
            instructions().append(argumentsRegister->index());
        }

        // The debugger currently retrieves the arguments object from an activation rather than pulling
        // it from a call frame. In the long-term it should stop doing that (<rdar://problem/6911886>),
        // but for now we force eager creation of the arguments object when debugging.
        if (m_shouldEmitDebugHooks) {
            emitOpcode(op_create_arguments);
            instructions().append(argumentsRegister->index());
        }
    }

    const DeclarationStacks::FunctionStack& functionStack = functionBody->functionStack();
    const DeclarationStacks::VarStack& varStack = functionBody->varStack();

    // Captured variables and functions go first so that activations don't have
    // to step over the non-captured locals to mark them.
    m_hasCreatedActivation = false;
    if (functionBody->hasCapturedVariables()) {
        // Captured function declarations: creating one requires the
        // activation, so emit op_create_activation before the first.
        for (size_t i = 0; i < functionStack.size(); ++i) {
            FunctionBodyNode* function = functionStack[i];
            const Identifier& ident = function->ident();
            if (functionBody->captures(ident)) {
                if (!m_hasCreatedActivation) {
                    m_hasCreatedActivation = true;
                    emitOpcode(op_create_activation);
                    instructions().append(m_activationRegister->index());
                }
                m_functions.add(ident.impl());
                emitNewFunction(addVar(ident, false), function);
            }
        }
        // Captured vars just need registers.
        for (size_t i = 0; i < varStack.size(); ++i) {
            const Identifier& ident = *varStack[i].first;
            if (functionBody->captures(ident))
                addVar(ident, varStack[i].second & DeclarationStacks::IsConstant);
        }
    }
    bool canLazilyCreateFunctions = !functionBody->needsActivationForMoreThanVariables() && !m_shouldEmitDebugHooks;
    // If functions can't be created lazily, the activation must exist up front.
    if (!canLazilyCreateFunctions && !m_hasCreatedActivation) {
        m_hasCreatedActivation = true;
        emitOpcode(op_create_activation);
        instructions().append(m_activationRegister->index());
    }

    // Everything allocated so far is captured; registers from here on are not.
    codeBlock->m_numCapturedVars = codeBlock->m_numVars;
    m_firstLazyFunction = codeBlock->m_numVars;
    // Non-captured function declarations: created lazily when possible
    // (see createLazyRegisterIfNecessary), eagerly otherwise.
    for (size_t i = 0; i < functionStack.size(); ++i) {
        FunctionBodyNode* function = functionStack[i];
        const Identifier& ident = function->ident();
        if (!functionBody->captures(ident)) {
            m_functions.add(ident.impl());
            RefPtr<RegisterID> reg = addVar(ident, false);
            // Don't lazily create functions that override the name 'arguments'
            // as this would complicate lazy instantiation of actual arguments.
            if (!canLazilyCreateFunctions || ident == propertyNames().arguments)
                emitNewFunction(reg.get(), function);
            else {
                emitInitLazyRegister(reg.get());
                m_lazyFunctions.set(reg->index(), function);
            }
        }
    }
    m_lastLazyFunction = canLazilyCreateFunctions ? codeBlock->m_numVars : m_firstLazyFunction;
    // Non-captured vars.
    for (size_t i = 0; i < varStack.size(); ++i) {
        const Identifier& ident = *varStack[i].first;
        if (!functionBody->captures(ident))
            addVar(ident, varStack[i].second & DeclarationStacks::IsConstant);
    }

    // When debugging, treat every var as captured (the debugger reads them
    // through the activation).
    if (m_shouldEmitDebugHooks)
        codeBlock->m_numCapturedVars = codeBlock->m_numVars;

    FunctionParameters& parameters = *functionBody->parameters();
    m_parameters.grow(parameters.size() + 1); // reserve space for "this"

    // Add "this" as a parameter
    int nextParameterIndex = CallFrame::thisArgumentOffset();
    m_thisRegister.setIndex(nextParameterIndex--);
    m_codeBlock->addParameter();

    // Parameter registers grow downward from the 'this' slot.
    for (size_t i = 0; i < parameters.size(); ++i)
        addParameter(parameters[i], nextParameterIndex--);

    preserveLastVar();

    if (isConstructor()) {
        // Constructors allocate 'this' themselves from the callee's
        // .prototype before the body runs.
        RefPtr<RegisterID> func = newTemporary();
        RefPtr<RegisterID> funcProto = newTemporary();

        emitOpcode(op_get_callee);
        instructions().append(func->index());
        // Load prototype.
        emitGetById(funcProto.get(), func.get(), globalData()->propertyNames->prototype);

        emitOpcode(op_create_this);
        instructions().append(m_thisRegister.index());
        instructions().append(funcProto->index());
    } else if (!codeBlock->isStrictMode() && (functionBody->usesThis() || codeBlock->usesEval() || m_shouldEmitDebugHooks)) {
        // Sloppy-mode functions that observe 'this' need it converted first.
        emitOpcode(op_convert_this);
        instructions().append(m_thisRegister.index());
    }
}
426
// Constructor for eval code. Eval code allocates no local registers for its
// declarations; function declarations are recorded on the code block and
// var names are adopted as a list for the runtime to resolve.
BytecodeGenerator::BytecodeGenerator(EvalNode* evalNode, ScopeChainNode* scopeChain, SymbolTable* symbolTable, EvalCodeBlock* codeBlock, CompilationKind)
    : m_shouldEmitDebugHooks(scopeChain->globalObject->debugger())
    , m_shouldEmitProfileHooks(scopeChain->globalObject->globalObjectMethodTable()->supportsProfiling(scopeChain->globalObject.get()))
    , m_shouldEmitRichSourceInfo(scopeChain->globalObject->globalObjectMethodTable()->supportsRichSourceInfo(scopeChain->globalObject.get()))
    , m_scopeChain(*scopeChain->globalData, scopeChain)
    , m_symbolTable(symbolTable)
    , m_scopeNode(evalNode)
    , m_codeBlock(codeBlock)
    , m_thisRegister(CallFrame::thisArgumentOffset())
    , m_finallyDepth(0)
    , m_dynamicScopeDepth(0)
    , m_baseScopeDepth(codeBlock->baseScopeDepth()) // Eval inherits the depth of the scope it runs in.
    , m_codeType(EvalCode)
    , m_nextConstantOffset(0)
    , m_globalConstantIndex(0)
    , m_hasCreatedActivation(true) // Eval code never creates an activation.
    , m_firstLazyFunction(0)
    , m_lastLazyFunction(0)
    , m_globalData(scopeChain->globalData)
    , m_lastOpcodeID(op_end)
#ifndef NDEBUG
    , m_lastOpcodePosition(0)
#endif
    , m_stack(wtfThreadData().stack())
    , m_usesExceptions(false)
    , m_expressionTooDeep(false)
{
    m_globalData->startedCompiling(m_codeBlock); // Paired with finishedCompiling() in the destructor.
    if (m_shouldEmitDebugHooks || m_baseScopeDepth)
        m_codeBlock->setNeedsFullScopeChain(true);

    emitOpcode(op_enter);
    codeBlock->setGlobalData(m_globalData);
    m_codeBlock->setNumParameters(1); // Space for "this" only.

    // Record function declarations on the code block.
    const DeclarationStacks::FunctionStack& functionStack = evalNode->functionStack();
    for (size_t i = 0; i < functionStack.size(); ++i)
        m_codeBlock->addFunctionDecl(makeFunction(m_globalData, functionStack[i]));

    // Hand the var names to the code block as a flat list.
    const DeclarationStacks::VarStack& varStack = evalNode->varStack();
    unsigned numVariables = varStack.size();
    Vector<Identifier> variables;
    variables.reserveCapacity(numVariables);
    for (size_t i = 0; i < numVariables; ++i)
        variables.append(*varStack[i].first);
    codeBlock->adoptVariables(variables);
    codeBlock->m_numCapturedVars = codeBlock->m_numVars;
    preserveLastVar();
}
476
BytecodeGenerator::~BytecodeGenerator()
{
    // Pairs with startedCompiling() in each constructor.
    m_globalData->finishedCompiling(m_codeBlock);
}
481
482RegisterID* BytecodeGenerator::emitInitLazyRegister(RegisterID* reg)
483{
484 emitOpcode(op_init_lazy_reg);
485 instructions().append(reg->index());
486 return reg;
487}
488
489void BytecodeGenerator::addParameter(const Identifier& ident, int parameterIndex)
490{
491 // Parameters overwrite var declarations, but not function declarations.
492 StringImpl* rep = ident.impl();
493 if (!m_functions.contains(rep)) {
494 symbolTable().set(rep, parameterIndex);
495 RegisterID& parameter = registerFor(parameterIndex);
496 parameter.setIndex(parameterIndex);
497 }
498
499 // To maintain the calling convention, we have to allocate unique space for
500 // each parameter, even if the parameter doesn't make it into the symbol table.
501 m_codeBlock->addParameter();
502}
503
504RegisterID* BytecodeGenerator::registerFor(const Identifier& ident)
505{
506 if (ident == propertyNames().thisIdentifier)
507 return &m_thisRegister;
508
509 if (m_codeType == GlobalCode)
510 return 0;
511
512 if (!shouldOptimizeLocals())
513 return 0;
514
515 SymbolTableEntry entry = symbolTable().get(ident.impl());
516 if (entry.isNull())
517 return 0;
518
519 if (ident == propertyNames().arguments)
520 createArgumentsIfNecessary();
521
522 return createLazyRegisterIfNecessary(&registerFor(entry.getIndex()));
523}
524
525RegisterID* BytecodeGenerator::constRegisterFor(const Identifier& ident)
526{
527 if (m_codeType == EvalCode)
528 return 0;
529
530 if (m_codeType == GlobalCode)
531 return 0;
532
533 SymbolTableEntry entry = symbolTable().get(ident.impl());
534 if (entry.isNull())
535 return 0;
536
537 return createLazyRegisterIfNecessary(&registerFor(entry.getIndex()));
538}
539
540bool BytecodeGenerator::willResolveToArguments(const Identifier& ident)
541{
542 if (ident != propertyNames().arguments)
543 return false;
544
545 if (!shouldOptimizeLocals())
546 return false;
547
548 SymbolTableEntry entry = symbolTable().get(ident.impl());
549 if (entry.isNull())
550 return false;
551
552 if (m_codeBlock->usesArguments() && m_codeType == FunctionCode)
553 return true;
554
555 return false;
556}
557
558RegisterID* BytecodeGenerator::uncheckedRegisterForArguments()
559{
560 ASSERT(willResolveToArguments(propertyNames().arguments));
561
562 SymbolTableEntry entry = symbolTable().get(propertyNames().arguments.impl());
563 ASSERT(!entry.isNull());
564 return &registerFor(entry.getIndex());
565}
566
567RegisterID* BytecodeGenerator::createLazyRegisterIfNecessary(RegisterID* reg)
568{
569 if (m_lastLazyFunction <= reg->index() || reg->index() < m_firstLazyFunction)
570 return reg;
571 emitLazyNewFunction(reg, m_lazyFunctions.get(reg->index()));
572 return reg;
573}
574
575bool BytecodeGenerator::isLocal(const Identifier& ident)
576{
577 if (ident == propertyNames().thisIdentifier)
578 return true;
579
580 return shouldOptimizeLocals() && symbolTable().contains(ident.impl());
581}
582
// Returns true if 'ident' names a read-only (const-declared) local.
// Note: unlike isLocal(), this consults the symbol table unconditionally.
bool BytecodeGenerator::isLocalConstant(const Identifier& ident)
{
    return symbolTable().get(ident.impl()).isReadOnly();
}
587
588RegisterID* BytecodeGenerator::newRegister()
589{
590 m_calleeRegisters.append(m_calleeRegisters.size());
591 m_codeBlock->m_numCalleeRegisters = max<int>(m_codeBlock->m_numCalleeRegisters, m_calleeRegisters.size());
592 return &m_calleeRegisters.last();
593}
594
595RegisterID* BytecodeGenerator::newTemporary()
596{
597 // Reclaim free register IDs.
598 while (m_calleeRegisters.size() && !m_calleeRegisters.last().refCount())
599 m_calleeRegisters.removeLast();
600
601 RegisterID* result = newRegister();
602 result->setTemporary();
603 return result;
604}
605
606RegisterID* BytecodeGenerator::highestUsedRegister()
607{
608 size_t count = m_codeBlock->m_numCalleeRegisters;
609 while (m_calleeRegisters.size() < count)
610 newRegister();
611 return &m_calleeRegisters.last();
612}
613
614PassRefPtr<LabelScope> BytecodeGenerator::newLabelScope(LabelScope::Type type, const Identifier* name)
615{
616 // Reclaim free label scopes.
617 while (m_labelScopes.size() && !m_labelScopes.last().refCount())
618 m_labelScopes.removeLast();
619
620 // Allocate new label scope.
621 LabelScope scope(type, name, scopeDepth(), newLabel(), type == LabelScope::Loop ? newLabel() : PassRefPtr<Label>()); // Only loops have continue targets.
622 m_labelScopes.append(scope);
623 return &m_labelScopes.last();
624}
625
// Allocates a fresh Label bound to this generator, reclaiming unreferenced
// labels from the tail of m_labels first so storage is reused.
PassRefPtr<Label> BytecodeGenerator::newLabel()
{
    // Reclaim free label IDs.
    while (m_labels.size() && !m_labels.last().refCount())
        m_labels.removeLast();

    // Allocate new label ID.
    m_labels.append(this);
    return &m_labels.last();
}
636
637PassRefPtr<Label> BytecodeGenerator::emitLabel(Label* l0)
638{
639 unsigned newLabelIndex = instructions().size();
640 l0->setLocation(newLabelIndex);
641
642 if (m_codeBlock->numberOfJumpTargets()) {
643 unsigned lastLabelIndex = m_codeBlock->lastJumpTarget();
644 ASSERT(lastLabelIndex <= newLabelIndex);
645 if (newLabelIndex == lastLabelIndex) {
646 // Peephole optimizations have already been disabled by emitting the last label
647 return l0;
648 }
649 }
650
651 m_codeBlock->addJumpTarget(newLabelIndex);
652
653 // This disables peephole optimizations when an instruction is a jump target
654 m_lastOpcodeID = op_end;
655 return l0;
656}
657
// Appends a single opcode to the instruction stream and records it in
// m_lastOpcodeID for peephole matching by the emitJumpIf* helpers.
void BytecodeGenerator::emitOpcode(OpcodeID opcodeID)
{
#ifndef NDEBUG
    size_t opcodePosition = instructions().size();
    // The previous opcode must have been fully emitted (opcode plus all of
    // its operands) — unless peephole state was deliberately reset to op_end.
    ASSERT(opcodePosition - m_lastOpcodePosition == opcodeLength(m_lastOpcodeID) || m_lastOpcodeID == op_end);
    m_lastOpcodePosition = opcodePosition;
#endif
    instructions().append(globalData()->interpreter->getOpcode(opcodeID));
    m_lastOpcodeID = opcodeID;
}
668
// Like emitOpcode(), but also allocates a ValueProfile slot for the
// instruction when value profiling is compiled in; returns 0 otherwise.
ValueProfile* BytecodeGenerator::emitProfiledOpcode(OpcodeID opcodeID)
{
#if ENABLE(VALUE_PROFILER)
    ValueProfile* result = m_codeBlock->addValueProfile(instructions().size());
#else
    ValueProfile* result = 0;
#endif
    emitOpcode(opcodeID);
    return result;
}
679
// Emits op_loop_hint when the DFG JIT is compiled in; no bytecode is
// emitted otherwise.
void BytecodeGenerator::emitLoopHint()
{
#if ENABLE(DFG_JIT)
    emitOpcode(op_loop_hint);
#endif
}
686
687void BytecodeGenerator::retrieveLastBinaryOp(int& dstIndex, int& src1Index, int& src2Index)
688{
689 ASSERT(instructions().size() >= 4);
690 size_t size = instructions().size();
691 dstIndex = instructions().at(size - 3).u.operand;
692 src1Index = instructions().at(size - 2).u.operand;
693 src2Index = instructions().at(size - 1).u.operand;
694}
695
696void BytecodeGenerator::retrieveLastUnaryOp(int& dstIndex, int& srcIndex)
697{
698 ASSERT(instructions().size() >= 3);
699 size_t size = instructions().size();
700 dstIndex = instructions().at(size - 2).u.operand;
701 srcIndex = instructions().at(size - 1).u.operand;
702}
703
// Removes the most recently emitted binary op (opcode + 3 operands = 4
// slots) and resets peephole state so nothing else matches against it.
void ALWAYS_INLINE BytecodeGenerator::rewindBinaryOp()
{
    ASSERT(instructions().size() >= 4);
    instructions().shrink(instructions().size() - 4);
    m_lastOpcodeID = op_end;
}
710
// Removes the most recently emitted unary op (opcode + 2 operands = 3
// slots) and resets peephole state so nothing else matches against it.
void ALWAYS_INLINE BytecodeGenerator::rewindUnaryOp()
{
    ASSERT(instructions().size() >= 3);
    instructions().shrink(instructions().size() - 3);
    m_lastOpcodeID = op_end;
}
717
718PassRefPtr<Label> BytecodeGenerator::emitJump(Label* target)
719{
720 size_t begin = instructions().size();
721 emitOpcode(target->isForward() ? op_jmp : op_loop);
722 instructions().append(target->bind(begin, instructions().size()));
723 return target;
724}
725
726PassRefPtr<Label> BytecodeGenerator::emitJumpIfTrue(RegisterID* cond, Label* target)
727{
728 if (m_lastOpcodeID == op_less) {
729 int dstIndex;
730 int src1Index;
731 int src2Index;
732
733 retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
734
735 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
736 rewindBinaryOp();
737
738 size_t begin = instructions().size();
739 emitOpcode(target->isForward() ? op_jless : op_loop_if_less);
740 instructions().append(src1Index);
741 instructions().append(src2Index);
742 instructions().append(target->bind(begin, instructions().size()));
743 return target;
744 }
745 } else if (m_lastOpcodeID == op_lesseq) {
746 int dstIndex;
747 int src1Index;
748 int src2Index;
749
750 retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
751
752 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
753 rewindBinaryOp();
754
755 size_t begin = instructions().size();
756 emitOpcode(target->isForward() ? op_jlesseq : op_loop_if_lesseq);
757 instructions().append(src1Index);
758 instructions().append(src2Index);
759 instructions().append(target->bind(begin, instructions().size()));
760 return target;
761 }
762 } else if (m_lastOpcodeID == op_greater) {
763 int dstIndex;
764 int src1Index;
765 int src2Index;
766
767 retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
768
769 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
770 rewindBinaryOp();
771
772 size_t begin = instructions().size();
773 emitOpcode(target->isForward() ? op_jgreater : op_loop_if_greater);
774 instructions().append(src1Index);
775 instructions().append(src2Index);
776 instructions().append(target->bind(begin, instructions().size()));
777 return target;
778 }
779 } else if (m_lastOpcodeID == op_greatereq) {
780 int dstIndex;
781 int src1Index;
782 int src2Index;
783
784 retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
785
786 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
787 rewindBinaryOp();
788
789 size_t begin = instructions().size();
790 emitOpcode(target->isForward() ? op_jgreatereq : op_loop_if_greatereq);
791 instructions().append(src1Index);
792 instructions().append(src2Index);
793 instructions().append(target->bind(begin, instructions().size()));
794 return target;
795 }
796 } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
797 int dstIndex;
798 int srcIndex;
799
800 retrieveLastUnaryOp(dstIndex, srcIndex);
801
802 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
803 rewindUnaryOp();
804
805 size_t begin = instructions().size();
806 emitOpcode(op_jeq_null);
807 instructions().append(srcIndex);
808 instructions().append(target->bind(begin, instructions().size()));
809 return target;
810 }
811 } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
812 int dstIndex;
813 int srcIndex;
814
815 retrieveLastUnaryOp(dstIndex, srcIndex);
816
817 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
818 rewindUnaryOp();
819
820 size_t begin = instructions().size();
821 emitOpcode(op_jneq_null);
822 instructions().append(srcIndex);
823 instructions().append(target->bind(begin, instructions().size()));
824 return target;
825 }
826 }
827
828 size_t begin = instructions().size();
829
830 emitOpcode(target->isForward() ? op_jtrue : op_loop_if_true);
831 instructions().append(cond->index());
832 instructions().append(target->bind(begin, instructions().size()));
833 return target;
834}
835
836PassRefPtr<Label> BytecodeGenerator::emitJumpIfFalse(RegisterID* cond, Label* target)
837{
838 if (m_lastOpcodeID == op_less && target->isForward()) {
839 int dstIndex;
840 int src1Index;
841 int src2Index;
842
843 retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
844
845 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
846 rewindBinaryOp();
847
848 size_t begin = instructions().size();
849 emitOpcode(op_jnless);
850 instructions().append(src1Index);
851 instructions().append(src2Index);
852 instructions().append(target->bind(begin, instructions().size()));
853 return target;
854 }
855 } else if (m_lastOpcodeID == op_lesseq && target->isForward()) {
856 int dstIndex;
857 int src1Index;
858 int src2Index;
859
860 retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
861
862 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
863 rewindBinaryOp();
864
865 size_t begin = instructions().size();
866 emitOpcode(op_jnlesseq);
867 instructions().append(src1Index);
868 instructions().append(src2Index);
869 instructions().append(target->bind(begin, instructions().size()));
870 return target;
871 }
872 } else if (m_lastOpcodeID == op_greater && target->isForward()) {
873 int dstIndex;
874 int src1Index;
875 int src2Index;
876
877 retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
878
879 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
880 rewindBinaryOp();
881
882 size_t begin = instructions().size();
883 emitOpcode(op_jngreater);
884 instructions().append(src1Index);
885 instructions().append(src2Index);
886 instructions().append(target->bind(begin, instructions().size()));
887 return target;
888 }
889 } else if (m_lastOpcodeID == op_greatereq && target->isForward()) {
890 int dstIndex;
891 int src1Index;
892 int src2Index;
893
894 retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
895
896 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
897 rewindBinaryOp();
898
899 size_t begin = instructions().size();
900 emitOpcode(op_jngreatereq);
901 instructions().append(src1Index);
902 instructions().append(src2Index);
903 instructions().append(target->bind(begin, instructions().size()));
904 return target;
905 }
906 } else if (m_lastOpcodeID == op_not) {
907 int dstIndex;
908 int srcIndex;
909
910 retrieveLastUnaryOp(dstIndex, srcIndex);
911
912 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
913 rewindUnaryOp();
914
915 size_t begin = instructions().size();
916 emitOpcode(target->isForward() ? op_jtrue : op_loop_if_true);
917 instructions().append(srcIndex);
918 instructions().append(target->bind(begin, instructions().size()));
919 return target;
920 }
921 } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
922 int dstIndex;
923 int srcIndex;
924
925 retrieveLastUnaryOp(dstIndex, srcIndex);
926
927 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
928 rewindUnaryOp();
929
930 size_t begin = instructions().size();
931 emitOpcode(op_jneq_null);
932 instructions().append(srcIndex);
933 instructions().append(target->bind(begin, instructions().size()));
934 return target;
935 }
936 } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
937 int dstIndex;
938 int srcIndex;
939
940 retrieveLastUnaryOp(dstIndex, srcIndex);
941
942 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
943 rewindUnaryOp();
944
945 size_t begin = instructions().size();
946 emitOpcode(op_jeq_null);
947 instructions().append(srcIndex);
948 instructions().append(target->bind(begin, instructions().size()));
949 return target;
950 }
951 }
952
953 size_t begin = instructions().size();
954 emitOpcode(target->isForward() ? op_jfalse : op_loop_if_false);
955 instructions().append(cond->index());
956 instructions().append(target->bind(begin, instructions().size()));
957 return target;
958}
959
960PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionCall(RegisterID* cond, Label* target)
961{
962 size_t begin = instructions().size();
963
964 emitOpcode(op_jneq_ptr);
965 instructions().append(cond->index());
966 instructions().append(Instruction(*m_globalData, m_codeBlock->ownerExecutable(), m_scopeChain->globalObject->callFunction()));
967 instructions().append(target->bind(begin, instructions().size()));
968 return target;
969}
970
971PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionApply(RegisterID* cond, Label* target)
972{
973 size_t begin = instructions().size();
974
975 emitOpcode(op_jneq_ptr);
976 instructions().append(cond->index());
977 instructions().append(Instruction(*m_globalData, m_codeBlock->ownerExecutable(), m_scopeChain->globalObject->applyFunction()));
978 instructions().append(target->bind(begin, instructions().size()));
979 return target;
980}
981
982unsigned BytecodeGenerator::addConstant(const Identifier& ident)
983{
984 StringImpl* rep = ident.impl();
985 IdentifierMap::AddResult result = m_identifierMap.add(rep, m_codeBlock->numberOfIdentifiers());
986 if (result.isNewEntry)
987 m_codeBlock->addIdentifier(Identifier(m_globalData, rep));
988
989 return result.iterator->second;
990}
991
992RegisterID* BytecodeGenerator::addConstantValue(JSValue v)
993{
994 int index = m_nextConstantOffset;
995
996 JSValueMap::AddResult result = m_jsValueMap.add(JSValue::encode(v), m_nextConstantOffset);
997 if (result.isNewEntry) {
998 m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset);
999 ++m_nextConstantOffset;
1000 m_codeBlock->addConstant(JSValue(v));
1001 } else
1002 index = result.iterator->second;
1003
1004 return &m_constantPoolRegisters[index];
1005}
1006
// Registers 'r' with the code block and returns its regexp-table index.
unsigned BytecodeGenerator::addRegExp(RegExp* r)
{
    return m_codeBlock->addRegExp(r);
}
1011
1012RegisterID* BytecodeGenerator::emitMove(RegisterID* dst, RegisterID* src)
1013{
1014 emitOpcode(op_mov);
1015 instructions().append(dst->index());
1016 instructions().append(src->index());
1017 return dst;
1018}
1019
1020RegisterID* BytecodeGenerator::emitUnaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src)
1021{
1022 emitOpcode(opcodeID);
1023 instructions().append(dst->index());
1024 instructions().append(src->index());
1025 return dst;
1026}
1027
// pre_inc srcDst: the single operand register is both input and result.
RegisterID* BytecodeGenerator::emitPreInc(RegisterID* srcDst)
{
    emitOpcode(op_pre_inc);
    instructions().append(srcDst->index());
    return srcDst;
}
1034
// pre_dec srcDst: the single operand register is both input and result.
RegisterID* BytecodeGenerator::emitPreDec(RegisterID* srcDst)
{
    emitOpcode(op_pre_dec);
    instructions().append(srcDst->index());
    return srcDst;
}
1041
1042RegisterID* BytecodeGenerator::emitPostInc(RegisterID* dst, RegisterID* srcDst)
1043{
1044 emitOpcode(op_post_inc);
1045 instructions().append(dst->index());
1046 instructions().append(srcDst->index());
1047 return dst;
1048}
1049
1050RegisterID* BytecodeGenerator::emitPostDec(RegisterID* dst, RegisterID* srcDst)
1051{
1052 emitOpcode(op_post_dec);
1053 instructions().append(dst->index());
1054 instructions().append(srcDst->index());
1055 return dst;
1056}
1057
1058RegisterID* BytecodeGenerator::emitBinaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2, OperandTypes types)
1059{
1060 emitOpcode(opcodeID);
1061 instructions().append(dst->index());
1062 instructions().append(src1->index());
1063 instructions().append(src2->index());
1064
1065 if (opcodeID == op_bitor || opcodeID == op_bitand || opcodeID == op_bitxor ||
1066 opcodeID == op_add || opcodeID == op_mul || opcodeID == op_sub || opcodeID == op_div)
1067 instructions().append(types.toInt());
1068
1069 return dst;
1070}
1071
1072RegisterID* BytecodeGenerator::emitEqualityOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2)
1073{
1074 if (m_lastOpcodeID == op_typeof) {
1075 int dstIndex;
1076 int srcIndex;
1077
1078 retrieveLastUnaryOp(dstIndex, srcIndex);
1079
1080 if (src1->index() == dstIndex
1081 && src1->isTemporary()
1082 && m_codeBlock->isConstantRegisterIndex(src2->index())
1083 && m_codeBlock->constantRegister(src2->index()).get().isString()) {
1084 const UString& value = asString(m_codeBlock->constantRegister(src2->index()).get())->tryGetValue();
1085 if (value == "undefined") {
1086 rewindUnaryOp();
1087 emitOpcode(op_is_undefined);
1088 instructions().append(dst->index());
1089 instructions().append(srcIndex);
1090 return dst;
1091 }
1092 if (value == "boolean") {
1093 rewindUnaryOp();
1094 emitOpcode(op_is_boolean);
1095 instructions().append(dst->index());
1096 instructions().append(srcIndex);
1097 return dst;
1098 }
1099 if (value == "number") {
1100 rewindUnaryOp();
1101 emitOpcode(op_is_number);
1102 instructions().append(dst->index());
1103 instructions().append(srcIndex);
1104 return dst;
1105 }
1106 if (value == "string") {
1107 rewindUnaryOp();
1108 emitOpcode(op_is_string);
1109 instructions().append(dst->index());
1110 instructions().append(srcIndex);
1111 return dst;
1112 }
1113 if (value == "object") {
1114 rewindUnaryOp();
1115 emitOpcode(op_is_object);
1116 instructions().append(dst->index());
1117 instructions().append(srcIndex);
1118 return dst;
1119 }
1120 if (value == "function") {
1121 rewindUnaryOp();
1122 emitOpcode(op_is_function);
1123 instructions().append(dst->index());
1124 instructions().append(srcIndex);
1125 return dst;
1126 }
1127 }
1128 }
1129
1130 emitOpcode(opcodeID);
1131 instructions().append(dst->index());
1132 instructions().append(src1->index());
1133 instructions().append(src2->index());
1134 return dst;
1135}
1136
1137RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, bool b)
1138{
1139 return emitLoad(dst, jsBoolean(b));
1140}
1141
1142RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, double number)
1143{
1144 // FIXME: Our hash tables won't hold infinity, so we make a new JSValue each time.
1145 // Later we can do the extra work to handle that like the other cases. They also don't
1146 // work correctly with NaN as a key.
1147 if (isnan(number) || number == HashTraits<double>::emptyValue() || HashTraits<double>::isDeletedValue(number))
1148 return emitLoad(dst, jsNumber(number));
1149 JSValue& valueInMap = m_numberMap.add(number, JSValue()).iterator->second;
1150 if (!valueInMap)
1151 valueInMap = jsNumber(number);
1152 return emitLoad(dst, valueInMap);
1153}
1154
1155RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, const Identifier& identifier)
1156{
1157 JSString*& stringInMap = m_stringMap.add(identifier.impl(), 0).iterator->second;
1158 if (!stringInMap)
1159 stringInMap = jsOwnedString(globalData(), identifier.ustring());
1160 return emitLoad(dst, JSValue(stringInMap));
1161}
1162
1163RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, JSValue v)
1164{
1165 RegisterID* constantID = addConstantValue(v);
1166 if (dst)
1167 return emitMove(dst, constantID);
1168 return constantID;
1169}
1170
// Walks the scope chain looking for a statically-resolvable slot for
// 'property'. On a positive result, 'stackDepth' and 'index' identify the
// slot; 'globalObject' is additionally set when the search ended at the
// outermost (last) scope. Returns false when the lookup must stay dynamic.
bool BytecodeGenerator::findScopedProperty(const Identifier& property, int& index, size_t& stackDepth, bool forWriting, bool& requiresDynamicChecks, JSObject*& globalObject)
{
    // Cases where we cannot statically optimize the lookup.
    if (property == propertyNames().arguments || !canOptimizeNonLocals()) {
        stackDepth = 0;
        index = missingSymbolMarker();

        // For global code we can at least report the global object as a base.
        if (shouldOptimizeLocals() && m_codeType == GlobalCode) {
            ScopeChainIterator iter = m_scopeChain->begin();
            globalObject = iter->get();
            ASSERT((++iter) == m_scopeChain->end());
        }
        return false;
    }

    size_t depth = 0;
    requiresDynamicChecks = false;
    ScopeChainIterator iter = m_scopeChain->begin();
    ScopeChainIterator end = m_scopeChain->end();
    for (; iter != end; ++iter, ++depth) {
        JSObject* currentScope = iter->get();
        // A non-variable-object scope ends the static walk.
        if (!currentScope->isVariableObject())
            break;
        JSVariableObject* currentVariableObject = jsCast<JSVariableObject*>(currentScope);
        SymbolTableEntry entry = currentVariableObject->symbolTable().get(property.impl());

        // Found the property
        if (!entry.isNull()) {
            // Writes to read-only slots cannot be optimized; fall back to
            // the dynamic path, but still report the scope if it was last.
            if (entry.isReadOnly() && forWriting) {
                stackDepth = 0;
                index = missingSymbolMarker();
                if (++iter == end)
                    globalObject = currentVariableObject;
                return false;
            }
            stackDepth = depth + m_codeBlock->needsFullScopeChain();
            index = entry.getIndex();
            // '++iter == end' means this scope was the outermost one.
            if (++iter == end)
                globalObject = currentVariableObject;
            return true;
        }
        bool scopeRequiresDynamicChecks = false;
        if (currentVariableObject->isDynamicScope(scopeRequiresDynamicChecks))
            break;
        requiresDynamicChecks |= scopeRequiresDynamicChecks;
    }
    // Can't locate the property but we're able to avoid a few lookups.
    stackDepth = depth + m_codeBlock->needsFullScopeChain();
    index = missingSymbolMarker();
    JSObject* scope = iter->get();
    if (++iter == end)
        globalObject = scope;
    return true;
}
1225
// Emits check_has_instance on 'base' (paired with op_instanceof emission).
void BytecodeGenerator::emitCheckHasInstance(RegisterID* base)
{
    emitOpcode(op_check_has_instance);
    instructions().append(base->index());
}
1231
1232RegisterID* BytecodeGenerator::emitInstanceOf(RegisterID* dst, RegisterID* value, RegisterID* base, RegisterID* basePrototype)
1233{
1234 emitOpcode(op_instanceof);
1235 instructions().append(dst->index());
1236 instructions().append(value->index());
1237 instructions().append(base->index());
1238 instructions().append(basePrototype->index());
1239 return dst;
1240}
1241
1242static const unsigned maxGlobalResolves = 128;
1243
1244bool BytecodeGenerator::shouldAvoidResolveGlobal()
1245{
1246 return m_codeBlock->globalResolveInfoCount() > maxGlobalResolves && !m_labelScopes.size();
1247}
1248
// Resolves 'property' against the scope chain, choosing the cheapest opcode
// the static analysis in findScopedProperty() permits: a direct scoped-var
// read, resolve_global(_dynamic), resolve_skip, or a fully dynamic resolve.
RegisterID* BytecodeGenerator::emitResolve(RegisterID* dst, const Identifier& property)
{
    size_t depth = 0;
    int index = 0;
    JSObject* globalObject = 0;
    bool requiresDynamicChecks = false;
    if (!findScopedProperty(property, index, depth, false, requiresDynamicChecks, globalObject) && !globalObject) {
        // We can't optimise at all :-(
        ValueProfile* profile = emitProfiledOpcode(op_resolve);
        instructions().append(dst->index());
        instructions().append(addConstant(property));
        instructions().append(profile);
        return dst;
    }
    if (shouldAvoidResolveGlobal()) {
        // Too many resolve_global sites already: force the dynamic variant.
        globalObject = 0;
        requiresDynamicChecks = true;
    }

    if (globalObject) {
        if (index != missingSymbolMarker() && !requiresDynamicChecks) {
            // Directly index the property lookup across multiple scopes.
            return emitGetScopedVar(dst, depth, index, globalObject);
        }

#if ENABLE(JIT)
        m_codeBlock->addGlobalResolveInfo(instructions().size());
#endif
        m_codeBlock->addGlobalResolveInstruction(instructions().size());
        ValueProfile* profile = emitProfiledOpcode(requiresDynamicChecks ? op_resolve_global_dynamic : op_resolve_global);
        instructions().append(dst->index());
        instructions().append(addConstant(property));
        // Two zero-initialized placeholder operands follow the property.
        instructions().append(0);
        instructions().append(0);
        if (requiresDynamicChecks)
            instructions().append(depth);
        instructions().append(profile);
        return dst;
    }

    if (requiresDynamicChecks) {
        // If we get here we have eval nested inside a |with| just give up
        ValueProfile* profile = emitProfiledOpcode(op_resolve);
        instructions().append(dst->index());
        instructions().append(addConstant(property));
        instructions().append(profile);
        return dst;
    }

    if (index != missingSymbolMarker()) {
        // Directly index the property lookup across multiple scopes.
        return emitGetScopedVar(dst, depth, index, globalObject);
    }

    // In this case we are at least able to drop a few scope chains from the
    // lookup chain, although we still need to hash from then on.
    ValueProfile* profile = emitProfiledOpcode(op_resolve_skip);
    instructions().append(dst->index());
    instructions().append(addConstant(property));
    instructions().append(depth);
    instructions().append(profile);
    return dst;
}
1312
1313RegisterID* BytecodeGenerator::emitGetScopedVar(RegisterID* dst, size_t depth, int index, JSValue globalObject)
1314{
1315 if (globalObject) {
1316 if (m_lastOpcodeID == op_put_global_var) {
1317 int dstIndex;
1318 int srcIndex;
1319 retrieveLastUnaryOp(dstIndex, srcIndex);
1320
1321 if (dstIndex == index && srcIndex == dst->index())
1322 return dst;
1323 }
1324
1325 ValueProfile* profile = emitProfiledOpcode(op_get_global_var);
1326 instructions().append(dst->index());
1327 instructions().append(index);
1328 instructions().append(profile);
1329 return dst;
1330 }
1331
1332 ValueProfile* profile = emitProfiledOpcode(op_get_scoped_var);
1333 instructions().append(dst->index());
1334 instructions().append(index);
1335 instructions().append(depth);
1336 instructions().append(profile);
1337 return dst;
1338}
1339
1340RegisterID* BytecodeGenerator::emitPutScopedVar(size_t depth, int index, RegisterID* value, JSValue globalObject)
1341{
1342 if (globalObject) {
1343 emitOpcode(op_put_global_var);
1344 instructions().append(index);
1345 instructions().append(value->index());
1346 return value;
1347 }
1348 emitOpcode(op_put_scoped_var);
1349 instructions().append(index);
1350 instructions().append(depth);
1351 instructions().append(value->index());
1352 return value;
1353}
1354
1355RegisterID* BytecodeGenerator::emitResolveBase(RegisterID* dst, const Identifier& property)
1356{
1357 size_t depth = 0;
1358 int index = 0;
1359 JSObject* globalObject = 0;
1360 bool requiresDynamicChecks = false;
1361 findScopedProperty(property, index, depth, false, requiresDynamicChecks, globalObject);
1362 if (!globalObject || requiresDynamicChecks) {
1363 // We can't optimise at all :-(
1364 ValueProfile* profile = emitProfiledOpcode(op_resolve_base);
1365 instructions().append(dst->index());
1366 instructions().append(addConstant(property));
1367 instructions().append(false);
1368 instructions().append(profile);
1369 return dst;
1370 }
1371
1372 // Global object is the base
1373 return emitLoad(dst, JSValue(globalObject));
1374}
1375
1376RegisterID* BytecodeGenerator::emitResolveBaseForPut(RegisterID* dst, const Identifier& property)
1377{
1378 if (!m_codeBlock->isStrictMode())
1379 return emitResolveBase(dst, property);
1380 size_t depth = 0;
1381 int index = 0;
1382 JSObject* globalObject = 0;
1383 bool requiresDynamicChecks = false;
1384 findScopedProperty(property, index, depth, false, requiresDynamicChecks, globalObject);
1385 if (!globalObject || requiresDynamicChecks) {
1386 // We can't optimise at all :-(
1387 ValueProfile* profile = emitProfiledOpcode(op_resolve_base);
1388 instructions().append(dst->index());
1389 instructions().append(addConstant(property));
1390 instructions().append(true);
1391 instructions().append(profile);
1392 return dst;
1393 }
1394
1395 // Global object is the base
1396 RefPtr<RegisterID> result = emitLoad(dst, JSValue(globalObject));
1397 emitOpcode(op_ensure_property_exists);
1398 instructions().append(dst->index());
1399 instructions().append(addConstant(property));
1400 return result.get();
1401}
1402
// Resolves 'property' and also produces its base object in 'baseDst'.
// Returns baseDst.
RegisterID* BytecodeGenerator::emitResolveWithBase(RegisterID* baseDst, RegisterID* propDst, const Identifier& property)
{
    size_t depth = 0;
    int index = 0;
    JSObject* globalObject = 0;
    bool requiresDynamicChecks = false;
    if (!findScopedProperty(property, index, depth, false, requiresDynamicChecks, globalObject) || !globalObject || requiresDynamicChecks) {
        // We can't optimise at all :-(
        ValueProfile* profile = emitProfiledOpcode(op_resolve_with_base);
        instructions().append(baseDst->index());
        instructions().append(propDst->index());
        instructions().append(addConstant(property));
        instructions().append(profile);
        return baseDst;
    }

    // NOTE(review): always false, and requiresDynamicChecks is also always
    // false past the early return above — the dynamic-variant paths below
    // look unreachable. Kept as-is (mirrors emitResolveWithThis).
    bool forceGlobalResolve = false;

    // Global object is the base
    emitLoad(baseDst, JSValue(globalObject));

    if (index != missingSymbolMarker() && !forceGlobalResolve) {
        // Directly index the property lookup across multiple scopes.
        emitGetScopedVar(propDst, depth, index, globalObject);
        return baseDst;
    }
    if (shouldAvoidResolveGlobal()) {
        ValueProfile* profile = emitProfiledOpcode(op_resolve);
        instructions().append(propDst->index());
        instructions().append(addConstant(property));
        instructions().append(profile);
        return baseDst;
    }
#if ENABLE(JIT)
    m_codeBlock->addGlobalResolveInfo(instructions().size());
#endif
#if ENABLE(CLASSIC_INTERPRETER)
    m_codeBlock->addGlobalResolveInstruction(instructions().size());
#endif
    ValueProfile* profile = emitProfiledOpcode(requiresDynamicChecks ? op_resolve_global_dynamic : op_resolve_global);
    instructions().append(propDst->index());
    instructions().append(addConstant(property));
    // Two zero-initialized placeholder operands follow the property.
    instructions().append(0);
    instructions().append(0);
    if (requiresDynamicChecks)
        instructions().append(depth);
    instructions().append(profile);
    return baseDst;
}
1452
// Resolves 'property' and produces the 'this' value for a call through it in
// 'baseDst' (undefined when the lookup resolves to the global scope).
// Returns baseDst.
RegisterID* BytecodeGenerator::emitResolveWithThis(RegisterID* baseDst, RegisterID* propDst, const Identifier& property)
{
    size_t depth = 0;
    int index = 0;
    JSObject* globalObject = 0;
    bool requiresDynamicChecks = false;
    if (!findScopedProperty(property, index, depth, false, requiresDynamicChecks, globalObject) || !globalObject || requiresDynamicChecks) {
        // We can't optimise at all :-(
        ValueProfile* profile = emitProfiledOpcode(op_resolve_with_this);
        instructions().append(baseDst->index());
        instructions().append(propDst->index());
        instructions().append(addConstant(property));
        instructions().append(profile);
        return baseDst;
    }

    // NOTE(review): always false, and requiresDynamicChecks is also always
    // false past the early return above — the dynamic-variant paths below
    // look unreachable. Kept as-is (mirrors emitResolveWithBase).
    bool forceGlobalResolve = false;

    // Global object is the base
    emitLoad(baseDst, jsUndefined());

    if (index != missingSymbolMarker() && !forceGlobalResolve) {
        // Directly index the property lookup across multiple scopes.
        emitGetScopedVar(propDst, depth, index, globalObject);
        return baseDst;
    }
    if (shouldAvoidResolveGlobal()) {
        ValueProfile* profile = emitProfiledOpcode(op_resolve);
        instructions().append(propDst->index());
        instructions().append(addConstant(property));
        instructions().append(profile);
        return baseDst;
    }
#if ENABLE(JIT)
    m_codeBlock->addGlobalResolveInfo(instructions().size());
#endif
#if ENABLE(CLASSIC_INTERPRETER)
    m_codeBlock->addGlobalResolveInstruction(instructions().size());
#endif
    ValueProfile* profile = emitProfiledOpcode(requiresDynamicChecks ? op_resolve_global_dynamic : op_resolve_global);
    instructions().append(propDst->index());
    instructions().append(addConstant(property));
    // Two zero-initialized placeholder operands follow the property.
    instructions().append(0);
    instructions().append(0);
    if (requiresDynamicChecks)
        instructions().append(depth);
    instructions().append(profile);
    return baseDst;
}
1502
// Emits a bare method_check marker opcode (no operands at this site).
void BytecodeGenerator::emitMethodCheck()
{
    emitOpcode(op_method_check);
}
1507
1508RegisterID* BytecodeGenerator::emitGetById(RegisterID* dst, RegisterID* base, const Identifier& property)
1509{
1510 m_codeBlock->addPropertyAccessInstruction(instructions().size());
1511
1512 ValueProfile* profile = emitProfiledOpcode(op_get_by_id);
1513 instructions().append(dst->index());
1514 instructions().append(base->index());
1515 instructions().append(addConstant(property));
1516 instructions().append(0);
1517 instructions().append(0);
1518 instructions().append(0);
1519 instructions().append(0);
1520 instructions().append(profile);
1521 return dst;
1522}
1523
1524RegisterID* BytecodeGenerator::emitGetArgumentsLength(RegisterID* dst, RegisterID* base)
1525{
1526 emitOpcode(op_get_arguments_length);
1527 instructions().append(dst->index());
1528 ASSERT(base->index() == m_codeBlock->argumentsRegister());
1529 instructions().append(base->index());
1530 instructions().append(addConstant(propertyNames().length));
1531 return dst;
1532}
1533
1534RegisterID* BytecodeGenerator::emitPutById(RegisterID* base, const Identifier& property, RegisterID* value)
1535{
1536 m_codeBlock->addPropertyAccessInstruction(instructions().size());
1537
1538 emitOpcode(op_put_by_id);
1539 instructions().append(base->index());
1540 instructions().append(addConstant(property));
1541 instructions().append(value->index());
1542 instructions().append(0);
1543 instructions().append(0);
1544 instructions().append(0);
1545 instructions().append(0);
1546 instructions().append(0);
1547 return value;
1548}
1549
1550RegisterID* BytecodeGenerator::emitDirectPutById(RegisterID* base, const Identifier& property, RegisterID* value)
1551{
1552 m_codeBlock->addPropertyAccessInstruction(instructions().size());
1553
1554 emitOpcode(op_put_by_id);
1555 instructions().append(base->index());
1556 instructions().append(addConstant(property));
1557 instructions().append(value->index());
1558 instructions().append(0);
1559 instructions().append(0);
1560 instructions().append(0);
1561 instructions().append(0);
1562 instructions().append(property != m_globalData->propertyNames->underscoreProto);
1563 return value;
1564}
1565
1566void BytecodeGenerator::emitPutGetterSetter(RegisterID* base, const Identifier& property, RegisterID* getter, RegisterID* setter)
1567{
1568 emitOpcode(op_put_getter_setter);
1569 instructions().append(base->index());
1570 instructions().append(addConstant(property));
1571 instructions().append(getter->index());
1572 instructions().append(setter->index());
1573}
1574
1575RegisterID* BytecodeGenerator::emitDeleteById(RegisterID* dst, RegisterID* base, const Identifier& property)
1576{
1577 emitOpcode(op_del_by_id);
1578 instructions().append(dst->index());
1579 instructions().append(base->index());
1580 instructions().append(addConstant(property));
1581 return dst;
1582}
1583
1584RegisterID* BytecodeGenerator::emitGetArgumentByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
1585{
1586 ValueProfile* profile = emitProfiledOpcode(op_get_argument_by_val);
1587 instructions().append(dst->index());
1588 ASSERT(base->index() == m_codeBlock->argumentsRegister());
1589 instructions().append(base->index());
1590 instructions().append(property->index());
1591 instructions().append(profile);
1592 return dst;
1593}
1594
1595RegisterID* BytecodeGenerator::emitGetByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
1596{
1597 for (size_t i = m_forInContextStack.size(); i > 0; i--) {
1598 ForInContext& context = m_forInContextStack[i - 1];
1599 if (context.propertyRegister == property) {
1600 emitOpcode(op_get_by_pname);
1601 instructions().append(dst->index());
1602 instructions().append(base->index());
1603 instructions().append(property->index());
1604 instructions().append(context.expectedSubscriptRegister->index());
1605 instructions().append(context.iterRegister->index());
1606 instructions().append(context.indexRegister->index());
1607 return dst;
1608 }
1609 }
1610 ValueProfile* profile = emitProfiledOpcode(op_get_by_val);
1611 instructions().append(dst->index());
1612 instructions().append(base->index());
1613 instructions().append(property->index());
1614 instructions().append(profile);
1615 return dst;
1616}
1617
1618RegisterID* BytecodeGenerator::emitPutByVal(RegisterID* base, RegisterID* property, RegisterID* value)
1619{
1620 emitOpcode(op_put_by_val);
1621 instructions().append(base->index());
1622 instructions().append(property->index());
1623 instructions().append(value->index());
1624 return value;
1625}
1626
1627RegisterID* BytecodeGenerator::emitDeleteByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
1628{
1629 emitOpcode(op_del_by_val);
1630 instructions().append(dst->index());
1631 instructions().append(base->index());
1632 instructions().append(property->index());
1633 return dst;
1634}
1635
1636RegisterID* BytecodeGenerator::emitPutByIndex(RegisterID* base, unsigned index, RegisterID* value)
1637{
1638 emitOpcode(op_put_by_index);
1639 instructions().append(base->index());
1640 instructions().append(index);
1641 instructions().append(value->index());
1642 return value;
1643}
1644
// new_object dst: allocate a fresh empty object into dst.
RegisterID* BytecodeGenerator::emitNewObject(RegisterID* dst)
{
    emitOpcode(op_new_object);
    instructions().append(dst->index());
    return dst;
}
1651
// Reserves a constant buffer of 'length' values in the code block and
// returns its index.
unsigned BytecodeGenerator::addConstantBuffer(unsigned length)
{
    return m_codeBlock->addConstantBuffer(length);
}
1656
1657JSString* BytecodeGenerator::addStringConstant(const Identifier& identifier)
1658{
1659 JSString*& stringInMap = m_stringMap.add(identifier.impl(), 0).iterator->second;
1660 if (!stringInMap) {
1661 stringInMap = jsString(globalData(), identifier.ustring());
1662 addConstantValue(stringInMap);
1663 }
1664 return stringInMap;
1665}
1666
// Emits an array literal. Fully-constant literals (numbers/strings only, no
// elisions) are materialized via a shared constant buffer (new_array_buffer);
// everything else evaluates each element into a fresh temporary and uses
// new_array.
RegisterID* BytecodeGenerator::emitNewArray(RegisterID* dst, ElementNode* elements, unsigned length)
{
#if !ASSERT_DISABLED
    unsigned checkLength = 0;
#endif
    bool hadVariableExpression = false;
    if (length) {
        // Scan for anything that disqualifies the constant-buffer fast path.
        for (ElementNode* n = elements; n; n = n->next()) {
            if (!n->value()->isNumber() && !n->value()->isString()) {
                hadVariableExpression = true;
                break;
            }
            if (n->elision())
                break;
#if !ASSERT_DISABLED
            checkLength++;
#endif
        }
        if (!hadVariableExpression) {
            ASSERT(length == checkLength);
            // Fill a constant buffer with the literal's values up front.
            unsigned constantBufferIndex = addConstantBuffer(length);
            JSValue* constantBuffer = m_codeBlock->constantBuffer(constantBufferIndex);
            unsigned index = 0;
            for (ElementNode* n = elements; index < length; n = n->next()) {
                if (n->value()->isNumber())
                    constantBuffer[index++] = jsNumber(static_cast<NumberNode*>(n->value())->value());
                else {
                    ASSERT(n->value()->isString());
                    constantBuffer[index++] = addStringConstant(static_cast<StringNode*>(n->value())->value());
                }
            }
            emitOpcode(op_new_array_buffer);
            instructions().append(dst->index());
            instructions().append(constantBufferIndex);
            instructions().append(length);
            return dst;
        }
    }

    // Slow path: evaluate each element into consecutive temporaries.
    Vector<RefPtr<RegisterID>, 16> argv;
    for (ElementNode* n = elements; n; n = n->next()) {
        if (n->elision())
            break;
        argv.append(newTemporary());
        // op_new_array requires the initial values to be a sequential range of registers
        ASSERT(argv.size() == 1 || argv[argv.size() - 1]->index() == argv[argv.size() - 2]->index() + 1);
        emitNode(argv.last().get(), n->value());
    }
    emitOpcode(op_new_array);
    instructions().append(dst->index());
    instructions().append(argv.size() ? argv[0]->index() : 0); // argv
    instructions().append(argv.size()); // argc
    return dst;
}
1721
// Eager function declaration: registers the function and emits new_func
// without a runtime null check.
RegisterID* BytecodeGenerator::emitNewFunction(RegisterID* dst, FunctionBodyNode* function)
{
    return emitNewFunctionInternal(dst, m_codeBlock->addFunctionDecl(makeFunction(m_globalData, function)), false);
}
1726
1727RegisterID* BytecodeGenerator::emitLazyNewFunction(RegisterID* dst, FunctionBodyNode* function)
1728{
1729 FunctionOffsetMap::AddResult ptr = m_functionOffsets.add(function, 0);
1730 if (ptr.isNewEntry)
1731 ptr.iterator->second = m_codeBlock->addFunctionDecl(makeFunction(m_globalData, function));
1732 return emitNewFunctionInternal(dst, ptr.iterator->second, true);
1733}
1734
1735RegisterID* BytecodeGenerator::emitNewFunctionInternal(RegisterID* dst, unsigned index, bool doNullCheck)
1736{
1737 createActivationIfNecessary();
1738 emitOpcode(op_new_func);
1739 instructions().append(dst->index());
1740 instructions().append(index);
1741 instructions().append(doNullCheck);
1742 return dst;
1743}
1744
1745RegisterID* BytecodeGenerator::emitNewRegExp(RegisterID* dst, RegExp* regExp)
1746{
1747 emitOpcode(op_new_regexp);
1748 instructions().append(dst->index());
1749 instructions().append(addRegExp(regExp));
1750 return dst;
1751}
1752
1753RegisterID* BytecodeGenerator::emitNewFunctionExpression(RegisterID* r0, FuncExprNode* n)
1754{
1755 FunctionBodyNode* function = n->body();
1756 unsigned index = m_codeBlock->addFunctionExpr(makeFunction(m_globalData, function));
1757
1758 createActivationIfNecessary();
1759 emitOpcode(op_new_func_exp);
1760 instructions().append(r0->index());
1761 instructions().append(index);
1762 return r0;
1763}
1764
// Convenience wrapper: an ordinary (non-eval) call.
RegisterID* BytecodeGenerator::emitCall(RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
{
    return emitCall(op_call, dst, func, callArguments, divot, startOffset, endOffset);
}
1769
1770void BytecodeGenerator::createArgumentsIfNecessary()
1771{
1772 if (m_codeType != FunctionCode)
1773 return;
1774
1775 if (!m_codeBlock->usesArguments())
1776 return;
1777
1778 // If we're in strict mode we tear off the arguments on function
1779 // entry, so there's no need to check if we need to create them
1780 // now
1781 if (m_codeBlock->isStrictMode())
1782 return;
1783
1784 emitOpcode(op_create_arguments);
1785 instructions().append(m_codeBlock->argumentsRegister());
1786}
1787
1788void BytecodeGenerator::createActivationIfNecessary()
1789{
1790 if (m_hasCreatedActivation)
1791 return;
1792 if (!m_codeBlock->needsFullScopeChain())
1793 return;
1794 emitOpcode(op_create_activation);
1795 instructions().append(m_activationRegister->index());
1796}
1797
// Convenience wrapper: a call through the eval machinery (op_call_eval).
RegisterID* BytecodeGenerator::emitCallEval(RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
{
    return emitCall(op_call_eval, dst, func, callArguments, divot, startOffset, endOffset);
}
1802
// Shared implementation for op_call / op_call_eval: evaluates arguments,
// reserves the call-frame header, emits the call (plus profiling hooks when
// enabled), and stores the result via call_put_result unless it is ignored.
RegisterID* BytecodeGenerator::emitCall(OpcodeID opcodeID, RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
{
    ASSERT(opcodeID == op_call || opcodeID == op_call_eval);
    ASSERT(func->refCount());

    if (m_shouldEmitProfileHooks)
        emitMove(callArguments.profileHookRegister(), func);

    // Generate code for arguments.
    unsigned argument = 0;
    for (ArgumentListNode* n = callArguments.argumentsNode()->m_listNode; n; n = n->m_next)
        emitNode(callArguments.argumentRegister(argument++), n);

    // Reserve space for call frame.
    // (The temporaries are held in 'callFrame' so the register allocator
    // cannot hand these slots out to anything else.)
    Vector<RefPtr<RegisterID>, RegisterFile::CallFrameHeaderSize> callFrame;
    for (int i = 0; i < RegisterFile::CallFrameHeaderSize; ++i)
        callFrame.append(newTemporary());

    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_will_call);
        instructions().append(callArguments.profileHookRegister()->index());
    }

    // Record source position for exception/debugger attribution.
    emitExpressionInfo(divot, startOffset, endOffset);

    // Emit call.
    emitOpcode(opcodeID);
    instructions().append(func->index()); // func
    instructions().append(callArguments.argumentCountIncludingThis()); // argCount
    instructions().append(callArguments.registerOffset()); // registerOffset
#if ENABLE(LLINT)
    instructions().append(m_codeBlock->addLLIntCallLinkInfo());
#else
    instructions().append(0);
#endif
    instructions().append(0);
    if (dst != ignoredResult()) {
        ValueProfile* profile = emitProfiledOpcode(op_call_put_result);
        instructions().append(dst->index()); // dst
        instructions().append(profile);
    }

    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_did_call);
        instructions().append(callArguments.profileHookRegister()->index());
    }

    return dst;
}
1852
// Emits op_call_varargs: calls 'func' with an explicit 'this' and an
// 'arguments' register whose contents supply the argument list at runtime.
// firstFreeRegister tells the runtime where it may start laying out the
// call frame. The result is stored into 'dst' via op_call_put_result,
// unless dst is ignoredResult().
RegisterID* BytecodeGenerator::emitCallVarargs(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, RegisterID* arguments, RegisterID* firstFreeRegister, RegisterID* profileHookRegister, unsigned divot, unsigned startOffset, unsigned endOffset)
{
    if (m_shouldEmitProfileHooks) {
        emitMove(profileHookRegister, func);
        emitOpcode(op_profile_will_call);
        instructions().append(profileHookRegister->index());
    }

    // Record the call site location for exception reporting.
    emitExpressionInfo(divot, startOffset, endOffset);

    // Emit call.
    emitOpcode(op_call_varargs);
    instructions().append(func->index());
    instructions().append(thisRegister->index());
    instructions().append(arguments->index());
    instructions().append(firstFreeRegister->index());
    if (dst != ignoredResult()) {
        // The call result is retrieved by a separate, value-profiled opcode.
        ValueProfile* profile = emitProfiledOpcode(op_call_put_result);
        instructions().append(dst->index());
        instructions().append(profile);
    }
    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_did_call);
        instructions().append(profileHookRegister->index());
    }
    return dst;
}
1880
// Emits a return of 'src', first tearing off the activation and/or the
// arguments object when the current function requires it so that closures
// and captured arguments outlive the call frame.
RegisterID* BytecodeGenerator::emitReturn(RegisterID* src)
{
    if (m_codeBlock->needsFullScopeChain()) {
        // Tearing off the activation also handles the arguments object.
        emitOpcode(op_tear_off_activation);
        instructions().append(m_activationRegister->index());
        instructions().append(m_codeBlock->argumentsRegister());
    } else if (m_codeBlock->usesArguments() && m_codeBlock->numParameters() != 1 && !m_codeBlock->isStrictMode()) {
        // NOTE(review): the numParameters() != 1 check presumably skips the
        // tear-off when the function has no declared parameters beyond
        // 'this' — confirm against CodeBlock::numParameters semantics.
        emitOpcode(op_tear_off_arguments);
        instructions().append(m_codeBlock->argumentsRegister());
    }

    // Constructors use op_ret_object_or_this to check the result is an
    // object, unless we can trivially determine the check is not
    // necessary (currently, if the return value is 'this').
    if (isConstructor() && (src->index() != m_thisRegister.index())) {
        emitOpcode(op_ret_object_or_this);
        instructions().append(src->index());
        instructions().append(m_thisRegister.index());
        return src;
    }
    return emitUnaryNoDstOp(op_ret, src);
}
1903
// Emits a single-operand opcode that has no destination register
// (e.g. op_ret, op_push_scope, op_throw) and returns its operand.
RegisterID* BytecodeGenerator::emitUnaryNoDstOp(OpcodeID opcodeID, RegisterID* src)
{
    emitOpcode(opcodeID);
    instructions().append(src->index());
    return src;
}
1910
// Emits op_construct ('new func(...)'). Mirrors emitCall's operand layout,
// except the argument list may be absent (a bare 'new func'). The result
// is stored into 'dst' via op_call_put_result, unless dst is
// ignoredResult().
RegisterID* BytecodeGenerator::emitConstruct(RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
{
    ASSERT(func->refCount());

    if (m_shouldEmitProfileHooks)
        emitMove(callArguments.profileHookRegister(), func);

    // Generate code for arguments. Unlike emitCall, the arguments node may
    // be null here ('new F' with no parentheses).
    unsigned argument = 0;
    if (ArgumentsNode* argumentsNode = callArguments.argumentsNode()) {
        for (ArgumentListNode* n = argumentsNode->m_listNode; n; n = n->m_next)
            emitNode(callArguments.argumentRegister(argument++), n);
    }

    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_will_call);
        instructions().append(callArguments.profileHookRegister()->index());
    }

    // Reserve space for call frame. The temporaries are only claimed so the
    // register allocator leaves CallFrameHeaderSize registers untouched.
    Vector<RefPtr<RegisterID>, RegisterFile::CallFrameHeaderSize> callFrame;
    for (int i = 0; i < RegisterFile::CallFrameHeaderSize; ++i)
        callFrame.append(newTemporary());

    emitExpressionInfo(divot, startOffset, endOffset);

    emitOpcode(op_construct);
    instructions().append(func->index()); // func
    instructions().append(callArguments.argumentCountIncludingThis()); // argCount
    instructions().append(callArguments.registerOffset()); // registerOffset
#if ENABLE(LLINT)
    instructions().append(m_codeBlock->addLLIntCallLinkInfo());
#else
    instructions().append(0);
#endif
    instructions().append(0);
    if (dst != ignoredResult()) {
        // The construct result is retrieved by a separate, value-profiled opcode.
        ValueProfile* profile = emitProfiledOpcode(op_call_put_result);
        instructions().append(dst->index()); // dst
        instructions().append(profile);
    }

    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_did_call);
        instructions().append(callArguments.profileHookRegister()->index());
    }

    return dst;
}
1960
// Emits op_strcat: concatenates 'count' consecutive registers starting at
// 'src' into 'dst'.
RegisterID* BytecodeGenerator::emitStrcat(RegisterID* dst, RegisterID* src, int count)
{
    emitOpcode(op_strcat);
    instructions().append(dst->index());
    instructions().append(src->index());
    instructions().append(count);

    return dst;
}
1970
// Emits op_to_primitive: converts the value in 'src' to a primitive and
// stores it in 'dst'.
void BytecodeGenerator::emitToPrimitive(RegisterID* dst, RegisterID* src)
{
    emitOpcode(op_to_primitive);
    instructions().append(dst->index());
    instructions().append(src->index());
}
1977
// Emits op_push_scope (for 'with' statements): pushes the object in 'scope'
// onto the scope chain, and tracks it on the generator's control-flow stack
// so break/continue/return can unwind it.
RegisterID* BytecodeGenerator::emitPushScope(RegisterID* scope)
{
    ASSERT(scope->isTemporary());
    ControlFlowContext context;
    context.isFinallyBlock = false;
    m_scopeContextStack.append(context);
    m_dynamicScopeDepth++;

    return emitUnaryNoDstOp(op_push_scope, scope);
}
1988
// Emits op_pop_scope and pops the matching non-finally entry from the
// generator's control-flow stack. Must pair with a prior emitPushScope /
// emitPushNewScope.
void BytecodeGenerator::emitPopScope()
{
    ASSERT(m_scopeContextStack.size());
    ASSERT(!m_scopeContextStack.last().isFinallyBlock);

    emitOpcode(op_pop_scope);

    m_scopeContextStack.removeLast();
    m_dynamicScopeDepth--;
}
1999
// Emits an op_debug instruction carrying the hook id and the source line
// range it covers. Suppressed unless debug hooks are enabled; when built
// with ENABLE(DEBUG_WITH_BREAKPOINT), only DidReachBreakpoint hooks are
// emitted.
void BytecodeGenerator::emitDebugHook(DebugHookID debugHookID, int firstLine, int lastLine)
{
#if ENABLE(DEBUG_WITH_BREAKPOINT)
    if (debugHookID != DidReachBreakpoint)
        return;
#else
    if (!m_shouldEmitDebugHooks)
        return;
#endif
    emitOpcode(op_debug);
    instructions().append(debugHookID);
    instructions().append(firstLine);
    instructions().append(lastLine);
}
2014
// Records a 'finally' block on the control-flow stack. The FinallyContext
// snapshots the sizes of the generator's bookkeeping stacks so that
// emitComplexJumpScopes can restore this exact state when it re-emits the
// finally block at a jump site.
void BytecodeGenerator::pushFinallyContext(StatementNode* finallyBlock)
{
    ControlFlowContext scope;
    scope.isFinallyBlock = true;
    FinallyContext context = {
        finallyBlock,
        m_scopeContextStack.size(),
        m_switchContextStack.size(),
        m_forInContextStack.size(),
        m_labelScopes.size(),
        m_finallyDepth,
        m_dynamicScopeDepth
    };
    scope.finallyContext = context;
    m_scopeContextStack.append(scope);
    m_finallyDepth++;
}
2032
// Pops the finally entry pushed by pushFinallyContext. Must pair with it.
void BytecodeGenerator::popFinallyContext()
{
    ASSERT(m_scopeContextStack.size());
    ASSERT(m_scopeContextStack.last().isFinallyBlock);
    ASSERT(m_finallyDepth > 0);
    m_scopeContextStack.removeLast();
    m_finallyDepth--;
}
2041
// Finds the label scope a 'break' statement targets. An empty name means
// an unlabeled break, which binds to the innermost non-named-label scope
// (loop or switch). Returns 0 when there is no valid target, which makes
// the break a syntax error.
LabelScope* BytecodeGenerator::breakTarget(const Identifier& name)
{
    // Reclaim free label scopes.
    //
    // The condition was previously coded as 'm_labelScopes.size() && !m_labelScopes.last().refCount()',
    // however sometimes this appears to lead to GCC going a little haywire and entering the loop with
    // size 0, leading to segfaulty badness. We are yet to identify a valid cause within our code to
    // cause the GCC codegen to misbehave in this fashion, and as such the following refactoring of the
    // loop condition is a workaround.
    while (m_labelScopes.size()) {
        if (m_labelScopes.last().refCount())
            break;
        m_labelScopes.removeLast();
    }

    if (!m_labelScopes.size())
        return 0;

    // We special-case the following, which is a syntax error in Firefox:
    // label:
    //     break;
    if (name.isEmpty()) {
        // Unlabeled break: innermost loop or switch scope wins.
        for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
            LabelScope* scope = &m_labelScopes[i];
            if (scope->type() != LabelScope::NamedLabel) {
                ASSERT(scope->breakTarget());
                return scope;
            }
        }
        return 0;
    }

    // Labeled break: innermost scope carrying the matching label name.
    for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
        LabelScope* scope = &m_labelScopes[i];
        if (scope->name() && *scope->name() == name) {
            ASSERT(scope->breakTarget());
            return scope;
        }
    }
    return 0;
}
2083
// Finds the label scope a 'continue' statement targets. An unlabeled
// continue binds to the innermost loop; a labeled continue binds to the
// loop nested nearest inside the scope carrying the matching label.
// Returns 0 when there is no valid target.
LabelScope* BytecodeGenerator::continueTarget(const Identifier& name)
{
    // Reclaim free label scopes.
    while (m_labelScopes.size() && !m_labelScopes.last().refCount())
        m_labelScopes.removeLast();

    if (!m_labelScopes.size())
        return 0;

    if (name.isEmpty()) {
        // Unlabeled continue: innermost loop wins.
        for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
            LabelScope* scope = &m_labelScopes[i];
            if (scope->type() == LabelScope::Loop) {
                ASSERT(scope->continueTarget());
                return scope;
            }
        }
        return 0;
    }

    // Continue to the loop nested nearest to the label scope that matches
    // 'name'. 'result' tracks the most recent loop seen while walking
    // outward, so when the named scope is found, 'result' is the loop
    // nearest inside it (or the scope itself, if it is a loop).
    LabelScope* result = 0;
    for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
        LabelScope* scope = &m_labelScopes[i];
        if (scope->type() == LabelScope::Loop) {
            ASSERT(scope->continueTarget());
            result = scope;
        }
        if (scope->name() && *scope->name() == name)
            return result; // may be 0
    }
    return 0;
}
2118
// Emits the bytecode for a jump (break/continue) that crosses dynamic
// scopes and/or finally blocks. Groups of plain scopes are popped with
// op_jmp_scopes; each intervening finally block's body is re-emitted
// inline at the jump site, with the generator's bookkeeping stacks
// temporarily rewound to the state captured when the finally context was
// pushed (see pushFinallyContext), then restored afterwards.
// topScope/bottomScope delimit the range of control-flow contexts to
// unwind (topScope is the innermost).
PassRefPtr<Label> BytecodeGenerator::emitComplexJumpScopes(Label* target, ControlFlowContext* topScope, ControlFlowContext* bottomScope)
{
    while (topScope > bottomScope) {
        // First we count the number of dynamic scopes we need to remove to get
        // to a finally block.
        int nNormalScopes = 0;
        while (topScope > bottomScope) {
            if (topScope->isFinallyBlock)
                break;
            ++nNormalScopes;
            --topScope;
        }

        if (nNormalScopes) {
            size_t begin = instructions().size();

            // We need to remove a number of dynamic scopes to get to the next
            // finally block
            emitOpcode(op_jmp_scopes);
            instructions().append(nNormalScopes);

            // If topScope == bottomScope then there isn't actually a finally block
            // left to emit, so make the jmp_scopes jump directly to the target label
            if (topScope == bottomScope) {
                instructions().append(target->bind(begin, instructions().size()));
                return target;
            }

            // Otherwise we just use jmp_scopes to pop a group of scopes and go
            // to the next instruction
            RefPtr<Label> nextInsn = newLabel();
            instructions().append(nextInsn->bind(begin, instructions().size()));
            emitLabel(nextInsn.get());
        }

        Vector<ControlFlowContext> savedScopeContextStack;
        Vector<SwitchInfo> savedSwitchContextStack;
        Vector<ForInContext> savedForInContextStack;
        SegmentedVector<LabelScope, 8> savedLabelScopes;
        while (topScope > bottomScope && topScope->isFinallyBlock) {
            // Save the current state of the world while instating the state of the world
            // for the finally block.
            FinallyContext finallyContext = topScope->finallyContext;
            bool flipScopes = finallyContext.scopeContextStackSize != m_scopeContextStack.size();
            bool flipSwitches = finallyContext.switchContextStackSize != m_switchContextStack.size();
            bool flipForIns = finallyContext.forInContextStackSize != m_forInContextStack.size();
            bool flipLabelScopes = finallyContext.labelScopesSize != m_labelScopes.size();
            int topScopeIndex = -1;
            int bottomScopeIndex = -1;
            if (flipScopes) {
                // Shrinking m_scopeContextStack invalidates topScope/bottomScope;
                // remember them as indices so they can be re-derived afterwards.
                topScopeIndex = topScope - m_scopeContextStack.begin();
                bottomScopeIndex = bottomScope - m_scopeContextStack.begin();
                savedScopeContextStack = m_scopeContextStack;
                m_scopeContextStack.shrink(finallyContext.scopeContextStackSize);
            }
            if (flipSwitches) {
                savedSwitchContextStack = m_switchContextStack;
                m_switchContextStack.shrink(finallyContext.switchContextStackSize);
            }
            if (flipForIns) {
                savedForInContextStack = m_forInContextStack;
                m_forInContextStack.shrink(finallyContext.forInContextStackSize);
            }
            if (flipLabelScopes) {
                savedLabelScopes = m_labelScopes;
                while (m_labelScopes.size() > finallyContext.labelScopesSize)
                    m_labelScopes.removeLast();
            }
            int savedFinallyDepth = m_finallyDepth;
            m_finallyDepth = finallyContext.finallyDepth;
            int savedDynamicScopeDepth = m_dynamicScopeDepth;
            m_dynamicScopeDepth = finallyContext.dynamicScopeDepth;

            // Emit the finally block.
            emitNode(finallyContext.finallyBlock);

            // Restore the state of the world.
            if (flipScopes) {
                m_scopeContextStack = savedScopeContextStack;
                topScope = &m_scopeContextStack[topScopeIndex]; // assert it's within bounds
                bottomScope = m_scopeContextStack.begin() + bottomScopeIndex; // don't assert, since it the index might be -1.
            }
            if (flipSwitches)
                m_switchContextStack = savedSwitchContextStack;
            if (flipForIns)
                m_forInContextStack = savedForInContextStack;
            if (flipLabelScopes)
                m_labelScopes = savedLabelScopes;
            m_finallyDepth = savedFinallyDepth;
            m_dynamicScopeDepth = savedDynamicScopeDepth;

            --topScope;
        }
    }
    return emitJump(target);
}
2215
// Emits a forward jump to 'target' that first pops dynamic scopes down to
// targetScopeDepth. A plain jump suffices when no scopes need popping; a
// single op_jmp_scopes handles the common case; crossing a finally block
// requires the full emitComplexJumpScopes machinery.
PassRefPtr<Label> BytecodeGenerator::emitJumpScopes(Label* target, int targetScopeDepth)
{
    ASSERT(scopeDepth() - targetScopeDepth >= 0);
    ASSERT(target->isForward());

    size_t scopeDelta = scopeDepth() - targetScopeDepth;
    ASSERT(scopeDelta <= m_scopeContextStack.size());
    if (!scopeDelta)
        return emitJump(target);

    if (m_finallyDepth)
        return emitComplexJumpScopes(target, &m_scopeContextStack.last(), &m_scopeContextStack.last() - scopeDelta);

    size_t begin = instructions().size();

    emitOpcode(op_jmp_scopes);
    instructions().append(scopeDelta);
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}
2236
// Emits op_get_pnames (for-in setup): fetches the enumerable property
// names of 'base' into 'dst', initializing the iteration index 'i' and
// count 'size'. Jumps to breakTarget when there is nothing to enumerate.
RegisterID* BytecodeGenerator::emitGetPropertyNames(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, Label* breakTarget)
{
    size_t begin = instructions().size();

    emitOpcode(op_get_pnames);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(i->index());
    instructions().append(size->index());
    instructions().append(breakTarget->bind(begin, instructions().size()));
    return dst;
}
2249
// Emits op_next_pname (for-in step): loads the next property name from the
// iterator 'iter' into 'dst' and jumps to 'target' (the loop body) while
// names remain; falls through when the enumeration is exhausted.
RegisterID* BytecodeGenerator::emitNextPropertyName(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, RegisterID* iter, Label* target)
{
    size_t begin = instructions().size();

    emitOpcode(op_next_pname);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(i->index());
    instructions().append(size->index());
    instructions().append(iter->index());
    instructions().append(target->bind(begin, instructions().size()));
    return dst;
}
2263
// Emits op_catch and registers an exception handler for the bytecode range
// [start, end), landing at the current instruction offset. The caught
// exception value is stored into targetRegister. The HandlerInfo layout
// varies by build configuration: with the JIT it carries a native code
// location (the LLInt catch trampoline when LLINT is enabled).
RegisterID* BytecodeGenerator::emitCatch(RegisterID* targetRegister, Label* start, Label* end)
{
    m_usesExceptions = true;
#if ENABLE(JIT)
#if ENABLE(LLINT)
    HandlerInfo info = { start->bind(0, 0), end->bind(0, 0), instructions().size(), m_dynamicScopeDepth + m_baseScopeDepth, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(bitwise_cast<void*>(&llint_op_catch))) };
#else
    HandlerInfo info = { start->bind(0, 0), end->bind(0, 0), instructions().size(), m_dynamicScopeDepth + m_baseScopeDepth, CodeLocationLabel() };
#endif
#else
    HandlerInfo info = { start->bind(0, 0), end->bind(0, 0), instructions().size(), m_dynamicScopeDepth + m_baseScopeDepth };
#endif

    m_codeBlock->addExceptionHandler(info);
    emitOpcode(op_catch);
    instructions().append(targetRegister->index());
    return targetRegister;
}
2282
// Emits op_throw_reference_error with 'message' stored in the constant pool.
void BytecodeGenerator::emitThrowReferenceError(const UString& message)
{
    emitOpcode(op_throw_reference_error);
    instructions().append(addConstantValue(jsString(globalData(), message))->index());
}
2288
// Emits op_push_new_scope (used for catch blocks): pushes a fresh scope
// object binding 'property' to 'value', and tracks it on the control-flow
// stack like emitPushScope so unwinding pops it correctly.
void BytecodeGenerator::emitPushNewScope(RegisterID* dst, const Identifier& property, RegisterID* value)
{
    ControlFlowContext context;
    context.isFinallyBlock = false;
    m_scopeContextStack.append(context);
    m_dynamicScopeDepth++;

    emitOpcode(op_push_new_scope);
    instructions().append(dst->index());
    instructions().append(addConstant(property));
    instructions().append(value->index());
}
2301
// Starts a switch statement: emits the switch opcode matching 'type' with
// placeholder operands. endSwitch() later patches the table index and
// default target once the clauses have been generated.
void BytecodeGenerator::beginSwitch(RegisterID* scrutineeRegister, SwitchInfo::SwitchType type)
{
    SwitchInfo info = { instructions().size(), type };
    switch (type) {
        case SwitchInfo::SwitchImmediate:
            emitOpcode(op_switch_imm);
            break;
        case SwitchInfo::SwitchCharacter:
            emitOpcode(op_switch_char);
            break;
        case SwitchInfo::SwitchString:
            emitOpcode(op_switch_string);
            break;
        default:
            ASSERT_NOT_REACHED();
    }

    instructions().append(0); // placeholder for table index; patched in endSwitch()
    instructions().append(0); // placeholder for default target; patched in endSwitch()
    instructions().append(scrutineeRegister->index());
    m_switchContextStack.append(info);
}
2324
2325static int32_t keyForImmediateSwitch(ExpressionNode* node, int32_t min, int32_t max)
2326{
2327 UNUSED_PARAM(max);
2328 ASSERT(node->isNumber());
2329 double value = static_cast<NumberNode*>(node)->value();
2330 int32_t key = static_cast<int32_t>(value);
2331 ASSERT(key == value);
2332 ASSERT(key >= min);
2333 ASSERT(key <= max);
2334 return key - min;
2335}
2336
// Populates a SimpleJumpTable for an immediate (integer) switch: one slot
// per value in [min, max], with each clause's label bound relative to the
// switch instruction at 'switchAddress' (+3 skips its three operands).
static void prepareJumpTableForImmediateSwitch(SimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max)
{
    jumpTable.min = min;
    jumpTable.branchOffsets.resize(max - min + 1);
    jumpTable.branchOffsets.fill(0);
    for (uint32_t i = 0; i < clauseCount; ++i) {
        // We're emitting this after the clause labels should have been fixed, so
        // the labels should not be "forward" references
        ASSERT(!labels[i]->isForward());
        jumpTable.add(keyForImmediateSwitch(nodes[i], min, max), labels[i]->bind(switchAddress, switchAddress + 3));
    }
}
2349
2350static int32_t keyForCharacterSwitch(ExpressionNode* node, int32_t min, int32_t max)
2351{
2352 UNUSED_PARAM(max);
2353 ASSERT(node->isString());
2354 StringImpl* clause = static_cast<StringNode*>(node)->value().impl();
2355 ASSERT(clause->length() == 1);
2356
2357 int32_t key = (*clause)[0];
2358 ASSERT(key >= min);
2359 ASSERT(key <= max);
2360 return key - min;
2361}
2362
// Populates a SimpleJumpTable for a single-character switch: one slot per
// character code in [min, max], with each clause's label bound relative to
// the switch instruction at 'switchAddress' (+3 skips its three operands).
static void prepareJumpTableForCharacterSwitch(SimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max)
{
    jumpTable.min = min;
    jumpTable.branchOffsets.resize(max - min + 1);
    jumpTable.branchOffsets.fill(0);
    for (uint32_t i = 0; i < clauseCount; ++i) {
        // We're emitting this after the clause labels should have been fixed, so
        // the labels should not be "forward" references
        ASSERT(!labels[i]->isForward());
        jumpTable.add(keyForCharacterSwitch(nodes[i], min, max), labels[i]->bind(switchAddress, switchAddress + 3));
    }
}
2375
// Populates a StringJumpTable: maps each string clause's StringImpl to the
// branch offset of its label, bound relative to the switch instruction at
// 'switchAddress' (+3 skips its three operands).
static void prepareJumpTableForStringSwitch(StringJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes)
{
    for (uint32_t i = 0; i < clauseCount; ++i) {
        // We're emitting this after the clause labels should have been fixed, so
        // the labels should not be "forward" references
        ASSERT(!labels[i]->isForward());

        ASSERT(nodes[i]->isString());
        StringImpl* clause = static_cast<StringNode*>(nodes[i])->value().impl();
        OffsetLocation location;
        location.branchOffset = labels[i]->bind(switchAddress, switchAddress + 3);
        jumpTable.offsetTable.add(clause, location);
    }
}
2390
// Finishes the switch begun by beginSwitch: builds the jump table for the
// recorded switch type and patches the two placeholder operands of the
// switch instruction (+1 = jump table index, +2 = default target offset).
void BytecodeGenerator::endSwitch(uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, Label* defaultLabel, int32_t min, int32_t max)
{
    SwitchInfo switchInfo = m_switchContextStack.last();
    m_switchContextStack.removeLast();
    if (switchInfo.switchType == SwitchInfo::SwitchImmediate) {
        instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfImmediateSwitchJumpTables();
        instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);

        SimpleJumpTable& jumpTable = m_codeBlock->addImmediateSwitchJumpTable();
        prepareJumpTableForImmediateSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max);
    } else if (switchInfo.switchType == SwitchInfo::SwitchCharacter) {
        instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfCharacterSwitchJumpTables();
        instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);

        SimpleJumpTable& jumpTable = m_codeBlock->addCharacterSwitchJumpTable();
        prepareJumpTableForCharacterSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max);
    } else {
        ASSERT(switchInfo.switchType == SwitchInfo::SwitchString);
        instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfStringSwitchJumpTables();
        instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);

        StringJumpTable& jumpTable = m_codeBlock->addStringSwitchJumpTable();
        prepareJumpTableForStringSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes);
    }
}
2416
// Called when expression nesting exceeds the generator's limit. Sets the
// m_expressionTooDeep flag (surfaced as an error later) and returns a
// fresh temporary so the caller has a valid register to keep generating
// into.
RegisterID* BytecodeGenerator::emitThrowExpressionTooDeepException()
{
    // It would be nice to do an even better job of identifying exactly where
    // the expression is, e.g. by having the caller pass in a node pointer.
    // NOTE(review): an older revision of this comment referred to calling
    // emitExpressionInfo for a line number; this function no longer does —
    // the error is reported via the m_expressionTooDeep flag instead.
    m_expressionTooDeep = true;
    return newTemporary();
}
2426
// Forwards the "is numeric compare function" flag to the code block.
void BytecodeGenerator::setIsNumericCompareFunction(bool isNumericCompareFunction)
{
    m_codeBlock->setIsNumericCompareFunction(isNumericCompareFunction);
}
2431
2432bool BytecodeGenerator::isArgumentNumber(const Identifier& ident, int argumentNumber)
2433{
2434 RegisterID* registerID = registerFor(ident);
2435 if (!registerID || registerID->index() >= 0)
2436 return 0;
2437 return registerID->index() == CallFrame::argumentOffset(argumentNumber);
2438}
2439
2440} // namespace JSC