]> git.saurik.com Git - apple/javascriptcore.git/blob - bytecompiler/BytecodeGenerator.cpp
JavaScriptCore-1218.33.tar.gz
[apple/javascriptcore.git] / bytecompiler / BytecodeGenerator.cpp
1 /*
2 * Copyright (C) 2008, 2009, 2012, 2013 Apple Inc. All rights reserved.
3 * Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca>
4 * Copyright (C) 2012 Igalia, S.L.
5 *
6 * Redistribution and use in source and binary forms, with or without
7 * modification, are permitted provided that the following conditions
8 * are met:
9 *
10 * 1. Redistributions of source code must retain the above copyright
11 * notice, this list of conditions and the following disclaimer.
12 * 2. Redistributions in binary form must reproduce the above copyright
13 * notice, this list of conditions and the following disclaimer in the
14 * documentation and/or other materials provided with the distribution.
15 * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
16 * its contributors may be used to endorse or promote products derived
17 * from this software without specific prior written permission.
18 *
19 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
20 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
21 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
22 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
23 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
24 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
25 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
26 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
28 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29 */
30
31 #include "config.h"
32 #include "BytecodeGenerator.h"
33
34 #include "BatchedTransitionOptimizer.h"
35 #include "Interpreter.h"
36 #include "JSActivation.h"
37 #include "JSFunction.h"
38 #include "JSNameScope.h"
39 #include "LowLevelInterpreter.h"
40 #include "Operations.h"
41 #include "Options.h"
42 #include "StrongInlines.h"
43 #include "UnlinkedCodeBlock.h"
44 #include <wtf/text/WTFString.h>
45
46 using namespace std;
47
48 namespace JSC {
49
50 void Label::setLocation(unsigned location)
51 {
52 m_location = location;
53
54 unsigned size = m_unresolvedJumps.size();
55 for (unsigned i = 0; i < size; ++i)
56 m_generator->m_instructions[m_unresolvedJumps[i].second].u.operand = m_location - m_unresolvedJumps[i].first;
57 }
58
59 #ifndef NDEBUG
60 void ResolveResult::checkValidity()
61 {
62 switch (m_type) {
63 case Register:
64 case ReadOnlyRegister:
65 ASSERT(m_local);
66 return;
67 case Dynamic:
68 ASSERT(!m_local);
69 return;
70 case Lexical:
71 case ReadOnlyLexical:
72 ASSERT(!m_local);
73 return;
74 default:
75 RELEASE_ASSERT_NOT_REACHED();
76 }
77 }
78 #endif
79
// Drives bytecode generation for the AST held in m_scopeNode, then finalizes
// the UnlinkedCodeBlock: builds the exception-handler table from the recorded
// try ranges, hands over the instruction stream, and reports whether the
// expression tree was too deep to generate.
ParserError BytecodeGenerator::generate()
{
    SamplingRegion samplingRegion("Bytecode Generation");

    m_codeBlock->setThisRegister(m_thisRegister.index());

    // Walk the AST; each node appends its bytecode to m_instructions.
    m_scopeNode->emitBytecode(*this);

    m_staticPropertyAnalyzer.kill();

    // Convert the recorded try ranges into the code block's handler table.
    for (unsigned i = 0; i < m_tryRanges.size(); ++i) {
        TryRange& range = m_tryRanges[i];
        int start = range.start->bind();
        int end = range.end->bind();

        // This will happen for empty try blocks and for some cases of finally blocks:
        //
        // try {
        //     try {
        //     } finally {
        //         return 42;
        //         // *HERE*
        //     }
        // } finally {
        //     print("things");
        // }
        //
        // The return will pop scopes to execute the outer finally block. But this includes
        // popping the try context for the inner try. The try context is live in the fall-through
        // part of the finally block not because we will emit a handler that overlaps the finally,
        // but because we haven't yet had a chance to plant the catch target. Then when we finish
        // emitting code for the outer finally block, we repush the try contex, this time with a
        // new start index. But that means that the start index for the try range corresponding
        // to the inner-finally-following-the-return (marked as "*HERE*" above) will be greater
        // than the end index of the try block. This is harmless since end < start handlers will
        // never get matched in our logic, but we do the runtime a favor and choose to not emit
        // such handlers at all.
        if (end <= start)
            continue;

        ASSERT(range.tryData->targetScopeDepth != UINT_MAX);
        UnlinkedHandlerInfo info = {
            static_cast<uint32_t>(start), static_cast<uint32_t>(end),
            static_cast<uint32_t>(range.tryData->target->bind()),
            range.tryData->targetScopeDepth
        };
        m_codeBlock->addExceptionHandler(info);
    }

    // Transfer the finished instruction stream into the code block.
    m_codeBlock->instructions() = RefCountedArray<UnlinkedInstruction>(m_instructions);

    m_codeBlock->shrinkToFit();

    if (m_expressionTooDeep)
        return ParserError(ParserError::OutOfMemory);
    return ParserError(ParserError::ErrorNone);
}
137
138 bool BytecodeGenerator::addVar(const Identifier& ident, bool isConstant, RegisterID*& r0)
139 {
140 int index = m_calleeRegisters.size();
141 SymbolTableEntry newEntry(index, isConstant ? ReadOnly : 0);
142 SymbolTable::AddResult result = symbolTable().add(ident.impl(), newEntry);
143
144 if (!result.isNewEntry) {
145 r0 = &registerFor(result.iterator->value.getIndex());
146 return false;
147 }
148
149 r0 = addVar();
150 return true;
151 }
152
153 void BytecodeGenerator::preserveLastVar()
154 {
155 if ((m_firstConstantIndex = m_calleeRegisters.size()) != 0)
156 m_lastVar = &m_calleeRegisters.last();
157 }
158
// Constructor for global (program) code. The JSScope parameter is unused for
// global code. Registers every top-level function and var declaration on the
// unlinked code block; no local registers are allocated for globals.
BytecodeGenerator::BytecodeGenerator(VM& vm, JSScope*, ProgramNode* programNode, UnlinkedProgramCodeBlock* codeBlock, DebuggerMode debuggerMode, ProfilerMode profilerMode)
    : m_shouldEmitDebugHooks(debuggerMode == DebuggerOn)
    , m_shouldEmitProfileHooks(profilerMode == ProfilerOn)
    , m_symbolTable(0) // Global code has no local symbol table.
    , m_scopeNode(programNode)
    , m_codeBlock(vm, codeBlock)
    , m_thisRegister(CallFrame::thisArgumentOffset())
    , m_emptyValueRegister(0)
    , m_globalObjectRegister(0)
    , m_finallyDepth(0)
    , m_dynamicScopeDepth(0)
    , m_codeType(GlobalCode)
    , m_nextConstantOffset(0)
    , m_globalConstantIndex(0)
    , m_hasCreatedActivation(true) // Global code never creates an activation here.
    , m_firstLazyFunction(0)
    , m_lastLazyFunction(0)
    , m_staticPropertyAnalyzer(&m_instructions)
    , m_vm(&vm)
    , m_lastOpcodeID(op_end)
#ifndef NDEBUG
    , m_lastOpcodePosition(0)
#endif
    , m_stack(wtfThreadData().stack())
    , m_usesExceptions(false)
    , m_expressionTooDeep(false)
{
    if (m_shouldEmitDebugHooks)
        m_codeBlock->setNeedsFullScopeChain(true);

    m_codeBlock->setNumParameters(1); // Allocate space for "this"

    emitOpcode(op_enter);

    const VarStack& varStack = programNode->varStack();
    const FunctionStack& functionStack = programNode->functionStack();

    // Record every top-level function declaration on the code block.
    for (size_t i = 0; i < functionStack.size(); ++i) {
        FunctionBodyNode* function = functionStack[i];
        UnlinkedFunctionExecutable* unlinkedFunction = makeFunction(function);
        codeBlock->addFunctionDeclaration(*m_vm, function->ident(), unlinkedFunction);
    }

    // Record every top-level var declaration by name (with its const-ness).
    for (size_t i = 0; i < varStack.size(); ++i)
        codeBlock->addVariableDeclaration(*varStack[i].first, !!(varStack[i].second & DeclarationStacks::IsConstant));

}
206
// Constructor for function code. Sets up the callee frame layout in a fixed
// order: activation register, arguments registers, captured variables and
// functions (so they form a contiguous [captureStart, captureEnd) range),
// lazily-creatable functions, remaining vars, parameters, and finally the
// callee's own name. The statement order here is load-bearing: register
// indices are assigned by allocation order.
BytecodeGenerator::BytecodeGenerator(VM& vm, JSScope* scope, FunctionBodyNode* functionBody, UnlinkedFunctionCodeBlock* codeBlock, DebuggerMode debuggerMode, ProfilerMode profilerMode)
    : m_shouldEmitDebugHooks(debuggerMode == DebuggerOn)
    , m_shouldEmitProfileHooks(profilerMode == ProfilerOn)
    , m_symbolTable(codeBlock->symbolTable())
    , m_scopeNode(functionBody)
    , m_scope(vm, scope)
    , m_codeBlock(vm, codeBlock)
    , m_activationRegister(0)
    , m_emptyValueRegister(0)
    , m_globalObjectRegister(0)
    , m_finallyDepth(0)
    , m_dynamicScopeDepth(0)
    , m_codeType(FunctionCode)
    , m_nextConstantOffset(0)
    , m_globalConstantIndex(0)
    , m_hasCreatedActivation(false)
    , m_firstLazyFunction(0)
    , m_lastLazyFunction(0)
    , m_staticPropertyAnalyzer(&m_instructions)
    , m_vm(&vm)
    , m_lastOpcodeID(op_end)
#ifndef NDEBUG
    , m_lastOpcodePosition(0)
#endif
    , m_stack(wtfThreadData().stack())
    , m_usesExceptions(false)
    , m_expressionTooDeep(false)
{
    if (m_shouldEmitDebugHooks)
        m_codeBlock->setNeedsFullScopeChain(true);

    m_symbolTable->setUsesNonStrictEval(codeBlock->usesEval() && !codeBlock->isStrictMode());
    m_symbolTable->setParameterCountIncludingThis(functionBody->parameters()->size() + 1);

    emitOpcode(op_enter);
    if (m_codeBlock->needsFullScopeChain()) {
        // Reserve a register for the activation; it is created lazily.
        m_activationRegister = addVar();
        emitInitLazyRegister(m_activationRegister);
        m_codeBlock->setActivationRegister(m_activationRegister->index());
    }

    // Everything allocated from here until setCaptureEnd() below is in the
    // captured range of the frame.
    m_symbolTable->setCaptureStart(m_codeBlock->m_numVars);

    if (functionBody->usesArguments() || codeBlock->usesEval() || m_shouldEmitDebugHooks) { // May reify arguments object.
        RegisterID* unmodifiedArgumentsRegister = addVar(); // Anonymous, so it can't be modified by user code.
        RegisterID* argumentsRegister = addVar(propertyNames().arguments, false); // Can be changed by assigning to 'arguments'.

        // We can save a little space by hard-coding the knowledge that the two
        // 'arguments' values are stored in consecutive registers, and storing
        // only the index of the assignable one.
        codeBlock->setArgumentsRegister(argumentsRegister->index());
        ASSERT_UNUSED(unmodifiedArgumentsRegister, unmodifiedArgumentsRegister->index() == JSC::unmodifiedArgumentsRegister(codeBlock->argumentsRegister()));

        emitInitLazyRegister(argumentsRegister);
        emitInitLazyRegister(unmodifiedArgumentsRegister);

        // Strict mode cannot create the arguments object lazily (presumably
        // because the parameters must be copied at entry — TODO confirm).
        if (m_codeBlock->isStrictMode()) {
            emitOpcode(op_create_arguments);
            instructions().append(argumentsRegister->index());
        }

        // The debugger currently retrieves the arguments object from an activation rather than pulling
        // it from a call frame. In the long-term it should stop doing that (<rdar://problem/6911886>),
        // but for now we force eager creation of the arguments object when debugging.
        if (m_shouldEmitDebugHooks) {
            emitOpcode(op_create_arguments);
            instructions().append(argumentsRegister->index());
        }
    }

    bool shouldCaptureAllTheThings = m_shouldEmitDebugHooks || codeBlock->usesEval();

    // Allocate a captured-frame register for each parameter that is closed
    // over (or all of them when capturing everything).
    bool capturesAnyArgumentByName = false;
    Vector<RegisterID*, 0, UnsafeVectorOverflow> capturedArguments;
    if (functionBody->hasCapturedVariables() || shouldCaptureAllTheThings) {
        FunctionParameters& parameters = *functionBody->parameters();
        capturedArguments.resize(parameters.size());
        for (size_t i = 0; i < parameters.size(); ++i) {
            capturedArguments[i] = 0;
            if (!functionBody->captures(parameters.at(i)) && !shouldCaptureAllTheThings)
                continue;
            capturesAnyArgumentByName = true;
            capturedArguments[i] = addVar();
        }
    }

    // Non-strict code with captured parameters needs a SlowArgument map so
    // the arguments object can find each parameter's true location.
    if (capturesAnyArgumentByName && !codeBlock->isStrictMode()) {
        size_t parameterCount = m_symbolTable->parameterCount();
        OwnArrayPtr<SlowArgument> slowArguments = adoptArrayPtr(new SlowArgument[parameterCount]);
        for (size_t i = 0; i < parameterCount; ++i) {
            if (!capturedArguments[i]) {
                ASSERT(slowArguments[i].status == SlowArgument::Normal);
                slowArguments[i].index = CallFrame::argumentOffset(i);
                continue;
            }
            slowArguments[i].status = SlowArgument::Captured;
            slowArguments[i].index = capturedArguments[i]->index();
        }
        m_symbolTable->setSlowArguments(slowArguments.release());
    }

    RegisterID* calleeRegister = resolveCallee(functionBody); // May push to the scope chain and/or add a captured var.

    const DeclarationStacks::FunctionStack& functionStack = functionBody->functionStack();
    const DeclarationStacks::VarStack& varStack = functionBody->varStack();

    // Captured variables and functions go first so that activations don't have
    // to step over the non-captured locals to mark them.
    m_hasCreatedActivation = false;
    if (functionBody->hasCapturedVariables()) {
        for (size_t i = 0; i < functionStack.size(); ++i) {
            FunctionBodyNode* function = functionStack[i];
            const Identifier& ident = function->ident();
            if (functionBody->captures(ident)) {
                // Creating a captured function requires the activation, so
                // create it on first need.
                if (!m_hasCreatedActivation) {
                    m_hasCreatedActivation = true;
                    emitOpcode(op_create_activation);
                    instructions().append(m_activationRegister->index());
                }
                m_functions.add(ident.impl());
                emitNewFunction(addVar(ident, false), function);
            }
        }
        for (size_t i = 0; i < varStack.size(); ++i) {
            const Identifier& ident = *varStack[i].first;
            if (functionBody->captures(ident))
                addVar(ident, varStack[i].second & DeclarationStacks::IsConstant);
        }
    }
    bool canLazilyCreateFunctions = !functionBody->needsActivationForMoreThanVariables() && !m_shouldEmitDebugHooks;
    if (!canLazilyCreateFunctions && !m_hasCreatedActivation) {
        m_hasCreatedActivation = true;
        emitOpcode(op_create_activation);
        instructions().append(m_activationRegister->index());
    }

    m_symbolTable->setCaptureEnd(codeBlock->m_numVars);

    // Non-captured function declarations: created lazily when possible, with
    // their register range recorded in [m_firstLazyFunction, m_lastLazyFunction).
    m_firstLazyFunction = codeBlock->m_numVars;
    for (size_t i = 0; i < functionStack.size(); ++i) {
        FunctionBodyNode* function = functionStack[i];
        const Identifier& ident = function->ident();
        if (!functionBody->captures(ident)) {
            m_functions.add(ident.impl());
            RefPtr<RegisterID> reg = addVar(ident, false);
            // Don't lazily create functions that override the name 'arguments'
            // as this would complicate lazy instantiation of actual arguments.
            if (!canLazilyCreateFunctions || ident == propertyNames().arguments)
                emitNewFunction(reg.get(), function);
            else {
                emitInitLazyRegister(reg.get());
                m_lazyFunctions.set(reg->index(), function);
            }
        }
    }
    m_lastLazyFunction = canLazilyCreateFunctions ? codeBlock->m_numVars : m_firstLazyFunction;
    for (size_t i = 0; i < varStack.size(); ++i) {
        const Identifier& ident = *varStack[i].first;
        if (!functionBody->captures(ident))
            addVar(ident, varStack[i].second & DeclarationStacks::IsConstant);
    }

    // When capturing everything, extend the captured range over the
    // just-allocated locals as well.
    if (shouldCaptureAllTheThings)
        m_symbolTable->setCaptureEnd(codeBlock->m_numVars);

    FunctionParameters& parameters = *functionBody->parameters();
    m_parameters.grow(parameters.size() + 1); // reserve space for "this"

    // Add "this" as a parameter
    int nextParameterIndex = CallFrame::thisArgumentOffset();
    m_thisRegister.setIndex(nextParameterIndex--);
    m_codeBlock->addParameter();

    for (size_t i = 0; i < parameters.size(); ++i, --nextParameterIndex) {
        int index = nextParameterIndex;
        if (capturedArguments.size() && capturedArguments[i]) {
            ASSERT((functionBody->hasCapturedVariables() && functionBody->captures(parameters.at(i))) || shouldCaptureAllTheThings);
            // Captured parameter: copy it from its call-frame slot into its
            // captured register, and bind the name to the captured slot.
            index = capturedArguments[i]->index();
            RegisterID original(nextParameterIndex);
            emitMove(capturedArguments[i], &original);
        }
        addParameter(parameters.at(i), index);
    }
    preserveLastVar();

    // We declare the callee's name last because it should lose to a var, function, and/or parameter declaration.
    addCallee(functionBody, calleeRegister);

    if (isConstructor()) {
        emitCreateThis(&m_thisRegister);
    } else if (!codeBlock->isStrictMode() && (functionBody->usesThis() || codeBlock->usesEval() || m_shouldEmitDebugHooks)) {
        // Non-strict code observing |this| must box/convert it up front.
        UnlinkedValueProfile profile = emitProfiledOpcode(op_convert_this);
        instructions().append(kill(&m_thisRegister));
        instructions().append(profile);
    }
}
403
// Constructor for eval code. Eval always needs the full scope chain, declares
// its functions as function decls on the code block, and records its variable
// names on the code block instead of allocating registers for them.
BytecodeGenerator::BytecodeGenerator(VM& vm, JSScope* scope, EvalNode* evalNode, UnlinkedEvalCodeBlock* codeBlock, DebuggerMode debuggerMode, ProfilerMode profilerMode)
    : m_shouldEmitDebugHooks(debuggerMode == DebuggerOn)
    , m_shouldEmitProfileHooks(profilerMode == ProfilerOn)
    , m_symbolTable(codeBlock->symbolTable())
    , m_scopeNode(evalNode)
    , m_scope(vm, scope)
    , m_codeBlock(vm, codeBlock)
    , m_thisRegister(CallFrame::thisArgumentOffset())
    , m_emptyValueRegister(0)
    , m_globalObjectRegister(0)
    , m_finallyDepth(0)
    , m_dynamicScopeDepth(0)
    , m_codeType(EvalCode)
    , m_nextConstantOffset(0)
    , m_globalConstantIndex(0)
    , m_hasCreatedActivation(true) // Eval code never creates an activation here.
    , m_firstLazyFunction(0)
    , m_lastLazyFunction(0)
    , m_staticPropertyAnalyzer(&m_instructions)
    , m_vm(&vm)
    , m_lastOpcodeID(op_end)
#ifndef NDEBUG
    , m_lastOpcodePosition(0)
#endif
    , m_stack(wtfThreadData().stack())
    , m_usesExceptions(false)
    , m_expressionTooDeep(false)
{
    // Unlike the other code types, eval unconditionally needs the full scope chain.
    m_codeBlock->setNeedsFullScopeChain(true);

    m_symbolTable->setUsesNonStrictEval(codeBlock->usesEval() && !codeBlock->isStrictMode());
    m_codeBlock->setNumParameters(1); // Space for "this" only.

    emitOpcode(op_enter);

    const DeclarationStacks::FunctionStack& functionStack = evalNode->functionStack();
    for (size_t i = 0; i < functionStack.size(); ++i)
        m_codeBlock->addFunctionDecl(makeFunction(functionStack[i]));

    // Collect the declared variable names and hand them to the code block;
    // no registers are allocated for eval variables here.
    const DeclarationStacks::VarStack& varStack = evalNode->varStack();
    unsigned numVariables = varStack.size();
    Vector<Identifier, 0, UnsafeVectorOverflow> variables;
    variables.reserveCapacity(numVariables);
    for (size_t i = 0; i < numVariables; ++i)
        variables.append(*varStack[i].first);
    codeBlock->adoptVariables(variables);
    preserveLastVar();
}
452
// NOTE(review): empty, but defined out-of-line — presumably so member types
// only need to be complete in this translation unit; confirm before inlining.
BytecodeGenerator::~BytecodeGenerator()
{
}
456
457 RegisterID* BytecodeGenerator::emitInitLazyRegister(RegisterID* reg)
458 {
459 emitOpcode(op_init_lazy_reg);
460 instructions().append(reg->index());
461 return reg;
462 }
463
464 RegisterID* BytecodeGenerator::resolveCallee(FunctionBodyNode* functionBodyNode)
465 {
466 if (functionBodyNode->ident().isNull() || !functionBodyNode->functionNameIsInScope())
467 return 0;
468
469 m_calleeRegister.setIndex(JSStack::Callee);
470
471 // If non-strict eval is in play, we use a separate object in the scope chain for the callee's name.
472 if ((m_codeBlock->usesEval() && !m_codeBlock->isStrictMode()) || m_shouldEmitDebugHooks) {
473 emitOpcode(op_push_name_scope);
474 instructions().append(addConstant(functionBodyNode->ident()));
475 instructions().append(m_calleeRegister.index());
476 instructions().append(ReadOnly | DontDelete);
477 return 0;
478 }
479
480 if (!functionBodyNode->captures(functionBodyNode->ident()))
481 return &m_calleeRegister;
482
483 // Move the callee into the captured section of the stack.
484 return emitMove(addVar(), &m_calleeRegister);
485 }
486
487 void BytecodeGenerator::addCallee(FunctionBodyNode* functionBodyNode, RegisterID* calleeRegister)
488 {
489 if (functionBodyNode->ident().isNull() || !functionBodyNode->functionNameIsInScope())
490 return;
491
492 // If non-strict eval is in play, we use a separate object in the scope chain for the callee's name.
493 if ((m_codeBlock->usesEval() && !m_codeBlock->isStrictMode()) || m_shouldEmitDebugHooks)
494 return;
495
496 ASSERT(calleeRegister);
497 symbolTable().add(functionBodyNode->ident().impl(), SymbolTableEntry(calleeRegister->index(), ReadOnly));
498 }
499
500 void BytecodeGenerator::addParameter(const Identifier& ident, int parameterIndex)
501 {
502 // Parameters overwrite var declarations, but not function declarations.
503 StringImpl* rep = ident.impl();
504 if (!m_functions.contains(rep)) {
505 symbolTable().set(rep, parameterIndex);
506 RegisterID& parameter = registerFor(parameterIndex);
507 parameter.setIndex(parameterIndex);
508 }
509
510 // To maintain the calling convention, we have to allocate unique space for
511 // each parameter, even if the parameter doesn't make it into the symbol table.
512 m_codeBlock->addParameter();
513 }
514
515 bool BytecodeGenerator::willResolveToArguments(const Identifier& ident)
516 {
517 if (ident != propertyNames().arguments)
518 return false;
519
520 if (!shouldOptimizeLocals())
521 return false;
522
523 SymbolTableEntry entry = symbolTable().get(ident.impl());
524 if (entry.isNull())
525 return false;
526
527 if (m_codeBlock->usesArguments() && m_codeType == FunctionCode)
528 return true;
529
530 return false;
531 }
532
533 RegisterID* BytecodeGenerator::uncheckedRegisterForArguments()
534 {
535 ASSERT(willResolveToArguments(propertyNames().arguments));
536
537 SymbolTableEntry entry = symbolTable().get(propertyNames().arguments.impl());
538 ASSERT(!entry.isNull());
539 return &registerFor(entry.getIndex());
540 }
541
542 RegisterID* BytecodeGenerator::createLazyRegisterIfNecessary(RegisterID* reg)
543 {
544 if (m_lastLazyFunction <= reg->index() || reg->index() < m_firstLazyFunction)
545 return reg;
546 emitLazyNewFunction(reg, m_lazyFunctions.get(reg->index()));
547 return reg;
548 }
549
550 RegisterID* BytecodeGenerator::newRegister()
551 {
552 m_calleeRegisters.append(m_calleeRegisters.size());
553 m_codeBlock->m_numCalleeRegisters = max<int>(m_codeBlock->m_numCalleeRegisters, m_calleeRegisters.size());
554 return &m_calleeRegisters.last();
555 }
556
557 RegisterID* BytecodeGenerator::newTemporary()
558 {
559 // Reclaim free register IDs.
560 while (m_calleeRegisters.size() && !m_calleeRegisters.last().refCount())
561 m_calleeRegisters.removeLast();
562
563 RegisterID* result = newRegister();
564 result->setTemporary();
565 return result;
566 }
567
// Allocates a LabelScope for a loop, switch, or labeled statement, reusing
// trailing unreferenced slots first. Loops additionally get a continue label.
LabelScopePtr BytecodeGenerator::newLabelScope(LabelScope::Type type, const Identifier* name)
{
    // Reclaim free label scopes.
    while (m_labelScopes.size() && !m_labelScopes.last().refCount())
        m_labelScopes.removeLast();

    // Allocate new label scope.
    LabelScope scope(type, name, scopeDepth(), newLabel(), type == LabelScope::Loop ? newLabel() : PassRefPtr<Label>()); // Only loops have continue targets.
    m_labelScopes.append(scope);
    // The returned pointer is index-based so it survives vector growth.
    return LabelScopePtr(&m_labelScopes, m_labelScopes.size() - 1);
}
579
// Allocates a fresh (still unbound) label, reusing trailing unreferenced
// label slots before growing the list.
PassRefPtr<Label> BytecodeGenerator::newLabel()
{
    // Reclaim free label IDs.
    while (m_labels.size() && !m_labels.last().refCount())
        m_labels.removeLast();

    // Allocate new label ID.
    m_labels.append(this);
    return &m_labels.last();
}
590
591 PassRefPtr<Label> BytecodeGenerator::emitLabel(Label* l0)
592 {
593 unsigned newLabelIndex = instructions().size();
594 l0->setLocation(newLabelIndex);
595
596 if (m_codeBlock->numberOfJumpTargets()) {
597 unsigned lastLabelIndex = m_codeBlock->lastJumpTarget();
598 ASSERT(lastLabelIndex <= newLabelIndex);
599 if (newLabelIndex == lastLabelIndex) {
600 // Peephole optimizations have already been disabled by emitting the last label
601 return l0;
602 }
603 }
604
605 m_codeBlock->addJumpTarget(newLabelIndex);
606
607 // This disables peephole optimizations when an instruction is a jump target
608 m_lastOpcodeID = op_end;
609 return l0;
610 }
611
// Appends an opcode to the instruction stream and records it so later emits
// can attempt peephole fusion against it.
void BytecodeGenerator::emitOpcode(OpcodeID opcodeID)
{
#ifndef NDEBUG
    size_t opcodePosition = instructions().size();
    // The previous opcode must occupy exactly its declared length, unless the
    // peephole state was deliberately reset to op_end.
    ASSERT(opcodePosition - m_lastOpcodePosition == opcodeLength(m_lastOpcodeID) || m_lastOpcodeID == op_end);
    m_lastOpcodePosition = opcodePosition;
#endif
    instructions().append(opcodeID);
    m_lastOpcodeID = opcodeID;
}
622
// Allocates a new array profile on the code block; returns a null profile
// when the value profiler is compiled out.
UnlinkedArrayProfile BytecodeGenerator::newArrayProfile()
{
#if ENABLE(VALUE_PROFILER)
    return m_codeBlock->addArrayProfile();
#else
    return 0;
#endif
}
631
// Allocates a new array-allocation profile on the code block; returns a null
// profile when the value profiler is compiled out.
UnlinkedArrayAllocationProfile BytecodeGenerator::newArrayAllocationProfile()
{
#if ENABLE(VALUE_PROFILER)
    return m_codeBlock->addArrayAllocationProfile();
#else
    return 0;
#endif
}
640
// Allocates a new object-allocation profile on the code block
// (unconditionally — not gated on ENABLE(VALUE_PROFILER)).
UnlinkedObjectAllocationProfile BytecodeGenerator::newObjectAllocationProfile()
{
    return m_codeBlock->addObjectAllocationProfile();
}
645
// Emits an opcode that carries a value profile and returns the profile so the
// caller can append it as the opcode's trailing operand (null profile when
// the value profiler is compiled out).
UnlinkedValueProfile BytecodeGenerator::emitProfiledOpcode(OpcodeID opcodeID)
{
#if ENABLE(VALUE_PROFILER)
    UnlinkedValueProfile result = m_codeBlock->addValueProfile();
#else
    UnlinkedValueProfile result = 0;
#endif
    emitOpcode(opcodeID);
    return result;
}
656
// Emits op_loop_hint, marking a loop back-edge in the bytecode.
void BytecodeGenerator::emitLoopHint()
{
    emitOpcode(op_loop_hint);
}
661
662 void BytecodeGenerator::retrieveLastBinaryOp(int& dstIndex, int& src1Index, int& src2Index)
663 {
664 ASSERT(instructions().size() >= 4);
665 size_t size = instructions().size();
666 dstIndex = instructions().at(size - 3).u.operand;
667 src1Index = instructions().at(size - 2).u.operand;
668 src2Index = instructions().at(size - 1).u.operand;
669 }
670
671 void BytecodeGenerator::retrieveLastUnaryOp(int& dstIndex, int& srcIndex)
672 {
673 ASSERT(instructions().size() >= 3);
674 size_t size = instructions().size();
675 dstIndex = instructions().at(size - 2).u.operand;
676 srcIndex = instructions().at(size - 1).u.operand;
677 }
678
679 void ALWAYS_INLINE BytecodeGenerator::rewindBinaryOp()
680 {
681 ASSERT(instructions().size() >= 4);
682 instructions().shrink(instructions().size() - 4);
683 m_lastOpcodeID = op_end;
684 }
685
686 void ALWAYS_INLINE BytecodeGenerator::rewindUnaryOp()
687 {
688 ASSERT(instructions().size() >= 3);
689 instructions().shrink(instructions().size() - 3);
690 m_lastOpcodeID = op_end;
691 }
692
693 PassRefPtr<Label> BytecodeGenerator::emitJump(Label* target)
694 {
695 size_t begin = instructions().size();
696 emitOpcode(op_jmp);
697 instructions().append(target->bind(begin, instructions().size()));
698 return target;
699 }
700
701 PassRefPtr<Label> BytecodeGenerator::emitJumpIfTrue(RegisterID* cond, Label* target)
702 {
703 if (m_lastOpcodeID == op_less) {
704 int dstIndex;
705 int src1Index;
706 int src2Index;
707
708 retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
709
710 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
711 rewindBinaryOp();
712
713 size_t begin = instructions().size();
714 emitOpcode(op_jless);
715 instructions().append(src1Index);
716 instructions().append(src2Index);
717 instructions().append(target->bind(begin, instructions().size()));
718 return target;
719 }
720 } else if (m_lastOpcodeID == op_lesseq) {
721 int dstIndex;
722 int src1Index;
723 int src2Index;
724
725 retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
726
727 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
728 rewindBinaryOp();
729
730 size_t begin = instructions().size();
731 emitOpcode(op_jlesseq);
732 instructions().append(src1Index);
733 instructions().append(src2Index);
734 instructions().append(target->bind(begin, instructions().size()));
735 return target;
736 }
737 } else if (m_lastOpcodeID == op_greater) {
738 int dstIndex;
739 int src1Index;
740 int src2Index;
741
742 retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
743
744 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
745 rewindBinaryOp();
746
747 size_t begin = instructions().size();
748 emitOpcode(op_jgreater);
749 instructions().append(src1Index);
750 instructions().append(src2Index);
751 instructions().append(target->bind(begin, instructions().size()));
752 return target;
753 }
754 } else if (m_lastOpcodeID == op_greatereq) {
755 int dstIndex;
756 int src1Index;
757 int src2Index;
758
759 retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
760
761 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
762 rewindBinaryOp();
763
764 size_t begin = instructions().size();
765 emitOpcode(op_jgreatereq);
766 instructions().append(src1Index);
767 instructions().append(src2Index);
768 instructions().append(target->bind(begin, instructions().size()));
769 return target;
770 }
771 } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
772 int dstIndex;
773 int srcIndex;
774
775 retrieveLastUnaryOp(dstIndex, srcIndex);
776
777 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
778 rewindUnaryOp();
779
780 size_t begin = instructions().size();
781 emitOpcode(op_jeq_null);
782 instructions().append(srcIndex);
783 instructions().append(target->bind(begin, instructions().size()));
784 return target;
785 }
786 } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
787 int dstIndex;
788 int srcIndex;
789
790 retrieveLastUnaryOp(dstIndex, srcIndex);
791
792 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
793 rewindUnaryOp();
794
795 size_t begin = instructions().size();
796 emitOpcode(op_jneq_null);
797 instructions().append(srcIndex);
798 instructions().append(target->bind(begin, instructions().size()));
799 return target;
800 }
801 }
802
803 size_t begin = instructions().size();
804
805 emitOpcode(op_jtrue);
806 instructions().append(cond->index());
807 instructions().append(target->bind(begin, instructions().size()));
808 return target;
809 }
810
811 PassRefPtr<Label> BytecodeGenerator::emitJumpIfFalse(RegisterID* cond, Label* target)
812 {
813 if (m_lastOpcodeID == op_less && target->isForward()) {
814 int dstIndex;
815 int src1Index;
816 int src2Index;
817
818 retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
819
820 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
821 rewindBinaryOp();
822
823 size_t begin = instructions().size();
824 emitOpcode(op_jnless);
825 instructions().append(src1Index);
826 instructions().append(src2Index);
827 instructions().append(target->bind(begin, instructions().size()));
828 return target;
829 }
830 } else if (m_lastOpcodeID == op_lesseq && target->isForward()) {
831 int dstIndex;
832 int src1Index;
833 int src2Index;
834
835 retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
836
837 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
838 rewindBinaryOp();
839
840 size_t begin = instructions().size();
841 emitOpcode(op_jnlesseq);
842 instructions().append(src1Index);
843 instructions().append(src2Index);
844 instructions().append(target->bind(begin, instructions().size()));
845 return target;
846 }
847 } else if (m_lastOpcodeID == op_greater && target->isForward()) {
848 int dstIndex;
849 int src1Index;
850 int src2Index;
851
852 retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
853
854 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
855 rewindBinaryOp();
856
857 size_t begin = instructions().size();
858 emitOpcode(op_jngreater);
859 instructions().append(src1Index);
860 instructions().append(src2Index);
861 instructions().append(target->bind(begin, instructions().size()));
862 return target;
863 }
864 } else if (m_lastOpcodeID == op_greatereq && target->isForward()) {
865 int dstIndex;
866 int src1Index;
867 int src2Index;
868
869 retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
870
871 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
872 rewindBinaryOp();
873
874 size_t begin = instructions().size();
875 emitOpcode(op_jngreatereq);
876 instructions().append(src1Index);
877 instructions().append(src2Index);
878 instructions().append(target->bind(begin, instructions().size()));
879 return target;
880 }
881 } else if (m_lastOpcodeID == op_not) {
882 int dstIndex;
883 int srcIndex;
884
885 retrieveLastUnaryOp(dstIndex, srcIndex);
886
887 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
888 rewindUnaryOp();
889
890 size_t begin = instructions().size();
891 emitOpcode(op_jtrue);
892 instructions().append(srcIndex);
893 instructions().append(target->bind(begin, instructions().size()));
894 return target;
895 }
896 } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
897 int dstIndex;
898 int srcIndex;
899
900 retrieveLastUnaryOp(dstIndex, srcIndex);
901
902 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
903 rewindUnaryOp();
904
905 size_t begin = instructions().size();
906 emitOpcode(op_jneq_null);
907 instructions().append(srcIndex);
908 instructions().append(target->bind(begin, instructions().size()));
909 return target;
910 }
911 } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
912 int dstIndex;
913 int srcIndex;
914
915 retrieveLastUnaryOp(dstIndex, srcIndex);
916
917 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
918 rewindUnaryOp();
919
920 size_t begin = instructions().size();
921 emitOpcode(op_jeq_null);
922 instructions().append(srcIndex);
923 instructions().append(target->bind(begin, instructions().size()));
924 return target;
925 }
926 }
927
928 size_t begin = instructions().size();
929 emitOpcode(op_jfalse);
930 instructions().append(cond->index());
931 instructions().append(target->bind(begin, instructions().size()));
932 return target;
933 }
934
PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionCall(RegisterID* cond, Label* target)
{
    // Emit op_jneq_ptr: jump to 'target' unless 'cond' holds the special
    // Function.prototype.call pointer (Special::CallFunction). 'begin' is the
    // instruction offset the jump operand is encoded relative to.
    size_t begin = instructions().size();

    emitOpcode(op_jneq_ptr);
    instructions().append(cond->index());
    instructions().append(Special::CallFunction);
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}
945
PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionApply(RegisterID* cond, Label* target)
{
    // Emit op_jneq_ptr: jump to 'target' unless 'cond' holds the special
    // Function.prototype.apply pointer (Special::ApplyFunction). Mirrors
    // emitJumpIfNotFunctionCall above.
    size_t begin = instructions().size();

    emitOpcode(op_jneq_ptr);
    instructions().append(cond->index());
    instructions().append(Special::ApplyFunction);
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}
956
957 unsigned BytecodeGenerator::addConstant(const Identifier& ident)
958 {
959 StringImpl* rep = ident.impl();
960 IdentifierMap::AddResult result = m_identifierMap.add(rep, m_codeBlock->numberOfIdentifiers());
961 if (result.isNewEntry)
962 m_codeBlock->addIdentifier(Identifier(m_vm, rep));
963
964 return result.iterator->value;
965 }
966
// We can't hash JSValue(), so we use a dedicated data member to cache it.
RegisterID* BytecodeGenerator::addConstantEmptyValue()
{
    // Lazily allocate a single constant-pool slot holding the empty JSValue
    // and remember its register so later calls reuse it.
    if (!m_emptyValueRegister) {
        int index = m_nextConstantOffset;
        m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset);
        ++m_nextConstantOffset;
        m_codeBlock->addConstant(JSValue());
        m_emptyValueRegister = &m_constantPoolRegisters[index];
    }

    return m_emptyValueRegister;
}
980
981 RegisterID* BytecodeGenerator::addConstantValue(JSValue v)
982 {
983 if (!v)
984 return addConstantEmptyValue();
985
986 int index = m_nextConstantOffset;
987 JSValueMap::AddResult result = m_jsValueMap.add(JSValue::encode(v), m_nextConstantOffset);
988 if (result.isNewEntry) {
989 m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset);
990 ++m_nextConstantOffset;
991 m_codeBlock->addConstant(v);
992 } else
993 index = result.iterator->value;
994 return &m_constantPoolRegisters[index];
995 }
996
unsigned BytecodeGenerator::addRegExp(RegExp* r)
{
    // Thin forwarding wrapper: register the RegExp with the code block and
    // return its index for use as a bytecode operand.
    return m_codeBlock->addRegExp(r);
}
1001
RegisterID* BytecodeGenerator::emitMove(RegisterID* dst, RegisterID* src)
{
    // Inform the static property analyzer that dst now aliases src before
    // emitting the op_mov.
    m_staticPropertyAnalyzer.mov(dst->index(), src->index());

    emitOpcode(op_mov);
    instructions().append(dst->index());
    instructions().append(src->index());
    return dst;
}
1011
RegisterID* BytecodeGenerator::emitUnaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src)
{
    // Generic emitter for any unary opcode laid out as (opcode, dst, src).
    emitOpcode(opcodeID);
    instructions().append(dst->index());
    instructions().append(src->index());
    return dst;
}
1019
RegisterID* BytecodeGenerator::emitInc(RegisterID* srcDst)
{
    // op_inc increments its single operand in place (srcDst is both source
    // and destination).
    emitOpcode(op_inc);
    instructions().append(srcDst->index());
    return srcDst;
}
1026
RegisterID* BytecodeGenerator::emitDec(RegisterID* srcDst)
{
    // op_dec decrements its single operand in place, mirroring emitInc.
    emitOpcode(op_dec);
    instructions().append(srcDst->index());
    return srcDst;
}
1033
RegisterID* BytecodeGenerator::emitBinaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2, OperandTypes types)
{
    // Generic emitter for binary opcodes laid out as (opcode, dst, src1, src2).
    emitOpcode(opcodeID);
    instructions().append(dst->index());
    instructions().append(src1->index());
    instructions().append(src2->index());

    // The arithmetic and bitwise ops carry one extra operand word encoding the
    // statically predicted operand types; other binary ops do not.
    if (opcodeID == op_bitor || opcodeID == op_bitand || opcodeID == op_bitxor ||
        opcodeID == op_add || opcodeID == op_mul || opcodeID == op_sub || opcodeID == op_div)
        instructions().append(types.toInt());

    return dst;
}
1047
1048 RegisterID* BytecodeGenerator::emitEqualityOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2)
1049 {
1050 if (m_lastOpcodeID == op_typeof) {
1051 int dstIndex;
1052 int srcIndex;
1053
1054 retrieveLastUnaryOp(dstIndex, srcIndex);
1055
1056 if (src1->index() == dstIndex
1057 && src1->isTemporary()
1058 && m_codeBlock->isConstantRegisterIndex(src2->index())
1059 && m_codeBlock->constantRegister(src2->index()).get().isString()) {
1060 const String& value = asString(m_codeBlock->constantRegister(src2->index()).get())->tryGetValue();
1061 if (value == "undefined") {
1062 rewindUnaryOp();
1063 emitOpcode(op_is_undefined);
1064 instructions().append(dst->index());
1065 instructions().append(srcIndex);
1066 return dst;
1067 }
1068 if (value == "boolean") {
1069 rewindUnaryOp();
1070 emitOpcode(op_is_boolean);
1071 instructions().append(dst->index());
1072 instructions().append(srcIndex);
1073 return dst;
1074 }
1075 if (value == "number") {
1076 rewindUnaryOp();
1077 emitOpcode(op_is_number);
1078 instructions().append(dst->index());
1079 instructions().append(srcIndex);
1080 return dst;
1081 }
1082 if (value == "string") {
1083 rewindUnaryOp();
1084 emitOpcode(op_is_string);
1085 instructions().append(dst->index());
1086 instructions().append(srcIndex);
1087 return dst;
1088 }
1089 if (value == "object") {
1090 rewindUnaryOp();
1091 emitOpcode(op_is_object);
1092 instructions().append(dst->index());
1093 instructions().append(srcIndex);
1094 return dst;
1095 }
1096 if (value == "function") {
1097 rewindUnaryOp();
1098 emitOpcode(op_is_function);
1099 instructions().append(dst->index());
1100 instructions().append(srcIndex);
1101 return dst;
1102 }
1103 }
1104 }
1105
1106 emitOpcode(opcodeID);
1107 instructions().append(dst->index());
1108 instructions().append(src1->index());
1109 instructions().append(src2->index());
1110 return dst;
1111 }
1112
RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, bool b)
{
    // Convenience overload: load a boolean constant via the JSValue path.
    return emitLoad(dst, jsBoolean(b));
}
1117
1118 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, double number)
1119 {
1120 // FIXME: Our hash tables won't hold infinity, so we make a new JSValue each time.
1121 // Later we can do the extra work to handle that like the other cases. They also don't
1122 // work correctly with NaN as a key.
1123 if (std::isnan(number) || number == HashTraits<double>::emptyValue() || HashTraits<double>::isDeletedValue(number))
1124 return emitLoad(dst, jsNumber(number));
1125 JSValue& valueInMap = m_numberMap.add(number, JSValue()).iterator->value;
1126 if (!valueInMap)
1127 valueInMap = jsNumber(number);
1128 return emitLoad(dst, valueInMap);
1129 }
1130
RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, const Identifier& identifier)
{
    // Cache one JSString per identifier so repeated string loads share a
    // single constant. jsOwnedString keeps the underlying StringImpl alive.
    JSString*& stringInMap = m_stringMap.add(identifier.impl(), 0).iterator->value;
    if (!stringInMap)
        stringInMap = jsOwnedString(vm(), identifier.string());
    return emitLoad(dst, JSValue(stringInMap));
}
1138
1139 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, JSValue v)
1140 {
1141 RegisterID* constantID = addConstantValue(v);
1142 if (dst)
1143 return emitMove(dst, constantID);
1144 return constantID;
1145 }
1146
RegisterID* BytecodeGenerator::emitLoadGlobalObject(RegisterID* dst)
{
    // Lazily reserve a constant-pool slot for the global object. The slot is
    // added with an empty JSValue placeholder; setGlobalObjectRegister tells
    // the code block which slot to fill in at link time.
    if (!m_globalObjectRegister) {
        int index = m_nextConstantOffset;
        m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset);
        ++m_nextConstantOffset;
        m_codeBlock->addConstant(JSValue());
        m_globalObjectRegister = &m_constantPoolRegisters[index];
        m_codeBlock->setGlobalObjectRegister(index);
    }
    if (dst)
        emitMove(dst, m_globalObjectRegister);
    return m_globalObjectRegister;
}
1161
ResolveResult BytecodeGenerator::resolve(const Identifier& property)
{
    // Decide how 'property' can be resolved: a local register, a statically
    // known scope slot (lexical), or a fully dynamic runtime lookup.
    if (property == propertyNames().thisIdentifier)
        return ResolveResult::registerResolve(thisRegister(), ResolveResult::ReadOnlyFlag);

    // Check if the property should be allocated in a register.
    if (m_codeType != GlobalCode && shouldOptimizeLocals() && m_symbolTable) {
        SymbolTableEntry entry = symbolTable().get(property.impl());
        if (!entry.isNull()) {
            // Touching 'arguments' forces materialization of the arguments object.
            if (property == propertyNames().arguments)
                createArgumentsIfNecessary();
            unsigned flags = entry.isReadOnly() ? ResolveResult::ReadOnlyFlag : 0;
            RegisterID* local = createLazyRegisterIfNecessary(&registerFor(entry.getIndex()));
            return ResolveResult::registerResolve(local, flags);
        }
    }
    // Cases where we cannot statically optimize the lookup.
    if (property == propertyNames().arguments || !canOptimizeNonLocals())
        return ResolveResult::dynamicResolve();

    // Debug hooks and non-function code also force the dynamic path.
    if (!m_scope || m_codeType != FunctionCode || m_shouldEmitDebugHooks)
        return ResolveResult::dynamicResolve();

    // Walk the static scope chain looking for the property.
    ScopeChainIterator iter = m_scope->begin();
    ScopeChainIterator end = m_scope->end();
    size_t depth = m_codeBlock->needsFullScopeChain();
    unsigned flags = 0;
    for (; iter != end; ++iter, ++depth) {
        JSObject* currentScope = iter.get();
        // Any non-static scope object defeats static resolution.
        if (!currentScope->isStaticScopeObject())
            return ResolveResult::dynamicResolve();

        JSSymbolTableObject* currentVariableObject = jsCast<JSSymbolTableObject*>(currentScope);
        SymbolTableEntry entry = currentVariableObject->symbolTable()->get(property.impl());

        // Found the property
        if (!entry.isNull()) {
            if (entry.isReadOnly())
                flags |= ResolveResult::ReadOnlyFlag;
            // If this is the outermost scope, fall back to dynamic resolution.
            if (++iter == end)
                return ResolveResult::dynamicResolve();
#if !ASSERT_DISABLED
            if (JSActivation* activation = jsDynamicCast<JSActivation*>(currentVariableObject))
                ASSERT(activation->isValid(entry));
#endif
            return ResolveResult::lexicalResolve(entry.getIndex(), depth, flags);
        }
        bool scopeRequiresDynamicChecks = false;
        if (currentVariableObject->isDynamicScope(scopeRequiresDynamicChecks))
            break;
        if (scopeRequiresDynamicChecks)
            flags |= ResolveResult::DynamicFlag;
    }

    return ResolveResult::dynamicResolve();
}
1218
ResolveResult BytecodeGenerator::resolveConstDecl(const Identifier& property)
{
    // Register-allocated const declarations.
    // Unlike resolve(), this only considers the current function's symbol
    // table; anything else falls back to dynamic resolution.
    if (m_codeType == FunctionCode && m_symbolTable) {
        SymbolTableEntry entry = symbolTable().get(property.impl());
        if (!entry.isNull()) {
            unsigned flags = entry.isReadOnly() ? ResolveResult::ReadOnlyFlag : 0;
            RegisterID* local = createLazyRegisterIfNecessary(&registerFor(entry.getIndex()));
            return ResolveResult::registerResolve(local, flags);
        }
    }

    return ResolveResult::dynamicResolve();
}
1233
void BytecodeGenerator::emitCheckHasInstance(RegisterID* dst, RegisterID* value, RegisterID* base, Label* target)
{
    // Emit op_check_has_instance with a jump target; part of the instanceof
    // lowering that precedes op_instanceof.
    size_t begin = instructions().size();
    emitOpcode(op_check_has_instance);
    instructions().append(dst->index());
    instructions().append(value->index());
    instructions().append(base->index());
    instructions().append(target->bind(begin, instructions().size()));
}
1243
RegisterID* BytecodeGenerator::emitInstanceOf(RegisterID* dst, RegisterID* value, RegisterID* basePrototype)
{
    // Emit op_instanceof: dst = value instanceof-check against basePrototype.
    emitOpcode(op_instanceof);
    instructions().append(dst->index());
    instructions().append(value->index());
    instructions().append(basePrototype->index());
    return dst;
}
1252
bool BytecodeGenerator::shouldAvoidResolveGlobal()
{
    // Only safe to use the fast global-resolve path when no label scopes are
    // active. (NOTE(review): the reason label scopes inhibit this is not
    // visible here — confirm against callers before relying on it.)
    return !m_labelScopes.size();
}
1257
RegisterID* BytecodeGenerator::emitResolve(RegisterID* dst, const ResolveResult& resolveResult, const Identifier& property)
{

    // Static resolutions read directly from a register or scope slot.
    if (resolveResult.isStatic())
        return emitGetStaticVar(dst, resolveResult, property);

    // Otherwise emit a profiled op_resolve with precomputed resolve operations.
    UnlinkedValueProfile profile = emitProfiledOpcode(op_resolve);
    instructions().append(kill(dst));
    instructions().append(addConstant(property));
    instructions().append(getResolveOperations(property));
    instructions().append(profile);
    return dst;
}
1271
RegisterID* BytecodeGenerator::emitResolveBase(RegisterID* dst, const ResolveResult& resolveResult, const Identifier& property)
{
    if (!resolveResult.isDynamic()) {
        // Global object is the base
        return emitLoadGlobalObject(dst);
    }

    // We can't optimise at all :-(
    UnlinkedValueProfile profile = emitProfiledOpcode(op_resolve_base);
    instructions().append(kill(dst));
    instructions().append(addConstant(property));
    instructions().append(false); // non-strict variant (contrast emitResolveBaseForPut)
    instructions().append(getResolveBaseOperations(property));
    instructions().append(0); // no put-to-base info for a read-only base resolve
    instructions().append(profile);
    return dst;
}
1289
RegisterID* BytecodeGenerator::emitResolveBaseForPut(RegisterID* dst, const ResolveResult&, const Identifier& property, NonlocalResolveInfo& verifier)
{
    // We can't optimise at all :-(
    // Same layout as emitResolveBase, but records strict-mode-ness and a
    // put-to-base index that the verifier tracks for the later emitPutToBase.
    UnlinkedValueProfile profile = emitProfiledOpcode(op_resolve_base);
    instructions().append(kill(dst));
    instructions().append(addConstant(property));
    instructions().append(m_codeBlock->isStrictMode());
    uint32_t putToBaseIndex = 0;
    instructions().append(getResolveBaseForPutOperations(property, putToBaseIndex));
    verifier.resolved(putToBaseIndex);
    instructions().append(putToBaseIndex);
    instructions().append(profile);
    return dst;
}
1304
RegisterID* BytecodeGenerator::emitResolveWithBaseForPut(RegisterID* baseDst, RegisterID* propDst, const ResolveResult& resolveResult, const Identifier& property, NonlocalResolveInfo& verifier)
{
    // Resolve both the base object (into baseDst) and the property value
    // (into propDst) for a subsequent put. Static resolutions must not reach
    // here; callers handle those separately.
    ASSERT_UNUSED(resolveResult, !resolveResult.isStatic());
    UnlinkedValueProfile profile = emitProfiledOpcode(op_resolve_with_base);
    instructions().append(kill(baseDst));
    instructions().append(propDst->index());
    instructions().append(addConstant(property));
    uint32_t putToBaseIndex = 0;
    instructions().append(getResolveWithBaseForPutOperations(property, putToBaseIndex));
    verifier.resolved(putToBaseIndex);
    instructions().append(putToBaseIndex);
    instructions().append(profile);
    return baseDst;
}
1319
RegisterID* BytecodeGenerator::emitResolveWithThis(RegisterID* baseDst, RegisterID* propDst, const ResolveResult& resolveResult, const Identifier& property)
{
    // Resolve a property and the 'this' value to use when calling it.
    // For static resolutions the callee's 'this' is undefined.
    if (resolveResult.isStatic()) {
        emitLoad(baseDst, jsUndefined());
        emitGetStaticVar(propDst, resolveResult, property);
        return baseDst;
    }

    UnlinkedValueProfile profile = emitProfiledOpcode(op_resolve_with_this);
    instructions().append(kill(baseDst));
    instructions().append(propDst->index());
    instructions().append(addConstant(property));
    instructions().append(getResolveWithThisOperations(property));
    instructions().append(profile);
    return baseDst;
}
1336
RegisterID* BytecodeGenerator::emitGetStaticVar(RegisterID* dst, const ResolveResult& resolveResult, const Identifier&)
{
    // Load a statically resolved variable: either a plain register move or a
    // profiled op_get_scoped_var addressed by (index, depth).
    ASSERT(m_codeType == FunctionCode);
    switch (resolveResult.type()) {
    case ResolveResult::Register:
    case ResolveResult::ReadOnlyRegister:
        if (dst == ignoredResult())
            return 0;
        return moveToDestinationIfNeeded(dst, resolveResult.local());

    case ResolveResult::Lexical:
    case ResolveResult::ReadOnlyLexical: {
        UnlinkedValueProfile profile = emitProfiledOpcode(op_get_scoped_var);
        instructions().append(dst->index());
        instructions().append(resolveResult.index());
        instructions().append(resolveResult.depth());
        instructions().append(profile);
        return dst;
    }

    default:
        RELEASE_ASSERT_NOT_REACHED();
        return 0;
    }
}
1362
RegisterID* BytecodeGenerator::emitInitGlobalConst(const Identifier& identifier, RegisterID* value)
{
    // Emit the nop form of global-const initialization; the zero operands are
    // placeholders. (NOTE(review): presumably patched to a real
    // op_init_global_const at link time — confirm against the linker.)
    ASSERT(m_codeType == GlobalCode);
    emitOpcode(op_init_global_const_nop);
    instructions().append(0);
    instructions().append(value->index());
    instructions().append(0);
    instructions().append(addConstant(identifier));
    return value;
}
1373
RegisterID* BytecodeGenerator::emitPutStaticVar(const ResolveResult& resolveResult, const Identifier&, RegisterID* value)
{
    // Store to a statically resolved variable: a register move, or an
    // op_put_scoped_var addressed by (index, depth). Mirrors emitGetStaticVar.
    ASSERT(m_codeType == FunctionCode);
    switch (resolveResult.type()) {
    case ResolveResult::Register:
    case ResolveResult::ReadOnlyRegister:
        return moveToDestinationIfNeeded(resolveResult.local(), value);

    case ResolveResult::Lexical:
    case ResolveResult::ReadOnlyLexical:
        emitOpcode(op_put_scoped_var);
        instructions().append(resolveResult.index());
        instructions().append(resolveResult.depth());
        instructions().append(value->index());
        return value;

    default:
        RELEASE_ASSERT_NOT_REACHED();
        return 0;
    }
}
1395
RegisterID* BytecodeGenerator::emitGetById(RegisterID* dst, RegisterID* base, const Identifier& property)
{
    // Record this instruction's offset so the code block knows where property
    // accesses live, then emit a profiled op_get_by_id.
    m_codeBlock->addPropertyAccessInstruction(instructions().size());

    UnlinkedValueProfile profile = emitProfiledOpcode(op_get_by_id);
    instructions().append(kill(dst));
    instructions().append(base->index());
    instructions().append(addConstant(property));
    // Four zeroed operand slots follow; presumably reserved for inline-cache
    // metadata filled in at link/run time — confirm against the interpreter.
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(profile);
    return dst;
}
1411
RegisterID* BytecodeGenerator::emitGetArgumentsLength(RegisterID* dst, RegisterID* base)
{
    // Specialized fast path for 'arguments.length'; 'base' must be the
    // function's arguments register.
    emitOpcode(op_get_arguments_length);
    instructions().append(dst->index());
    ASSERT(base->index() == m_codeBlock->argumentsRegister());
    instructions().append(base->index());
    instructions().append(addConstant(propertyNames().length));
    return dst;
}
1421
RegisterID* BytecodeGenerator::emitPutById(RegisterID* base, const Identifier& property, RegisterID* value)
{
    // Emit a (non-direct) op_put_by_id, notifying the static property
    // analyzer and recording the instruction offset for property access
    // bookkeeping.
    unsigned propertyIndex = addConstant(property);

    m_staticPropertyAnalyzer.putById(base->index(), propertyIndex);

    m_codeBlock->addPropertyAccessInstruction(instructions().size());

    emitOpcode(op_put_by_id);
    instructions().append(base->index());
    instructions().append(propertyIndex);
    instructions().append(value->index());
    // Five zeroed trailing operands; compare emitDirectPutById, which sets the
    // last one — presumably inline-cache slots plus a direct-put flag.
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    return value;
}
1441
RegisterID* BytecodeGenerator::emitPutToBase(RegisterID* base, const Identifier& property, RegisterID* value, NonlocalResolveInfo& resolveInfo)
{
    // Store through a base previously produced by emitResolveBaseForPut /
    // emitResolveWithBaseForPut; resolveInfo.put() supplies the matching
    // put-to-base operation index.
    emitOpcode(op_put_to_base);
    instructions().append(base->index());
    instructions().append(addConstant(property));
    instructions().append(value->index());
    instructions().append(resolveInfo.put());
    return value;
}
1451
RegisterID* BytecodeGenerator::emitDirectPutById(RegisterID* base, const Identifier& property, RegisterID* value)
{
    // Like emitPutById, but the final operand is non-zero when the put can be
    // treated as a direct (own-property) store: not __proto__ and not an
    // array-index-like name.
    unsigned propertyIndex = addConstant(property);

    m_staticPropertyAnalyzer.putById(base->index(), propertyIndex);

    m_codeBlock->addPropertyAccessInstruction(instructions().size());

    emitOpcode(op_put_by_id);
    instructions().append(base->index());
    instructions().append(propertyIndex);
    instructions().append(value->index());
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(
        property != m_vm->propertyNames->underscoreProto
        && PropertyName(property).asIndex() == PropertyName::NotAnIndex);
    return value;
}
1473
void BytecodeGenerator::emitPutGetterSetter(RegisterID* base, const Identifier& property, RegisterID* getter, RegisterID* setter)
{
    // Install an accessor pair on 'base' for 'property' via
    // op_put_getter_setter; also informs the static property analyzer.
    unsigned propertyIndex = addConstant(property);

    m_staticPropertyAnalyzer.putById(base->index(), propertyIndex);

    emitOpcode(op_put_getter_setter);
    instructions().append(base->index());
    instructions().append(propertyIndex);
    instructions().append(getter->index());
    instructions().append(setter->index());
}
1486
RegisterID* BytecodeGenerator::emitDeleteById(RegisterID* dst, RegisterID* base, const Identifier& property)
{
    // Emit op_del_by_id: dst receives the boolean result of the delete.
    emitOpcode(op_del_by_id);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(addConstant(property));
    return dst;
}
1495
RegisterID* BytecodeGenerator::emitGetArgumentByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
{
    // Specialized indexed read from the arguments object; 'base' must be the
    // function's arguments register. Carries both an array profile and a
    // value profile.
    UnlinkedArrayProfile arrayProfile = newArrayProfile();
    UnlinkedValueProfile profile = emitProfiledOpcode(op_get_argument_by_val);
    instructions().append(kill(dst));
    ASSERT(base->index() == m_codeBlock->argumentsRegister());
    instructions().append(base->index());
    instructions().append(property->index());
    instructions().append(arrayProfile);
    instructions().append(profile);
    return dst;
}
1508
RegisterID* BytecodeGenerator::emitGetByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
{
    // If 'property' is the current variable of an enclosing for-in loop, emit
    // the faster op_get_by_pname, which can reuse the loop's cached property
    // name state (expected subscript, iterator, index registers).
    for (size_t i = m_forInContextStack.size(); i > 0; i--) {
        ForInContext& context = m_forInContextStack[i - 1];
        if (context.propertyRegister == property) {
            emitOpcode(op_get_by_pname);
            instructions().append(dst->index());
            instructions().append(base->index());
            instructions().append(property->index());
            instructions().append(context.expectedSubscriptRegister->index());
            instructions().append(context.iterRegister->index());
            instructions().append(context.indexRegister->index());
            return dst;
        }
    }
    // General case: profiled op_get_by_val with an array profile.
    UnlinkedArrayProfile arrayProfile = newArrayProfile();
    UnlinkedValueProfile profile = emitProfiledOpcode(op_get_by_val);
    instructions().append(kill(dst));
    instructions().append(base->index());
    instructions().append(property->index());
    instructions().append(arrayProfile);
    instructions().append(profile);
    return dst;
}
1533
RegisterID* BytecodeGenerator::emitPutByVal(RegisterID* base, RegisterID* property, RegisterID* value)
{
    // Emit op_put_by_val with an array profile: base[property] = value.
    UnlinkedArrayProfile arrayProfile = newArrayProfile();
    emitOpcode(op_put_by_val);
    instructions().append(base->index());
    instructions().append(property->index());
    instructions().append(value->index());
    instructions().append(arrayProfile);
    return value;
}
1544
RegisterID* BytecodeGenerator::emitDeleteByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
{
    // Emit op_del_by_val: dst receives the boolean result of the delete.
    emitOpcode(op_del_by_val);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(property->index());
    return dst;
}
1553
RegisterID* BytecodeGenerator::emitPutByIndex(RegisterID* base, unsigned index, RegisterID* value)
{
    // Emit op_put_by_index: store to a compile-time-known integer index.
    emitOpcode(op_put_by_index);
    instructions().append(base->index());
    instructions().append(index);
    instructions().append(value->index());
    return value;
}
1562
RegisterID* BytecodeGenerator::emitCreateThis(RegisterID* dst)
{
    // Load the callee, then emit op_create_this into the 'this' register.
    // Note: the result is written to m_thisRegister, not to 'dst'; 'dst' is
    // only the returned handle. (NOTE(review): presumably callers pass the
    // this-register as dst — confirm.)
    RefPtr<RegisterID> func = newTemporary();

    UnlinkedValueProfile profile = emitProfiledOpcode(op_get_callee);
    instructions().append(func->index());
    instructions().append(profile);

    // begin + 3 is the offset of the trailing zero operand the analyzer may
    // later patch with an inferred inline capacity.
    size_t begin = instructions().size();
    m_staticPropertyAnalyzer.createThis(m_thisRegister.index(), begin + 3);

    emitOpcode(op_create_this);
    instructions().append(m_thisRegister.index());
    instructions().append(func->index());
    instructions().append(0);
    return dst;
}
1580
RegisterID* BytecodeGenerator::emitNewObject(RegisterID* dst)
{
    // begin + 2 is the offset of the zero operand the static property
    // analyzer may later patch (e.g. with an inferred inline capacity).
    size_t begin = instructions().size();
    m_staticPropertyAnalyzer.newObject(dst->index(), begin + 2);

    emitOpcode(op_new_object);
    instructions().append(dst->index());
    instructions().append(0);
    instructions().append(newObjectAllocationProfile());
    return dst;
}
1592
unsigned BytecodeGenerator::addConstantBuffer(unsigned length)
{
    // Thin forwarding wrapper: reserve a constant buffer of 'length' slots in
    // the code block and return its index.
    return m_codeBlock->addConstantBuffer(length);
}
1597
JSString* BytecodeGenerator::addStringConstant(const Identifier& identifier)
{
    // One JSString per identifier, cached in m_stringMap; newly created
    // strings are also interned in the constant pool so they stay alive.
    JSString*& stringInMap = m_stringMap.add(identifier.impl(), 0).iterator->value;
    if (!stringInMap) {
        stringInMap = jsString(vm(), identifier.string());
        addConstantValue(stringInMap);
    }
    return stringInMap;
}
1607
RegisterID* BytecodeGenerator::emitNewArray(RegisterID* dst, ElementNode* elements, unsigned length)
{
    // Fast path: if the first 'length' elements are all constants with no
    // elisions, pack them into a constant buffer and emit op_new_array_buffer.
    // Otherwise fall through to op_new_array with element registers.
#if !ASSERT_DISABLED
    unsigned checkLength = 0;
#endif
    bool hadVariableExpression = false;
    if (length) {
        for (ElementNode* n = elements; n; n = n->next()) {
            if (!n->value()->isConstant()) {
                hadVariableExpression = true;
                break;
            }
            if (n->elision())
                break;
#if !ASSERT_DISABLED
            checkLength++;
#endif
        }
        if (!hadVariableExpression) {
            ASSERT(length == checkLength);
            unsigned constantBufferIndex = addConstantBuffer(length);
            JSValue* constantBuffer = m_codeBlock->constantBuffer(constantBufferIndex).data();
            unsigned index = 0;
            for (ElementNode* n = elements; index < length; n = n->next()) {
                ASSERT(n->value()->isConstant());
                constantBuffer[index++] = static_cast<ConstantNode*>(n->value())->jsValue(*this);
            }
            emitOpcode(op_new_array_buffer);
            instructions().append(dst->index());
            instructions().append(constantBufferIndex);
            instructions().append(length);
            instructions().append(newArrayAllocationProfile());
            return dst;
        }
    }

    // Slow path: evaluate leading (pre-elision) elements into consecutive
    // temporaries and emit op_new_array over that register range.
    Vector<RefPtr<RegisterID>, 16, UnsafeVectorOverflow> argv;
    for (ElementNode* n = elements; n; n = n->next()) {
        if (n->elision())
            break;
        argv.append(newTemporary());
        // op_new_array requires the initial values to be a sequential range of registers
        ASSERT(argv.size() == 1 || argv[argv.size() - 1]->index() == argv[argv.size() - 2]->index() + 1);
        emitNode(argv.last().get(), n->value());
    }
    emitOpcode(op_new_array);
    instructions().append(dst->index());
    instructions().append(argv.size() ? argv[0]->index() : 0); // argv
    instructions().append(argv.size()); // argc
    instructions().append(newArrayAllocationProfile());
    return dst;
}
1660
RegisterID* BytecodeGenerator::emitNewFunction(RegisterID* dst, FunctionBodyNode* function)
{
    // Eagerly register the function declaration and emit the creation opcode
    // (no null check needed for the eager path).
    return emitNewFunctionInternal(dst, m_codeBlock->addFunctionDecl(makeFunction(function)), false);
}
1665
RegisterID* BytecodeGenerator::emitLazyNewFunction(RegisterID* dst, FunctionBodyNode* function)
{
    // Register the declaration at most once per body node (cached in
    // m_functionOffsets), then emit with doNullCheck=true so the runtime can
    // skip re-creation if the function object already exists.
    FunctionOffsetMap::AddResult ptr = m_functionOffsets.add(function, 0);
    if (ptr.isNewEntry)
        ptr.iterator->value = m_codeBlock->addFunctionDecl(makeFunction(function));
    return emitNewFunctionInternal(dst, ptr.iterator->value, true);
}
1673
RegisterID* BytecodeGenerator::emitNewFunctionInternal(RegisterID* dst, unsigned index, bool doNullCheck)
{
    // Shared tail for emitNewFunction / emitLazyNewFunction: ensure an
    // activation exists, then emit op_new_func (decl index + null-check flag).
    createActivationIfNecessary();
    emitOpcode(op_new_func);
    instructions().append(dst->index());
    instructions().append(index);
    instructions().append(doNullCheck);
    return dst;
}
1683
RegisterID* BytecodeGenerator::emitNewRegExp(RegisterID* dst, RegExp* regExp)
{
    // Emit op_new_regexp referencing the code block's regexp table.
    emitOpcode(op_new_regexp);
    instructions().append(dst->index());
    instructions().append(addRegExp(regExp));
    return dst;
}
1691
RegisterID* BytecodeGenerator::emitNewFunctionExpression(RegisterID* r0, FuncExprNode* n)
{
    // Function expressions are registered via addFunctionExpr (not
    // addFunctionDecl) and emitted with op_new_func_exp — no null-check flag.
    FunctionBodyNode* function = n->body();
    unsigned index = m_codeBlock->addFunctionExpr(makeFunction(function));

    createActivationIfNecessary();
    emitOpcode(op_new_func_exp);
    instructions().append(r0->index());
    instructions().append(index);
    return r0;
}
1703
RegisterID* BytecodeGenerator::emitCall(RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset, unsigned line, unsigned lineStart)
{
    // Convenience overload: a plain call is the op_call form of the general
    // emitCall(OpcodeID, ...) below.
    return emitCall(op_call, dst, func, expectedFunction, callArguments, divot, startOffset, endOffset, line, lineStart);
}
1708
1709 void BytecodeGenerator::createArgumentsIfNecessary()
1710 {
1711 if (m_codeType != FunctionCode)
1712 return;
1713
1714 if (!m_codeBlock->usesArguments())
1715 return;
1716
1717 // If we're in strict mode we tear off the arguments on function
1718 // entry, so there's no need to check if we need to create them
1719 // now
1720 if (m_codeBlock->isStrictMode())
1721 return;
1722
1723 emitOpcode(op_create_arguments);
1724 instructions().append(m_codeBlock->argumentsRegister());
1725 }
1726
1727 void BytecodeGenerator::createActivationIfNecessary()
1728 {
1729 if (m_hasCreatedActivation)
1730 return;
1731 if (!m_codeBlock->needsFullScopeChain())
1732 return;
1733 emitOpcode(op_create_activation);
1734 instructions().append(m_activationRegister->index());
1735 }
1736
RegisterID* BytecodeGenerator::emitCallEval(RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset, unsigned line, unsigned lineStart)
{
    // Eval calls use op_call_eval and never have an expected-function fast path.
    return emitCall(op_call_eval, dst, func, NoExpectedFunction, callArguments, divot, startOffset, endOffset, line, lineStart);
}
1741
ExpectedFunction BytecodeGenerator::expectedFunctionForIdentifier(const Identifier& identifier)
{
    // Map the callee's name to a constructor we can speculatively inline
    // (see emitExpectedFunctionSnippet); anything else gets no fast path.
    if (identifier == m_vm->propertyNames->Object)
        return ExpectObjectConstructor;
    if (identifier == m_vm->propertyNames->Array)
        return ExpectArrayConstructor;
    return NoExpectedFunction;
}
1750
ExpectedFunction BytecodeGenerator::emitExpectedFunctionSnippet(RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, Label* done)
{
    // Speculative inlining of Object()/Array() construction: emit an
    // op_jneq_ptr guard against the expected constructor; on the fast path
    // allocate inline and jump to 'done', on a pointer mismatch fall through
    // to 'realCall' where the caller emits the real call. Returns
    // NoExpectedFunction when the argument shape rules out inlining.
    RefPtr<Label> realCall = newLabel();
    switch (expectedFunction) {
    case ExpectObjectConstructor: {
        // If the number of arguments is non-zero, then we can't do anything interesting.
        if (callArguments.argumentCountIncludingThis() >= 2)
            return NoExpectedFunction;

        size_t begin = instructions().size();
        emitOpcode(op_jneq_ptr);
        instructions().append(func->index());
        instructions().append(Special::ObjectConstructor);
        instructions().append(realCall->bind(begin, instructions().size()));

        if (dst != ignoredResult())
            emitNewObject(dst);
        break;
    }

    case ExpectArrayConstructor: {
        // If you're doing anything other than "new Array()" or "new Array(foo)" then we
        // don't do inline it, for now. The only reason is that call arguments are in
        // the opposite order of what op_new_array expects, so we'd either need to change
        // how op_new_array works or we'd need an op_new_array_reverse. Neither of these
        // things sounds like it's worth it.
        if (callArguments.argumentCountIncludingThis() > 2)
            return NoExpectedFunction;

        size_t begin = instructions().size();
        emitOpcode(op_jneq_ptr);
        instructions().append(func->index());
        instructions().append(Special::ArrayConstructor);
        instructions().append(realCall->bind(begin, instructions().size()));

        if (dst != ignoredResult()) {
            // new Array(size) -> sized allocation; new Array() -> empty array.
            if (callArguments.argumentCountIncludingThis() == 2) {
                emitOpcode(op_new_array_with_size);
                instructions().append(dst->index());
                instructions().append(callArguments.argumentRegister(0)->index());
                instructions().append(newArrayAllocationProfile());
            } else {
                ASSERT(callArguments.argumentCountIncludingThis() == 1);
                emitOpcode(op_new_array);
                instructions().append(dst->index());
                instructions().append(0);
                instructions().append(0);
                instructions().append(newArrayAllocationProfile());
            }
        }
        break;
    }

    default:
        ASSERT(expectedFunction == NoExpectedFunction);
        return NoExpectedFunction;
    }

    // Fast path complete: skip over the caller's real call sequence.
    size_t begin = instructions().size();
    emitOpcode(op_jmp);
    instructions().append(done->bind(begin, instructions().size()));
    emitLabel(realCall.get());

    return expectedFunction;
}
1816
// Emits a non-construct call (op_call or op_call_eval): evaluates the
// arguments into their registers, reserves the callee's call frame header,
// optionally emits the known-constructor fast path, then the call opcode
// itself. The result (if not ignored) lands in 'dst' via op_call_put_result.
RegisterID* BytecodeGenerator::emitCall(OpcodeID opcodeID, RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset, unsigned line, unsigned lineStart)
{
    ASSERT(opcodeID == op_call || opcodeID == op_call_eval);
    ASSERT(func->refCount());

    if (m_shouldEmitProfileHooks)
        emitMove(callArguments.profileHookRegister(), func);

    // Generate code for arguments.
    unsigned argument = 0;
    for (ArgumentListNode* n = callArguments.argumentsNode()->m_listNode; n; n = n->m_next)
        emitNode(callArguments.argumentRegister(argument++), n);

    // Reserve space for call frame.
    Vector<RefPtr<RegisterID>, JSStack::CallFrameHeaderSize, UnsafeVectorOverflow> callFrame;
    for (int i = 0; i < JSStack::CallFrameHeaderSize; ++i)
        callFrame.append(newTemporary());

    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_will_call);
        instructions().append(callArguments.profileHookRegister()->index());
    }

    // Attribute any exception raised while making the call to the call site.
    emitExpressionInfo(divot, startOffset, endOffset, line, lineStart);

    RefPtr<Label> done = newLabel();
    expectedFunction = emitExpectedFunctionSnippet(dst, func, expectedFunction, callArguments, done.get());

    // Emit call.
    UnlinkedArrayProfile arrayProfile = newArrayProfile();
    emitOpcode(opcodeID);
    instructions().append(func->index()); // func
    instructions().append(callArguments.argumentCountIncludingThis()); // argCount
    instructions().append(callArguments.registerOffset()); // registerOffset
#if ENABLE(LLINT)
    instructions().append(m_codeBlock->addLLIntCallLinkInfo());
#else
    instructions().append(0);
#endif
    instructions().append(arrayProfile);
    if (dst != ignoredResult()) {
        UnlinkedValueProfile profile = emitProfiledOpcode(op_call_put_result);
        instructions().append(kill(dst));
        instructions().append(profile);
    }

    // The fast-path snippet (if one was emitted) jumps here once it has
    // produced the result itself.
    if (expectedFunction != NoExpectedFunction)
        emitLabel(done.get());

    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_did_call);
        instructions().append(callArguments.profileHookRegister()->index());
    }

    return dst;
}
1873
// Emits op_call_varargs: a call whose arguments come from a runtime
// 'arguments'-style object rather than an explicit argument list, so no call
// frame registers are reserved here — the interpreter/JIT spreads the
// arguments starting at 'firstFreeRegister'.
RegisterID* BytecodeGenerator::emitCallVarargs(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, RegisterID* arguments, RegisterID* firstFreeRegister, RegisterID* profileHookRegister, unsigned divot, unsigned startOffset, unsigned endOffset, unsigned line, unsigned lineStart)
{
    if (m_shouldEmitProfileHooks) {
        emitMove(profileHookRegister, func);
        emitOpcode(op_profile_will_call);
        instructions().append(profileHookRegister->index());
    }

    // Attribute any exception raised while making the call to the call site.
    emitExpressionInfo(divot, startOffset, endOffset, line, lineStart);

    // Emit call.
    emitOpcode(op_call_varargs);
    instructions().append(func->index());
    instructions().append(thisRegister->index());
    instructions().append(arguments->index());
    instructions().append(firstFreeRegister->index());
    if (dst != ignoredResult()) {
        UnlinkedValueProfile profile = emitProfiledOpcode(op_call_put_result);
        instructions().append(kill(dst));
        instructions().append(profile);
    }
    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_did_call);
        instructions().append(profileHookRegister->index());
    }
    return dst;
}
1901
// Emits a return of 'src', first tearing off the activation and/or arguments
// objects if this code block requires them to outlive the call frame.
RegisterID* BytecodeGenerator::emitReturn(RegisterID* src)
{
    if (m_codeBlock->needsFullScopeChain()) {
        emitOpcode(op_tear_off_activation);
        instructions().append(m_activationRegister->index());
    }

    // Tear off 'arguments' only when it can alias the parameters (non-strict,
    // at least one named parameter).
    if (m_codeBlock->usesArguments() && m_codeBlock->numParameters() != 1 && !m_codeBlock->isStrictMode()) {
        emitOpcode(op_tear_off_arguments);
        instructions().append(m_codeBlock->argumentsRegister());
        instructions().append(m_activationRegister ? m_activationRegister->index() : emitLoad(0, JSValue())->index());
    }

    // Constructors use op_ret_object_or_this to check the result is an
    // object, unless we can trivially determine the check is not
    // necessary (currently, if the return value is 'this').
    if (isConstructor() && (src->index() != m_thisRegister.index())) {
        emitOpcode(op_ret_object_or_this);
        instructions().append(src->index());
        instructions().append(m_thisRegister.index());
        return src;
    }
    return emitUnaryNoDstOp(op_ret, src);
}
1926
// Emits a single-operand opcode that has no destination register (e.g.
// op_ret, op_push_with_scope) and returns its source register for chaining.
RegisterID* BytecodeGenerator::emitUnaryNoDstOp(OpcodeID opcodeID, RegisterID* src)
{
    emitOpcode(opcodeID);
    instructions().append(src->index());
    return src;
}
1933
// Emits a 'new' expression (op_construct). Mirrors emitCall(), but the
// argument list may be absent ("new F") and the opcode is op_construct; the
// known-constructor fast path (Object/Array) is shared via
// emitExpectedFunctionSnippet().
RegisterID* BytecodeGenerator::emitConstruct(RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset, unsigned line, unsigned lineStart)
{
    ASSERT(func->refCount());

    if (m_shouldEmitProfileHooks)
        emitMove(callArguments.profileHookRegister(), func);

    // Generate code for arguments.
    unsigned argument = 0;
    if (ArgumentsNode* argumentsNode = callArguments.argumentsNode()) {
        for (ArgumentListNode* n = argumentsNode->m_listNode; n; n = n->m_next)
            emitNode(callArguments.argumentRegister(argument++), n);
    }

    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_will_call);
        instructions().append(callArguments.profileHookRegister()->index());
    }

    // Reserve space for call frame.
    Vector<RefPtr<RegisterID>, JSStack::CallFrameHeaderSize, UnsafeVectorOverflow> callFrame;
    for (int i = 0; i < JSStack::CallFrameHeaderSize; ++i)
        callFrame.append(newTemporary());

    // Attribute any exception raised while constructing to the 'new' site.
    emitExpressionInfo(divot, startOffset, endOffset, line, lineStart);

    RefPtr<Label> done = newLabel();
    expectedFunction = emitExpectedFunctionSnippet(dst, func, expectedFunction, callArguments, done.get());

    emitOpcode(op_construct);
    instructions().append(func->index()); // func
    instructions().append(callArguments.argumentCountIncludingThis()); // argCount
    instructions().append(callArguments.registerOffset()); // registerOffset
#if ENABLE(LLINT)
    instructions().append(m_codeBlock->addLLIntCallLinkInfo());
#else
    instructions().append(0);
#endif
    instructions().append(0);
    if (dst != ignoredResult()) {
        UnlinkedValueProfile profile = emitProfiledOpcode(op_call_put_result);
        instructions().append(kill(dst));
        instructions().append(profile);
    }

    // The fast-path snippet (if one was emitted) jumps here once it has
    // produced the result itself.
    if (expectedFunction != NoExpectedFunction)
        emitLabel(done.get());

    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_did_call);
        instructions().append(callArguments.profileHookRegister()->index());
    }

    return dst;
}
1989
// Emits op_strcat: concatenates 'count' consecutive registers starting at
// 'src' into 'dst'.
RegisterID* BytecodeGenerator::emitStrcat(RegisterID* dst, RegisterID* src, int count)
{
    emitOpcode(op_strcat);
    instructions().append(dst->index());
    instructions().append(src->index());
    instructions().append(count);

    return dst;
}
1999
// Emits op_to_primitive, converting 'src' to a primitive value in 'dst'.
void BytecodeGenerator::emitToPrimitive(RegisterID* dst, RegisterID* src)
{
    emitOpcode(op_to_primitive);
    instructions().append(dst->index());
    instructions().append(src->index());
}
2006
// Emits the scope push for a 'with' statement. Records a non-finally control
// flow context and bumps the dynamic scope depth so break/continue/return can
// pop the right number of scopes.
RegisterID* BytecodeGenerator::emitPushWithScope(RegisterID* scope)
{
    ControlFlowContext context;
    context.isFinallyBlock = false;
    m_scopeContextStack.append(context);
    m_dynamicScopeDepth++;

    return emitUnaryNoDstOp(op_push_with_scope, scope);
}
2016
// Emits op_pop_scope and unwinds the bookkeeping pushed by
// emitPushWithScope()/emitPushNameScope(). Must not be used to pop a finally
// context (asserted).
void BytecodeGenerator::emitPopScope()
{
    ASSERT(m_scopeContextStack.size());
    ASSERT(!m_scopeContextStack.last().isFinallyBlock);

    emitOpcode(op_pop_scope);

    m_scopeContextStack.removeLast();
    m_dynamicScopeDepth--;
}
2027
// Emits an op_debug instruction for the debugger, unless debug hooks are
// disabled (or, in DEBUG_WITH_BREAKPOINT builds, unless this is the
// breakpoint hook).
void BytecodeGenerator::emitDebugHook(DebugHookID debugHookID, unsigned firstLine, unsigned lastLine, unsigned charOffset, unsigned lineStart)
{
#if ENABLE(DEBUG_WITH_BREAKPOINT)
    if (debugHookID != DidReachBreakpoint)
        return;
#else
    if (!m_shouldEmitDebugHooks)
        return;
#endif
    emitExpressionInfo(charOffset, 0, 0, firstLine, lineStart);
    // op_debug wants an offset relative to the start of this script's source.
    unsigned charPosition = charOffset - m_scopeNode->source().startOffset();
    emitOpcode(op_debug);
    instructions().append(debugHookID);
    instructions().append(firstLine);
    instructions().append(lastLine);
    instructions().append(charPosition);
}
2045
// Pushes a finally context onto the scope context stack. The FinallyContext
// snapshots the sizes/depths of every generator-state stack so that
// emitComplexPopScopes() can later replay the finally block with the world
// restored to this exact state.
void BytecodeGenerator::pushFinallyContext(StatementNode* finallyBlock)
{
    ControlFlowContext scope;
    scope.isFinallyBlock = true;
    FinallyContext context = {
        finallyBlock,
        static_cast<unsigned>(m_scopeContextStack.size()),
        static_cast<unsigned>(m_switchContextStack.size()),
        static_cast<unsigned>(m_forInContextStack.size()),
        static_cast<unsigned>(m_tryContextStack.size()),
        static_cast<unsigned>(m_labelScopes.size()),
        m_finallyDepth,
        m_dynamicScopeDepth
    };
    scope.finallyContext = context;
    m_scopeContextStack.append(scope);
    m_finallyDepth++;
}
2064
// Pops the finally context pushed by pushFinallyContext(). The top of the
// scope context stack must be a finally block (asserted).
void BytecodeGenerator::popFinallyContext()
{
    ASSERT(m_scopeContextStack.size());
    ASSERT(m_scopeContextStack.last().isFinallyBlock);
    ASSERT(m_finallyDepth > 0);
    m_scopeContextStack.removeLast();
    m_finallyDepth--;
}
2073
// Finds the label scope a 'break' (optionally labeled with 'name') targets.
// Returns 0 when there is no valid target (the caller reports the syntax
// error). An unlabeled break binds to the innermost non-named-label scope.
LabelScope* BytecodeGenerator::breakTarget(const Identifier& name)
{
    // Reclaim free label scopes.
    //
    // The condition was previously coded as 'm_labelScopes.size() && !m_labelScopes.last().refCount()',
    // however sometimes this appears to lead to GCC going a little haywire and entering the loop with
    // size 0, leading to segfaulty badness. We are yet to identify a valid cause within our code to
    // cause the GCC codegen to misbehave in this fashion, and as such the following refactoring of the
    // loop condition is a workaround.
    while (m_labelScopes.size()) {
        if (m_labelScopes.last().refCount())
            break;
        m_labelScopes.removeLast();
    }

    if (!m_labelScopes.size())
        return 0;

    // We special-case the following, which is a syntax error in Firefox:
    // label:
    //     break;
    if (name.isEmpty()) {
        for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
            LabelScope* scope = &m_labelScopes[i];
            if (scope->type() != LabelScope::NamedLabel) {
                ASSERT(scope->breakTarget());
                return scope;
            }
        }
        return 0;
    }

    // Labeled break: find the innermost scope carrying this label.
    for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
        LabelScope* scope = &m_labelScopes[i];
        if (scope->name() && *scope->name() == name) {
            ASSERT(scope->breakTarget());
            return scope;
        }
    }
    return 0;
}
2115
2116 LabelScope* BytecodeGenerator::continueTarget(const Identifier& name)
2117 {
2118 // Reclaim free label scopes.
2119 while (m_labelScopes.size() && !m_labelScopes.last().refCount())
2120 m_labelScopes.removeLast();
2121
2122 if (!m_labelScopes.size())
2123 return 0;
2124
2125 if (name.isEmpty()) {
2126 for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
2127 LabelScope* scope = &m_labelScopes[i];
2128 if (scope->type() == LabelScope::Loop) {
2129 ASSERT(scope->continueTarget());
2130 return scope;
2131 }
2132 }
2133 return 0;
2134 }
2135
2136 // Continue to the loop nested nearest to the label scope that matches
2137 // 'name'.
2138 LabelScope* result = 0;
2139 for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
2140 LabelScope* scope = &m_labelScopes[i];
2141 if (scope->type() == LabelScope::Loop) {
2142 ASSERT(scope->continueTarget());
2143 result = scope;
2144 }
2145 if (scope->name() && *scope->name() == name)
2146 return result; // may be 0
2147 }
2148 return 0;
2149 }
2150
// Emits the scope pops for a non-local control transfer (break/continue/
// return) that crosses one or more finally blocks. Plain dynamic scopes are
// popped with op_pop_scope; for each finally block crossed, the finally
// body's bytecode is re-emitted inline, with the generator's bookkeeping
// stacks temporarily rolled back to the state captured when that finally
// context was pushed (so the finally body compiles as if control were still
// inside it), then restored afterwards.
void BytecodeGenerator::emitComplexPopScopes(ControlFlowContext* topScope, ControlFlowContext* bottomScope)
{
    while (topScope > bottomScope) {
        // First we count the number of dynamic scopes we need to remove to get
        // to a finally block.
        int nNormalScopes = 0;
        while (topScope > bottomScope) {
            if (topScope->isFinallyBlock)
                break;
            ++nNormalScopes;
            --topScope;
        }

        if (nNormalScopes) {
            // We need to remove a number of dynamic scopes to get to the next
            // finally block
            while (nNormalScopes--)
                emitOpcode(op_pop_scope);

            // If topScope == bottomScope then there isn't a finally block left to emit.
            if (topScope == bottomScope)
                return;
        }

        Vector<ControlFlowContext> savedScopeContextStack;
        Vector<SwitchInfo> savedSwitchContextStack;
        Vector<ForInContext> savedForInContextStack;
        Vector<TryContext> poppedTryContexts;
        LabelScopeStore savedLabelScopes;
        while (topScope > bottomScope && topScope->isFinallyBlock) {
            RefPtr<Label> beforeFinally = emitLabel(newLabel().get());

            // Save the current state of the world while instating the state of the world
            // for the finally block.
            FinallyContext finallyContext = topScope->finallyContext;
            bool flipScopes = finallyContext.scopeContextStackSize != m_scopeContextStack.size();
            bool flipSwitches = finallyContext.switchContextStackSize != m_switchContextStack.size();
            bool flipForIns = finallyContext.forInContextStackSize != m_forInContextStack.size();
            bool flipTries = finallyContext.tryContextStackSize != m_tryContextStack.size();
            bool flipLabelScopes = finallyContext.labelScopesSize != m_labelScopes.size();
            int topScopeIndex = -1;
            int bottomScopeIndex = -1;
            if (flipScopes) {
                // topScope/bottomScope point into m_scopeContextStack, which is
                // about to be replaced wholesale; remember them as indices.
                topScopeIndex = topScope - m_scopeContextStack.begin();
                bottomScopeIndex = bottomScope - m_scopeContextStack.begin();
                savedScopeContextStack = m_scopeContextStack;
                m_scopeContextStack.shrink(finallyContext.scopeContextStackSize);
            }
            if (flipSwitches) {
                savedSwitchContextStack = m_switchContextStack;
                m_switchContextStack.shrink(finallyContext.switchContextStackSize);
            }
            if (flipForIns) {
                savedForInContextStack = m_forInContextStack;
                m_forInContextStack.shrink(finallyContext.forInContextStackSize);
            }
            if (flipTries) {
                // Close off each try region we are jumping out of at the point
                // just before the inlined finally body, so the finally body
                // itself is not covered by those handlers.
                while (m_tryContextStack.size() != finallyContext.tryContextStackSize) {
                    ASSERT(m_tryContextStack.size() > finallyContext.tryContextStackSize);
                    TryContext context = m_tryContextStack.last();
                    m_tryContextStack.removeLast();
                    TryRange range;
                    range.start = context.start;
                    range.end = beforeFinally;
                    range.tryData = context.tryData;
                    m_tryRanges.append(range);
                    poppedTryContexts.append(context);
                }
            }
            if (flipLabelScopes) {
                savedLabelScopes = m_labelScopes;
                while (m_labelScopes.size() > finallyContext.labelScopesSize)
                    m_labelScopes.removeLast();
            }
            int savedFinallyDepth = m_finallyDepth;
            m_finallyDepth = finallyContext.finallyDepth;
            int savedDynamicScopeDepth = m_dynamicScopeDepth;
            m_dynamicScopeDepth = finallyContext.dynamicScopeDepth;

            // Emit the finally block.
            emitNode(finallyContext.finallyBlock);

            RefPtr<Label> afterFinally = emitLabel(newLabel().get());

            // Restore the state of the world.
            if (flipScopes) {
                m_scopeContextStack = savedScopeContextStack;
                topScope = &m_scopeContextStack[topScopeIndex]; // assert it's within bounds
                bottomScope = m_scopeContextStack.begin() + bottomScopeIndex; // don't assert, since it the index might be -1.
            }
            if (flipSwitches)
                m_switchContextStack = savedSwitchContextStack;
            if (flipForIns)
                m_forInContextStack = savedForInContextStack;
            if (flipTries) {
                // Reopen the try regions we closed above, starting after the
                // inlined finally body.
                ASSERT(m_tryContextStack.size() == finallyContext.tryContextStackSize);
                for (unsigned i = poppedTryContexts.size(); i--;) {
                    TryContext context = poppedTryContexts[i];
                    context.start = afterFinally;
                    m_tryContextStack.append(context);
                }
                poppedTryContexts.clear();
            }
            if (flipLabelScopes)
                m_labelScopes = savedLabelScopes;
            m_finallyDepth = savedFinallyDepth;
            m_dynamicScopeDepth = savedDynamicScopeDepth;

            --topScope;
        }
    }
}
2263
// Emits the scope pops needed to unwind from the current scope depth down to
// 'targetScopeDepth'. With no finally blocks in the way this is just a run of
// op_pop_scope; otherwise the heavyweight emitComplexPopScopes() path inlines
// the intervening finally bodies.
void BytecodeGenerator::emitPopScopes(int targetScopeDepth)
{
    ASSERT(scopeDepth() - targetScopeDepth >= 0);

    size_t scopeDelta = scopeDepth() - targetScopeDepth;
    ASSERT(scopeDelta <= m_scopeContextStack.size());
    if (!scopeDelta)
        return;

    if (!m_finallyDepth) {
        while (scopeDelta--)
            emitOpcode(op_pop_scope);
        return;
    }

    emitComplexPopScopes(&m_scopeContextStack.last(), &m_scopeContextStack.last() - scopeDelta);
}
2281
// Emits op_get_pnames for a for-in loop: fetches 'base''s enumerable property
// names into 'dst' and initializes the iteration index/size registers. The
// opcode branches to 'breakTarget' when there is nothing to enumerate.
RegisterID* BytecodeGenerator::emitGetPropertyNames(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, Label* breakTarget)
{
    size_t begin = instructions().size();

    emitOpcode(op_get_pnames);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(i->index());
    instructions().append(size->index());
    instructions().append(breakTarget->bind(begin, instructions().size()));
    return dst;
}
2294
// Emits op_next_pname for a for-in loop: loads the next property name into
// 'dst' and jumps to 'target' (the loop body) while names remain; falls
// through when the enumeration is exhausted.
RegisterID* BytecodeGenerator::emitNextPropertyName(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, RegisterID* iter, Label* target)
{
    size_t begin = instructions().size();

    emitOpcode(op_next_pname);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(i->index());
    instructions().append(size->index());
    instructions().append(iter->index());
    instructions().append(target->bind(begin, instructions().size()));
    return dst;
}
2308
// Opens a try region starting at 'start'. Allocates the TryData (handler
// target label; scope depth filled in by popTryAndEmitCatch()) and pushes a
// TryContext so the region can be closed or temporarily split around inlined
// finally bodies. Returns a pointer into m_tryData, which must stay stable.
TryData* BytecodeGenerator::pushTry(Label* start)
{
    TryData tryData;
    tryData.target = newLabel();
    tryData.targetScopeDepth = UINT_MAX;
    m_tryData.append(tryData);
    TryData* result = &m_tryData.last();

    TryContext tryContext;
    tryContext.start = start;
    tryContext.tryData = result;

    m_tryContextStack.append(tryContext);

    return result;
}
2325
// Closes the innermost try region at 'end' (it must correspond to 'tryData',
// asserted) and emits the catch handler: the handler label, the scope depth
// to unwind to, and op_catch storing the caught exception in
// 'targetRegister'.
RegisterID* BytecodeGenerator::popTryAndEmitCatch(TryData* tryData, RegisterID* targetRegister, Label* end)
{
    m_usesExceptions = true;

    ASSERT_UNUSED(tryData, m_tryContextStack.last().tryData == tryData);

    // Record the finished [start, end) range for the exception table.
    TryRange tryRange;
    tryRange.start = m_tryContextStack.last().start;
    tryRange.end = end;
    tryRange.tryData = m_tryContextStack.last().tryData;
    m_tryRanges.append(tryRange);
    m_tryContextStack.removeLast();

    emitLabel(tryRange.tryData->target.get());
    tryRange.tryData->targetScopeDepth = m_dynamicScopeDepth;

    emitOpcode(op_catch);
    instructions().append(targetRegister->index());
    return targetRegister;
}
2346
// Emits op_throw_static_error with 'message'; the trailing 'true' operand
// distinguishes this from the non-reference-error form emitted elsewhere
// (cf. emitReadOnlyExceptionIfNeeded(), which appends 'false').
void BytecodeGenerator::emitThrowReferenceError(const String& message)
{
    emitOpcode(op_throw_static_error);
    instructions().append(addConstantValue(addStringConstant(Identifier(m_vm, message)))->index());
    instructions().append(true);
}
2353
// Emits op_push_name_scope, creating a scope that binds a single name
// ('property') to 'value' with the given attributes (used e.g. for named
// function expressions and catch). Tracks the push in the control flow
// bookkeeping like emitPushWithScope().
void BytecodeGenerator::emitPushNameScope(const Identifier& property, RegisterID* value, unsigned attributes)
{
    ControlFlowContext context;
    context.isFinallyBlock = false;
    m_scopeContextStack.append(context);
    m_dynamicScopeDepth++;

    emitOpcode(op_push_name_scope);
    instructions().append(addConstant(property));
    instructions().append(value->index());
    instructions().append(attributes);
}
2366
// Begins a jump-table switch of the given type. Emits the switch opcode with
// placeholder table-index and default-target operands; endSwitch() patches
// them once all clause labels are known.
void BytecodeGenerator::beginSwitch(RegisterID* scrutineeRegister, SwitchInfo::SwitchType type)
{
    // Remember where this switch instruction starts so endSwitch() can patch it.
    SwitchInfo info = { static_cast<uint32_t>(instructions().size()), type };
    switch (type) {
    case SwitchInfo::SwitchImmediate:
        emitOpcode(op_switch_imm);
        break;
    case SwitchInfo::SwitchCharacter:
        emitOpcode(op_switch_char);
        break;
    case SwitchInfo::SwitchString:
        emitOpcode(op_switch_string);
        break;
    default:
        RELEASE_ASSERT_NOT_REACHED();
    }

    instructions().append(0); // place holder for table index
    instructions().append(0); // place holder for default target
    instructions().append(scrutineeRegister->index());
    m_switchContextStack.append(info);
}
2389
2390 static int32_t keyForImmediateSwitch(ExpressionNode* node, int32_t min, int32_t max)
2391 {
2392 UNUSED_PARAM(max);
2393 ASSERT(node->isNumber());
2394 double value = static_cast<NumberNode*>(node)->value();
2395 int32_t key = static_cast<int32_t>(value);
2396 ASSERT(key == value);
2397 ASSERT(key >= min);
2398 ASSERT(key <= max);
2399 return key - min;
2400 }
2401
// Fills in an immediate-switch jump table: branch offsets for keys min..max,
// with 0 (meaning "fall through to default") for keys with no clause. The
// clause labels must already be resolved, since offsets are bound relative to
// the switch instruction (whose operands span 3 words).
static void prepareJumpTableForImmediateSwitch(UnlinkedSimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max)
{
    jumpTable.min = min;
    jumpTable.branchOffsets.resize(max - min + 1);
    jumpTable.branchOffsets.fill(0);
    for (uint32_t i = 0; i < clauseCount; ++i) {
        // We're emitting this after the clause labels should have been fixed, so
        // the labels should not be "forward" references
        ASSERT(!labels[i]->isForward());
        jumpTable.add(keyForImmediateSwitch(nodes[i], min, max), labels[i]->bind(switchAddress, switchAddress + 3));
    }
}
2414
2415 static int32_t keyForCharacterSwitch(ExpressionNode* node, int32_t min, int32_t max)
2416 {
2417 UNUSED_PARAM(max);
2418 ASSERT(node->isString());
2419 StringImpl* clause = static_cast<StringNode*>(node)->value().impl();
2420 ASSERT(clause->length() == 1);
2421
2422 int32_t key = (*clause)[0];
2423 ASSERT(key >= min);
2424 ASSERT(key <= max);
2425 return key - min;
2426 }
2427
// Fills in a character-switch jump table: branch offsets for code units
// min..max, 0 for characters with no clause. Labels must already be resolved;
// offsets are bound relative to the switch instruction (3 operand words).
static void prepareJumpTableForCharacterSwitch(UnlinkedSimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max)
{
    jumpTable.min = min;
    jumpTable.branchOffsets.resize(max - min + 1);
    jumpTable.branchOffsets.fill(0);
    for (uint32_t i = 0; i < clauseCount; ++i) {
        // We're emitting this after the clause labels should have been fixed, so
        // the labels should not be "forward" references
        ASSERT(!labels[i]->isForward());
        jumpTable.add(keyForCharacterSwitch(nodes[i], min, max), labels[i]->bind(switchAddress, switchAddress + 3));
    }
}
2440
// Fills in a string-switch jump table, mapping each clause's string to a
// branch offset bound relative to the switch instruction (3 operand words).
// Labels must already be resolved.
static void prepareJumpTableForStringSwitch(UnlinkedStringJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes)
{
    for (uint32_t i = 0; i < clauseCount; ++i) {
        // We're emitting this after the clause labels should have been fixed, so
        // the labels should not be "forward" references
        ASSERT(!labels[i]->isForward());

        ASSERT(nodes[i]->isString());
        StringImpl* clause = static_cast<StringNode*>(nodes[i])->value().impl();
        jumpTable.offsetTable.add(clause, labels[i]->bind(switchAddress, switchAddress + 3));
    }
}
2453
// Completes the switch begun by beginSwitch(): patches the placeholder
// table-index and default-target operands of the switch instruction, then
// builds the appropriate jump table from the now-resolved clause labels.
void BytecodeGenerator::endSwitch(uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, Label* defaultLabel, int32_t min, int32_t max)
{
    SwitchInfo switchInfo = m_switchContextStack.last();
    m_switchContextStack.removeLast();
    if (switchInfo.switchType == SwitchInfo::SwitchImmediate) {
        // Operand 1: index of the table we're about to add; operand 2: default target.
        instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfImmediateSwitchJumpTables();
        instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);

        UnlinkedSimpleJumpTable& jumpTable = m_codeBlock->addImmediateSwitchJumpTable();
        prepareJumpTableForImmediateSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max);
    } else if (switchInfo.switchType == SwitchInfo::SwitchCharacter) {
        instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfCharacterSwitchJumpTables();
        instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);

        UnlinkedSimpleJumpTable& jumpTable = m_codeBlock->addCharacterSwitchJumpTable();
        prepareJumpTableForCharacterSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max);
    } else {
        ASSERT(switchInfo.switchType == SwitchInfo::SwitchString);
        instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfStringSwitchJumpTables();
        instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);

        UnlinkedStringJumpTable& jumpTable = m_codeBlock->addStringSwitchJumpTable();
        prepareJumpTableForStringSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes);
    }
}
2479
// Called when expression nesting exceeds the generator's limit. Sets the
// m_expressionTooDeep flag (which aborts/poisons code generation) and returns
// a fresh temporary so the caller has a register to hand back.
RegisterID* BytecodeGenerator::emitThrowExpressionTooDeepException()
{
    // It would be nice to do an even better job of identifying exactly where the expression is.
    // And we could make the caller pass the node pointer in, if there was some way of getting
    // that from an arbitrary node. However, calling emitExpressionInfo without any useful data
    // is still good enough to get us an accurate line number.
    // NOTE(review): despite the comment above, no emitExpressionInfo call is
    // visible here — confirm whether the call was intentionally removed.
    m_expressionTooDeep = true;
    return newTemporary();
}
2489
// Forwards the "is numeric compare function" flag to the unlinked code block.
void BytecodeGenerator::setIsNumericCompareFunction(bool isNumericCompareFunction)
{
    m_codeBlock->setIsNumericCompareFunction(isNumericCompareFunction);
}
2494
2495 bool BytecodeGenerator::isArgumentNumber(const Identifier& ident, int argumentNumber)
2496 {
2497 RegisterID* registerID = resolve(ident).local();
2498 if (!registerID || registerID->index() >= 0)
2499 return 0;
2500 return registerID->index() == CallFrame::argumentOffset(argumentNumber);
2501 }
2502
// Emits the strict-mode read-only write error. In sloppy mode the write is
// silently ignored, so nothing is emitted. The trailing 'false' operand marks
// this as a TypeError-style static error (cf. emitThrowReferenceError(),
// which appends 'true').
void BytecodeGenerator::emitReadOnlyExceptionIfNeeded()
{
    if (!isStrictMode())
        return;
    emitOpcode(op_throw_static_error);
    instructions().append(addConstantValue(addStringConstant(Identifier(m_vm, StrictModeReadonlyPropertyWriteError)))->index());
    instructions().append(false);
}
2511
2512 } // namespace JSC