]> git.saurik.com Git - apple/javascriptcore.git/blob - bytecompiler/BytecodeGenerator.cpp
JavaScriptCore-721.26.tar.gz
[apple/javascriptcore.git] / bytecompiler / BytecodeGenerator.cpp
1 /*
2 * Copyright (C) 2008, 2009 Apple Inc. All rights reserved.
3 * Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca>
4 *
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions
7 * are met:
8 *
9 * 1. Redistributions of source code must retain the above copyright
10 * notice, this list of conditions and the following disclaimer.
11 * 2. Redistributions in binary form must reproduce the above copyright
12 * notice, this list of conditions and the following disclaimer in the
13 * documentation and/or other materials provided with the distribution.
14 * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
15 * its contributors may be used to endorse or promote products derived
16 * from this software without specific prior written permission.
17 *
18 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
19 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
20 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
21 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
22 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
23 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
24 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
25 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
26 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
27 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28 */
29
30 #include "config.h"
31 #include "BytecodeGenerator.h"
32
33 #include "BatchedTransitionOptimizer.h"
34 #include "PrototypeFunction.h"
35 #include "JSFunction.h"
36 #include "Interpreter.h"
37 #include "UString.h"
38
39 using namespace std;
40
41 namespace JSC {
42
43 /*
44 The layout of a register frame looks like this:
45
46 For
47
48 function f(x, y) {
49 var v1;
50 function g() { }
51 var v2;
52 return (x) * (y);
53 }
54
55 assuming (x) and (y) generated temporaries t1 and t2, you would have
56
57 ------------------------------------
58 | x | y | g | v2 | v1 | t1 | t2 | <-- value held
59 ------------------------------------
60 | -5 | -4 | -3 | -2 | -1 | +0 | +1 | <-- register index
61 ------------------------------------
62 | params->|<-locals | temps->
63
64 Because temporary registers are allocated in a stack-like fashion, we
65 can reclaim them with a simple popping algorithm. The same goes for labels.
66 (We never reclaim parameter or local registers, because parameters and
67 locals are DontDelete.)
68
69 The register layout before a function call looks like this:
70
71 For
72
73 function f(x, y)
74 {
75 }
76
77 f(1);
78
79 > <------------------------------
80 < > reserved: call frame | 1 | <-- value held
81 > >snip< <------------------------------
82 < > +0 | +1 | +2 | +3 | +4 | +5 | <-- register index
83 > <------------------------------
84 | params->|<-locals | temps->
85
86 The call instruction fills in the "call frame" registers. It also pads
87 missing arguments at the end of the call:
88
89 > <-----------------------------------
90 < > reserved: call frame | 1 | ? | <-- value held ("?" stands for "undefined")
91 > >snip< <-----------------------------------
92 < > +0 | +1 | +2 | +3 | +4 | +5 | +6 | <-- register index
93 > <-----------------------------------
94 | params->|<-locals | temps->
95
96 After filling in missing arguments, the call instruction sets up the new
97 stack frame to overlap the end of the old stack frame:
98
99 |----------------------------------> <
100 | reserved: call frame | 1 | ? < > <-- value held ("?" stands for "undefined")
101 |----------------------------------> >snip< <
102 | -7 | -6 | -5 | -4 | -3 | -2 | -1 < > <-- register index
103 |----------------------------------> <
104 | | params->|<-locals | temps->
105
106 That way, arguments are "copied" into the callee's stack frame for free.
107
108 If the caller supplies too many arguments, this trick doesn't work. The
109 extra arguments protrude into space reserved for locals and temporaries.
110 In that case, the call instruction makes a real copy of the call frame header,
111 along with just the arguments expected by the callee, leaving the original
112 call frame header and arguments behind. (The call instruction can't just discard
113 extra arguments, because the "arguments" object may access them later.)
114 This copying strategy ensures that all named values will be at the indices
115 expected by the callee.
116 */
117
#ifndef NDEBUG
// File-scope switch read by generate() and toggled via setDumpsGeneratedCode();
// exists only in debug builds.
static bool s_dumpsGeneratedCode = false;
#endif
121
// Enables/disables dumping of generated bytecode after codegen.
// Debug builds only; a no-op in release builds where the flag does not exist.
void BytecodeGenerator::setDumpsGeneratedCode(bool dumpsGeneratedCode)
{
#ifndef NDEBUG
    s_dumpsGeneratedCode = dumpsGeneratedCode;
#else
    UNUSED_PARAM(dumpsGeneratedCode);
#endif
}
130
// Returns whether generated bytecode will be dumped; always false in release
// builds (the flag is compiled out).
bool BytecodeGenerator::dumpsGeneratedCode()
{
#ifndef NDEBUG
    return s_dumpsGeneratedCode;
#else
    return false;
#endif
}
139
// Drives bytecode generation for the whole parse tree into m_codeBlock,
// then performs post-generation cleanup (symbol table clearing, exception
// info stripping, shrink-to-fit).
void BytecodeGenerator::generate()
{
    m_codeBlock->setThisRegister(m_thisRegister.index());

    m_scopeNode->emitBytecode(*this);

#ifndef NDEBUG
    m_codeBlock->setInstructionCount(m_codeBlock->instructions().size());

    if (s_dumpsGeneratedCode)
        m_codeBlock->dump(m_scopeChain->globalObject()->globalExec());
#endif

    // The symbol table is only needed after codegen when the scope can be
    // inspected at runtime (full scope chain or an arguments object).
    if ((m_codeType == FunctionCode && !m_codeBlock->needsFullScopeChain() && !m_codeBlock->usesArguments()) || m_codeType == EvalCode)
        symbolTable().clear();

    m_codeBlock->setIsNumericCompareFunction(instructions() == m_globalData->numericCompareFunction(m_scopeChain->globalObject()->globalExec()));

#if !ENABLE(OPCODE_SAMPLING)
    // Exception info can be regenerated on demand, so drop it to save memory —
    // unless this very pass is the one regenerating it.
    if (!m_regeneratingForExceptionInfo && (m_codeType == FunctionCode || m_codeType == EvalCode))
        m_codeBlock->clearExceptionInfo();
#endif

    m_codeBlock->shrinkToFit();
}
165
166 bool BytecodeGenerator::addVar(const Identifier& ident, bool isConstant, RegisterID*& r0)
167 {
168 int index = m_calleeRegisters.size();
169 SymbolTableEntry newEntry(index, isConstant ? ReadOnly : 0);
170 pair<SymbolTable::iterator, bool> result = symbolTable().add(ident.ustring().rep(), newEntry);
171
172 if (!result.second) {
173 r0 = &registerFor(result.first->second.getIndex());
174 return false;
175 }
176
177 ++m_codeBlock->m_numVars;
178 r0 = newRegister();
179 return true;
180 }
181
182 bool BytecodeGenerator::addGlobalVar(const Identifier& ident, bool isConstant, RegisterID*& r0)
183 {
184 int index = m_nextGlobalIndex;
185 SymbolTableEntry newEntry(index, isConstant ? ReadOnly : 0);
186 pair<SymbolTable::iterator, bool> result = symbolTable().add(ident.ustring().rep(), newEntry);
187
188 if (!result.second)
189 index = result.first->second.getIndex();
190 else {
191 --m_nextGlobalIndex;
192 m_globals.append(index + m_globalVarStorageOffset);
193 }
194
195 r0 = &registerFor(index);
196 return result.second;
197 }
198
199 void BytecodeGenerator::preserveLastVar()
200 {
201 if ((m_firstConstantIndex = m_calleeRegisters.size()) != 0)
202 m_lastVar = &m_calleeRegisters.last();
203 }
204
// Constructor for global (program) code. Besides initializing generator
// state, this eagerly declares the program's functions and vars, either as
// optimized global registers (fast path) or as ordinary global-object
// properties when the register budget is exceeded.
BytecodeGenerator::BytecodeGenerator(ProgramNode* programNode, const Debugger* debugger, const ScopeChain& scopeChain, SymbolTable* symbolTable, ProgramCodeBlock* codeBlock)
    : m_shouldEmitDebugHooks(!!debugger)
    , m_shouldEmitProfileHooks(scopeChain.globalObject()->supportsProfiling())
    , m_scopeChain(&scopeChain)
    , m_symbolTable(symbolTable)
    , m_scopeNode(programNode)
    , m_codeBlock(codeBlock)
    , m_thisRegister(RegisterFile::ProgramCodeThisRegister)
    , m_finallyDepth(0)
    , m_dynamicScopeDepth(0)
    , m_baseScopeDepth(0)
    , m_codeType(GlobalCode)
    , m_nextGlobalIndex(-1)
    , m_nextConstantOffset(0)
    , m_globalConstantIndex(0)
    , m_globalData(&scopeChain.globalObject()->globalExec()->globalData())
    , m_lastOpcodeID(op_end)
    , m_emitNodeDepth(0)
    , m_regeneratingForExceptionInfo(false)
    , m_codeBlockBeingRegeneratedFrom(0)
{
    if (m_shouldEmitDebugHooks)
        m_codeBlock->setNeedsFullScopeChain(true);

    emitOpcode(op_enter);
    codeBlock->setGlobalData(m_globalData);

    // FIXME: Move code that modifies the global object to Interpreter::execute.

    m_codeBlock->m_numParameters = 1; // Allocate space for "this"

    JSGlobalObject* globalObject = scopeChain.globalObject();
    ExecState* exec = globalObject->globalExec();
    RegisterFile* registerFile = &exec->globalData().interpreter->registerFile();

    // Shift register indexes in generated code to elide registers allocated by intermediate stack frames.
    m_globalVarStorageOffset = -RegisterFile::CallFrameHeaderSize - m_codeBlock->m_numParameters - registerFile->size();

    // Add previously defined symbols to bookkeeping.
    m_globals.grow(symbolTable->size());
    SymbolTable::iterator end = symbolTable->end();
    for (SymbolTable::iterator it = symbolTable->begin(); it != end; ++it)
        registerFor(it->second.getIndex()).setIndex(it->second.getIndex() + m_globalVarStorageOffset);

    // Batches the structure transitions caused by the putDirect calls below.
    BatchedTransitionOptimizer optimizer(globalObject);

    const VarStack& varStack = programNode->varStack();
    const FunctionStack& functionStack = programNode->functionStack();
    // Fast path only when every symbol (old and new) fits in the register
    // file's global register budget.
    bool canOptimizeNewGlobals = symbolTable->size() + functionStack.size() + varStack.size() < registerFile->maxGlobals();
    if (canOptimizeNewGlobals) {
        // Shift new symbols so they get stored prior to existing symbols.
        m_nextGlobalIndex -= symbolTable->size();

        for (size_t i = 0; i < functionStack.size(); ++i) {
            FunctionBodyNode* function = functionStack[i];
            globalObject->removeDirect(function->ident()); // Make sure our new function is not shadowed by an old property.
            emitNewFunction(addGlobalVar(function->ident(), false), function);
        }

        Vector<RegisterID*, 32> newVars;
        for (size_t i = 0; i < varStack.size(); ++i)
            if (!globalObject->hasProperty(exec, *varStack[i].first))
                newVars.append(addGlobalVar(*varStack[i].first, varStack[i].second & DeclarationStacks::IsConstant));

        preserveLastVar();

        // New vars are initialized to undefined after preserveLastVar() so
        // the loads land in the constant/temporary region.
        for (size_t i = 0; i < newVars.size(); ++i)
            emitLoad(newVars[i], jsUndefined());
    } else {
        // Slow path: declare functions and vars as real global properties.
        for (size_t i = 0; i < functionStack.size(); ++i) {
            FunctionBodyNode* function = functionStack[i];
            globalObject->putWithAttributes(exec, function->ident(), new (exec) JSFunction(exec, makeFunction(exec, function), scopeChain.node()), DontDelete);
        }
        for (size_t i = 0; i < varStack.size(); ++i) {
            if (globalObject->hasProperty(exec, *varStack[i].first))
                continue;
            int attributes = DontDelete;
            if (varStack[i].second & DeclarationStacks::IsConstant)
                attributes |= ReadOnly;
            globalObject->putWithAttributes(exec, *varStack[i].first, jsUndefined(), attributes);
        }

        preserveLastVar();
    }
}
290
// Constructor for function code. Sets up the callee register layout in a
// fixed order: arguments register, optional activation, declared functions,
// vars, then parameters (including "this") at negative indexes.
BytecodeGenerator::BytecodeGenerator(FunctionBodyNode* functionBody, const Debugger* debugger, const ScopeChain& scopeChain, SymbolTable* symbolTable, CodeBlock* codeBlock)
    : m_shouldEmitDebugHooks(!!debugger)
    , m_shouldEmitProfileHooks(scopeChain.globalObject()->supportsProfiling())
    , m_scopeChain(&scopeChain)
    , m_symbolTable(symbolTable)
    , m_scopeNode(functionBody)
    , m_codeBlock(codeBlock)
    , m_finallyDepth(0)
    , m_dynamicScopeDepth(0)
    , m_baseScopeDepth(0)
    , m_codeType(FunctionCode)
    , m_nextConstantOffset(0)
    , m_globalConstantIndex(0)
    , m_globalData(&scopeChain.globalObject()->globalExec()->globalData())
    , m_lastOpcodeID(op_end)
    , m_emitNodeDepth(0)
    , m_regeneratingForExceptionInfo(false)
    , m_codeBlockBeingRegeneratedFrom(0)
{
    if (m_shouldEmitDebugHooks)
        m_codeBlock->setNeedsFullScopeChain(true);

    codeBlock->setGlobalData(m_globalData);

    bool usesArguments = functionBody->usesArguments();
    codeBlock->setUsesArguments(usesArguments);
    if (usesArguments) {
        // "arguments" lives in a dedicated call-frame slot; also declare it
        // as a local so name lookup finds it.
        m_argumentsRegister.setIndex(RegisterFile::OptionalCalleeArguments);
        addVar(propertyNames().arguments, false);
    }

    if (m_codeBlock->needsFullScopeChain()) {
        // Reserve a register for the activation object and enter through the
        // activation-creating variant of op_enter.
        ++m_codeBlock->m_numVars;
        m_activationRegisterIndex = newRegister()->index();
        emitOpcode(op_enter_with_activation);
        instructions().append(m_activationRegisterIndex);
    } else
        emitOpcode(op_enter);

    if (usesArguments) {
        emitOpcode(op_init_arguments);

        // The debugger currently retrieves the arguments object from an activation rather than pulling
        // it from a call frame. In the long-term it should stop doing that (<rdar://problem/6911886>),
        // but for now we force eager creation of the arguments object when debugging.
        if (m_shouldEmitDebugHooks)
            emitOpcode(op_create_arguments);
    }

    // Declare nested function declarations; remember their names so that
    // same-named parameters do not overwrite them (see addParameter).
    const DeclarationStacks::FunctionStack& functionStack = functionBody->functionStack();
    for (size_t i = 0; i < functionStack.size(); ++i) {
        FunctionBodyNode* function = functionStack[i];
        const Identifier& ident = function->ident();
        m_functions.add(ident.ustring().rep());
        emitNewFunction(addVar(ident, false), function);
    }

    const DeclarationStacks::VarStack& varStack = functionBody->varStack();
    for (size_t i = 0; i < varStack.size(); ++i)
        addVar(*varStack[i].first, varStack[i].second & DeclarationStacks::IsConstant);

    // Parameters occupy negative indexes below the call frame header.
    FunctionParameters& parameters = *functionBody->parameters();
    size_t parameterCount = parameters.size();
    m_nextParameterIndex = -RegisterFile::CallFrameHeaderSize - parameterCount - 1;
    m_parameters.grow(1 + parameterCount); // reserve space for "this"

    // Add "this" as a parameter
    m_thisRegister.setIndex(m_nextParameterIndex);
    ++m_nextParameterIndex;
    ++m_codeBlock->m_numParameters;

    if (functionBody->usesThis() || m_shouldEmitDebugHooks) {
        emitOpcode(op_convert_this);
        instructions().append(m_thisRegister.index());
    }

    for (size_t i = 0; i < parameterCount; ++i)
        addParameter(parameters[i]);

    preserveLastVar();
}
372
// Constructor for eval code. Eval cannot use local registers for its vars
// (they must be resolved dynamically in the caller's scope), so function
// declarations and variable names are recorded on the code block instead.
BytecodeGenerator::BytecodeGenerator(EvalNode* evalNode, const Debugger* debugger, const ScopeChain& scopeChain, SymbolTable* symbolTable, EvalCodeBlock* codeBlock)
    : m_shouldEmitDebugHooks(!!debugger)
    , m_shouldEmitProfileHooks(scopeChain.globalObject()->supportsProfiling())
    , m_scopeChain(&scopeChain)
    , m_symbolTable(symbolTable)
    , m_scopeNode(evalNode)
    , m_codeBlock(codeBlock)
    , m_thisRegister(RegisterFile::ProgramCodeThisRegister)
    , m_finallyDepth(0)
    , m_dynamicScopeDepth(0)
    , m_baseScopeDepth(codeBlock->baseScopeDepth())
    , m_codeType(EvalCode)
    , m_nextConstantOffset(0)
    , m_globalConstantIndex(0)
    , m_globalData(&scopeChain.globalObject()->globalExec()->globalData())
    , m_lastOpcodeID(op_end)
    , m_emitNodeDepth(0)
    , m_regeneratingForExceptionInfo(false)
    , m_codeBlockBeingRegeneratedFrom(0)
{
    // A non-zero base scope depth means this eval runs inside dynamic scope.
    if (m_shouldEmitDebugHooks || m_baseScopeDepth)
        m_codeBlock->setNeedsFullScopeChain(true);

    emitOpcode(op_enter);
    codeBlock->setGlobalData(m_globalData);
    m_codeBlock->m_numParameters = 1; // Allocate space for "this"

    const DeclarationStacks::FunctionStack& functionStack = evalNode->functionStack();
    for (size_t i = 0; i < functionStack.size(); ++i)
        m_codeBlock->addFunctionDecl(makeFunction(m_globalData, functionStack[i]));

    // Hand the declared variable names to the code block; the interpreter
    // declares them in the scope at execution time.
    const DeclarationStacks::VarStack& varStack = evalNode->varStack();
    unsigned numVariables = varStack.size();
    Vector<Identifier> variables;
    variables.reserveCapacity(numVariables);
    for (size_t i = 0; i < numVariables; ++i)
        variables.append(*varStack[i].first);
    codeBlock->adoptVariables(variables);

    preserveLastVar();
}
414
// Registers one named parameter at the next parameter index. Returns the
// parameter's register, or 0 when the name is shadowed by a function
// declaration (in which case no symbol-table entry is made).
RegisterID* BytecodeGenerator::addParameter(const Identifier& ident)
{
    // Parameters overwrite var declarations, but not function declarations.
    RegisterID* result = 0;
    UString::Rep* rep = ident.ustring().rep();
    if (!m_functions.contains(rep)) {
        symbolTable().set(rep, m_nextParameterIndex);
        RegisterID& parameter = registerFor(m_nextParameterIndex);
        parameter.setIndex(m_nextParameterIndex);
        result = &parameter;
    }

    // To maintain the calling convention, we have to allocate unique space for
    // each parameter, even if the parameter doesn't make it into the symbol table.
    ++m_nextParameterIndex;
    ++m_codeBlock->m_numParameters;
    return result;
}
433
434 RegisterID* BytecodeGenerator::registerFor(const Identifier& ident)
435 {
436 if (ident == propertyNames().thisIdentifier)
437 return &m_thisRegister;
438
439 if (!shouldOptimizeLocals())
440 return 0;
441
442 SymbolTableEntry entry = symbolTable().get(ident.ustring().rep());
443 if (entry.isNull())
444 return 0;
445
446 if (ident == propertyNames().arguments)
447 createArgumentsIfNecessary();
448
449 return &registerFor(entry.getIndex());
450 }
451
452 bool BytecodeGenerator::willResolveToArguments(const Identifier& ident)
453 {
454 if (ident != propertyNames().arguments)
455 return false;
456
457 if (!shouldOptimizeLocals())
458 return false;
459
460 SymbolTableEntry entry = symbolTable().get(ident.ustring().rep());
461 if (entry.isNull())
462 return false;
463
464 if (m_codeBlock->usesArguments() && m_codeType == FunctionCode)
465 return true;
466
467 return false;
468 }
469
// Returns the register holding the local "arguments" object. Callers must
// have already verified via willResolveToArguments().
RegisterID* BytecodeGenerator::uncheckedRegisterForArguments()
{
    ASSERT(willResolveToArguments(propertyNames().arguments));

    SymbolTableEntry entry = symbolTable().get(propertyNames().arguments.ustring().rep());
    ASSERT(!entry.isNull());
    return &registerFor(entry.getIndex());
}
478
479 RegisterID* BytecodeGenerator::constRegisterFor(const Identifier& ident)
480 {
481 if (m_codeType == EvalCode)
482 return 0;
483
484 SymbolTableEntry entry = symbolTable().get(ident.ustring().rep());
485 if (entry.isNull())
486 return 0;
487
488 return &registerFor(entry.getIndex());
489 }
490
491 bool BytecodeGenerator::isLocal(const Identifier& ident)
492 {
493 if (ident == propertyNames().thisIdentifier)
494 return true;
495
496 return shouldOptimizeLocals() && symbolTable().contains(ident.ustring().rep());
497 }
498
// A local is constant iff its symbol-table entry carries the ReadOnly flag.
// (A name with no entry presumably yields a null, non-read-only entry — confirm.)
bool BytecodeGenerator::isLocalConstant(const Identifier& ident)
{
    return symbolTable().get(ident.ustring().rep()).isReadOnly();
}
503
504 RegisterID* BytecodeGenerator::newRegister()
505 {
506 m_calleeRegisters.append(m_calleeRegisters.size());
507 m_codeBlock->m_numCalleeRegisters = max<int>(m_codeBlock->m_numCalleeRegisters, m_calleeRegisters.size());
508 return &m_calleeRegisters.last();
509 }
510
// Allocates a temporary register, first popping any unreferenced registers
// off the top of the stack so temporaries get recycled.
RegisterID* BytecodeGenerator::newTemporary()
{
    // Reclaim free register IDs.
    while (m_calleeRegisters.size() && !m_calleeRegisters.last().refCount())
        m_calleeRegisters.removeLast();

    RegisterID* result = newRegister();
    result->setTemporary();
    return result;
}
521
522 RegisterID* BytecodeGenerator::highestUsedRegister()
523 {
524 size_t count = m_codeBlock->m_numCalleeRegisters;
525 while (m_calleeRegisters.size() < count)
526 newRegister();
527 return &m_calleeRegisters.last();
528 }
529
// Creates a label scope for a loop, switch, or labeled statement. Scopes are
// kept in a stack and recycled once unreferenced.
PassRefPtr<LabelScope> BytecodeGenerator::newLabelScope(LabelScope::Type type, const Identifier* name)
{
    // Reclaim free label scopes.
    while (m_labelScopes.size() && !m_labelScopes.last().refCount())
        m_labelScopes.removeLast();

    // Allocate new label scope.
    LabelScope scope(type, name, scopeDepth(), newLabel(), type == LabelScope::Loop ? newLabel() : PassRefPtr<Label>()); // Only loops have continue targets.
    m_labelScopes.append(scope);
    return &m_labelScopes.last();
}
541
// Allocates a jump label tied to this code block, recycling unreferenced
// labels from the top of the stack first.
PassRefPtr<Label> BytecodeGenerator::newLabel()
{
    // Reclaim free label IDs.
    while (m_labels.size() && !m_labels.last().refCount())
        m_labels.removeLast();

    // Allocate new label ID.
    m_labels.append(m_codeBlock);
    return &m_labels.last();
}
552
// Binds 'l0' to the current instruction offset and records it as a jump
// target, which disables peephole optimization across the label.
PassRefPtr<Label> BytecodeGenerator::emitLabel(Label* l0)
{
    unsigned newLabelIndex = instructions().size();
    l0->setLocation(newLabelIndex);

    if (m_codeBlock->numberOfJumpTargets()) {
        unsigned lastLabelIndex = m_codeBlock->lastJumpTarget();
        ASSERT(lastLabelIndex <= newLabelIndex);
        if (newLabelIndex == lastLabelIndex) {
            // Peephole optimizations have already been disabled by emitting the last label
            return l0;
        }
    }

    m_codeBlock->addJumpTarget(newLabelIndex);

    // This disables peephole optimizations when an instruction is a jump target
    m_lastOpcodeID = op_end;
    return l0;
}
573
// Appends an opcode to the instruction stream and remembers it as the last
// emitted opcode — the hook the peephole optimizers key off.
void BytecodeGenerator::emitOpcode(OpcodeID opcodeID)
{
    instructions().append(globalData()->interpreter->getOpcode(opcodeID));
    m_lastOpcodeID = opcodeID;
}
579
// Reads back the operands of the binary op just emitted; its last three
// instruction words are dst, src1, src2 in that order.
void BytecodeGenerator::retrieveLastBinaryOp(int& dstIndex, int& src1Index, int& src2Index)
{
    ASSERT(instructions().size() >= 4);
    size_t size = instructions().size();
    dstIndex = instructions().at(size - 3).u.operand;
    src1Index = instructions().at(size - 2).u.operand;
    src2Index = instructions().at(size - 1).u.operand;
}
588
// Reads back the operands of the unary op just emitted; its last two
// instruction words are dst, src in that order.
void BytecodeGenerator::retrieveLastUnaryOp(int& dstIndex, int& srcIndex)
{
    ASSERT(instructions().size() >= 3);
    size_t size = instructions().size();
    dstIndex = instructions().at(size - 2).u.operand;
    srcIndex = instructions().at(size - 1).u.operand;
}
596
// Removes the just-emitted binary op (opcode + 3 operand words) so a fused
// replacement can be emitted in its place.
void ALWAYS_INLINE BytecodeGenerator::rewindBinaryOp()
{
    ASSERT(instructions().size() >= 4);
    instructions().shrink(instructions().size() - 4);
}
602
// Removes the just-emitted unary op (opcode + 2 operand words) so a fused
// replacement can be emitted in its place.
void ALWAYS_INLINE BytecodeGenerator::rewindUnaryOp()
{
    ASSERT(instructions().size() >= 3);
    instructions().shrink(instructions().size() - 3);
}
608
609 PassRefPtr<Label> BytecodeGenerator::emitJump(Label* target)
610 {
611 size_t begin = instructions().size();
612 emitOpcode(target->isForward() ? op_jmp : op_loop);
613 instructions().append(target->bind(begin, instructions().size()));
614 return target;
615 }
616
// Emits a jump taken when 'cond' is true. Peephole: when 'cond' is the dead
// temporary result of the comparison just emitted, the comparison is rewound
// and fused into a single compare-and-jump opcode.
PassRefPtr<Label> BytecodeGenerator::emitJumpIfTrue(RegisterID* cond, Label* target)
{
    if (m_lastOpcodeID == op_less) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        // Fuse only when the comparison result is this exact, otherwise-unused temporary.
        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            // Backward jumps use the op_loop_* variant for timeout checks.
            emitOpcode(target->isForward() ? op_jless : op_loop_if_less);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_lesseq) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(target->isForward() ? op_jlesseq : op_loop_if_lesseq);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
        // Null-check fusions have no loop variants, hence forward-only.
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jeq_null);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jneq_null);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    }

    // Generic path: test the register directly.
    size_t begin = instructions().size();

    emitOpcode(target->isForward() ? op_jtrue : op_loop_if_true);
    instructions().append(cond->index());
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}
692
// Emits a jump taken when 'cond' is false. Mirror of emitJumpIfTrue: fuses
// the just-emitted comparison into an inverted compare-and-jump when its
// result is a dead temporary, and folds away a trailing op_not.
PassRefPtr<Label> BytecodeGenerator::emitJumpIfFalse(RegisterID* cond, Label* target)
{
    if (m_lastOpcodeID == op_less && target->isForward()) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            // "jump if NOT less" — the inverted fused form.
            emitOpcode(op_jnless);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_lesseq && target->isForward()) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jnlesseq);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_not) {
        // "jump if !x is false" == "jump if x is true": drop the not.
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(target->isForward() ? op_jtrue : op_loop_if_true);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jneq_null);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jeq_null);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    }

    // Generic path: test the register directly.
    size_t begin = instructions().size();
    emitOpcode(target->isForward() ? op_jfalse : op_loop_if_false);
    instructions().append(cond->index());
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}
782
// Jumps unless 'cond' holds the cached Function.prototype.call object —
// used to guard the fast path for f.call(...).
PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionCall(RegisterID* cond, Label* target)
{
    size_t begin = instructions().size();

    emitOpcode(op_jneq_ptr);
    instructions().append(cond->index());
    instructions().append(m_scopeChain->globalObject()->d()->callFunction);
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}
793
// Jumps unless 'cond' holds the cached Function.prototype.apply object —
// used to guard the fast path for f.apply(...).
PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionApply(RegisterID* cond, Label* target)
{
    size_t begin = instructions().size();

    emitOpcode(op_jneq_ptr);
    instructions().append(cond->index());
    instructions().append(m_scopeChain->globalObject()->d()->applyFunction);
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}
804
805 unsigned BytecodeGenerator::addConstant(const Identifier& ident)
806 {
807 UString::Rep* rep = ident.ustring().rep();
808 pair<IdentifierMap::iterator, bool> result = m_identifierMap.add(rep, m_codeBlock->numberOfIdentifiers());
809 if (result.second) // new entry
810 m_codeBlock->addIdentifier(Identifier(m_globalData, rep));
811
812 return result.first->second;
813 }
814
// Interns a JSValue in the constant pool, returning its (shared) constant
// register. Identical values map to the same register.
RegisterID* BytecodeGenerator::addConstantValue(JSValue v)
{
    int index = m_nextConstantOffset;

    pair<JSValueMap::iterator, bool> result = m_jsValueMap.add(JSValue::encode(v), m_nextConstantOffset);
    if (result.second) {
        // First time this value has been seen: append a new constant register.
        m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset);
        ++m_nextConstantOffset;
        m_codeBlock->addConstantRegister(JSValue(v));
    } else
        index = result.first->second; // Reuse the existing pool slot.

    return &m_constantPoolRegisters[index];
}
829
// Registers a regular expression with the code block; returns its index.
unsigned BytecodeGenerator::addRegExp(RegExp* r)
{
    return m_codeBlock->addRegExp(r);
}
834
// Emits "mov dst, src"; returns dst for chaining.
RegisterID* BytecodeGenerator::emitMove(RegisterID* dst, RegisterID* src)
{
    emitOpcode(op_mov);
    instructions().append(dst->index());
    instructions().append(src->index());
    return dst;
}
842
// Emits a generic unary op with the layout [opcode, dst, src]; returns dst.
RegisterID* BytecodeGenerator::emitUnaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src)
{
    emitOpcode(opcodeID);
    instructions().append(dst->index());
    instructions().append(src->index());
    return dst;
}
850
// Emits pre-increment: srcDst is both operand and result.
RegisterID* BytecodeGenerator::emitPreInc(RegisterID* srcDst)
{
    emitOpcode(op_pre_inc);
    instructions().append(srcDst->index());
    return srcDst;
}
857
// Emits pre-decrement: srcDst is both operand and result.
RegisterID* BytecodeGenerator::emitPreDec(RegisterID* srcDst)
{
    emitOpcode(op_pre_dec);
    instructions().append(srcDst->index());
    return srcDst;
}
864
// Emits post-increment: dst receives the old value, srcDst is incremented.
RegisterID* BytecodeGenerator::emitPostInc(RegisterID* dst, RegisterID* srcDst)
{
    emitOpcode(op_post_inc);
    instructions().append(dst->index());
    instructions().append(srcDst->index());
    return dst;
}
872
// Emits post-decrement: dst receives the old value, srcDst is decremented.
RegisterID* BytecodeGenerator::emitPostDec(RegisterID* dst, RegisterID* srcDst)
{
    emitOpcode(op_post_dec);
    instructions().append(dst->index());
    instructions().append(srcDst->index());
    return dst;
}
880
881 RegisterID* BytecodeGenerator::emitBinaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2, OperandTypes types)
882 {
883 emitOpcode(opcodeID);
884 instructions().append(dst->index());
885 instructions().append(src1->index());
886 instructions().append(src2->index());
887
888 if (opcodeID == op_bitor || opcodeID == op_bitand || opcodeID == op_bitxor ||
889 opcodeID == op_add || opcodeID == op_mul || opcodeID == op_sub || opcodeID == op_div)
890 instructions().append(types.toInt());
891
892 return dst;
893 }
894
895 RegisterID* BytecodeGenerator::emitEqualityOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2)
896 {
897 if (m_lastOpcodeID == op_typeof) {
898 int dstIndex;
899 int srcIndex;
900
901 retrieveLastUnaryOp(dstIndex, srcIndex);
902
903 if (src1->index() == dstIndex
904 && src1->isTemporary()
905 && m_codeBlock->isConstantRegisterIndex(src2->index())
906 && m_codeBlock->constantRegister(src2->index()).jsValue().isString()) {
907 const UString& value = asString(m_codeBlock->constantRegister(src2->index()).jsValue())->tryGetValue();
908 if (value == "undefined") {
909 rewindUnaryOp();
910 emitOpcode(op_is_undefined);
911 instructions().append(dst->index());
912 instructions().append(srcIndex);
913 return dst;
914 }
915 if (value == "boolean") {
916 rewindUnaryOp();
917 emitOpcode(op_is_boolean);
918 instructions().append(dst->index());
919 instructions().append(srcIndex);
920 return dst;
921 }
922 if (value == "number") {
923 rewindUnaryOp();
924 emitOpcode(op_is_number);
925 instructions().append(dst->index());
926 instructions().append(srcIndex);
927 return dst;
928 }
929 if (value == "string") {
930 rewindUnaryOp();
931 emitOpcode(op_is_string);
932 instructions().append(dst->index());
933 instructions().append(srcIndex);
934 return dst;
935 }
936 if (value == "object") {
937 rewindUnaryOp();
938 emitOpcode(op_is_object);
939 instructions().append(dst->index());
940 instructions().append(srcIndex);
941 return dst;
942 }
943 if (value == "function") {
944 rewindUnaryOp();
945 emitOpcode(op_is_function);
946 instructions().append(dst->index());
947 instructions().append(srcIndex);
948 return dst;
949 }
950 }
951 }
952
953 emitOpcode(opcodeID);
954 instructions().append(dst->index());
955 instructions().append(src1->index());
956 instructions().append(src2->index());
957 return dst;
958 }
959
960 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, bool b)
961 {
962 return emitLoad(dst, jsBoolean(b));
963 }
964
965 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, double number)
966 {
967 // FIXME: Our hash tables won't hold infinity, so we make a new JSNumberCell each time.
968 // Later we can do the extra work to handle that like the other cases.
969 if (number == HashTraits<double>::emptyValue() || HashTraits<double>::isDeletedValue(number))
970 return emitLoad(dst, jsNumber(globalData(), number));
971 JSValue& valueInMap = m_numberMap.add(number, JSValue()).first->second;
972 if (!valueInMap)
973 valueInMap = jsNumber(globalData(), number);
974 return emitLoad(dst, valueInMap);
975 }
976
977 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, const Identifier& identifier)
978 {
979 JSString*& stringInMap = m_stringMap.add(identifier.ustring().rep(), 0).first->second;
980 if (!stringInMap)
981 stringInMap = jsOwnedString(globalData(), identifier.ustring());
982 return emitLoad(dst, JSValue(stringInMap));
983 }
984
985 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, JSValue v)
986 {
987 RegisterID* constantID = addConstantValue(v);
988 if (dst)
989 return emitMove(dst, constantID);
990 return constantID;
991 }
992
// Attempts to statically resolve |property| against the scope chain.
//
// Outputs: |index| is the register index within the resolved variable object
// (or missingSymbolMarker() if unknown), |stackDepth| is how many scopes must
// be skipped, |requiresDynamicChecks| is set when an intervening scope may
// change at runtime, and |globalObject| is set when resolution lands on the
// outermost (global) scope object.
//
// Returns true when the lookup can be at least partially optimized (skipping
// some scopes statically); false when no static optimization is possible.
bool BytecodeGenerator::findScopedProperty(const Identifier& property, int& index, size_t& stackDepth, bool forWriting, bool& requiresDynamicChecks, JSObject*& globalObject)
{
    // Cases where we cannot statically optimize the lookup.
    if (property == propertyNames().arguments || !canOptimizeNonLocals()) {
        stackDepth = 0;
        index = missingSymbolMarker();

        // For global code the scope chain is a single global object; report
        // it so the caller can still emit a global resolve.
        if (shouldOptimizeLocals() && m_codeType == GlobalCode) {
            ScopeChainIterator iter = m_scopeChain->begin();
            globalObject = *iter;
            ASSERT((++iter) == m_scopeChain->end());
        }
        return false;
    }

    size_t depth = 0;
    requiresDynamicChecks = false;
    ScopeChainIterator iter = m_scopeChain->begin();
    ScopeChainIterator end = m_scopeChain->end();
    for (; iter != end; ++iter, ++depth) {
        JSObject* currentScope = *iter;
        // Only variable objects have a symbol table we can consult statically.
        if (!currentScope->isVariableObject())
            break;
        JSVariableObject* currentVariableObject = static_cast<JSVariableObject*>(currentScope);
        SymbolTableEntry entry = currentVariableObject->symbolTable().get(property.ustring().rep());

        // Found the property
        if (!entry.isNull()) {
            // A read-only binding cannot be written through the fast path.
            if (entry.isReadOnly() && forWriting) {
                stackDepth = 0;
                index = missingSymbolMarker();
                // If this scope is the last one, it is the global object.
                if (++iter == end)
                    globalObject = currentVariableObject;
                return false;
            }
            stackDepth = depth;
            index = entry.getIndex();
            if (++iter == end)
                globalObject = currentVariableObject;
            return true;
        }
        // A dynamic scope (e.g. one whose bindings can change at runtime)
        // stops the static walk entirely; a scope that merely *may* require
        // dynamic checks is recorded and the walk continues.
        bool scopeRequiresDynamicChecks = false;
        if (currentVariableObject->isDynamicScope(scopeRequiresDynamicChecks))
            break;
        requiresDynamicChecks |= scopeRequiresDynamicChecks;
    }
    // Can't locate the property but we're able to avoid a few lookups.
    stackDepth = depth;
    index = missingSymbolMarker();
    JSObject* scope = *iter;
    if (++iter == end)
        globalObject = scope;
    return true;
}
1047
1048 RegisterID* BytecodeGenerator::emitInstanceOf(RegisterID* dst, RegisterID* value, RegisterID* base, RegisterID* basePrototype)
1049 {
1050 emitOpcode(op_instanceof);
1051 instructions().append(dst->index());
1052 instructions().append(value->index());
1053 instructions().append(base->index());
1054 instructions().append(basePrototype->index());
1055 return dst;
1056 }
1057
// Emits the most specific resolve opcode possible for |property|, based on
// what findScopedProperty can prove statically:
//   - op_resolve: full dynamic scope-chain lookup (no optimization possible)
//   - direct scoped/global var access via emitGetScopedVar
//   - op_resolve_global / op_resolve_global_dynamic: lookup on a known global
//   - op_resolve_skip: dynamic lookup that skips a known number of scopes
RegisterID* BytecodeGenerator::emitResolve(RegisterID* dst, const Identifier& property)
{
    size_t depth = 0;
    int index = 0;
    JSObject* globalObject = 0;
    bool requiresDynamicChecks = false;
    if (!findScopedProperty(property, index, depth, false, requiresDynamicChecks, globalObject) && !globalObject) {
        // We can't optimise at all :-(
        emitOpcode(op_resolve);
        instructions().append(dst->index());
        instructions().append(addConstant(property));
        return dst;
    }

    if (globalObject) {
        bool forceGlobalResolve = false;
        // When regenerating bytecode for exception info, the resolve kind must
        // match the original code block at this bytecode offset.
        if (m_regeneratingForExceptionInfo) {
#if ENABLE(JIT)
            forceGlobalResolve = m_codeBlockBeingRegeneratedFrom->hasGlobalResolveInfoAtBytecodeOffset(instructions().size());
#else
            forceGlobalResolve = m_codeBlockBeingRegeneratedFrom->hasGlobalResolveInstructionAtBytecodeOffset(instructions().size());
#endif
        }

        if (index != missingSymbolMarker() && !forceGlobalResolve && !requiresDynamicChecks) {
            // Directly index the property lookup across multiple scopes.
            return emitGetScopedVar(dst, depth, index, globalObject);
        }

        // Record this bytecode offset for the global-resolve cache BEFORE
        // emitting the opcode, so the offset refers to the opcode itself.
#if ENABLE(JIT)
        m_codeBlock->addGlobalResolveInfo(instructions().size());
#else
        m_codeBlock->addGlobalResolveInstruction(instructions().size());
#endif
        emitOpcode(requiresDynamicChecks ? op_resolve_global_dynamic : op_resolve_global);
        instructions().append(dst->index());
        instructions().append(globalObject);
        instructions().append(addConstant(property));
        // Two zeroed slots; apparently reserved for the resolve cache — see
        // the global-resolve info registered above.
        instructions().append(0);
        instructions().append(0);
        // The dynamic variant additionally records the skip depth.
        if (requiresDynamicChecks)
            instructions().append(depth);
        return dst;
    }

    if (requiresDynamicChecks) {
        // If we get here we have eval nested inside a |with| just give up
        emitOpcode(op_resolve);
        instructions().append(dst->index());
        instructions().append(addConstant(property));
        return dst;
    }

    if (index != missingSymbolMarker()) {
        // Directly index the property lookup across multiple scopes.
        return emitGetScopedVar(dst, depth, index, globalObject);
    }

    // In this case we are at least able to drop a few scope chains from the
    // lookup chain, although we still need to hash from then on.
    emitOpcode(op_resolve_skip);
    instructions().append(dst->index());
    instructions().append(addConstant(property));
    instructions().append(depth);
    return dst;
}
1124
1125 RegisterID* BytecodeGenerator::emitGetScopedVar(RegisterID* dst, size_t depth, int index, JSValue globalObject)
1126 {
1127 if (globalObject) {
1128 emitOpcode(op_get_global_var);
1129 instructions().append(dst->index());
1130 instructions().append(asCell(globalObject));
1131 instructions().append(index);
1132 return dst;
1133 }
1134
1135 emitOpcode(op_get_scoped_var);
1136 instructions().append(dst->index());
1137 instructions().append(index);
1138 instructions().append(depth);
1139 return dst;
1140 }
1141
1142 RegisterID* BytecodeGenerator::emitPutScopedVar(size_t depth, int index, RegisterID* value, JSValue globalObject)
1143 {
1144 if (globalObject) {
1145 emitOpcode(op_put_global_var);
1146 instructions().append(asCell(globalObject));
1147 instructions().append(index);
1148 instructions().append(value->index());
1149 return value;
1150 }
1151 emitOpcode(op_put_scoped_var);
1152 instructions().append(index);
1153 instructions().append(depth);
1154 instructions().append(value->index());
1155 return value;
1156 }
1157
1158 RegisterID* BytecodeGenerator::emitResolveBase(RegisterID* dst, const Identifier& property)
1159 {
1160 size_t depth = 0;
1161 int index = 0;
1162 JSObject* globalObject = 0;
1163 bool requiresDynamicChecks = false;
1164 findScopedProperty(property, index, depth, false, requiresDynamicChecks, globalObject);
1165 if (!globalObject || requiresDynamicChecks) {
1166 // We can't optimise at all :-(
1167 emitOpcode(op_resolve_base);
1168 instructions().append(dst->index());
1169 instructions().append(addConstant(property));
1170 return dst;
1171 }
1172
1173 // Global object is the base
1174 return emitLoad(dst, JSValue(globalObject));
1175 }
1176
// Resolves |property| producing both its base object (into |baseDst|) and its
// value (into |propDst|). Falls back to a single op_resolve_with_base when
// nothing can be proven statically; otherwise the base is the known global
// object and the value is fetched via the fastest available global path.
RegisterID* BytecodeGenerator::emitResolveWithBase(RegisterID* baseDst, RegisterID* propDst, const Identifier& property)
{
    size_t depth = 0;
    int index = 0;
    JSObject* globalObject = 0;
    bool requiresDynamicChecks = false;
    if (!findScopedProperty(property, index, depth, false, requiresDynamicChecks, globalObject) || !globalObject || requiresDynamicChecks) {
        // We can't optimise at all :-(
        emitOpcode(op_resolve_with_base);
        instructions().append(baseDst->index());
        instructions().append(propDst->index());
        instructions().append(addConstant(property));
        return baseDst;
    }

    bool forceGlobalResolve = false;
    // When regenerating bytecode for exception info, the resolve kind must
    // match the original code block at this bytecode offset.
    if (m_regeneratingForExceptionInfo) {
#if ENABLE(JIT)
        forceGlobalResolve = m_codeBlockBeingRegeneratedFrom->hasGlobalResolveInfoAtBytecodeOffset(instructions().size());
#else
        forceGlobalResolve = m_codeBlockBeingRegeneratedFrom->hasGlobalResolveInstructionAtBytecodeOffset(instructions().size());
#endif
    }

    // Global object is the base
    emitLoad(baseDst, JSValue(globalObject));

    if (index != missingSymbolMarker() && !forceGlobalResolve) {
        // Directly index the property lookup across multiple scopes.
        emitGetScopedVar(propDst, depth, index, globalObject);
        return baseDst;
    }

    // Record this offset BEFORE emitting the opcode so it refers to the
    // resolve opcode itself (note: after the emitLoad above).
#if ENABLE(JIT)
    m_codeBlock->addGlobalResolveInfo(instructions().size());
#else
    m_codeBlock->addGlobalResolveInstruction(instructions().size());
#endif
    emitOpcode(requiresDynamicChecks ? op_resolve_global_dynamic : op_resolve_global);
    instructions().append(propDst->index());
    instructions().append(globalObject);
    instructions().append(addConstant(property));
    // Two zeroed slots; apparently reserved for the resolve cache.
    instructions().append(0);
    instructions().append(0);
    if (requiresDynamicChecks)
        instructions().append(depth);
    return baseDst;
}
1225
// Emits op_method_check, an operand-less opcode. NOTE(review): presumably a
// hint annotating the following property access for method-call
// optimization — confirm against the interpreter/JIT consumers.
void BytecodeGenerator::emitMethodCheck()
{
    emitOpcode(op_method_check);
}
1230
1231 RegisterID* BytecodeGenerator::emitGetById(RegisterID* dst, RegisterID* base, const Identifier& property)
1232 {
1233 #if ENABLE(JIT)
1234 m_codeBlock->addStructureStubInfo(StructureStubInfo(access_get_by_id));
1235 #else
1236 m_codeBlock->addPropertyAccessInstruction(instructions().size());
1237 #endif
1238
1239 emitOpcode(op_get_by_id);
1240 instructions().append(dst->index());
1241 instructions().append(base->index());
1242 instructions().append(addConstant(property));
1243 instructions().append(0);
1244 instructions().append(0);
1245 instructions().append(0);
1246 instructions().append(0);
1247 return dst;
1248 }
1249
1250 RegisterID* BytecodeGenerator::emitPutById(RegisterID* base, const Identifier& property, RegisterID* value)
1251 {
1252 #if ENABLE(JIT)
1253 m_codeBlock->addStructureStubInfo(StructureStubInfo(access_put_by_id));
1254 #else
1255 m_codeBlock->addPropertyAccessInstruction(instructions().size());
1256 #endif
1257
1258 emitOpcode(op_put_by_id);
1259 instructions().append(base->index());
1260 instructions().append(addConstant(property));
1261 instructions().append(value->index());
1262 instructions().append(0);
1263 instructions().append(0);
1264 instructions().append(0);
1265 instructions().append(0);
1266 instructions().append(0);
1267 return value;
1268 }
1269
1270 RegisterID* BytecodeGenerator::emitDirectPutById(RegisterID* base, const Identifier& property, RegisterID* value)
1271 {
1272 #if ENABLE(JIT)
1273 m_codeBlock->addStructureStubInfo(StructureStubInfo(access_put_by_id));
1274 #else
1275 m_codeBlock->addPropertyAccessInstruction(instructions().size());
1276 #endif
1277
1278 emitOpcode(op_put_by_id);
1279 instructions().append(base->index());
1280 instructions().append(addConstant(property));
1281 instructions().append(value->index());
1282 instructions().append(0);
1283 instructions().append(0);
1284 instructions().append(0);
1285 instructions().append(0);
1286 instructions().append(property != m_globalData->propertyNames->underscoreProto);
1287 return value;
1288 }
1289
1290 RegisterID* BytecodeGenerator::emitPutGetter(RegisterID* base, const Identifier& property, RegisterID* value)
1291 {
1292 emitOpcode(op_put_getter);
1293 instructions().append(base->index());
1294 instructions().append(addConstant(property));
1295 instructions().append(value->index());
1296 return value;
1297 }
1298
1299 RegisterID* BytecodeGenerator::emitPutSetter(RegisterID* base, const Identifier& property, RegisterID* value)
1300 {
1301 emitOpcode(op_put_setter);
1302 instructions().append(base->index());
1303 instructions().append(addConstant(property));
1304 instructions().append(value->index());
1305 return value;
1306 }
1307
1308 RegisterID* BytecodeGenerator::emitDeleteById(RegisterID* dst, RegisterID* base, const Identifier& property)
1309 {
1310 emitOpcode(op_del_by_id);
1311 instructions().append(dst->index());
1312 instructions().append(base->index());
1313 instructions().append(addConstant(property));
1314 return dst;
1315 }
1316
1317 RegisterID* BytecodeGenerator::emitGetByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
1318 {
1319 for (size_t i = m_forInContextStack.size(); i > 0; i--) {
1320 ForInContext& context = m_forInContextStack[i - 1];
1321 if (context.propertyRegister == property) {
1322 emitOpcode(op_get_by_pname);
1323 instructions().append(dst->index());
1324 instructions().append(base->index());
1325 instructions().append(property->index());
1326 instructions().append(context.expectedSubscriptRegister->index());
1327 instructions().append(context.iterRegister->index());
1328 instructions().append(context.indexRegister->index());
1329 return dst;
1330 }
1331 }
1332 emitOpcode(op_get_by_val);
1333 instructions().append(dst->index());
1334 instructions().append(base->index());
1335 instructions().append(property->index());
1336 return dst;
1337 }
1338
1339 RegisterID* BytecodeGenerator::emitPutByVal(RegisterID* base, RegisterID* property, RegisterID* value)
1340 {
1341 emitOpcode(op_put_by_val);
1342 instructions().append(base->index());
1343 instructions().append(property->index());
1344 instructions().append(value->index());
1345 return value;
1346 }
1347
1348 RegisterID* BytecodeGenerator::emitDeleteByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
1349 {
1350 emitOpcode(op_del_by_val);
1351 instructions().append(dst->index());
1352 instructions().append(base->index());
1353 instructions().append(property->index());
1354 return dst;
1355 }
1356
1357 RegisterID* BytecodeGenerator::emitPutByIndex(RegisterID* base, unsigned index, RegisterID* value)
1358 {
1359 emitOpcode(op_put_by_index);
1360 instructions().append(base->index());
1361 instructions().append(index);
1362 instructions().append(value->index());
1363 return value;
1364 }
1365
// Emits op_new_object: dst = a fresh empty object.
RegisterID* BytecodeGenerator::emitNewObject(RegisterID* dst)
{
    emitOpcode(op_new_object);
    instructions().append(dst->index());
    return dst;
}
1372
1373 RegisterID* BytecodeGenerator::emitNewArray(RegisterID* dst, ElementNode* elements)
1374 {
1375 Vector<RefPtr<RegisterID>, 16> argv;
1376 for (ElementNode* n = elements; n; n = n->next()) {
1377 if (n->elision())
1378 break;
1379 argv.append(newTemporary());
1380 // op_new_array requires the initial values to be a sequential range of registers
1381 ASSERT(argv.size() == 1 || argv[argv.size() - 1]->index() == argv[argv.size() - 2]->index() + 1);
1382 emitNode(argv.last().get(), n->value());
1383 }
1384 emitOpcode(op_new_array);
1385 instructions().append(dst->index());
1386 instructions().append(argv.size() ? argv[0]->index() : 0); // argv
1387 instructions().append(argv.size()); // argc
1388 return dst;
1389 }
1390
1391 RegisterID* BytecodeGenerator::emitNewFunction(RegisterID* dst, FunctionBodyNode* function)
1392 {
1393 unsigned index = m_codeBlock->addFunctionDecl(makeFunction(m_globalData, function));
1394
1395 emitOpcode(op_new_func);
1396 instructions().append(dst->index());
1397 instructions().append(index);
1398 return dst;
1399 }
1400
1401 RegisterID* BytecodeGenerator::emitNewRegExp(RegisterID* dst, RegExp* regExp)
1402 {
1403 emitOpcode(op_new_regexp);
1404 instructions().append(dst->index());
1405 instructions().append(addRegExp(regExp));
1406 return dst;
1407 }
1408
1409
1410 RegisterID* BytecodeGenerator::emitNewFunctionExpression(RegisterID* r0, FuncExprNode* n)
1411 {
1412 FunctionBodyNode* function = n->body();
1413 unsigned index = m_codeBlock->addFunctionExpr(makeFunction(m_globalData, function));
1414
1415 emitOpcode(op_new_func_exp);
1416 instructions().append(r0->index());
1417 instructions().append(index);
1418 return r0;
1419 }
1420
// Convenience wrapper: emits an ordinary function call (op_call) by
// forwarding to the general emitCall overload.
RegisterID* BytecodeGenerator::emitCall(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, ArgumentsNode* argumentsNode, unsigned divot, unsigned startOffset, unsigned endOffset)
{
    return emitCall(op_call, dst, func, thisRegister, argumentsNode, divot, startOffset, endOffset);
}
1425
1426 void BytecodeGenerator::createArgumentsIfNecessary()
1427 {
1428 if (m_codeBlock->usesArguments() && m_codeType == FunctionCode)
1429 emitOpcode(op_create_arguments);
1430 }
1431
// Emits an eval call (op_call_eval). The arguments object is materialized
// first — presumably because eval'd code may reference 'arguments'; confirm
// against the op_call_eval implementation.
RegisterID* BytecodeGenerator::emitCallEval(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, ArgumentsNode* argumentsNode, unsigned divot, unsigned startOffset, unsigned endOffset)
{
    createArgumentsIfNecessary();
    return emitCall(op_call_eval, dst, func, thisRegister, argumentsNode, divot, startOffset, endOffset);
}
1437
// General call emitter for op_call / op_call_eval.
//
// Layout contract: |thisRegister| plus the evaluated arguments must occupy a
// sequential run of registers, immediately followed by reserved call-frame
// header registers; the registerOffset operand is computed from that layout.
// |divot|/|startOffset|/|endOffset| locate the call site for exceptions.
RegisterID* BytecodeGenerator::emitCall(OpcodeID opcodeID, RegisterID* dst, RegisterID* func, RegisterID* thisRegister, ArgumentsNode* argumentsNode, unsigned divot, unsigned startOffset, unsigned endOffset)
{
    ASSERT(opcodeID == op_call || opcodeID == op_call_eval);
    ASSERT(func->refCount());
    ASSERT(thisRegister->refCount());

    RegisterID* originalFunc = func;
    if (m_shouldEmitProfileHooks) {
        // If codegen decided to recycle func as this call's destination register,
        // we need to undo that optimization here so that func will still be around
        // for the sake of op_profile_did_call.
        if (dst == func) {
            RefPtr<RegisterID> movedThisRegister = emitMove(newTemporary(), thisRegister);
            RefPtr<RegisterID> movedFunc = emitMove(thisRegister, func);

            // Deliberately leak a reference (releaseRef) to keep these
            // registers alive past this scope; the matching deref()s happen
            // after op_profile_did_call below.
            thisRegister = movedThisRegister.release().releaseRef();
            func = movedFunc.release().releaseRef();
        }
    }

    // Generate code for arguments.
    Vector<RefPtr<RegisterID>, 16> argv;
    argv.append(thisRegister);
    for (ArgumentListNode* n = argumentsNode->m_listNode; n; n = n->m_next) {
        argv.append(newTemporary());
        // op_call requires the arguments to be a sequential range of registers
        ASSERT(argv[argv.size() - 1]->index() == argv[argv.size() - 2]->index() + 1);
        emitNode(argv.last().get(), n);
    }

    // Reserve space for call frame.
    Vector<RefPtr<RegisterID>, RegisterFile::CallFrameHeaderSize> callFrame;
    for (int i = 0; i < RegisterFile::CallFrameHeaderSize; ++i)
        callFrame.append(newTemporary());

    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_will_call);
        instructions().append(func->index());

#if ENABLE(JIT)
        m_codeBlock->addFunctionRegisterInfo(instructions().size(), func->index());
#endif
    }

    emitExpressionInfo(divot, startOffset, endOffset);

#if ENABLE(JIT)
    m_codeBlock->addCallLinkInfo();
#endif

    // Emit call.
    emitOpcode(opcodeID);
    instructions().append(dst->index()); // dst
    instructions().append(func->index()); // func
    instructions().append(argv.size()); // argCount
    instructions().append(argv[0]->index() + argv.size() + RegisterFile::CallFrameHeaderSize); // registerOffset

    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_did_call);
        instructions().append(func->index());

        // Balance the releaseRef()s taken above when func was recycled.
        if (dst == originalFunc) {
            thisRegister->deref();
            func->deref();
        }
    }

    return dst;
}
1507
1508 RegisterID* BytecodeGenerator::emitLoadVarargs(RegisterID* argCountDst, RegisterID* arguments)
1509 {
1510 ASSERT(argCountDst->index() < arguments->index());
1511 emitOpcode(op_load_varargs);
1512 instructions().append(argCountDst->index());
1513 instructions().append(arguments->index());
1514 return argCountDst;
1515 }
1516
// Emits op_call_varargs: calls |func| with a runtime-determined argument
// count (in |argCountRegister|), e.g. for Function.prototype.apply. The
// registerOffset operand is derived from |thisRegister|'s position.
RegisterID* BytecodeGenerator::emitCallVarargs(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, RegisterID* argCountRegister, unsigned divot, unsigned startOffset, unsigned endOffset)
{
    ASSERT(func->refCount());
    ASSERT(thisRegister->refCount());
    // Unlike emitCall, dst recycled as func is not supported here.
    ASSERT(dst != func);
    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_will_call);
        instructions().append(func->index());

#if ENABLE(JIT)
        m_codeBlock->addFunctionRegisterInfo(instructions().size(), func->index());
#endif
    }

    // Record the call site for exception reporting.
    emitExpressionInfo(divot, startOffset, endOffset);

    // Emit call.
    emitOpcode(op_call_varargs);
    instructions().append(dst->index()); // dst
    instructions().append(func->index()); // func
    instructions().append(argCountRegister->index()); // arg count
    instructions().append(thisRegister->index() + RegisterFile::CallFrameHeaderSize); // initial registerOffset
    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_did_call);
        instructions().append(func->index());
    }
    return dst;
}
1545
1546 RegisterID* BytecodeGenerator::emitReturn(RegisterID* src)
1547 {
1548 if (m_codeBlock->needsFullScopeChain()) {
1549 emitOpcode(op_tear_off_activation);
1550 instructions().append(m_activationRegisterIndex);
1551 } else if (m_codeBlock->usesArguments() && m_codeBlock->m_numParameters > 1)
1552 emitOpcode(op_tear_off_arguments);
1553
1554 return emitUnaryNoDstOp(op_ret, src);
1555 }
1556
// Emits a one-operand opcode that has no destination register (e.g. op_ret,
// op_push_scope); returns |src| for caller convenience.
RegisterID* BytecodeGenerator::emitUnaryNoDstOp(OpcodeID opcodeID, RegisterID* src)
{
    emitOpcode(opcodeID);
    instructions().append(src->index());
    return src;
}
1563
// Emits a 'new' expression: op_construct followed by op_construct_verify.
// Mirrors emitCall's register-layout contract: a reserved "this" slot plus
// the evaluated arguments form a sequential register run, followed by
// reserved call-frame header registers. The constructor's .prototype is
// fetched up front into |funcProto| for the construct opcode.
RegisterID* BytecodeGenerator::emitConstruct(RegisterID* dst, RegisterID* func, ArgumentsNode* argumentsNode, unsigned divot, unsigned startOffset, unsigned endOffset)
{
    ASSERT(func->refCount());

    RegisterID* originalFunc = func;
    if (m_shouldEmitProfileHooks) {
        // If codegen decided to recycle func as this call's destination register,
        // we need to undo that optimization here so that func will still be around
        // for the sake of op_profile_did_call.
        if (dst == func) {
            // Deliberately leak a reference (releaseRef) to keep the register
            // alive; the matching deref() happens after op_profile_did_call.
            RefPtr<RegisterID> movedFunc = emitMove(newTemporary(), func);
            func = movedFunc.release().releaseRef();
        }
    }

    RefPtr<RegisterID> funcProto = newTemporary();

    // Generate code for arguments.
    Vector<RefPtr<RegisterID>, 16> argv;
    argv.append(newTemporary()); // reserve space for "this"
    for (ArgumentListNode* n = argumentsNode ? argumentsNode->m_listNode : 0; n; n = n->m_next) {
        argv.append(newTemporary());
        // op_construct requires the arguments to be a sequential range of registers
        ASSERT(argv[argv.size() - 1]->index() == argv[argv.size() - 2]->index() + 1);
        emitNode(argv.last().get(), n);
    }

    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_will_call);
        instructions().append(func->index());
    }

    // Load prototype.
    emitExpressionInfo(divot, startOffset, endOffset);
    emitGetByIdExceptionInfo(op_construct);
    emitGetById(funcProto.get(), func, globalData()->propertyNames->prototype);

    // Reserve space for call frame.
    Vector<RefPtr<RegisterID>, RegisterFile::CallFrameHeaderSize> callFrame;
    for (int i = 0; i < RegisterFile::CallFrameHeaderSize; ++i)
        callFrame.append(newTemporary());

    emitExpressionInfo(divot, startOffset, endOffset);

#if ENABLE(JIT)
    m_codeBlock->addCallLinkInfo();
#endif

    emitOpcode(op_construct);
    instructions().append(dst->index()); // dst
    instructions().append(func->index()); // func
    instructions().append(argv.size()); // argCount
    instructions().append(argv[0]->index() + argv.size() + RegisterFile::CallFrameHeaderSize); // registerOffset
    instructions().append(funcProto->index()); // proto
    instructions().append(argv[0]->index()); // thisRegister

    emitOpcode(op_construct_verify);
    instructions().append(dst->index());
    instructions().append(argv[0]->index());

    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_did_call);
        instructions().append(func->index());

        // Balance the releaseRef() taken above when func was recycled.
        if (dst == originalFunc)
            func->deref();
    }

    return dst;
}
1634
1635 RegisterID* BytecodeGenerator::emitStrcat(RegisterID* dst, RegisterID* src, int count)
1636 {
1637 emitOpcode(op_strcat);
1638 instructions().append(dst->index());
1639 instructions().append(src->index());
1640 instructions().append(count);
1641
1642 return dst;
1643 }
1644
1645 void BytecodeGenerator::emitToPrimitive(RegisterID* dst, RegisterID* src)
1646 {
1647 emitOpcode(op_to_primitive);
1648 instructions().append(dst->index());
1649 instructions().append(src->index());
1650 }
1651
1652 RegisterID* BytecodeGenerator::emitPushScope(RegisterID* scope)
1653 {
1654 ASSERT(scope->isTemporary());
1655 ControlFlowContext context;
1656 context.isFinallyBlock = false;
1657 m_scopeContextStack.append(context);
1658 m_dynamicScopeDepth++;
1659 createArgumentsIfNecessary();
1660
1661 return emitUnaryNoDstOp(op_push_scope, scope);
1662 }
1663
// Emits op_pop_scope, undoing a prior emitPushScope; the popped context must
// not be a finally block.
void BytecodeGenerator::emitPopScope()
{
    ASSERT(m_scopeContextStack.size());
    ASSERT(!m_scopeContextStack.last().isFinallyBlock);

    emitOpcode(op_pop_scope);

    m_scopeContextStack.removeLast();
    m_dynamicScopeDepth--;
}
1674
// Emits op_debug, notifying the debugger of the event |debugHookID| spanning
// source lines [firstLine, lastLine]. Skipped entirely unless debug hooks
// are enabled (or, under DEBUG_WITH_BREAKPOINT, unless the event is
// DidReachBreakpoint).
void BytecodeGenerator::emitDebugHook(DebugHookID debugHookID, int firstLine, int lastLine)
{
#if ENABLE(DEBUG_WITH_BREAKPOINT)
    if (debugHookID != DidReachBreakpoint)
        return;
#else
    if (!m_shouldEmitDebugHooks)
        return;
#endif
    emitOpcode(op_debug);
    instructions().append(debugHookID);
    instructions().append(firstLine);
    instructions().append(lastLine);
}
1689
1690 void BytecodeGenerator::pushFinallyContext(Label* target, RegisterID* retAddrDst)
1691 {
1692 ControlFlowContext scope;
1693 scope.isFinallyBlock = true;
1694 FinallyContext context = { target, retAddrDst };
1695 scope.finallyContext = context;
1696 m_scopeContextStack.append(scope);
1697 m_finallyDepth++;
1698 }
1699
// Unregisters the innermost finally block pushed by pushFinallyContext.
void BytecodeGenerator::popFinallyContext()
{
    ASSERT(m_scopeContextStack.size());
    ASSERT(m_scopeContextStack.last().isFinallyBlock);
    ASSERT(m_finallyDepth > 0);
    m_scopeContextStack.removeLast();
    m_finallyDepth--;
}
1708
// Finds the LabelScope a 'break' (optionally labeled with |name|) should
// jump to, or 0 if there is no valid target. An unlabeled break targets the
// innermost non-named-label scope (loop or switch).
//
// NOTE: the reclaim loop's shape below is a deliberate GCC-miscompile
// workaround (see the comment) — do not "simplify" it back to a single
// compound condition.
LabelScope* BytecodeGenerator::breakTarget(const Identifier& name)
{
    // Reclaim free label scopes.
    //
    // The condition was previously coded as 'm_labelScopes.size() && !m_labelScopes.last().refCount()',
    // however sometimes this appears to lead to GCC going a little haywire and entering the loop with
    // size 0, leading to segfaulty badness. We are yet to identify a valid cause within our code to
    // cause the GCC codegen to misbehave in this fashion, and as such the following refactoring of the
    // loop condition is a workaround.
    while (m_labelScopes.size()) {
        if (m_labelScopes.last().refCount())
            break;
        m_labelScopes.removeLast();
    }

    if (!m_labelScopes.size())
        return 0;

    // We special-case the following, which is a syntax error in Firefox:
    // label:
    //     break;
    if (name.isEmpty()) {
        // Unlabeled break: innermost scope that is not a bare named label.
        for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
            LabelScope* scope = &m_labelScopes[i];
            if (scope->type() != LabelScope::NamedLabel) {
                ASSERT(scope->breakTarget());
                return scope;
            }
        }
        return 0;
    }

    // Labeled break: innermost scope carrying the matching name.
    for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
        LabelScope* scope = &m_labelScopes[i];
        if (scope->name() && *scope->name() == name) {
            ASSERT(scope->breakTarget());
            return scope;
        }
    }
    return 0;
}
1750
1751 LabelScope* BytecodeGenerator::continueTarget(const Identifier& name)
1752 {
1753 // Reclaim free label scopes.
1754 while (m_labelScopes.size() && !m_labelScopes.last().refCount())
1755 m_labelScopes.removeLast();
1756
1757 if (!m_labelScopes.size())
1758 return 0;
1759
1760 if (name.isEmpty()) {
1761 for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
1762 LabelScope* scope = &m_labelScopes[i];
1763 if (scope->type() == LabelScope::Loop) {
1764 ASSERT(scope->continueTarget());
1765 return scope;
1766 }
1767 }
1768 return 0;
1769 }
1770
1771 // Continue to the loop nested nearest to the label scope that matches
1772 // 'name'.
1773 LabelScope* result = 0;
1774 for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
1775 LabelScope* scope = &m_labelScopes[i];
1776 if (scope->type() == LabelScope::Loop) {
1777 ASSERT(scope->continueTarget());
1778 result = scope;
1779 }
1780 if (scope->name() && *scope->name() == name)
1781 return result; // may be 0
1782 }
1783 return 0;
1784 }
1785
// Emits the code for a jump (break/continue/goto-like transfer) that must
// leave one or more scopes, at least one of which is a finally block.
// Alternates between op_jmp_scopes (popping runs of plain dynamic scopes)
// and subroutine calls into each intervening finally block, walking
// |topScope| down toward |bottomScope|.
PassRefPtr<Label> BytecodeGenerator::emitComplexJumpScopes(Label* target, ControlFlowContext* topScope, ControlFlowContext* bottomScope)
{
    while (topScope > bottomScope) {
        // First we count the number of dynamic scopes we need to remove to get
        // to a finally block.
        int nNormalScopes = 0;
        while (topScope > bottomScope) {
            if (topScope->isFinallyBlock)
                break;
            ++nNormalScopes;
            --topScope;
        }

        if (nNormalScopes) {
            size_t begin = instructions().size();

            // We need to remove a number of dynamic scopes to get to the next
            // finally block
            emitOpcode(op_jmp_scopes);
            instructions().append(nNormalScopes);

            // If topScope == bottomScope then there isn't actually a finally block
            // left to emit, so make the jmp_scopes jump directly to the target label
            if (topScope == bottomScope) {
                instructions().append(target->bind(begin, instructions().size()));
                return target;
            }

            // Otherwise we just use jmp_scopes to pop a group of scopes and go
            // to the next instruction
            RefPtr<Label> nextInsn = newLabel();
            instructions().append(nextInsn->bind(begin, instructions().size()));
            emitLabel(nextInsn.get());
        }

        // Run each consecutive finally block as a subroutine before
        // continuing to unwind.
        while (topScope > bottomScope && topScope->isFinallyBlock) {
            emitJumpSubroutine(topScope->finallyContext.retAddrDst, topScope->finallyContext.finallyAddr);
            --topScope;
        }
    }
    return emitJump(target);
}
1828
// Emits a forward jump to |target| that also pops scopes down to
// |targetScopeDepth|: a plain jump when no scopes need popping, the complex
// path when finally blocks are involved, and a single op_jmp_scopes
// otherwise.
PassRefPtr<Label> BytecodeGenerator::emitJumpScopes(Label* target, int targetScopeDepth)
{
    ASSERT(scopeDepth() - targetScopeDepth >= 0);
    ASSERT(target->isForward());

    size_t scopeDelta = scopeDepth() - targetScopeDepth;
    ASSERT(scopeDelta <= m_scopeContextStack.size());
    if (!scopeDelta)
        return emitJump(target);

    // Any finally block in scope forces the subroutine-based unwind path.
    if (m_finallyDepth)
        return emitComplexJumpScopes(target, &m_scopeContextStack.last(), &m_scopeContextStack.last() - scopeDelta);

    size_t begin = instructions().size();

    emitOpcode(op_jmp_scopes);
    instructions().append(scopeDelta);
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}
1849
1850 RegisterID* BytecodeGenerator::emitGetPropertyNames(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, Label* breakTarget)
1851 {
1852 size_t begin = instructions().size();
1853
1854 emitOpcode(op_get_pnames);
1855 instructions().append(dst->index());
1856 instructions().append(base->index());
1857 instructions().append(i->index());
1858 instructions().append(size->index());
1859 instructions().append(breakTarget->bind(begin, instructions().size()));
1860 return dst;
1861 }
1862
1863 RegisterID* BytecodeGenerator::emitNextPropertyName(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, RegisterID* iter, Label* target)
1864 {
1865 size_t begin = instructions().size();
1866
1867 emitOpcode(op_next_pname);
1868 instructions().append(dst->index());
1869 instructions().append(base->index());
1870 instructions().append(i->index());
1871 instructions().append(size->index());
1872 instructions().append(iter->index());
1873 instructions().append(target->bind(begin, instructions().size()));
1874 return dst;
1875 }
1876
// Registers an exception handler covering the range [start, end) and emits
// op_catch, which receives the thrown value into targetRegister when the
// handler is entered. The handler's target is the current instruction offset.
RegisterID* BytecodeGenerator::emitCatch(RegisterID* targetRegister, Label* start, Label* end)
{
#if ENABLE(JIT)
    // The JIT build carries an extra CodeLocationLabel, patched later with the
    // native address of the handler.
    HandlerInfo info = { start->bind(0, 0), end->bind(0, 0), instructions().size(), m_dynamicScopeDepth + m_baseScopeDepth, CodeLocationLabel() };
#else
    HandlerInfo info = { start->bind(0, 0), end->bind(0, 0), instructions().size(), m_dynamicScopeDepth + m_baseScopeDepth };
#endif

    m_codeBlock->addExceptionHandler(info);
    emitOpcode(op_catch);
    instructions().append(targetRegister->index());
    return targetRegister;
}
1890
1891 RegisterID* BytecodeGenerator::emitNewError(RegisterID* dst, ErrorType type, JSValue message)
1892 {
1893 emitOpcode(op_new_error);
1894 instructions().append(dst->index());
1895 instructions().append(static_cast<int>(type));
1896 instructions().append(addConstantValue(message)->index());
1897 return dst;
1898 }
1899
1900 PassRefPtr<Label> BytecodeGenerator::emitJumpSubroutine(RegisterID* retAddrDst, Label* finally)
1901 {
1902 size_t begin = instructions().size();
1903
1904 emitOpcode(op_jsr);
1905 instructions().append(retAddrDst->index());
1906 instructions().append(finally->bind(begin, instructions().size()));
1907 emitLabel(newLabel().get()); // Record the fact that the next instruction is implicitly labeled, because op_sret will return to it.
1908 return finally;
1909 }
1910
// Emits op_sret, returning from a finally block to the address previously
// stored in retAddrSrc by a matching op_jsr (see emitJumpSubroutine).
void BytecodeGenerator::emitSubroutineReturn(RegisterID* retAddrSrc)
{
    emitOpcode(op_sret);
    instructions().append(retAddrSrc->index());
}
1916
// Emits op_push_new_scope, pushing a fresh dynamic scope that binds `property`
// to `value` (used for catch-style bindings). Bookkeeping for the compiler's
// scope stack and dynamic-scope depth happens before the opcode is emitted.
void BytecodeGenerator::emitPushNewScope(RegisterID* dst, const Identifier& property, RegisterID* value)
{
    // Track the new scope on the compile-time context stack; it is not a
    // finally block, so jumps over it can pop it with op_jmp_scopes.
    ControlFlowContext context;
    context.isFinallyBlock = false;
    m_scopeContextStack.append(context);
    m_dynamicScopeDepth++;

    createArgumentsIfNecessary();

    emitOpcode(op_push_new_scope);
    instructions().append(dst->index());
    instructions().append(addConstant(property));
    instructions().append(value->index());
}
1931
1932 void BytecodeGenerator::beginSwitch(RegisterID* scrutineeRegister, SwitchInfo::SwitchType type)
1933 {
1934 SwitchInfo info = { instructions().size(), type };
1935 switch (type) {
1936 case SwitchInfo::SwitchImmediate:
1937 emitOpcode(op_switch_imm);
1938 break;
1939 case SwitchInfo::SwitchCharacter:
1940 emitOpcode(op_switch_char);
1941 break;
1942 case SwitchInfo::SwitchString:
1943 emitOpcode(op_switch_string);
1944 break;
1945 default:
1946 ASSERT_NOT_REACHED();
1947 }
1948
1949 instructions().append(0); // place holder for table index
1950 instructions().append(0); // place holder for default target
1951 instructions().append(scrutineeRegister->index());
1952 m_switchContextStack.append(info);
1953 }
1954
1955 static int32_t keyForImmediateSwitch(ExpressionNode* node, int32_t min, int32_t max)
1956 {
1957 UNUSED_PARAM(max);
1958 ASSERT(node->isNumber());
1959 double value = static_cast<NumberNode*>(node)->value();
1960 int32_t key = static_cast<int32_t>(value);
1961 ASSERT(key == value);
1962 ASSERT(key >= min);
1963 ASSERT(key <= max);
1964 return key - min;
1965 }
1966
1967 static void prepareJumpTableForImmediateSwitch(SimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max)
1968 {
1969 jumpTable.min = min;
1970 jumpTable.branchOffsets.resize(max - min + 1);
1971 jumpTable.branchOffsets.fill(0);
1972 for (uint32_t i = 0; i < clauseCount; ++i) {
1973 // We're emitting this after the clause labels should have been fixed, so
1974 // the labels should not be "forward" references
1975 ASSERT(!labels[i]->isForward());
1976 jumpTable.add(keyForImmediateSwitch(nodes[i], min, max), labels[i]->bind(switchAddress, switchAddress + 3));
1977 }
1978 }
1979
1980 static int32_t keyForCharacterSwitch(ExpressionNode* node, int32_t min, int32_t max)
1981 {
1982 UNUSED_PARAM(max);
1983 ASSERT(node->isString());
1984 UString::Rep* clause = static_cast<StringNode*>(node)->value().ustring().rep();
1985 ASSERT(clause->length() == 1);
1986
1987 int32_t key = clause->characters()[0];
1988 ASSERT(key >= min);
1989 ASSERT(key <= max);
1990 return key - min;
1991 }
1992
1993 static void prepareJumpTableForCharacterSwitch(SimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max)
1994 {
1995 jumpTable.min = min;
1996 jumpTable.branchOffsets.resize(max - min + 1);
1997 jumpTable.branchOffsets.fill(0);
1998 for (uint32_t i = 0; i < clauseCount; ++i) {
1999 // We're emitting this after the clause labels should have been fixed, so
2000 // the labels should not be "forward" references
2001 ASSERT(!labels[i]->isForward());
2002 jumpTable.add(keyForCharacterSwitch(nodes[i], min, max), labels[i]->bind(switchAddress, switchAddress + 3));
2003 }
2004 }
2005
2006 static void prepareJumpTableForStringSwitch(StringJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes)
2007 {
2008 for (uint32_t i = 0; i < clauseCount; ++i) {
2009 // We're emitting this after the clause labels should have been fixed, so
2010 // the labels should not be "forward" references
2011 ASSERT(!labels[i]->isForward());
2012
2013 ASSERT(nodes[i]->isString());
2014 UString::Rep* clause = static_cast<StringNode*>(nodes[i])->value().ustring().rep();
2015 OffsetLocation location;
2016 location.branchOffset = labels[i]->bind(switchAddress, switchAddress + 3);
2017 jumpTable.offsetTable.add(clause, location);
2018 }
2019 }
2020
2021 void BytecodeGenerator::endSwitch(uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, Label* defaultLabel, int32_t min, int32_t max)
2022 {
2023 SwitchInfo switchInfo = m_switchContextStack.last();
2024 m_switchContextStack.removeLast();
2025 if (switchInfo.switchType == SwitchInfo::SwitchImmediate) {
2026 instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfImmediateSwitchJumpTables();
2027 instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);
2028
2029 SimpleJumpTable& jumpTable = m_codeBlock->addImmediateSwitchJumpTable();
2030 prepareJumpTableForImmediateSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max);
2031 } else if (switchInfo.switchType == SwitchInfo::SwitchCharacter) {
2032 instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfCharacterSwitchJumpTables();
2033 instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);
2034
2035 SimpleJumpTable& jumpTable = m_codeBlock->addCharacterSwitchJumpTable();
2036 prepareJumpTableForCharacterSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max);
2037 } else {
2038 ASSERT(switchInfo.switchType == SwitchInfo::SwitchString);
2039 instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfStringSwitchJumpTables();
2040 instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);
2041
2042 StringJumpTable& jumpTable = m_codeBlock->addStringSwitchJumpTable();
2043 prepareJumpTableForStringSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes);
2044 }
2045 }
2046
// Emits bytecode that constructs and throws a SyntaxError reporting that an
// expression nests too deeply for the compiler. Returns the register holding
// the error object (already thrown when this bytecode executes).
RegisterID* BytecodeGenerator::emitThrowExpressionTooDeepException()
{
    // It would be nice to do an even better job of identifying exactly where the expression is.
    // And we could make the caller pass the node pointer in, if there was some way of getting
    // that from an arbitrary node. However, calling emitExpressionInfo without any useful data
    // is still good enough to get us an accurate line number.
    emitExpressionInfo(0, 0, 0);
    RegisterID* exception = emitNewError(newTemporary(), SyntaxError, jsString(globalData(), "Expression too deep"));
    emitThrow(exception);
    return exception;
}
2058
2059 } // namespace JSC