/*
 * Copyright (C) 2008, 2009 Apple Inc. All rights reserved.
 * Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
 *    its contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef CodeBlock_h
#define CodeBlock_h

#include "EvalCodeCache.h"
#include "Instruction.h"
#include "JITCode.h"
#include "JSGlobalObject.h"
#include "JumpTable.h"
#include "Nodes.h"
#include "PtrAndFlags.h"
#include "RegExp.h"
#include "UString.h"
#include <limits> // for std::numeric_limits, used by missingThisObjectMarker() below
#include <wtf/FastAllocBase.h>
#include <wtf/RefPtr.h>
#include <wtf/Vector.h>

#if ENABLE(JIT)
#include "StructureStubInfo.h"
#endif

// Register numbers used in bytecode operations have different meanings according to their ranges:
//      0x80000000-0xFFFFFFFF  Negative indices from the CallFrame pointer are entries in the call frame, see RegisterFile.h.
//      0x00000000-0x3FFFFFFF  Forward indices from the CallFrame pointer are local vars and temporaries within the function's call frame.
//      0x40000000-0x7FFFFFFF  Positive indices from 0x40000000 specify entries in the constant pool on the CodeBlock.
static const int FirstConstantRegisterIndex = 0x40000000;
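
// An illustrative sketch (the operand value is hypothetical, not part of this header's API):
// an operand of 0x40000002 falls in the constant-pool range, and the pool slot is recovered
// by subtracting FirstConstantRegisterIndex, which is exactly what CodeBlock::getConstant()
// below computes.
//
//     int operand = 0x40000002;                                 // raw register number from an instruction
//     if (operand >= FirstConstantRegisterIndex) {              // i.e. isConstantRegisterIndex(operand)
//         int poolIndex = operand - FirstConstantRegisterIndex; // == 2, an index into the constant pool
//     }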

namespace JSC {

    enum HasSeenShouldRepatch {
        hasSeenShouldRepatch
    };

    class ExecState;

    enum CodeType { GlobalCode, EvalCode, FunctionCode };

    static ALWAYS_INLINE int missingThisObjectMarker() { return std::numeric_limits<int>::max(); }

    struct HandlerInfo {
        uint32_t start;
        uint32_t end;
        uint32_t target;
        uint32_t scopeDepth;
#if ENABLE(JIT)
        CodeLocationLabel nativeCode;
#endif
    };

    struct ExpressionRangeInfo {
        enum {
            MaxOffset = (1 << 7) - 1,
            MaxDivot = (1 << 25) - 1
        };
        uint32_t instructionOffset : 25;
        uint32_t divotPoint : 25;
        uint32_t startOffset : 7;
        uint32_t endOffset : 7;
    };

    struct LineInfo {
        uint32_t instructionOffset;
        int32_t lineNumber;
    };

    // Both op_construct and op_instanceof require a use of op_get_by_id to get
    // the prototype property from an object. The exception messages for exceptions
    // thrown by these instances of op_get_by_id need to reflect this.
    struct GetByIdExceptionInfo {
        unsigned bytecodeOffset : 31;
        bool isOpConstruct : 1;
    };
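
    // For example (hypothetical values): an entry { bytecodeOffset: 42, isOpConstruct: false }
    // records that the op_get_by_id at bytecode offset 42 was emitted on behalf of an
    // op_instanceof, so an exception raised there should be reported in terms of
    // 'instanceof' rather than a plain property access.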

#if ENABLE(JIT)
    struct CallLinkInfo {
        CallLinkInfo()
            : callee(0)
        {
        }

        unsigned bytecodeIndex;
        CodeLocationNearCall callReturnLocation;
        CodeLocationDataLabelPtr hotPathBegin;
        CodeLocationNearCall hotPathOther;
        PtrAndFlags<CodeBlock, HasSeenShouldRepatch> ownerCodeBlock;
        CodeBlock* callee;
        unsigned position;

        void setUnlinked() { callee = 0; }
        bool isLinked() { return callee; }

        bool seenOnce()
        {
            return ownerCodeBlock.isFlagSet(hasSeenShouldRepatch);
        }

        void setSeen()
        {
            ownerCodeBlock.setFlag(hasSeenShouldRepatch);
        }
    };

    struct MethodCallLinkInfo {
        MethodCallLinkInfo()
            : cachedStructure(0)
        {
        }

        bool seenOnce()
        {
            return cachedPrototypeStructure.isFlagSet(hasSeenShouldRepatch);
        }

        void setSeen()
        {
            cachedPrototypeStructure.setFlag(hasSeenShouldRepatch);
        }

        CodeLocationCall callReturnLocation;
        CodeLocationDataLabelPtr structureLabel;
        Structure* cachedStructure;
        PtrAndFlags<Structure, HasSeenShouldRepatch> cachedPrototypeStructure;
    };

    struct FunctionRegisterInfo {
        FunctionRegisterInfo(unsigned bytecodeOffset, int functionRegisterIndex)
            : bytecodeOffset(bytecodeOffset)
            , functionRegisterIndex(functionRegisterIndex)
        {
        }

        unsigned bytecodeOffset;
        int functionRegisterIndex;
    };

    struct GlobalResolveInfo {
        GlobalResolveInfo(unsigned bytecodeOffset)
            : structure(0)
            , offset(0)
            , bytecodeOffset(bytecodeOffset)
        {
        }

        Structure* structure;
        unsigned offset;
        unsigned bytecodeOffset;
    };

    // This structure is used to map from a call return location
    // (given as an offset in bytes into the JIT code) back to
    // the bytecode index of the corresponding bytecode operation.
    // This is then used to look up the corresponding handler.
    struct CallReturnOffsetToBytecodeIndex {
        CallReturnOffsetToBytecodeIndex(unsigned callReturnOffset, unsigned bytecodeIndex)
            : callReturnOffset(callReturnOffset)
            , bytecodeIndex(bytecodeIndex)
        {
        }

        unsigned callReturnOffset;
        unsigned bytecodeIndex;
    };
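
    // For instance (hypothetical numbers): an entry { callReturnOffset: 0x84, bytecodeIndex: 12 }
    // records that the machine-code call returning at byte 0x84 of the generated JIT code was
    // compiled from the bytecode operation at index 12.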

    // valueAtPosition helpers for the binaryChop algorithm below.

    inline void* getStructureStubInfoReturnLocation(StructureStubInfo* structureStubInfo)
    {
        return structureStubInfo->callReturnLocation.executableAddress();
    }

    inline void* getCallLinkInfoReturnLocation(CallLinkInfo* callLinkInfo)
    {
        return callLinkInfo->callReturnLocation.executableAddress();
    }

    inline void* getMethodCallLinkInfoReturnLocation(MethodCallLinkInfo* methodCallLinkInfo)
    {
        return methodCallLinkInfo->callReturnLocation.executableAddress();
    }

    inline unsigned getCallReturnOffset(CallReturnOffsetToBytecodeIndex* pc)
    {
        return pc->callReturnOffset;
    }

    // Binary chop algorithm: calls valueAtPosition on pre-sorted elements in the array
    // and compares the result with the key (KeyType should be comparable with '==', '<', '>').
    // Optimized for cases where the array contains the key, as checked by assertions.
    template<typename ArrayType, typename KeyType, KeyType(*valueAtPosition)(ArrayType*)>
    inline ArrayType* binaryChop(ArrayType* array, size_t size, KeyType key)
    {
        // The array must contain at least one element (precondition: the array does contain the key).
        // If the array only contains one element, no need to do the comparison.
        while (size > 1) {
            // Pick an element to check, halfway through the array, and read the value.
            int pos = (size - 1) >> 1;
            KeyType val = valueAtPosition(&array[pos]);

            // If the key matches, success!
            if (val == key)
                return &array[pos];
            // The item we are looking for is smaller than the item being checked; reduce the value of 'size',
            // chopping off the right hand half of the array.
            else if (key < val)
                size = pos;
            // Discard all values in the left hand half of the array, up to and including the item at pos.
            else {
                size -= (pos + 1);
                array += (pos + 1);
            }

            // 'size' should never reach zero.
            ASSERT(size);
        }

        // If we reach this point we've chopped down to one element; no need to check that it matches.
        ASSERT(size == 1);
        ASSERT(key == valueAtPosition(&array[0]));
        return &array[0];
    }
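
    // A minimal usage sketch (the 'offsets' vector here is hypothetical; the real caller is
    // CodeBlock::getBytecodeIndex() below): given a Vector<CallReturnOffsetToBytecodeIndex>
    // sorted by callReturnOffset and known to contain the key, the matching entry is found with:
    //
    //     CallReturnOffsetToBytecodeIndex* entry =
    //         binaryChop<CallReturnOffsetToBytecodeIndex, unsigned, getCallReturnOffset>(
    //             offsets.begin(), offsets.size(), callReturnOffset);
    //     unsigned bytecodeIndex = entry->bytecodeIndex;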
#endif

    struct ExceptionInfo : FastAllocBase {
        Vector<ExpressionRangeInfo> m_expressionInfo;
        Vector<LineInfo> m_lineInfo;
        Vector<GetByIdExceptionInfo> m_getByIdExceptionInfo;

#if ENABLE(JIT)
        Vector<CallReturnOffsetToBytecodeIndex> m_callReturnIndexVector;
#endif
    };

    class CodeBlock : public FastAllocBase {
        friend class JIT;
    protected:
        CodeBlock(ScriptExecutable* ownerExecutable, CodeType, PassRefPtr<SourceProvider>, unsigned sourceOffset, SymbolTable* symbolTable);
    public:
        virtual ~CodeBlock();

        void markAggregate(MarkStack&);
        void refStructures(Instruction* vPC) const;
        void derefStructures(Instruction* vPC) const;
#if ENABLE(JIT_OPTIMIZE_CALL)
        void unlinkCallers();
#endif

        static void dumpStatistics();

#if !defined(NDEBUG) || ENABLE(OPCODE_SAMPLING)
        void dump(ExecState*) const;
        void printStructures(const Instruction*) const;
        void printStructure(const char* name, const Instruction*, int operand) const;
#endif

        inline bool isKnownNotImmediate(int index)
        {
            if (index == m_thisRegister)
                return true;

            if (isConstantRegisterIndex(index))
                return getConstant(index).isCell();

            return false;
        }

        ALWAYS_INLINE bool isTemporaryRegisterIndex(int index)
        {
            return index >= m_numVars;
        }

        HandlerInfo* handlerForBytecodeOffset(unsigned bytecodeOffset);
        int lineNumberForBytecodeOffset(CallFrame*, unsigned bytecodeOffset);
        int expressionRangeForBytecodeOffset(CallFrame*, unsigned bytecodeOffset, int& divot, int& startOffset, int& endOffset);
        bool getByIdExceptionInfoForBytecodeOffset(CallFrame*, unsigned bytecodeOffset, OpcodeID&);

#if ENABLE(JIT)
        void addCaller(CallLinkInfo* caller)
        {
            caller->callee = this;
            caller->position = m_linkedCallerList.size();
            m_linkedCallerList.append(caller);
        }

        void removeCaller(CallLinkInfo* caller)
        {
            unsigned pos = caller->position;
            unsigned lastPos = m_linkedCallerList.size() - 1;

            if (pos != lastPos) {
                m_linkedCallerList[pos] = m_linkedCallerList[lastPos];
                m_linkedCallerList[pos]->position = pos;
            }
            m_linkedCallerList.shrink(lastPos);
        }

        StructureStubInfo& getStubInfo(ReturnAddressPtr returnAddress)
        {
            return *(binaryChop<StructureStubInfo, void*, getStructureStubInfoReturnLocation>(m_structureStubInfos.begin(), m_structureStubInfos.size(), returnAddress.value()));
        }

        CallLinkInfo& getCallLinkInfo(ReturnAddressPtr returnAddress)
        {
            return *(binaryChop<CallLinkInfo, void*, getCallLinkInfoReturnLocation>(m_callLinkInfos.begin(), m_callLinkInfos.size(), returnAddress.value()));
        }

        MethodCallLinkInfo& getMethodCallLinkInfo(ReturnAddressPtr returnAddress)
        {
            return *(binaryChop<MethodCallLinkInfo, void*, getMethodCallLinkInfoReturnLocation>(m_methodCallLinkInfos.begin(), m_methodCallLinkInfos.size(), returnAddress.value()));
        }

        unsigned getBytecodeIndex(CallFrame* callFrame, ReturnAddressPtr returnAddress)
        {
            reparseForExceptionInfoIfNecessary(callFrame);
            return binaryChop<CallReturnOffsetToBytecodeIndex, unsigned, getCallReturnOffset>(callReturnIndexVector().begin(), callReturnIndexVector().size(), ownerExecutable()->generatedJITCode().offsetOf(returnAddress.value()))->bytecodeIndex;
        }

        bool functionRegisterForBytecodeOffset(unsigned bytecodeOffset, int& functionRegisterIndex);
#endif

        void setIsNumericCompareFunction(bool isNumericCompareFunction) { m_isNumericCompareFunction = isNumericCompareFunction; }
        bool isNumericCompareFunction() { return m_isNumericCompareFunction; }

        Vector<Instruction>& instructions() { return m_instructions; }
        void discardBytecode() { m_instructions.clear(); }

#ifndef NDEBUG
        unsigned instructionCount() { return m_instructionCount; }
        void setInstructionCount(unsigned instructionCount) { m_instructionCount = instructionCount; }
#endif

#if ENABLE(JIT)
        JITCode& getJITCode() { return ownerExecutable()->generatedJITCode(); }
        ExecutablePool* executablePool() { return ownerExecutable()->getExecutablePool(); }
#endif

        ScriptExecutable* ownerExecutable() const { return m_ownerExecutable; }

        void setGlobalData(JSGlobalData* globalData) { m_globalData = globalData; }

        void setThisRegister(int thisRegister) { m_thisRegister = thisRegister; }
        int thisRegister() const { return m_thisRegister; }

        void setNeedsFullScopeChain(bool needsFullScopeChain) { m_needsFullScopeChain = needsFullScopeChain; }
        bool needsFullScopeChain() const { return m_needsFullScopeChain; }
        void setUsesEval(bool usesEval) { m_usesEval = usesEval; }
        bool usesEval() const { return m_usesEval; }
        void setUsesArguments(bool usesArguments) { m_usesArguments = usesArguments; }
        bool usesArguments() const { return m_usesArguments; }

        CodeType codeType() const { return m_codeType; }

        SourceProvider* source() const { return m_source.get(); }
        unsigned sourceOffset() const { return m_sourceOffset; }

        size_t numberOfJumpTargets() const { return m_jumpTargets.size(); }
        void addJumpTarget(unsigned jumpTarget) { m_jumpTargets.append(jumpTarget); }
        unsigned jumpTarget(int index) const { return m_jumpTargets[index]; }
        unsigned lastJumpTarget() const { return m_jumpTargets.last(); }

#if !ENABLE(JIT)
        void addPropertyAccessInstruction(unsigned propertyAccessInstruction) { m_propertyAccessInstructions.append(propertyAccessInstruction); }
        void addGlobalResolveInstruction(unsigned globalResolveInstruction) { m_globalResolveInstructions.append(globalResolveInstruction); }
        bool hasGlobalResolveInstructionAtBytecodeOffset(unsigned bytecodeOffset);
#else
        size_t numberOfStructureStubInfos() const { return m_structureStubInfos.size(); }
        void addStructureStubInfo(const StructureStubInfo& stubInfo) { m_structureStubInfos.append(stubInfo); }
        StructureStubInfo& structureStubInfo(int index) { return m_structureStubInfos[index]; }

        void addGlobalResolveInfo(unsigned globalResolveInstruction) { m_globalResolveInfos.append(GlobalResolveInfo(globalResolveInstruction)); }
        GlobalResolveInfo& globalResolveInfo(int index) { return m_globalResolveInfos[index]; }
        bool hasGlobalResolveInfoAtBytecodeOffset(unsigned bytecodeOffset);

        size_t numberOfCallLinkInfos() const { return m_callLinkInfos.size(); }
        void addCallLinkInfo() { m_callLinkInfos.append(CallLinkInfo()); }
        CallLinkInfo& callLinkInfo(int index) { return m_callLinkInfos[index]; }

        void addMethodCallLinkInfos(unsigned n) { m_methodCallLinkInfos.grow(n); }
        MethodCallLinkInfo& methodCallLinkInfo(int index) { return m_methodCallLinkInfos[index]; }

        void addFunctionRegisterInfo(unsigned bytecodeOffset, int functionIndex) { createRareDataIfNecessary(); m_rareData->m_functionRegisterInfos.append(FunctionRegisterInfo(bytecodeOffset, functionIndex)); }
#endif

        // Exception handling support

        size_t numberOfExceptionHandlers() const { return m_rareData ? m_rareData->m_exceptionHandlers.size() : 0; }
        void addExceptionHandler(const HandlerInfo& handler) { createRareDataIfNecessary(); return m_rareData->m_exceptionHandlers.append(handler); }
        HandlerInfo& exceptionHandler(int index) { ASSERT(m_rareData); return m_rareData->m_exceptionHandlers[index]; }

        bool hasExceptionInfo() const { return m_exceptionInfo; }
        void clearExceptionInfo() { m_exceptionInfo.clear(); }
        ExceptionInfo* extractExceptionInfo() { ASSERT(m_exceptionInfo); return m_exceptionInfo.release(); }

        void addExpressionInfo(const ExpressionRangeInfo& expressionInfo) { ASSERT(m_exceptionInfo); m_exceptionInfo->m_expressionInfo.append(expressionInfo); }
        void addGetByIdExceptionInfo(const GetByIdExceptionInfo& info) { ASSERT(m_exceptionInfo); m_exceptionInfo->m_getByIdExceptionInfo.append(info); }

        size_t numberOfLineInfos() const { ASSERT(m_exceptionInfo); return m_exceptionInfo->m_lineInfo.size(); }
        void addLineInfo(const LineInfo& lineInfo) { ASSERT(m_exceptionInfo); m_exceptionInfo->m_lineInfo.append(lineInfo); }
        LineInfo& lastLineInfo() { ASSERT(m_exceptionInfo); return m_exceptionInfo->m_lineInfo.last(); }

#if ENABLE(JIT)
        Vector<CallReturnOffsetToBytecodeIndex>& callReturnIndexVector() { ASSERT(m_exceptionInfo); return m_exceptionInfo->m_callReturnIndexVector; }
#endif

        // Constant Pool

        size_t numberOfIdentifiers() const { return m_identifiers.size(); }
        void addIdentifier(const Identifier& i) { return m_identifiers.append(i); }
        Identifier& identifier(int index) { return m_identifiers[index]; }

        size_t numberOfConstantRegisters() const { return m_constantRegisters.size(); }
        void addConstantRegister(const Register& r) { return m_constantRegisters.append(r); }
        Register& constantRegister(int index) { return m_constantRegisters[index - FirstConstantRegisterIndex]; }
        ALWAYS_INLINE bool isConstantRegisterIndex(int index) const { return index >= FirstConstantRegisterIndex; }
        ALWAYS_INLINE JSValue getConstant(int index) const { return m_constantRegisters[index - FirstConstantRegisterIndex].jsValue(); }

        unsigned addFunctionDecl(NonNullPassRefPtr<FunctionExecutable> n) { unsigned size = m_functionDecls.size(); m_functionDecls.append(n); return size; }
        FunctionExecutable* functionDecl(int index) { return m_functionDecls[index].get(); }
        int numberOfFunctionDecls() { return m_functionDecls.size(); }
        unsigned addFunctionExpr(NonNullPassRefPtr<FunctionExecutable> n) { unsigned size = m_functionExprs.size(); m_functionExprs.append(n); return size; }
        FunctionExecutable* functionExpr(int index) { return m_functionExprs[index].get(); }

        unsigned addRegExp(RegExp* r) { createRareDataIfNecessary(); unsigned size = m_rareData->m_regexps.size(); m_rareData->m_regexps.append(r); return size; }
        RegExp* regexp(int index) const { ASSERT(m_rareData); return m_rareData->m_regexps[index].get(); }


        // Jump Tables

        size_t numberOfImmediateSwitchJumpTables() const { return m_rareData ? m_rareData->m_immediateSwitchJumpTables.size() : 0; }
        SimpleJumpTable& addImmediateSwitchJumpTable() { createRareDataIfNecessary(); m_rareData->m_immediateSwitchJumpTables.append(SimpleJumpTable()); return m_rareData->m_immediateSwitchJumpTables.last(); }
        SimpleJumpTable& immediateSwitchJumpTable(int tableIndex) { ASSERT(m_rareData); return m_rareData->m_immediateSwitchJumpTables[tableIndex]; }

        size_t numberOfCharacterSwitchJumpTables() const { return m_rareData ? m_rareData->m_characterSwitchJumpTables.size() : 0; }
        SimpleJumpTable& addCharacterSwitchJumpTable() { createRareDataIfNecessary(); m_rareData->m_characterSwitchJumpTables.append(SimpleJumpTable()); return m_rareData->m_characterSwitchJumpTables.last(); }
        SimpleJumpTable& characterSwitchJumpTable(int tableIndex) { ASSERT(m_rareData); return m_rareData->m_characterSwitchJumpTables[tableIndex]; }

        size_t numberOfStringSwitchJumpTables() const { return m_rareData ? m_rareData->m_stringSwitchJumpTables.size() : 0; }
        StringJumpTable& addStringSwitchJumpTable() { createRareDataIfNecessary(); m_rareData->m_stringSwitchJumpTables.append(StringJumpTable()); return m_rareData->m_stringSwitchJumpTables.last(); }
        StringJumpTable& stringSwitchJumpTable(int tableIndex) { ASSERT(m_rareData); return m_rareData->m_stringSwitchJumpTables[tableIndex]; }


        SymbolTable* symbolTable() { return m_symbolTable; }
        SharedSymbolTable* sharedSymbolTable() { ASSERT(m_codeType == FunctionCode); return static_cast<SharedSymbolTable*>(m_symbolTable); }

        EvalCodeCache& evalCodeCache() { createRareDataIfNecessary(); return m_rareData->m_evalCodeCache; }

        void shrinkToFit();

        // FIXME: Make these remaining members private.

        int m_numCalleeRegisters;
        int m_numVars;
        int m_numParameters;

    private:
#if !defined(NDEBUG) || ENABLE(OPCODE_SAMPLING)
        void dump(ExecState*, const Vector<Instruction>::const_iterator& begin, Vector<Instruction>::const_iterator&) const;

        CString registerName(ExecState*, int r) const;
        void printUnaryOp(ExecState*, int location, Vector<Instruction>::const_iterator&, const char* op) const;
        void printBinaryOp(ExecState*, int location, Vector<Instruction>::const_iterator&, const char* op) const;
        void printConditionalJump(ExecState*, const Vector<Instruction>::const_iterator&, Vector<Instruction>::const_iterator&, int location, const char* op) const;
        void printGetByIdOp(ExecState*, int location, Vector<Instruction>::const_iterator&, const char* op) const;
        void printPutByIdOp(ExecState*, int location, Vector<Instruction>::const_iterator&, const char* op) const;
#endif

        void reparseForExceptionInfoIfNecessary(CallFrame*);

        void createRareDataIfNecessary()
        {
            if (!m_rareData)
                m_rareData.set(new RareData);
        }

        ScriptExecutable* m_ownerExecutable;
        JSGlobalData* m_globalData;

        Vector<Instruction> m_instructions;
#ifndef NDEBUG
        unsigned m_instructionCount;
#endif

        int m_thisRegister;

        bool m_needsFullScopeChain;
        bool m_usesEval;
        bool m_usesArguments;
        bool m_isNumericCompareFunction;

        CodeType m_codeType;

        RefPtr<SourceProvider> m_source;
        unsigned m_sourceOffset;

#if !ENABLE(JIT)
        Vector<unsigned> m_propertyAccessInstructions;
        Vector<unsigned> m_globalResolveInstructions;
#else
        Vector<StructureStubInfo> m_structureStubInfos;
        Vector<GlobalResolveInfo> m_globalResolveInfos;
        Vector<CallLinkInfo> m_callLinkInfos;
        Vector<MethodCallLinkInfo> m_methodCallLinkInfos;
        Vector<CallLinkInfo*> m_linkedCallerList;
#endif

        Vector<unsigned> m_jumpTargets;

        // Constant Pool
        Vector<Identifier> m_identifiers;
        Vector<Register> m_constantRegisters;
        Vector<RefPtr<FunctionExecutable> > m_functionDecls;
        Vector<RefPtr<FunctionExecutable> > m_functionExprs;

        SymbolTable* m_symbolTable;

        OwnPtr<ExceptionInfo> m_exceptionInfo;

        struct RareData : FastAllocBase {
            Vector<HandlerInfo> m_exceptionHandlers;

            // Rare Constants
            Vector<RefPtr<RegExp> > m_regexps;

            // Jump Tables
            Vector<SimpleJumpTable> m_immediateSwitchJumpTables;
            Vector<SimpleJumpTable> m_characterSwitchJumpTables;
            Vector<StringJumpTable> m_stringSwitchJumpTables;

            EvalCodeCache m_evalCodeCache;

#if ENABLE(JIT)
            Vector<FunctionRegisterInfo> m_functionRegisterInfos;
#endif
        };
        OwnPtr<RareData> m_rareData;
    };

    // Program code is not marked by any function, so we make the global object
    // responsible for marking it.

    class GlobalCodeBlock : public CodeBlock {
    public:
        GlobalCodeBlock(ScriptExecutable* ownerExecutable, CodeType codeType, PassRefPtr<SourceProvider> sourceProvider, unsigned sourceOffset, JSGlobalObject* globalObject)
            : CodeBlock(ownerExecutable, codeType, sourceProvider, sourceOffset, &m_unsharedSymbolTable)
            , m_globalObject(globalObject)
        {
            m_globalObject->codeBlocks().add(this);
        }

        ~GlobalCodeBlock()
        {
            if (m_globalObject)
                m_globalObject->codeBlocks().remove(this);
        }

        void clearGlobalObject() { m_globalObject = 0; }

    private:
        JSGlobalObject* m_globalObject; // For program and eval nodes, the global object that marks the constant pool.
        SymbolTable m_unsharedSymbolTable;
    };

    class ProgramCodeBlock : public GlobalCodeBlock {
    public:
        ProgramCodeBlock(ProgramExecutable* ownerExecutable, CodeType codeType, JSGlobalObject* globalObject, PassRefPtr<SourceProvider> sourceProvider)
            : GlobalCodeBlock(ownerExecutable, codeType, sourceProvider, 0, globalObject)
        {
        }
    };

    class EvalCodeBlock : public GlobalCodeBlock {
    public:
        EvalCodeBlock(EvalExecutable* ownerExecutable, JSGlobalObject* globalObject, PassRefPtr<SourceProvider> sourceProvider, int baseScopeDepth)
            : GlobalCodeBlock(ownerExecutable, EvalCode, sourceProvider, 0, globalObject)
            , m_baseScopeDepth(baseScopeDepth)
        {
        }

        int baseScopeDepth() const { return m_baseScopeDepth; }

        const Identifier& variable(unsigned index) { return m_variables[index]; }
        unsigned numVariables() { return m_variables.size(); }
        void adoptVariables(Vector<Identifier>& variables)
        {
            ASSERT(m_variables.isEmpty());
            m_variables.swap(variables);
        }

    private:
        int m_baseScopeDepth;
        Vector<Identifier> m_variables;
    };

    class FunctionCodeBlock : public CodeBlock {
    public:
        // Rather than using the usual RefCounted::create idiom for SharedSymbolTable, we just use new,
        // because we need to initialise the CodeBlock before we can initialise any RefPtr that would
        // hold the shared symbol table. So we pass a raw pointer with a ref count of 1, and manually
        // deref in the destructor.
        FunctionCodeBlock(FunctionExecutable* ownerExecutable, CodeType codeType, PassRefPtr<SourceProvider> sourceProvider, unsigned sourceOffset)
            : CodeBlock(ownerExecutable, codeType, sourceProvider, sourceOffset, new SharedSymbolTable)
        {
        }
        ~FunctionCodeBlock()
        {
            sharedSymbolTable()->deref();
        }
    };

    inline Register& ExecState::r(int index)
    {
        CodeBlock* codeBlock = this->codeBlock();
        if (codeBlock->isConstantRegisterIndex(index))
            return codeBlock->constantRegister(index);
        return this[index];
    }

} // namespace JSC

#endif // CodeBlock_h