]> git.saurik.com Git - apple/javascriptcore.git/blame - bytecode/CodeBlock.h
JavaScriptCore-525.tar.gz
[apple/javascriptcore.git] / bytecode / CodeBlock.h
CommitLineData
9dae56ea
A
1/*
2 * Copyright (C) 2008, 2009 Apple Inc. All rights reserved.
3 * Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca>
4 *
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions
7 * are met:
8 *
9 * 1. Redistributions of source code must retain the above copyright
10 * notice, this list of conditions and the following disclaimer.
11 * 2. Redistributions in binary form must reproduce the above copyright
12 * notice, this list of conditions and the following disclaimer in the
13 * documentation and/or other materials provided with the distribution.
14 * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
15 * its contributors may be used to endorse or promote products derived
16 * from this software without specific prior written permission.
17 *
18 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
19 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
20 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
21 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
22 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
23 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
24 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
25 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
26 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
27 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28 */
29
30#ifndef CodeBlock_h
31#define CodeBlock_h
32
#include "EvalCodeCache.h"
#include "Instruction.h"
#include "JSGlobalObject.h"
#include "JumpTable.h"
#include "Nodes.h"
#include "RegExp.h"
#include "UString.h"
#include <limits>
#include <wtf/RefPtr.h>
#include <wtf/Vector.h>
42
43#if ENABLE(JIT)
44#include "StructureStubInfo.h"
45#endif
46
47namespace JSC {
48
49 class ExecState;
50
    // The three kinds of code a CodeBlock can hold: whole-program code, code
    // compiled for an eval, and the body of a function.
    enum CodeType { GlobalCode, EvalCode, FunctionCode };
52
53 static ALWAYS_INLINE int missingThisObjectMarker() { return std::numeric_limits<int>::max(); }
54
    // One entry in a CodeBlock's exception-handler table (looked up via
    // CodeBlock::handlerForBytecodeOffset).
    struct HandlerInfo {
        uint32_t start; // Bytecode range this handler covers -- assumed [start, end); TODO confirm inclusivity.
        uint32_t end;
        uint32_t target; // Bytecode offset to jump to when the handler fires.
        uint32_t scopeDepth; // Scope-chain depth associated with the handler.
#if ENABLE(JIT)
        void* nativeCode; // Native-code entry point for the handler when JITed.
#endif
    };
64
#if ENABLE(JIT)
    // The code, and the associated pool from which it was allocated.
    struct JITCodeRef {
        void* code; // Generated machine code (0 when empty).
#ifndef NDEBUG
        unsigned codeSize; // Debug-only record of the generated code's size.
#endif
        RefPtr<ExecutablePool> executablePool; // Keeps the backing allocation alive.

        // Creates an empty reference: no code, no pool.
        JITCodeRef()
            : code(0)
#ifndef NDEBUG
            , codeSize(0)
#endif
        {
        }

        // Wraps 'code' and retains the pool it was allocated from.
        JITCodeRef(void* code, PassRefPtr<ExecutablePool> executablePool)
            : code(code)
#ifndef NDEBUG
            , codeSize(0)
#endif
            , executablePool(executablePool)
        {
        }
    };
#endif
92
    // Compressed source-range record for one instruction: a divot point plus
    // start/end offsets around it, packed into bitfields whose widths match
    // the Max* limits below.
    struct ExpressionRangeInfo {
        enum {
            MaxOffset = (1 << 7) - 1,  // Largest value representable in the 7-bit offset fields.
            MaxDivot = (1 << 25) - 1   // Largest value representable in the 25-bit fields.
        };
        uint32_t instructionOffset : 25;
        uint32_t divotPoint : 25; // Source position the expression is attributed to.
        uint32_t startOffset : 7; // Offsets relative to the divot -- assumed; see expressionRangeForBytecodeOffset.
        uint32_t endOffset : 7;
    };
103
    // Maps a bytecode offset to the source line it was generated from
    // (entries appended via CodeBlock::addLineInfo).
    struct LineInfo {
        uint32_t instructionOffset;
        int32_t lineNumber;
    };
108
    // Both op_construct and op_instanceof require a use of op_get_by_id to get
    // the prototype property from an object. The exception messages for exceptions
    // thrown by these instances of op_get_by_id need to reflect this.
    struct GetByIdExceptionInfo {
        unsigned bytecodeOffset : 31; // Offset of the op_get_by_id in question.
        bool isOpConstruct : 1; // True for op_construct, false for op_instanceof.
    };
116
117#if ENABLE(JIT)
118 struct CallLinkInfo {
119 CallLinkInfo()
120 : callReturnLocation(0)
121 , hotPathBegin(0)
122 , hotPathOther(0)
123 , coldPathOther(0)
124 , callee(0)
125 {
126 }
127
128 unsigned bytecodeIndex;
129 void* callReturnLocation;
130 void* hotPathBegin;
131 void* hotPathOther;
132 void* coldPathOther;
133 CodeBlock* callee;
134 unsigned position;
135
136 void setUnlinked() { callee = 0; }
137 bool isLinked() { return callee; }
138 };
139
    // Associates a bytecode offset with the register index holding the
    // function value at that point (see CodeBlock::addFunctionRegisterInfo).
    struct FunctionRegisterInfo {
        FunctionRegisterInfo(unsigned bytecodeOffset, int functionRegisterIndex)
            : bytecodeOffset(bytecodeOffset)
            , functionRegisterIndex(functionRegisterIndex)
        {
        }

        unsigned bytecodeOffset;
        int functionRegisterIndex;
    };
150
    // Inline-cache record for a global resolve. structure and offset start at
    // zero; NOTE(review): presumably filled in by the JIT's resolve path once
    // a lookup succeeds -- confirm against the JIT implementation.
    struct GlobalResolveInfo {
        GlobalResolveInfo(unsigned bytecodeOffset)
            : structure(0)
            , offset(0)
            , bytecodeOffset(bytecodeOffset)
        {
        }

        Structure* structure; // 0 until the cache is primed.
        unsigned offset;
        unsigned bytecodeOffset;
    };
163
    // Maps an offset into the JIT-generated native code back to the index of
    // the bytecode it was generated from (used by CodeBlock::getBytecodeIndex).
    struct PC {
        PC(ptrdiff_t nativePCOffset, unsigned bytecodeIndex)
            : nativePCOffset(nativePCOffset)
            , bytecodeIndex(bytecodeIndex)
        {
        }

        ptrdiff_t nativePCOffset;
        unsigned bytecodeIndex;
    };
174
    // valueAtPosition helpers for the binaryChop algorithm below.

    // Key extractor: searches StructureStubInfos by call return address.
    inline void* getStructureStubInfoReturnLocation(StructureStubInfo* structureStubInfo)
    {
        return structureStubInfo->callReturnLocation;
    }

    // Key extractor: searches CallLinkInfos by call return address.
    inline void* getCallLinkInfoReturnLocation(CallLinkInfo* callLinkInfo)
    {
        return callLinkInfo->callReturnLocation;
    }

    // Key extractor: searches PC records by native-code offset.
    inline ptrdiff_t getNativePCOffset(PC* pc)
    {
        return pc->nativePCOffset;
    }
191
192 // Binary chop algorithm, calls valueAtPosition on pre-sorted elements in array,
193 // compares result with key (KeyTypes should be comparable with '--', '<', '>').
194 // Optimized for cases where the array contains the key, checked by assertions.
195 template<typename ArrayType, typename KeyType, KeyType(*valueAtPosition)(ArrayType*)>
196 inline ArrayType* binaryChop(ArrayType* array, size_t size, KeyType key)
197 {
198 // The array must contain at least one element (pre-condition, array does conatin key).
199 // If the array only contains one element, no need to do the comparison.
200 while (size > 1) {
201 // Pick an element to check, half way through the array, and read the value.
202 int pos = (size - 1) >> 1;
203 KeyType val = valueAtPosition(&array[pos]);
204
205 // If the key matches, success!
206 if (val == key)
207 return &array[pos];
208 // The item we are looking for is smaller than the item being check; reduce the value of 'size',
209 // chopping off the right hand half of the array.
210 else if (key < val)
211 size = pos;
212 // Discard all values in the left hand half of the array, up to and including the item at pos.
213 else {
214 size -= (pos + 1);
215 array += (pos + 1);
216 }
217
218 // 'size' should never reach zero.
219 ASSERT(size);
220 }
221
222 // If we reach this point we've chopped down to one element, no need to check it matches
223 ASSERT(size == 1);
224 ASSERT(key == valueAtPosition(&array[0]));
225 return &array[0];
226 }
227#endif
228
229 class CodeBlock {
230 friend class JIT;
231 public:
232 CodeBlock(ScopeNode* ownerNode, CodeType, PassRefPtr<SourceProvider>, unsigned sourceOffset);
233 ~CodeBlock();
234
235 void mark();
236 void refStructures(Instruction* vPC) const;
237 void derefStructures(Instruction* vPC) const;
238#if ENABLE(JIT)
239 void unlinkCallers();
240#endif
241
242 static void dumpStatistics();
243
244#if !defined(NDEBUG) || ENABLE_OPCODE_SAMPLING
245 void dump(ExecState*) const;
246 void printStructures(const Instruction*) const;
247 void printStructure(const char* name, const Instruction*, int operand) const;
248#endif
249
250 inline bool isKnownNotImmediate(int index)
251 {
252 if (index == m_thisRegister)
253 return true;
254
255 if (isConstantRegisterIndex(index))
256 return getConstant(index).isCell();
257
258 return false;
259 }
260
261 ALWAYS_INLINE bool isConstantRegisterIndex(int index)
262 {
263 return index >= m_numVars && index < m_numVars + m_numConstants;
264 }
265
266 ALWAYS_INLINE JSValuePtr getConstant(int index)
267 {
268 return m_constantRegisters[index - m_numVars].getJSValue();
269 }
270
271 ALWAYS_INLINE bool isTemporaryRegisterIndex(int index)
272 {
273 return index >= m_numVars + m_numConstants;
274 }
275
276 HandlerInfo* handlerForBytecodeOffset(unsigned bytecodeOffset);
277 int lineNumberForBytecodeOffset(CallFrame*, unsigned bytecodeOffset);
278 int expressionRangeForBytecodeOffset(CallFrame*, unsigned bytecodeOffset, int& divot, int& startOffset, int& endOffset);
279 bool getByIdExceptionInfoForBytecodeOffset(CallFrame*, unsigned bytecodeOffset, OpcodeID&);
280
281#if ENABLE(JIT)
282 void addCaller(CallLinkInfo* caller)
283 {
284 caller->callee = this;
285 caller->position = m_linkedCallerList.size();
286 m_linkedCallerList.append(caller);
287 }
288
289 void removeCaller(CallLinkInfo* caller)
290 {
291 unsigned pos = caller->position;
292 unsigned lastPos = m_linkedCallerList.size() - 1;
293
294 if (pos != lastPos) {
295 m_linkedCallerList[pos] = m_linkedCallerList[lastPos];
296 m_linkedCallerList[pos]->position = pos;
297 }
298 m_linkedCallerList.shrink(lastPos);
299 }
300
301 StructureStubInfo& getStubInfo(void* returnAddress)
302 {
303 return *(binaryChop<StructureStubInfo, void*, getStructureStubInfoReturnLocation>(m_structureStubInfos.begin(), m_structureStubInfos.size(), returnAddress));
304 }
305
306 CallLinkInfo& getCallLinkInfo(void* returnAddress)
307 {
308 return *(binaryChop<CallLinkInfo, void*, getCallLinkInfoReturnLocation>(m_callLinkInfos.begin(), m_callLinkInfos.size(), returnAddress));
309 }
310
311 unsigned getBytecodeIndex(CallFrame* callFrame, void* nativePC)
312 {
313 reparseForExceptionInfoIfNecessary(callFrame);
314 ptrdiff_t nativePCOffset = reinterpret_cast<void**>(nativePC) - reinterpret_cast<void**>(m_jitCode.code);
315 return binaryChop<PC, ptrdiff_t, getNativePCOffset>(m_exceptionInfo->m_pcVector.begin(), m_exceptionInfo->m_pcVector.size(), nativePCOffset)->bytecodeIndex;
316 }
317
318 bool functionRegisterForBytecodeOffset(unsigned bytecodeOffset, int& functionRegisterIndex);
319#endif
320
321 void setIsNumericCompareFunction(bool isNumericCompareFunction) { m_isNumericCompareFunction = isNumericCompareFunction; }
322 bool isNumericCompareFunction() { return m_isNumericCompareFunction; }
323
324 Vector<Instruction>& instructions() { return m_instructions; }
325#ifndef NDEBUG
326 void setInstructionCount(unsigned instructionCount) { m_instructionCount = instructionCount; }
327#endif
328
329#if ENABLE(JIT)
330 void setJITCode(JITCodeRef& jitCode);
331 void* jitCode() { return m_jitCode.code; }
332 ExecutablePool* executablePool() { return m_jitCode.executablePool.get(); }
333#endif
334
335 ScopeNode* ownerNode() const { return m_ownerNode; }
336
337 void setGlobalData(JSGlobalData* globalData) { m_globalData = globalData; }
338
339 void setThisRegister(int thisRegister) { m_thisRegister = thisRegister; }
340 int thisRegister() const { return m_thisRegister; }
341
342 void setNeedsFullScopeChain(bool needsFullScopeChain) { m_needsFullScopeChain = needsFullScopeChain; }
343 bool needsFullScopeChain() const { return m_needsFullScopeChain; }
344 void setUsesEval(bool usesEval) { m_usesEval = usesEval; }
345 bool usesEval() const { return m_usesEval; }
346 void setUsesArguments(bool usesArguments) { m_usesArguments = usesArguments; }
347 bool usesArguments() const { return m_usesArguments; }
348
349 CodeType codeType() const { return m_codeType; }
350
351 SourceProvider* source() const { return m_source.get(); }
352 unsigned sourceOffset() const { return m_sourceOffset; }
353
354 size_t numberOfJumpTargets() const { return m_jumpTargets.size(); }
355 void addJumpTarget(unsigned jumpTarget) { m_jumpTargets.append(jumpTarget); }
356 unsigned jumpTarget(int index) const { return m_jumpTargets[index]; }
357 unsigned lastJumpTarget() const { return m_jumpTargets.last(); }
358
359#if !ENABLE(JIT)
360 void addPropertyAccessInstruction(unsigned propertyAccessInstruction) { m_propertyAccessInstructions.append(propertyAccessInstruction); }
361 void addGlobalResolveInstruction(unsigned globalResolveInstruction) { m_globalResolveInstructions.append(globalResolveInstruction); }
362 bool hasGlobalResolveInstructionAtBytecodeOffset(unsigned bytecodeOffset);
363#else
364 size_t numberOfStructureStubInfos() const { return m_structureStubInfos.size(); }
365 void addStructureStubInfo(const StructureStubInfo& stubInfo) { m_structureStubInfos.append(stubInfo); }
366 StructureStubInfo& structureStubInfo(int index) { return m_structureStubInfos[index]; }
367
368 void addGlobalResolveInfo(unsigned globalResolveInstruction) { m_globalResolveInfos.append(GlobalResolveInfo(globalResolveInstruction)); }
369 GlobalResolveInfo& globalResolveInfo(int index) { return m_globalResolveInfos[index]; }
370 bool hasGlobalResolveInfoAtBytecodeOffset(unsigned bytecodeOffset);
371
372 size_t numberOfCallLinkInfos() const { return m_callLinkInfos.size(); }
373 void addCallLinkInfo() { m_callLinkInfos.append(CallLinkInfo()); }
374 CallLinkInfo& callLinkInfo(int index) { return m_callLinkInfos[index]; }
375
376 void addFunctionRegisterInfo(unsigned bytecodeOffset, int functionIndex) { createRareDataIfNecessary(); m_rareData->m_functionRegisterInfos.append(FunctionRegisterInfo(bytecodeOffset, functionIndex)); }
377#endif
378
379 // Exception handling support
380
381 size_t numberOfExceptionHandlers() const { return m_rareData ? m_rareData->m_exceptionHandlers.size() : 0; }
382 void addExceptionHandler(const HandlerInfo& hanler) { createRareDataIfNecessary(); return m_rareData->m_exceptionHandlers.append(hanler); }
383 HandlerInfo& exceptionHandler(int index) { ASSERT(m_rareData); return m_rareData->m_exceptionHandlers[index]; }
384
385 bool hasExceptionInfo() const { return m_exceptionInfo; }
386 void clearExceptionInfo() { m_exceptionInfo.clear(); }
387
388 void addExpressionInfo(const ExpressionRangeInfo& expressionInfo) { ASSERT(m_exceptionInfo); m_exceptionInfo->m_expressionInfo.append(expressionInfo); }
389 void addGetByIdExceptionInfo(const GetByIdExceptionInfo& info) { ASSERT(m_exceptionInfo); m_exceptionInfo->m_getByIdExceptionInfo.append(info); }
390
391 size_t numberOfLineInfos() const { ASSERT(m_exceptionInfo); return m_exceptionInfo->m_lineInfo.size(); }
392 void addLineInfo(const LineInfo& lineInfo) { ASSERT(m_exceptionInfo); m_exceptionInfo->m_lineInfo.append(lineInfo); }
393 LineInfo& lastLineInfo() { ASSERT(m_exceptionInfo); return m_exceptionInfo->m_lineInfo.last(); }
394
395#if ENABLE(JIT)
396 Vector<PC>& pcVector() { ASSERT(m_exceptionInfo); return m_exceptionInfo->m_pcVector; }
397#endif
398
399 // Constant Pool
400
401 size_t numberOfIdentifiers() const { return m_identifiers.size(); }
402 void addIdentifier(const Identifier& i) { return m_identifiers.append(i); }
403 Identifier& identifier(int index) { return m_identifiers[index]; }
404
405 size_t numberOfConstantRegisters() const { return m_constantRegisters.size(); }
406 void addConstantRegister(const Register& r) { return m_constantRegisters.append(r); }
407 Register& constantRegister(int index) { return m_constantRegisters[index]; }
408
409 unsigned addFunctionExpression(FuncExprNode* n) { unsigned size = m_functionExpressions.size(); m_functionExpressions.append(n); return size; }
410 FuncExprNode* functionExpression(int index) const { return m_functionExpressions[index].get(); }
411
412 unsigned addFunction(FuncDeclNode* n) { createRareDataIfNecessary(); unsigned size = m_rareData->m_functions.size(); m_rareData->m_functions.append(n); return size; }
413 FuncDeclNode* function(int index) const { ASSERT(m_rareData); return m_rareData->m_functions[index].get(); }
414
415 bool hasFunctions() const { return m_functionExpressions.size() || (m_rareData && m_rareData->m_functions.size()); }
416
417 unsigned addUnexpectedConstant(JSValuePtr v) { createRareDataIfNecessary(); unsigned size = m_rareData->m_unexpectedConstants.size(); m_rareData->m_unexpectedConstants.append(v); return size; }
418 JSValuePtr unexpectedConstant(int index) const { ASSERT(m_rareData); return m_rareData->m_unexpectedConstants[index]; }
419
420 unsigned addRegExp(RegExp* r) { createRareDataIfNecessary(); unsigned size = m_rareData->m_regexps.size(); m_rareData->m_regexps.append(r); return size; }
421 RegExp* regexp(int index) const { ASSERT(m_rareData); return m_rareData->m_regexps[index].get(); }
422
423
424 // Jump Tables
425
426 size_t numberOfImmediateSwitchJumpTables() const { return m_rareData ? m_rareData->m_immediateSwitchJumpTables.size() : 0; }
427 SimpleJumpTable& addImmediateSwitchJumpTable() { createRareDataIfNecessary(); m_rareData->m_immediateSwitchJumpTables.append(SimpleJumpTable()); return m_rareData->m_immediateSwitchJumpTables.last(); }
428 SimpleJumpTable& immediateSwitchJumpTable(int tableIndex) { ASSERT(m_rareData); return m_rareData->m_immediateSwitchJumpTables[tableIndex]; }
429
430 size_t numberOfCharacterSwitchJumpTables() const { return m_rareData ? m_rareData->m_characterSwitchJumpTables.size() : 0; }
431 SimpleJumpTable& addCharacterSwitchJumpTable() { createRareDataIfNecessary(); m_rareData->m_characterSwitchJumpTables.append(SimpleJumpTable()); return m_rareData->m_characterSwitchJumpTables.last(); }
432 SimpleJumpTable& characterSwitchJumpTable(int tableIndex) { ASSERT(m_rareData); return m_rareData->m_characterSwitchJumpTables[tableIndex]; }
433
434 size_t numberOfStringSwitchJumpTables() const { return m_rareData ? m_rareData->m_stringSwitchJumpTables.size() : 0; }
435 StringJumpTable& addStringSwitchJumpTable() { createRareDataIfNecessary(); m_rareData->m_stringSwitchJumpTables.append(StringJumpTable()); return m_rareData->m_stringSwitchJumpTables.last(); }
436 StringJumpTable& stringSwitchJumpTable(int tableIndex) { ASSERT(m_rareData); return m_rareData->m_stringSwitchJumpTables[tableIndex]; }
437
438
439 SymbolTable& symbolTable() { return m_symbolTable; }
440
441 EvalCodeCache& evalCodeCache() { createRareDataIfNecessary(); return m_rareData->m_evalCodeCache; }
442
443 void shrinkToFit();
444
445 // FIXME: Make these remaining members private.
446
447 int m_numCalleeRegisters;
448 // NOTE: numConstants holds the number of constant registers allocated
449 // by the code generator, not the number of constant registers used.
450 // (Duplicate constants are uniqued during code generation, and spare
451 // constant registers may be allocated.)
452 int m_numConstants;
453 int m_numVars;
454 int m_numParameters;
455
456 private:
457#if !defined(NDEBUG) || ENABLE(OPCODE_SAMPLING)
458 void dump(ExecState*, const Vector<Instruction>::const_iterator& begin, Vector<Instruction>::const_iterator&) const;
459#endif
460
461 void reparseForExceptionInfoIfNecessary(CallFrame*);
462
463 void createRareDataIfNecessary()
464 {
465 if (!m_rareData)
466 m_rareData.set(new RareData);
467 }
468
469 ScopeNode* m_ownerNode;
470 JSGlobalData* m_globalData;
471
472 Vector<Instruction> m_instructions;
473#ifndef NDEBUG
474 unsigned m_instructionCount;
475#endif
476#if ENABLE(JIT)
477 JITCodeRef m_jitCode;
478#endif
479
480 int m_thisRegister;
481
482 bool m_needsFullScopeChain;
483 bool m_usesEval;
484 bool m_usesArguments;
485 bool m_isNumericCompareFunction;
486
487 CodeType m_codeType;
488
489 RefPtr<SourceProvider> m_source;
490 unsigned m_sourceOffset;
491
492#if !ENABLE(JIT)
493 Vector<unsigned> m_propertyAccessInstructions;
494 Vector<unsigned> m_globalResolveInstructions;
495#else
496 Vector<StructureStubInfo> m_structureStubInfos;
497 Vector<GlobalResolveInfo> m_globalResolveInfos;
498 Vector<CallLinkInfo> m_callLinkInfos;
499 Vector<CallLinkInfo*> m_linkedCallerList;
500#endif
501
502 Vector<unsigned> m_jumpTargets;
503
504 // Constant Pool
505 Vector<Identifier> m_identifiers;
506 Vector<Register> m_constantRegisters;
507 Vector<RefPtr<FuncExprNode> > m_functionExpressions;
508
509 SymbolTable m_symbolTable;
510
511 struct ExceptionInfo {
512 Vector<ExpressionRangeInfo> m_expressionInfo;
513 Vector<LineInfo> m_lineInfo;
514 Vector<GetByIdExceptionInfo> m_getByIdExceptionInfo;
515
516#if ENABLE(JIT)
517 Vector<PC> m_pcVector;
518#endif
519 };
520 OwnPtr<ExceptionInfo> m_exceptionInfo;
521
522 struct RareData {
523 Vector<HandlerInfo> m_exceptionHandlers;
524
525 // Rare Constants
526 Vector<RefPtr<FuncDeclNode> > m_functions;
527 Vector<JSValuePtr> m_unexpectedConstants;
528 Vector<RefPtr<RegExp> > m_regexps;
529
530 // Jump Tables
531 Vector<SimpleJumpTable> m_immediateSwitchJumpTables;
532 Vector<SimpleJumpTable> m_characterSwitchJumpTables;
533 Vector<StringJumpTable> m_stringSwitchJumpTables;
534
535 EvalCodeCache m_evalCodeCache;
536
537#if ENABLE(JIT)
538 Vector<FunctionRegisterInfo> m_functionRegisterInfos;
539#endif
540 };
541 OwnPtr<RareData> m_rareData;
542 };
543
    // Program code is not marked by any function, so we make the global object
    // responsible for marking it.

    class ProgramCodeBlock : public CodeBlock {
    public:
        ProgramCodeBlock(ScopeNode* ownerNode, CodeType codeType, JSGlobalObject* globalObject, PassRefPtr<SourceProvider> sourceProvider)
            : CodeBlock(ownerNode, codeType, sourceProvider, 0)
            , m_globalObject(globalObject)
        {
            // Register with the global object so it can mark this block.
            m_globalObject->codeBlocks().add(this);
        }

        ~ProgramCodeBlock()
        {
            // m_globalObject is 0 if clearGlobalObject() ran first.
            if (m_globalObject)
                m_globalObject->codeBlocks().remove(this);
        }

        // Severs the back-pointer -- NOTE(review): presumably called when the
        // global object is destroyed before this block; confirm at call site.
        void clearGlobalObject() { m_globalObject = 0; }

    private:
        JSGlobalObject* m_globalObject; // For program and eval nodes, the global object that marks the constant pool.
    };
567
    // A ProgramCodeBlock for eval code; additionally records the scope-chain
    // depth at the point the eval was invoked.
    class EvalCodeBlock : public ProgramCodeBlock {
    public:
        EvalCodeBlock(ScopeNode* ownerNode, JSGlobalObject* globalObject, PassRefPtr<SourceProvider> sourceProvider, int baseScopeDepth)
            : ProgramCodeBlock(ownerNode, EvalCode, globalObject, sourceProvider)
            , m_baseScopeDepth(baseScopeDepth)
        {
        }

        int baseScopeDepth() const { return m_baseScopeDepth; }

    private:
        int m_baseScopeDepth; // Scope depth passed in by the creator of this block.
    };
581
582} // namespace JSC
583
584#endif // CodeBlock_h