 * Copyright (C) 2008, 2009 Apple Inc. All rights reserved.
 * Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
 *    its contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "EvalCodeCache.h"
#include "Instruction.h"
#include "JSGlobalObject.h"
#include "JumpTable.h"
#include <wtf/FastAllocBase.h>
#include <wtf/RefPtr.h>
#include <wtf/Vector.h>

#include "StructureStubInfo.h"
    // Register numbers used in bytecode operations have different meanings according to their ranges:
    //      0x80000000-0xFFFFFFFF  Negative indices from the CallFrame pointer are entries in the call frame; see RegisterFile.h.
    //      0x00000000-0x3FFFFFFF  Forward indices from the CallFrame pointer are local vars and temporaries within the function's call frame.
    //      0x40000000-0x7FFFFFFF  Positive indices from 0x40000000 specify entries in the constant pool on the CodeBlock.
    static const int FirstConstantRegisterIndex = 0x40000000;
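
    // Illustrative sketch (not part of the original header): how a bytecode register operand
    // in the ranges above might be decoded by a caller. 'operand', 'codeBlock' and 'callFrame'
    // are placeholder names; the accessors used are the ones declared further down in this
    // file (isConstantRegisterIndex, getConstant, ExecState::r).
    //
    //     if (codeBlock->isConstantRegisterIndex(operand))
    //         JSValue constant = codeBlock->getConstant(operand); // 0x40000000 and up: constant pool entry
    //     else if (operand >= 0)
    //         Register& local = callFrame->r(operand);            // local variable or temporary
    //     else
    //         { /* negative offsets are call frame header entries; see RegisterFile.h */ }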
    enum HasSeenShouldRepatch {

    enum CodeType { GlobalCode, EvalCode, FunctionCode };

    static ALWAYS_INLINE int missingThisObjectMarker() { return std::numeric_limits<int>::max(); }

        CodeLocationLabel nativeCode;
    struct ExpressionRangeInfo {
            MaxOffset = (1 << 7) - 1,
            MaxDivot = (1 << 25) - 1

        uint32_t instructionOffset : 25;
        uint32_t divotPoint : 25;
        uint32_t startOffset : 7;
        uint32_t endOffset : 7;

        uint32_t instructionOffset;
    // Both op_construct and op_instanceof require a use of op_get_by_id to get
    // the prototype property from an object. The exception messages for exceptions
    // thrown by these instances of op_get_by_id need to reflect this.
    struct GetByIdExceptionInfo {
        unsigned bytecodeOffset : 31;
        bool isOpConstruct : 1;
    struct CallLinkInfo {
            , hasSeenShouldRepatch(0)

        unsigned bytecodeIndex;
        CodeLocationNearCall callReturnLocation;
        CodeLocationDataLabelPtr hotPathBegin;
        CodeLocationNearCall hotPathOther;
        CodeBlock* ownerCodeBlock;

        unsigned position : 31;
        unsigned hasSeenShouldRepatch : 1;

        void setUnlinked() { callee = 0; }
        bool isLinked() { return callee; }

            return hasSeenShouldRepatch;

            hasSeenShouldRepatch = true;
    struct MethodCallLinkInfo {
            , cachedPrototypeStructure(0)

            ASSERT(!cachedStructure);
            return cachedPrototypeStructure;

            ASSERT(!cachedStructure && !cachedPrototypeStructure);
            // We use the values of cachedStructure & cachedPrototypeStructure to indicate the
            // - In the initial state, both are null.
            // - Once this transition has been taken once, cachedStructure is
            //   null and cachedPrototypeStructure is set to a non-null value.
            // - Once the call is linked, both structures are set to non-null values.
            cachedPrototypeStructure = (Structure*)1;

        CodeLocationCall callReturnLocation;
        CodeLocationDataLabelPtr structureLabel;
        Structure* cachedStructure;
        Structure* cachedPrototypeStructure;
    struct FunctionRegisterInfo {
        FunctionRegisterInfo(unsigned bytecodeOffset, int functionRegisterIndex)
            : bytecodeOffset(bytecodeOffset)
            , functionRegisterIndex(functionRegisterIndex)

        unsigned bytecodeOffset;
        int functionRegisterIndex;
    struct GlobalResolveInfo {
        GlobalResolveInfo(unsigned bytecodeOffset)
            , bytecodeOffset(bytecodeOffset)

        Structure* structure;
        unsigned bytecodeOffset;
    // This structure is used to map from a call return location
    // (given as an offset in bytes into the JIT code) back to
    // the bytecode index of the corresponding bytecode operation.
    // This is then used to look up the corresponding handler.
    struct CallReturnOffsetToBytecodeIndex {
        CallReturnOffsetToBytecodeIndex(unsigned callReturnOffset, unsigned bytecodeIndex)
            : callReturnOffset(callReturnOffset)
            , bytecodeIndex(bytecodeIndex)

        unsigned callReturnOffset;
        unsigned bytecodeIndex;
    // valueAtPosition helpers for the binaryChop algorithm below.

    inline void* getStructureStubInfoReturnLocation(StructureStubInfo* structureStubInfo)
    {
        return structureStubInfo->callReturnLocation.executableAddress();
    }

    inline void* getCallLinkInfoReturnLocation(CallLinkInfo* callLinkInfo)
    {
        return callLinkInfo->callReturnLocation.executableAddress();
    }

    inline void* getMethodCallLinkInfoReturnLocation(MethodCallLinkInfo* methodCallLinkInfo)
    {
        return methodCallLinkInfo->callReturnLocation.executableAddress();
    }

    inline unsigned getCallReturnOffset(CallReturnOffsetToBytecodeIndex* pc)
    {
        return pc->callReturnOffset;
    }
    // Binary chop algorithm, calls valueAtPosition on pre-sorted elements in array,
    // compares result with key (KeyTypes should be comparable with '==', '<', '>').
    // Optimized for cases where the array contains the key, checked by assertions.
    template<typename ArrayType, typename KeyType, KeyType(*valueAtPosition)(ArrayType*)>
    inline ArrayType* binaryChop(ArrayType* array, size_t size, KeyType key)
        // The array must contain at least one element (precondition: the array does contain the key).
        // If the array only contains one element, no need to do the comparison.

            // Pick an element to check, halfway through the array, and read the value.
            int pos = (size - 1) >> 1;
            KeyType val = valueAtPosition(&array[pos]);

            // If the key matches, success!

            // The item we are looking for is smaller than the item being checked; reduce the value of 'size',
            // chopping off the right-hand half of the array.

            // Discard all values in the left-hand half of the array, up to and including the item at pos.

            // 'size' should never reach zero.

        // If we reach this point we've chopped down to one element, so there is no need to check that it matches.
        ASSERT(key == valueAtPosition(&array[0]));
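
    // Illustrative usage (not part of the original header): binaryChop is parameterised on the
    // element type, the key type, and a projection function, as in the lookups CodeBlock performs
    // below. 'infos' and 'returnAddress' are placeholders for a sorted Vector<StructureStubInfo>
    // and a ReturnAddressPtr.
    //
    //     StructureStubInfo* hit = binaryChop<StructureStubInfo, void*, getStructureStubInfoReturnLocation>(
    //         infos.begin(), infos.size(), returnAddress.value());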
    struct ExceptionInfo : FastAllocBase {
        Vector<ExpressionRangeInfo> m_expressionInfo;
        Vector<LineInfo> m_lineInfo;
        Vector<GetByIdExceptionInfo> m_getByIdExceptionInfo;

        Vector<CallReturnOffsetToBytecodeIndex> m_callReturnIndexVector;
    class CodeBlock : public FastAllocBase {

        CodeBlock(ScriptExecutable* ownerExecutable, CodeType, PassRefPtr<SourceProvider>, unsigned sourceOffset, SymbolTable* symbolTable);

        virtual ~CodeBlock();

        void markAggregate(MarkStack&);
        void refStructures(Instruction* vPC) const;
        void derefStructures(Instruction* vPC) const;
#if ENABLE(JIT_OPTIMIZE_CALL)
        void unlinkCallers();
#endif

        static void dumpStatistics();

#if !defined(NDEBUG) || ENABLE(OPCODE_SAMPLING)
        void dump(ExecState*) const;
        void printStructures(const Instruction*) const;
        void printStructure(const char* name, const Instruction*, int operand) const;
#endif
        inline bool isKnownNotImmediate(int index)
            if (index == m_thisRegister)

            if (isConstantRegisterIndex(index))
                return getConstant(index).isCell();

        ALWAYS_INLINE bool isTemporaryRegisterIndex(int index)
        {
            return index >= m_numVars;
        }
        HandlerInfo* handlerForBytecodeOffset(unsigned bytecodeOffset);
        int lineNumberForBytecodeOffset(CallFrame*, unsigned bytecodeOffset);
        int expressionRangeForBytecodeOffset(CallFrame*, unsigned bytecodeOffset, int& divot, int& startOffset, int& endOffset);
        bool getByIdExceptionInfoForBytecodeOffset(CallFrame*, unsigned bytecodeOffset, OpcodeID&);
        void addCaller(CallLinkInfo* caller)
        {
            caller->callee = this;
            caller->position = m_linkedCallerList.size();
            m_linkedCallerList.append(caller);
        }

        void removeCaller(CallLinkInfo* caller)
        {
            unsigned pos = caller->position;
            unsigned lastPos = m_linkedCallerList.size() - 1;

            // Unlink in O(1) by moving the last caller into the vacated slot, then shrinking the list.
            if (pos != lastPos) {
                m_linkedCallerList[pos] = m_linkedCallerList[lastPos];
                m_linkedCallerList[pos]->position = pos;
            }
            m_linkedCallerList.shrink(lastPos);
        }
        StructureStubInfo& getStubInfo(ReturnAddressPtr returnAddress)
        {
            return *(binaryChop<StructureStubInfo, void*, getStructureStubInfoReturnLocation>(m_structureStubInfos.begin(), m_structureStubInfos.size(), returnAddress.value()));
        }

        CallLinkInfo& getCallLinkInfo(ReturnAddressPtr returnAddress)
        {
            return *(binaryChop<CallLinkInfo, void*, getCallLinkInfoReturnLocation>(m_callLinkInfos.begin(), m_callLinkInfos.size(), returnAddress.value()));
        }

        MethodCallLinkInfo& getMethodCallLinkInfo(ReturnAddressPtr returnAddress)
        {
            return *(binaryChop<MethodCallLinkInfo, void*, getMethodCallLinkInfoReturnLocation>(m_methodCallLinkInfos.begin(), m_methodCallLinkInfos.size(), returnAddress.value()));
        }

        unsigned getBytecodeIndex(CallFrame* callFrame, ReturnAddressPtr returnAddress)
        {
            reparseForExceptionInfoIfNecessary(callFrame);
            return binaryChop<CallReturnOffsetToBytecodeIndex, unsigned, getCallReturnOffset>(callReturnIndexVector().begin(), callReturnIndexVector().size(), ownerExecutable()->generatedJITCode().offsetOf(returnAddress.value()))->bytecodeIndex;
        }

        bool functionRegisterForBytecodeOffset(unsigned bytecodeOffset, int& functionRegisterIndex);
#if ENABLE(INTERPRETER)
        unsigned bytecodeOffset(CallFrame*, Instruction* returnAddress)
        {
            return static_cast<Instruction*>(returnAddress) - instructions().begin();
        }
#endif
        void setIsNumericCompareFunction(bool isNumericCompareFunction) { m_isNumericCompareFunction = isNumericCompareFunction; }
        bool isNumericCompareFunction() { return m_isNumericCompareFunction; }

        Vector<Instruction>& instructions() { return m_instructions; }
        void discardBytecode() { m_instructions.clear(); }

        unsigned instructionCount() { return m_instructionCount; }
        void setInstructionCount(unsigned instructionCount) { m_instructionCount = instructionCount; }

        JITCode& getJITCode() { return ownerExecutable()->generatedJITCode(); }
        ExecutablePool* executablePool() { return ownerExecutable()->getExecutablePool(); }

        ScriptExecutable* ownerExecutable() const { return m_ownerExecutable; }

        void setGlobalData(JSGlobalData* globalData) { m_globalData = globalData; }

        void setThisRegister(int thisRegister) { m_thisRegister = thisRegister; }
        int thisRegister() const { return m_thisRegister; }

        void setNeedsFullScopeChain(bool needsFullScopeChain) { m_needsFullScopeChain = needsFullScopeChain; }
        bool needsFullScopeChain() const { return m_needsFullScopeChain; }
        void setUsesEval(bool usesEval) { m_usesEval = usesEval; }
        bool usesEval() const { return m_usesEval; }
        void setUsesArguments(bool usesArguments) { m_usesArguments = usesArguments; }
        bool usesArguments() const { return m_usesArguments; }

        CodeType codeType() const { return m_codeType; }

        SourceProvider* source() const { return m_source.get(); }
        unsigned sourceOffset() const { return m_sourceOffset; }

        size_t numberOfJumpTargets() const { return m_jumpTargets.size(); }
        void addJumpTarget(unsigned jumpTarget) { m_jumpTargets.append(jumpTarget); }
        unsigned jumpTarget(int index) const { return m_jumpTargets[index]; }
        unsigned lastJumpTarget() const { return m_jumpTargets.last(); }

#if ENABLE(INTERPRETER)
        void addPropertyAccessInstruction(unsigned propertyAccessInstruction) { m_propertyAccessInstructions.append(propertyAccessInstruction); }
        void addGlobalResolveInstruction(unsigned globalResolveInstruction) { m_globalResolveInstructions.append(globalResolveInstruction); }
        bool hasGlobalResolveInstructionAtBytecodeOffset(unsigned bytecodeOffset);
#endif

        size_t numberOfStructureStubInfos() const { return m_structureStubInfos.size(); }
        void addStructureStubInfo(const StructureStubInfo& stubInfo) { m_structureStubInfos.append(stubInfo); }
        StructureStubInfo& structureStubInfo(int index) { return m_structureStubInfos[index]; }

        void addGlobalResolveInfo(unsigned globalResolveInstruction) { m_globalResolveInfos.append(GlobalResolveInfo(globalResolveInstruction)); }
        GlobalResolveInfo& globalResolveInfo(int index) { return m_globalResolveInfos[index]; }
        bool hasGlobalResolveInfoAtBytecodeOffset(unsigned bytecodeOffset);

        size_t numberOfCallLinkInfos() const { return m_callLinkInfos.size(); }
        void addCallLinkInfo() { m_callLinkInfos.append(CallLinkInfo()); }
        CallLinkInfo& callLinkInfo(int index) { return m_callLinkInfos[index]; }

        void addMethodCallLinkInfos(unsigned n) { m_methodCallLinkInfos.grow(n); }
        MethodCallLinkInfo& methodCallLinkInfo(int index) { return m_methodCallLinkInfos[index]; }

        void addFunctionRegisterInfo(unsigned bytecodeOffset, int functionIndex) { createRareDataIfNecessary(); m_rareData->m_functionRegisterInfos.append(FunctionRegisterInfo(bytecodeOffset, functionIndex)); }
        // Exception handling support

        size_t numberOfExceptionHandlers() const { return m_rareData ? m_rareData->m_exceptionHandlers.size() : 0; }
        void addExceptionHandler(const HandlerInfo& handler) { createRareDataIfNecessary(); return m_rareData->m_exceptionHandlers.append(handler); }
        HandlerInfo& exceptionHandler(int index) { ASSERT(m_rareData); return m_rareData->m_exceptionHandlers[index]; }

        bool hasExceptionInfo() const { return m_exceptionInfo; }
        void clearExceptionInfo() { m_exceptionInfo.clear(); }
        ExceptionInfo* extractExceptionInfo() { ASSERT(m_exceptionInfo); return m_exceptionInfo.release(); }

        void addExpressionInfo(const ExpressionRangeInfo& expressionInfo) { ASSERT(m_exceptionInfo); m_exceptionInfo->m_expressionInfo.append(expressionInfo); }
        void addGetByIdExceptionInfo(const GetByIdExceptionInfo& info) { ASSERT(m_exceptionInfo); m_exceptionInfo->m_getByIdExceptionInfo.append(info); }

        size_t numberOfLineInfos() const { ASSERT(m_exceptionInfo); return m_exceptionInfo->m_lineInfo.size(); }
        void addLineInfo(const LineInfo& lineInfo) { ASSERT(m_exceptionInfo); m_exceptionInfo->m_lineInfo.append(lineInfo); }
        LineInfo& lastLineInfo() { ASSERT(m_exceptionInfo); return m_exceptionInfo->m_lineInfo.last(); }

        Vector<CallReturnOffsetToBytecodeIndex>& callReturnIndexVector() { ASSERT(m_exceptionInfo); return m_exceptionInfo->m_callReturnIndexVector; }
        size_t numberOfIdentifiers() const { return m_identifiers.size(); }
        void addIdentifier(const Identifier& i) { return m_identifiers.append(i); }
        Identifier& identifier(int index) { return m_identifiers[index]; }

        size_t numberOfConstantRegisters() const { return m_constantRegisters.size(); }
        void addConstantRegister(const Register& r) { return m_constantRegisters.append(r); }
        Register& constantRegister(int index) { return m_constantRegisters[index - FirstConstantRegisterIndex]; }
        ALWAYS_INLINE bool isConstantRegisterIndex(int index) const { return index >= FirstConstantRegisterIndex; }
        ALWAYS_INLINE JSValue getConstant(int index) const { return m_constantRegisters[index - FirstConstantRegisterIndex].jsValue(); }

        unsigned addFunctionDecl(NonNullPassRefPtr<FunctionExecutable> n) { unsigned size = m_functionDecls.size(); m_functionDecls.append(n); return size; }
        FunctionExecutable* functionDecl(int index) { return m_functionDecls[index].get(); }
        int numberOfFunctionDecls() { return m_functionDecls.size(); }
        unsigned addFunctionExpr(NonNullPassRefPtr<FunctionExecutable> n) { unsigned size = m_functionExprs.size(); m_functionExprs.append(n); return size; }
        FunctionExecutable* functionExpr(int index) { return m_functionExprs[index].get(); }

        unsigned addRegExp(RegExp* r) { createRareDataIfNecessary(); unsigned size = m_rareData->m_regexps.size(); m_rareData->m_regexps.append(r); return size; }
        RegExp* regexp(int index) const { ASSERT(m_rareData); return m_rareData->m_regexps[index].get(); }
        size_t numberOfImmediateSwitchJumpTables() const { return m_rareData ? m_rareData->m_immediateSwitchJumpTables.size() : 0; }
        SimpleJumpTable& addImmediateSwitchJumpTable() { createRareDataIfNecessary(); m_rareData->m_immediateSwitchJumpTables.append(SimpleJumpTable()); return m_rareData->m_immediateSwitchJumpTables.last(); }
        SimpleJumpTable& immediateSwitchJumpTable(int tableIndex) { ASSERT(m_rareData); return m_rareData->m_immediateSwitchJumpTables[tableIndex]; }

        size_t numberOfCharacterSwitchJumpTables() const { return m_rareData ? m_rareData->m_characterSwitchJumpTables.size() : 0; }
        SimpleJumpTable& addCharacterSwitchJumpTable() { createRareDataIfNecessary(); m_rareData->m_characterSwitchJumpTables.append(SimpleJumpTable()); return m_rareData->m_characterSwitchJumpTables.last(); }
        SimpleJumpTable& characterSwitchJumpTable(int tableIndex) { ASSERT(m_rareData); return m_rareData->m_characterSwitchJumpTables[tableIndex]; }

        size_t numberOfStringSwitchJumpTables() const { return m_rareData ? m_rareData->m_stringSwitchJumpTables.size() : 0; }
        StringJumpTable& addStringSwitchJumpTable() { createRareDataIfNecessary(); m_rareData->m_stringSwitchJumpTables.append(StringJumpTable()); return m_rareData->m_stringSwitchJumpTables.last(); }
        StringJumpTable& stringSwitchJumpTable(int tableIndex) { ASSERT(m_rareData); return m_rareData->m_stringSwitchJumpTables[tableIndex]; }

        SymbolTable* symbolTable() { return m_symbolTable; }
        SharedSymbolTable* sharedSymbolTable() { ASSERT(m_codeType == FunctionCode); return static_cast<SharedSymbolTable*>(m_symbolTable); }

        EvalCodeCache& evalCodeCache() { createRareDataIfNecessary(); return m_rareData->m_evalCodeCache; }
        // FIXME: Make these remaining members private.

        int m_numCalleeRegisters;

#if !defined(NDEBUG) || ENABLE(OPCODE_SAMPLING)
        void dump(ExecState*, const Vector<Instruction>::const_iterator& begin, Vector<Instruction>::const_iterator&) const;

        CString registerName(ExecState*, int r) const;
        void printUnaryOp(ExecState*, int location, Vector<Instruction>::const_iterator&, const char* op) const;
        void printBinaryOp(ExecState*, int location, Vector<Instruction>::const_iterator&, const char* op) const;
        void printConditionalJump(ExecState*, const Vector<Instruction>::const_iterator&, Vector<Instruction>::const_iterator&, int location, const char* op) const;
        void printGetByIdOp(ExecState*, int location, Vector<Instruction>::const_iterator&, const char* op) const;
        void printPutByIdOp(ExecState*, int location, Vector<Instruction>::const_iterator&, const char* op) const;
#endif

        void reparseForExceptionInfoIfNecessary(CallFrame*);
        void createRareDataIfNecessary()

                m_rareData.set(new RareData);
        ScriptExecutable* m_ownerExecutable;
        JSGlobalData* m_globalData;

        Vector<Instruction> m_instructions;

        unsigned m_instructionCount;

        bool m_needsFullScopeChain;
        bool m_usesArguments;
        bool m_isNumericCompareFunction;

        RefPtr<SourceProvider> m_source;
        unsigned m_sourceOffset;

#if ENABLE(INTERPRETER)
        Vector<unsigned> m_propertyAccessInstructions;
        Vector<unsigned> m_globalResolveInstructions;
#endif

        Vector<StructureStubInfo> m_structureStubInfos;
        Vector<GlobalResolveInfo> m_globalResolveInfos;
        Vector<CallLinkInfo> m_callLinkInfos;
        Vector<MethodCallLinkInfo> m_methodCallLinkInfos;
        Vector<CallLinkInfo*> m_linkedCallerList;

        Vector<unsigned> m_jumpTargets;

        Vector<Identifier> m_identifiers;
        Vector<Register> m_constantRegisters;
        Vector<RefPtr<FunctionExecutable> > m_functionDecls;
        Vector<RefPtr<FunctionExecutable> > m_functionExprs;

        SymbolTable* m_symbolTable;

        OwnPtr<ExceptionInfo> m_exceptionInfo;
        struct RareData : FastAllocBase {
            Vector<HandlerInfo> m_exceptionHandlers;

            Vector<RefPtr<RegExp> > m_regexps;

            Vector<SimpleJumpTable> m_immediateSwitchJumpTables;
            Vector<SimpleJumpTable> m_characterSwitchJumpTables;
            Vector<StringJumpTable> m_stringSwitchJumpTables;

            EvalCodeCache m_evalCodeCache;

            Vector<FunctionRegisterInfo> m_functionRegisterInfos;

        OwnPtr<RareData> m_rareData;
    // Program code is not marked by any function, so we make the global object
    // responsible for marking it.

    class GlobalCodeBlock : public CodeBlock {
        GlobalCodeBlock(ScriptExecutable* ownerExecutable, CodeType codeType, PassRefPtr<SourceProvider> sourceProvider, unsigned sourceOffset, JSGlobalObject* globalObject)
            : CodeBlock(ownerExecutable, codeType, sourceProvider, sourceOffset, &m_unsharedSymbolTable)
            , m_globalObject(globalObject)

            m_globalObject->codeBlocks().add(this);

            m_globalObject->codeBlocks().remove(this);

        void clearGlobalObject() { m_globalObject = 0; }

        JSGlobalObject* m_globalObject; // For program and eval nodes, the global object that marks the constant pool.
        SymbolTable m_unsharedSymbolTable;
    class ProgramCodeBlock : public GlobalCodeBlock {
        ProgramCodeBlock(ProgramExecutable* ownerExecutable, CodeType codeType, JSGlobalObject* globalObject, PassRefPtr<SourceProvider> sourceProvider)
            : GlobalCodeBlock(ownerExecutable, codeType, sourceProvider, 0, globalObject)
    class EvalCodeBlock : public GlobalCodeBlock {
        EvalCodeBlock(EvalExecutable* ownerExecutable, JSGlobalObject* globalObject, PassRefPtr<SourceProvider> sourceProvider, int baseScopeDepth)
            : GlobalCodeBlock(ownerExecutable, EvalCode, sourceProvider, 0, globalObject)
            , m_baseScopeDepth(baseScopeDepth)

        int baseScopeDepth() const { return m_baseScopeDepth; }

        const Identifier& variable(unsigned index) { return m_variables[index]; }
        unsigned numVariables() { return m_variables.size(); }
        void adoptVariables(Vector<Identifier>& variables)
        {
            ASSERT(m_variables.isEmpty());
            m_variables.swap(variables);
        }

        int m_baseScopeDepth;
        Vector<Identifier> m_variables;
    class FunctionCodeBlock : public CodeBlock {
        // Rather than using the usual RefCounted::create idiom for SharedSymbolTable, we just use new,
        // as we need to initialise the CodeBlock before we can initialise any RefPtr that would hold
        // the shared symbol table, so we pass it as a raw pointer with a ref count of 1. We then
        // manually deref in the destructor.
        FunctionCodeBlock(FunctionExecutable* ownerExecutable, CodeType codeType, PassRefPtr<SourceProvider> sourceProvider, unsigned sourceOffset)
            : CodeBlock(ownerExecutable, codeType, sourceProvider, sourceOffset, new SharedSymbolTable)

            sharedSymbolTable()->deref();
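
    // Illustrative sketch (not part of the original header) of the ownership pattern described in
    // the comment above, using placeholder code: the table starts with a ref count of 1 from 'new',
    // the CodeBlock stores it as a raw SymbolTable*, and the destructor's deref() releases that
    // initial reference.
    //
    //     SharedSymbolTable* table = new SharedSymbolTable; // ref count is 1
    //     // ... construct the CodeBlock with 'table' as its SymbolTable* ...
    //     table->deref();                                   // balances the initial ref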
    inline Register& ExecState::r(int index)
        CodeBlock* codeBlock = this->codeBlock();
        if (codeBlock->isConstantRegisterIndex(index))
            return codeBlock->constantRegister(index);

#endif // CodeBlock_h