/*
 * Copyright (C) 2008, 2009, 2013 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
 *    its contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include "CachedTranscendentalFunction.h"
#include "DateInstanceCache.h"
#include "ExecutableAllocator.h"
#include "Heap.h"
#include "Intrinsic.h"
#include "JITThunks.h"
#include "JSCJSValue.h"
#include "JSLock.h"
#include "LLIntData.h"
#include "MacroAssemblerCodeRef.h"
#include "NumericStrings.h"
#include "ProfilerDatabase.h"
#include "PrivateName.h"
#include "PrototypeMap.h"
#include "SmallStrings.h"
#include "Strong.h"
#include "ThunkGenerators.h"
#include "TypedArrayDescriptor.h"
#include "WeakRandom.h"
#include <wtf/BumpPointerAllocator.h>
#include <wtf/DateMath.h>
#include <wtf/Forward.h>
#include <wtf/HashMap.h>
#include <wtf/RefCountedArray.h>
#include <wtf/SimpleStats.h>
#include <wtf/ThreadSafeRefCounted.h>
#include <wtf/ThreadSpecific.h>
#include <wtf/WTFThreadData.h>
#if ENABLE(REGEXP_TRACING)
#include <wtf/ListHashSet.h>
#endif
namespace JSC {

class CommonIdentifiers;
class IdentifierTable;
class LLIntOffsetsExtractor;
class NativeExecutable;
class SourceProviderCache;
#if ENABLE(REGEXP_TRACING)
class RegExp;
#endif
class UnlinkedCodeBlock;
class UnlinkedEvalCodeBlock;
class UnlinkedFunctionExecutable;
class UnlinkedProgramCodeBlock;

#if ENABLE(DFG_JIT)
namespace DFG {
class LongLivedState;
}
#endif // ENABLE(DFG_JIT)
struct LocalTimeOffsetCache {
    LocalTimeOffsetCache()
    {
        offset = LocalTimeOffset();
    }

    LocalTimeOffset offset;
};
class ConservativeRoots;
#if COMPILER(MSVC)
#pragma warning(push)
#pragma warning(disable: 4200) // Disable "zero-sized array in struct/union" warning
#endif

struct ScratchBuffer {
    ScratchBuffer()
    {
        u.m_activeLength = 0;
    }

    static ScratchBuffer* create(size_t size)
    {
        ScratchBuffer* result = new (fastMalloc(ScratchBuffer::allocationSize(size))) ScratchBuffer;
        return result;
    }

    static size_t allocationSize(size_t bufferSize) { return sizeof(ScratchBuffer) + bufferSize; }
    void setActiveLength(size_t activeLength) { u.m_activeLength = activeLength; }
    size_t activeLength() const { return u.m_activeLength; }
    size_t* activeLengthPtr() { return &u.m_activeLength; }
    void* dataBuffer() { return m_buffer; }

    union {
        size_t m_activeLength;
        double pad; // Make sure m_buffer is double aligned.
    } u;
#if CPU(MIPS) && (defined WTF_MIPS_ARCH_REV && WTF_MIPS_ARCH_REV == 2)
    void* m_buffer[0] __attribute__((aligned(8)));
#else
    void* m_buffer[0];
#endif
};

#if COMPILER(MSVC)
#pragma warning(pop)
#endif
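// A minimal usage sketch (an illustration, not code from this file): a caller
// grabs a buffer, publishes how many bytes are live via setActiveLength() so a
// conservative GC scan knows how much of dataBuffer() to visit, and resets the
// active length once it is done. The size below is an arbitrary example.
//
//     ScratchBuffer* buffer = ScratchBuffer::create(16 * sizeof(void*));
//     buffer->setActiveLength(16 * sizeof(void*));
//     void** slots = static_cast<void**>(buffer->dataBuffer());
//     slots[0] = pointerTheGCShouldSee; // hypothetical value
//     // ... later, when the pointers no longer need to be scanned ...
//     buffer->setActiveLength(0);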
class VM : public ThreadSafeRefCounted<VM> {
public:
    // WebCore has a one-to-one mapping of threads to VMs;
    // either create() or createLeaked() should only be called once
    // on a thread; this is the 'default' VM (it uses the
    // thread's default string uniquing table from wtfThreadData).
    // API contexts created using the new context-group-aware interface
    // create APIContextGroup objects which require less locking of JSC
    // than the old singleton APIShared VM created for use by
    // the original API.
    enum VMType { Default, APIContextGroup, APIShared };
    class ClientData {
    public:
        JS_EXPORT_PRIVATE virtual ~ClientData() = 0;
    };

    bool isSharedInstance() { return vmType == APIShared; }
    bool usingAPI() { return vmType != Default; }
    JS_EXPORT_PRIVATE static bool sharedInstanceExists();
    JS_EXPORT_PRIVATE static VM& sharedInstance();

    JS_EXPORT_PRIVATE static PassRefPtr<VM> create(HeapType = SmallHeap);
    JS_EXPORT_PRIVATE static PassRefPtr<VM> createLeaked(HeapType = SmallHeap);
    static PassRefPtr<VM> createContextGroup(HeapType = SmallHeap);
    JS_EXPORT_PRIVATE ~VM();
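    // A minimal usage sketch (an illustration, not code from this file): an
    // embedder obtaining its own VM through the factory functions above. The
    // LargeHeap argument and the surrounding calls (initializeThreading(),
    // JSLockHolder) are the usual JSC setup and are shown only as an example.
    //
    //     JSC::initializeThreading();
    //     RefPtr<VM> vm = VM::create(LargeHeap);
    //     JSLockHolder locker(vm.get());
    //     // ... create a JSGlobalObject in this VM and evaluate scripts ...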
    void makeUsableFromMultipleThreads() { heap.machineThreads().makeUsableFromMultipleThreads(); }

private:
    RefPtr<JSLock> m_apiLock;

public:
#if ENABLE(ASSEMBLER)
    // executableAllocator should be destructed after the heap, as the heap can call executableAllocator
    // in its destructor.
    ExecutableAllocator executableAllocator;
#endif

    // The heap should be just after executableAllocator and before other members to ensure that it's
    // destructed after all the objects that reference it.
    Heap heap;

#if ENABLE(DFG_JIT)
    OwnPtr<DFG::LongLivedState> m_dfgState;
#endif // ENABLE(DFG_JIT)

    VMType vmType;
    ClientData* clientData;
    ExecState* topCallFrame;
    const HashTable* arrayConstructorTable;
    const HashTable* arrayPrototypeTable;
    const HashTable* booleanPrototypeTable;
    const HashTable* dateTable;
    const HashTable* dateConstructorTable;
    const HashTable* errorPrototypeTable;
    const HashTable* globalObjectTable;
    const HashTable* jsonTable;
    const HashTable* mathTable;
    const HashTable* numberConstructorTable;
    const HashTable* numberPrototypeTable;
    const HashTable* objectConstructorTable;
    const HashTable* privateNamePrototypeTable;
    const HashTable* regExpTable;
    const HashTable* regExpConstructorTable;
    const HashTable* regExpPrototypeTable;
    const HashTable* stringConstructorTable;
    Strong<Structure> structureStructure;
    Strong<Structure> structureRareDataStructure;
    Strong<Structure> debuggerActivationStructure;
    Strong<Structure> terminatedExecutionErrorStructure;
    Strong<Structure> stringStructure;
    Strong<Structure> notAnObjectStructure;
    Strong<Structure> propertyNameIteratorStructure;
    Strong<Structure> getterSetterStructure;
    Strong<Structure> apiWrapperStructure;
    Strong<Structure> JSScopeStructure;
    Strong<Structure> executableStructure;
    Strong<Structure> nativeExecutableStructure;
    Strong<Structure> evalExecutableStructure;
    Strong<Structure> programExecutableStructure;
    Strong<Structure> functionExecutableStructure;
    Strong<Structure> regExpStructure;
    Strong<Structure> sharedSymbolTableStructure;
    Strong<Structure> structureChainStructure;
    Strong<Structure> sparseArrayValueMapStructure;
    Strong<Structure> withScopeStructure;
    Strong<Structure> unlinkedFunctionExecutableStructure;
    Strong<Structure> unlinkedProgramCodeBlockStructure;
    Strong<Structure> unlinkedEvalCodeBlockStructure;
    Strong<Structure> unlinkedFunctionCodeBlockStructure;
    Strong<Structure> propertyTableStructure;
    IdentifierTable* identifierTable;
    CommonIdentifiers* propertyNames;
    const MarkedArgumentBuffer* emptyList; // Lists are supposed to be allocated on the stack to have their elements properly marked, which is not the case here - but this list has nothing to mark.
    SmallStrings smallStrings;
    NumericStrings numericStrings;
    DateInstanceCache dateInstanceCache;
    WTF::SimpleStats machineCodeBytesPerBytecodeWordForBaselineJIT;
    Vector<CodeBlock*> codeBlocksBeingCompiled;
    void startedCompiling(CodeBlock* codeBlock)
    {
        codeBlocksBeingCompiled.append(codeBlock);
    }

    void finishedCompiling(CodeBlock* codeBlock)
    {
        ASSERT_UNUSED(codeBlock, codeBlock == codeBlocksBeingCompiled.last());
        codeBlocksBeingCompiled.removeLast();
    }
    void setInDefineOwnProperty(bool inDefineOwnProperty)
    {
        m_inDefineOwnProperty = inDefineOwnProperty;
    }

    bool isInDefineOwnProperty()
    {
        return m_inDefineOwnProperty;
    }

    LegacyProfiler* enabledProfiler()
    {
        return m_enabledProfiler;
    }
#if ENABLE(JIT) && ENABLE(LLINT)
    bool canUseJIT() { return m_canUseJIT; }
#elif ENABLE(JIT)
    bool canUseJIT() { return true; } // jit only
#else
    bool canUseJIT() { return false; } // interpreter only
#endif

#if ENABLE(YARR_JIT)
    bool canUseRegExpJIT() { return m_canUseRegExpJIT; }
#else
    bool canUseRegExpJIT() { return false; } // interpreter only
#endif
    SourceProviderCache* addSourceProviderCache(SourceProvider*);
    void clearSourceProviderCaches();
    PrototypeMap prototypeMap;

    OwnPtr<ParserArena> parserArena;
    typedef HashMap<RefPtr<SourceProvider>, RefPtr<SourceProviderCache> > SourceProviderCacheMap;
    SourceProviderCacheMap sourceProviderCacheMap;
    OwnPtr<Keywords> keywords;
    Interpreter* interpreter;
    OwnPtr<JITThunks> jitStubs;
    MacroAssemblerCodeRef getCTIStub(ThunkGenerator generator)
    {
        return jitStubs->ctiStub(this, generator);
    }
    NativeExecutable* getHostFunction(NativeFunction, Intrinsic);
    NativeExecutable* getHostFunction(NativeFunction, NativeFunction constructor);
    JS_EXPORT_PRIVATE void clearExceptionStack();
    RefCountedArray<StackFrame>& exceptionStack() { return m_exceptionStack; }
    const ClassInfo* const jsArrayClassInfo;
    const ClassInfo* const jsFinalObjectClassInfo;

    ReturnAddressPtr exceptionLocation;
    JSValue hostCallReturnValue;
    ExecState* callFrameForThrow;
    void* targetMachinePCForThrow;
    Instruction* targetInterpreterPCForThrow;
    uint32_t osrExitIndex;
    void* osrExitJumpDestination;
    Vector<ScratchBuffer*> scratchBuffers;
    size_t sizeOfLastScratchBuffer;
    ScratchBuffer* scratchBufferForSize(size_t size)
    {
        if (size > sizeOfLastScratchBuffer) {
            // Protect against an N^2 memory usage pathology by ensuring
            // that at worst, we get a geometric series, meaning that the
            // total memory usage is somewhere around
            // max(scratch buffer size) * 4.
            sizeOfLastScratchBuffer = size * 2;
            scratchBuffers.append(ScratchBuffer::create(sizeOfLastScratchBuffer));
        }

        ScratchBuffer* result = scratchBuffers.last();
        result->setActiveLength(0);
        return result;
    }
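    // To illustrate the bound above with example numbers: requests of 10, 25,
    // and 60 bytes allocate buffers of 20, 50, and 120 bytes, since each miss
    // doubles the requested size and a new buffer is allocated only once a
    // request exceeds double the previous miss. Each buffer is therefore more
    // than twice the size of the one before it, so the total stays under twice
    // the largest buffer, i.e. roughly 4x the largest request.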
    void gatherConservativeRoots(ConservativeRoots&);

    JSGlobalObject* dynamicGlobalObject;

    HashSet<JSObject*> stringRecursionCheckVisitedObjects;

    LocalTimeOffsetCache localTimeOffsetCache;

    String cachedDateString;
    double cachedDateStringValue;
    LegacyProfiler* m_enabledProfiler;
    OwnPtr<Profiler::Database> m_perBytecodeProfiler;
    RegExpCache* m_regExpCache;
    BumpPointerAllocator m_regExpAllocator;

#if ENABLE(REGEXP_TRACING)
    typedef ListHashSet<RefPtr<RegExp> > RTTraceList;
    RTTraceList* m_rtTraceList;
#endif

    ThreadIdentifier exclusiveThread;
    CachedTranscendentalFunction<std::sin> cachedSin;
    JS_EXPORT_PRIVATE void resetDateCache();

    JS_EXPORT_PRIVATE void startSampling();
    JS_EXPORT_PRIVATE void stopSampling();
    JS_EXPORT_PRIVATE void dumpSampleData(ExecState* exec);
    RegExpCache* regExpCache() { return m_regExpCache; }
#if ENABLE(REGEXP_TRACING)
    void addRegExpToTrace(PassRefPtr<RegExp> regExp);
#endif
    JS_EXPORT_PRIVATE void dumpRegExpTrace();

    bool isCollectorBusy() { return heap.isBusy(); }
    JS_EXPORT_PRIVATE void releaseExecutableMemory();
#if ENABLE(GC_VALIDATION)
    bool isInitializingObject() const;
    void setInitializingObjectClass(const ClassInfo*);
#endif

    unsigned m_newStringsSinceLastHashCons;

    static const unsigned s_minNumberOfNewStringsToHashCons = 100;

    bool haveEnoughNewStringsToHashCons() { return m_newStringsSinceLastHashCons > s_minNumberOfNewStringsToHashCons; }
    void resetNewStringsSinceLastHashCons() { m_newStringsSinceLastHashCons = 0; }
#define registerTypedArrayFunction(type, capitalizedType) \
    void registerTypedArrayDescriptor(const capitalizedType##Array*, const TypedArrayDescriptor& descriptor) \
    { \
        ASSERT(!m_##type##ArrayDescriptor.m_classInfo || m_##type##ArrayDescriptor.m_classInfo == descriptor.m_classInfo); \
        m_##type##ArrayDescriptor = descriptor; \
        ASSERT(m_##type##ArrayDescriptor.m_classInfo); \
    } \
    const TypedArrayDescriptor& type##ArrayDescriptor() const { ASSERT(m_##type##ArrayDescriptor.m_classInfo); return m_##type##ArrayDescriptor; }

    registerTypedArrayFunction(int8, Int8);
    registerTypedArrayFunction(int16, Int16);
    registerTypedArrayFunction(int32, Int32);
    registerTypedArrayFunction(uint8, Uint8);
    registerTypedArrayFunction(uint8Clamped, Uint8Clamped);
    registerTypedArrayFunction(uint16, Uint16);
    registerTypedArrayFunction(uint32, Uint32);
    registerTypedArrayFunction(float32, Float32);
    registerTypedArrayFunction(float64, Float64);
#undef registerTypedArrayFunction
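    // For example, registerTypedArrayFunction(int8, Int8) above expands to a
    // registerTypedArrayDescriptor(const Int8Array*, const TypedArrayDescriptor&)
    // setter plus an int8ArrayDescriptor() accessor, both backed by the
    // m_int8ArrayDescriptor member declared further below.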
    const TypedArrayDescriptor* typedArrayDescriptor(TypedArrayType type) const
    {
        switch (type) {
        case TypedArrayInt8:
            return &int8ArrayDescriptor();
        case TypedArrayInt16:
            return &int16ArrayDescriptor();
        case TypedArrayInt32:
            return &int32ArrayDescriptor();
        case TypedArrayUint8:
            return &uint8ArrayDescriptor();
        case TypedArrayUint8Clamped:
            return &uint8ClampedArrayDescriptor();
        case TypedArrayUint16:
            return &uint16ArrayDescriptor();
        case TypedArrayUint32:
            return &uint32ArrayDescriptor();
        case TypedArrayFloat32:
            return &float32ArrayDescriptor();
        case TypedArrayFloat64:
            return &float64ArrayDescriptor();
        default:
            return 0;
        }
    }
    JSLock& apiLock() { return *m_apiLock; }
    CodeCache* codeCache() { return m_codeCache.get(); }

    JS_EXPORT_PRIVATE void discardAllCode();

private:
    friend class LLIntOffsetsExtractor;
    VM(VMType, HeapType);
    static VM*& sharedInstanceInternal();
    void createNativeThunk();
#if ENABLE(ASSEMBLER)
    bool m_canUseAssembler;
#endif
#if ENABLE(JIT)
    bool m_canUseJIT;
#endif
#if ENABLE(YARR_JIT)
    bool m_canUseRegExpJIT;
#endif
#if ENABLE(GC_VALIDATION)
    const ClassInfo* m_initializingObjectClass;
#endif
    bool m_inDefineOwnProperty;
    RefPtr<CodeCache> m_codeCache;
    RefCountedArray<StackFrame> m_exceptionStack;
    TypedArrayDescriptor m_int8ArrayDescriptor;
    TypedArrayDescriptor m_int16ArrayDescriptor;
    TypedArrayDescriptor m_int32ArrayDescriptor;
    TypedArrayDescriptor m_uint8ArrayDescriptor;
    TypedArrayDescriptor m_uint8ClampedArrayDescriptor;
    TypedArrayDescriptor m_uint16ArrayDescriptor;
    TypedArrayDescriptor m_uint32ArrayDescriptor;
    TypedArrayDescriptor m_float32ArrayDescriptor;
    TypedArrayDescriptor m_float64ArrayDescriptor;
};
#if ENABLE(GC_VALIDATION)
inline bool VM::isInitializingObject() const
{
    return !!m_initializingObjectClass;
}

inline void VM::setInitializingObjectClass(const ClassInfo* initializingObjectClass)
{
    m_initializingObjectClass = initializingObjectClass;
}
#endif
inline Heap* WeakSet::heap() const
{
    return &m_vm->heap;
}

} // namespace JSC