2 * Copyright (C) 2008, 2009, 2013-2015 Apple Inc. All rights reserved.
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
8 * 1. Redistributions of source code must retain the above copyright
9 * notice, this list of conditions and the following disclaimer.
10 * 2. Redistributions in binary form must reproduce the above copyright
11 * notice, this list of conditions and the following disclaimer in the
12 * documentation and/or other materials provided with the distribution.
13 * 3. Neither the name of Apple Inc. ("Apple") nor the names of
14 * its contributors may be used to endorse or promote products derived
15 * from this software without specific prior written permission.
17 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
18 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
19 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
20 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
21 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
22 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
23 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
24 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
26 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
32 #include "ControlFlowProfiler.h"
33 #include "DateInstanceCache.h"
34 #include "ExecutableAllocator.h"
35 #include "FunctionHasExecutedCache.h"
37 #include "Intrinsic.h"
38 #include "JITThunks.h"
39 #include "JSCJSValue.h"
41 #include "LLIntData.h"
42 #include "MacroAssemblerCodeRef.h"
43 #include "NumericStrings.h"
44 #include "PrivateName.h"
45 #include "PrototypeMap.h"
46 #include "SmallStrings.h"
47 #include "SourceCode.h"
49 #include "ThunkGenerators.h"
50 #include "TypedArrayController.h"
51 #include "VMEntryRecord.h"
53 #include "Watchpoint.h"
54 #include "WeakRandom.h"
56 #include <wtf/BumpPointerAllocator.h>
57 #include <wtf/DateMath.h>
58 #include <wtf/Forward.h>
59 #include <wtf/HashMap.h>
60 #include <wtf/HashSet.h>
61 #include <wtf/SimpleStats.h>
62 #include <wtf/StackBounds.h>
63 #include <wtf/ThreadSafeRefCounted.h>
64 #include <wtf/ThreadSpecific.h>
65 #include <wtf/WTFThreadData.h>
66 #include <wtf/text/SymbolRegistry.h>
67 #include <wtf/text/WTFString.h>
68 #if ENABLE(REGEXP_TRACING)
69 #include <wtf/ListHashSet.h>
74 class ArityCheckFailReturnThunks
;
75 class BuiltinExecutables
;
78 class CommonIdentifiers
;
83 class TypeProfilerLog
;
89 class LLIntOffsetsExtractor
;
91 class NativeExecutable
;
93 class ScriptExecutable
;
95 class SourceProviderCache
;
99 #if ENABLE(REGEXP_TRACING)
102 class UnlinkedCodeBlock
;
103 class UnlinkedEvalCodeBlock
;
104 class UnlinkedFunctionExecutable
;
105 class UnlinkedProgramCodeBlock
;
106 class VirtualRegister
;
113 class LongLivedState
;
115 #endif // ENABLE(DFG_JIT)
120 #endif // ENABLE(FTL_JIT)
121 namespace CommonSlowPaths
{
122 struct ArityCheckData
;
// NOTE(review): corrupted extraction of the LocalTimeOffsetCache struct.
// The fused original line numbers jump 132 -> 136 -> 142 -> 146 -> 149 -> 153,
// so the constructor's remaining initializers, the reset-style member
// function's signature and braces, several data members, and the closing
// "};" were dropped, and the surviving declarations are split mid-statement.
// The only facts this residue establishes: the default constructor
// initializes timeType to WTF::UTCTime; some member function assigns
// LocalTimeOffset() to `offset` and WTF::UTCTime to `timeType`; the members
// include `LocalTimeOffset offset` and `WTF::TimeType timeType`. Do not edit
// in place — restore this struct from the upstream source file.
131 struct LocalTimeOffsetCache
{
132 LocalTimeOffsetCache()
136 , timeType(WTF::UTCTime
)
142 offset
= LocalTimeOffset();
146 timeType
= WTF::UTCTime
;
149 LocalTimeOffset offset
;
153 WTF::TimeType timeType
;
156 class ConservativeRoots
;
159 #pragma warning(push)
160 #pragma warning(disable: 4200) // Disable "zero-sized array in struct/union" warning
// NOTE(review): corrupted extraction of the ScratchBuffer struct. The fused
// original line numbers jump (162 -> 165 -> 168 -> 170 -> 175, then 179 ->
// 182, 183 -> 185, and stop at 186), so the constructor braces, create()'s
// return statement, the union wrapper around m_activeLength/pad (implied by
// the surviving "u.m_activeLength" accesses), the non-MIPS #else branch of
// the m_buffer declaration, the #endif, and the closing "};" were all
// dropped; the surviving declarations are split mid-statement. The residue
// establishes only: a zero-initialising constructor via u.m_activeLength;
// a placement-new create(size_t) factory over fastMalloc; allocationSize /
// setActiveLength / activeLength / activeLengthPtr / dataBuffer accessors;
// and a double-aligned, zero-length m_buffer array on MIPS rev 2. Do not
// edit in place — restore this struct from the upstream source file.
162 struct ScratchBuffer
{
165 u
.m_activeLength
= 0;
168 static ScratchBuffer
* create(size_t size
)
170 ScratchBuffer
* result
= new (fastMalloc(ScratchBuffer::allocationSize(size
))) ScratchBuffer
;
175 static size_t allocationSize(size_t bufferSize
) { return sizeof(ScratchBuffer
) + bufferSize
; }
176 void setActiveLength(size_t activeLength
) { u
.m_activeLength
= activeLength
; }
177 size_t activeLength() const { return u
.m_activeLength
; };
178 size_t* activeLengthPtr() { return &u
.m_activeLength
; };
179 void* dataBuffer() { return m_buffer
; }
182 size_t m_activeLength
;
183 double pad
; // Make sure m_buffer is double aligned.
185 #if CPU(MIPS) && (defined WTF_MIPS_ARCH_REV && WTF_MIPS_ARCH_REV == 2)
186 void* m_buffer
[0] __attribute__((aligned(8)));
195 class VM
: public ThreadSafeRefCounted
<VM
> {
197 // WebCore has a one-to-one mapping of threads to VMs;
198 // either create() or createLeaked() should only be called once
199 // on a thread, this is the 'default' VM (it uses the
200 // thread's default string uniquing table from wtfThreadData).
201 // API contexts created using the new context group aware interface
202 // create APIContextGroup objects which require less locking of JSC
203 // than the old singleton APIShared VM created for use by
205 enum VMType
{ Default
, APIContextGroup
, APIShared
};
208 JS_EXPORT_PRIVATE
virtual ~ClientData() = 0;
211 bool isSharedInstance() { return vmType
== APIShared
; }
212 bool usingAPI() { return vmType
!= Default
; }
213 JS_EXPORT_PRIVATE
static bool sharedInstanceExists();
214 JS_EXPORT_PRIVATE
static VM
& sharedInstance();
216 JS_EXPORT_PRIVATE
static Ref
<VM
> create(HeapType
= SmallHeap
);
217 JS_EXPORT_PRIVATE
static Ref
<VM
> createLeaked(HeapType
= SmallHeap
);
218 static Ref
<VM
> createContextGroup(HeapType
= SmallHeap
);
219 JS_EXPORT_PRIVATE
~VM();
222 RefPtr
<JSLock
> m_apiLock
;
225 #if ENABLE(ASSEMBLER)
226 // executableAllocator should be destructed after the heap, as the heap can call executableAllocator
227 // in its destructor.
228 ExecutableAllocator executableAllocator
;
231 // The heap should be just after executableAllocator and before other members to ensure that it's
232 // destructed after all the objects that reference it.
236 std::unique_ptr
<DFG::LongLivedState
> dfgState
;
237 #endif // ENABLE(DFG_JIT)
240 ClientData
* clientData
;
241 VMEntryFrame
* topVMEntryFrame
;
242 ExecState
* topCallFrame
;
243 std::unique_ptr
<Watchdog
> watchdog
;
245 Strong
<Structure
> structureStructure
;
246 Strong
<Structure
> structureRareDataStructure
;
247 Strong
<Structure
> terminatedExecutionErrorStructure
;
248 Strong
<Structure
> stringStructure
;
249 Strong
<Structure
> notAnObjectStructure
;
250 Strong
<Structure
> propertyNameIteratorStructure
;
251 Strong
<Structure
> propertyNameEnumeratorStructure
;
252 Strong
<Structure
> getterSetterStructure
;
253 Strong
<Structure
> customGetterSetterStructure
;
254 Strong
<Structure
> scopedArgumentsTableStructure
;
255 Strong
<Structure
> apiWrapperStructure
;
256 Strong
<Structure
> JSScopeStructure
;
257 Strong
<Structure
> executableStructure
;
258 Strong
<Structure
> nativeExecutableStructure
;
259 Strong
<Structure
> evalExecutableStructure
;
260 Strong
<Structure
> programExecutableStructure
;
261 Strong
<Structure
> functionExecutableStructure
;
262 Strong
<Structure
> regExpStructure
;
263 Strong
<Structure
> symbolStructure
;
264 Strong
<Structure
> symbolTableStructure
;
265 Strong
<Structure
> structureChainStructure
;
266 Strong
<Structure
> sparseArrayValueMapStructure
;
267 Strong
<Structure
> templateRegistryKeyStructure
;
268 Strong
<Structure
> arrayBufferNeuteringWatchpointStructure
;
269 Strong
<Structure
> unlinkedFunctionExecutableStructure
;
270 Strong
<Structure
> unlinkedProgramCodeBlockStructure
;
271 Strong
<Structure
> unlinkedEvalCodeBlockStructure
;
272 Strong
<Structure
> unlinkedFunctionCodeBlockStructure
;
273 Strong
<Structure
> propertyTableStructure
;
274 Strong
<Structure
> weakMapDataStructure
;
275 Strong
<Structure
> inferredValueStructure
;
276 Strong
<Structure
> functionRareDataStructure
;
277 Strong
<Structure
> exceptionStructure
;
279 Strong
<Structure
> promiseDeferredStructure
;
281 Strong
<JSCell
> iterationTerminator
;
282 Strong
<JSCell
> emptyPropertyNameEnumerator
;
284 AtomicStringTable
* m_atomicStringTable
;
285 WTF::SymbolRegistry m_symbolRegistry
;
286 CommonIdentifiers
* propertyNames
;
287 const MarkedArgumentBuffer
* emptyList
; // Lists are supposed to be allocated on the stack to have their elements properly marked, which is not the case here - but this list has nothing to mark.
288 SmallStrings smallStrings
;
289 NumericStrings numericStrings
;
290 DateInstanceCache dateInstanceCache
;
291 WTF::SimpleStats machineCodeBytesPerBytecodeWordForBaselineJIT
;
292 WeakGCMap
<StringImpl
*, JSString
, PtrHash
<StringImpl
*>> stringCache
;
293 Strong
<JSString
> lastCachedString
;
295 AtomicStringTable
* atomicStringTable() const { return m_atomicStringTable
; }
296 WTF::SymbolRegistry
& symbolRegistry() { return m_symbolRegistry
; }
298 void setInDefineOwnProperty(bool inDefineOwnProperty
)
300 m_inDefineOwnProperty
= inDefineOwnProperty
;
303 bool isInDefineOwnProperty()
305 return m_inDefineOwnProperty
;
308 LegacyProfiler
* enabledProfiler() { return m_enabledProfiler
; }
309 void setEnabledProfiler(LegacyProfiler
*);
311 void* enabledProfilerAddress() { return &m_enabledProfiler
; }
314 bool canUseJIT() { return m_canUseJIT
; }
316 bool canUseJIT() { return false; } // interpreter only
320 bool canUseRegExpJIT() { return m_canUseRegExpJIT
; }
322 bool canUseRegExpJIT() { return false; } // interpreter only
325 SourceProviderCache
* addSourceProviderCache(SourceProvider
*);
326 void clearSourceProviderCaches();
328 PrototypeMap prototypeMap
;
330 typedef HashMap
<RefPtr
<SourceProvider
>, RefPtr
<SourceProviderCache
>> SourceProviderCacheMap
;
331 SourceProviderCacheMap sourceProviderCacheMap
;
332 std::unique_ptr
<Keywords
> keywords
;
333 Interpreter
* interpreter
;
335 std::unique_ptr
<JITThunks
> jitStubs
;
336 MacroAssemblerCodeRef
getCTIStub(ThunkGenerator generator
)
338 return jitStubs
->ctiStub(this, generator
);
340 NativeExecutable
* getHostFunction(NativeFunction
, Intrinsic
);
342 std::unique_ptr
<ArityCheckFailReturnThunks
> arityCheckFailReturnThunks
;
343 #endif // ENABLE(JIT)
344 std::unique_ptr
<CommonSlowPaths::ArityCheckData
> arityCheckData
;
346 std::unique_ptr
<FTL::Thunks
> ftlThunks
;
348 NativeExecutable
* getHostFunction(NativeFunction
, NativeFunction constructor
);
350 static ptrdiff_t exceptionOffset()
352 return OBJECT_OFFSETOF(VM
, m_exception
);
355 static ptrdiff_t vmEntryFrameForThrowOffset()
357 return OBJECT_OFFSETOF(VM
, vmEntryFrameForThrow
);
360 static ptrdiff_t topVMEntryFrameOffset()
362 return OBJECT_OFFSETOF(VM
, topVMEntryFrame
);
365 static ptrdiff_t callFrameForThrowOffset()
367 return OBJECT_OFFSETOF(VM
, callFrameForThrow
);
370 static ptrdiff_t targetMachinePCForThrowOffset()
372 return OBJECT_OFFSETOF(VM
, targetMachinePCForThrow
);
375 void clearException() { m_exception
= nullptr; }
376 void clearLastException() { m_lastException
= nullptr; }
378 void setException(Exception
* exception
)
380 m_exception
= exception
;
381 m_lastException
= exception
;
384 Exception
* exception() const { return m_exception
; }
385 JSCell
** addressOfException() { return reinterpret_cast<JSCell
**>(&m_exception
); }
387 Exception
* lastException() const { return m_lastException
; }
388 JSCell
** addressOfLastException() { return reinterpret_cast<JSCell
**>(&m_lastException
); }
390 JS_EXPORT_PRIVATE
void throwException(ExecState
*, Exception
*);
391 JS_EXPORT_PRIVATE JSValue
throwException(ExecState
*, JSValue
);
392 JS_EXPORT_PRIVATE JSObject
* throwException(ExecState
*, JSObject
*);
394 void* stackPointerAtVMEntry() const { return m_stackPointerAtVMEntry
; }
395 void setStackPointerAtVMEntry(void*);
397 size_t reservedZoneSize() const { return m_reservedZoneSize
; }
398 size_t updateReservedZoneSize(size_t reservedZoneSize
);
401 void updateFTLLargestStackSize(size_t);
402 void** addressOfFTLStackLimit() { return &m_ftlStackLimit
; }
406 void* jsStackLimit() { return m_jsStackLimit
; }
407 void setJSStackLimit(void* limit
) { m_jsStackLimit
= limit
; }
409 void* stackLimit() { return m_stackLimit
; }
410 void** addressOfStackLimit() { return &m_stackLimit
; }
412 bool isSafeToRecurse(size_t neededStackInBytes
= 0) const
414 ASSERT(wtfThreadData().stack().isGrowingDownward());
415 int8_t* curr
= reinterpret_cast<int8_t*>(&curr
);
416 int8_t* limit
= reinterpret_cast<int8_t*>(m_stackLimit
);
417 return curr
>= limit
&& static_cast<size_t>(curr
- limit
) >= neededStackInBytes
;
420 void* lastStackTop() { return m_lastStackTop
; }
421 void setLastStackTop(void* lastStackTop
) { m_lastStackTop
= lastStackTop
; }
423 const ClassInfo
* const jsArrayClassInfo
;
424 const ClassInfo
* const jsFinalObjectClassInfo
;
426 JSValue hostCallReturnValue
;
427 unsigned varargsLength
;
428 ExecState
* newCallFrameReturnValue
;
429 VMEntryFrame
* vmEntryFrameForThrow
;
430 ExecState
* callFrameForThrow
;
431 void* targetMachinePCForThrow
;
432 Instruction
* targetInterpreterPCForThrow
;
433 uint32_t osrExitIndex
;
434 void* osrExitJumpDestination
;
435 Vector
<ScratchBuffer
*> scratchBuffers
;
436 size_t sizeOfLastScratchBuffer
;
438 ScratchBuffer
* scratchBufferForSize(size_t size
)
443 if (size
> sizeOfLastScratchBuffer
) {
444 // Protect against a N^2 memory usage pathology by ensuring
445 // that at worst, we get a geometric series, meaning that the
446 // total memory usage is somewhere around
447 // max(scratch buffer size) * 4.
448 sizeOfLastScratchBuffer
= size
* 2;
450 ScratchBuffer
* newBuffer
= ScratchBuffer::create(sizeOfLastScratchBuffer
);
451 RELEASE_ASSERT(newBuffer
);
452 scratchBuffers
.append(newBuffer
);
455 ScratchBuffer
* result
= scratchBuffers
.last();
456 result
->setActiveLength(0);
460 void gatherConservativeRoots(ConservativeRoots
&);
462 VMEntryScope
* entryScope
;
464 JSObject
* stringRecursionCheckFirstObject
{ nullptr };
465 HashSet
<JSObject
*> stringRecursionCheckVisitedObjects
;
467 LocalTimeOffsetCache localTimeOffsetCache
;
469 String cachedDateString
;
470 double cachedDateStringValue
;
472 std::unique_ptr
<Profiler::Database
> m_perBytecodeProfiler
;
473 RefPtr
<TypedArrayController
> m_typedArrayController
;
474 RegExpCache
* m_regExpCache
;
475 BumpPointerAllocator m_regExpAllocator
;
477 #if ENABLE(REGEXP_TRACING)
478 typedef ListHashSet
<RegExp
*> RTTraceList
;
479 RTTraceList
* m_rtTraceList
;
482 bool hasExclusiveThread() const { return m_apiLock
->hasExclusiveThread(); }
483 std::thread::id
exclusiveThread() const { return m_apiLock
->exclusiveThread(); }
484 void setExclusiveThread(std::thread::id threadId
) { m_apiLock
->setExclusiveThread(threadId
); }
486 JS_EXPORT_PRIVATE
void resetDateCache();
488 JS_EXPORT_PRIVATE
void startSampling();
489 JS_EXPORT_PRIVATE
void stopSampling();
490 JS_EXPORT_PRIVATE
void dumpSampleData(ExecState
*);
491 RegExpCache
* regExpCache() { return m_regExpCache
; }
492 #if ENABLE(REGEXP_TRACING)
493 void addRegExpToTrace(RegExp
*);
495 JS_EXPORT_PRIVATE
void dumpRegExpTrace();
497 bool isCollectorBusy() { return heap
.isBusy(); }
498 JS_EXPORT_PRIVATE
void releaseExecutableMemory();
500 #if ENABLE(GC_VALIDATION)
501 bool isInitializingObject() const;
502 void setInitializingObjectClass(const ClassInfo
*);
505 unsigned m_newStringsSinceLastHashCons
;
507 static const unsigned s_minNumberOfNewStringsToHashCons
= 100;
509 bool haveEnoughNewStringsToHashCons() { return m_newStringsSinceLastHashCons
> s_minNumberOfNewStringsToHashCons
; }
510 void resetNewStringsSinceLastHashCons() { m_newStringsSinceLastHashCons
= 0; }
512 bool currentThreadIsHoldingAPILock() const { return m_apiLock
->currentThreadIsHoldingLock(); }
514 JSLock
& apiLock() { return *m_apiLock
; }
515 CodeCache
* codeCache() { return m_codeCache
.get(); }
517 void prepareToDiscardCode();
519 JS_EXPORT_PRIVATE
void discardAllCode();
521 void registerWatchpointForImpureProperty(const Identifier
&, Watchpoint
*);
522 // FIXME: Use AtomicString once it got merged with Identifier.
523 JS_EXPORT_PRIVATE
void addImpureProperty(const String
&);
525 BuiltinExecutables
* builtinExecutables() { return m_builtinExecutables
.get(); }
527 bool enableTypeProfiler();
528 bool disableTypeProfiler();
529 TypeProfilerLog
* typeProfilerLog() { return m_typeProfilerLog
.get(); }
530 TypeProfiler
* typeProfiler() { return m_typeProfiler
.get(); }
531 JS_EXPORT_PRIVATE
void dumpTypeProfilerData();
533 FunctionHasExecutedCache
* functionHasExecutedCache() { return &m_functionHasExecutedCache
; }
535 ControlFlowProfiler
* controlFlowProfiler() { return m_controlFlowProfiler
.get(); }
536 bool enableControlFlowProfiler();
537 bool disableControlFlowProfiler();
540 friend class LLIntOffsetsExtractor
;
541 friend class ClearExceptionScope
;
542 friend class RecursiveAllocationScope
;
544 VM(VMType
, HeapType
);
545 static VM
*& sharedInstanceInternal();
546 void createNativeThunk();
548 void updateStackLimit();
550 #if ENABLE(ASSEMBLER)
551 bool m_canUseAssembler
;
557 bool m_canUseRegExpJIT
;
559 #if ENABLE(GC_VALIDATION)
560 const ClassInfo
* m_initializingObjectClass
;
562 void* m_stackPointerAtVMEntry
;
563 size_t m_reservedZoneSize
;
567 void* m_jsStackLimit
;
572 void* m_jsStackLimit
;
575 void* m_ftlStackLimit
;
576 size_t m_largestFTLStackSize
;
579 void* m_lastStackTop
;
580 Exception
* m_exception
{ nullptr };
581 Exception
* m_lastException
{ nullptr };
582 bool m_inDefineOwnProperty
;
583 std::unique_ptr
<CodeCache
> m_codeCache
;
584 LegacyProfiler
* m_enabledProfiler
;
585 std::unique_ptr
<BuiltinExecutables
> m_builtinExecutables
;
586 HashMap
<String
, RefPtr
<WatchpointSet
>> m_impurePropertyWatchpointSets
;
587 std::unique_ptr
<TypeProfiler
> m_typeProfiler
;
588 std::unique_ptr
<TypeProfilerLog
> m_typeProfilerLog
;
589 unsigned m_typeProfilerEnabledCount
;
590 FunctionHasExecutedCache m_functionHasExecutedCache
;
591 std::unique_ptr
<ControlFlowProfiler
> m_controlFlowProfiler
;
592 unsigned m_controlFlowProfilerEnabledCount
;
// NOTE(review): corrupted extraction of the GC_VALIDATION-only inline
// definitions of VM::isInitializingObject() and
// VM::setInitializingObjectClass(). The original numbering jumps
// (596 -> 598, 598 -> 601, 601 -> 603) show that the function braces, the
// matching #endif, and any trailing lines were dropped. The residue
// establishes only that isInitializingObject() returns
// !!m_initializingObjectClass and that setInitializingObjectClass() stores
// its argument into m_initializingObjectClass. Do not edit in place —
// restore these definitions from the upstream source file.
595 #if ENABLE(GC_VALIDATION)
596 inline bool VM::isInitializingObject() const
598 return !!m_initializingObjectClass
;
601 inline void VM::setInitializingObjectClass(const ClassInfo
* initializingObjectClass
)
603 m_initializingObjectClass
= initializingObjectClass
;
607 inline Heap
* WeakSet::heap() const
613 extern "C" void sanitizeStackForVMImpl(VM
*);
616 void sanitizeStackForVM(VM
*);
617 void logSanitizeStack(VM
*);