2 * Copyright (C) 2008, 2011, 2013-2015 Apple Inc. All rights reserved.
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
8 * 1. Redistributions of source code must retain the above copyright
9 * notice, this list of conditions and the following disclaimer.
10 * 2. Redistributions in binary form must reproduce the above copyright
11 * notice, this list of conditions and the following disclaimer in the
12 * documentation and/or other materials provided with the distribution.
13 * 3. Neither the name of Apple Inc. ("Apple") nor the names of
14 * its contributors may be used to endorse or promote products derived
15 * from this software without specific prior written permission.
17 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
18 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
19 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
20 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
21 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
22 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
23 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
24 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
26 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
33 #include "ArityCheckFailReturnThunks.h"
34 #include "ArrayBufferNeuteringWatchpoint.h"
35 #include "BuiltinExecutables.h"
36 #include "CodeBlock.h"
37 #include "CodeCache.h"
38 #include "CommonIdentifiers.h"
39 #include "CommonSlowPaths.h"
40 #include "CustomGetterSetter.h"
41 #include "DFGLongLivedState.h"
42 #include "DFGWorklist.h"
43 #include "Disassembler.h"
44 #include "ErrorInstance.h"
45 #include "Exception.h"
46 #include "FTLThunks.h"
47 #include "FunctionConstructor.h"
48 #include "GCActivityCallback.h"
49 #include "GetterSetter.h"
51 #include "HeapIterationScope.h"
52 #include "HostCallReturnValue.h"
53 #include "Identifier.h"
54 #include "IncrementalSweeper.h"
55 #include "Interpreter.h"
57 #include "JSAPIValueWrapper.h"
59 #include "JSCInlines.h"
60 #include "JSFunction.h"
61 #include "JSGlobalObjectFunctions.h"
62 #include "JSLexicalEnvironment.h"
64 #include "JSNameScope.h"
65 #include "JSNotAnObject.h"
66 #include "JSPromiseDeferred.h"
67 #include "JSPropertyNameEnumerator.h"
68 #include "JSTemplateRegistryKey.h"
69 #include "JSWithScope.h"
75 #include "ProfilerDatabase.h"
76 #include "PropertyMapHashTable.h"
77 #include "RegExpCache.h"
78 #include "RegExpObject.h"
79 #include "RuntimeType.h"
80 #include "SimpleTypedArrayController.h"
81 #include "SourceProviderCache.h"
82 #include "StackVisitor.h"
83 #include "StrictEvalActivation.h"
84 #include "StrongInlines.h"
85 #include "StructureInlines.h"
86 #include "TypeProfiler.h"
87 #include "TypeProfilerLog.h"
88 #include "UnlinkedCodeBlock.h"
89 #include "WeakGCMapInlines.h"
90 #include "WeakMapData.h"
91 #include <wtf/CurrentTime.h>
92 #include <wtf/ProcessID.h>
93 #include <wtf/RetainPtr.h>
94 #include <wtf/StringPrintStream.h>
95 #include <wtf/Threading.h>
96 #include <wtf/WTFThreadData.h>
97 #include <wtf/text/AtomicStringTable.h>
98 #include <wtf/text/SymbolRegistry.h>
101 #include "ConservativeRoots.h"
104 #if ENABLE(REGEXP_TRACING)
109 #include <CoreFoundation/CoreFoundation.h>
116 // Note: Platform.h will enforce that ENABLE(ASSEMBLER) is true if either
117 // ENABLE(JIT) or ENABLE(YARR_JIT) or both are enabled. The code below
118 // just checks for ENABLE(JIT) or ENABLE(YARR_JIT) with this premise in mind.
120 #if ENABLE(ASSEMBLER)
// Decides once, at VM construction, whether the assembler/JIT machinery may
// be used in this process.
// NOTE(review): several lines are missing from this extract (the early
// returns, the crash on allocation failure, and the closing braces) —
// verify the control flow against the upstream file.
121 static bool enableAssembler(ExecutableAllocator
& executableAllocator
)
// If both JIT options are off there is nothing for the assembler to do.
123 if (!Options::useJIT() && !Options::useRegExpJIT())
126 if (!executableAllocator
.isValid()) {
127 if (Options::crashIfCantAllocateJITMemory())
// The JavaScriptCoreUseJIT environment variable can veto the JIT even when
// executable memory is available (CF/Unix platforms only).
132 #if USE(CF) || OS(UNIX)
133 char* canUseJITString
= getenv("JavaScriptCoreUseJIT");
134 return !canUseJITString
|| atoi(canUseJITString
);
139 #endif // ENABLE(ASSEMBLER)
// VM constructor: builds the per-VM singletons (interpreter, heap-side
// structures, caches) and all the meta-Structures used to allocate cells.
// NOTE(review): this extract is missing many lines (closing braces, several
// #endif lines, and some initializers — the embedded original numbering has
// gaps) — treat the sequence below as partial and verify against upstream.
141 VM::VM(VMType vmType
, HeapType heapType
)
142 : m_apiLock(adoptRef(new JSLock(this)))
143 #if ENABLE(ASSEMBLER)
144 , executableAllocator(*this)
146 , heap(this, heapType
)
149 , topVMEntryFrame(nullptr)
150 , topCallFrame(CallFrame::noCaller())
// A Default VM shares the thread's atomic-string table; API VMs get their own.
151 , m_atomicStringTable(vmType
== Default
? wtfThreadData().atomicStringTable() : new AtomicStringTable
)
152 , propertyNames(nullptr)
153 , emptyList(new MarkedArgumentBuffer
)
155 , prototypeMap(*this)
156 , keywords(std::make_unique
<Keywords
>(*this))
158 , jsArrayClassInfo(JSArray::info())
159 , jsFinalObjectClassInfo(JSFinalObject::info())
160 , sizeOfLastScratchBuffer(0)
162 , m_regExpCache(new RegExpCache(this))
163 #if ENABLE(REGEXP_TRACING)
164 , m_rtTraceList(new RTTraceList())
166 , m_newStringsSinceLastHashCons(0)
167 #if ENABLE(ASSEMBLER)
168 , m_canUseAssembler(enableAssembler(executableAllocator
))
171 , m_canUseJIT(m_canUseAssembler
&& Options::useJIT())
174 , m_canUseRegExpJIT(m_canUseAssembler
&& Options::useRegExpJIT())
176 #if ENABLE(GC_VALIDATION)
177 , m_initializingObjectClass(0)
179 , m_stackPointerAtVMEntry(0)
186 , m_largestFTLStackSize(0)
188 , m_inDefineOwnProperty(false)
189 , m_codeCache(std::make_unique
<CodeCache
>())
190 , m_enabledProfiler(nullptr)
191 , m_builtinExecutables(std::make_unique
<BuiltinExecutables
>(*this))
192 , m_typeProfilerEnabledCount(0)
193 , m_controlFlowProfilerEnabledCount(0)
195 interpreter
= new Interpreter(*this);
196 StackBounds stack
= wtfThreadData().stack();
197 updateReservedZoneSize(Options::reservedZoneSize());
199 interpreter
->stack().setReservedZoneSize(Options::reservedZoneSize());
201 setLastStackTop(stack
.origin());
203 // Need to be careful to keep everything consistent here
204 JSLockHolder
lock(this);
// Temporarily install this VM's atomic-string table so the identifier and
// Structure setup below interns into the right table; restored afterwards.
205 AtomicStringTable
* existingEntryAtomicStringTable
= wtfThreadData().setCurrentAtomicStringTable(m_atomicStringTable
);
206 propertyNames
= new CommonIdentifiers(this);
// Bootstrap the meta-Structure, then one Structure per cell type.
207 structureStructure
.set(*this, Structure::createStructure(*this));
208 structureRareDataStructure
.set(*this, StructureRareData::createStructure(*this, 0, jsNull()));
209 terminatedExecutionErrorStructure
.set(*this, TerminatedExecutionError::createStructure(*this, 0, jsNull()));
210 stringStructure
.set(*this, JSString::createStructure(*this, 0, jsNull()));
211 notAnObjectStructure
.set(*this, JSNotAnObject::createStructure(*this, 0, jsNull()));
212 propertyNameEnumeratorStructure
.set(*this, JSPropertyNameEnumerator::createStructure(*this, 0, jsNull()));
213 getterSetterStructure
.set(*this, GetterSetter::createStructure(*this, 0, jsNull()));
214 customGetterSetterStructure
.set(*this, CustomGetterSetter::createStructure(*this, 0, jsNull()));
215 scopedArgumentsTableStructure
.set(*this, ScopedArgumentsTable::createStructure(*this, 0, jsNull()));
216 apiWrapperStructure
.set(*this, JSAPIValueWrapper::createStructure(*this, 0, jsNull()));
217 JSScopeStructure
.set(*this, JSScope::createStructure(*this, 0, jsNull()));
218 executableStructure
.set(*this, ExecutableBase::createStructure(*this, 0, jsNull()));
219 nativeExecutableStructure
.set(*this, NativeExecutable::createStructure(*this, 0, jsNull()));
220 evalExecutableStructure
.set(*this, EvalExecutable::createStructure(*this, 0, jsNull()));
221 programExecutableStructure
.set(*this, ProgramExecutable::createStructure(*this, 0, jsNull()));
222 functionExecutableStructure
.set(*this, FunctionExecutable::createStructure(*this, 0, jsNull()));
223 regExpStructure
.set(*this, RegExp::createStructure(*this, 0, jsNull()));
224 symbolStructure
.set(*this, Symbol::createStructure(*this, 0, jsNull()));
225 symbolTableStructure
.set(*this, SymbolTable::createStructure(*this, 0, jsNull()));
226 structureChainStructure
.set(*this, StructureChain::createStructure(*this, 0, jsNull()));
227 sparseArrayValueMapStructure
.set(*this, SparseArrayValueMap::createStructure(*this, 0, jsNull()));
228 templateRegistryKeyStructure
.set(*this, JSTemplateRegistryKey::createStructure(*this, 0, jsNull()));
229 arrayBufferNeuteringWatchpointStructure
.set(*this, ArrayBufferNeuteringWatchpoint::createStructure(*this));
230 unlinkedFunctionExecutableStructure
.set(*this, UnlinkedFunctionExecutable::createStructure(*this, 0, jsNull()));
231 unlinkedProgramCodeBlockStructure
.set(*this, UnlinkedProgramCodeBlock::createStructure(*this, 0, jsNull()));
232 unlinkedEvalCodeBlockStructure
.set(*this, UnlinkedEvalCodeBlock::createStructure(*this, 0, jsNull()));
233 unlinkedFunctionCodeBlockStructure
.set(*this, UnlinkedFunctionCodeBlock::createStructure(*this, 0, jsNull()));
234 propertyTableStructure
.set(*this, PropertyTable::createStructure(*this, 0, jsNull()));
235 weakMapDataStructure
.set(*this, WeakMapData::createStructure(*this, 0, jsNull()));
236 inferredValueStructure
.set(*this, InferredValue::createStructure(*this, 0, jsNull()));
237 functionRareDataStructure
.set(*this, FunctionRareData::createStructure(*this, 0, jsNull()));
238 exceptionStructure
.set(*this, Exception::createStructure(*this, 0, jsNull()));
240 promiseDeferredStructure
.set(*this, JSPromiseDeferred::createStructure(*this, 0, jsNull()));
242 iterationTerminator
.set(*this, JSFinalObject::create(*this, JSFinalObject::createStructure(*this, 0, jsNull(), 1)));
243 smallStrings
.initializeCommonStrings(*this);
// Restore whichever atomic-string table the thread was using before.
245 wtfThreadData().setCurrentAtomicStringTable(existingEntryAtomicStringTable
);
248 jitStubs
= std::make_unique
<JITThunks
>();
249 arityCheckFailReturnThunks
= std::make_unique
<ArityCheckFailReturnThunks
>();
251 arityCheckData
= std::make_unique
<CommonSlowPaths::ArityCheckData
>();
254 ftlThunks
= std::make_unique
<FTL::Thunks
>();
255 #endif // ENABLE(FTL_JIT)
256 interpreter
->initialize(this->canUseJIT());
260 initializeHostCallReturnValue(); // This is needed to convince the linker not to drop host call return support.
263 heap
.notifyIsSafeToCollect();
265 LLInt::Data::performAssertions(*this);
// Optional bytecode profiler: database is saved at exit when
// JSC_PROFILER_PATH is set.
267 if (Options::enableProfiler()) {
268 m_perBytecodeProfiler
= std::make_unique
<Profiler::Database
>(*this);
270 StringPrintStream pathOut
;
271 const char* profilerPath
= getenv("JSC_PROFILER_PATH");
273 pathOut
.print(profilerPath
, "/");
274 pathOut
.print("JSCProfile-", getCurrentProcessID(), "-", m_perBytecodeProfiler
->databaseID(), ".json");
275 m_perBytecodeProfiler
->registerToSaveAtExit(pathOut
.toCString().data());
280 dfgState
= std::make_unique
<DFG::LongLivedState
>();
283 // Initialize this last, as a free way of asserting that VM initialization itself
285 m_typedArrayController
= adoptRef(new SimpleTypedArrayController());
287 if (Options::enableTypeProfiler())
288 enableTypeProfiler();
289 if (Options::enableControlFlowProfiler())
290 enableControlFlowProfiler();
// NOTE(review): the VM::~VM() signature line is missing from this extract;
// the statements below are the destructor body. Several braces, #if lines
// and statements are also missing — verify against upstream.
295 // Never GC, ever again.
296 heap
.incrementDeferralDepth();
299 // Make sure concurrent compilations are done, but don't install them, since there is
300 // no point to doing so.
301 for (unsigned i
= DFG::numberOfWorklists(); i
--;) {
302 if (DFG::Worklist
* worklist
= DFG::worklistForIndexOrNull(i
)) {
303 worklist
->waitUntilAllPlansForVMAreReady(*this);
304 worklist
->removeAllReadyPlansForVM(*this);
307 #endif // ENABLE(DFG_JIT)
308 waitForAsynchronousDisassembly();
311 // Clear this first to ensure that nobody tries to remove themselves from it.
312 m_perBytecodeProfiler
= nullptr;
314 ASSERT(m_apiLock
->currentThreadIsHoldingLock());
315 m_apiLock
->willDestroyVM(this);
316 heap
.lastChanceToFinalize();
// Poison the interpreter pointer so use-after-destruction crashes loudly.
320 interpreter
= reinterpret_cast<Interpreter
*>(0xbbadbeef);
325 delete propertyNames
;
// Only non-Default VMs own their atomic-string table (see the constructor).
326 if (vmType
!= Default
)
327 delete m_atomicStringTable
;
330 delete m_regExpCache
;
331 #if ENABLE(REGEXP_TRACING)
332 delete m_rtTraceList
;
336 for (unsigned i
= 0; i
< scratchBuffers
.size(); ++i
)
337 fastFree(scratchBuffers
[i
]);
341 Ref
<VM
> VM::createContextGroup(HeapType heapType
)
343 return adoptRef(*new VM(APIContextGroup
, heapType
));
346 Ref
<VM
> VM::create(HeapType heapType
)
348 return adoptRef(*new VM(Default
, heapType
));
351 Ref
<VM
> VM::createLeaked(HeapType heapType
)
353 return create(heapType
);
356 bool VM::sharedInstanceExists()
358 return sharedInstanceInternal();
// Lazily creates and returns the process-wide shared VM (APIShared type,
// small heap) used by the C API.
// NOTE(review): the null-check around the creation and the trailing
// statements (including the return) are missing from this extract —
// verify against upstream before editing.
361 VM
& VM::sharedInstance()
363 GlobalJSLock globalLock
;
364 VM
*& instance
= sharedInstanceInternal();
// leakRef(): the shared instance intentionally lives for the whole process.
366 instance
= adoptRef(new VM(APIShared
, SmallHeap
)).leakRef();
370 VM
*& VM::sharedInstanceInternal()
372 static VM
* sharedInstance
;
373 return sharedInstance
;
// Maps a function intrinsic to the generator that emits its JIT thunk.
// NOTE(review): the surrounding switch, its default, and most of the `case`
// labels are missing from this extract — each bare `return` below belongs
// to a dropped case label. Verify against upstream.
377 static ThunkGenerator
thunkGeneratorForIntrinsic(Intrinsic intrinsic
)
380 case CharCodeAtIntrinsic
:
381 return charCodeAtThunkGenerator
;
382 case CharAtIntrinsic
:
383 return charAtThunkGenerator
;
385 return clz32ThunkGenerator
;
386 case FromCharCodeIntrinsic
:
387 return fromCharCodeThunkGenerator
;
389 return sqrtThunkGenerator
;
391 return powThunkGenerator
;
393 return absThunkGenerator
;
395 return floorThunkGenerator
;
397 return ceilThunkGenerator
;
399 return roundThunkGenerator
;
401 return expThunkGenerator
;
403 return logThunkGenerator
;
405 return imulThunkGenerator
;
411 NativeExecutable
* VM::getHostFunction(NativeFunction function
, NativeFunction constructor
)
413 return jitStubs
->hostFunctionStub(this, function
, constructor
);
// Like the two-argument overload, but selects a specialized thunk generator
// for known intrinsics (0 means "no special thunk").
// NOTE(review): one or two lines (opening brace, likely an ASSERT) are
// missing between the signature and the return — verify against upstream.
415 NativeExecutable
* VM::getHostFunction(NativeFunction function
, Intrinsic intrinsic
)
418 return jitStubs
->hostFunctionStub(this, function
, intrinsic
!= NoIntrinsic
? thunkGeneratorForIntrinsic(intrinsic
) : 0, intrinsic
);
421 #else // !ENABLE(JIT)
// Non-JIT build: wrap the LLInt call/construct trampolines as host-call
// thunk code refs.
// NOTE(review): the tail of the NativeExecutable::create(...) call (its
// remaining arguments and the closing parenthesis/brace) is missing from
// this extract.
423 NativeExecutable
* VM::getHostFunction(NativeFunction function
, NativeFunction constructor
)
425 return NativeExecutable::create(*this,
426 adoptRef(new NativeJITCode(MacroAssemblerCodeRef::createLLIntCodeRef(llint_native_call_trampoline
), JITCode::HostCallThunk
)), function
,
427 adoptRef(new NativeJITCode(MacroAssemblerCodeRef::createLLIntCodeRef(llint_native_construct_trampoline
), JITCode::HostCallThunk
)), constructor
,
431 #endif // !ENABLE(JIT)
// Out-of-line destructor for the embedder-supplied ClientData; its
// (presumably empty) body is not visible in this extract.
433 VM::ClientData::~ClientData()
437 void VM::resetDateCache()
439 localTimeOffsetCache
.reset();
440 cachedDateString
= String();
441 cachedDateStringValue
= std::numeric_limits
<double>::quiet_NaN();
442 dateInstanceCache
.reset();
445 void VM::startSampling()
447 interpreter
->startSampling();
450 void VM::stopSampling()
452 interpreter
->stopSampling();
// Drains every DFG worklist of plans belonging to this VM so compiled code
// can be discarded safely afterwards.
// NOTE(review): the enclosing #if ENABLE(DFG_JIT) line and closing braces
// are missing from this extract (the matching #endif is visible below).
455 void VM::prepareToDiscardCode()
458 for (unsigned i
= DFG::numberOfWorklists(); i
--;) {
459 if (DFG::Worklist
* worklist
= DFG::worklistForIndexOrNull(i
))
460 worklist
->completeAllPlansForVM(*this);
462 #endif // ENABLE(DFG_JIT)
465 void VM::discardAllCode()
467 prepareToDiscardCode();
468 m_codeCache
->clear();
469 m_regExpCache
->invalidateCode();
470 heap
.deleteAllCompiledCode();
471 heap
.deleteAllUnlinkedFunctionCode();
472 heap
.reportAbandonedObjectGraph();
// Dumps interpreter sampling data; with the assembler enabled also dumps
// the executable-allocator profile.
// NOTE(review): the opening brace and the #endif/closing brace are missing
// from this extract.
475 void VM::dumpSampleData(ExecState
* exec
)
477 interpreter
->dumpSampleData(exec
);
478 #if ENABLE(ASSEMBLER)
479 ExecutableAllocator::dumpProfile();
483 SourceProviderCache
* VM::addSourceProviderCache(SourceProvider
* sourceProvider
)
485 auto addResult
= sourceProviderCacheMap
.add(sourceProvider
, nullptr);
486 if (addResult
.isNewEntry
)
487 addResult
.iterator
->value
= adoptRef(new SourceProviderCache
);
488 return addResult
.iterator
->value
.get();
491 void VM::clearSourceProviderCaches()
493 sourceProviderCacheMap
.clear();
// Heap functor that clears compiled code from every FunctionExecutable that
// is NOT currently on the stack (the on-stack set is populated by the
// caller before iteration).
// NOTE(review): several lines are missing from this extract (the early
// `return`s after the two guards, the call to visit() inside operator(),
// and closing braces) — verify against upstream.
496 struct StackPreservingRecompiler
: public MarkedBlock::VoidFunctor
{
497 HashSet
<FunctionExecutable
*> currentlyExecutingFunctions
;
498 inline void visit(JSCell
* cell
)
// Only FunctionExecutables are of interest.
500 if (!cell
->inherits(FunctionExecutable::info()))
502 FunctionExecutable
* executable
= jsCast
<FunctionExecutable
*>(cell
);
// Skip executables whose frames are live on the stack.
503 if (currentlyExecutingFunctions
.contains(executable
))
505 executable
->clearCode();
507 IterationStatus
operator()(JSCell
* cell
)
510 return IterationStatus::Continue
;
// Aggressively releases JIT-compiled code: collects the executables that
// are live on the stack (so they are preserved), clears code from all
// other live FunctionExecutables, invalidates RegExp code, then GCs.
// NOTE(review): multiple lines are missing from this extract — at least
// the declaration/deref that produces `cell` from `*ptr`, null guards,
// `continue` statements, #if guards and closing braces. Verify against
// upstream before modifying.
514 void VM::releaseExecutableMemory()
516 prepareToDiscardCode();
519 StackPreservingRecompiler recompiler
;
520 HeapIterationScope
iterationScope(heap
);
// Conservative register/stack roots tell us which cells are "on stack".
521 HashSet
<JSCell
*> roots
;
522 heap
.getConservativeRegisterRoots(roots
);
523 HashSet
<JSCell
*>::iterator end
= roots
.end();
524 for (HashSet
<JSCell
*>::iterator ptr
= roots
.begin(); ptr
!= end
; ++ptr
) {
525 ScriptExecutable
* executable
= 0;
527 if (cell
->inherits(ScriptExecutable::info()))
528 executable
= static_cast<ScriptExecutable
*>(*ptr
);
529 else if (cell
->inherits(JSFunction::info())) {
530 JSFunction
* function
= jsCast
<JSFunction
*>(*ptr
);
// Host functions have no JS executable to preserve.
531 if (function
->isHostFunction())
533 executable
= function
->jsExecutable();
536 ASSERT(executable
->inherits(ScriptExecutable::info()));
537 executable
->unlinkCalls();
// Remember on-stack FunctionExecutables so the recompiler skips them.
538 if (executable
->inherits(FunctionExecutable::info()))
539 recompiler
.currentlyExecutingFunctions
.add(static_cast<FunctionExecutable
*>(executable
));
542 heap
.objectSpace().forEachLiveCell
<StackPreservingRecompiler
>(iterationScope
, recompiler
);
544 m_regExpCache
->invalidateCode();
545 heap
.collectAllGarbage();
// Records `exception` as the VM's pending exception. With breakOnThrow set,
// logs the throwing frame (and, per the dropped lines, presumably traps).
// NOTE(review): the opening brace, the body of the breakOnThrow block after
// the dataLog (likely a CRASH/breakpoint), and closing braces are missing
// from this extract.
548 void VM::throwException(ExecState
* exec
, Exception
* exception
)
550 if (Options::breakOnThrow()) {
551 dataLog("In call frame ", RawPointer(exec
), " for code block ", *exec
->codeBlock(), "\n");
// Sanity check: we may only throw from the top frame or a global exec.
555 ASSERT(exec
== topCallFrame
|| exec
== exec
->lexicalGlobalObject()->globalExec() || exec
== exec
->vmEntryGlobalObject()->globalExec());
556 setException(exception
);
// Throws an arbitrary JS value: reuses it if it already is an Exception
// cell, otherwise wraps it in a fresh Exception, then records it.
// NOTE(review): the conditional line guarding the Exception::create call
// (presumably `if (!exception)`) is missing from this extract — the
// assignment below only makes sense under that guard. Verify upstream.
559 JSValue
VM::throwException(ExecState
* exec
, JSValue thrownValue
)
561 Exception
* exception
= jsDynamicCast
<Exception
*>(thrownValue
);
563 exception
= Exception::create(*this, thrownValue
);
565 throwException(exec
, exception
);
566 return JSValue(exception
);
569 JSObject
* VM::throwException(ExecState
* exec
, JSObject
* error
)
571 return asObject(throwException(exec
, JSValue(error
)));
// Records the machine stack pointer at VM entry (used to compute stack
// limits).
// NOTE(review): one trailing line is missing from this extract — by the
// surrounding code it is presumably a call to updateStackLimit(); verify
// against upstream.
574 void VM::setStackPointerAtVMEntry(void* sp
)
576 m_stackPointerAtVMEntry
= sp
;
// Swaps in a new reserved-zone size and returns the previous one.
// NOTE(review): lines between the assignment and the return are missing
// from this extract (presumably a conditional stack-limit update).
580 size_t VM::updateReservedZoneSize(size_t reservedZoneSize
)
582 size_t oldReservedZoneSize
= m_reservedZoneSize
;
583 m_reservedZoneSize
= reservedZoneSize
;
587 return oldReservedZoneSize
;
591 // On Windows the reserved stack space consists of committed memory, a guard page, and uncommitted memory,
592 // where the guard page is a barrier between committed and uncommitted memory.
593 // When data from the guard page is read or written, the guard page is moved, and memory is committed.
594 // This is how the system grows the stack.
595 // When using the C stack on Windows we need to precommit the needed stack space.
596 // Otherwise we might crash later if we access uncommitted stack memory.
597 // This can happen if we allocate stack space larger than the page guard size (4K).
598 // The system does not get the chance to move the guard page, and commit more memory,
599 // and we crash if uncommitted memory is accessed.
600 // The MSVC compiler fixes this by inserting a call to the _chkstk() function,
601 // when needed, see http://support.microsoft.com/kb/100775.
602 // By touching every page up to the stack limit with a dummy operation,
603 // we force the system to move the guard page, and commit memory.
// Touches one byte per page from the current frame down to stackLimit.
// NOTE(review): the loop body (the dummy read/write of *p) and closing
// braces are missing from this extract.
605 static void preCommitStackMemory(void* stackLimit
)
607 const int pageSize
= 4096;
608 for (volatile char* p
= reinterpret_cast<char*>(&stackLimit
); p
> stackLimit
; p
-= pageSize
) {
// Recomputes m_stackLimit (and, in FTL builds, m_ftlStackLimit) from either
// the recorded VM-entry stack pointer or the thread's stack bounds, leaving
// room for the reserved zone; the FTL limit reserves extra headroom for the
// largest FTL frame seen so far.
// NOTE(review): the #if ENABLE(FTL_JIT)/#else/#endif scaffolding, the
// enclosing else branch, and closing braces are missing from this extract;
// the alternating assignments below belong to those dropped branches.
615 inline void VM::updateStackLimit()
618 void* lastStackLimit
= m_stackLimit
;
621 if (m_stackPointerAtVMEntry
) {
622 ASSERT(wtfThreadData().stack().isGrowingDownward());
623 char* startOfStack
= reinterpret_cast<char*>(m_stackPointerAtVMEntry
);
625 m_stackLimit
= wtfThreadData().stack().recursionLimit(startOfStack
, Options::maxPerThreadStackUsage(), m_reservedZoneSize
+ m_largestFTLStackSize
);
626 m_ftlStackLimit
= wtfThreadData().stack().recursionLimit(startOfStack
, Options::maxPerThreadStackUsage(), m_reservedZoneSize
+ 2 * m_largestFTLStackSize
);
628 m_stackLimit
= wtfThreadData().stack().recursionLimit(startOfStack
, Options::maxPerThreadStackUsage(), m_reservedZoneSize
);
632 m_stackLimit
= wtfThreadData().stack().recursionLimit(m_reservedZoneSize
+ m_largestFTLStackSize
);
633 m_ftlStackLimit
= wtfThreadData().stack().recursionLimit(m_reservedZoneSize
+ 2 * m_largestFTLStackSize
);
635 m_stackLimit
= wtfThreadData().stack().recursionLimit(m_reservedZoneSize
);
// Windows-only: commit pages down to any newly lowered limit (see
// preCommitStackMemory above); presumably inside an OS(WINDOWS) guard.
640 if (lastStackLimit
!= m_stackLimit
)
641 preCommitStackMemory(m_stackLimit
);
// Grows the high-water mark of FTL frame sizes when a bigger one is seen.
// NOTE(review): the tail of the if-block is missing from this extract —
// presumably a call to updateStackLimit() plus closing braces.
646 void VM::updateFTLLargestStackSize(size_t stackSize
)
648 if (stackSize
> m_largestFTLStackSize
) {
649 m_largestFTLStackSize
= stackSize
;
655 void releaseExecutableMemory(VM
& vm
)
657 vm
.releaseExecutableMemory();
661 void VM::gatherConservativeRoots(ConservativeRoots
& conservativeRoots
)
663 for (size_t i
= 0; i
< scratchBuffers
.size(); i
++) {
664 ScratchBuffer
* scratchBuffer
= scratchBuffers
[i
];
665 if (scratchBuffer
->activeLength()) {
666 void* bufferStart
= scratchBuffer
->dataBuffer();
667 conservativeRoots
.add(bufferStart
, static_cast<void*>(static_cast<char*>(bufferStart
) + scratchBuffer
->activeLength()));
// Verbose-mode diagnostic printed before the stack is sanitized.
// NOTE(review): the opening brace, the local whose address is printed as
// the current stack pointer (`dummy`), and the start of the dataLog(...)
// call are missing from this extract.
673 void logSanitizeStack(VM
* vm
)
675 if (Options::verboseSanitizeStack() && vm
->topCallFrame
) {
678 "Sanitizing stack with top call frame at ", RawPointer(vm
->topCallFrame
),
679 ", current stack pointer at ", RawPointer(&dummy
), ", in ",
680 pointerDump(vm
->topCallFrame
->codeBlock()), " and last code origin = ",
681 vm
->topCallFrame
->codeOrigin(), "\n");
685 #if ENABLE(REGEXP_TRACING)
// Adds a RegExp to the tracing list (REGEXP_TRACING builds only).
// NOTE(review): lines between the signature and the add() call are missing
// from this extract (opening brace and presumably a GC-protect call).
686 void VM::addRegExpToTrace(RegExp
* regExp
)
689 m_rtTraceList
->add(regExp
);
// Dumps a table of per-RegExp tracing statistics, then clears the list
// (REGEXP_TRACING builds only).
// NOTE(review): opening/closing braces and a few interior lines are
// missing from this extract.
692 void VM::dumpRegExpTrace()
694 // The first RegExp object is ignored. It is create by the RegExpPrototype ctor and not used.
695 RTTraceList::iterator iter
= ++m_rtTraceList
->begin();
697 if (iter
!= m_rtTraceList
->end()) {
698 dataLogF("\nRegExp Tracing\n");
699 dataLogF("Regular Expression 8 Bit 16 Bit match() Matches Average\n");
700 dataLogF(" <Match only / Match> JIT Addr JIT Address calls found String len\n");
701 dataLogF("----------------------------------------+----------------+----------------+----------+----------+-----------\n");
703 unsigned reCount
= 0;
705 for (; iter
!= m_rtTraceList
->end(); ++iter
, ++reCount
) {
706 (*iter
)->printTraceData();
710 dataLogF("%d Regular Expressions\n", reCount
);
713 m_rtTraceList
->clear();
// Non-tracing builds: the #else stub of dumpRegExpTrace. Its (presumably
// empty) body and the surrounding #else/#endif are not visible in this
// extract.
716 void VM::dumpRegExpTrace()
721 void VM::registerWatchpointForImpureProperty(const Identifier
& propertyName
, Watchpoint
* watchpoint
)
723 auto result
= m_impurePropertyWatchpointSets
.add(propertyName
.string(), nullptr);
724 if (result
.isNewEntry
)
725 result
.iterator
->value
= adoptRef(new WatchpointSet(IsWatched
));
726 result
.iterator
->value
->add(watchpoint
);
729 void VM::addImpureProperty(const String
& propertyName
)
731 if (RefPtr
<WatchpointSet
> watchpointSet
= m_impurePropertyWatchpointSets
.take(propertyName
))
732 watchpointSet
->fireAll("Impure property added");
// Heap functor that jettisons optimized code blocks when the legacy
// profiler is toggled (optimized code does not report profiling events).
// NOTE(review): access specifiers, the functor's return statement(s) and
// closing braces are missing from this extract.
735 class SetEnabledProfilerFunctor
{
737 bool operator()(CodeBlock
* codeBlock
)
739 if (JITCode::isOptimizingJIT(codeBlock
->jitType()))
740 codeBlock
->jettison(Profiler::JettisonDueToLegacyProfiler
);
// Installs (or clears, with nullptr) the legacy profiler. When enabling,
// waits for in-flight DFG plans and jettisons existing optimized code so
// profiling data is not silently dropped.
// NOTE(review): opening/closing braces are missing from this extract.
745 void VM::setEnabledProfiler(LegacyProfiler
* profiler
)
747 m_enabledProfiler
= profiler
;
748 if (m_enabledProfiler
) {
749 prepareToDiscardCode();
750 SetEnabledProfilerFunctor functor
;
751 heap
.forEachCodeBlock(functor
);
// Reference-counted profiler enable: runs doEnableWork only on the first
// enable and reports whether existing code must be recompiled.
// NOTE(review): the counter test/increment and the doEnableWork() call are
// missing from this extract — only the flag plumbing is visible.
755 static bool enableProfilerWithRespectToCount(unsigned& counter
, std::function
<void()> doEnableWork
)
757 bool needsToRecompile
= false;
760 needsToRecompile
= true;
764 return needsToRecompile
;
// Reference-counted profiler disable: runs doDisableWork only when the
// last enable is released; asserts against unbalanced disables.
// NOTE(review): the counter decrement/test and the doDisableWork() call are
// missing from this extract — only the flag plumbing is visible.
767 static bool disableProfilerWithRespectToCount(unsigned& counter
, std::function
<void()> doDisableWork
)
769 RELEASE_ASSERT(counter
> 0);
770 bool needsToRecompile
= false;
774 needsToRecompile
= true;
777 return needsToRecompile
;
// Enables the type profiler (counted); on first enable allocates the
// profiler and its log. Returns whether recompilation is needed.
// NOTE(review): braces and the lambda's closing `};` are missing from this
// extract.
780 bool VM::enableTypeProfiler()
782 auto enableTypeProfiler
= [this] () {
783 this->m_typeProfiler
= std::make_unique
<TypeProfiler
>();
784 this->m_typeProfilerLog
= std::make_unique
<TypeProfilerLog
>();
787 return enableProfilerWithRespectToCount(m_typeProfilerEnabledCount
, enableTypeProfiler
);
// Disables the type profiler (counted); on last disable frees the profiler
// and its log. Returns whether recompilation is needed.
// NOTE(review): braces and the lambda's closing `};` are missing from this
// extract.
790 bool VM::disableTypeProfiler()
792 auto disableTypeProfiler
= [this] () {
793 this->m_typeProfiler
.reset(nullptr);
794 this->m_typeProfilerLog
.reset(nullptr);
797 return disableProfilerWithRespectToCount(m_typeProfilerEnabledCount
, disableTypeProfiler
);
// Enables the control-flow profiler (counted); on first enable allocates
// it. Returns whether recompilation is needed.
// NOTE(review): braces and the lambda's closing `};` are missing from this
// extract.
800 bool VM::enableControlFlowProfiler()
802 auto enableControlFlowProfiler
= [this] () {
803 this->m_controlFlowProfiler
= std::make_unique
<ControlFlowProfiler
>();
806 return enableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount
, enableControlFlowProfiler
);
// Disables the control-flow profiler (counted); on last disable frees it.
// NOTE(review): braces and the lambda's closing `};` are missing from this
// extract.
809 bool VM::disableControlFlowProfiler()
811 auto disableControlFlowProfiler
= [this] () {
812 this->m_controlFlowProfiler
.reset(nullptr);
815 return disableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount
, disableControlFlowProfiler
);
// Flushes pending type-profiler log entries, then dumps collected type
// data.
// NOTE(review): the opening brace and an early-out guard (presumably
// `if (!typeProfiler()) return;`) are missing from this extract.
818 void VM::dumpTypeProfilerData()
823 typeProfilerLog()->processLogEntries(ASCIILiteral("VM Dump Types"));
824 typeProfiler()->dumpTypeProfilerData(*this);
// Scrubs dead stack memory so conservative scanning does not keep garbage
// alive; logs first when verbose sanitizing is on.
// NOTE(review): this function is truncated at the end of the visible
// extract and the conditional-compilation lines selecting between the
// interpreter-stack path and sanitizeStackForVMImpl are missing.
827 void sanitizeStackForVM(VM
* vm
)
829 logSanitizeStack(vm
);
831 vm
->interpreter
->stack().sanitizeStack();
833 sanitizeStackForVMImpl(vm
);