// runtime/VM.cpp (from JavaScriptCore-7601.1.46.3)
/*
 * Copyright (C) 2008, 2011, 2013-2015 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of Apple Inc. ("Apple") nor the names of
 *    its contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "VM.h"

#include "ArgList.h"
#include "ArityCheckFailReturnThunks.h"
#include "ArrayBufferNeuteringWatchpoint.h"
#include "BuiltinExecutables.h"
#include "CodeBlock.h"
#include "CodeCache.h"
#include "CommonIdentifiers.h"
#include "CommonSlowPaths.h"
#include "CustomGetterSetter.h"
#include "DFGLongLivedState.h"
#include "DFGWorklist.h"
#include "Disassembler.h"
#include "ErrorInstance.h"
#include "Exception.h"
#include "FTLThunks.h"
#include "FunctionConstructor.h"
#include "GCActivityCallback.h"
#include "GetterSetter.h"
#include "Heap.h"
#include "HeapIterationScope.h"
#include "HostCallReturnValue.h"
#include "Identifier.h"
#include "IncrementalSweeper.h"
#include "Interpreter.h"
#include "JITCode.h"
#include "JSAPIValueWrapper.h"
#include "JSArray.h"
#include "JSCInlines.h"
#include "JSFunction.h"
#include "JSGlobalObjectFunctions.h"
#include "JSLexicalEnvironment.h"
#include "JSLock.h"
#include "JSNameScope.h"
#include "JSNotAnObject.h"
#include "JSPromiseDeferred.h"
#include "JSPropertyNameEnumerator.h"
#include "JSTemplateRegistryKey.h"
#include "JSWithScope.h"
#include "Lexer.h"
#include "Lookup.h"
#include "MapData.h"
#include "Nodes.h"
#include "Parser.h"
#include "ProfilerDatabase.h"
#include "PropertyMapHashTable.h"
#include "RegExpCache.h"
#include "RegExpObject.h"
#include "RuntimeType.h"
#include "SimpleTypedArrayController.h"
#include "SourceProviderCache.h"
#include "StackVisitor.h"
#include "StrictEvalActivation.h"
#include "StrongInlines.h"
#include "StructureInlines.h"
#include "TypeProfiler.h"
#include "TypeProfilerLog.h"
#include "UnlinkedCodeBlock.h"
#include "WeakGCMapInlines.h"
#include "WeakMapData.h"
#include <wtf/CurrentTime.h>
#include <wtf/ProcessID.h>
#include <wtf/RetainPtr.h>
#include <wtf/StringPrintStream.h>
#include <wtf/Threading.h>
#include <wtf/WTFThreadData.h>
#include <wtf/text/AtomicStringTable.h>
#include <wtf/text/SymbolRegistry.h>

#if ENABLE(DFG_JIT)
#include "ConservativeRoots.h"
#endif

#if ENABLE(REGEXP_TRACING)
#include "RegExp.h"
#endif

#if USE(CF)
#include <CoreFoundation/CoreFoundation.h>
#endif

using namespace WTF;

namespace JSC {

// Note: Platform.h will enforce that ENABLE(ASSEMBLER) is true if either
// ENABLE(JIT) or ENABLE(YARR_JIT) or both are enabled. The code below
// just checks for ENABLE(JIT) or ENABLE(YARR_JIT) with this premise in mind.

#if ENABLE(ASSEMBLER)
static bool enableAssembler(ExecutableAllocator& executableAllocator)
{
    if (!Options::useJIT() && !Options::useRegExpJIT())
        return false;

    if (!executableAllocator.isValid()) {
        if (Options::crashIfCantAllocateJITMemory())
            CRASH();
        return false;
    }

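    // An environment variable can veto the JIT here: if JavaScriptCoreUseJIT
    // is unset or set to a non-zero value, the assembler stays enabled;
    // setting it to "0" disables it.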
#if USE(CF) || OS(UNIX)
    char* canUseJITString = getenv("JavaScriptCoreUseJIT");
    return !canUseJITString || atoi(canUseJITString);
#else
    return true;
#endif
}
#endif // ENABLE(ASSEMBLER)

VM::VM(VMType vmType, HeapType heapType)
    : m_apiLock(adoptRef(new JSLock(this)))
#if ENABLE(ASSEMBLER)
    , executableAllocator(*this)
#endif
    , heap(this, heapType)
    , vmType(vmType)
    , clientData(0)
    , topVMEntryFrame(nullptr)
    , topCallFrame(CallFrame::noCaller())
    , m_atomicStringTable(vmType == Default ? wtfThreadData().atomicStringTable() : new AtomicStringTable)
    , propertyNames(nullptr)
    , emptyList(new MarkedArgumentBuffer)
    , stringCache(*this)
    , prototypeMap(*this)
    , keywords(std::make_unique<Keywords>(*this))
    , interpreter(0)
    , jsArrayClassInfo(JSArray::info())
    , jsFinalObjectClassInfo(JSFinalObject::info())
    , sizeOfLastScratchBuffer(0)
    , entryScope(0)
    , m_regExpCache(new RegExpCache(this))
#if ENABLE(REGEXP_TRACING)
    , m_rtTraceList(new RTTraceList())
#endif
    , m_newStringsSinceLastHashCons(0)
#if ENABLE(ASSEMBLER)
    , m_canUseAssembler(enableAssembler(executableAllocator))
#endif
#if ENABLE(JIT)
    , m_canUseJIT(m_canUseAssembler && Options::useJIT())
#endif
#if ENABLE(YARR_JIT)
    , m_canUseRegExpJIT(m_canUseAssembler && Options::useRegExpJIT())
#endif
#if ENABLE(GC_VALIDATION)
    , m_initializingObjectClass(0)
#endif
    , m_stackPointerAtVMEntry(0)
    , m_stackLimit(0)
#if !ENABLE(JIT)
    , m_jsStackLimit(0)
#endif
#if ENABLE(FTL_JIT)
    , m_ftlStackLimit(0)
    , m_largestFTLStackSize(0)
#endif
    , m_inDefineOwnProperty(false)
    , m_codeCache(std::make_unique<CodeCache>())
    , m_enabledProfiler(nullptr)
    , m_builtinExecutables(std::make_unique<BuiltinExecutables>(*this))
    , m_typeProfilerEnabledCount(0)
    , m_controlFlowProfilerEnabledCount(0)
{
    interpreter = new Interpreter(*this);
    StackBounds stack = wtfThreadData().stack();
    updateReservedZoneSize(Options::reservedZoneSize());
#if !ENABLE(JIT)
    interpreter->stack().setReservedZoneSize(Options::reservedZoneSize());
#endif
    setLastStackTop(stack.origin());

    // Need to be careful to keep everything consistent here
    JSLockHolder lock(this);
    AtomicStringTable* existingEntryAtomicStringTable = wtfThreadData().setCurrentAtomicStringTable(m_atomicStringTable);
    propertyNames = new CommonIdentifiers(this);
    structureStructure.set(*this, Structure::createStructure(*this));
    structureRareDataStructure.set(*this, StructureRareData::createStructure(*this, 0, jsNull()));
    terminatedExecutionErrorStructure.set(*this, TerminatedExecutionError::createStructure(*this, 0, jsNull()));
    stringStructure.set(*this, JSString::createStructure(*this, 0, jsNull()));
    notAnObjectStructure.set(*this, JSNotAnObject::createStructure(*this, 0, jsNull()));
    propertyNameEnumeratorStructure.set(*this, JSPropertyNameEnumerator::createStructure(*this, 0, jsNull()));
    getterSetterStructure.set(*this, GetterSetter::createStructure(*this, 0, jsNull()));
    customGetterSetterStructure.set(*this, CustomGetterSetter::createStructure(*this, 0, jsNull()));
    scopedArgumentsTableStructure.set(*this, ScopedArgumentsTable::createStructure(*this, 0, jsNull()));
    apiWrapperStructure.set(*this, JSAPIValueWrapper::createStructure(*this, 0, jsNull()));
    JSScopeStructure.set(*this, JSScope::createStructure(*this, 0, jsNull()));
    executableStructure.set(*this, ExecutableBase::createStructure(*this, 0, jsNull()));
    nativeExecutableStructure.set(*this, NativeExecutable::createStructure(*this, 0, jsNull()));
    evalExecutableStructure.set(*this, EvalExecutable::createStructure(*this, 0, jsNull()));
    programExecutableStructure.set(*this, ProgramExecutable::createStructure(*this, 0, jsNull()));
    functionExecutableStructure.set(*this, FunctionExecutable::createStructure(*this, 0, jsNull()));
    regExpStructure.set(*this, RegExp::createStructure(*this, 0, jsNull()));
    symbolStructure.set(*this, Symbol::createStructure(*this, 0, jsNull()));
    symbolTableStructure.set(*this, SymbolTable::createStructure(*this, 0, jsNull()));
    structureChainStructure.set(*this, StructureChain::createStructure(*this, 0, jsNull()));
    sparseArrayValueMapStructure.set(*this, SparseArrayValueMap::createStructure(*this, 0, jsNull()));
    templateRegistryKeyStructure.set(*this, JSTemplateRegistryKey::createStructure(*this, 0, jsNull()));
    arrayBufferNeuteringWatchpointStructure.set(*this, ArrayBufferNeuteringWatchpoint::createStructure(*this));
    unlinkedFunctionExecutableStructure.set(*this, UnlinkedFunctionExecutable::createStructure(*this, 0, jsNull()));
    unlinkedProgramCodeBlockStructure.set(*this, UnlinkedProgramCodeBlock::createStructure(*this, 0, jsNull()));
    unlinkedEvalCodeBlockStructure.set(*this, UnlinkedEvalCodeBlock::createStructure(*this, 0, jsNull()));
    unlinkedFunctionCodeBlockStructure.set(*this, UnlinkedFunctionCodeBlock::createStructure(*this, 0, jsNull()));
    propertyTableStructure.set(*this, PropertyTable::createStructure(*this, 0, jsNull()));
    weakMapDataStructure.set(*this, WeakMapData::createStructure(*this, 0, jsNull()));
    inferredValueStructure.set(*this, InferredValue::createStructure(*this, 0, jsNull()));
    functionRareDataStructure.set(*this, FunctionRareData::createStructure(*this, 0, jsNull()));
    exceptionStructure.set(*this, Exception::createStructure(*this, 0, jsNull()));
#if ENABLE(PROMISES)
    promiseDeferredStructure.set(*this, JSPromiseDeferred::createStructure(*this, 0, jsNull()));
#endif
    iterationTerminator.set(*this, JSFinalObject::create(*this, JSFinalObject::createStructure(*this, 0, jsNull(), 1)));
    smallStrings.initializeCommonStrings(*this);

    wtfThreadData().setCurrentAtomicStringTable(existingEntryAtomicStringTable);

#if ENABLE(JIT)
    jitStubs = std::make_unique<JITThunks>();
    arityCheckFailReturnThunks = std::make_unique<ArityCheckFailReturnThunks>();
#endif
    arityCheckData = std::make_unique<CommonSlowPaths::ArityCheckData>();

#if ENABLE(FTL_JIT)
    ftlThunks = std::make_unique<FTL::Thunks>();
#endif // ENABLE(FTL_JIT)

    interpreter->initialize(this->canUseJIT());

#if ENABLE(JIT)
    initializeHostCallReturnValue(); // This is needed to convince the linker not to drop host call return support.
#endif

    heap.notifyIsSafeToCollect();

    LLInt::Data::performAssertions(*this);

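    // When profiling is enabled, each VM gets its own per-bytecode profiler
    // database, registered to be written out at process exit as
    // JSCProfile-<pid>-<databaseID>.json, optionally rooted at $JSC_PROFILER_PATH.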
    if (Options::enableProfiler()) {
        m_perBytecodeProfiler = std::make_unique<Profiler::Database>(*this);

        StringPrintStream pathOut;
        const char* profilerPath = getenv("JSC_PROFILER_PATH");
        if (profilerPath)
            pathOut.print(profilerPath, "/");
        pathOut.print("JSCProfile-", getCurrentProcessID(), "-", m_perBytecodeProfiler->databaseID(), ".json");
        m_perBytecodeProfiler->registerToSaveAtExit(pathOut.toCString().data());
    }

#if ENABLE(DFG_JIT)
    if (canUseJIT())
        dfgState = std::make_unique<DFG::LongLivedState>();
#endif

    // Initialize this last, as a free way of asserting that VM initialization itself
    // won't use this.
    m_typedArrayController = adoptRef(new SimpleTypedArrayController());

    if (Options::enableTypeProfiler())
        enableTypeProfiler();
    if (Options::enableControlFlowProfiler())
        enableControlFlowProfiler();
}

VM::~VM()
{
    // Never GC, ever again.
    heap.incrementDeferralDepth();

#if ENABLE(DFG_JIT)
    // Make sure concurrent compilations are done, but don't install them, since there is
    // no point to doing so.
    for (unsigned i = DFG::numberOfWorklists(); i--;) {
        if (DFG::Worklist* worklist = DFG::worklistForIndexOrNull(i)) {
            worklist->waitUntilAllPlansForVMAreReady(*this);
            worklist->removeAllReadyPlansForVM(*this);
        }
    }
#endif // ENABLE(DFG_JIT)

    waitForAsynchronousDisassembly();

    // Clear this first to ensure that nobody tries to remove themselves from it.
    m_perBytecodeProfiler = nullptr;

    ASSERT(m_apiLock->currentThreadIsHoldingLock());
    m_apiLock->willDestroyVM(this);
    heap.lastChanceToFinalize();

    delete interpreter;
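    // In debug builds, poison the interpreter pointer so that any use after
    // destruction crashes on a recognizable address.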
#ifndef NDEBUG
    interpreter = reinterpret_cast<Interpreter*>(0xbbadbeef);
#endif

    delete emptyList;

    delete propertyNames;
    if (vmType != Default)
        delete m_atomicStringTable;

    delete clientData;
    delete m_regExpCache;
#if ENABLE(REGEXP_TRACING)
    delete m_rtTraceList;
#endif

#if ENABLE(DFG_JIT)
    for (unsigned i = 0; i < scratchBuffers.size(); ++i)
        fastFree(scratchBuffers[i]);
#endif
}

Ref<VM> VM::createContextGroup(HeapType heapType)
{
    return adoptRef(*new VM(APIContextGroup, heapType));
}

Ref<VM> VM::create(HeapType heapType)
{
    return adoptRef(*new VM(Default, heapType));
}

Ref<VM> VM::createLeaked(HeapType heapType)
{
    return create(heapType);
}

bool VM::sharedInstanceExists()
{
    return sharedInstanceInternal();
}

VM& VM::sharedInstance()
{
    GlobalJSLock globalLock;
    VM*& instance = sharedInstanceInternal();
    if (!instance)
        instance = adoptRef(new VM(APIShared, SmallHeap)).leakRef();
    return *instance;
}

VM*& VM::sharedInstanceInternal()
{
    static VM* sharedInstance;
    return sharedInstance;
}

#if ENABLE(JIT)
static ThunkGenerator thunkGeneratorForIntrinsic(Intrinsic intrinsic)
{
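    // Map an intrinsic to its specialized thunk generator, or return 0 when no
    // specialized thunk exists so the caller falls back to a generic native call.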
    switch (intrinsic) {
    case CharCodeAtIntrinsic:
        return charCodeAtThunkGenerator;
    case CharAtIntrinsic:
        return charAtThunkGenerator;
    case Clz32Intrinsic:
        return clz32ThunkGenerator;
    case FromCharCodeIntrinsic:
        return fromCharCodeThunkGenerator;
    case SqrtIntrinsic:
        return sqrtThunkGenerator;
    case PowIntrinsic:
        return powThunkGenerator;
    case AbsIntrinsic:
        return absThunkGenerator;
    case FloorIntrinsic:
        return floorThunkGenerator;
    case CeilIntrinsic:
        return ceilThunkGenerator;
    case RoundIntrinsic:
        return roundThunkGenerator;
    case ExpIntrinsic:
        return expThunkGenerator;
    case LogIntrinsic:
        return logThunkGenerator;
    case IMulIntrinsic:
        return imulThunkGenerator;
    default:
        return 0;
    }
}

NativeExecutable* VM::getHostFunction(NativeFunction function, NativeFunction constructor)
{
    return jitStubs->hostFunctionStub(this, function, constructor);
}

NativeExecutable* VM::getHostFunction(NativeFunction function, Intrinsic intrinsic)
{
    ASSERT(canUseJIT());
    return jitStubs->hostFunctionStub(this, function, intrinsic != NoIntrinsic ? thunkGeneratorForIntrinsic(intrinsic) : 0, intrinsic);
}

#else // !ENABLE(JIT)

NativeExecutable* VM::getHostFunction(NativeFunction function, NativeFunction constructor)
{
    return NativeExecutable::create(*this,
        adoptRef(new NativeJITCode(MacroAssemblerCodeRef::createLLIntCodeRef(llint_native_call_trampoline), JITCode::HostCallThunk)), function,
        adoptRef(new NativeJITCode(MacroAssemblerCodeRef::createLLIntCodeRef(llint_native_construct_trampoline), JITCode::HostCallThunk)), constructor,
        NoIntrinsic);
}

#endif // !ENABLE(JIT)

VM::ClientData::~ClientData()
{
}

void VM::resetDateCache()
{
    localTimeOffsetCache.reset();
    cachedDateString = String();
    cachedDateStringValue = std::numeric_limits<double>::quiet_NaN();
    dateInstanceCache.reset();
}

void VM::startSampling()
{
    interpreter->startSampling();
}

void VM::stopSampling()
{
    interpreter->stopSampling();
}

void VM::prepareToDiscardCode()
{
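    // Complete any in-flight DFG compilations for this VM first, so that no
    // concurrent compilation thread is left referencing code we are about to
    // discard.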
#if ENABLE(DFG_JIT)
    for (unsigned i = DFG::numberOfWorklists(); i--;) {
        if (DFG::Worklist* worklist = DFG::worklistForIndexOrNull(i))
            worklist->completeAllPlansForVM(*this);
    }
#endif // ENABLE(DFG_JIT)
}

void VM::discardAllCode()
{
    prepareToDiscardCode();
    m_codeCache->clear();
    m_regExpCache->invalidateCode();
    heap.deleteAllCompiledCode();
    heap.deleteAllUnlinkedFunctionCode();
    heap.reportAbandonedObjectGraph();
}

void VM::dumpSampleData(ExecState* exec)
{
    interpreter->dumpSampleData(exec);
#if ENABLE(ASSEMBLER)
    ExecutableAllocator::dumpProfile();
#endif
}

SourceProviderCache* VM::addSourceProviderCache(SourceProvider* sourceProvider)
{
    auto addResult = sourceProviderCacheMap.add(sourceProvider, nullptr);
    if (addResult.isNewEntry)
        addResult.iterator->value = adoptRef(new SourceProviderCache);
    return addResult.iterator->value.get();
}

void VM::clearSourceProviderCaches()
{
    sourceProviderCacheMap.clear();
}

struct StackPreservingRecompiler : public MarkedBlock::VoidFunctor {
    HashSet<FunctionExecutable*> currentlyExecutingFunctions;
    inline void visit(JSCell* cell)
    {
        if (!cell->inherits(FunctionExecutable::info()))
            return;
        FunctionExecutable* executable = jsCast<FunctionExecutable*>(cell);
        if (currentlyExecutingFunctions.contains(executable))
            return;
        executable->clearCode();
    }
    IterationStatus operator()(JSCell* cell)
    {
        visit(cell);
        return IterationStatus::Continue;
    }
};

void VM::releaseExecutableMemory()
{
    prepareToDiscardCode();

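    // If we are currently executing JS (entryScope is set), code for functions
    // that may be live on the stack must survive. Gather those executables from
    // the conservative register roots so the recompiler skips them below.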
    if (entryScope) {
        StackPreservingRecompiler recompiler;
        HeapIterationScope iterationScope(heap);
        HashSet<JSCell*> roots;
        heap.getConservativeRegisterRoots(roots);
        HashSet<JSCell*>::iterator end = roots.end();
        for (HashSet<JSCell*>::iterator ptr = roots.begin(); ptr != end; ++ptr) {
            ScriptExecutable* executable = 0;
            JSCell* cell = *ptr;
            if (cell->inherits(ScriptExecutable::info()))
                executable = static_cast<ScriptExecutable*>(*ptr);
            else if (cell->inherits(JSFunction::info())) {
                JSFunction* function = jsCast<JSFunction*>(*ptr);
                if (function->isHostFunction())
                    continue;
                executable = function->jsExecutable();
            } else
                continue;
            ASSERT(executable->inherits(ScriptExecutable::info()));
            executable->unlinkCalls();
            if (executable->inherits(FunctionExecutable::info()))
                recompiler.currentlyExecutingFunctions.add(static_cast<FunctionExecutable*>(executable));
        }
        heap.objectSpace().forEachLiveCell<StackPreservingRecompiler>(iterationScope, recompiler);
    }
    m_regExpCache->invalidateCode();
    heap.collectAllGarbage();
}

void VM::throwException(ExecState* exec, Exception* exception)
{
    if (Options::breakOnThrow()) {
        dataLog("In call frame ", RawPointer(exec), " for code block ", *exec->codeBlock(), "\n");
        CRASH();
    }

    ASSERT(exec == topCallFrame || exec == exec->lexicalGlobalObject()->globalExec() || exec == exec->vmEntryGlobalObject()->globalExec());
    setException(exception);
}

JSValue VM::throwException(ExecState* exec, JSValue thrownValue)
{
    Exception* exception = jsDynamicCast<Exception*>(thrownValue);
    if (!exception)
        exception = Exception::create(*this, thrownValue);

    throwException(exec, exception);
    return JSValue(exception);
}

JSObject* VM::throwException(ExecState* exec, JSObject* error)
{
    return asObject(throwException(exec, JSValue(error)));
}

void VM::setStackPointerAtVMEntry(void* sp)
{
    m_stackPointerAtVMEntry = sp;
    updateStackLimit();
}

size_t VM::updateReservedZoneSize(size_t reservedZoneSize)
{
    size_t oldReservedZoneSize = m_reservedZoneSize;
    m_reservedZoneSize = reservedZoneSize;

    updateStackLimit();

    return oldReservedZoneSize;
}

#if PLATFORM(WIN)
// On Windows the reserved stack space consists of committed memory, a guard page, and uncommitted memory,
// where the guard page is a barrier between committed and uncommitted memory.
// When data from the guard page is read or written, the guard page is moved, and memory is committed.
// This is how the system grows the stack.
// When using the C stack on Windows we need to precommit the needed stack space.
// Otherwise we might crash later if we access uncommitted stack memory.
// This can happen if we allocate stack space larger than the page guard size (4K).
// The system does not get the chance to move the guard page, and commit more memory,
// and we crash if uncommitted memory is accessed.
// The MSVC compiler fixes this by inserting a call to the _chkstk() function,
// when needed, see http://support.microsoft.com/kb/100775.
// By touching every page up to the stack limit with a dummy operation,
// we force the system to move the guard page, and commit memory.

static void preCommitStackMemory(void* stackLimit)
{
    const int pageSize = 4096;
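    // &stackLimit is the address of a local, i.e. (roughly) the current stack
    // pointer; walk down from there to the limit, touching one byte per page
    // so the guard page keeps advancing and the pages get committed.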
    for (volatile char* p = reinterpret_cast<char*>(&stackLimit); p > stackLimit; p -= pageSize) {
        char ch = *p;
        *p = ch;
    }
}
#endif

inline void VM::updateStackLimit()
{
#if PLATFORM(WIN)
    void* lastStackLimit = m_stackLimit;
#endif

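    // With the FTL, the ordinary limit already reserves m_largestFTLStackSize
    // beyond the reserved zone, and m_ftlStackLimit reserves twice that,
    // presumably so the largest FTL frame still has headroom after passing the
    // ordinary stack check.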
    if (m_stackPointerAtVMEntry) {
        ASSERT(wtfThreadData().stack().isGrowingDownward());
        char* startOfStack = reinterpret_cast<char*>(m_stackPointerAtVMEntry);
#if ENABLE(FTL_JIT)
        m_stackLimit = wtfThreadData().stack().recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), m_reservedZoneSize + m_largestFTLStackSize);
        m_ftlStackLimit = wtfThreadData().stack().recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), m_reservedZoneSize + 2 * m_largestFTLStackSize);
#else
        m_stackLimit = wtfThreadData().stack().recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), m_reservedZoneSize);
#endif
    } else {
#if ENABLE(FTL_JIT)
        m_stackLimit = wtfThreadData().stack().recursionLimit(m_reservedZoneSize + m_largestFTLStackSize);
        m_ftlStackLimit = wtfThreadData().stack().recursionLimit(m_reservedZoneSize + 2 * m_largestFTLStackSize);
#else
        m_stackLimit = wtfThreadData().stack().recursionLimit(m_reservedZoneSize);
#endif
    }

#if PLATFORM(WIN)
    if (lastStackLimit != m_stackLimit)
        preCommitStackMemory(m_stackLimit);
#endif
}

#if ENABLE(FTL_JIT)
void VM::updateFTLLargestStackSize(size_t stackSize)
{
    if (stackSize > m_largestFTLStackSize) {
        m_largestFTLStackSize = stackSize;
        updateStackLimit();
    }
}
#endif

void releaseExecutableMemory(VM& vm)
{
    vm.releaseExecutableMemory();
}

#if ENABLE(DFG_JIT)
void VM::gatherConservativeRoots(ConservativeRoots& conservativeRoots)
{
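    // The active portion of a scratch buffer may hold JSValues, so report it
    // as a conservative root range to keep those values alive.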
    for (size_t i = 0; i < scratchBuffers.size(); i++) {
        ScratchBuffer* scratchBuffer = scratchBuffers[i];
        if (scratchBuffer->activeLength()) {
            void* bufferStart = scratchBuffer->dataBuffer();
            conservativeRoots.add(bufferStart, static_cast<void*>(static_cast<char*>(bufferStart) + scratchBuffer->activeLength()));
        }
    }
}
#endif

void logSanitizeStack(VM* vm)
{
    if (Options::verboseSanitizeStack() && vm->topCallFrame) {
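        // The address of this local approximates the current stack pointer.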
        int dummy;
        dataLog(
            "Sanitizing stack with top call frame at ", RawPointer(vm->topCallFrame),
            ", current stack pointer at ", RawPointer(&dummy), ", in ",
            pointerDump(vm->topCallFrame->codeBlock()), " and last code origin = ",
            vm->topCallFrame->codeOrigin(), "\n");
    }
}

#if ENABLE(REGEXP_TRACING)
void VM::addRegExpToTrace(RegExp* regExp)
{
    gcProtect(regExp);
    m_rtTraceList->add(regExp);
}

void VM::dumpRegExpTrace()
{
    // The first RegExp object is ignored. It is created by the RegExpPrototype
    // constructor and is not used.
    RTTraceList::iterator iter = ++m_rtTraceList->begin();

    if (iter != m_rtTraceList->end()) {
        dataLogF("\nRegExp Tracing\n");
        dataLogF("Regular Expression                            8 Bit            16 Bit        match()     Matches    Average\n");
        dataLogF(" <Match only / Match>                       JIT Addr        JIT Address       calls       found    String len\n");
        dataLogF("----------------------------------------+----------------+----------------+----------+----------+-----------\n");

        unsigned reCount = 0;

        for (; iter != m_rtTraceList->end(); ++iter, ++reCount) {
            (*iter)->printTraceData();
            gcUnprotect(*iter);
        }

        dataLogF("%d Regular Expressions\n", reCount);
    }

    m_rtTraceList->clear();
}
#else
void VM::dumpRegExpTrace()
{
}
#endif

void VM::registerWatchpointForImpureProperty(const Identifier& propertyName, Watchpoint* watchpoint)
{
    auto result = m_impurePropertyWatchpointSets.add(propertyName.string(), nullptr);
    if (result.isNewEntry)
        result.iterator->value = adoptRef(new WatchpointSet(IsWatched));
    result.iterator->value->add(watchpoint);
}

void VM::addImpureProperty(const String& propertyName)
{
    if (RefPtr<WatchpointSet> watchpointSet = m_impurePropertyWatchpointSets.take(propertyName))
        watchpointSet->fireAll("Impure property added");
}

class SetEnabledProfilerFunctor {
public:
    bool operator()(CodeBlock* codeBlock)
    {
        if (JITCode::isOptimizingJIT(codeBlock->jitType()))
            codeBlock->jettison(Profiler::JettisonDueToLegacyProfiler);
        return false;
    }
};

void VM::setEnabledProfiler(LegacyProfiler* profiler)
{
    m_enabledProfiler = profiler;
    if (m_enabledProfiler) {
        prepareToDiscardCode();
        SetEnabledProfilerFunctor functor;
        heap.forEachCodeBlock(functor);
    }
}

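// The type and control-flow profilers are enabled with a refcount: the real
// enable/disable work runs only on the 0 -> 1 and 1 -> 0 transitions, and the
// return value tells the caller whether existing code must be recompiled.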
static bool enableProfilerWithRespectToCount(unsigned& counter, std::function<void()> doEnableWork)
{
    bool needsToRecompile = false;
    if (!counter) {
        doEnableWork();
        needsToRecompile = true;
    }
    counter++;

    return needsToRecompile;
}

static bool disableProfilerWithRespectToCount(unsigned& counter, std::function<void()> doDisableWork)
{
    RELEASE_ASSERT(counter > 0);
    bool needsToRecompile = false;
    counter--;
    if (!counter) {
        doDisableWork();
        needsToRecompile = true;
    }

    return needsToRecompile;
}

bool VM::enableTypeProfiler()
{
    auto enableTypeProfiler = [this] () {
        this->m_typeProfiler = std::make_unique<TypeProfiler>();
        this->m_typeProfilerLog = std::make_unique<TypeProfilerLog>();
    };

    return enableProfilerWithRespectToCount(m_typeProfilerEnabledCount, enableTypeProfiler);
}

bool VM::disableTypeProfiler()
{
    auto disableTypeProfiler = [this] () {
        this->m_typeProfiler.reset(nullptr);
        this->m_typeProfilerLog.reset(nullptr);
    };

    return disableProfilerWithRespectToCount(m_typeProfilerEnabledCount, disableTypeProfiler);
}

bool VM::enableControlFlowProfiler()
{
    auto enableControlFlowProfiler = [this] () {
        this->m_controlFlowProfiler = std::make_unique<ControlFlowProfiler>();
    };

    return enableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, enableControlFlowProfiler);
}

bool VM::disableControlFlowProfiler()
{
    auto disableControlFlowProfiler = [this] () {
        this->m_controlFlowProfiler.reset(nullptr);
    };

    return disableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, disableControlFlowProfiler);
}

void VM::dumpTypeProfilerData()
{
    if (!typeProfiler())
        return;

    typeProfilerLog()->processLogEntries(ASCIILiteral("VM Dump Types"));
    typeProfiler()->dumpTypeProfilerData(*this);
}

void sanitizeStackForVM(VM* vm)
{
    logSanitizeStack(vm);
#if !ENABLE(JIT)
    vm->interpreter->stack().sanitizeStack();
#else
    sanitizeStackForVMImpl(vm);
#endif
}

} // namespace JSC