/*
 * Copyright (C) 2008, 2011, 2013, 2014 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of Apple Inc. ("Apple") nor the names of
 *    its contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "ArityCheckFailReturnThunks.h"
#include "ArrayBufferNeuteringWatchpoint.h"
#include "BuiltinExecutables.h"
#include "CodeBlock.h"
#include "CodeCache.h"
#include "CommonIdentifiers.h"
#include "CommonSlowPaths.h"
#include "CustomGetterSetter.h"
#include "DFGLongLivedState.h"
#include "DFGWorklist.h"
#include "DebuggerActivation.h"
#include "ErrorInstance.h"
#include "FTLThunks.h"
#include "FunctionConstructor.h"
#include "GCActivityCallback.h"
#include "GetterSetter.h"
#include "HeapIterationScope.h"
#include "HostCallReturnValue.h"
#include "Identifier.h"
#include "IncrementalSweeper.h"
#include "Interpreter.h"
#include "JSAPIValueWrapper.h"
#include "JSActivation.h"
#include "JSCInlines.h"
#include "JSFunction.h"
#include "JSGlobalObjectFunctions.h"
#include "JSNameScope.h"
#include "JSNotAnObject.h"
#include "JSPromiseDeferred.h"
#include "JSPromiseReaction.h"
#include "JSPropertyNameIterator.h"
#include "JSWithScope.h"
#include "ParserArena.h"
#include "ProfilerDatabase.h"
#include "PropertyMapHashTable.h"
#include "RegExpCache.h"
#include "RegExpObject.h"
#include "SimpleTypedArrayController.h"
#include "SourceProviderCache.h"
#include "StrictEvalActivation.h"
#include "StrongInlines.h"
#include "StructureInlines.h"
#include "UnlinkedCodeBlock.h"
#include "WeakMapData.h"
#include <wtf/ProcessID.h>
#include <wtf/RetainPtr.h>
#include <wtf/StringPrintStream.h>
#include <wtf/Threading.h>
#include <wtf/WTFThreadData.h>
#include <wtf/text/AtomicStringTable.h>
#include "ConservativeRoots.h"

#if ENABLE(REGEXP_TRACING)
#include "RegExp.h"
#endif

#if USE(CF)
#include <CoreFoundation/CoreFoundation.h>
#endif

namespace JSC {

extern const HashTable arrayConstructorTable;
extern const HashTable arrayPrototypeTable;
extern const HashTable booleanPrototypeTable;
extern const HashTable jsonTable;
extern const HashTable dataViewTable;
extern const HashTable dateTable;
extern const HashTable dateConstructorTable;
extern const HashTable errorPrototypeTable;
extern const HashTable globalObjectTable;
extern const HashTable numberConstructorTable;
extern const HashTable numberPrototypeTable;
JS_EXPORTDATA extern const HashTable objectConstructorTable;
extern const HashTable privateNamePrototypeTable;
extern const HashTable regExpTable;
extern const HashTable regExpConstructorTable;
extern const HashTable regExpPrototypeTable;
extern const HashTable stringConstructorTable;
extern const HashTable promisePrototypeTable;
extern const HashTable promiseConstructorTable;

// Note: Platform.h will enforce that ENABLE(ASSEMBLER) is true if either
// ENABLE(JIT) or ENABLE(YARR_JIT) or both are enabled. The code below
// just checks for ENABLE(JIT) or ENABLE(YARR_JIT) with this premise in mind.

#if ENABLE(ASSEMBLER)
static bool enableAssembler(ExecutableAllocator& executableAllocator)
{
    if (!Options::useJIT() && !Options::useRegExpJIT())
        return false;

    if (!executableAllocator.isValid()) {
        if (Options::crashIfCantAllocateJITMemory())
            CRASH();
        return false;
    }

#if USE(CF)
    CFStringRef canUseJITKey = CFSTR("JavaScriptCoreUseJIT");
    RetainPtr<CFTypeRef> canUseJIT = adoptCF(CFPreferencesCopyAppValue(canUseJITKey, kCFPreferencesCurrentApplication));
    if (canUseJIT)
        return kCFBooleanTrue == canUseJIT.get();
#endif

#if USE(CF) || OS(UNIX)
    char* canUseJITString = getenv("JavaScriptCoreUseJIT");
    return !canUseJITString || atoi(canUseJITString);
#else
    return true;
#endif
}
#endif // ENABLE(ASSEMBLER)
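
// For example (illustrative of the getenv()/atoi() gating above, not code from this file),
// a process that links against JavaScriptCore can be launched as
//     JavaScriptCoreUseJIT=0 ./MyApp     (assembler stays disabled)
//     JavaScriptCoreUseJIT=1 ./MyApp     (assembler allowed, still subject to Options::useJIT())
// where "MyApp" is a placeholder for any embedder; on CF platforms the
// "JavaScriptCoreUseJIT" preference checked above takes precedence.
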
VM::VM(VMType vmType, HeapType heapType)
    : m_apiLock(adoptRef(new JSLock(this)))
#if ENABLE(ASSEMBLER)
    , executableAllocator(*this)
#endif
    , heap(this, heapType)
    , topCallFrame(CallFrame::noCaller())
    , arrayConstructorTable(adoptPtr(new HashTable(JSC::arrayConstructorTable)))
    , arrayPrototypeTable(adoptPtr(new HashTable(JSC::arrayPrototypeTable)))
    , booleanPrototypeTable(adoptPtr(new HashTable(JSC::booleanPrototypeTable)))
    , dataViewTable(adoptPtr(new HashTable(JSC::dataViewTable)))
    , dateTable(adoptPtr(new HashTable(JSC::dateTable)))
    , dateConstructorTable(adoptPtr(new HashTable(JSC::dateConstructorTable)))
    , errorPrototypeTable(adoptPtr(new HashTable(JSC::errorPrototypeTable)))
    , globalObjectTable(adoptPtr(new HashTable(JSC::globalObjectTable)))
    , jsonTable(adoptPtr(new HashTable(JSC::jsonTable)))
    , numberConstructorTable(adoptPtr(new HashTable(JSC::numberConstructorTable)))
    , numberPrototypeTable(adoptPtr(new HashTable(JSC::numberPrototypeTable)))
    , objectConstructorTable(adoptPtr(new HashTable(JSC::objectConstructorTable)))
    , privateNamePrototypeTable(adoptPtr(new HashTable(JSC::privateNamePrototypeTable)))
    , regExpTable(adoptPtr(new HashTable(JSC::regExpTable)))
    , regExpConstructorTable(adoptPtr(new HashTable(JSC::regExpConstructorTable)))
    , regExpPrototypeTable(adoptPtr(new HashTable(JSC::regExpPrototypeTable)))
    , stringConstructorTable(adoptPtr(new HashTable(JSC::stringConstructorTable)))
    , promisePrototypeTable(adoptPtr(new HashTable(JSC::promisePrototypeTable)))
    , promiseConstructorTable(adoptPtr(new HashTable(JSC::promiseConstructorTable)))
    , m_atomicStringTable(vmType == Default ? wtfThreadData().atomicStringTable() : new AtomicStringTable)
    , propertyNames(nullptr)
    , emptyList(new MarkedArgumentBuffer)
    , parserArena(adoptPtr(new ParserArena))
    , keywords(adoptPtr(new Keywords(*this)))
    , jsArrayClassInfo(JSArray::info())
    , jsFinalObjectClassInfo(JSFinalObject::info())
    , sizeOfLastScratchBuffer(0)
    , m_regExpCache(new RegExpCache(this))
#if ENABLE(REGEXP_TRACING)
    , m_rtTraceList(new RTTraceList())
#endif
    , m_newStringsSinceLastHashCons(0)
#if ENABLE(ASSEMBLER)
    , m_canUseAssembler(enableAssembler(executableAllocator))
    , m_canUseJIT(m_canUseAssembler && Options::useJIT())
    , m_canUseRegExpJIT(m_canUseAssembler && Options::useRegExpJIT())
#endif
#if ENABLE(GC_VALIDATION)
    , m_initializingObjectClass(0)
#endif
    , m_stackPointerAtVMEntry(0)
    , m_largestFTLStackSize(0)
    , m_inDefineOwnProperty(false)
    , m_codeCache(CodeCache::create())
    , m_enabledProfiler(nullptr)
    , m_builtinExecutables(BuiltinExecutables::create(*this))
{
    interpreter = new Interpreter(*this);
    StackBounds stack = wtfThreadData().stack();
    updateReservedZoneSize(Options::reservedZoneSize());
    interpreter->stack().setReservedZoneSize(Options::reservedZoneSize());
    setLastStackTop(stack.origin());

    // Need to be careful to keep everything consistent here.
    JSLockHolder lock(this);
    AtomicStringTable* existingEntryAtomicStringTable = wtfThreadData().setCurrentAtomicStringTable(m_atomicStringTable);
    propertyNames = new CommonIdentifiers(this);
    structureStructure.set(*this, Structure::createStructure(*this));
    structureRareDataStructure.set(*this, StructureRareData::createStructure(*this, 0, jsNull()));
    debuggerActivationStructure.set(*this, DebuggerActivation::createStructure(*this, 0, jsNull()));
    terminatedExecutionErrorStructure.set(*this, TerminatedExecutionError::createStructure(*this, 0, jsNull()));
    stringStructure.set(*this, JSString::createStructure(*this, 0, jsNull()));
    notAnObjectStructure.set(*this, JSNotAnObject::createStructure(*this, 0, jsNull()));
    propertyNameIteratorStructure.set(*this, JSPropertyNameIterator::createStructure(*this, 0, jsNull()));
    getterSetterStructure.set(*this, GetterSetter::createStructure(*this, 0, jsNull()));
    customGetterSetterStructure.set(*this, CustomGetterSetter::createStructure(*this, 0, jsNull()));
    apiWrapperStructure.set(*this, JSAPIValueWrapper::createStructure(*this, 0, jsNull()));
    JSScopeStructure.set(*this, JSScope::createStructure(*this, 0, jsNull()));
    executableStructure.set(*this, ExecutableBase::createStructure(*this, 0, jsNull()));
    nativeExecutableStructure.set(*this, NativeExecutable::createStructure(*this, 0, jsNull()));
    evalExecutableStructure.set(*this, EvalExecutable::createStructure(*this, 0, jsNull()));
    programExecutableStructure.set(*this, ProgramExecutable::createStructure(*this, 0, jsNull()));
    functionExecutableStructure.set(*this, FunctionExecutable::createStructure(*this, 0, jsNull()));
    regExpStructure.set(*this, RegExp::createStructure(*this, 0, jsNull()));
    symbolTableStructure.set(*this, SymbolTable::createStructure(*this, 0, jsNull()));
    structureChainStructure.set(*this, StructureChain::createStructure(*this, 0, jsNull()));
    sparseArrayValueMapStructure.set(*this, SparseArrayValueMap::createStructure(*this, 0, jsNull()));
    arrayBufferNeuteringWatchpointStructure.set(*this, ArrayBufferNeuteringWatchpoint::createStructure(*this));
    withScopeStructure.set(*this, JSWithScope::createStructure(*this, 0, jsNull()));
    unlinkedFunctionExecutableStructure.set(*this, UnlinkedFunctionExecutable::createStructure(*this, 0, jsNull()));
    unlinkedProgramCodeBlockStructure.set(*this, UnlinkedProgramCodeBlock::createStructure(*this, 0, jsNull()));
    unlinkedEvalCodeBlockStructure.set(*this, UnlinkedEvalCodeBlock::createStructure(*this, 0, jsNull()));
    unlinkedFunctionCodeBlockStructure.set(*this, UnlinkedFunctionCodeBlock::createStructure(*this, 0, jsNull()));
    propertyTableStructure.set(*this, PropertyTable::createStructure(*this, 0, jsNull()));
    mapDataStructure.set(*this, MapData::createStructure(*this, 0, jsNull()));
    weakMapDataStructure.set(*this, WeakMapData::createStructure(*this, 0, jsNull()));
    promiseDeferredStructure.set(*this, JSPromiseDeferred::createStructure(*this, 0, jsNull()));
    promiseReactionStructure.set(*this, JSPromiseReaction::createStructure(*this, 0, jsNull()));
    iterationTerminator.set(*this, JSFinalObject::create(*this, JSFinalObject::createStructure(*this, 0, jsNull(), 1)));
    smallStrings.initializeCommonStrings(*this);

    wtfThreadData().setCurrentAtomicStringTable(existingEntryAtomicStringTable);

    jitStubs = adoptPtr(new JITThunks());
    arityCheckFailReturnThunks = std::make_unique<ArityCheckFailReturnThunks>();
    arityCheckData = std::make_unique<CommonSlowPaths::ArityCheckData>();

#if ENABLE(FTL_JIT)
    ftlThunks = std::make_unique<FTL::Thunks>();
#endif // ENABLE(FTL_JIT)

    interpreter->initialize(this->canUseJIT());

    initializeHostCallReturnValue(); // This is needed to convince the linker not to drop host call return support.

    heap.notifyIsSafeToCollect();

    LLInt::Data::performAssertions(*this);

    if (Options::enableProfiler()) {
        m_perBytecodeProfiler = adoptPtr(new Profiler::Database(*this));

        StringPrintStream pathOut;
        const char* profilerPath = getenv("JSC_PROFILER_PATH");
        if (profilerPath)
            pathOut.print(profilerPath, "/");
        pathOut.print("JSCProfile-", getCurrentProcessID(), "-", m_perBytecodeProfiler->databaseID(), ".json");
        m_perBytecodeProfiler->registerToSaveAtExit(pathOut.toCString().data());
    }

    dfgState = adoptPtr(new DFG::LongLivedState());

    // Initialize this last, as a free way of asserting that VM initialization itself
    // worked (i.e. didn't crash).
    m_typedArrayController = adoptRef(new SimpleTypedArrayController());
}

VM::~VM()
{
    // Never GC, ever again.
    heap.incrementDeferralDepth();

#if ENABLE(DFG_JIT)
    // Make sure concurrent compilations are done, but don't install them, since there is
    // no point to doing so.
    for (unsigned i = DFG::numberOfWorklists(); i--;) {
        if (DFG::Worklist* worklist = DFG::worklistForIndexOrNull(i)) {
            worklist->waitUntilAllPlansForVMAreReady(*this);
            worklist->removeAllReadyPlansForVM(*this);
        }
    }
#endif // ENABLE(DFG_JIT)

    // Clear this first to ensure that nobody tries to remove themselves from it.
    m_perBytecodeProfiler.clear();

    ASSERT(m_apiLock->currentThreadIsHoldingLock());
    m_apiLock->willDestroyVM(this);
    heap.lastChanceToFinalize();

    interpreter = reinterpret_cast<Interpreter*>(0xbbadbeef);

    arrayPrototypeTable->deleteTable();
    arrayConstructorTable->deleteTable();
    booleanPrototypeTable->deleteTable();
    dataViewTable->deleteTable();
    dateTable->deleteTable();
    dateConstructorTable->deleteTable();
    errorPrototypeTable->deleteTable();
    globalObjectTable->deleteTable();
    jsonTable->deleteTable();
    numberConstructorTable->deleteTable();
    numberPrototypeTable->deleteTable();
    objectConstructorTable->deleteTable();
    privateNamePrototypeTable->deleteTable();
    regExpTable->deleteTable();
    regExpConstructorTable->deleteTable();
    regExpPrototypeTable->deleteTable();
    stringConstructorTable->deleteTable();
    promisePrototypeTable->deleteTable();
    promiseConstructorTable->deleteTable();

    delete propertyNames;
    if (vmType != Default)
        delete m_atomicStringTable;

    delete m_regExpCache;
#if ENABLE(REGEXP_TRACING)
    delete m_rtTraceList;
#endif

    for (unsigned i = 0; i < scratchBuffers.size(); ++i)
        fastFree(scratchBuffers[i]);
}

PassRefPtr<VM> VM::createContextGroup(HeapType heapType)
{
    return adoptRef(new VM(APIContextGroup, heapType));
}

PassRefPtr<VM> VM::create(HeapType heapType)
{
    return adoptRef(new VM(Default, heapType));
}

PassRefPtr<VM> VM::createLeaked(HeapType heapType)
{
    return create(heapType);
}
bool VM::sharedInstanceExists()
{
    return sharedInstanceInternal();
}

VM& VM::sharedInstance()
{
    GlobalJSLock globalLock;
    VM*& instance = sharedInstanceInternal();
    if (!instance) {
        instance = adoptRef(new VM(APIShared, SmallHeap)).leakRef();
        instance->makeUsableFromMultipleThreads();
    }
    return *instance;
}

VM*& VM::sharedInstanceInternal()
{
    static VM* sharedInstance;
    return sharedInstance;
}

#if ENABLE(JIT)
static ThunkGenerator thunkGeneratorForIntrinsic(Intrinsic intrinsic)
{
    switch (intrinsic) {
    case CharCodeAtIntrinsic:
        return charCodeAtThunkGenerator;
    case CharAtIntrinsic:
        return charAtThunkGenerator;
    case FromCharCodeIntrinsic:
        return fromCharCodeThunkGenerator;
    case SqrtIntrinsic:
        return sqrtThunkGenerator;
    case PowIntrinsic:
        return powThunkGenerator;
    case AbsIntrinsic:
        return absThunkGenerator;
    case FloorIntrinsic:
        return floorThunkGenerator;
    case CeilIntrinsic:
        return ceilThunkGenerator;
    case RoundIntrinsic:
        return roundThunkGenerator;
    case ExpIntrinsic:
        return expThunkGenerator;
    case LogIntrinsic:
        return logThunkGenerator;
    case IMulIntrinsic:
        return imulThunkGenerator;
    case ArrayIteratorNextKeyIntrinsic:
        return arrayIteratorNextKeyThunkGenerator;
    case ArrayIteratorNextValueIntrinsic:
        return arrayIteratorNextValueThunkGenerator;
    default:
        return 0;
    }
}

NativeExecutable* VM::getHostFunction(NativeFunction function, NativeFunction constructor)
{
    return jitStubs->hostFunctionStub(this, function, constructor);
}

NativeExecutable* VM::getHostFunction(NativeFunction function, Intrinsic intrinsic)
{
    return jitStubs->hostFunctionStub(this, function, intrinsic != NoIntrinsic ? thunkGeneratorForIntrinsic(intrinsic) : 0, intrinsic);
}

#else // !ENABLE(JIT)

NativeExecutable* VM::getHostFunction(NativeFunction function, NativeFunction constructor)
{
    return NativeExecutable::create(*this,
        adoptRef(new NativeJITCode(MacroAssemblerCodeRef::createLLIntCodeRef(llint_native_call_trampoline), JITCode::HostCallThunk)), function,
        adoptRef(new NativeJITCode(MacroAssemblerCodeRef::createLLIntCodeRef(llint_native_construct_trampoline), JITCode::HostCallThunk)), constructor,
        NoIntrinsic);
}

#endif // !ENABLE(JIT)
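
// Either way, a caller obtains a NativeExecutable for a host callback in the same manner,
// e.g. (sketch; "myCallFunction" and "myConstructFunction" are placeholder NativeFunction
// callbacks, not names from this file):
//
//     NativeExecutable* executable = vm.getHostFunction(myCallFunction, myConstructFunction);
//
// With the JIT enabled this hands back a thunk-backed stub from jitStubs; without it, the
// LLInt trampolines shown above are wrapped instead.
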
VM::ClientData::~ClientData()
{
}

void VM::resetDateCache()
{
    localTimeOffsetCache.reset();
    cachedDateString = String();
    cachedDateStringValue = std::numeric_limits<double>::quiet_NaN();
    dateInstanceCache.reset();
}

void VM::startSampling()
{
    interpreter->startSampling();
}

void VM::stopSampling()
{
    interpreter->stopSampling();
}

void VM::waitForCompilationsToComplete()
{
#if ENABLE(DFG_JIT)
    for (unsigned i = DFG::numberOfWorklists(); i--;) {
        if (DFG::Worklist* worklist = DFG::worklistForIndexOrNull(i))
            worklist->completeAllPlansForVM(*this);
    }
#endif // ENABLE(DFG_JIT)
}

void VM::discardAllCode()
{
    waitForCompilationsToComplete();
    m_codeCache->clear();
    m_regExpCache->invalidateCode();
    heap.deleteAllCompiledCode();
    heap.deleteAllUnlinkedFunctionCode();
    heap.reportAbandonedObjectGraph();
}

void VM::dumpSampleData(ExecState* exec)
{
    interpreter->dumpSampleData(exec);
#if ENABLE(ASSEMBLER)
    ExecutableAllocator::dumpProfile();
#endif
}

SourceProviderCache* VM::addSourceProviderCache(SourceProvider* sourceProvider)
{
    auto addResult = sourceProviderCacheMap.add(sourceProvider, nullptr);
    if (addResult.isNewEntry)
        addResult.iterator->value = adoptRef(new SourceProviderCache);
    return addResult.iterator->value.get();
}

void VM::clearSourceProviderCaches()
{
    sourceProviderCacheMap.clear();
}

struct StackPreservingRecompiler : public MarkedBlock::VoidFunctor {
    HashSet<FunctionExecutable*> currentlyExecutingFunctions;
    void operator()(JSCell* cell)
    {
        if (!cell->inherits(FunctionExecutable::info()))
            return;
        FunctionExecutable* executable = jsCast<FunctionExecutable*>(cell);
        if (currentlyExecutingFunctions.contains(executable))
            return;
        executable->clearCodeIfNotCompiling();
    }
};

void VM::releaseExecutableMemory()
{
    waitForCompilationsToComplete();

    StackPreservingRecompiler recompiler;
    HeapIterationScope iterationScope(heap);
    HashSet<JSCell*> roots;
    heap.getConservativeRegisterRoots(roots);
    HashSet<JSCell*>::iterator end = roots.end();
    for (HashSet<JSCell*>::iterator ptr = roots.begin(); ptr != end; ++ptr) {
        ScriptExecutable* executable = 0;
        JSCell* cell = *ptr;
        if (cell->inherits(ScriptExecutable::info()))
            executable = static_cast<ScriptExecutable*>(*ptr);
        else if (cell->inherits(JSFunction::info())) {
            JSFunction* function = jsCast<JSFunction*>(*ptr);
            if (function->isHostFunction())
                continue;
            executable = function->jsExecutable();
        } else
            continue;
        ASSERT(executable->inherits(ScriptExecutable::info()));
        executable->unlinkCalls();
        if (executable->inherits(FunctionExecutable::info()))
            recompiler.currentlyExecutingFunctions.add(static_cast<FunctionExecutable*>(executable));
    }
    heap.objectSpace().forEachLiveCell<StackPreservingRecompiler>(iterationScope, recompiler);

    m_regExpCache->invalidateCode();
    heap.collectAllGarbage();
}

static void appendSourceToError(CallFrame* callFrame, ErrorInstance* exception, unsigned bytecodeOffset)
{
    exception->clearAppendSourceToMessage();

    if (!callFrame->codeBlock()->hasExpressionInfo())
        return;

    int startOffset = 0;
    int endOffset = 0;
    int divotPoint = 0;
    unsigned line = 0;
    unsigned column = 0;

    CodeBlock* codeBlock = callFrame->codeBlock();
    codeBlock->expressionRangeForBytecodeOffset(bytecodeOffset, divotPoint, startOffset, endOffset, line, column);

    int expressionStart = divotPoint - startOffset;
    int expressionStop = divotPoint + endOffset;

    const String& sourceString = codeBlock->source()->source();
    if (!expressionStop || expressionStart > static_cast<int>(sourceString.length()))
        return;

    VM* vm = &callFrame->vm();
    JSValue jsMessage = exception->getDirect(*vm, vm->propertyNames->message);
    if (!jsMessage || !jsMessage.isString())
        return;

    String message = asString(jsMessage)->value(callFrame);

    if (expressionStart < expressionStop)
        message = makeString(message, " (evaluating '", codeBlock->source()->getRange(expressionStart, expressionStop), "')");
    else {
        // No range information, so give a few characters of context.
        const StringImpl* data = sourceString.impl();
        int dataLength = sourceString.length();
        int start = expressionStart;
        int stop = expressionStart;
        // Get up to 20 characters of context to the left and right of the divot, clamping to the line.
        // Then strip whitespace.
        while (start > 0 && (expressionStart - start < 20) && (*data)[start - 1] != '\n')
            start--;
        while (start < (expressionStart - 1) && isStrWhiteSpace((*data)[start]))
            start++;
        while (stop < dataLength && (stop - expressionStart < 20) && (*data)[stop] != '\n')
            stop++;
        while (stop > expressionStart && isStrWhiteSpace((*data)[stop - 1]))
            stop--;
        message = makeString(message, " (near '...", codeBlock->source()->getRange(start, stop), "...')");
    }

    exception->putDirect(*vm, vm->propertyNames->message, jsString(vm, message));
}
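
// Illustrative result (example strings only, not output produced by this file): with expression
// range information, a message such as "undefined is not an object" becomes
//     undefined is not an object (evaluating 'foo.bar')
// while without range information the fallback path above yields
//     undefined is not an object (near '...foo.bar...')
// using the clamped, whitespace-stripped context computed around the divot.
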
JSValue VM::throwException(ExecState* exec, JSValue error)
{
    if (Options::breakOnThrow()) {
        dataLog("In call frame ", RawPointer(exec), " for code block ", *exec->codeBlock(), "\n");
        CRASH();
    }

    ASSERT(exec == topCallFrame || exec == exec->lexicalGlobalObject()->globalExec() || exec == exec->vmEntryGlobalObject()->globalExec());

    Vector<StackFrame> stackTrace;
    interpreter->getStackTrace(stackTrace);
    m_exceptionStack = RefCountedArray<StackFrame>(stackTrace);

    if (stackTrace.isEmpty() || !error.isObject())
        return error;

    JSObject* exception = asObject(error);

    StackFrame stackFrame;
    for (unsigned i = 0 ; i < stackTrace.size(); ++i) {
        stackFrame = stackTrace.at(i);
        if (stackFrame.bytecodeOffset)
            break;
    }
    unsigned bytecodeOffset = stackFrame.bytecodeOffset;
    if (!hasErrorInfo(exec, exception)) {
        // FIXME: We should only really be adding these properties to VM generated exceptions,
        // but the inspector currently requires these for all thrown objects.
        unsigned line;
        unsigned column;
        stackFrame.computeLineAndColumn(line, column);
        exception->putDirect(*this, Identifier(this, "line"), jsNumber(line), ReadOnly | DontDelete);
        exception->putDirect(*this, Identifier(this, "column"), jsNumber(column), ReadOnly | DontDelete);
        if (!stackFrame.sourceURL.isEmpty())
            exception->putDirect(*this, Identifier(this, "sourceURL"), jsString(this, stackFrame.sourceURL), ReadOnly | DontDelete);
    }
    if (exception->isErrorInstance() && static_cast<ErrorInstance*>(exception)->appendSourceToMessage()) {
        unsigned stackIndex = 0;
        CallFrame* callFrame;
        for (callFrame = exec; callFrame && !callFrame->codeBlock(); ) {
            stackIndex++;
            callFrame = callFrame->callerFrameSkippingVMEntrySentinel();
        }
        if (callFrame && callFrame->codeBlock()) {
            stackFrame = stackTrace.at(stackIndex);
            bytecodeOffset = stackFrame.bytecodeOffset;
            appendSourceToError(callFrame, static_cast<ErrorInstance*>(exception), bytecodeOffset);
        }
    }

    if (exception->hasProperty(exec, this->propertyNames->stack))
        return error;

    exception->putDirect(*this, propertyNames->stack, interpreter->stackTraceAsString(topCallFrame, stackTrace), DontEnum);
    return error;
}

JSObject* VM::throwException(ExecState* exec, JSObject* error)
{
    return asObject(throwException(exec, JSValue(error)));
}

void VM::getExceptionInfo(JSValue& exception, RefCountedArray<StackFrame>& exceptionStack)
{
    exception = m_exception;
    exceptionStack = m_exceptionStack;
}

void VM::setExceptionInfo(JSValue& exception, RefCountedArray<StackFrame>& exceptionStack)
{
    m_exception = exception;
    m_exceptionStack = exceptionStack;
}

void VM::clearException()
{
    m_exception = JSValue();
}

void VM::clearExceptionStack()
{
    m_exceptionStack = RefCountedArray<StackFrame>();
}

void VM::setStackPointerAtVMEntry(void* sp)
{
    m_stackPointerAtVMEntry = sp;
}

size_t VM::updateReservedZoneSize(size_t reservedZoneSize)
{
    size_t oldReservedZoneSize = m_reservedZoneSize;
    m_reservedZoneSize = reservedZoneSize;

    return oldReservedZoneSize;
}

// On Windows the reserved stack space consists of committed memory, a guard page, and uncommitted memory,
// where the guard page is a barrier between committed and uncommitted memory.
// When data from the guard page is read or written, the guard page is moved, and memory is committed.
// This is how the system grows the stack.
// When using the C stack on Windows we need to precommit the needed stack space.
// Otherwise we might crash later if we access uncommitted stack memory.
// This can happen if we allocate stack space larger than the page guard size (4K).
// The system does not get the chance to move the guard page, and commit more memory,
// and we crash if uncommitted memory is accessed.
// The MSVC compiler fixes this by inserting a call to the _chkstk() function,
// when needed, see http://support.microsoft.com/kb/100775.
// By touching every page up to the stack limit with a dummy operation,
// we force the system to move the guard page, and commit memory.

static void preCommitStackMemory(void* stackLimit)
{
    const int pageSize = 4096;
    for (volatile char* p = reinterpret_cast<char*>(&stackLimit); p > stackLimit; p -= pageSize) {
        char ch = *p;
        *p = ch;
    }
}

inline void VM::updateStackLimit()
{
    void* lastStackLimit = m_stackLimit;

    if (m_stackPointerAtVMEntry) {
        ASSERT(wtfThreadData().stack().isGrowingDownward());
        char* startOfStack = reinterpret_cast<char*>(m_stackPointerAtVMEntry);
#if ENABLE(FTL_JIT)
        m_stackLimit = wtfThreadData().stack().recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), m_reservedZoneSize + m_largestFTLStackSize);
        m_ftlStackLimit = wtfThreadData().stack().recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), m_reservedZoneSize + 2 * m_largestFTLStackSize);
#else
        m_stackLimit = wtfThreadData().stack().recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), m_reservedZoneSize);
#endif
    } else {
#if ENABLE(FTL_JIT)
        m_stackLimit = wtfThreadData().stack().recursionLimit(m_reservedZoneSize + m_largestFTLStackSize);
        m_ftlStackLimit = wtfThreadData().stack().recursionLimit(m_reservedZoneSize + 2 * m_largestFTLStackSize);
#else
        m_stackLimit = wtfThreadData().stack().recursionLimit(m_reservedZoneSize);
#endif
    }

    if (lastStackLimit != m_stackLimit)
        preCommitStackMemory(m_stackLimit);
}

void VM::updateFTLLargestStackSize(size_t stackSize)
{
    if (stackSize > m_largestFTLStackSize) {
        m_largestFTLStackSize = stackSize;
        updateStackLimit();
    }
}

void releaseExecutableMemory(VM& vm)
{
    vm.releaseExecutableMemory();
}

void VM::gatherConservativeRoots(ConservativeRoots& conservativeRoots)
{
    for (size_t i = 0; i < scratchBuffers.size(); i++) {
        ScratchBuffer* scratchBuffer = scratchBuffers[i];
        if (scratchBuffer->activeLength()) {
            void* bufferStart = scratchBuffer->dataBuffer();
            conservativeRoots.add(bufferStart, static_cast<void*>(static_cast<char*>(bufferStart) + scratchBuffer->activeLength()));
        }
    }
}

void logSanitizeStack(VM* vm)
{
    if (Options::verboseSanitizeStack() && vm->topCallFrame) {
        int dummy;
        dataLog(
            "Sanitizing stack with top call frame at ", RawPointer(vm->topCallFrame),
            ", current stack pointer at ", RawPointer(&dummy), ", in ",
            pointerDump(vm->topCallFrame->codeBlock()), " and last code origin = ",
            vm->topCallFrame->codeOrigin(), "\n");
    }
}

#if ENABLE(REGEXP_TRACING)
void VM::addRegExpToTrace(RegExp* regExp)
{
    m_rtTraceList->add(regExp);
}

void VM::dumpRegExpTrace()
{
    // The first RegExp object is ignored. It is created by the RegExpPrototype ctor and not used.
    RTTraceList::iterator iter = ++m_rtTraceList->begin();

    if (iter != m_rtTraceList->end()) {
        dataLogF("\nRegExp Tracing\n");
        dataLogF("Regular Expression                              8 Bit          16 Bit        match()    Matches    Average\n");
        dataLogF(" <Match only / Match>                         JIT Addr      JIT Address       calls      found   String len\n");
        dataLogF("----------------------------------------+----------------+----------------+----------+----------+-----------\n");

        unsigned reCount = 0;

        for (; iter != m_rtTraceList->end(); ++iter, ++reCount) {
            (*iter)->printTraceData();
        }

        dataLogF("%d Regular Expressions\n", reCount);
    }

    m_rtTraceList->clear();
}
#else
void VM::dumpRegExpTrace()
{
}
#endif

void VM::registerWatchpointForImpureProperty(const Identifier& propertyName, Watchpoint* watchpoint)
{
    auto result = m_impurePropertyWatchpointSets.add(propertyName.string(), nullptr);
    if (result.isNewEntry)
        result.iterator->value = adoptRef(new WatchpointSet(IsWatched));
    result.iterator->value->add(watchpoint);
}

void VM::addImpureProperty(const String& propertyName)
{
    if (RefPtr<WatchpointSet> watchpointSet = m_impurePropertyWatchpointSets.take(propertyName))
        watchpointSet->fireAll();
}

class SetEnabledProfilerFunctor {
public:
    bool operator()(CodeBlock* codeBlock)
    {
        if (JITCode::isOptimizingJIT(codeBlock->jitType()))
            codeBlock->jettison(Profiler::JettisonDueToLegacyProfiler);
        return false;
    }
};

void VM::setEnabledProfiler(LegacyProfiler* profiler)
{
    m_enabledProfiler = profiler;
    if (m_enabledProfiler) {
        waitForCompilationsToComplete();
        SetEnabledProfilerFunctor functor;
        heap.forEachCodeBlock(functor);
    }
}

void sanitizeStackForVM(VM* vm)
{
    logSanitizeStack(vm);
#if !ENABLE(JIT)
    vm->interpreter->stack().sanitizeStack();
#else
    sanitizeStackForVMImpl(vm);
#endif
}