/*
 * Copyright (C) 2008, 2009, 2013, 2014 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of Apple Inc. ("Apple") nor the names of
 *    its contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef VM_h
#define VM_h

#include "DateInstanceCache.h"
#include "ExecutableAllocator.h"
#include "Heap.h"
#include "Intrinsic.h"
#include "JITThunks.h"
#include "JSCJSValue.h"
#include "JSLock.h"
#include "LLIntData.h"
#include "MacroAssemblerCodeRef.h"
#include "NumericStrings.h"
#include "PrivateName.h"
#include "PrototypeMap.h"
#include "SmallStrings.h"
#include "SourceCode.h"
#include "Strong.h"
#include "ThunkGenerators.h"
#include "TypedArrayController.h"
#include "Watchdog.h"
#include "Watchpoint.h"
#include "WeakRandom.h"
#include <wtf/BumpPointerAllocator.h>
#include <wtf/DateMath.h>
#include <wtf/Forward.h>
#include <wtf/HashMap.h>
#include <wtf/HashSet.h>
#include <wtf/RefCountedArray.h>
#include <wtf/SimpleStats.h>
#include <wtf/StackBounds.h>
#include <wtf/ThreadSafeRefCounted.h>
#include <wtf/ThreadSpecific.h>
#include <wtf/WTFThreadData.h>
#include <wtf/text/WTFString.h>
#if ENABLE(REGEXP_TRACING)
#include <wtf/ListHashSet.h>
#endif

namespace JSC {

class ArityCheckFailReturnThunks;
class BuiltinExecutables;
class CodeBlock;
class CodeCache;
class CommonIdentifiers;
class ExecState;
class HandleStack;
class Identifier;
class Interpreter;
class JSGlobalObject;
class JSObject;
class Keywords;
class LLIntOffsetsExtractor;
class LegacyProfiler;
class NativeExecutable;
class ParserArena;
class RegExpCache;
class SourceProvider;
class SourceProviderCache;
struct StackFrame;
class Stringifier;
class Structure;
#if ENABLE(REGEXP_TRACING)
class RegExp;
#endif
class UnlinkedCodeBlock;
class UnlinkedEvalCodeBlock;
class UnlinkedFunctionExecutable;
class UnlinkedProgramCodeBlock;
class VMEntryScope;
class Watchpoint;
class WatchpointSet;

#if ENABLE(DFG_JIT)
namespace DFG {
class LongLivedState;
}
#endif // ENABLE(DFG_JIT)
#if ENABLE(FTL_JIT)
namespace FTL {
class Thunks;
}
#endif // ENABLE(FTL_JIT)
namespace CommonSlowPaths {
struct ArityCheckData;
}
namespace Profiler {
class Database;
}

struct HashTable;
struct Instruction;

struct LocalTimeOffsetCache {
    LocalTimeOffsetCache()
        : start(0.0)
        , end(-1.0)
        , increment(0.0)
    {
    }

    void reset()
    {
        offset = LocalTimeOffset();
        start = 0.0;
        end = -1.0;
        increment = 0.0;
    }

    LocalTimeOffset offset;
    double start;
    double end;
    double increment;
};
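
// A minimal sketch of the caching protocol this struct supports (the real
// consumers live in the date code; calculateLocalTimeOffset() stands in for
// the actual recomputation, so treat the call site as illustrative):
//
//     LocalTimeOffset cachedLocalTimeOffset(LocalTimeOffsetCache& cache, double ms)
//     {
//         if (ms >= cache.start && ms <= cache.end)
//             return cache.offset; // Hit: the timestamp falls in the cached window.
//         LocalTimeOffset offset = calculateLocalTimeOffset(ms); // Assumed WTF helper.
//         cache.offset = offset;
//         cache.start = cache.end = ms; // A real caller would widen the window by 'increment'.
//         return offset;
//     }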

class ConservativeRoots;

#if COMPILER(MSVC)
#pragma warning(push)
#pragma warning(disable: 4200) // Disable "zero-sized array in struct/union" warning
#endif
struct ScratchBuffer {
    ScratchBuffer()
    {
        u.m_activeLength = 0;
    }

    static ScratchBuffer* create(size_t size)
    {
        ScratchBuffer* result = new (fastMalloc(ScratchBuffer::allocationSize(size))) ScratchBuffer;

        return result;
    }

    static size_t allocationSize(size_t bufferSize) { return sizeof(ScratchBuffer) + bufferSize; }
    void setActiveLength(size_t activeLength) { u.m_activeLength = activeLength; }
    size_t activeLength() const { return u.m_activeLength; }
    size_t* activeLengthPtr() { return &u.m_activeLength; }
    void* dataBuffer() { return m_buffer; }

    union {
        size_t m_activeLength;
        double pad; // Make sure m_buffer is double aligned.
    } u;
#if CPU(MIPS) && (defined WTF_MIPS_ARCH_REV && WTF_MIPS_ARCH_REV == 2)
    void* m_buffer[0] __attribute__((aligned(8)));
#else
    void* m_buffer[0];
#endif
};
#if COMPILER(MSVC)
#pragma warning(pop)
#endif
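
// Usage sketch (illustrative, not a real call site): a scratch buffer is
// allocated with room for a payload after the header, and the active length
// tells the GC how much of that payload currently holds live values.
//
//     ScratchBuffer* buffer = ScratchBuffer::create(16 * sizeof(EncodedJSValue));
//     EncodedJSValue* slots = static_cast<EncodedJSValue*>(buffer->dataBuffer());
//     buffer->setActiveLength(8 * sizeof(EncodedJSValue)); // Only the first 8 slots are live.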

class VM : public ThreadSafeRefCounted<VM> {
public:
    // WebCore has a one-to-one mapping of threads to VMs;
    // either create() or createLeaked() should be called at most once
    // per thread; the resulting VM is the thread's 'default' VM (it uses
    // the thread's default string uniquing table from wtfThreadData).
    // API contexts created using the new context-group-aware interface
    // create APIContextGroup objects, which require less locking of JSC
    // than the old singleton APIShared VM created for use by
    // the original API.
    enum VMType { Default, APIContextGroup, APIShared };
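
    // A hedged sketch of typical embedder usage (JSLockHolder comes from
    // JSLock.h; error handling omitted):
    //
    //     RefPtr<VM> vm = VM::create(LargeHeap); // One VM per thread in WebCore.
    //     {
    //         JSLockHolder lock(vm.get()); // Hold the API lock while using the VM.
    //         // ... create a JSGlobalObject, evaluate code ...
    //     }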

    struct ClientData {
        JS_EXPORT_PRIVATE virtual ~ClientData() = 0;
    };
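
    // Embedders can subclass ClientData to hang their own per-VM state off the
    // VM; a purely illustrative sketch (MyClientData is hypothetical):
    //
    //     struct MyClientData : VM::ClientData {
    //         virtual ~MyClientData() { }
    //         // ... embedder-specific fields ...
    //     };
    //     vm->clientData = new MyClientData;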

    bool isSharedInstance() { return vmType == APIShared; }
    bool usingAPI() { return vmType != Default; }
    JS_EXPORT_PRIVATE static bool sharedInstanceExists();
    JS_EXPORT_PRIVATE static VM& sharedInstance();

    JS_EXPORT_PRIVATE static PassRefPtr<VM> create(HeapType = SmallHeap);
    JS_EXPORT_PRIVATE static PassRefPtr<VM> createLeaked(HeapType = SmallHeap);
    static PassRefPtr<VM> createContextGroup(HeapType = SmallHeap);
    JS_EXPORT_PRIVATE ~VM();

    void makeUsableFromMultipleThreads() { heap.machineThreads().makeUsableFromMultipleThreads(); }

private:
    RefPtr<JSLock> m_apiLock;

public:
#if ENABLE(ASSEMBLER)
    // executableAllocator should be destructed after the heap, as the heap can call executableAllocator
    // in its destructor.
    ExecutableAllocator executableAllocator;
#endif

    // The heap should be just after executableAllocator and before other members to ensure that it's
    // destructed after all the objects that reference it.
    Heap heap;

#if ENABLE(DFG_JIT)
    OwnPtr<DFG::LongLivedState> dfgState;
#endif // ENABLE(DFG_JIT)

    VMType vmType;
    ClientData* clientData;
    ExecState* topCallFrame;
    std::unique_ptr<Watchdog> watchdog;

    const OwnPtr<const HashTable> arrayConstructorTable;
    const OwnPtr<const HashTable> arrayPrototypeTable;
    const OwnPtr<const HashTable> booleanPrototypeTable;
    const OwnPtr<const HashTable> dataViewTable;
    const OwnPtr<const HashTable> dateTable;
    const OwnPtr<const HashTable> dateConstructorTable;
    const OwnPtr<const HashTable> errorPrototypeTable;
    const OwnPtr<const HashTable> globalObjectTable;
    const OwnPtr<const HashTable> jsonTable;
    const OwnPtr<const HashTable> numberConstructorTable;
    const OwnPtr<const HashTable> numberPrototypeTable;
    const OwnPtr<const HashTable> objectConstructorTable;
    const OwnPtr<const HashTable> privateNamePrototypeTable;
    const OwnPtr<const HashTable> regExpTable;
    const OwnPtr<const HashTable> regExpConstructorTable;
    const OwnPtr<const HashTable> regExpPrototypeTable;
    const OwnPtr<const HashTable> stringConstructorTable;
#if ENABLE(PROMISES)
    const OwnPtr<const HashTable> promisePrototypeTable;
    const OwnPtr<const HashTable> promiseConstructorTable;
#endif

    Strong<Structure> structureStructure;
    Strong<Structure> structureRareDataStructure;
    Strong<Structure> debuggerActivationStructure;
    Strong<Structure> terminatedExecutionErrorStructure;
    Strong<Structure> stringStructure;
    Strong<Structure> notAnObjectStructure;
    Strong<Structure> propertyNameIteratorStructure;
    Strong<Structure> getterSetterStructure;
    Strong<Structure> customGetterSetterStructure;
    Strong<Structure> apiWrapperStructure;
    Strong<Structure> JSScopeStructure;
    Strong<Structure> executableStructure;
    Strong<Structure> nativeExecutableStructure;
    Strong<Structure> evalExecutableStructure;
    Strong<Structure> programExecutableStructure;
    Strong<Structure> functionExecutableStructure;
    Strong<Structure> regExpStructure;
    Strong<Structure> symbolTableStructure;
    Strong<Structure> structureChainStructure;
    Strong<Structure> sparseArrayValueMapStructure;
    Strong<Structure> arrayBufferNeuteringWatchpointStructure;
    Strong<Structure> withScopeStructure;
    Strong<Structure> unlinkedFunctionExecutableStructure;
    Strong<Structure> unlinkedProgramCodeBlockStructure;
    Strong<Structure> unlinkedEvalCodeBlockStructure;
    Strong<Structure> unlinkedFunctionCodeBlockStructure;
    Strong<Structure> propertyTableStructure;
    Strong<Structure> mapDataStructure;
    Strong<Structure> weakMapDataStructure;
#if ENABLE(PROMISES)
    Strong<Structure> promiseDeferredStructure;
    Strong<Structure> promiseReactionStructure;
#endif
    Strong<JSCell> iterationTerminator;

    AtomicStringTable* m_atomicStringTable;
    CommonIdentifiers* propertyNames;
    const MarkedArgumentBuffer* emptyList; // Lists are supposed to be allocated on the stack to have their elements properly marked, which is not the case here - but this list has nothing to mark.
    SmallStrings smallStrings;
    NumericStrings numericStrings;
    DateInstanceCache dateInstanceCache;
    WTF::SimpleStats machineCodeBytesPerBytecodeWordForBaselineJIT;
    WeakGCMap<StringImpl*, JSString, PtrHash<StringImpl*>> stringCache;
    Strong<JSString> lastCachedString;

    AtomicStringTable* atomicStringTable() const { return m_atomicStringTable; }

    void setInDefineOwnProperty(bool inDefineOwnProperty)
    {
        m_inDefineOwnProperty = inDefineOwnProperty;
    }

    bool isInDefineOwnProperty()
    {
        return m_inDefineOwnProperty;
    }

    LegacyProfiler* enabledProfiler() { return m_enabledProfiler; }
    void setEnabledProfiler(LegacyProfiler*);

    void* enabledProfilerAddress() { return &m_enabledProfiler; }

#if ENABLE(JIT)
    bool canUseJIT() { return m_canUseJIT; }
#else
    bool canUseJIT() { return false; } // interpreter only
#endif

#if ENABLE(YARR_JIT)
    bool canUseRegExpJIT() { return m_canUseRegExpJIT; }
#else
    bool canUseRegExpJIT() { return false; } // interpreter only
#endif

    SourceProviderCache* addSourceProviderCache(SourceProvider*);
    void clearSourceProviderCaches();

    PrototypeMap prototypeMap;

    OwnPtr<ParserArena> parserArena;
    typedef HashMap<RefPtr<SourceProvider>, RefPtr<SourceProviderCache>> SourceProviderCacheMap;
    SourceProviderCacheMap sourceProviderCacheMap;
    OwnPtr<Keywords> keywords;
    Interpreter* interpreter;
#if ENABLE(JIT)
    OwnPtr<JITThunks> jitStubs;
    MacroAssemblerCodeRef getCTIStub(ThunkGenerator generator)
    {
        return jitStubs->ctiStub(this, generator);
    }
    NativeExecutable* getHostFunction(NativeFunction, Intrinsic);

    std::unique_ptr<ArityCheckFailReturnThunks> arityCheckFailReturnThunks;
#endif // ENABLE(JIT)
    std::unique_ptr<CommonSlowPaths::ArityCheckData> arityCheckData;
#if ENABLE(FTL_JIT)
    std::unique_ptr<FTL::Thunks> ftlThunks;
#endif
    NativeExecutable* getHostFunction(NativeFunction, NativeFunction constructor);

    static ptrdiff_t exceptionOffset()
    {
        return OBJECT_OFFSETOF(VM, m_exception);
    }

    static ptrdiff_t callFrameForThrowOffset()
    {
        return OBJECT_OFFSETOF(VM, callFrameForThrow);
    }

    static ptrdiff_t targetMachinePCForThrowOffset()
    {
        return OBJECT_OFFSETOF(VM, targetMachinePCForThrow);
    }
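
    // These offsets exist so JIT-generated code can address VM fields directly;
    // a hedged MacroAssembler-style sketch (register names are illustrative):
    //
    //     // Did the callee throw? Load vm->m_exception and test it.
    //     jit.loadPtr(Address(vmGPR, VM::exceptionOffset()), tempGPR);
    //     Jump exceptionThrown = jit.branchTestPtr(NonZero, tempGPR);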

    JS_EXPORT_PRIVATE void clearException();
    JS_EXPORT_PRIVATE void clearExceptionStack();
    void getExceptionInfo(JSValue& exception, RefCountedArray<StackFrame>& exceptionStack);
    void setExceptionInfo(JSValue& exception, RefCountedArray<StackFrame>& exceptionStack);
    JSValue exception() const { return m_exception; }
    JSValue* addressOfException() { return &m_exception; }
    const RefCountedArray<StackFrame>& exceptionStack() const { return m_exceptionStack; }

    JS_EXPORT_PRIVATE JSValue throwException(ExecState*, JSValue);
    JS_EXPORT_PRIVATE JSObject* throwException(ExecState*, JSObject*);

    void* stackPointerAtVMEntry() const { return m_stackPointerAtVMEntry; }
    void setStackPointerAtVMEntry(void*);

    size_t reservedZoneSize() const { return m_reservedZoneSize; }
    size_t updateReservedZoneSize(size_t reservedZoneSize);

#if ENABLE(FTL_JIT)
    void updateFTLLargestStackSize(size_t);
    void** addressOfFTLStackLimit() { return &m_ftlStackLimit; }
#endif

#if !ENABLE(JIT)
    void* jsStackLimit() { return m_jsStackLimit; }
    void setJSStackLimit(void* limit) { m_jsStackLimit = limit; }
#endif
    void* stackLimit() { return m_stackLimit; }
    void** addressOfStackLimit() { return &m_stackLimit; }

    bool isSafeToRecurse(size_t neededStackInBytes = 0) const
    {
        ASSERT(wtfThreadData().stack().isGrowingDownward());
        int8_t* curr = reinterpret_cast<int8_t*>(&curr);
        int8_t* limit = reinterpret_cast<int8_t*>(m_stackLimit);
        return curr >= limit && static_cast<size_t>(curr - limit) >= neededStackInBytes;
    }
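
    // Callers use this check to fail gracefully rather than overflow the native
    // stack; a hedged example in the spirit of the parser's recursion guards:
    //
    //     if (!vm.isSafeToRecurse(requiredBytes)) // requiredBytes is illustrative.
    //         return throwStackOverflowError(exec); // Assumed error helper.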

    void* lastStackTop() { return m_lastStackTop; }
    void setLastStackTop(void* lastStackTop) { m_lastStackTop = lastStackTop; }

    const ClassInfo* const jsArrayClassInfo;
    const ClassInfo* const jsFinalObjectClassInfo;

    JSValue hostCallReturnValue;
    ExecState* newCallFrameReturnValue;
    unsigned varargsLength;
    ExecState* callFrameForThrow;
    void* targetMachinePCForThrow;
    Instruction* targetInterpreterPCForThrow;
    uint32_t osrExitIndex;
    void* osrExitJumpDestination;
    Vector<ScratchBuffer*> scratchBuffers;
    size_t sizeOfLastScratchBuffer;

    ScratchBuffer* scratchBufferForSize(size_t size)
    {
        if (!size)
            return 0;

        if (size > sizeOfLastScratchBuffer) {
            // Protect against an N^2 memory usage pathology by ensuring
            // that at worst, we get a geometric series, meaning that the
            // total memory usage is somewhere around
            // max(scratch buffer size) * 4.
            sizeOfLastScratchBuffer = size * 2;

            ScratchBuffer* newBuffer = ScratchBuffer::create(sizeOfLastScratchBuffer);
            RELEASE_ASSERT(newBuffer);
            scratchBuffers.append(newBuffer);
        }

        ScratchBuffer* result = scratchBuffers.last();
        result->setActiveLength(0);
        return result;
    }
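
    // Worked example of the bound: requests of 100, 300, and 1000 bytes allocate
    // buffers of 200, 600, and 2000 bytes (each request that outgrows the last
    // buffer doubles). The 2800-byte total stays under the roughly
    // 4 * max(request) = 4000-byte ceiling the geometric series guarantees.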

    void gatherConservativeRoots(ConservativeRoots&);

    VMEntryScope* entryScope;

    HashSet<JSObject*> stringRecursionCheckVisitedObjects;

    LocalTimeOffsetCache localTimeOffsetCache;

    String cachedDateString;
    double cachedDateStringValue;

    OwnPtr<Profiler::Database> m_perBytecodeProfiler;
    RefPtr<TypedArrayController> m_typedArrayController;
    RegExpCache* m_regExpCache;
    BumpPointerAllocator m_regExpAllocator;

#if ENABLE(REGEXP_TRACING)
    typedef ListHashSet<RegExp*> RTTraceList;
    RTTraceList* m_rtTraceList;
#endif

    bool hasExclusiveThread() const { return m_apiLock->hasExclusiveThread(); }
    std::thread::id exclusiveThread() const { return m_apiLock->exclusiveThread(); }
    void setExclusiveThread(std::thread::id threadId) { m_apiLock->setExclusiveThread(threadId); }

    JS_EXPORT_PRIVATE void resetDateCache();

    JS_EXPORT_PRIVATE void startSampling();
    JS_EXPORT_PRIVATE void stopSampling();
    JS_EXPORT_PRIVATE void dumpSampleData(ExecState* exec);
    RegExpCache* regExpCache() { return m_regExpCache; }
#if ENABLE(REGEXP_TRACING)
    void addRegExpToTrace(RegExp*);
#endif
    JS_EXPORT_PRIVATE void dumpRegExpTrace();

    bool isCollectorBusy() { return heap.isBusy(); }
    JS_EXPORT_PRIVATE void releaseExecutableMemory();

#if ENABLE(GC_VALIDATION)
    bool isInitializingObject() const;
    void setInitializingObjectClass(const ClassInfo*);
#endif

    unsigned m_newStringsSinceLastHashCons;

    static const unsigned s_minNumberOfNewStringsToHashCons = 100;

    bool haveEnoughNewStringsToHashCons() { return m_newStringsSinceLastHashCons > s_minNumberOfNewStringsToHashCons; }
    void resetNewStringsSinceLastHashCons() { m_newStringsSinceLastHashCons = 0; }

    bool currentThreadIsHoldingAPILock() const { return m_apiLock->currentThreadIsHoldingLock(); }

    JSLock& apiLock() { return *m_apiLock; }
    CodeCache* codeCache() { return m_codeCache.get(); }

    void waitForCompilationsToComplete();

    JS_EXPORT_PRIVATE void discardAllCode();

    void registerWatchpointForImpureProperty(const Identifier&, Watchpoint*);
    // FIXME: Use AtomicString once it has been merged with Identifier.
    JS_EXPORT_PRIVATE void addImpureProperty(const String&);

    BuiltinExecutables* builtinExecutables() { return m_builtinExecutables.get(); }

private:
    friend class LLIntOffsetsExtractor;
    friend class ClearExceptionScope;
    friend class RecursiveAllocationScope;

    VM(VMType, HeapType);
    static VM*& sharedInstanceInternal();
    void createNativeThunk();

    void updateStackLimit();

#if ENABLE(ASSEMBLER)
    bool m_canUseAssembler;
#endif
#if ENABLE(JIT)
    bool m_canUseJIT;
#endif
#if ENABLE(YARR_JIT)
    bool m_canUseRegExpJIT;
#endif
#if ENABLE(GC_VALIDATION)
    const ClassInfo* m_initializingObjectClass;
#endif
    void* m_stackPointerAtVMEntry;
    size_t m_reservedZoneSize;
    // With the JIT, JS frames run on the native stack, so a single limit serves
    // both and the two names alias via a union; the !JIT (C loop) build keeps
    // the native and JS stack limits as separate fields.
#if !ENABLE(JIT)
    struct {
        void* m_stackLimit;
        void* m_jsStackLimit;
    };
#else
    union {
        void* m_stackLimit;
        void* m_jsStackLimit;
    };
#if ENABLE(FTL_JIT)
    void* m_ftlStackLimit;
    size_t m_largestFTLStackSize;
#endif
#endif
    void* m_lastStackTop;
    JSValue m_exception;
    bool m_inDefineOwnProperty;
    OwnPtr<CodeCache> m_codeCache;
    LegacyProfiler* m_enabledProfiler;
    OwnPtr<BuiltinExecutables> m_builtinExecutables;
    RefCountedArray<StackFrame> m_exceptionStack;
    HashMap<String, RefPtr<WatchpointSet>> m_impurePropertyWatchpointSets;
};

#if ENABLE(GC_VALIDATION)
inline bool VM::isInitializingObject() const
{
    return !!m_initializingObjectClass;
}

inline void VM::setInitializingObjectClass(const ClassInfo* initializingObjectClass)
{
    m_initializingObjectClass = initializingObjectClass;
}
#endif

inline Heap* WeakSet::heap() const
{
    return &m_vm->heap;
}

#if ENABLE(JIT)
extern "C" void sanitizeStackForVMImpl(VM*);
#endif

void sanitizeStackForVM(VM*);
void logSanitizeStack(VM*);

} // namespace JSC

#endif // VM_h