/*
 * Copyright (C) 2008, 2009, 2013-2015 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of Apple Inc. ("Apple") nor the names of
 *    its contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef VM_h
#define VM_h

#include "ControlFlowProfiler.h"
#include "DateInstanceCache.h"
#include "ExecutableAllocator.h"
#include "FunctionHasExecutedCache.h"
#include "Heap.h"
#include "Intrinsic.h"
#include "JITThunks.h"
#include "JSCJSValue.h"
#include "JSLock.h"
#include "LLIntData.h"
#include "MacroAssemblerCodeRef.h"
#include "NumericStrings.h"
#include "PrivateName.h"
#include "PrototypeMap.h"
#include "SmallStrings.h"
#include "SourceCode.h"
#include "Strong.h"
#include "ThunkGenerators.h"
#include "TypedArrayController.h"
#include "VMEntryRecord.h"
#include "Watchdog.h"
#include "Watchpoint.h"
#include "WeakRandom.h"
#include <wtf/Bag.h>
#include <wtf/BumpPointerAllocator.h>
#include <wtf/DateMath.h>
#include <wtf/Forward.h>
#include <wtf/HashMap.h>
#include <wtf/HashSet.h>
#include <wtf/SimpleStats.h>
#include <wtf/StackBounds.h>
#include <wtf/ThreadSafeRefCounted.h>
#include <wtf/ThreadSpecific.h>
#include <wtf/WTFThreadData.h>
#include <wtf/text/SymbolRegistry.h>
#include <wtf/text/WTFString.h>
#if ENABLE(REGEXP_TRACING)
#include <wtf/ListHashSet.h>
#endif

namespace JSC {

class ArityCheckFailReturnThunks;
class BuiltinExecutables;
class CodeBlock;
class CodeCache;
class CommonIdentifiers;
class ExecState;
class Exception;
class HandleStack;
class TypeProfiler;
class TypeProfilerLog;
class Identifier;
class Interpreter;
class JSGlobalObject;
class JSObject;
class Keywords;
class LLIntOffsetsExtractor;
class LegacyProfiler;
class NativeExecutable;
class RegExpCache;
class ScriptExecutable;
class SourceProvider;
class SourceProviderCache;
struct StackFrame;
class Stringifier;
class Structure;
#if ENABLE(REGEXP_TRACING)
class RegExp;
#endif
class UnlinkedCodeBlock;
class UnlinkedEvalCodeBlock;
class UnlinkedFunctionExecutable;
class UnlinkedProgramCodeBlock;
class VirtualRegister;
class VMEntryScope;
class Watchpoint;
class WatchpointSet;

#if ENABLE(DFG_JIT)
namespace DFG {
class LongLivedState;
}
#endif // ENABLE(DFG_JIT)
#if ENABLE(FTL_JIT)
namespace FTL {
class Thunks;
}
#endif // ENABLE(FTL_JIT)
namespace CommonSlowPaths {
struct ArityCheckData;
}
namespace Profiler {
class Database;
}

struct HashTable;
struct Instruction;

struct LocalTimeOffsetCache {
    LocalTimeOffsetCache()
        : start(0.0)
        , end(-1.0)
        , increment(0.0)
        , timeType(WTF::UTCTime)
    {
    }

    void reset()
    {
        offset = LocalTimeOffset();
        start = 0.0;
        end = -1.0;
        increment = 0.0;
        timeType = WTF::UTCTime;
    }

    LocalTimeOffset offset;
    double start;
    double end;
    double increment;
    WTF::TimeType timeType;
};
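
// A cached offset can be reused for any time t with start <= t <= end, so
// repeated Date arithmetic near the same instant avoids re-querying the
// platform time zone database; start = 0.0 with end = -1.0 (an empty range)
// marks the cache invalid. A sketch of how a lookup might consult it:
//
//     if (cache.start <= ms && ms <= cache.end)
//         return cache.offset;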

class ConservativeRoots;

#if COMPILER(MSVC)
#pragma warning(push)
#pragma warning(disable: 4200) // Disable "zero-sized array in struct/union" warning
#endif
struct ScratchBuffer {
    ScratchBuffer()
    {
        u.m_activeLength = 0;
    }

    static ScratchBuffer* create(size_t size)
    {
        ScratchBuffer* result = new (fastMalloc(ScratchBuffer::allocationSize(size))) ScratchBuffer;

        return result;
    }

    static size_t allocationSize(size_t bufferSize) { return sizeof(ScratchBuffer) + bufferSize; }
    void setActiveLength(size_t activeLength) { u.m_activeLength = activeLength; }
    size_t activeLength() const { return u.m_activeLength; }
    size_t* activeLengthPtr() { return &u.m_activeLength; }
    void* dataBuffer() { return m_buffer; }

    union {
        size_t m_activeLength;
        double pad; // Make sure m_buffer is double aligned.
    } u;
#if CPU(MIPS) && (defined WTF_MIPS_ARCH_REV && WTF_MIPS_ARCH_REV == 2)
    void* m_buffer[0] __attribute__((aligned(8)));
#else
    void* m_buffer[0];
#endif
};
#if COMPILER(MSVC)
#pragma warning(pop)
#endif
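
// A usage sketch (the 64-byte size is purely illustrative): create() places
// the header and the payload in a single fastMalloc block, so dataBuffer()
// points just past the alignment union.
//
//     ScratchBuffer* buffer = ScratchBuffer::create(64);
//     buffer->setActiveLength(64); // mark these bytes as holding live values
//     double* slots = static_cast<double*>(buffer->dataBuffer());
//
// The union guarantees 8-byte alignment of m_buffer, and activeLength()
// tells the conservative GC scan how many bytes of the buffer to visit.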

class VM : public ThreadSafeRefCounted<VM> {
public:
    // WebCore has a one-to-one mapping of threads to VMs;
    // either create() or createLeaked() should be called at most once
    // per thread. The result is the 'default' VM (it uses the
    // thread's default string uniquing table from wtfThreadData).
    // API contexts created using the new context-group-aware interface
    // create APIContextGroup objects, which require less locking of JSC
    // than the old singleton APIShared VM created for use by
    // the original API.
    enum VMType { Default, APIContextGroup, APIShared };

    struct ClientData {
        JS_EXPORT_PRIVATE virtual ~ClientData() = 0;
    };

    bool isSharedInstance() { return vmType == APIShared; }
    bool usingAPI() { return vmType != Default; }
    JS_EXPORT_PRIVATE static bool sharedInstanceExists();
    JS_EXPORT_PRIVATE static VM& sharedInstance();

    JS_EXPORT_PRIVATE static Ref<VM> create(HeapType = SmallHeap);
    JS_EXPORT_PRIVATE static Ref<VM> createLeaked(HeapType = SmallHeap);
    static Ref<VM> createContextGroup(HeapType = SmallHeap);
    JS_EXPORT_PRIVATE ~VM();
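
    // A minimal creation sketch. JSLockHolder (JSLock.h) is assumed here as
    // the usual RAII way to take the per-VM API lock before touching the heap:
    //
    //     RefPtr<VM> vm = VM::create(LargeHeap);
    //     {
    //         JSLockHolder locker(vm.get());
    //         // allocate cells, run code, inspect the heap...
    //     }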

private:
    RefPtr<JSLock> m_apiLock;

public:
#if ENABLE(ASSEMBLER)
    // executableAllocator should be destructed after the heap, as the heap can call executableAllocator
    // in its destructor.
    ExecutableAllocator executableAllocator;
#endif

    // The heap should be just after executableAllocator and before other members to ensure that it's
    // destructed after all the objects that reference it.
    Heap heap;

#if ENABLE(DFG_JIT)
    std::unique_ptr<DFG::LongLivedState> dfgState;
#endif // ENABLE(DFG_JIT)

    VMType vmType;
    ClientData* clientData;
    VMEntryFrame* topVMEntryFrame;
    ExecState* topCallFrame;
    std::unique_ptr<Watchdog> watchdog;

    Strong<Structure> structureStructure;
    Strong<Structure> structureRareDataStructure;
    Strong<Structure> terminatedExecutionErrorStructure;
    Strong<Structure> stringStructure;
    Strong<Structure> notAnObjectStructure;
    Strong<Structure> propertyNameIteratorStructure;
    Strong<Structure> propertyNameEnumeratorStructure;
    Strong<Structure> getterSetterStructure;
    Strong<Structure> customGetterSetterStructure;
    Strong<Structure> scopedArgumentsTableStructure;
    Strong<Structure> apiWrapperStructure;
    Strong<Structure> JSScopeStructure;
    Strong<Structure> executableStructure;
    Strong<Structure> nativeExecutableStructure;
    Strong<Structure> evalExecutableStructure;
    Strong<Structure> programExecutableStructure;
    Strong<Structure> functionExecutableStructure;
    Strong<Structure> regExpStructure;
    Strong<Structure> symbolStructure;
    Strong<Structure> symbolTableStructure;
    Strong<Structure> structureChainStructure;
    Strong<Structure> sparseArrayValueMapStructure;
    Strong<Structure> templateRegistryKeyStructure;
    Strong<Structure> arrayBufferNeuteringWatchpointStructure;
    Strong<Structure> unlinkedFunctionExecutableStructure;
    Strong<Structure> unlinkedProgramCodeBlockStructure;
    Strong<Structure> unlinkedEvalCodeBlockStructure;
    Strong<Structure> unlinkedFunctionCodeBlockStructure;
    Strong<Structure> propertyTableStructure;
    Strong<Structure> weakMapDataStructure;
    Strong<Structure> inferredValueStructure;
    Strong<Structure> functionRareDataStructure;
    Strong<Structure> exceptionStructure;
#if ENABLE(PROMISES)
    Strong<Structure> promiseDeferredStructure;
#endif
    Strong<JSCell> iterationTerminator;
    Strong<JSCell> emptyPropertyNameEnumerator;

    AtomicStringTable* m_atomicStringTable;
    WTF::SymbolRegistry m_symbolRegistry;
    CommonIdentifiers* propertyNames;
    const MarkedArgumentBuffer* emptyList; // Lists are supposed to be allocated on the stack to have their elements properly marked, which is not the case here - but this list has nothing to mark.
    SmallStrings smallStrings;
    NumericStrings numericStrings;
    DateInstanceCache dateInstanceCache;
    WTF::SimpleStats machineCodeBytesPerBytecodeWordForBaselineJIT;
    WeakGCMap<StringImpl*, JSString, PtrHash<StringImpl*>> stringCache;
    Strong<JSString> lastCachedString;

    AtomicStringTable* atomicStringTable() const { return m_atomicStringTable; }
    WTF::SymbolRegistry& symbolRegistry() { return m_symbolRegistry; }

    void setInDefineOwnProperty(bool inDefineOwnProperty)
    {
        m_inDefineOwnProperty = inDefineOwnProperty;
    }

    bool isInDefineOwnProperty()
    {
        return m_inDefineOwnProperty;
    }

    LegacyProfiler* enabledProfiler() { return m_enabledProfiler; }
    void setEnabledProfiler(LegacyProfiler*);

    void* enabledProfilerAddress() { return &m_enabledProfiler; }

#if ENABLE(JIT)
    bool canUseJIT() { return m_canUseJIT; }
#else
    bool canUseJIT() { return false; } // interpreter only
#endif

#if ENABLE(YARR_JIT)
    bool canUseRegExpJIT() { return m_canUseRegExpJIT; }
#else
    bool canUseRegExpJIT() { return false; } // interpreter only
#endif

    SourceProviderCache* addSourceProviderCache(SourceProvider*);
    void clearSourceProviderCaches();

    PrototypeMap prototypeMap;

    typedef HashMap<RefPtr<SourceProvider>, RefPtr<SourceProviderCache>> SourceProviderCacheMap;
    SourceProviderCacheMap sourceProviderCacheMap;
    std::unique_ptr<Keywords> keywords;
    Interpreter* interpreter;
#if ENABLE(JIT)
    std::unique_ptr<JITThunks> jitStubs;
    MacroAssemblerCodeRef getCTIStub(ThunkGenerator generator)
    {
        return jitStubs->ctiStub(this, generator);
    }
    NativeExecutable* getHostFunction(NativeFunction, Intrinsic);

    std::unique_ptr<ArityCheckFailReturnThunks> arityCheckFailReturnThunks;
#endif // ENABLE(JIT)
    std::unique_ptr<CommonSlowPaths::ArityCheckData> arityCheckData;
#if ENABLE(FTL_JIT)
    std::unique_ptr<FTL::Thunks> ftlThunks;
#endif
    NativeExecutable* getHostFunction(NativeFunction, NativeFunction constructor);

    static ptrdiff_t exceptionOffset()
    {
        return OBJECT_OFFSETOF(VM, m_exception);
    }

    static ptrdiff_t vmEntryFrameForThrowOffset()
    {
        return OBJECT_OFFSETOF(VM, vmEntryFrameForThrow);
    }

    static ptrdiff_t topVMEntryFrameOffset()
    {
        return OBJECT_OFFSETOF(VM, topVMEntryFrame);
    }

    static ptrdiff_t callFrameForThrowOffset()
    {
        return OBJECT_OFFSETOF(VM, callFrameForThrow);
    }

    static ptrdiff_t targetMachinePCForThrowOffset()
    {
        return OBJECT_OFFSETOF(VM, targetMachinePCForThrow);
    }

    void clearException() { m_exception = nullptr; }
    void clearLastException() { m_lastException = nullptr; }

    void setException(Exception* exception)
    {
        m_exception = exception;
        m_lastException = exception;
    }

    Exception* exception() const { return m_exception; }
    JSCell** addressOfException() { return reinterpret_cast<JSCell**>(&m_exception); }

    Exception* lastException() const { return m_lastException; }
    JSCell** addressOfLastException() { return reinterpret_cast<JSCell**>(&m_lastException); }

    JS_EXPORT_PRIVATE void throwException(ExecState*, Exception*);
    JS_EXPORT_PRIVATE JSValue throwException(ExecState*, JSValue);
    JS_EXPORT_PRIVATE JSObject* throwException(ExecState*, JSObject*);
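
    // The typical call-site pattern in this (pre-ThrowScope) codebase: run an
    // operation, then check whether it left an exception pending on the VM.
    // someOperation() is a hypothetical helper used only for illustration:
    //
    //     JSValue result = someOperation(exec);
    //     if (vm.exception())
    //         return jsUndefined(); // propagate: the exception stays set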

    void* stackPointerAtVMEntry() const { return m_stackPointerAtVMEntry; }
    void setStackPointerAtVMEntry(void*);

    size_t reservedZoneSize() const { return m_reservedZoneSize; }
    size_t updateReservedZoneSize(size_t reservedZoneSize);

#if ENABLE(FTL_JIT)
    void updateFTLLargestStackSize(size_t);
    void** addressOfFTLStackLimit() { return &m_ftlStackLimit; }
#endif

#if !ENABLE(JIT)
    void* jsStackLimit() { return m_jsStackLimit; }
    void setJSStackLimit(void* limit) { m_jsStackLimit = limit; }
#endif
    void* stackLimit() { return m_stackLimit; }
    void** addressOfStackLimit() { return &m_stackLimit; }

    bool isSafeToRecurse(size_t neededStackInBytes = 0) const
    {
        ASSERT(wtfThreadData().stack().isGrowingDownward());
        int8_t* curr = reinterpret_cast<int8_t*>(&curr);
        int8_t* limit = reinterpret_cast<int8_t*>(m_stackLimit);
        return curr >= limit && static_cast<size_t>(curr - limit) >= neededStackInBytes;
    }
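
    // Example guard for deeply recursive code such as the parser or the JSON
    // stringifier. The 4 KB headroom value and the throwStackOverflowError()
    // helper (ExceptionHelpers.h) are illustrative assumptions:
    //
    //     if (!vm.isSafeToRecurse(4 * 1024))
    //         return throwStackOverflowError(exec);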

    void* lastStackTop() { return m_lastStackTop; }
    void setLastStackTop(void* lastStackTop) { m_lastStackTop = lastStackTop; }

    const ClassInfo* const jsArrayClassInfo;
    const ClassInfo* const jsFinalObjectClassInfo;

    JSValue hostCallReturnValue;
    unsigned varargsLength;
    ExecState* newCallFrameReturnValue;
    VMEntryFrame* vmEntryFrameForThrow;
    ExecState* callFrameForThrow;
    void* targetMachinePCForThrow;
    Instruction* targetInterpreterPCForThrow;
    uint32_t osrExitIndex;
    void* osrExitJumpDestination;
    Vector<ScratchBuffer*> scratchBuffers;
    size_t sizeOfLastScratchBuffer;

    ScratchBuffer* scratchBufferForSize(size_t size)
    {
        if (!size)
            return nullptr;

        if (size > sizeOfLastScratchBuffer) {
            // Protect against an N^2 memory usage pathology by ensuring
            // that at worst, we get a geometric series, meaning that the
            // total memory usage is somewhere around
            // max(scratch buffer size) * 4.
            sizeOfLastScratchBuffer = size * 2;

            ScratchBuffer* newBuffer = ScratchBuffer::create(sizeOfLastScratchBuffer);
            RELEASE_ASSERT(newBuffer);
            scratchBuffers.append(newBuffer);
        }

        ScratchBuffer* result = scratchBuffers.last();
        result->setActiveLength(0);
        return result;
    }
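
    // Worked example of the growth policy: requests of 10, 50, and 300 bytes
    // allocate buffers of 20, 100, and 600 bytes. Each new buffer is more
    // than double its predecessor, so the whole series sums to under twice
    // the final buffer, i.e. at most 4x the largest requested size.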

    void gatherConservativeRoots(ConservativeRoots&);

    VMEntryScope* entryScope;

    JSObject* stringRecursionCheckFirstObject { nullptr };
    HashSet<JSObject*> stringRecursionCheckVisitedObjects;

    LocalTimeOffsetCache localTimeOffsetCache;

    String cachedDateString;
    double cachedDateStringValue;

    std::unique_ptr<Profiler::Database> m_perBytecodeProfiler;
    RefPtr<TypedArrayController> m_typedArrayController;
    RegExpCache* m_regExpCache;
    BumpPointerAllocator m_regExpAllocator;

#if ENABLE(REGEXP_TRACING)
    typedef ListHashSet<RegExp*> RTTraceList;
    RTTraceList* m_rtTraceList;
#endif

    bool hasExclusiveThread() const { return m_apiLock->hasExclusiveThread(); }
    std::thread::id exclusiveThread() const { return m_apiLock->exclusiveThread(); }
    void setExclusiveThread(std::thread::id threadId) { m_apiLock->setExclusiveThread(threadId); }

    JS_EXPORT_PRIVATE void resetDateCache();

    JS_EXPORT_PRIVATE void startSampling();
    JS_EXPORT_PRIVATE void stopSampling();
    JS_EXPORT_PRIVATE void dumpSampleData(ExecState*);
    RegExpCache* regExpCache() { return m_regExpCache; }
#if ENABLE(REGEXP_TRACING)
    void addRegExpToTrace(RegExp*);
#endif
    JS_EXPORT_PRIVATE void dumpRegExpTrace();

    bool isCollectorBusy() { return heap.isBusy(); }
    JS_EXPORT_PRIVATE void releaseExecutableMemory();

#if ENABLE(GC_VALIDATION)
    bool isInitializingObject() const;
    void setInitializingObjectClass(const ClassInfo*);
#endif

    unsigned m_newStringsSinceLastHashCons;

    static const unsigned s_minNumberOfNewStringsToHashCons = 100;

    bool haveEnoughNewStringsToHashCons() { return m_newStringsSinceLastHashCons > s_minNumberOfNewStringsToHashCons; }
    void resetNewStringsSinceLastHashCons() { m_newStringsSinceLastHashCons = 0; }

    bool currentThreadIsHoldingAPILock() const { return m_apiLock->currentThreadIsHoldingLock(); }

    JSLock& apiLock() { return *m_apiLock; }
    CodeCache* codeCache() { return m_codeCache.get(); }

    void prepareToDiscardCode();

    JS_EXPORT_PRIVATE void discardAllCode();

    void registerWatchpointForImpureProperty(const Identifier&, Watchpoint*);
    // FIXME: Use AtomicString once it is merged with Identifier.
    JS_EXPORT_PRIVATE void addImpureProperty(const String&);
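
    // Sketch of the intended flow: an "impure" property is one whose value
    // can change behind the VM's back (a custom DOM getter, for instance).
    // Callers register a watchpoint for the property name, and a later
    // addImpureProperty() call fires every watchpoint registered for it.
    // The name "nodeType" below is purely illustrative:
    //
    //     vm.registerWatchpointForImpureProperty(identifier, watchpoint);
    //     // ... later, when the property's behavior changes:
    //     vm.addImpureProperty("nodeType");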

    BuiltinExecutables* builtinExecutables() { return m_builtinExecutables.get(); }

    bool enableTypeProfiler();
    bool disableTypeProfiler();
    TypeProfilerLog* typeProfilerLog() { return m_typeProfilerLog.get(); }
    TypeProfiler* typeProfiler() { return m_typeProfiler.get(); }
    JS_EXPORT_PRIVATE void dumpTypeProfilerData();

    FunctionHasExecutedCache* functionHasExecutedCache() { return &m_functionHasExecutedCache; }

    ControlFlowProfiler* controlFlowProfiler() { return m_controlFlowProfiler.get(); }
    bool enableControlFlowProfiler();
    bool disableControlFlowProfiler();

private:
    friend class LLIntOffsetsExtractor;
    friend class ClearExceptionScope;
    friend class RecursiveAllocationScope;

    VM(VMType, HeapType);
    static VM*& sharedInstanceInternal();
    void createNativeThunk();

    void updateStackLimit();

#if ENABLE(ASSEMBLER)
    bool m_canUseAssembler;
#endif
#if ENABLE(JIT)
    bool m_canUseJIT;
#endif
#if ENABLE(YARR_JIT)
    bool m_canUseRegExpJIT;
#endif
#if ENABLE(GC_VALIDATION)
    const ClassInfo* m_initializingObjectClass;
#endif
    void* m_stackPointerAtVMEntry;
    size_t m_reservedZoneSize;
#if !ENABLE(JIT)
    struct {
        void* m_stackLimit;
        void* m_jsStackLimit;
    };
#else
    union {
        void* m_stackLimit;
        void* m_jsStackLimit;
    };
#if ENABLE(FTL_JIT)
    void* m_ftlStackLimit;
    size_t m_largestFTLStackSize;
#endif
#endif
    void* m_lastStackTop;
    Exception* m_exception { nullptr };
    Exception* m_lastException { nullptr };
    bool m_inDefineOwnProperty;
    std::unique_ptr<CodeCache> m_codeCache;
    LegacyProfiler* m_enabledProfiler;
    std::unique_ptr<BuiltinExecutables> m_builtinExecutables;
    HashMap<String, RefPtr<WatchpointSet>> m_impurePropertyWatchpointSets;
    std::unique_ptr<TypeProfiler> m_typeProfiler;
    std::unique_ptr<TypeProfilerLog> m_typeProfilerLog;
    unsigned m_typeProfilerEnabledCount;
    FunctionHasExecutedCache m_functionHasExecutedCache;
    std::unique_ptr<ControlFlowProfiler> m_controlFlowProfiler;
    unsigned m_controlFlowProfilerEnabledCount;
};

#if ENABLE(GC_VALIDATION)
inline bool VM::isInitializingObject() const
{
    return !!m_initializingObjectClass;
}

inline void VM::setInitializingObjectClass(const ClassInfo* initializingObjectClass)
{
    m_initializingObjectClass = initializingObjectClass;
}
#endif

inline Heap* WeakSet::heap() const
{
    return &m_vm->heap;
}

#if ENABLE(JIT)
extern "C" void sanitizeStackForVMImpl(VM*);
#endif

void sanitizeStackForVM(VM*);
void logSanitizeStack(VM*);

} // namespace JSC

#endif // VM_h