/*
 * Copyright (C) 2008, 2009, 2013 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
 *    its contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef VM_h
#define VM_h

#include "CachedTranscendentalFunction.h"
#include "DateInstanceCache.h"
#include "ExecutableAllocator.h"
#include "Heap.h"
#include "Intrinsic.h"
#include "JITThunks.h"
#include "JSCJSValue.h"
#include "JSLock.h"
#include "LLIntData.h"
#include "MacroAssemblerCodeRef.h"
#include "NumericStrings.h"
#include "PrivateName.h"
#include "ProfilerDatabase.h"
#include "PrototypeMap.h"
#include "SmallStrings.h"
#include "Strong.h"
#include "ThunkGenerators.h"
#include "TypedArrayDescriptor.h"
#include "Watchdog.h"
#include "WeakRandom.h"
#include <wtf/BumpPointerAllocator.h>
#include <wtf/DateMath.h>
#include <wtf/Forward.h>
#include <wtf/HashMap.h>
#include <wtf/RefCountedArray.h>
#include <wtf/SimpleStats.h>
#include <wtf/ThreadSafeRefCounted.h>
#include <wtf/ThreadSpecific.h>
#include <wtf/WTFThreadData.h>
#if ENABLE(REGEXP_TRACING)
#include <wtf/ListHashSet.h>
#endif

namespace JSC {

class CodeBlock;
class CodeCache;
class CommonIdentifiers;
class ExecState;
class HandleStack;
class IdentifierTable;
class Interpreter;
class JSGlobalObject;
class JSObject;
class Keywords;
class LLIntOffsetsExtractor;
class LegacyProfiler;
class NativeExecutable;
class ParserArena;
class RegExpCache;
class SourceProvider;
class SourceProviderCache;
struct StackFrame;
class Stringifier;
class Structure;
#if ENABLE(REGEXP_TRACING)
class RegExp;
#endif
class UnlinkedCodeBlock;
class UnlinkedEvalCodeBlock;
class UnlinkedFunctionExecutable;
class UnlinkedProgramCodeBlock;

#if ENABLE(DFG_JIT)
namespace DFG {
class LongLivedState;
}
#endif // ENABLE(DFG_JIT)

struct HashTable;
struct Instruction;

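// Caches the most recently computed local time zone offset: 'offset' is taken
// to be valid for timestamps in the range [start, end], which the date code
// grows in 'increment'-sized steps so nearby dates avoid recomputing it.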
struct LocalTimeOffsetCache {
    LocalTimeOffsetCache()
        : start(0.0)
        , end(-1.0)
        , increment(0.0)
    {
    }

    void reset()
    {
        offset = LocalTimeOffset();
        start = 0.0;
        end = -1.0;
        increment = 0.0;
    }

    LocalTimeOffset offset;
    double start;
    double end;
    double increment;
};

#if ENABLE(DFG_JIT)
class ConservativeRoots;

#if COMPILER(MSVC)
#pragma warning(push)
#pragma warning(disable: 4200) // Disable "zero-sized array in struct/union" warning
#endif
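// A ScratchBuffer is a single variable-sized, fastMalloc'd block used by
// DFG-generated code: the header below is followed immediately by the payload
// (m_buffer is a zero-length trailing array). activeLength records how many
// payload bytes currently hold live values so the garbage collector can scan
// them conservatively (see gatherConservativeRoots() below).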
struct ScratchBuffer {
    ScratchBuffer()
    {
        u.m_activeLength = 0;
    }

    static ScratchBuffer* create(size_t size)
    {
        ScratchBuffer* result = new (fastMalloc(ScratchBuffer::allocationSize(size))) ScratchBuffer;

        return result;
    }

    static size_t allocationSize(size_t bufferSize) { return sizeof(ScratchBuffer) + bufferSize; }
    void setActiveLength(size_t activeLength) { u.m_activeLength = activeLength; }
    size_t activeLength() const { return u.m_activeLength; };
    size_t* activeLengthPtr() { return &u.m_activeLength; };
    void* dataBuffer() { return m_buffer; }

    union {
        size_t m_activeLength;
        double pad; // Make sure m_buffer is double aligned.
    } u;
#if CPU(MIPS) && (defined WTF_MIPS_ARCH_REV && WTF_MIPS_ARCH_REV == 2)
    void* m_buffer[0] __attribute__((aligned(8)));
#else
    void* m_buffer[0];
#endif
};
#if COMPILER(MSVC)
#pragma warning(pop)
#endif
#endif

class VM : public ThreadSafeRefCounted<VM> {
public:
    // WebCore has a one-to-one mapping of threads to VMs;
    // either create() or createLeaked() should only be called once
    // per thread; the result is that thread's 'default' VM (it uses the
    // thread's default string uniquing table from wtfThreadData).
    // API contexts created using the new context-group-aware interface
    // create APIContextGroup objects, which require less locking of JSC
    // than the old singleton APIShared VM created for use by
    // the original API.
    enum VMType { Default, APIContextGroup, APIShared };

    struct ClientData {
        JS_EXPORT_PRIVATE virtual ~ClientData() = 0;
    };

    bool isSharedInstance() { return vmType == APIShared; }
    bool usingAPI() { return vmType != Default; }
#if PLATFORM(IOS)
    JS_EXPORT_PRIVATE static bool sharedInstanceExists();
#else
    static bool sharedInstanceExists();
#endif
    JS_EXPORT_PRIVATE static VM& sharedInstance();

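    // Rough usage sketch of the factory functions declared below (illustrative
    // only; assumes the embedder holds the JSLock while using the VM):
    //
    //     RefPtr<VM> vm = VM::create(LargeHeap);
    //     {
    //         JSLockHolder locker(vm.get());
    //         // ... create a JSGlobalObject in this VM and evaluate code ...
    //     }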
    JS_EXPORT_PRIVATE static PassRefPtr<VM> create(HeapType = SmallHeap);
    JS_EXPORT_PRIVATE static PassRefPtr<VM> createLeaked(HeapType = SmallHeap);
    static PassRefPtr<VM> createContextGroup(HeapType = SmallHeap);
    JS_EXPORT_PRIVATE ~VM();

    void makeUsableFromMultipleThreads() { heap.machineThreads().makeUsableFromMultipleThreads(); }

private:
    RefPtr<JSLock> m_apiLock;

public:
#if ENABLE(ASSEMBLER)
    // executableAllocator should be destructed after the heap, as the heap can call executableAllocator
    // in its destructor.
    ExecutableAllocator executableAllocator;
#endif

    // The heap should be just after executableAllocator and before other members to ensure that it's
    // destructed after all the objects that reference it.
    Heap heap;

#if ENABLE(DFG_JIT)
    OwnPtr<DFG::LongLivedState> m_dfgState;
#endif // ENABLE(DFG_JIT)

    VMType vmType;
    ClientData* clientData;
    ExecState* topCallFrame;
    Watchdog watchdog;

    const HashTable* arrayConstructorTable;
    const HashTable* arrayPrototypeTable;
    const HashTable* booleanPrototypeTable;
    const HashTable* dateTable;
    const HashTable* dateConstructorTable;
    const HashTable* errorPrototypeTable;
    const HashTable* globalObjectTable;
    const HashTable* jsonTable;
    const HashTable* mathTable;
    const HashTable* numberConstructorTable;
    const HashTable* numberPrototypeTable;
    const HashTable* objectConstructorTable;
    const HashTable* privateNamePrototypeTable;
    const HashTable* regExpTable;
    const HashTable* regExpConstructorTable;
    const HashTable* regExpPrototypeTable;
    const HashTable* stringConstructorTable;

    Strong<Structure> structureStructure;
    Strong<Structure> structureRareDataStructure;
    Strong<Structure> debuggerActivationStructure;
    Strong<Structure> terminatedExecutionErrorStructure;
    Strong<Structure> stringStructure;
    Strong<Structure> notAnObjectStructure;
    Strong<Structure> propertyNameIteratorStructure;
    Strong<Structure> getterSetterStructure;
    Strong<Structure> apiWrapperStructure;
    Strong<Structure> JSScopeStructure;
    Strong<Structure> executableStructure;
    Strong<Structure> nativeExecutableStructure;
    Strong<Structure> evalExecutableStructure;
    Strong<Structure> programExecutableStructure;
    Strong<Structure> functionExecutableStructure;
    Strong<Structure> regExpStructure;
    Strong<Structure> sharedSymbolTableStructure;
    Strong<Structure> structureChainStructure;
    Strong<Structure> sparseArrayValueMapStructure;
    Strong<Structure> withScopeStructure;
    Strong<Structure> unlinkedFunctionExecutableStructure;
    Strong<Structure> unlinkedProgramCodeBlockStructure;
    Strong<Structure> unlinkedEvalCodeBlockStructure;
    Strong<Structure> unlinkedFunctionCodeBlockStructure;
    Strong<Structure> propertyTableStructure;

    IdentifierTable* identifierTable;
    CommonIdentifiers* propertyNames;
    const MarkedArgumentBuffer* emptyList; // Lists are supposed to be allocated on the stack to have their elements properly marked, which is not the case here - but this list has nothing to mark.
    SmallStrings smallStrings;
    NumericStrings numericStrings;
    DateInstanceCache dateInstanceCache;
    WTF::SimpleStats machineCodeBytesPerBytecodeWordForBaselineJIT;
    Vector<CodeBlock*> codeBlocksBeingCompiled;
    void startedCompiling(CodeBlock* codeBlock)
    {
        codeBlocksBeingCompiled.append(codeBlock);
    }

    void finishedCompiling(CodeBlock* codeBlock)
    {
        ASSERT_UNUSED(codeBlock, codeBlock == codeBlocksBeingCompiled.last());
        codeBlocksBeingCompiled.removeLast();
    }

    void setInDefineOwnProperty(bool inDefineOwnProperty)
    {
        m_inDefineOwnProperty = inDefineOwnProperty;
    }

    bool isInDefineOwnProperty()
    {
        return m_inDefineOwnProperty;
    }

    LegacyProfiler* enabledProfiler()
    {
        return m_enabledProfiler;
    }

#if ENABLE(JIT) && ENABLE(LLINT)
    bool canUseJIT() { return m_canUseJIT; }
#elif ENABLE(JIT)
    bool canUseJIT() { return true; } // jit only
#else
    bool canUseJIT() { return false; } // interpreter only
#endif

#if ENABLE(YARR_JIT)
    bool canUseRegExpJIT() { return m_canUseRegExpJIT; }
#else
    bool canUseRegExpJIT() { return false; } // interpreter only
#endif

    SourceProviderCache* addSourceProviderCache(SourceProvider*);
    void clearSourceProviderCaches();

    PrototypeMap prototypeMap;

    OwnPtr<ParserArena> parserArena;
    typedef HashMap<RefPtr<SourceProvider>, RefPtr<SourceProviderCache> > SourceProviderCacheMap;
    SourceProviderCacheMap sourceProviderCacheMap;
    OwnPtr<Keywords> keywords;
    Interpreter* interpreter;
#if ENABLE(JIT)
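    // jitStubs caches the machine-code thunks produced by the ThunkGenerators,
    // keyed by generator, so getCTIStub() compiles each thunk at most once per VM.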
    OwnPtr<JITThunks> jitStubs;
    MacroAssemblerCodeRef getCTIStub(ThunkGenerator generator)
    {
        return jitStubs->ctiStub(this, generator);
    }
    NativeExecutable* getHostFunction(NativeFunction, Intrinsic);
#endif
    NativeExecutable* getHostFunction(NativeFunction, NativeFunction constructor);

    JSValue exception;
    JS_EXPORT_PRIVATE void clearExceptionStack();
    RefCountedArray<StackFrame>& exceptionStack() { return m_exceptionStack; }

    const ClassInfo* const jsArrayClassInfo;
    const ClassInfo* const jsFinalObjectClassInfo;

    ReturnAddressPtr exceptionLocation;
    JSValue hostCallReturnValue;
    ExecState* callFrameForThrow;
    void* targetMachinePCForThrow;
    Instruction* targetInterpreterPCForThrow;
#if ENABLE(DFG_JIT)
    uint32_t osrExitIndex;
    void* osrExitJumpDestination;
    Vector<ScratchBuffer*> scratchBuffers;
    size_t sizeOfLastScratchBuffer;

    ScratchBuffer* scratchBufferForSize(size_t size)
    {
        if (!size)
            return 0;

        if (size > sizeOfLastScratchBuffer) {
            // Protect against an N^2 memory usage pathology by ensuring
            // that, at worst, we get a geometric series, meaning that the
            // total memory usage is somewhere around
            // max(scratch buffer size) * 4.
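            // For example, requests of 100, 150 and 300 bytes allocate
            // buffers of 200 and 600 bytes; the 150-byte request is served
            // by the existing 200-byte buffer.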
            sizeOfLastScratchBuffer = size * 2;

            scratchBuffers.append(ScratchBuffer::create(sizeOfLastScratchBuffer));
        }

        ScratchBuffer* result = scratchBuffers.last();
        result->setActiveLength(0);
        return result;
    }

    void gatherConservativeRoots(ConservativeRoots&);
#endif

    JSGlobalObject* dynamicGlobalObject;

    HashSet<JSObject*> stringRecursionCheckVisitedObjects;

    LocalTimeOffsetCache localTimeOffsetCache;

    String cachedDateString;
    double cachedDateStringValue;

    LegacyProfiler* m_enabledProfiler;
    OwnPtr<Profiler::Database> m_perBytecodeProfiler;
    RegExpCache* m_regExpCache;
    BumpPointerAllocator m_regExpAllocator;

#if ENABLE(REGEXP_TRACING)
    typedef ListHashSet<RefPtr<RegExp> > RTTraceList;
    RTTraceList* m_rtTraceList;
#endif

#ifndef NDEBUG
    ThreadIdentifier exclusiveThread;
#endif

    CachedTranscendentalFunction<std::sin> cachedSin;

    JS_EXPORT_PRIVATE void resetDateCache();

    JS_EXPORT_PRIVATE void startSampling();
    JS_EXPORT_PRIVATE void stopSampling();
    JS_EXPORT_PRIVATE void dumpSampleData(ExecState* exec);
    RegExpCache* regExpCache() { return m_regExpCache; }
#if ENABLE(REGEXP_TRACING)
    void addRegExpToTrace(PassRefPtr<RegExp> regExp);
#endif
    JS_EXPORT_PRIVATE void dumpRegExpTrace();

    bool isCollectorBusy() { return heap.isBusy(); }
    JS_EXPORT_PRIVATE void releaseExecutableMemory();

#if ENABLE(GC_VALIDATION)
    bool isInitializingObject() const;
    void setInitializingObjectClass(const ClassInfo*);
#endif

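    // Bookkeeping for string hash consing: the heap deduplicates equal strings
    // during marking only when enough new strings have accumulated since the
    // last pass to make it worthwhile (see haveEnoughNewStringsToHashCons()).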
    unsigned m_newStringsSinceLastHashCons;

    static const unsigned s_minNumberOfNewStringsToHashCons = 100;

    bool haveEnoughNewStringsToHashCons() { return m_newStringsSinceLastHashCons > s_minNumberOfNewStringsToHashCons; }
    void resetNewStringsSinceLastHashCons() { m_newStringsSinceLastHashCons = 0; }

#define registerTypedArrayFunction(type, capitalizedType) \
    void registerTypedArrayDescriptor(const capitalizedType##Array*, const TypedArrayDescriptor& descriptor) \
    { \
        ASSERT(!m_##type##ArrayDescriptor.m_classInfo || m_##type##ArrayDescriptor.m_classInfo == descriptor.m_classInfo); \
        m_##type##ArrayDescriptor = descriptor; \
        ASSERT(m_##type##ArrayDescriptor.m_classInfo); \
    } \
    const TypedArrayDescriptor& type##ArrayDescriptor() const { ASSERT(m_##type##ArrayDescriptor.m_classInfo); return m_##type##ArrayDescriptor; }

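    // Each invocation below defines a setter/getter pair for one typed array
    // flavor; e.g. registerTypedArrayFunction(int8, Int8) expands to
    // registerTypedArrayDescriptor(const Int8Array*, const TypedArrayDescriptor&)
    // and int8ArrayDescriptor().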
    registerTypedArrayFunction(int8, Int8);
    registerTypedArrayFunction(int16, Int16);
    registerTypedArrayFunction(int32, Int32);
    registerTypedArrayFunction(uint8, Uint8);
    registerTypedArrayFunction(uint8Clamped, Uint8Clamped);
    registerTypedArrayFunction(uint16, Uint16);
    registerTypedArrayFunction(uint32, Uint32);
    registerTypedArrayFunction(float32, Float32);
    registerTypedArrayFunction(float64, Float64);
#undef registerTypedArrayFunction

    const TypedArrayDescriptor* typedArrayDescriptor(TypedArrayType type) const
    {
        switch (type) {
        case TypedArrayNone:
            return 0;
        case TypedArrayInt8:
            return &int8ArrayDescriptor();
        case TypedArrayInt16:
            return &int16ArrayDescriptor();
        case TypedArrayInt32:
            return &int32ArrayDescriptor();
        case TypedArrayUint8:
            return &uint8ArrayDescriptor();
        case TypedArrayUint8Clamped:
            return &uint8ClampedArrayDescriptor();
        case TypedArrayUint16:
            return &uint16ArrayDescriptor();
        case TypedArrayUint32:
            return &uint32ArrayDescriptor();
        case TypedArrayFloat32:
            return &float32ArrayDescriptor();
        case TypedArrayFloat64:
            return &float64ArrayDescriptor();
        default:
            CRASH();
            return 0;
        }
    }

    JSLock& apiLock() { return *m_apiLock; }
    CodeCache* codeCache() { return m_codeCache.get(); }

    JS_EXPORT_PRIVATE void discardAllCode();

private:
    friend class LLIntOffsetsExtractor;

    VM(VMType, HeapType);
    static VM*& sharedInstanceInternal();
    void createNativeThunk();
#if ENABLE(ASSEMBLER)
    bool m_canUseAssembler;
#endif
#if ENABLE(JIT)
    bool m_canUseJIT;
#endif
#if ENABLE(YARR_JIT)
    bool m_canUseRegExpJIT;
#endif
#if ENABLE(GC_VALIDATION)
    const ClassInfo* m_initializingObjectClass;
#endif
    bool m_inDefineOwnProperty;
    RefPtr<CodeCache> m_codeCache;
    RefCountedArray<StackFrame> m_exceptionStack;

    TypedArrayDescriptor m_int8ArrayDescriptor;
    TypedArrayDescriptor m_int16ArrayDescriptor;
    TypedArrayDescriptor m_int32ArrayDescriptor;
    TypedArrayDescriptor m_uint8ArrayDescriptor;
    TypedArrayDescriptor m_uint8ClampedArrayDescriptor;
    TypedArrayDescriptor m_uint16ArrayDescriptor;
    TypedArrayDescriptor m_uint32ArrayDescriptor;
    TypedArrayDescriptor m_float32ArrayDescriptor;
    TypedArrayDescriptor m_float64ArrayDescriptor;
};

#if ENABLE(GC_VALIDATION)
inline bool VM::isInitializingObject() const
{
    return !!m_initializingObjectClass;
}

inline void VM::setInitializingObjectClass(const ClassInfo* initializingObjectClass)
{
    m_initializingObjectClass = initializingObjectClass;
}
#endif

inline Heap* WeakSet::heap() const
{
    return &m_vm->heap;
}

} // namespace JSC

#endif // VM_h