2 * Copyright (C) 2007, 2008, 2012-2014 Apple Inc. All rights reserved.
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
8 * 1. Redistributions of source code must retain the above copyright
9 * notice, this list of conditions and the following disclaimer.
10 * 2. Redistributions in binary form must reproduce the above copyright
11 * notice, this list of conditions and the following disclaimer in the
12 * documentation and/or other materials provided with the distribution.
13 * 3. Neither the name of Apple Inc. ("Apple") nor the names of
14 * its contributors may be used to endorse or promote products derived
15 * from this software without specific prior written permission.
17 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
18 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
19 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
20 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
21 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
22 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
23 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
24 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
26 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
32 #include "ConcurrentJITLock.h"
34 #include "VariableWatchpointSet.h"
36 #include <wtf/HashTraits.h>
37 #include <wtf/text/StringImpl.h>
// NOTE(review): extraction artifact — the original file's line numbers ("56",
// "59", ...) are fused into the text and several source lines are missing from
// this chunk. Code tokens are preserved verbatim; only comments are added.
// Tail of a struct's field list (presumably SlowArgument — the declaration and
// the 'status' field referred to below are not visible here; TODO confirm
// against the full file).
56 int index
; // If status is 'Deleted', index is bogus.
// Sentinel register index meaning "symbol not present". INT_MAX cannot collide
// with any real packed index: isValidIndex() (further down) rejects indices
// whose top FlagBits bits carry information, and INT_MAX fails that round-trip.
59 static ALWAYS_INLINE
int missingSymbolMarker() { return std::numeric_limits
<int>::max(); }
61 // The bit twiddling in this class assumes that every register index is a
62 // reasonably small positive or negative number, and therefore has its high
63 // four bits all set or all unset.
65 // In addition to implementing semantics-mandated variable attributes and
66 // implementation-mandated variable indexing, this class also implements
67 // watchpoints to be used for JIT optimizations. Because watchpoints are
68 // meant to be relatively rare, this class optimizes heavily for the case
69 // that they are not being used. To that end, this class uses the thin-fat
70 // idiom: either it is thin, in which case it contains an in-place encoded
71 // word that consists of attributes, the index, and a bit saying that it is
72 // thin; or it is fat, in which case it contains a pointer to a malloc'd
73 // data structure and a bit saying that it is fat. The malloc'd data
74 // structure will be malloced a second time upon copy, to preserve the
75 // property that in-place edits to SymbolTableEntry do not manifest in any
76 // copies. However, the malloc'd FatEntry data structure contains a ref-
77 // counted pointer to a shared WatchpointSet. Thus, in-place edits of the
78 // WatchpointSet will manifest in all copies. Here's a picture:
80 // SymbolTableEntry --> FatEntry --> VariableWatchpointSet
82 // If you make a copy of a SymbolTableEntry, you will have:
84 // original: SymbolTableEntry --> FatEntry --> VariableWatchpointSet
85 // copy: SymbolTableEntry --> FatEntry -----^
// NOTE(review): extraction artifact — the original file's line numbers are
// fused into the text below, and many interior lines are missing from this
// chunk (method signatures, braces, access specifiers, the attribute enum,
// the slim fast path of bits()/isNull()/getIndex(), FatEntry's class header,
// inflate(), freeFatEntry(), and the struct's closing brace). Code tokens are
// preserved verbatim; only comments are added. Do not hand-edit the fragments
// below without diffing against the complete upstream file.
87 struct SymbolTableEntry
{
// ---- Nested class Fast: a slim-only snapshot of an entry's bits. ----
88 // Use the SymbolTableEntry::Fast class, either via implicit cast or by calling
89 // getFast(), when you (1) only care about isNull(), getIndex(), and isReadOnly(),
90 // and (2) you are in a hot path where you need to minimize the number of times
91 // that you branch on isFat() when getting the bits().
// Fast's converting constructor: snapshots the (possibly fat) entry into a
// single word via entry.bits(). (Fast's class header / default constructor
// are among the missing lines.)
99 ALWAYS_INLINE
Fast(const SymbolTableEntry
& entry
)
100 : m_bits(entry
.bits())
// isNull(): true when nothing but the slim tag bit is set — i.e. no index
// was ever packed in. (Signature line missing.)
106 return !(m_bits
& ~SlimFlag
);
// getIndex(): recover the signed register index by shifting out the low
// FlagBits tag/attribute bits. (Signature line missing.)
111 return static_cast<int>(m_bits
>> FlagBits
);
// isReadOnly(): tests the ReadOnlyFlag attribute bit.
114 bool isReadOnly() const
116 return m_bits
& ReadOnlyFlag
;
// getAttributes(): expand the packed ReadOnlyFlag/DontEnumFlag bits into the
// public ReadOnly/DontEnum attribute mask. (The return statement and closing
// brace are among the missing lines.)
119 unsigned getAttributes() const
121 unsigned attributes
= 0;
122 if (m_bits
& ReadOnlyFlag
)
123 attributes
|= ReadOnly
;
124 if (m_bits
& DontEnumFlag
)
125 attributes
|= DontEnum
;
// isFat() (signature missing): a Fast snapshot taken from a fat entry keeps
// the SlimFlag *clear* — see getFast(bool&) below, which ORs SlimFlag back in
// for the slim representation.
131 return !(m_bits
& SlimFlag
);
// SymbolTableEntry may poke at Fast::m_bits directly (see getFast(bool&)).
135 friend struct SymbolTableEntry
;
// ---- SymbolTableEntry proper. ----
// Construct from an index only: packs with neither ReadOnly nor DontEnum.
144 SymbolTableEntry(int index
)
147 ASSERT(isValidIndex(index
));
148 pack(index
, false, false);
// Construct from an index plus an attribute mask (ReadOnly/DontEnum honored).
151 SymbolTableEntry(int index
, unsigned attributes
)
154 ASSERT(isValidIndex(index
));
155 pack(index
, attributes
& ReadOnly
, attributes
& DontEnum
);
// Copy constructor (body missing from this chunk). Per the file comment above,
// copying a fat entry re-mallocs the FatEntry so in-place edits don't leak
// into copies, while the WatchpointSet stays shared.
163 SymbolTableEntry(const SymbolTableEntry
& other
)
// Copy assignment: slim source is a plain word copy; fat source takes the
// out-of-line copySlow() path that clones the FatEntry.
169 SymbolTableEntry
& operator=(const SymbolTableEntry
& other
)
171 if (UNLIKELY(other
.isFat()))
172 return copySlow(other
);
174 m_bits
= other
.m_bits
;
// isNull() (signature missing): same test as Fast::isNull(), but via bits()
// so it works for fat entries too.
180 return !(bits() & ~SlimFlag
);
// getIndex() (signature missing): same shift as Fast::getIndex(), via bits().
185 return static_cast<int>(bits() >> FlagBits
);
// getFast(): snapshot without reporting fatness (body missing).
188 ALWAYS_INLINE Fast
getFast() const
// getFast(wasFat): snapshot that also reports whether this entry was fat.
// For a fat entry the snapshot is fatEntry()->m_bits with SlimFlag forced on,
// so the Fast accessors behave as if the entry were slim. (The branch
// structure and 'bool wasFat = ...' lines are missing.)
193 ALWAYS_INLINE Fast
getFast(bool& wasFat
) const
198 result
.m_bits
= fatEntry()->m_bits
| SlimFlag
;
200 result
.m_bits
= m_bits
;
// getAttributes(): delegate to the Fast snapshot's expansion.
204 unsigned getAttributes() const
206 return getFast().getAttributes();
// setAttributes(): re-pack the existing index with the new attribute mask.
209 void setAttributes(unsigned attributes
)
211 pack(getIndex(), attributes
& ReadOnly
, attributes
& DontEnum
);
// isReadOnly(): attribute test via bits(), valid for slim and fat entries.
214 bool isReadOnly() const
216 return bits() & ReadOnlyFlag
;
// Watchpoint-related API (out-of-line; declarations only).
219 JSValue
inferredValue();
221 void prepareToWatch(SymbolTable
*);
223 void addWatchpoint(Watchpoint
*);
// watchpointSet(): only a fat entry carries a watchpoint set; the guard
// (presumably an isFat() check returning null/empty otherwise — missing from
// this chunk) precedes this return.
225 VariableWatchpointSet
* watchpointSet()
229 return fatEntry()->m_watchpoints
.get();
// notifyWrite(): hot-path no-op for slim entries (no watchpoints to fire);
// fat entries go to the exported slow path.
232 ALWAYS_INLINE
void notifyWrite(VM
& vm
, JSValue value
)
234 if (LIKELY(!isFat()))
236 notifyWriteSlow(vm
, value
);
// ---- Bit layout of the packed word (private section; specifier missing). ----
// Low 4 bits are tags/attributes, the rest is the signed index << FlagBits.
240 static const intptr_t SlimFlag
= 0x1;
241 static const intptr_t ReadOnlyFlag
= 0x2;
242 static const intptr_t DontEnumFlag
= 0x4;
243 static const intptr_t NotNullFlag
= 0x8;
244 static const intptr_t FlagBits
= 4;
// ---- Nested class FatEntry: the malloc'd out-of-line representation. ----
// (Class header missing from this chunk.)
247 WTF_MAKE_FAST_ALLOCATED
;
// FatEntry's constructor stores the entry's bits with SlimFlag stripped —
// a fat entry is distinguished by SlimFlag being clear (see isFat() below).
249 FatEntry(intptr_t bits
)
250 : m_bits(bits
& ~SlimFlag
)
254 intptr_t m_bits
; // always has FatFlag set and exactly matches what the bits would have been if this wasn't fat.
// Ref-counted, shared across copies of the owning SymbolTableEntry (see the
// file comment above: in-place WatchpointSet edits manifest in all copies).
256 RefPtr
<VariableWatchpointSet
> m_watchpoints
;
// Out-of-line slow paths.
259 SymbolTableEntry
& copySlow(const SymbolTableEntry
&);
260 JS_EXPORT_PRIVATE
void notifyWriteSlow(VM
&, JSValue
);
// isFat() (signature missing): fat iff SlimFlag is clear in the raw word
// (which then actually holds a FatEntry*).
264 return !(m_bits
& SlimFlag
);
// fatEntry() const: reinterpret the raw word as a FatEntry pointer.
// (The ASSERT(isFat()) that presumably precedes this is missing.)
267 const FatEntry
* fatEntry() const
270 return bitwise_cast
<const FatEntry
*>(m_bits
);
// Non-const fatEntry() (signature missing): same cast, mutable flavor.
276 return bitwise_cast
<FatEntry
*>(m_bits
);
// inflate() (signature and fast-path lines missing): falls through to the
// allocating slow path when the entry is still slim.
283 return inflateSlow();
286 FatEntry
* inflateSlow();
// bits() const: fat path reads through the FatEntry; the slim path (returning
// m_bits directly) is among the missing lines.
288 ALWAYS_INLINE
intptr_t bits() const
291 return fatEntry()->m_bits
;
// Mutable bits(): same shape as the const overload.
295 ALWAYS_INLINE
intptr_t& bits()
298 return fatEntry()->m_bits
;
// freeFatEntry() fast path (enclosing signature missing): nothing to free
// when slim; otherwise the exported slow path deletes the FatEntry.
304 if (LIKELY(!isFat()))
309 JS_EXPORT_PRIVATE
void freeFatEntrySlow();
// pack(): encode index/attributes into the current storage (slim or fat, via
// the bits() reference), always setting NotNullFlag | SlimFlag. The 'if'
// lines guarding the two |= statements are missing from this chunk.
311 void pack(int index
, bool readOnly
, bool dontEnum
)
314 intptr_t& bitsRef
= bits();
315 bitsRef
= (static_cast<intptr_t>(index
) << FlagBits
) | NotNullFlag
| SlimFlag
;
317 bitsRef
|= ReadOnlyFlag
;
319 bitsRef
|= DontEnumFlag
;
// isValidIndex(): the index survives a round-trip through the FlagBits shift,
// i.e. its top FlagBits bits are pure sign extension (see the "bit twiddling"
// comment near the top of the file).
322 bool isValidIndex(int index
)
324 return ((static_cast<intptr_t>(index
) << FlagBits
) >> FlagBits
) == static_cast<intptr_t>(index
);
// Hash traits for SymbolTableEntry values: an entry may own a malloc'd
// FatEntry (see above), so the hash table must run destructors on removal
// and teardown rather than treating values as trivially destructible.
// NOTE(review): the struct's closing brace (original line ~332) is missing
// from this extraction; code tokens are preserved verbatim.
330 struct SymbolTableIndexHashTraits
: HashTraits
<SymbolTableEntry
> {
331 static const bool needsDestruction
= true;
// A GC-managed (JSCell) map from variable names (StringImpl*) to
// SymbolTableEntry, guarded by a ConcurrentJITLock so compiler threads can
// read it concurrently with the main thread.
// NOTE(review): extraction artifact — the original file's line numbers are
// fused into the text and many interior lines are missing (access specifiers,
// braces, several member declarations such as m_captureStart/m_captureEnd and
// m_map, and the class's closing brace). Code tokens are preserved verbatim;
// only comments are added.
334 class SymbolTable
: public JSCell
{
// The underlying map type; SymbolTableIndexHashTraits makes the table run
// SymbolTableEntry destructors.
338 typedef HashMap
<RefPtr
<StringImpl
>, SymbolTableEntry
, IdentifierRepHash
, HashTraits
<RefPtr
<StringImpl
>>, SymbolTableIndexHashTraits
> Map
;
// GC-style factory: placement-allocate the cell in the VM's heap, then run
// finishCreation. (The 'return symbolTable;' line is missing from this chunk.)
340 static SymbolTable
* create(VM
& vm
)
342 SymbolTable
* symbolTable
= new (NotNull
, allocateCell
<SymbolTable
>(vm
.heap
)) SymbolTable(vm
);
343 symbolTable
->finishCreation(vm
);
// Cell lifetime traits: this cell needs its destructor run, and its Structure
// is immortal.
346 static const bool needsDestruction
= true;
347 static const bool hasImmortalStructure
= true;
348 static void destroy(JSCell
*);
// Structure factory: LeafType cell with this class's StructureFlags.
350 static Structure
* createStructure(VM
& vm
, JSGlobalObject
* globalObject
, JSValue prototype
)
352 return Structure::create(vm
, globalObject
, prototype
, TypeInfo(LeafType
, StructureFlags
), info());
// ---- Locked accessors: the locker parameter is a proof-of-lock token. ----
355 // You must hold the lock until after you're done with the iterator.
356 Map::iterator
find(const ConcurrentJITLocker
&, StringImpl
* key
)
358 return m_map
.find(key
);
// Same lookup under the GC-safe locker flavor.
361 Map::iterator
find(const GCSafeConcurrentJITLocker
&, StringImpl
* key
)
363 return m_map
.find(key
);
// get(): by-value lookup (default SymbolTableEntry when absent).
366 SymbolTableEntry
get(const ConcurrentJITLocker
&, StringImpl
* key
)
368 return m_map
.get(key
);
// Convenience overload that takes the lock itself.
371 SymbolTableEntry
get(StringImpl
* key
)
373 ConcurrentJITLocker
locker(m_lock
);
374 return get(locker
, key
);
// inlineGet(): HashMap's inline-expanded lookup for hot paths.
377 SymbolTableEntry
inlineGet(const ConcurrentJITLocker
&, StringImpl
* key
)
379 return m_map
.inlineGet(key
);
// Self-locking convenience overload.
382 SymbolTableEntry
inlineGet(StringImpl
* key
)
384 ConcurrentJITLocker
locker(m_lock
);
385 return inlineGet(locker
, key
);
// Iteration endpoints; caller must hold the lock for the iterator's lifetime
// (see the comment above find()). The bodies of both end() overloads are
// missing from this chunk.
388 Map::iterator
begin(const ConcurrentJITLocker
&)
390 return m_map
.begin();
393 Map::iterator
end(const ConcurrentJITLocker
&)
398 Map::iterator
end(const GCSafeConcurrentJITLocker
&)
// size(): locked count; a self-locking overload follows (its signature/return
// lines are partially missing).
403 size_t size(const ConcurrentJITLocker
&) const
410 ConcurrentJITLocker
locker(m_lock
);
// add(): insert-if-absent, returning HashMap's AddResult.
414 Map::AddResult
add(const ConcurrentJITLocker
&, StringImpl
* key
, const SymbolTableEntry
& entry
)
416 return m_map
.add(key
, entry
);
// Self-locking add() (drops the AddResult).
419 void add(StringImpl
* key
, const SymbolTableEntry
& entry
)
421 ConcurrentJITLocker
locker(m_lock
);
422 add(locker
, key
, entry
);
// set(): insert-or-overwrite.
425 Map::AddResult
set(const ConcurrentJITLocker
&, StringImpl
* key
, const SymbolTableEntry
& entry
)
427 return m_map
.set(key
, entry
);
// Self-locking set().
430 void set(StringImpl
* key
, const SymbolTableEntry
& entry
)
432 ConcurrentJITLocker
locker(m_lock
);
433 set(locker
, key
, entry
);
// contains(): membership test.
436 bool contains(const ConcurrentJITLocker
&, StringImpl
* key
)
438 return m_map
.contains(key
);
// Self-locking contains().
441 bool contains(StringImpl
* key
)
443 ConcurrentJITLocker
locker(m_lock
);
444 return contains(locker
, key
);
// ---- Simple scalar properties (unlocked accessors). ----
447 bool usesNonStrictEval() { return m_usesNonStrictEval
; }
448 void setUsesNonStrictEval(bool usesNonStrictEval
) { m_usesNonStrictEval
= usesNonStrictEval
;  }
450 int captureStart() const { return m_captureStart
; }
451 void setCaptureStart(int captureStart
) { m_captureStart
= captureStart
; }
453 int captureEnd() const { return m_captureEnd
; }
454 void setCaptureEnd(int captureEnd
) { m_captureEnd
= captureEnd
; }
// Capture range runs downward: isCaptured() below accepts operands with
// captureEnd() < operand <= captureStart(), so end - start is non-positive
// and the count is its negation. (The m_captureStart/m_captureEnd member
// declarations are among the missing lines.)
456 int captureCount() const { return -(m_captureEnd
- m_captureStart
); }
458 bool isCaptured(int operand
)
460 return operand
<= captureStart() && operand
> captureEnd();
// Parameter counts: stored including 'this'; parameterCount() excludes it.
463 int parameterCount() { return m_parameterCountIncludingThis
- 1; }
464 int parameterCountIncludingThis() { return m_parameterCountIncludingThis
; }
465 void setParameterCountIncludingThis(int parameterCountIncludingThis
) { m_parameterCountIncludingThis
= parameterCountIncludingThis
; }
467 // 0 if we don't capture any arguments; parameterCount() in length if we do.
468 const SlowArgument
* slowArguments() { return m_slowArguments
.get(); }
469 void setSlowArguments(std::unique_ptr
<SlowArgument
[]> slowArguments
) { m_slowArguments
= WTF::move(slowArguments
); }
// Deep-copy helper for captured names; GC visitation hook (out-of-line).
471 SymbolTable
* cloneCapturedNames(VM
&);
473 static void visitChildren(JSCell
*, SlotVisitor
&);
// StructureFlags: immortal structure, plus whatever the base class requires.
478 static const unsigned StructureFlags
= StructureIsImmortal
| Base::StructureFlags
;
// ---- WatchpointCleanup: finalizer that tears down watchpoints for this
// table. (Access specifiers and the nested class's closing brace are missing
// from this chunk.)
481 class WatchpointCleanup
: public UnconditionalFinalizer
{
483 WatchpointCleanup(SymbolTable
*);
484 virtual ~WatchpointCleanup();
487 virtual void finalizeUnconditionally() override
;
// Back-pointer to the owning table (non-owning; the table owns the cleanup
// object via m_watchpointCleanup below).
490 SymbolTable
* m_symbolTable
;
// Private constructor: creation goes through create() above.
493 JS_EXPORT_PRIVATE
SymbolTable(VM
&);
// ---- Data members (several, e.g. m_map/m_captureStart/m_captureEnd, are
// among the lines missing from this extraction). ----
498 int m_parameterCountIncludingThis
;
499 bool m_usesNonStrictEval
;
504 std::unique_ptr
<SlowArgument
[]> m_slowArguments
;
506 std::unique_ptr
<WatchpointCleanup
> m_watchpointCleanup
;
509 InlineWatchpointSet m_functionEnteredOnce
;
// Lock guarding m_map; mutable so const methods (e.g. size()) can lock it.
511 mutable ConcurrentJITLock m_lock
;
516 #endif // SymbolTable_h