/*
 * Copyright (C) 2012, 2014, 2015 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1.  Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 * 2.  Redistributions in binary form must reproduce the above copyright
 *     notice, this list of conditions and the following disclaimer in the
 *     documentation and/or other materials provided with the distribution.
 * 3.  Neither the name of Apple Inc. ("Apple") nor the names of
 *     its contributors may be used to endorse or promote products derived
 *     from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "SymbolTable.h"

#include "JSDestructibleObject.h"
#include "JSCInlines.h"
#include "SlotVisitorInlines.h"
#include "TypeProfiler.h"

namespace JSC {

const ClassInfo SymbolTable::s_info = { "SymbolTable", 0, 0, CREATE_METHOD_TABLE(SymbolTable) };

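// Slow path for copying an entry that uses out-of-line storage: clone the source's FatEntry and point this entry at the copy.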
SymbolTableEntry& SymbolTableEntry::copySlow(const SymbolTableEntry& other)
{
    ASSERT(other.isFat());
    FatEntry* newFatEntry = new FatEntry(*other.fatEntry());
    freeFatEntry();
    m_bits = bitwise_cast<intptr_t>(newFatEntry);
    return *this;
}

void SymbolTable::destroy(JSCell* cell)
{
    SymbolTable* thisObject = jsCast<SymbolTable*>(cell);
    thisObject->SymbolTable::~SymbolTable();
}

void SymbolTableEntry::freeFatEntrySlow()
{
    ASSERT(isFat());
    delete fatEntry();
}

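// Make sure this entry carries a WatchpointSet so the variable can be watched; inflates to a FatEntry if needed.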
void SymbolTableEntry::prepareToWatch()
{
    if (!isWatchable())
        return;
    FatEntry* entry = inflate();
    if (entry->m_watchpoints)
        return;
    entry->m_watchpoints = adoptRef(new WatchpointSet(ClearWatchpoint));
}

void SymbolTableEntry::addWatchpoint(Watchpoint* watchpoint)
{
    fatEntry()->m_watchpoints->add(watchpoint);
}

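// Slow path of inflate(): move the packed bits into a heap-allocated FatEntry and store its pointer in m_bits.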
SymbolTableEntry::FatEntry* SymbolTableEntry::inflateSlow()
{
    FatEntry* entry = new FatEntry(m_bits);
    m_bits = bitwise_cast<intptr_t>(entry);
    return entry;
}

SymbolTable::SymbolTable(VM& vm)
    : JSCell(vm, vm.symbolTableStructure.get())
    , m_usesNonStrictEval(false)
{
}

SymbolTable::~SymbolTable() { }

void SymbolTable::finishCreation(VM& vm)
{
    Base::finishCreation(vm);
    m_singletonScope.set(vm, this, InferredValue::create(vm));
}

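// Mark the GC-managed fields and drop the cached local-to-entry vector; it is rebuilt lazily when needed.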
void SymbolTable::visitChildren(JSCell* thisCell, SlotVisitor& visitor)
{
    SymbolTable* thisSymbolTable = jsCast<SymbolTable*>(thisCell);

    visitor.append(&thisSymbolTable->m_arguments);
    visitor.append(&thisSymbolTable->m_singletonScope);

    // Save some memory. This is O(n) to rebuild and we do so on the fly.
    ConcurrentJITLocker locker(thisSymbolTable->m_lock);
    thisSymbolTable->m_localToEntry = nullptr;
}

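// Lazily build a vector mapping each scope offset to its entry, sized to the largest scope offset present in the map.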
const SymbolTable::LocalToEntryVec& SymbolTable::localToEntry(const ConcurrentJITLocker&)
{
    if (UNLIKELY(!m_localToEntry)) {
        unsigned size = 0;
        for (auto& entry : m_map) {
            VarOffset offset = entry.value.varOffset();
            if (offset.isScope())
                size = std::max(size, offset.scopeOffset().offset() + 1);
        }

        m_localToEntry = std::make_unique<LocalToEntryVec>(size, nullptr);
        for (auto& entry : m_map) {
            VarOffset offset = entry.value.varOffset();
            if (offset.isScope())
                m_localToEntry->at(offset.scopeOffset().offset()) = &entry.value;
        }
    }

    return *m_localToEntry;
}

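// Return the entry stored at the given scope offset, or null if the offset is out of range.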
SymbolTableEntry* SymbolTable::entryFor(const ConcurrentJITLocker& locker, ScopeOffset offset)
{
    auto& toEntryVector = localToEntry(locker);
    if (offset.offset() >= toEntryVector.size())
        return nullptr;
    return toEntryVector[offset.offset()];
}

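// Create a new SymbolTable containing only the scope-allocated variables, along with the arguments table and any type-profiling data.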
SymbolTable* SymbolTable::cloneScopePart(VM& vm)
{
    SymbolTable* result = SymbolTable::create(vm);

    result->m_usesNonStrictEval = m_usesNonStrictEval;

    for (auto iter = m_map.begin(), end = m_map.end(); iter != end; ++iter) {
        if (!iter->value.varOffset().isScope())
            continue;
        result->m_map.add(
            iter->key,
            SymbolTableEntry(iter->value.varOffset(), iter->value.getAttributes()));
    }

    result->m_maxScopeOffset = m_maxScopeOffset;

    if (ScopedArgumentsTable* arguments = this->arguments())
        result->m_arguments.set(vm, result, arguments);

    if (m_typeProfilingRareData) {
        result->m_typeProfilingRareData = std::make_unique<TypeProfilingRareData>();

        {
            auto iter = m_typeProfilingRareData->m_uniqueIDMap.begin();
            auto end = m_typeProfilingRareData->m_uniqueIDMap.end();
            for (; iter != end; ++iter)
                result->m_typeProfilingRareData->m_uniqueIDMap.set(iter->key, iter->value);
        }

        {
            auto iter = m_typeProfilingRareData->m_offsetToVariableMap.begin();
            auto end = m_typeProfilingRareData->m_offsetToVariableMap.end();
            for (; iter != end; ++iter)
                result->m_typeProfilingRareData->m_offsetToVariableMap.set(iter->key, iter->value);
        }

        {
            auto iter = m_typeProfilingRareData->m_uniqueTypeSetMap.begin();
            auto end = m_typeProfilingRareData->m_uniqueTypeSetMap.end();
            for (; iter != end; ++iter)
                result->m_typeProfilingRareData->m_uniqueTypeSetMap.set(iter->key, iter->value);
        }
    }

    return result;
}

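// Populate the type-profiling rare data: every variable starts with a placeholder unique ID and an offset-to-name mapping.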
void SymbolTable::prepareForTypeProfiling(const ConcurrentJITLocker&)
{
    if (m_typeProfilingRareData)
        return;

    m_typeProfilingRareData = std::make_unique<TypeProfilingRareData>();

    for (auto iter = m_map.begin(), end = m_map.end(); iter != end; ++iter) {
        m_typeProfilingRareData->m_uniqueIDMap.set(iter->key, TypeProfilerNeedsUniqueIDGeneration);
        m_typeProfilingRareData->m_offsetToVariableMap.set(iter->value.varOffset(), iter->key);
    }
}

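// Return the type profiler's global ID for a variable name, generating the ID and its TypeSet on first use.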
GlobalVariableID SymbolTable::uniqueIDForVariable(const ConcurrentJITLocker&, UniquedStringImpl* key, VM& vm)
{
    RELEASE_ASSERT(m_typeProfilingRareData);

    auto iter = m_typeProfilingRareData->m_uniqueIDMap.find(key);
    auto end = m_typeProfilingRareData->m_uniqueIDMap.end();
    if (iter == end)
        return TypeProfilerNoGlobalIDExists;

    GlobalVariableID id = iter->value;
    if (id == TypeProfilerNeedsUniqueIDGeneration) {
        id = vm.typeProfiler()->getNextUniqueVariableID();
        m_typeProfilingRareData->m_uniqueIDMap.set(key, id);
        m_typeProfilingRareData->m_uniqueTypeSetMap.set(key, TypeSet::create()); // Make a new global typeset for this corresponding ID.
    }

    return id;
}

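// Same as uniqueIDForVariable(), but keyed by the variable's VarOffset.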
GlobalVariableID SymbolTable::uniqueIDForOffset(const ConcurrentJITLocker& locker, VarOffset offset, VM& vm)
{
    RELEASE_ASSERT(m_typeProfilingRareData);

    auto iter = m_typeProfilingRareData->m_offsetToVariableMap.find(offset);
    auto end = m_typeProfilingRareData->m_offsetToVariableMap.end();
    if (iter == end)
        return TypeProfilerNoGlobalIDExists;

    return uniqueIDForVariable(locker, iter->value.get(), vm);
}

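// Look up the global TypeSet for a VarOffset, lazily creating it via the unique-ID path.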
RefPtr<TypeSet> SymbolTable::globalTypeSetForOffset(const ConcurrentJITLocker& locker, VarOffset offset, VM& vm)
{
    RELEASE_ASSERT(m_typeProfilingRareData);

    uniqueIDForOffset(locker, offset, vm); // Lazily create the TypeSet if necessary.

    auto iter = m_typeProfilingRareData->m_offsetToVariableMap.find(offset);
    auto end = m_typeProfilingRareData->m_offsetToVariableMap.end();
    if (iter == end)
        return nullptr;

    return globalTypeSetForVariable(locker, iter->value.get(), vm);
}

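// Look up the global TypeSet for a variable name, lazily creating it via the unique-ID path.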
RefPtr<TypeSet> SymbolTable::globalTypeSetForVariable(const ConcurrentJITLocker& locker, UniquedStringImpl* key, VM& vm)
{
    RELEASE_ASSERT(m_typeProfilingRareData);

    uniqueIDForVariable(locker, key, vm); // Lazily create the TypeSet if necessary.

    auto iter = m_typeProfilingRareData->m_uniqueTypeSetMap.find(key);
    auto end = m_typeProfilingRareData->m_uniqueTypeSetMap.end();
    if (iter == end)
        return nullptr;

    return iter->value;
}

} // namespace JSC