/*
 * Copyright (C) 2012, 2013, 2014 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "GetByIdStatus.h"

#include "AccessorCallJITStubRoutine.h"
#include "CodeBlock.h"
#include "ComplexGetStatus.h"
#include "JSCInlines.h"
#include "JSScope.h"
#include "LLIntData.h"
#include "LowLevelInterpreter.h"
#include "PolymorphicGetByIdList.h"
#include <wtf/ListDump.h>

namespace JSC {

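// Appends a variant to this status. First tries to merge the new variant into an
// already-recorded one; failing that, admits it only if its structure set is
// disjoint from every existing variant's. Returns false when the inline cache
// produced overlapping structure sets, which callers treat as a signal to fall
// back to the slow path.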
bool GetByIdStatus::appendVariant(const GetByIdVariant& variant)
{
    // Attempt to merge this variant with an already existing variant.
    for (unsigned i = 0; i < m_variants.size(); ++i) {
        if (m_variants[i].attemptToMerge(variant))
            return true;
    }

    // Make sure there is no overlap. We should have pruned out opportunities for
    // overlap but it's possible that an inline cache got into a weird state. We are
    // defensive and bail if we detect crazy.
    for (unsigned i = 0; i < m_variants.size(); ++i) {
        if (m_variants[i].structureSet().overlaps(variant.structureSet()))
            return false;
    }

    m_variants.append(variant);
    return true;
}

#if ENABLE(DFG_JIT)
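// Reports whether the DFG has recorded frequent OSR exits for a bad cache (or bad
// constant cache) at this bytecode index. If it has, earlier speculation on this
// inline cache proved wrong, so the status computation declines to optimize again.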
bool GetByIdStatus::hasExitSite(const ConcurrentJITLocker& locker, CodeBlock* profiledBlock, unsigned bytecodeIndex)
{
    return profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadCache))
        || profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadConstantCache));
}
#endif

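// Derives a status from the LLInt's inline cache. If the cached Structure has
// cacheable property access and the property exists at a valid offset, we can
// report a Simple, monomorphic access; otherwise we learn nothing.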
GetByIdStatus GetByIdStatus::computeFromLLInt(CodeBlock* profiledBlock, unsigned bytecodeIndex, UniquedStringImpl* uid)
{
    Instruction* instruction = profiledBlock->instructions().begin() + bytecodeIndex;

    // op_get_array_length is a rewritten form of get_by_id whose operands do not
    // cache a Structure, so there is nothing to learn from it here.
    if (instruction[0].u.opcode == LLInt::getOpcode(op_get_array_length))
        return GetByIdStatus(NoInformation, false);

    // Operand 4 holds the Structure most recently seen by the LLInt's inline cache.
    Structure* structure = instruction[4].u.structure.get();
    if (!structure)
        return GetByIdStatus(NoInformation, false);

    if (structure->takesSlowPathInDFGForImpureProperty())
        return GetByIdStatus(NoInformation, false);

    unsigned attributesIgnored;
    PropertyOffset offset = structure->getConcurrently(uid, attributesIgnored);
    if (!isValidOffset(offset))
        return GetByIdStatus(NoInformation, false);

    return GetByIdStatus(Simple, false, GetByIdVariant(StructureSet(structure), offset));
}

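// Computes the status for a get_by_id in the given code block, preferring the
// baseline JIT's stub info and falling back to the LLInt cache. A hypothetical
// caller might look like the sketch below (hedged: the in-tree consumers are the
// DFG/FTL bytecode parsers, and exact accessor spellings may differ):
//
//     StubInfoMap map;
//     profiledBlock->getStubInfoMap(map);
//     GetByIdStatus status = GetByIdStatus::computeFor(profiledBlock, map, bytecodeIndex, uid);
//     if (status.isSimple()) {
//         // Emit checked inline loads for each of status.numVariants() variants.
//     }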
GetByIdStatus GetByIdStatus::computeFor(CodeBlock* profiledBlock, StubInfoMap& map, unsigned bytecodeIndex, UniquedStringImpl* uid)
{
    ConcurrentJITLocker locker(profiledBlock->m_lock);

    GetByIdStatus result;

#if ENABLE(DFG_JIT)
    result = computeForStubInfo(
        locker, profiledBlock, map.get(CodeOrigin(bytecodeIndex)), uid,
        CallLinkStatus::computeExitSiteData(locker, profiledBlock, bytecodeIndex));

    if (!result.takesSlowPath()
        && hasExitSite(locker, profiledBlock, bytecodeIndex))
        return GetByIdStatus(result.makesCalls() ? MakesCalls : TakesSlowPath, true);
#else
    UNUSED_PARAM(map);
#endif

    if (!result)
        return computeFromLLInt(profiledBlock, bytecodeIndex, uid);

    return result;
}

#if ENABLE(JIT)
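// Translates a baseline inline cache (StructureStubInfo) into a status. An unseen
// or missing stub yields NoInformation; a stub that has taken the slow path yields
// TakesSlowPath (or MakesCalls, if any recorded access invokes a getter); otherwise
// each cached access is converted into a GetByIdVariant, bailing to the slow-path
// state if any access is too exotic to inline.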
GetByIdStatus GetByIdStatus::computeForStubInfo(
    const ConcurrentJITLocker& locker, CodeBlock* profiledBlock, StructureStubInfo* stubInfo, UniquedStringImpl* uid,
    CallLinkStatus::ExitSiteData callExitSiteData)
{
    if (!stubInfo)
        return GetByIdStatus(NoInformation);

    if (!stubInfo->seen)
        return GetByIdStatus(NoInformation);

    PolymorphicGetByIdList* list = nullptr;
    State slowPathState = TakesSlowPath;
    if (stubInfo->accessType == access_get_by_id_list) {
        list = stubInfo->u.getByIdList.list;
        // If any access on the list can make calls (e.g. a getter), then a
        // slow-path fallback must be assumed to make calls as well.
        for (unsigned i = 0; i < list->size(); ++i) {
            const GetByIdAccess& access = list->at(i);
            if (access.doesCalls())
                slowPathState = MakesCalls;
        }
    }

    if (stubInfo->tookSlowPath)
        return GetByIdStatus(slowPathState);

    // Finally figure out if we can derive an access strategy.
    GetByIdStatus result;
    result.m_state = Simple;
    result.m_wasSeenInJIT = true; // This is interesting for bytecode dumping only.
    switch (stubInfo->accessType) {
    case access_unset:
        return GetByIdStatus(NoInformation);

    case access_get_by_id_self: {
        Structure* structure = stubInfo->u.getByIdSelf.baseObjectStructure.get();
        if (structure->takesSlowPathInDFGForImpureProperty())
            return GetByIdStatus(slowPathState, true);
        unsigned attributesIgnored;
        GetByIdVariant variant;
        variant.m_offset = structure->getConcurrently(uid, attributesIgnored);
        if (!isValidOffset(variant.m_offset))
            return GetByIdStatus(slowPathState, true);

        variant.m_structureSet.add(structure);
        bool didAppend = result.appendVariant(variant);
        ASSERT_UNUSED(didAppend, didAppend);
        return result;
    }

    case access_get_by_id_list: {
        for (unsigned listIndex = 0; listIndex < list->size(); ++listIndex) {
            Structure* structure = list->at(listIndex).structure();

            ComplexGetStatus complexGetStatus = ComplexGetStatus::computeFor(
                profiledBlock, structure, list->at(listIndex).chain(),
                list->at(listIndex).chainCount(), uid);

            switch (complexGetStatus.kind()) {
            case ComplexGetStatus::ShouldSkip:
                continue;

            case ComplexGetStatus::TakesSlowPath:
                return GetByIdStatus(slowPathState, true);

            case ComplexGetStatus::Inlineable: {
                std::unique_ptr<CallLinkStatus> callLinkStatus;
                switch (list->at(listIndex).type()) {
                case GetByIdAccess::SimpleInline:
                case GetByIdAccess::SimpleStub: {
                    break;
                }
                case GetByIdAccess::Getter: {
                    AccessorCallJITStubRoutine* stub = static_cast<AccessorCallJITStubRoutine*>(
                        list->at(listIndex).stubRoutine());
                    callLinkStatus = std::make_unique<CallLinkStatus>(
                        CallLinkStatus::computeFor(
                            locker, profiledBlock, *stub->m_callLinkInfo, callExitSiteData));
                    break;
                }
                case GetByIdAccess::SimpleMiss:
                case GetByIdAccess::CustomGetter:
                case GetByIdAccess::WatchedStub: {
                    // FIXME: It would be totally sweet to support this at some point in the future.
                    // https://bugs.webkit.org/show_bug.cgi?id=133052
                    return GetByIdStatus(slowPathState, true);
                }
                default:
                    RELEASE_ASSERT_NOT_REACHED();
                }

                GetByIdVariant variant(
                    StructureSet(structure), complexGetStatus.offset(), complexGetStatus.chain(),
                    WTF::move(callLinkStatus));

                if (!result.appendVariant(variant))
                    return GetByIdStatus(slowPathState, true);
                break;
            }
            }
        }

        return result;
    }

    default:
        return GetByIdStatus(slowPathState, true);
    }

    RELEASE_ASSERT_NOT_REACHED();
    return GetByIdStatus();
}
#endif // ENABLE(JIT)

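// Computes the status for a get_by_id that may have been compiled by both the
// baseline JIT and the DFG. The DFG's stub info is consulted first, since it
// reflects the most recent executions; if it is inconclusive and no exit sites
// contradict it, the computation falls through to the baseline/LLInt path above.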
GetByIdStatus GetByIdStatus::computeFor(
    CodeBlock* profiledBlock, CodeBlock* dfgBlock, StubInfoMap& baselineMap,
    StubInfoMap& dfgMap, CodeOrigin codeOrigin, UniquedStringImpl* uid)
{
#if ENABLE(DFG_JIT)
    if (dfgBlock) {
        CallLinkStatus::ExitSiteData exitSiteData;
        {
            ConcurrentJITLocker locker(profiledBlock->m_lock);
            exitSiteData = CallLinkStatus::computeExitSiteData(
                locker, profiledBlock, codeOrigin.bytecodeIndex);
        }

        GetByIdStatus result;
        {
            ConcurrentJITLocker locker(dfgBlock->m_lock);
            result = computeForStubInfo(
                locker, dfgBlock, dfgMap.get(codeOrigin), uid, exitSiteData);
        }

        if (result.takesSlowPath())
            return result;

        {
            ConcurrentJITLocker locker(profiledBlock->m_lock);
            if (hasExitSite(locker, profiledBlock, codeOrigin.bytecodeIndex))
                return GetByIdStatus(TakesSlowPath, true);
        }

        if (result.isSet())
            return result;
    }
#else
    UNUSED_PARAM(dfgBlock);
    UNUSED_PARAM(dfgMap);
#endif

    return computeFor(profiledBlock, baselineMap, codeOrigin.bytecodeIndex, uid);
}

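// Computes a status purely from a proven set of structures, with no profiling
// input; useful when the compiler already knows every structure the base object
// can have. Only self accesses are handled: a structure with uncacheable or
// exotic property access, a missing property (likely a prototype lookup), or an
// accessor property forces a conservative answer.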
GetByIdStatus GetByIdStatus::computeFor(const StructureSet& set, UniquedStringImpl* uid)
{
    // For now we only handle the super simple self access case. We could handle the
    // prototype case in the future.

    if (set.isEmpty())
        return GetByIdStatus();

    if (parseIndex(*uid))
        return GetByIdStatus(TakesSlowPath);

    GetByIdStatus result;
    result.m_state = Simple;
    result.m_wasSeenInJIT = false;
    for (unsigned i = 0; i < set.size(); ++i) {
        Structure* structure = set[i];
        if (structure->typeInfo().overridesGetOwnPropertySlot() && structure->typeInfo().type() != GlobalObjectType)
            return GetByIdStatus(TakesSlowPath);

        if (!structure->propertyAccessesAreCacheable())
            return GetByIdStatus(TakesSlowPath);

        unsigned attributes;
        PropertyOffset offset = structure->getConcurrently(uid, attributes);
        if (!isValidOffset(offset))
            return GetByIdStatus(TakesSlowPath); // It's probably a prototype lookup. Give up on life for now, even though we could totally be way smarter about it.
        if (attributes & Accessor)
            return GetByIdStatus(MakesCalls); // We could be smarter here, like strength-reducing this to a Call.

        if (!result.appendVariant(GetByIdVariant(structure, offset)))
            return GetByIdStatus(TakesSlowPath);
    }

    return result;
}

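// True if executing this access can call JS or native code: either the state is
// MakesCalls outright, or some Simple variant carries a CallLinkStatus for a getter.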
bool GetByIdStatus::makesCalls() const
{
    switch (m_state) {
    case NoInformation:
    case TakesSlowPath:
        return false;
    case Simple:
        for (unsigned i = m_variants.size(); i--;) {
            if (m_variants[i].callLinkStatus())
                return true;
        }
        return false;
    case MakesCalls:
        return true;
    }
    RELEASE_ASSERT_NOT_REACHED();

    return false;
}

void GetByIdStatus::dump(PrintStream& out) const
{
    out.print("(");
    switch (m_state) {
    case NoInformation:
        out.print("NoInformation");
        break;
    case Simple:
        out.print("Simple");
        break;
    case TakesSlowPath:
        out.print("TakesSlowPath");
        break;
    case MakesCalls:
        out.print("MakesCalls");
        break;
    }
    out.print(", ", listDump(m_variants), ", seenInJIT = ", m_wasSeenInJIT, ")");
}

} // namespace JSC