/*
 * Copyright (C) 2012, 2013, 2014 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
27 #include "GetByIdStatus.h"
29 #include "AccessorCallJITStubRoutine.h"
30 #include "CodeBlock.h"
31 #include "ComplexGetStatus.h"
32 #include "JSCInlines.h"
34 #include "LLIntData.h"
35 #include "LowLevelInterpreter.h"
36 #include "PolymorphicGetByIdList.h"
37 #include <wtf/ListDump.h>
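
// Try to fold a newly observed variant into this status. Compatible variants are
// merged; if a variant's structure set overlaps an existing variant and cannot be
// merged, we refuse it and the caller falls back to the slow path.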
bool GetByIdStatus::appendVariant(const GetByIdVariant& variant)
{
    // Attempt to merge this variant with an already existing variant.
    for (unsigned i = 0; i < m_variants.size(); ++i) {
        if (m_variants[i].attemptToMerge(variant))
            return true;
    }

    // Make sure there is no overlap. We should have pruned out opportunities for
    // overlap but it's possible that an inline cache got into a weird state. We are
    // defensive and bail if we detect crazy.
    for (unsigned i = 0; i < m_variants.size(); ++i) {
        if (m_variants[i].structureSet().overlaps(variant.structureSet()))
            return false;
    }

    m_variants.append(variant);
    return true;
}

#if ENABLE(DFG_JIT)
bool GetByIdStatus::hasExitSite(const ConcurrentJITLocker& locker, CodeBlock* profiledBlock, unsigned bytecodeIndex)
{
    return profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadCache))
        || profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadConstantCache));
}
#endif
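
// Derive a status from the LLInt's inline cache: if the instruction has cached a
// structure in which the property sits at a valid offset, report a Simple self
// access; otherwise report NoInformation.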
GetByIdStatus GetByIdStatus::computeFromLLInt(CodeBlock* profiledBlock, unsigned bytecodeIndex, UniquedStringImpl* uid)
{
    UNUSED_PARAM(profiledBlock);
    UNUSED_PARAM(bytecodeIndex);

    Instruction* instruction = profiledBlock->instructions().begin() + bytecodeIndex;

    if (instruction[0].u.opcode == LLInt::getOpcode(op_get_array_length))
        return GetByIdStatus(NoInformation, false);

    Structure* structure = instruction[4].u.structure.get();
    if (!structure)
        return GetByIdStatus(NoInformation, false);

    if (structure->takesSlowPathInDFGForImpureProperty())
        return GetByIdStatus(NoInformation, false);

    unsigned attributesIgnored;
    PropertyOffset offset = structure->getConcurrently(uid, attributesIgnored);
    if (!isValidOffset(offset))
        return GetByIdStatus(NoInformation, false);

    return GetByIdStatus(Simple, false, GetByIdVariant(StructureSet(structure), offset));
}
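
// Compute the status for a baseline code block: consult the baseline JIT's inline
// cache first and fall back to the LLInt's cache if the JIT has nothing to say.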
GetByIdStatus GetByIdStatus::computeFor(CodeBlock* profiledBlock, StubInfoMap& map, unsigned bytecodeIndex, UniquedStringImpl* uid)
{
    ConcurrentJITLocker locker(profiledBlock->m_lock);

    GetByIdStatus result;

#if ENABLE(DFG_JIT)
    result = computeForStubInfo(
        locker, profiledBlock, map.get(CodeOrigin(bytecodeIndex)), uid,
        CallLinkStatus::computeExitSiteData(locker, profiledBlock, bytecodeIndex));

    if (!result.takesSlowPath()
        && hasExitSite(locker, profiledBlock, bytecodeIndex))
        return GetByIdStatus(result.makesCalls() ? MakesCalls : TakesSlowPath, true);
#else
    UNUSED_PARAM(map);
#endif

    if (!result)
        return computeFromLLInt(profiledBlock, bytecodeIndex, uid);

    return result;
}

#if ENABLE(JIT)
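// Translate a baseline inline cache (StructureStubInfo) into a status. Self accesses
// and inlineable entries of a polymorphic access list become GetByIdVariants;
// anything we cannot model collapses to TakesSlowPath or MakesCalls.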
GetByIdStatus GetByIdStatus::computeForStubInfo(
    const ConcurrentJITLocker& locker, CodeBlock* profiledBlock, StructureStubInfo* stubInfo, UniquedStringImpl* uid,
    CallLinkStatus::ExitSiteData callExitSiteData)
{
    if (!stubInfo)
        return GetByIdStatus(NoInformation);

    if (!stubInfo->seen)
        return GetByIdStatus(NoInformation);

    PolymorphicGetByIdList* list = 0;
    State slowPathState = TakesSlowPath;
    if (stubInfo->accessType == access_get_by_id_list) {
        list = stubInfo->u.getByIdList.list;
        for (unsigned i = 0; i < list->size(); ++i) {
            const GetByIdAccess& access = list->at(i);
            if (access.doesCalls())
                slowPathState = MakesCalls;
        }
    }

    if (stubInfo->tookSlowPath)
        return GetByIdStatus(slowPathState);

    // Finally figure out if we can derive an access strategy.
    GetByIdStatus result;
    result.m_state = Simple;
    result.m_wasSeenInJIT = true; // This is interesting for bytecode dumping only.
    switch (stubInfo->accessType) {
    case access_unset:
        return GetByIdStatus(NoInformation);

    case access_get_by_id_self: {
        Structure* structure = stubInfo->u.getByIdSelf.baseObjectStructure.get();
        if (structure->takesSlowPathInDFGForImpureProperty())
            return GetByIdStatus(slowPathState, true);
        unsigned attributesIgnored;
        GetByIdVariant variant;
        variant.m_offset = structure->getConcurrently(uid, attributesIgnored);
        if (!isValidOffset(variant.m_offset))
            return GetByIdStatus(slowPathState, true);

        variant.m_structureSet.add(structure);
        bool didAppend = result.appendVariant(variant);
        ASSERT_UNUSED(didAppend, didAppend);
        return result;
    }

    case access_get_by_id_list: {
        for (unsigned listIndex = 0; listIndex < list->size(); ++listIndex) {
            Structure* structure = list->at(listIndex).structure();

            ComplexGetStatus complexGetStatus = ComplexGetStatus::computeFor(
                profiledBlock, structure, list->at(listIndex).chain(),
                list->at(listIndex).chainCount(), uid);

            switch (complexGetStatus.kind()) {
            case ComplexGetStatus::ShouldSkip:
                continue;

            case ComplexGetStatus::TakesSlowPath:
                return GetByIdStatus(slowPathState, true);

            case ComplexGetStatus::Inlineable: {
                std::unique_ptr<CallLinkStatus> callLinkStatus;
                switch (list->at(listIndex).type()) {
                case GetByIdAccess::SimpleInline:
                case GetByIdAccess::SimpleStub: {
                    break;
                }
                case GetByIdAccess::Getter: {
                    AccessorCallJITStubRoutine* stub = static_cast<AccessorCallJITStubRoutine*>(
                        list->at(listIndex).stubRoutine());
                    callLinkStatus = std::make_unique<CallLinkStatus>(
                        CallLinkStatus::computeFor(
                            locker, profiledBlock, *stub->m_callLinkInfo, callExitSiteData));
                    break;
                }
                case GetByIdAccess::SimpleMiss:
                case GetByIdAccess::CustomGetter:
                case GetByIdAccess::WatchedStub: {
                    // FIXME: It would be totally sweet to support this at some point in the future.
                    // https://bugs.webkit.org/show_bug.cgi?id=133052
                    return GetByIdStatus(slowPathState, true);
                }
                default:
                    RELEASE_ASSERT_NOT_REACHED();
                }

                GetByIdVariant variant(
                    StructureSet(structure), complexGetStatus.offset(), complexGetStatus.chain(),
                    WTF::move(callLinkStatus));

                if (!result.appendVariant(variant))
                    return GetByIdStatus(slowPathState, true);
                break;
            } }
        }

        return result;
    }

    default:
        return GetByIdStatus(slowPathState, true);
    }

    RELEASE_ASSERT_NOT_REACHED();
    return GetByIdStatus();
}
#endif // ENABLE(JIT)
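
// Compute the status when a DFG code block may also be available: its inline caches
// are consulted first, and we fall back to the baseline code block otherwise.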
GetByIdStatus GetByIdStatus::computeFor(
    CodeBlock* profiledBlock, CodeBlock* dfgBlock, StubInfoMap& baselineMap,
    StubInfoMap& dfgMap, CodeOrigin codeOrigin, UniquedStringImpl* uid)
{
#if ENABLE(DFG_JIT)
    if (dfgBlock) {
        CallLinkStatus::ExitSiteData exitSiteData;
        {
            ConcurrentJITLocker locker(profiledBlock->m_lock);
            exitSiteData = CallLinkStatus::computeExitSiteData(
                locker, profiledBlock, codeOrigin.bytecodeIndex);
        }

        GetByIdStatus result;
        {
            ConcurrentJITLocker locker(dfgBlock->m_lock);
            result = computeForStubInfo(
                locker, dfgBlock, dfgMap.get(codeOrigin), uid, exitSiteData);
        }

        if (result.takesSlowPath())
            return result;

        {
            ConcurrentJITLocker locker(profiledBlock->m_lock);
            if (hasExitSite(locker, profiledBlock, codeOrigin.bytecodeIndex))
                return GetByIdStatus(TakesSlowPath, true);
        }

        if (result.isSet())
            return result;
    }
#else
    UNUSED_PARAM(dfgBlock);
    UNUSED_PARAM(dfgMap);
#endif

    return computeFor(profiledBlock, baselineMap, codeOrigin.bytecodeIndex, uid);
}
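
// Compute the status abstractly from a set of structures, with no code block or
// inline cache involved; used when the compiler already knows the possible
// structures of the base object.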
GetByIdStatus GetByIdStatus::computeFor(const StructureSet& set, UniquedStringImpl* uid)
{
    // For now we only handle the super simple self access case. We could handle the
    // prototype case in the future.

    if (set.isEmpty())
        return GetByIdStatus();

    if (parseIndex(*uid))
        return GetByIdStatus(TakesSlowPath);

    GetByIdStatus result;
    result.m_state = Simple;
    result.m_wasSeenInJIT = false;
    for (unsigned i = 0; i < set.size(); ++i) {
        Structure* structure = set[i];
        if (structure->typeInfo().overridesGetOwnPropertySlot() && structure->typeInfo().type() != GlobalObjectType)
            return GetByIdStatus(TakesSlowPath);

        if (!structure->propertyAccessesAreCacheable())
            return GetByIdStatus(TakesSlowPath);

        unsigned attributes;
        PropertyOffset offset = structure->getConcurrently(uid, attributes);
        if (!isValidOffset(offset))
            return GetByIdStatus(TakesSlowPath); // It's probably a prototype lookup. Give up on life for now, even though we could totally be way smarter about it.
        if (attributes & Accessor)
            return GetByIdStatus(MakesCalls); // We could be smarter here, like strength-reducing this to a Call.

        if (!result.appendVariant(GetByIdVariant(structure, offset)))
            return GetByIdStatus(TakesSlowPath);
    }

    return result;
}
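
// A get-by-id makes calls if it is known to invoke getters: either the state is
// MakesCalls outright, or some Simple variant carries a CallLinkStatus.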
bool GetByIdStatus::makesCalls() const
{
    switch (m_state) {
    case NoInformation:
    case TakesSlowPath:
        return false;
    case Simple:
        for (unsigned i = m_variants.size(); i--;) {
            if (m_variants[i].callLinkStatus())
                return true;
        }
        return false;
    case MakesCalls:
        return true;
    }
    RELEASE_ASSERT_NOT_REACHED();

    return false;
}
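
// Print a human-readable summary of the status, as used in bytecode and IR dumps.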
void GetByIdStatus::dump(PrintStream& out) const
{
    out.print("(");
    switch (m_state) {
    case NoInformation:
        out.print("NoInformation");
        break;
    case Simple:
        out.print("Simple");
        break;
    case TakesSlowPath:
        out.print("TakesSlowPath");
        break;
    case MakesCalls:
        out.print("MakesCalls");
        break;
    }
    out.print(", ", listDump(m_variants), ", seenInJIT = ", m_wasSeenInJIT, ")");
}

} // namespace JSC