/*
 * Copyright (C) 2012, 2013, 2014 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include "config.h"
#include "GetByIdStatus.h"

#include "CodeBlock.h"
#include "JSCInlines.h"
#include "LLIntData.h"
#include "LowLevelInterpreter.h"
#include "PolymorphicGetByIdList.h"
#include <wtf/ListDump.h>

namespace JSC {
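// Helper that adds a variant to this status, unless its structure set overlaps
// a variant we already have; overlapping sets would make variant selection
// ambiguous, so we refuse and the caller falls back to a slow path.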
bool GetByIdStatus::appendVariant(const GetByIdVariant& variant)
{
    for (unsigned i = 0; i < m_variants.size(); ++i) {
        if (m_variants[i].structureSet().overlaps(variant.structureSet()))
            return false;
    }
    m_variants.append(variant);
    return true;
}
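// Reports whether the profiled block has frequently exited at this bytecode
// index because a get_by_id cache went bad, in any of the cache-related exit kinds.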
#if ENABLE(DFG_JIT)
bool GetByIdStatus::hasExitSite(const ConcurrentJITLocker& locker, CodeBlock* profiledBlock, unsigned bytecodeIndex, ExitingJITType jitType)
{
    return profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadCache, jitType))
        || profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadCacheWatchpoint, jitType))
        || profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadWeakConstantCache, jitType))
        || profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadWeakConstantCacheWatchpoint, jitType));
}
#endif
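// Builds a status from the LLInt's get_by_id inline cache: a single self-access
// variant if the cached structure still yields a valid, non-dictionary offset.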
GetByIdStatus GetByIdStatus::computeFromLLInt(CodeBlock* profiledBlock, unsigned bytecodeIndex, StringImpl* uid)
{
    UNUSED_PARAM(profiledBlock);
    UNUSED_PARAM(bytecodeIndex);
    UNUSED_PARAM(uid);
    Instruction* instruction = profiledBlock->instructions().begin() + bytecodeIndex;

    if (instruction[0].u.opcode == LLInt::getOpcode(op_get_array_length))
        return GetByIdStatus(NoInformation, false);

    Structure* structure = instruction[4].u.structure.get();
    if (!structure)
        return GetByIdStatus(NoInformation, false);

    if (structure->takesSlowPathInDFGForImpureProperty())
        return GetByIdStatus(NoInformation, false);

    unsigned attributesIgnored;
    JSCell* specificValue;
    PropertyOffset offset = structure->getConcurrently(
        *profiledBlock->vm(), uid, attributesIgnored, specificValue);
    if (structure->isDictionary())
        specificValue = 0;
    if (!isValidOffset(offset))
        return GetByIdStatus(NoInformation, false);

    return GetByIdStatus(Simple, false, GetByIdVariant(StructureSet(structure), offset, specificValue));
}
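// Converts a cached prototype-chain access into a variant, provided the chain
// is still valid and none of its structures force the DFG slow path for impure
// properties.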
bool GetByIdStatus::computeForChain(CodeBlock* profiledBlock, StringImpl* uid, PassRefPtr<IntendedStructureChain> passedChain)
{
#if ENABLE(JIT)
    RefPtr<IntendedStructureChain> chain = passedChain;

    // Validate the chain. If the chain is invalid, then currently the best thing
    // we can do is to assume that TakesSlow is true. In the future, it might be
    // worth exploring reifying the structure chain from the structure we've got
    // instead of using the one from the cache, since that will do the right things
    // if the structure chain has changed. But that may be harder, because we may
    // then end up having a different type of access altogether. And it currently
    // does not appear to be worth it to do so -- effectively, the heuristic we
    // have now is that if the structure chain has changed between when it was
    // cached in the baseline JIT and when the DFG tried to inline the access,
    // then we fall back on a polymorphic access.
    if (!chain->isStillValid())
        return false;

    if (chain->head()->takesSlowPathInDFGForImpureProperty())
        return false;
    size_t chainSize = chain->size();
    for (size_t i = 0; i < chainSize; i++) {
        if (chain->at(i)->takesSlowPathInDFGForImpureProperty())
            return false;
    }

    JSObject* currentObject = chain->terminalPrototype();
    Structure* currentStructure = chain->last();

    ASSERT_UNUSED(currentObject, currentObject);

    unsigned attributesIgnored;
    JSCell* specificValue;

    PropertyOffset offset = currentStructure->getConcurrently(
        *profiledBlock->vm(), uid, attributesIgnored, specificValue);
    if (currentStructure->isDictionary())
        specificValue = 0;
    if (!isValidOffset(offset))
        return false;

    return appendVariant(GetByIdVariant(StructureSet(chain->head()), offset, specificValue, chain));
#else // ENABLE(JIT)
    UNUSED_PARAM(profiledBlock);
    UNUSED_PARAM(uid);
    UNUSED_PARAM(passedChain);
    UNREACHABLE_FOR_PLATFORM();
    return false;
#endif // ENABLE(JIT)
}
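// Computes the status from baseline profiling: consult the baseline JIT's stub
// info first, fall back to the LLInt cache, and give up early if exit profiling
// says the access tends to go slow.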
GetByIdStatus GetByIdStatus::computeFor(CodeBlock* profiledBlock, StubInfoMap& map, unsigned bytecodeIndex, StringImpl* uid)
{
    ConcurrentJITLocker locker(profiledBlock->m_lock);

    GetByIdStatus result;

#if ENABLE(DFG_JIT)
    result = computeForStubInfo(
        locker, profiledBlock, map.get(CodeOrigin(bytecodeIndex)), uid);

    if (!result.takesSlowPath()
        && (hasExitSite(locker, profiledBlock, bytecodeIndex)
            || profiledBlock->likelyToTakeSlowCase(bytecodeIndex)))
        return GetByIdStatus(TakesSlowPath, true);
#else
    UNUSED_PARAM(map);
#endif

    if (!result)
        return computeFromLLInt(profiledBlock, bytecodeIndex, uid);

    return result;
}
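// Interprets a baseline StructureStubInfo: self, list, and chain caches become
// simple variants; getters, watched accesses, and GC-reset stubs force
// MakesCalls or TakesSlowPath.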
#if ENABLE(JIT)
GetByIdStatus GetByIdStatus::computeForStubInfo(
    const ConcurrentJITLocker&, CodeBlock* profiledBlock, StructureStubInfo* stubInfo,
    StringImpl* uid)
{
    if (!stubInfo || !stubInfo->seen)
        return GetByIdStatus(NoInformation);

    if (stubInfo->resetByGC)
        return GetByIdStatus(TakesSlowPath, true);

    PolymorphicGetByIdList* list = 0;
    if (stubInfo->accessType == access_get_by_id_list) {
        list = stubInfo->u.getByIdList.list;
        bool makesCalls = false;
        bool isWatched = false;
        for (unsigned i = 0; i < list->size(); ++i) {
            const GetByIdAccess& access = list->at(i);
            if (access.doesCalls()) {
                makesCalls = true;
                break;
            }
            if (access.isWatched()) {
                isWatched = true;
                continue;
            }
        }
        if (makesCalls)
            return GetByIdStatus(MakesCalls, true);
        if (isWatched)
            return GetByIdStatus(TakesSlowPath, true);
    }

    // Finally figure out if we can derive an access strategy.
    GetByIdStatus result;
    result.m_state = Simple;
    result.m_wasSeenInJIT = true; // This is interesting for bytecode dumping only.
    switch (stubInfo->accessType) {
    case access_unset:
        return GetByIdStatus(NoInformation);

    case access_get_by_id_self: {
        Structure* structure = stubInfo->u.getByIdSelf.baseObjectStructure.get();
        if (structure->takesSlowPathInDFGForImpureProperty())
            return GetByIdStatus(TakesSlowPath, true);
        unsigned attributesIgnored;
        JSCell* specificValue;
        GetByIdVariant variant;
        variant.m_offset = structure->getConcurrently(
            *profiledBlock->vm(), uid, attributesIgnored, specificValue);
        if (!isValidOffset(variant.m_offset))
            return GetByIdStatus(TakesSlowPath, true);

        if (structure->isDictionary())
            specificValue = 0;

        variant.m_structureSet.add(structure);
        variant.m_specificValue = JSValue(specificValue);
        result.appendVariant(variant);
        return result;
    }

    case access_get_by_id_list: {
        for (unsigned listIndex = 0; listIndex < list->size(); ++listIndex) {
            ASSERT(list->at(listIndex).isSimple());

            Structure* structure = list->at(listIndex).structure();

            // FIXME: We should assert that we never see a structure that
            // hasImpureGetOwnPropertySlot() but for which we don't
            // newImpurePropertyFiresWatchpoints(). We're not at a point where we can do
            // that, yet.
            // https://bugs.webkit.org/show_bug.cgi?id=131810

            if (structure->takesSlowPathInDFGForImpureProperty())
                return GetByIdStatus(TakesSlowPath, true);

            if (list->at(listIndex).chain()) {
                RefPtr<IntendedStructureChain> chain = adoptRef(new IntendedStructureChain(
                    profiledBlock, structure, list->at(listIndex).chain(),
                    list->at(listIndex).chainCount()));
                if (!result.computeForChain(profiledBlock, uid, chain))
                    return GetByIdStatus(TakesSlowPath, true);
                continue;
            }

            unsigned attributesIgnored;
            JSCell* specificValue;
            PropertyOffset myOffset = structure->getConcurrently(
                *profiledBlock->vm(), uid, attributesIgnored, specificValue);
            if (structure->isDictionary())
                specificValue = 0;

            if (!isValidOffset(myOffset))
                return GetByIdStatus(TakesSlowPath, true);

            bool found = false;
            for (unsigned variantIndex = 0; variantIndex < result.m_variants.size(); ++variantIndex) {
                GetByIdVariant& variant = result.m_variants[variantIndex];
                if (variant.m_chain)
                    continue;

                if (variant.m_offset != myOffset)
                    continue;

                found = true;
                if (variant.m_structureSet.contains(structure))
                    break;

                if (variant.m_specificValue != JSValue(specificValue))
                    variant.m_specificValue = JSValue();

                variant.m_structureSet.add(structure);
                break;
            }

            if (found)
                continue;

            if (!result.appendVariant(GetByIdVariant(StructureSet(structure), myOffset, specificValue)))
                return GetByIdStatus(TakesSlowPath, true);
        }

        return result;
    }

    case access_get_by_id_chain: {
        if (!stubInfo->u.getByIdChain.isDirect)
            return GetByIdStatus(MakesCalls, true);
        RefPtr<IntendedStructureChain> chain = adoptRef(new IntendedStructureChain(
            profiledBlock,
            stubInfo->u.getByIdChain.baseObjectStructure.get(),
            stubInfo->u.getByIdChain.chain.get(),
            stubInfo->u.getByIdChain.count));
        if (result.computeForChain(profiledBlock, uid, chain))
            return result;
        return GetByIdStatus(TakesSlowPath, true);
    }

    default:
        return GetByIdStatus(TakesSlowPath, true);
    }

    RELEASE_ASSERT_NOT_REACHED();
    return GetByIdStatus();
}
#endif // ENABLE(JIT)
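// Overload used for FTL compilation: prefer the DFG block's own stub info and
// exit profile, then fall back to the baseline information.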
GetByIdStatus GetByIdStatus::computeFor(
    CodeBlock* profiledBlock, CodeBlock* dfgBlock, StubInfoMap& baselineMap,
    StubInfoMap& dfgMap, CodeOrigin codeOrigin, StringImpl* uid)
{
#if ENABLE(DFG_JIT)
    if (dfgBlock) {
        GetByIdStatus result;
        {
            ConcurrentJITLocker locker(dfgBlock->m_lock);
            result = computeForStubInfo(locker, dfgBlock, dfgMap.get(codeOrigin), uid);
        }

        if (result.takesSlowPath())
            return result;

        {
            ConcurrentJITLocker locker(profiledBlock->m_lock);
            if (hasExitSite(locker, profiledBlock, codeOrigin.bytecodeIndex, ExitFromFTL))
                return GetByIdStatus(TakesSlowPath, true);
        }

        if (result.isSet())
            return result;
    }
#else
    UNUSED_PARAM(dfgBlock);
    UNUSED_PARAM(dfgMap);
#endif

    return computeFor(profiledBlock, baselineMap, codeOrigin.bytecodeIndex, uid);
}
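// Computes a status straight from a structure, for when the base object is a
// known constant; only the plain self-access case is handled for now.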
GetByIdStatus GetByIdStatus::computeFor(VM& vm, Structure* structure, StringImpl* uid)
{
    // For now we only handle the super simple self access case. We could handle the
    // prototype case in the future.

    if (!structure || !structure->classInfo()->isSubClassOf(JSObject::info()))
        return GetByIdStatus(TakesSlowPath);

    if (toUInt32FromStringImpl(uid) != PropertyName::NotAnIndex)
        return GetByIdStatus(TakesSlowPath);

    if (structure->typeInfo().overridesGetOwnPropertySlot() && structure->typeInfo().type() != GlobalObjectType)
        return GetByIdStatus(TakesSlowPath);

    if (!structure->propertyAccessesAreCacheable())
        return GetByIdStatus(TakesSlowPath);

    unsigned attributes;
    JSCell* specificValue;
    PropertyOffset offset = structure->getConcurrently(vm, uid, attributes, specificValue);
    if (!isValidOffset(offset))
        return GetByIdStatus(TakesSlowPath); // It's probably a prototype lookup. Give up on life for now, even though we could totally be way smarter about it.
    if (attributes & Accessor)
        return GetByIdStatus(MakesCalls);
    if (structure->isDictionary())
        specificValue = 0;
    return GetByIdStatus(
        Simple, false, GetByIdVariant(StructureSet(structure), offset, specificValue));
}
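// Prints a compact, human-readable summary of this status for debugging output.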
void GetByIdStatus::dump(PrintStream& out) const
{
    out.print("(");
    switch (m_state) {
    case NoInformation:
        out.print("NoInformation");
        break;
    case Simple:
        out.print("Simple");
        break;
    case TakesSlowPath:
        out.print("TakesSlowPath");
        break;
    case MakesCalls:
        out.print("MakesCalls");
        break;
    }
    out.print(", ", listDump(m_variants), ", seenInJIT = ", m_wasSeenInJIT, ")");
}

} // namespace JSC