]> git.saurik.com Git - apple/javascriptcore.git/blob - bytecode/GetByIdStatus.cpp
JavaScriptCore-1218.34.tar.gz
[apple/javascriptcore.git] / bytecode / GetByIdStatus.cpp
1 /*
2 * Copyright (C) 2012 Apple Inc. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
12 *
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24 */
25
26 #include "config.h"
27 #include "GetByIdStatus.h"
28
29 #include "CodeBlock.h"
30 #include "JSScope.h"
31 #include "LLIntData.h"
32 #include "LowLevelInterpreter.h"
33 #include "Operations.h"
34
35 namespace JSC {
36
// Derive a GetByIdStatus from the LLInt's inline cache for the get_by_id at
// bytecodeIndex. This is the fallback used when the baseline JIT has not yet
// produced (or seen) a structure stub for this access.
GetByIdStatus GetByIdStatus::computeFromLLInt(CodeBlock* profiledBlock, unsigned bytecodeIndex, Identifier& ident)
{
    // All parameters are unused when LLINT is compiled out.
    UNUSED_PARAM(profiledBlock);
    UNUSED_PARAM(bytecodeIndex);
    UNUSED_PARAM(ident);
#if ENABLE(LLINT)
    Instruction* instruction = profiledBlock->instructions().begin() + bytecodeIndex;
    
    // The array-length specialization caches no structure, so there is nothing
    // useful to learn from it here.
    if (instruction[0].u.opcode == LLInt::getOpcode(llint_op_get_array_length))
        return GetByIdStatus(NoInformation, false);
    
    // Operand 4 of op_get_by_id holds the structure the LLInt cached; if the
    // cache was never filled (or was cleared), we know nothing.
    Structure* structure = instruction[4].u.structure.get();
    if (!structure)
        return GetByIdStatus(NoInformation, false);
    
    unsigned attributesIgnored;
    JSCell* specificValue;
    PropertyOffset offset = structure->get(
        *profiledBlock->vm(), ident, attributesIgnored, specificValue);
    // Dictionary structures give no stable specific-value guarantee, so drop it.
    if (structure->isDictionary())
        specificValue = 0;
    if (!isValidOffset(offset))
        return GetByIdStatus(NoInformation, false);
    
    // A cached structure with a valid offset: report a simple monomorphic access.
    return GetByIdStatus(Simple, false, StructureSet(structure), offset, specificValue);
#else
    return GetByIdStatus(NoInformation, false);
#endif
}
66
// Fill in 'result' for a prototype-chain access cached by the baseline JIT.
// 'structure' is the base object's structure, and result.m_chain must already
// hold the cached chain of prototype structures. On any mismatch or lookup
// failure this returns early, leaving result.m_offset invalid so the caller
// falls back to TakesSlowPath.
void GetByIdStatus::computeForChain(GetByIdStatus& result, CodeBlock* profiledBlock, Identifier& ident, Structure* structure)
{
#if ENABLE(JIT) && ENABLE(VALUE_PROFILER)
    // Validate the chain. If the chain is invalid, then currently the best thing
    // we can do is to assume that TakesSlow is true. In the future, it might be
    // worth exploring reifying the structure chain from the structure we've got
    // instead of using the one from the cache, since that will do the right things
    // if the structure chain has changed. But that may be harder, because we may
    // then end up having a different type of access altogether. And it currently
    // does not appear to be worth it to do so -- effectively, the heuristic we
    // have now is that if the structure chain has changed between when it was
    // cached on in the baseline JIT and when the DFG tried to inline the access,
    // then we fall back on a polymorphic access.
    Structure* currentStructure = structure;
    JSObject* currentObject = 0;
    for (unsigned i = 0; i < result.m_chain.size(); ++i) {
        ASSERT(!currentStructure->isDictionary());
        // Step to the next prototype object and check that it still has the
        // structure that was cached; if the chain changed since caching, bail.
        currentObject = asObject(currentStructure->prototypeForLookup(profiledBlock));
        currentStructure = result.m_chain[i];
        if (currentObject->structure() != currentStructure)
            return;
    }
    
    // The chain is never empty here, so the loop assigned currentObject.
    ASSERT(currentObject);
    
    unsigned attributesIgnored;
    JSCell* specificValue;
    
    // The property lives on the last structure in the chain.
    result.m_offset = currentStructure->get(
        *profiledBlock->vm(), ident, attributesIgnored, specificValue);
    // Dictionary structures give no stable specific-value guarantee, so drop it.
    if (currentStructure->isDictionary())
        specificValue = 0;
    if (!isValidOffset(result.m_offset))
        return;
    
    // Success: record the base structure and the (possibly cleared) constant value.
    result.m_structureSet.add(structure);
    result.m_specificValue = JSValue(specificValue);
#else
    UNUSED_PARAM(result);
    UNUSED_PARAM(profiledBlock);
    UNUSED_PARAM(ident);
    UNUSED_PARAM(structure);
    UNREACHABLE_FOR_PLATFORM();
#endif
}
112
// Compute the status of the get_by_id at bytecodeIndex in profiledBlock by
// inspecting the baseline JIT's structure stub (falling back to the LLInt
// cache when no stub information exists). The result tells an optimizing
// compiler whether the access can be inlined (Simple), must stay on the slow
// path (TakesSlowPath), or may invoke arbitrary code (MakesCalls).
GetByIdStatus GetByIdStatus::computeFor(CodeBlock* profiledBlock, unsigned bytecodeIndex, Identifier& ident)
{
    // All parameters are unused when the JIT/value profiler is compiled out.
    UNUSED_PARAM(profiledBlock);
    UNUSED_PARAM(bytecodeIndex);
    UNUSED_PARAM(ident);
#if ENABLE(JIT) && ENABLE(VALUE_PROFILER)
    if (!profiledBlock->numberOfStructureStubInfos())
        return computeFromLLInt(profiledBlock, bytecodeIndex, ident);
    
    // First check if it makes either calls, in which case we want to be super careful, or
    // if it's not set at all, in which case we punt.
    StructureStubInfo& stubInfo = profiledBlock->getStubInfo(bytecodeIndex);
    if (!stubInfo.seen)
        return computeFromLLInt(profiledBlock, bytecodeIndex, ident);
    
    // A GC reset wiped the stub; the access must have been live, so be conservative.
    if (stubInfo.resetByGC)
        return GetByIdStatus(TakesSlowPath, true);

    // For the polymorphic access types, pull out the structure list so we can
    // scan it for non-direct (call-making) entries before committing to anything.
    PolymorphicAccessStructureList* list;
    int listSize;
    switch (stubInfo.accessType) {
    case access_get_by_id_self_list:
        list = stubInfo.u.getByIdSelfList.structureList;
        listSize = stubInfo.u.getByIdSelfList.listSize;
        break;
    case access_get_by_id_proto_list:
        list = stubInfo.u.getByIdProtoList.structureList;
        listSize = stubInfo.u.getByIdProtoList.listSize;
        break;
    default:
        list = 0;
        listSize = 0;
        break;
    }
    for (int i = 0; i < listSize; ++i) {
        // Any indirect entry (e.g. a getter) means arbitrary code may run.
        if (!list->list[i].isDirect)
            return GetByIdStatus(MakesCalls, true);
    }
    
    // Next check if it takes slow case, in which case we want to be kind of careful.
    if (profiledBlock->likelyToTakeSlowCase(bytecodeIndex))
        return GetByIdStatus(TakesSlowPath, true);
    
    // Finally figure out if we can derive an access strategy.
    GetByIdStatus result;
    result.m_wasSeenInJIT = true; // This is interesting for bytecode dumping only.
    switch (stubInfo.accessType) {
    case access_unset:
        return computeFromLLInt(profiledBlock, bytecodeIndex, ident);
        
    case access_get_by_id_self: {
        // Monomorphic self access: one structure, property on the base object.
        Structure* structure = stubInfo.u.getByIdSelf.baseObjectStructure.get();
        unsigned attributesIgnored;
        JSCell* specificValue;
        result.m_offset = structure->get(
            *profiledBlock->vm(), ident, attributesIgnored, specificValue);
        // Dictionary structures give no stable specific-value guarantee.
        if (structure->isDictionary())
            specificValue = 0;
        
        if (isValidOffset(result.m_offset)) {
            result.m_structureSet.add(structure);
            result.m_specificValue = JSValue(specificValue);
        }
        
        if (isValidOffset(result.m_offset))
            ASSERT(result.m_structureSet.size());
        break;
    }
        
    case access_get_by_id_self_list: {
        // Polymorphic self access: every structure in the list must resolve the
        // property at the same offset, or we give up (invalid offset).
        for (int i = 0; i < listSize; ++i) {
            ASSERT(list->list[i].isDirect);
            
            Structure* structure = list->list[i].base.get();
            if (result.m_structureSet.contains(structure))
                continue;
            
            unsigned attributesIgnored;
            JSCell* specificValue;
            PropertyOffset myOffset = structure->get(
                *profiledBlock->vm(), ident, attributesIgnored, specificValue);
            if (structure->isDictionary())
                specificValue = 0;
            
            if (!isValidOffset(myOffset)) {
                result.m_offset = invalidOffset;
                break;
            }
                    
            if (!i) {
                // First entry establishes the candidate offset/value.
                result.m_offset = myOffset;
                result.m_specificValue = JSValue(specificValue);
            } else if (result.m_offset != myOffset) {
                // Offsets disagree across structures; cannot inline uniformly.
                result.m_offset = invalidOffset;
                break;
            } else if (result.m_specificValue != JSValue(specificValue)) {
                // Same offset but different constant values: keep the offset,
                // drop the specific-value assumption.
                result.m_specificValue = JSValue();
            }
            
            result.m_structureSet.add(structure);
        }
                    
        if (isValidOffset(result.m_offset))
            ASSERT(result.m_structureSet.size());
        break;
    }
        
    case access_get_by_id_proto: {
        // Single-hop prototype access; indirect entries would make calls.
        if (!stubInfo.u.getByIdProto.isDirect)
            return GetByIdStatus(MakesCalls, true);
        result.m_chain.append(stubInfo.u.getByIdProto.prototypeStructure.get());
        computeForChain(
            result, profiledBlock, ident,
            stubInfo.u.getByIdProto.baseObjectStructure.get());
        break;
    }
        
    case access_get_by_id_chain: {
        // Multi-hop prototype-chain access; validate the whole cached chain.
        if (!stubInfo.u.getByIdChain.isDirect)
            return GetByIdStatus(MakesCalls, true);
        for (unsigned i = 0; i < stubInfo.u.getByIdChain.count; ++i)
            result.m_chain.append(stubInfo.u.getByIdChain.chain->head()[i].get());
        computeForChain(
            result, profiledBlock, ident,
            stubInfo.u.getByIdChain.baseObjectStructure.get());
        break;
    }
        
    default:
        ASSERT(!isValidOffset(result.m_offset));
        break;
    }
    
    // An invalid offset at this point means some case above bailed: scrub the
    // partial data and report slow path; otherwise the access is simple.
    if (!isValidOffset(result.m_offset)) {
        result.m_state = TakesSlowPath;
        result.m_structureSet.clear();
        result.m_chain.clear();
        result.m_specificValue = JSValue();
    } else
        result.m_state = Simple;
    
    return result;
#else // ENABLE(JIT)
    return GetByIdStatus(NoInformation, false);
#endif // ENABLE(JIT)
}
258
259 GetByIdStatus GetByIdStatus::computeFor(VM& vm, Structure* structure, Identifier& ident)
260 {
261 // For now we only handle the super simple self access case. We could handle the
262 // prototype case in the future.
263
264 if (PropertyName(ident).asIndex() != PropertyName::NotAnIndex)
265 return GetByIdStatus(TakesSlowPath);
266
267 if (structure->typeInfo().overridesGetOwnPropertySlot())
268 return GetByIdStatus(TakesSlowPath);
269
270 if (!structure->propertyAccessesAreCacheable())
271 return GetByIdStatus(TakesSlowPath);
272
273 GetByIdStatus result;
274 result.m_wasSeenInJIT = false; // To my knowledge nobody that uses computeFor(VM&, Structure*, Identifier&) reads this field, but I might as well be honest: no, it wasn't seen in the JIT, since I computed it statically.
275 unsigned attributes;
276 JSCell* specificValue;
277 result.m_offset = structure->get(vm, ident, attributes, specificValue);
278 if (!isValidOffset(result.m_offset))
279 return GetByIdStatus(TakesSlowPath); // It's probably a prototype lookup. Give up on life for now, even though we could totally be way smarter about it.
280 if (attributes & Accessor)
281 return GetByIdStatus(MakesCalls);
282 if (structure->isDictionary())
283 specificValue = 0;
284 result.m_structureSet.add(structure);
285 result.m_specificValue = JSValue(specificValue);
286 return result;
287 }
288
289 } // namespace JSC
290