]>
Commit | Line | Data |
---|---|---|
9dae56ea | 1 | /* |
ba379fdc | 2 | * Copyright (C) 2008, 2009 Apple Inc. All rights reserved. |
9dae56ea A |
3 | * |
4 | * Redistribution and use in source and binary forms, with or without | |
5 | * modification, are permitted provided that the following conditions | |
6 | * are met: | |
7 | * 1. Redistributions of source code must retain the above copyright | |
8 | * notice, this list of conditions and the following disclaimer. | |
9 | * 2. Redistributions in binary form must reproduce the above copyright | |
10 | * notice, this list of conditions and the following disclaimer in the | |
11 | * documentation and/or other materials provided with the distribution. | |
12 | * | |
13 | * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY | |
14 | * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE | |
15 | * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR | |
16 | * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR | |
17 | * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, | |
18 | * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, | |
19 | * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR | |
20 | * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY | |
21 | * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | |
22 | * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | |
23 | * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | |
24 | */ | |
25 | ||
26 | #include "config.h" | |
ba379fdc | 27 | |
4e4e5a6f | 28 | #if ENABLE(JIT) |
14957cd0 | 29 | #include "JIT.h" |
ba379fdc | 30 | |
4e4e5a6f A |
31 | #include "CodeBlock.h" |
32 | #include "GetterSetter.h" | |
33 | #include "JITInlineMethods.h" | |
34 | #include "JITStubCall.h" | |
35 | #include "JSArray.h" | |
36 | #include "JSFunction.h" | |
37 | #include "JSPropertyNameIterator.h" | |
38 | #include "Interpreter.h" | |
39 | #include "LinkBuffer.h" | |
40 | #include "RepatchBuffer.h" | |
41 | #include "ResultType.h" | |
42 | #include "SamplingTool.h" | |
ba379fdc | 43 | |
4e4e5a6f A |
44 | #ifndef NDEBUG |
45 | #include <stdio.h> | |
ba379fdc A |
46 | #endif |
47 | ||
4e4e5a6f | 48 | using namespace std; |
ba379fdc | 49 | |
4e4e5a6f | 50 | namespace JSC { |
14957cd0 | 51 | #if USE(JSVALUE64) |
ba379fdc | 52 | |
6fe7ccc8 | 53 | JIT::CodeRef JIT::stringGetByValStubGenerator(JSGlobalData* globalData) |
9dae56ea | 54 | { |
4e4e5a6f A |
55 | JSInterfaceJIT jit; |
56 | JumpList failures; | |
6fe7ccc8 | 57 | failures.append(jit.branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info))); |
ba379fdc | 58 | |
14957cd0 | 59 | // Load string length to regT2, and start the process of loading the data pointer into regT0 |
4e4e5a6f A |
60 | jit.load32(Address(regT0, ThunkHelpers::jsStringLengthOffset()), regT2); |
61 | jit.loadPtr(Address(regT0, ThunkHelpers::jsStringValueOffset()), regT0); | |
6fe7ccc8 A |
62 | failures.append(jit.branchTest32(Zero, regT0)); |
63 | ||
4e4e5a6f A |
64 | // Do an unsigned compare to simultaneously filter negative indices as well as indices that are too large |
65 | failures.append(jit.branch32(AboveOrEqual, regT1, regT2)); | |
ba379fdc | 66 | |
4e4e5a6f | 67 | // Load the character |
6fe7ccc8 A |
68 | JumpList is16Bit; |
69 | JumpList cont8Bit; | |
70 | // Load the string flags | |
71 | jit.loadPtr(Address(regT0, ThunkHelpers::stringImplFlagsOffset()), regT2); | |
72 | jit.loadPtr(Address(regT0, ThunkHelpers::stringImplDataOffset()), regT0); | |
73 | is16Bit.append(jit.branchTest32(Zero, regT2, TrustedImm32(ThunkHelpers::stringImpl8BitFlag()))); | |
74 | jit.load8(BaseIndex(regT0, regT1, TimesOne, 0), regT0); | |
75 | cont8Bit.append(jit.jump()); | |
76 | is16Bit.link(&jit); | |
4e4e5a6f | 77 | jit.load16(BaseIndex(regT0, regT1, TimesTwo, 0), regT0); |
6fe7ccc8 A |
78 | cont8Bit.link(&jit); |
79 | ||
14957cd0 A |
80 | failures.append(jit.branch32(AboveOrEqual, regT0, TrustedImm32(0x100))); |
81 | jit.move(TrustedImmPtr(globalData->smallStrings.singleCharacterStrings()), regT1); | |
4e4e5a6f A |
82 | jit.loadPtr(BaseIndex(regT1, regT0, ScalePtr, 0), regT0); |
83 | jit.ret(); | |
f9bf01c6 | 84 | |
4e4e5a6f | 85 | failures.link(&jit); |
14957cd0 | 86 | jit.move(TrustedImm32(0), regT0); |
4e4e5a6f | 87 | jit.ret(); |
f9bf01c6 | 88 | |
6fe7ccc8 A |
89 | LinkBuffer patchBuffer(*globalData, &jit, GLOBAL_THUNK_ID); |
90 | return patchBuffer.finalizeCode(); | |
f9bf01c6 A |
91 | } |
92 | ||
ba379fdc A |
93 | void JIT::emit_op_get_by_val(Instruction* currentInstruction) |
94 | { | |
f9bf01c6 A |
95 | unsigned dst = currentInstruction[1].u.operand; |
96 | unsigned base = currentInstruction[2].u.operand; | |
97 | unsigned property = currentInstruction[3].u.operand; | |
98 | ||
99 | emitGetVirtualRegisters(base, regT0, property, regT1); | |
ba379fdc | 100 | emitJumpSlowCaseIfNotImmediateInteger(regT1); |
14957cd0 | 101 | |
ba379fdc | 102 | // This is technically incorrect - we're zero-extending an int32. On the hot path this doesn't matter. |
f9bf01c6 A |
103 | // We check the value as if it was a uint32 against the m_vectorLength - which will always fail if |
104 | // number was signed since m_vectorLength is always less than intmax (since the total allocation | |
ba379fdc A |
105 | // size is always less than 4Gb). As such zero extending wil have been correct (and extending the value |
106 | // to 64-bits is necessary since it's used in the address calculation. We zero extend rather than sign | |
107 | // extending since it makes it easier to re-tag the value in the slow case. | |
108 | zeroExtend32ToPtr(regT1, regT1); | |
14957cd0 | 109 | |
f9bf01c6 | 110 | emitJumpSlowCaseIfNotJSCell(regT0, base); |
6fe7ccc8 | 111 | addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSArray::s_info))); |
ba379fdc | 112 | |
14957cd0 A |
113 | loadPtr(Address(regT0, JSArray::storageOffset()), regT2); |
114 | addSlowCase(branch32(AboveOrEqual, regT1, Address(regT0, JSArray::vectorLengthOffset()))); | |
ba379fdc | 115 | |
ba379fdc | 116 | loadPtr(BaseIndex(regT2, regT1, ScalePtr, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])), regT0); |
f9bf01c6 A |
117 | addSlowCase(branchTestPtr(Zero, regT0)); |
118 | ||
6fe7ccc8 | 119 | emitValueProfilingSite(); |
f9bf01c6 A |
120 | emitPutVirtualRegister(dst); |
121 | } | |
122 | ||
4e4e5a6f A |
123 | void JIT::emitSlow_op_get_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter) |
124 | { | |
125 | unsigned dst = currentInstruction[1].u.operand; | |
126 | unsigned base = currentInstruction[2].u.operand; | |
127 | unsigned property = currentInstruction[3].u.operand; | |
128 | ||
129 | linkSlowCase(iter); // property int32 check | |
130 | linkSlowCaseIfNotJSCell(iter, base); // base cell check | |
131 | Jump nonCell = jump(); | |
132 | linkSlowCase(iter); // base array check | |
6fe7ccc8 A |
133 | Jump notString = branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info)); |
134 | emitNakedCall(CodeLocationLabel(m_globalData->getCTIStub(stringGetByValStubGenerator).code())); | |
4e4e5a6f A |
135 | Jump failed = branchTestPtr(Zero, regT0); |
136 | emitPutVirtualRegister(dst, regT0); | |
137 | emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_get_by_val)); | |
138 | failed.link(this); | |
139 | notString.link(this); | |
140 | nonCell.link(this); | |
141 | ||
142 | linkSlowCase(iter); // vector length check | |
143 | linkSlowCase(iter); // empty value | |
144 | ||
145 | JITStubCall stubCall(this, cti_op_get_by_val); | |
146 | stubCall.addArgument(base, regT2); | |
147 | stubCall.addArgument(property, regT2); | |
148 | stubCall.call(dst); | |
6fe7ccc8 A |
149 | |
150 | emitValueProfilingSite(); | |
4e4e5a6f A |
151 | } |
152 | ||
14957cd0 | 153 | void JIT::compileGetDirectOffset(RegisterID base, RegisterID result, RegisterID offset, RegisterID scratch) |
f9bf01c6 | 154 | { |
6fe7ccc8 | 155 | loadPtr(Address(base, JSObject::offsetOfPropertyStorage()), scratch); |
f9bf01c6 | 156 | loadPtr(BaseIndex(scratch, offset, ScalePtr, 0), result); |
f9bf01c6 A |
157 | } |
158 | ||
159 | void JIT::emit_op_get_by_pname(Instruction* currentInstruction) | |
160 | { | |
161 | unsigned dst = currentInstruction[1].u.operand; | |
162 | unsigned base = currentInstruction[2].u.operand; | |
163 | unsigned property = currentInstruction[3].u.operand; | |
164 | unsigned expected = currentInstruction[4].u.operand; | |
165 | unsigned iter = currentInstruction[5].u.operand; | |
166 | unsigned i = currentInstruction[6].u.operand; | |
167 | ||
168 | emitGetVirtualRegister(property, regT0); | |
169 | addSlowCase(branchPtr(NotEqual, regT0, addressFor(expected))); | |
170 | emitGetVirtualRegisters(base, regT0, iter, regT1); | |
171 | emitJumpSlowCaseIfNotJSCell(regT0, base); | |
172 | ||
173 | // Test base's structure | |
14957cd0 | 174 | loadPtr(Address(regT0, JSCell::structureOffset()), regT2); |
f9bf01c6 A |
175 | addSlowCase(branchPtr(NotEqual, regT2, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure)))); |
176 | load32(addressFor(i), regT3); | |
14957cd0 | 177 | sub32(TrustedImm32(1), regT3); |
f9bf01c6 | 178 | addSlowCase(branch32(AboveOrEqual, regT3, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_numCacheableSlots)))); |
14957cd0 | 179 | compileGetDirectOffset(regT0, regT0, regT3, regT1); |
f9bf01c6 A |
180 | |
181 | emitPutVirtualRegister(dst, regT0); | |
182 | } | |
183 | ||
184 | void JIT::emitSlow_op_get_by_pname(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter) | |
185 | { | |
186 | unsigned dst = currentInstruction[1].u.operand; | |
187 | unsigned base = currentInstruction[2].u.operand; | |
188 | unsigned property = currentInstruction[3].u.operand; | |
189 | ||
190 | linkSlowCase(iter); | |
191 | linkSlowCaseIfNotJSCell(iter, base); | |
192 | linkSlowCase(iter); | |
193 | linkSlowCase(iter); | |
194 | ||
195 | JITStubCall stubCall(this, cti_op_get_by_val); | |
196 | stubCall.addArgument(base, regT2); | |
197 | stubCall.addArgument(property, regT2); | |
198 | stubCall.call(dst); | |
ba379fdc A |
199 | } |
200 | ||
201 | void JIT::emit_op_put_by_val(Instruction* currentInstruction) | |
202 | { | |
f9bf01c6 A |
203 | unsigned base = currentInstruction[1].u.operand; |
204 | unsigned property = currentInstruction[2].u.operand; | |
205 | unsigned value = currentInstruction[3].u.operand; | |
206 | ||
207 | emitGetVirtualRegisters(base, regT0, property, regT1); | |
ba379fdc | 208 | emitJumpSlowCaseIfNotImmediateInteger(regT1); |
ba379fdc A |
209 | // See comment in op_get_by_val. |
210 | zeroExtend32ToPtr(regT1, regT1); | |
f9bf01c6 | 211 | emitJumpSlowCaseIfNotJSCell(regT0, base); |
6fe7ccc8 | 212 | addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSArray::s_info))); |
14957cd0 | 213 | addSlowCase(branch32(AboveOrEqual, regT1, Address(regT0, JSArray::vectorLengthOffset()))); |
ba379fdc | 214 | |
14957cd0 | 215 | loadPtr(Address(regT0, JSArray::storageOffset()), regT2); |
f9bf01c6 | 216 | Jump empty = branchTestPtr(Zero, BaseIndex(regT2, regT1, ScalePtr, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]))); |
ba379fdc | 217 | |
f9bf01c6 | 218 | Label storeResult(this); |
6fe7ccc8 A |
219 | emitGetVirtualRegister(value, regT3); |
220 | storePtr(regT3, BaseIndex(regT2, regT1, ScalePtr, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]))); | |
f9bf01c6 A |
221 | Jump end = jump(); |
222 | ||
223 | empty.link(this); | |
14957cd0 | 224 | add32(TrustedImm32(1), Address(regT2, OBJECT_OFFSETOF(ArrayStorage, m_numValuesInVector))); |
f9bf01c6 A |
225 | branch32(Below, regT1, Address(regT2, OBJECT_OFFSETOF(ArrayStorage, m_length))).linkTo(storeResult, this); |
226 | ||
6fe7ccc8 A |
227 | add32(TrustedImm32(1), regT1); |
228 | store32(regT1, Address(regT2, OBJECT_OFFSETOF(ArrayStorage, m_length))); | |
229 | sub32(TrustedImm32(1), regT1); | |
f9bf01c6 A |
230 | jump().linkTo(storeResult, this); |
231 | ||
232 | end.link(this); | |
6fe7ccc8 A |
233 | |
234 | emitWriteBarrier(regT0, regT3, regT1, regT3, ShouldFilterImmediates, WriteBarrierForPropertyAccess); | |
ba379fdc A |
235 | } |
236 | ||
6fe7ccc8 | 237 | void JIT::emitSlow_op_put_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter) |
ba379fdc | 238 | { |
6fe7ccc8 A |
239 | unsigned base = currentInstruction[1].u.operand; |
240 | unsigned property = currentInstruction[2].u.operand; | |
241 | unsigned value = currentInstruction[3].u.operand; | |
242 | ||
243 | linkSlowCase(iter); // property int32 check | |
244 | linkSlowCaseIfNotJSCell(iter, base); // base cell check | |
245 | linkSlowCase(iter); // base not array check | |
246 | linkSlowCase(iter); // in vector check | |
247 | ||
248 | JITStubCall stubPutByValCall(this, cti_op_put_by_val); | |
249 | stubPutByValCall.addArgument(regT0); | |
250 | stubPutByValCall.addArgument(property, regT2); | |
251 | stubPutByValCall.addArgument(value, regT2); | |
252 | stubPutByValCall.call(); | |
ba379fdc A |
253 | } |
254 | ||
6fe7ccc8 | 255 | void JIT::emit_op_put_by_index(Instruction* currentInstruction) |
ba379fdc | 256 | { |
6fe7ccc8 | 257 | JITStubCall stubCall(this, cti_op_put_by_index); |
ba379fdc | 258 | stubCall.addArgument(currentInstruction[1].u.operand, regT2); |
6fe7ccc8 | 259 | stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand)); |
ba379fdc A |
260 | stubCall.addArgument(currentInstruction[3].u.operand, regT2); |
261 | stubCall.call(); | |
262 | } | |
263 | ||
6fe7ccc8 | 264 | void JIT::emit_op_put_getter_setter(Instruction* currentInstruction) |
ba379fdc | 265 | { |
6fe7ccc8 | 266 | JITStubCall stubCall(this, cti_op_put_getter_setter); |
ba379fdc | 267 | stubCall.addArgument(currentInstruction[1].u.operand, regT2); |
14957cd0 | 268 | stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand))); |
ba379fdc | 269 | stubCall.addArgument(currentInstruction[3].u.operand, regT2); |
6fe7ccc8 | 270 | stubCall.addArgument(currentInstruction[4].u.operand, regT2); |
ba379fdc A |
271 | stubCall.call(); |
272 | } | |
273 | ||
274 | void JIT::emit_op_del_by_id(Instruction* currentInstruction) | |
275 | { | |
276 | JITStubCall stubCall(this, cti_op_del_by_id); | |
277 | stubCall.addArgument(currentInstruction[2].u.operand, regT2); | |
14957cd0 | 278 | stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand))); |
ba379fdc | 279 | stubCall.call(currentInstruction[1].u.operand); |
9dae56ea A |
280 | } |
281 | ||
ba379fdc A |
282 | void JIT::emit_op_method_check(Instruction* currentInstruction) |
283 | { | |
284 | // Assert that the following instruction is a get_by_id. | |
285 | ASSERT(m_interpreter->getOpcodeID((currentInstruction + OPCODE_LENGTH(op_method_check))->u.opcode) == op_get_by_id); | |
286 | ||
287 | currentInstruction += OPCODE_LENGTH(op_method_check); | |
288 | unsigned resultVReg = currentInstruction[1].u.operand; | |
289 | unsigned baseVReg = currentInstruction[2].u.operand; | |
290 | Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand)); | |
291 | ||
292 | emitGetVirtualRegister(baseVReg, regT0); | |
293 | ||
294 | // Do the method check - check the object & its prototype's structure inline (this is the common case). | |
6fe7ccc8 | 295 | m_methodCallCompilationInfo.append(MethodCallCompilationInfo(m_bytecodeOffset, m_propertyAccessCompilationInfo.size())); |
ba379fdc | 296 | MethodCallCompilationInfo& info = m_methodCallCompilationInfo.last(); |
f9bf01c6 | 297 | |
ba379fdc | 298 | Jump notCell = emitJumpIfNotJSCell(regT0); |
f9bf01c6 A |
299 | |
300 | BEGIN_UNINTERRUPTED_SEQUENCE(sequenceMethodCheck); | |
301 | ||
14957cd0 A |
302 | Jump structureCheck = branchPtrWithPatch(NotEqual, Address(regT0, JSCell::structureOffset()), info.structureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure))); |
303 | DataLabelPtr protoStructureToCompare, protoObj = moveWithPatch(TrustedImmPtr(0), regT1); | |
304 | Jump protoStructureCheck = branchPtrWithPatch(NotEqual, Address(regT1, JSCell::structureOffset()), protoStructureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure))); | |
ba379fdc A |
305 | |
306 | // This will be relinked to load the function without doing a load. | |
14957cd0 | 307 | DataLabelPtr putFunction = moveWithPatch(TrustedImmPtr(0), regT0); |
f9bf01c6 A |
308 | |
309 | END_UNINTERRUPTED_SEQUENCE(sequenceMethodCheck); | |
310 | ||
ba379fdc A |
311 | Jump match = jump(); |
312 | ||
ba379fdc A |
313 | // Link the failure cases here. |
314 | notCell.link(this); | |
315 | structureCheck.link(this); | |
316 | protoStructureCheck.link(this); | |
317 | ||
318 | // Do a regular(ish) get_by_id (the slow case will be link to | |
319 | // cti_op_get_by_id_method_check instead of cti_op_get_by_id. | |
6fe7ccc8 | 320 | compileGetByIdHotPath(baseVReg, ident); |
ba379fdc A |
321 | |
322 | match.link(this); | |
6fe7ccc8 | 323 | emitValueProfilingSite(m_bytecodeOffset + OPCODE_LENGTH(op_method_check)); |
ba379fdc A |
324 | emitPutVirtualRegister(resultVReg); |
325 | ||
326 | // We've already generated the following get_by_id, so make sure it's skipped over. | |
14957cd0 | 327 | m_bytecodeOffset += OPCODE_LENGTH(op_get_by_id); |
6fe7ccc8 A |
328 | |
329 | m_propertyAccessCompilationInfo.last().addMethodCheckInfo(info.structureToCompare, protoObj, protoStructureToCompare, putFunction); | |
ba379fdc | 330 | } |
9dae56ea | 331 | |
ba379fdc A |
332 | void JIT::emitSlow_op_method_check(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter) |
333 | { | |
334 | currentInstruction += OPCODE_LENGTH(op_method_check); | |
335 | unsigned resultVReg = currentInstruction[1].u.operand; | |
336 | unsigned baseVReg = currentInstruction[2].u.operand; | |
337 | Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand)); | |
338 | ||
339 | compileGetByIdSlowCase(resultVReg, baseVReg, ident, iter, true); | |
6fe7ccc8 | 340 | emitValueProfilingSite(m_bytecodeOffset + OPCODE_LENGTH(op_method_check)); |
ba379fdc A |
341 | |
342 | // We've already generated the following get_by_id, so make sure it's skipped over. | |
14957cd0 | 343 | m_bytecodeOffset += OPCODE_LENGTH(op_get_by_id); |
ba379fdc A |
344 | } |
345 | ||
ba379fdc A |
346 | void JIT::emit_op_get_by_id(Instruction* currentInstruction) |
347 | { | |
348 | unsigned resultVReg = currentInstruction[1].u.operand; | |
349 | unsigned baseVReg = currentInstruction[2].u.operand; | |
350 | Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand)); | |
351 | ||
352 | emitGetVirtualRegister(baseVReg, regT0); | |
6fe7ccc8 A |
353 | compileGetByIdHotPath(baseVReg, ident); |
354 | emitValueProfilingSite(); | |
ba379fdc A |
355 | emitPutVirtualRegister(resultVReg); |
356 | } | |
357 | ||
6fe7ccc8 | 358 | void JIT::compileGetByIdHotPath(int baseVReg, Identifier*) |
9dae56ea A |
359 | { |
360 | // As for put_by_id, get_by_id requires the offset of the Structure and the offset of the access to be patched. | |
361 | // Additionally, for get_by_id we need patch the offset of the branch to the slow case (we patch this to jump | |
362 | // to array-length / prototype access tranpolines, and finally we also the the property-map access offset as a label | |
363 | // to jump back to if one of these trampolies finds a match. | |
364 | ||
ba379fdc | 365 | emitJumpSlowCaseIfNotJSCell(regT0, baseVReg); |
9dae56ea | 366 | |
f9bf01c6 A |
367 | BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath); |
368 | ||
9dae56ea | 369 | Label hotPathBegin(this); |
9dae56ea A |
370 | |
371 | DataLabelPtr structureToCompare; | |
6fe7ccc8 | 372 | PatchableJump structureCheck = patchableBranchPtrWithPatch(NotEqual, Address(regT0, JSCell::structureOffset()), structureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure))); |
9dae56ea | 373 | addSlowCase(structureCheck); |
9dae56ea | 374 | |
6fe7ccc8 | 375 | loadPtr(Address(regT0, JSObject::offsetOfPropertyStorage()), regT0); |
14957cd0 | 376 | DataLabelCompact displacementLabel = loadPtrWithCompactAddressOffsetPatch(Address(regT0, patchGetByIdDefaultOffset), regT0); |
9dae56ea A |
377 | |
378 | Label putResult(this); | |
f9bf01c6 A |
379 | |
380 | END_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath); | |
381 | ||
6fe7ccc8 | 382 | m_propertyAccessCompilationInfo.append(PropertyStubCompilationInfo(PropertyStubGetById, m_bytecodeOffset, hotPathBegin, structureToCompare, structureCheck, displacementLabel, putResult)); |
9dae56ea A |
383 | } |
384 | ||
ba379fdc A |
385 | void JIT::emitSlow_op_get_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter) |
386 | { | |
387 | unsigned resultVReg = currentInstruction[1].u.operand; | |
388 | unsigned baseVReg = currentInstruction[2].u.operand; | |
389 | Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand)); | |
390 | ||
391 | compileGetByIdSlowCase(resultVReg, baseVReg, ident, iter, false); | |
6fe7ccc8 | 392 | emitValueProfilingSite(); |
ba379fdc | 393 | } |
9dae56ea | 394 | |
ba379fdc | 395 | void JIT::compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, bool isMethodCheck) |
9dae56ea A |
396 | { |
397 | // As for the hot path of get_by_id, above, we ensure that we can use an architecture specific offset | |
398 | // so that we only need track one pointer into the slow case code - we track a pointer to the location | |
399 | // of the call (which we can use to look up the patch information), but should a array-length or | |
400 | // prototype access trampoline fail we want to bail out back to here. To do so we can subtract back | |
401 | // the distance from the call to the head of the slow case. | |
402 | ||
403 | linkSlowCaseIfNotJSCell(iter, baseVReg); | |
404 | linkSlowCase(iter); | |
405 | ||
f9bf01c6 A |
406 | BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdSlowCase); |
407 | ||
9dae56ea | 408 | Label coldPathBegin(this); |
ba379fdc A |
409 | JITStubCall stubCall(this, isMethodCheck ? cti_op_get_by_id_method_check : cti_op_get_by_id); |
410 | stubCall.addArgument(regT0); | |
14957cd0 | 411 | stubCall.addArgument(TrustedImmPtr(ident)); |
ba379fdc | 412 | Call call = stubCall.call(resultVReg); |
9dae56ea | 413 | |
f9bf01c6 A |
414 | END_UNINTERRUPTED_SEQUENCE(sequenceGetByIdSlowCase); |
415 | ||
9dae56ea | 416 | // Track the location of the call; this will be used to recover patch information. |
6fe7ccc8 | 417 | m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex++].slowCaseInfo(PropertyStubGetById, coldPathBegin, call); |
9dae56ea A |
418 | } |
419 | ||
ba379fdc | 420 | void JIT::emit_op_put_by_id(Instruction* currentInstruction) |
9dae56ea | 421 | { |
ba379fdc A |
422 | unsigned baseVReg = currentInstruction[1].u.operand; |
423 | unsigned valueVReg = currentInstruction[3].u.operand; | |
424 | ||
9dae56ea A |
425 | // In order to be able to patch both the Structure, and the object offset, we store one pointer, |
426 | // to just after the arguments have been loaded into registers 'hotPathBegin', and we generate code | |
427 | // such that the Structure & offset are always at the same distance from this. | |
428 | ||
ba379fdc | 429 | emitGetVirtualRegisters(baseVReg, regT0, valueVReg, regT1); |
9dae56ea A |
430 | |
431 | // Jump to a slow case if either the base object is an immediate, or if the Structure does not match. | |
ba379fdc | 432 | emitJumpSlowCaseIfNotJSCell(regT0, baseVReg); |
9dae56ea | 433 | |
f9bf01c6 A |
434 | BEGIN_UNINTERRUPTED_SEQUENCE(sequencePutById); |
435 | ||
9dae56ea | 436 | Label hotPathBegin(this); |
9dae56ea A |
437 | |
438 | // It is important that the following instruction plants a 32bit immediate, in order that it can be patched over. | |
439 | DataLabelPtr structureToCompare; | |
14957cd0 | 440 | addSlowCase(branchPtrWithPatch(NotEqual, Address(regT0, JSCell::structureOffset()), structureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)))); |
9dae56ea | 441 | |
6fe7ccc8 A |
442 | loadPtr(Address(regT0, JSObject::offsetOfPropertyStorage()), regT2); |
443 | DataLabel32 displacementLabel = storePtrWithAddressOffsetPatch(regT1, Address(regT2, patchPutByIdDefaultOffset)); | |
f9bf01c6 A |
444 | |
445 | END_UNINTERRUPTED_SEQUENCE(sequencePutById); | |
446 | ||
6fe7ccc8 A |
447 | emitWriteBarrier(regT0, regT1, regT2, regT3, ShouldFilterImmediates, WriteBarrierForPropertyAccess); |
448 | ||
449 | m_propertyAccessCompilationInfo.append(PropertyStubCompilationInfo(PropertyStubPutById, m_bytecodeOffset, hotPathBegin, structureToCompare, displacementLabel)); | |
9dae56ea A |
450 | } |
451 | ||
ba379fdc | 452 | void JIT::emitSlow_op_put_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter) |
9dae56ea | 453 | { |
ba379fdc A |
454 | unsigned baseVReg = currentInstruction[1].u.operand; |
455 | Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand)); | |
4e4e5a6f | 456 | unsigned direct = currentInstruction[8].u.operand; |
ba379fdc | 457 | |
9dae56ea A |
458 | linkSlowCaseIfNotJSCell(iter, baseVReg); |
459 | linkSlowCase(iter); | |
460 | ||
4e4e5a6f | 461 | JITStubCall stubCall(this, direct ? cti_op_put_by_id_direct : cti_op_put_by_id); |
ba379fdc | 462 | stubCall.addArgument(regT0); |
14957cd0 | 463 | stubCall.addArgument(TrustedImmPtr(ident)); |
ba379fdc A |
464 | stubCall.addArgument(regT1); |
465 | Call call = stubCall.call(); | |
9dae56ea A |
466 | |
467 | // Track the location of the call; this will be used to recover patch information. | |
6fe7ccc8 | 468 | m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex++].slowCaseInfo(PropertyStubPutById, call); |
9dae56ea A |
469 | } |
470 | ||
ba379fdc A |
471 | // Compile a store into an object's property storage. May overwrite the |
472 | // value in objectReg. | |
6fe7ccc8 | 473 | void JIT::compilePutDirectOffset(RegisterID base, RegisterID value, size_t cachedOffset) |
ba379fdc A |
474 | { |
475 | int offset = cachedOffset * sizeof(JSValue); | |
6fe7ccc8 | 476 | loadPtr(Address(base, JSObject::offsetOfPropertyStorage()), base); |
ba379fdc A |
477 | storePtr(value, Address(base, offset)); |
478 | } | |
479 | ||
480 | // Compile a load from an object's property storage. May overwrite base. | |
6fe7ccc8 | 481 | void JIT::compileGetDirectOffset(RegisterID base, RegisterID result, size_t cachedOffset) |
9dae56ea | 482 | { |
ba379fdc | 483 | int offset = cachedOffset * sizeof(JSValue); |
6fe7ccc8 A |
484 | loadPtr(Address(base, JSObject::offsetOfPropertyStorage()), result); |
485 | loadPtr(Address(result, offset), result); | |
9dae56ea A |
486 | } |
487 | ||
14957cd0 | 488 | void JIT::compileGetDirectOffset(JSObject* base, RegisterID result, size_t cachedOffset) |
f9bf01c6 | 489 | { |
6fe7ccc8 A |
490 | loadPtr(base->addressOfPropertyStorage(), result); |
491 | loadPtr(Address(result, cachedOffset * sizeof(WriteBarrier<Unknown>)), result); | |
f9bf01c6 A |
492 | } |
493 | ||
4e4e5a6f | 494 | void JIT::privateCompilePutByIdTransition(StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, size_t cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress, bool direct) |
9dae56ea A |
495 | { |
496 | JumpList failureCases; | |
497 | // Check eax is an object of the right Structure. | |
ba379fdc | 498 | failureCases.append(emitJumpIfNotJSCell(regT0)); |
14957cd0 | 499 | failureCases.append(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(oldStructure))); |
6fe7ccc8 | 500 | |
14957cd0 | 501 | testPrototype(oldStructure->storedPrototype(), failureCases); |
6fe7ccc8 A |
502 | |
503 | ASSERT(oldStructure->storedPrototype().isNull() || oldStructure->storedPrototype().asCell()->structure() == chain->head()->get()); | |
9dae56ea | 504 | |
9dae56ea | 505 | // ecx = baseObject->m_structure |
4e4e5a6f | 506 | if (!direct) { |
6fe7ccc8 A |
507 | for (WriteBarrier<Structure>* it = chain->head(); *it; ++it) { |
508 | ASSERT((*it)->storedPrototype().isNull() || (*it)->storedPrototype().asCell()->structure() == it[1].get()); | |
14957cd0 | 509 | testPrototype((*it)->storedPrototype(), failureCases); |
6fe7ccc8 | 510 | } |
4e4e5a6f | 511 | } |
9dae56ea | 512 | |
6fe7ccc8 A |
513 | // If we succeed in all of our checks, and the code was optimizable, then make sure we |
514 | // decrement the rare case counter. | |
515 | #if ENABLE(VALUE_PROFILER) | |
516 | if (m_codeBlock->canCompileWithDFG()) { | |
517 | sub32( | |
518 | TrustedImm32(1), | |
519 | AbsoluteAddress(&m_codeBlock->rareCaseProfileForBytecodeOffset(stubInfo->bytecodeIndex)->m_counter)); | |
520 | } | |
521 | #endif | |
522 | ||
9dae56ea | 523 | // emit a call only if storage realloc is needed |
ba379fdc A |
524 | bool willNeedStorageRealloc = oldStructure->propertyStorageCapacity() != newStructure->propertyStorageCapacity(); |
525 | if (willNeedStorageRealloc) { | |
526 | // This trampoline was called to like a JIT stub; before we can can call again we need to | |
527 | // remove the return address from the stack, to prevent the stack from becoming misaligned. | |
528 | preserveReturnAddressAfterCall(regT3); | |
529 | ||
530 | JITStubCall stubCall(this, cti_op_put_by_id_transition_realloc); | |
531 | stubCall.skipArgument(); // base | |
532 | stubCall.skipArgument(); // ident | |
533 | stubCall.skipArgument(); // value | |
14957cd0 | 534 | stubCall.addArgument(TrustedImm32(oldStructure->propertyStorageCapacity())); |
6fe7ccc8 | 535 | stubCall.addArgument(TrustedImmPtr(newStructure)); |
ba379fdc | 536 | stubCall.call(regT0); |
f9bf01c6 | 537 | emitGetJITStubArg(2, regT1); |
ba379fdc A |
538 | |
539 | restoreReturnAddressBeforeReturn(regT3); | |
9dae56ea A |
540 | } |
541 | ||
6fe7ccc8 A |
542 | // Planting the new structure triggers the write barrier so we need |
543 | // an unconditional barrier here. | |
544 | emitWriteBarrier(regT0, regT1, regT2, regT3, UnconditionalWriteBarrier, WriteBarrierForPropertyAccess); | |
9dae56ea | 545 | |
6fe7ccc8 A |
546 | ASSERT(newStructure->classInfo() == oldStructure->classInfo()); |
547 | storePtr(TrustedImmPtr(newStructure), Address(regT0, JSCell::structureOffset())); | |
548 | compilePutDirectOffset(regT0, regT1, cachedOffset); | |
9dae56ea A |
549 | |
550 | ret(); | |
551 | ||
ba379fdc A |
552 | ASSERT(!failureCases.empty()); |
553 | failureCases.link(this); | |
554 | restoreArgumentReferenceForTrampoline(); | |
555 | Call failureCall = tailRecursiveCall(); | |
9dae56ea | 556 | |
6fe7ccc8 | 557 | LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock); |
9dae56ea | 558 | |
4e4e5a6f | 559 | patchBuffer.link(failureCall, FunctionPtr(direct ? cti_op_put_by_id_direct_fail : cti_op_put_by_id_fail)); |
9dae56ea | 560 | |
ba379fdc A |
561 | if (willNeedStorageRealloc) { |
562 | ASSERT(m_calls.size() == 1); | |
563 | patchBuffer.link(m_calls[0].from, FunctionPtr(cti_op_put_by_id_transition_realloc)); | |
564 | } | |
9dae56ea | 565 | |
6fe7ccc8 | 566 | stubInfo->stubRoutine = patchBuffer.finalizeCode(); |
ba379fdc | 567 | RepatchBuffer repatchBuffer(m_codeBlock); |
6fe7ccc8 | 568 | repatchBuffer.relinkCallerToTrampoline(returnAddress, CodeLocationLabel(stubInfo->stubRoutine.code())); |
9dae56ea A |
569 | } |
570 | ||
// Patches an inline get_by_id cache for a "self" hit: writes the expected
// Structure and the property-storage displacement directly into the hot path.
void JIT::patchGetByIdSelf(CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, size_t cachedOffset, ReturnAddressPtr returnAddress)
{
    RepatchBuffer repatchBuffer(codeBlock);

    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    // Should probably go to cti_op_get_by_id_fail, but that doesn't do anything interesting right now.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_self_fail));

    // Byte displacement of the cached slot within the property storage.
    int offset = sizeof(JSValue) * cachedOffset;

    // Patch the offset into the property map to load from, then patch the Structure to look for.
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(stubInfo->patch.baseline.u.get.structureToCompare), structure);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(stubInfo->patch.baseline.u.get.displacementLabel), offset);
}
585 | ||
// Patches an inline put_by_id cache for a "replace" (no structure transition):
// writes the expected Structure and the storage displacement into the hot path.
// `direct` selects the put_by_id_direct flavor of the fallback stub.
void JIT::patchPutByIdReplace(CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, size_t cachedOffset, ReturnAddressPtr returnAddress, bool direct)
{
    RepatchBuffer repatchBuffer(codeBlock);

    // We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
    // Should probably go to cti_op_put_by_id_fail, but that doesn't do anything interesting right now.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));

    // Byte displacement of the cached slot within the property storage.
    int offset = sizeof(JSValue) * cachedOffset;

    // Patch the offset into the property map to store to, then patch the Structure to look for.
    // Note: the put path's displacement is a DataLabel32 (the get path uses a DataLabelCompact).
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(stubInfo->patch.baseline.u.put.structureToCompare), structure);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(stubInfo->patch.baseline.u.put.displacementLabel), offset);
}
600 | ||
// Compiles a specialized stub for get_by_id of "length" on a JSArray, then
// patches the hot-path structure-check jump to enter the stub, and relinks
// the slow call so this site is not patched again.
void JIT::privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress)
{
    StructureStubInfo* stubInfo = &m_codeBlock->getStubInfo(returnAddress);

    // Check eax is an array
    Jump failureCases1 = branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSArray::s_info));

    // Checks out okay! - get the length from the storage
    loadPtr(Address(regT0, JSArray::storageOffset()), regT3);
    load32(Address(regT3, OBJECT_OFFSETOF(ArrayStorage, m_length)), regT2);
    // Lengths that don't fit in a positive int32 can't be returned as an immediate; fall back.
    Jump failureCases2 = branch32(LessThan, regT2, TrustedImm32(0));

    emitFastArithIntToImmNoCheck(regT2, regT0);
    Jump success = jump();

    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);

    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel slowCaseBegin = stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin);
    patchBuffer.link(failureCases1, slowCaseBegin);
    patchBuffer.link(failureCases2, slowCaseBegin);

    // On success return back to the hot patch code, at a point it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));

    // Track the stub we have created so that it will be deleted later.
    stubInfo->stubRoutine = patchBuffer.finalizeCode();

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubInfo->stubRoutine.code()));

    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_array_fail));
}
637 | ||
// Compiles a stub for get_by_id where the property lives on the direct
// prototype: guards both the base object's and the prototype's Structure,
// then loads the slot directly (or calls out for getter/custom slots).
void JIT::privateCompileGetByIdProto(StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
{
    // The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
    // referencing the prototype object - let's speculatively load its table nice and early!)
    JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));

    // Check eax is an object of the right Structure.
    Jump failureCases1 = checkStructure(regT0, structure);

    // Check the prototype object's Structure had not changed.
    move(TrustedImmPtr(protoObject), regT3);
    Jump failureCases2 = branchPtr(NotEqual, Address(regT3, JSCell::structureOffset()), TrustedImmPtr(prototypeStructure));

    bool needsStubLink = false;

    // Checks out okay!
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        // Getter slot: load the getter object, then call the getter stub.
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        // Custom slot: call out to the C++ custom getter.
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else
        compileGetDirectOffset(protoObject, regT0, cachedOffset);
    Jump success = jump();
    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);

    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel slowCaseBegin = stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin);
    patchBuffer.link(failureCases1, slowCaseBegin);
    patchBuffer.link(failureCases2, slowCaseBegin);

    // On success return back to the hot patch code, at a point it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));

    // Link any calls the getter/custom paths recorded.
    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }
    // Track the stub we have created so that it will be deleted later.
    stubInfo->stubRoutine = patchBuffer.finalizeCode();

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubInfo->stubRoutine.code()));

    // We don't want to patch more than once - hereafter the slow path builds a polymorphic list.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_proto_list));
}
700 | ||
// Appends one "self" case to a polymorphic get_by_id stub list: a new stub
// that checks this Structure and, on mismatch, falls through to the previous
// list entry (or the original slow path if this is the first entry).
void JIT::privateCompileGetByIdSelfList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
{
    Jump failureCase = checkStructure(regT0, structure);
    bool needsStubLink = false;
    bool isDirect = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        // Getter slot: load the getter object, then call the getter stub.
        needsStubLink = true;
        compileGetDirectOffset(regT0, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        // Custom slot: call out to the C++ custom getter.
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else {
        isDirect = true;
        compileGetDirectOffset(regT0, regT0, cachedOffset);
    }
    Jump success = jump();

    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);

    // Link any calls the getter/custom paths recorded.
    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }

    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel lastProtoBegin = CodeLocationLabel(polymorphicStructures->list[currentIndex - 1].stubRoutine.code());
    if (!lastProtoBegin)
        lastProtoBegin = stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin);

    patchBuffer.link(failureCase, lastProtoBegin);

    // On success return back to the hot patch code, at a point it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));

    MacroAssemblerCodeRef stubCode = patchBuffer.finalizeCode();

    // Record this case in the list (with a write barrier via .set) so it can be swept/deleted later.
    polymorphicStructures->list[currentIndex].set(*m_globalData, m_codeBlock->ownerExecutable(), stubCode, structure, isDirect);

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubCode.code()));
}
756 | ||
// Appends one direct-prototype case to a polymorphic get_by_id stub list:
// guards base and prototype Structures; on mismatch falls through to the
// previous list entry.
void JIT::privateCompileGetByIdProtoList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, CallFrame* callFrame)
{
    // The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
    // referencing the prototype object - let's speculatively load its table nice and early!)
    JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));

    // Check eax is an object of the right Structure.
    Jump failureCases1 = checkStructure(regT0, structure);

    // Check the prototype object's Structure had not changed.
    move(TrustedImmPtr(protoObject), regT3);
    Jump failureCases2 = branchPtr(NotEqual, Address(regT3, JSCell::structureOffset()), TrustedImmPtr(prototypeStructure));

    // Checks out okay!
    bool needsStubLink = false;
    bool isDirect = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        // Getter slot: load the getter object, then call the getter stub.
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        // Custom slot: call out to the C++ custom getter.
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else {
        isDirect = true;
        compileGetDirectOffset(protoObject, regT0, cachedOffset);
    }

    Jump success = jump();

    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);

    // Link any calls the getter/custom paths recorded.
    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }

    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel lastProtoBegin = CodeLocationLabel(prototypeStructures->list[currentIndex - 1].stubRoutine.code());
    patchBuffer.link(failureCases1, lastProtoBegin);
    patchBuffer.link(failureCases2, lastProtoBegin);

    // On success return back to the hot patch code, at a point it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));

    MacroAssemblerCodeRef stubCode = patchBuffer.finalizeCode();
    // Record this case in the list (with a write barrier via .set) so it can be swept/deleted later.
    prototypeStructures->list[currentIndex].set(*m_globalData, m_codeBlock->ownerExecutable(), stubCode, structure, prototypeStructure, isDirect);

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubCode.code()));
}
821 | ||
// Appends one prototype-chain case (property `count` hops up the chain) to a
// polymorphic get_by_id stub list: guards the base Structure and every
// Structure along the chain, then loads from the final prototype.
void JIT::privateCompileGetByIdChainList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, CallFrame* callFrame)
{
    ASSERT(count);
    JumpList bucketsOfFail;

    // Check eax is an object of the right Structure.
    Jump baseObjectCheck = checkStructure(regT0, structure);
    bucketsOfFail.append(baseObjectCheck);

    // Walk the chain, emitting a Structure guard for each prototype (testPrototype).
    Structure* currStructure = structure;
    WriteBarrier<Structure>* it = chain->head();
    JSObject* protoObject = 0;
    for (unsigned i = 0; i < count; ++i, ++it) {
        protoObject = asObject(currStructure->prototypeForLookup(callFrame));
        currStructure = it->get();
        testPrototype(protoObject, bucketsOfFail);
    }
    ASSERT(protoObject);

    bool needsStubLink = false;
    bool isDirect = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        // Getter slot: load the getter object, then call the getter stub.
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        // Custom slot: call out to the C++ custom getter.
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else {
        isDirect = true;
        compileGetDirectOffset(protoObject, regT0, cachedOffset);
    }
    Jump success = jump();

    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);

    // Link any calls the getter/custom paths recorded.
    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }

    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel lastProtoBegin = CodeLocationLabel(prototypeStructures->list[currentIndex - 1].stubRoutine.code());

    patchBuffer.link(bucketsOfFail, lastProtoBegin);

    // On success return back to the hot patch code, at a point it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));

    CodeRef stubRoutine = patchBuffer.finalizeCode();

    // Track the stub we have created so that it will be deleted later.
    prototypeStructures->list[currentIndex].set(callFrame->globalData(), m_codeBlock->ownerExecutable(), stubRoutine, structure, chain, isDirect);

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine.code()));
}
9dae56ea | 892 | |
// Compiles the (monomorphic) prototype-chain stub for get_by_id: guards the
// base Structure and every Structure along the chain, loads from the final
// prototype, and relinks the slow call so further misses build a list.
void JIT::privateCompileGetByIdChain(StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
{
    ASSERT(count);

    JumpList bucketsOfFail;

    // Check eax is an object of the right Structure.
    bucketsOfFail.append(checkStructure(regT0, structure));

    // Walk the chain, emitting a Structure guard for each prototype (testPrototype).
    Structure* currStructure = structure;
    WriteBarrier<Structure>* it = chain->head();
    JSObject* protoObject = 0;
    for (unsigned i = 0; i < count; ++i, ++it) {
        protoObject = asObject(currStructure->prototypeForLookup(callFrame));
        currStructure = it->get();
        testPrototype(protoObject, bucketsOfFail);
    }
    ASSERT(protoObject);

    bool needsStubLink = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        // Getter slot: load the getter object, then call the getter stub.
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        // Custom slot: call out to the C++ custom getter.
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else
        compileGetDirectOffset(protoObject, regT0, cachedOffset);
    Jump success = jump();

    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);

    // Link any calls the getter/custom paths recorded.
    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }

    // Use the patch information to link the failure cases back to the original slow case routine.
    patchBuffer.link(bucketsOfFail, stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin));

    // On success return back to the hot patch code, at a point it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));

    // Track the stub we have created so that it will be deleted later.
    CodeRef stubRoutine = patchBuffer.finalizeCode();
    stubInfo->stubRoutine = stubRoutine;

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine.code()));

    // We don't want to patch more than once - hereafter the slow path builds a polymorphic list.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_proto_list));
}
960 | ||
6fe7ccc8 A |
961 | void JIT::emit_op_get_scoped_var(Instruction* currentInstruction) |
962 | { | |
963 | int skip = currentInstruction[3].u.operand; | |
964 | ||
965 | emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT0); | |
966 | bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain(); | |
967 | ASSERT(skip || !checkTopLevel); | |
968 | if (checkTopLevel && skip--) { | |
969 | Jump activationNotCreated; | |
970 | if (checkTopLevel) | |
971 | activationNotCreated = branchTestPtr(Zero, addressFor(m_codeBlock->activationRegister())); | |
972 | loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0); | |
973 | activationNotCreated.link(this); | |
974 | } | |
975 | while (skip--) | |
976 | loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0); | |
977 | ||
978 | loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, object)), regT0); | |
979 | loadPtr(Address(regT0, JSVariableObject::offsetOfRegisters()), regT0); | |
980 | loadPtr(Address(regT0, currentInstruction[2].u.operand * sizeof(Register)), regT0); | |
981 | emitValueProfilingSite(); | |
982 | emitPutVirtualRegister(currentInstruction[1].u.operand); | |
983 | } | |
984 | ||
985 | void JIT::emit_op_put_scoped_var(Instruction* currentInstruction) | |
986 | { | |
987 | int skip = currentInstruction[2].u.operand; | |
988 | ||
989 | emitGetVirtualRegister(currentInstruction[3].u.operand, regT0); | |
990 | ||
991 | emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1); | |
992 | bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain(); | |
993 | ASSERT(skip || !checkTopLevel); | |
994 | if (checkTopLevel && skip--) { | |
995 | Jump activationNotCreated; | |
996 | if (checkTopLevel) | |
997 | activationNotCreated = branchTestPtr(Zero, addressFor(m_codeBlock->activationRegister())); | |
998 | loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, next)), regT1); | |
999 | activationNotCreated.link(this); | |
1000 | } | |
1001 | while (skip--) | |
1002 | loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, next)), regT1); | |
1003 | loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, object)), regT1); | |
9dae56ea | 1004 | |
6fe7ccc8 A |
1005 | emitWriteBarrier(regT1, regT0, regT2, regT3, ShouldFilterImmediates, WriteBarrierForVariableAccess); |
1006 | ||
1007 | loadPtr(Address(regT1, JSVariableObject::offsetOfRegisters()), regT1); | |
1008 | storePtr(regT0, Address(regT1, currentInstruction[1].u.operand * sizeof(Register))); | |
1009 | } | |
1010 | ||
// op_get_global_var dst(1) index(2): load slot `index` of the global object's
// register file into dst, recording a value profile for the DFG.
void JIT::emit_op_get_global_var(Instruction* currentInstruction)
{
    JSVariableObject* globalObject = m_codeBlock->globalObject();
    // The register-file pointer is loaded indirectly so the global object may
    // reallocate its registers without invalidating this code.
    loadPtr(&globalObject->m_registers, regT0);
    loadPtr(Address(regT0, currentInstruction[2].u.operand * sizeof(Register)), regT0);
    emitValueProfilingSite();
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
1019 | ||
// op_put_global_var index(1) value(2): store the value register into slot
// `index` of the global object's register file, with a write barrier on the
// (compile-time constant) global object.
void JIT::emit_op_put_global_var(Instruction* currentInstruction)
{
    JSGlobalObject* globalObject = m_codeBlock->globalObject();

    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);

    move(TrustedImmPtr(globalObject), regT1);
    loadPtr(Address(regT1, JSVariableObject::offsetOfRegisters()), regT1);
    storePtr(regT0, Address(regT1, currentInstruction[1].u.operand * sizeof(Register)));
    emitWriteBarrier(globalObject, regT0, regT2, ShouldFilterImmediates, WriteBarrierForVariableAccess);
}
1031 | ||
// Restores a get_by_id inline cache to its unpatched state: relink the slow
// call to the plain stub and neuter the inline fast path.
void JIT::resetPatchGetById(RepatchBuffer& repatchBuffer, StructureStubInfo* stubInfo)
{
    repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_get_by_id);
    // -1 can never match a real Structure pointer, so the inline check always fails.
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(stubInfo->patch.baseline.u.get.structureToCompare), reinterpret_cast<void*>(-1));
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(stubInfo->patch.baseline.u.get.displacementLabel), 0);
    // Send the structure-check jump back to the cold path (drops any stub routine).
    repatchBuffer.relink(stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck), stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin));
}
1039 | ||
1040 | void JIT::resetPatchPutById(RepatchBuffer& repatchBuffer, StructureStubInfo* stubInfo) | |
1041 | { | |
1042 | if (isDirectPutById(stubInfo)) | |
1043 | repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_put_by_id_direct); | |
1044 | else | |
1045 | repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_put_by_id); | |
1046 | repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(stubInfo->patch.baseline.u.put.structureToCompare), reinterpret_cast<void*>(-1)); | |
1047 | repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(stubInfo->patch.baseline.u.put.displacementLabel), 0); | |
1048 | } | |
9dae56ea | 1049 | |
14957cd0 A |
1050 | #endif // USE(JSVALUE64) |
1051 | ||
6fe7ccc8 A |
// Emits a generational write barrier for a store of `value` into the object
// held in `owner` (both in registers). Compiles to nothing unless GGC (and/or
// barrier profiling) is enabled. Clobbers scratch and scratch2.
void JIT::emitWriteBarrier(RegisterID owner, RegisterID value, RegisterID scratch, RegisterID scratch2, WriteBarrierMode mode, WriteBarrierUseKind useKind)
{
    UNUSED_PARAM(owner);
    UNUSED_PARAM(scratch);
    UNUSED_PARAM(scratch2);
    UNUSED_PARAM(useKind);
    UNUSED_PARAM(value);
    UNUSED_PARAM(mode);
    ASSERT(owner != scratch);
    ASSERT(owner != scratch2);

#if ENABLE(WRITE_BARRIER_PROFILING)
    emitCount(WriteBarrierCounters::jitCounterFor(useKind));
#endif

#if ENABLE(GGC)
    // Non-cell values need no barrier; optionally skip over it.
    Jump filterCells;
    if (mode == ShouldFilterImmediates)
        filterCells = emitJumpIfNotJSCell(value);
    // scratch = start of the MarkedBlock containing owner.
    move(owner, scratch);
    andPtr(TrustedImm32(static_cast<int32_t>(MarkedBlock::blockMask)), scratch);
    move(owner, scratch2);
    // consume additional 8 bits as we're using an approximate filter
    rshift32(TrustedImm32(MarkedBlock::atomShift + 8), scratch2);
    andPtr(TrustedImm32(MarkedBlock::atomMask >> 8), scratch2);
    // If the mark byte is clear, the owner is not old; no card needs dirtying.
    Jump filter = branchTest8(Zero, BaseIndex(scratch, scratch2, TimesOne, MarkedBlock::offsetOfMarks()));
    // Dirty the card covering owner's address within its block.
    move(owner, scratch2);
    rshift32(TrustedImm32(MarkedBlock::cardShift), scratch2);
    andPtr(TrustedImm32(MarkedBlock::cardMask), scratch2);
    store8(TrustedImm32(1), BaseIndex(scratch, scratch2, TimesOne, MarkedBlock::offsetOfCards()));
    filter.link(this);
    if (mode == ShouldFilterImmediates)
        filterCells.link(this);
#endif
}
1087 | ||
// Emits a generational write barrier for a store of `value` into a statically
// known owner cell: the card address can be computed at compile time, so the
// barrier is a single store. Compiles to nothing unless GGC (and/or barrier
// profiling) is enabled. Clobbers scratch.
void JIT::emitWriteBarrier(JSCell* owner, RegisterID value, RegisterID scratch, WriteBarrierMode mode, WriteBarrierUseKind useKind)
{
    UNUSED_PARAM(owner);
    UNUSED_PARAM(scratch);
    UNUSED_PARAM(useKind);
    UNUSED_PARAM(value);
    UNUSED_PARAM(mode);

#if ENABLE(WRITE_BARRIER_PROFILING)
    emitCount(WriteBarrierCounters::jitCounterFor(useKind));
#endif

#if ENABLE(GGC)
    // Non-cell values need no barrier; optionally skip over it.
    Jump filterCells;
    if (mode == ShouldFilterImmediates)
        filterCells = emitJumpIfNotJSCell(value);
    // The owner is known at compile time, so dirty its card unconditionally.
    uint8_t* cardAddress = Heap::addressOfCardFor(owner);
    move(TrustedImmPtr(cardAddress), scratch);
    store8(TrustedImm32(1), Address(scratch));
    if (mode == ShouldFilterImmediates)
        filterCells.link(this);
#endif
}
1111 | ||
14957cd0 A |
1112 | void JIT::testPrototype(JSValue prototype, JumpList& failureCases) |
1113 | { | |
1114 | if (prototype.isNull()) | |
1115 | return; | |
1116 | ||
1117 | ASSERT(prototype.isCell()); | |
1118 | move(TrustedImmPtr(prototype.asCell()), regT3); | |
1119 | failureCases.append(branchPtr(NotEqual, Address(regT3, JSCell::structureOffset()), TrustedImmPtr(prototype.asCell()->structure()))); | |
1120 | } | |
1121 | ||
// Repatches a method-call inline cache so that subsequent calls with the
// same receiver structure / prototype / callee take the fast path, then
// redirects the slow path to the "update" stub.
void JIT::patchMethodCallProto(JSGlobalData& globalData, CodeBlock* codeBlock, MethodCallLinkInfo& methodCallLinkInfo, StructureStubInfo& stubInfo, JSObject* callee, Structure* structure, JSObject* proto, ReturnAddressPtr returnAddress)
{
    RepatchBuffer repatchBuffer(codeBlock);

    // The structure-compare constant anchors the cache; the other patchable
    // constants live at fixed offsets from it (see dataLabelPtrAtOffset below).
    CodeLocationDataLabelPtr structureLocation = methodCallLinkInfo.cachedStructure.location();
    methodCallLinkInfo.cachedStructure.set(globalData, structureLocation, codeBlock->ownerExecutable(), structure);

    Structure* prototypeStructure = proto->structure();
    // Patch the prototype-structure check, the cached prototype object, and
    // the cached function constant baked into the generated code. The .set()
    // calls store through the link info with the owner executable —
    // presumably to keep the GC informed of these code-embedded references;
    // confirm against MethodCallLinkInfo.
    methodCallLinkInfo.cachedPrototypeStructure.set(globalData, structureLocation.dataLabelPtrAtOffset(stubInfo.patch.baseline.methodCheckProtoStructureToCompare), codeBlock->ownerExecutable(), prototypeStructure);
    methodCallLinkInfo.cachedPrototype.set(globalData, structureLocation.dataLabelPtrAtOffset(stubInfo.patch.baseline.methodCheckProtoObj), codeBlock->ownerExecutable(), proto);
    methodCallLinkInfo.cachedFunction.set(globalData, structureLocation.dataLabelPtrAtOffset(stubInfo.patch.baseline.methodCheckPutFunction), codeBlock->ownerExecutable(), callee);
    // Point the caller's slow-path call at the update stub so a later miss
    // can re-specialize this cache.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_method_check_update));
}
1135 | ||
1136 | bool JIT::isDirectPutById(StructureStubInfo* stubInfo) | |
1137 | { | |
1138 | switch (stubInfo->accessType) { | |
1139 | case access_put_by_id_transition_normal: | |
1140 | return false; | |
1141 | case access_put_by_id_transition_direct: | |
1142 | return true; | |
1143 | case access_put_by_id_replace: | |
1144 | case access_put_by_id_generic: { | |
1145 | void* oldCall = MacroAssembler::readCallTarget(stubInfo->callReturnLocation).executableAddress(); | |
1146 | if (oldCall == bitwise_cast<void*>(cti_op_put_by_id_direct) | |
1147 | || oldCall == bitwise_cast<void*>(cti_op_put_by_id_direct_generic) | |
1148 | || oldCall == bitwise_cast<void*>(cti_op_put_by_id_direct_fail)) | |
1149 | return true; | |
1150 | ASSERT(oldCall == bitwise_cast<void*>(cti_op_put_by_id) | |
1151 | || oldCall == bitwise_cast<void*>(cti_op_put_by_id_generic) | |
1152 | || oldCall == bitwise_cast<void*>(cti_op_put_by_id_fail)); | |
1153 | return false; | |
1154 | } | |
1155 | default: | |
1156 | ASSERT_NOT_REACHED(); | |
1157 | return false; | |
1158 | } | |
14957cd0 A |
1159 | } |
1160 | ||
9dae56ea A |
1161 | } // namespace JSC |
1162 | ||
1163 | #endif // ENABLE(JIT) |