jit/Repatch.cpp (from JavaScriptCore-7601.1.46.3)
1/*
2 * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
12 *
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24 */
25
26#include "config.h"
27#include "Repatch.h"
28
29#if ENABLE(JIT)
30
31#include "AccessorCallJITStubRoutine.h"
32#include "BinarySwitch.h"
33#include "CCallHelpers.h"
34#include "DFGOperations.h"
35#include "DFGSpeculativeJIT.h"
36#include "FTLThunks.h"
37#include "GCAwareJITStubRoutine.h"
38#include "GetterSetter.h"
39#include "JIT.h"
40#include "JITInlines.h"
41#include "LinkBuffer.h"
42#include "JSCInlines.h"
43#include "PolymorphicGetByIdList.h"
44#include "PolymorphicPutByIdList.h"
45#include "RegExpMatchesArray.h"
46#include "RepatchBuffer.h"
47#include "ScratchRegisterAllocator.h"
48#include "StackAlignment.h"
49#include "StructureRareDataInlines.h"
50#include "StructureStubClearingWatchpoint.h"
51#include "ThunkGenerators.h"
52#include <wtf/CommaPrinter.h>
53#include <wtf/ListDump.h>
54#include <wtf/StringPrintStream.h>
55
56namespace JSC {
57
58// Beware: in this code, it is not safe to assume anything about the following registers
59// that would ordinarily have well-known values:
60// - tagTypeNumberRegister
61// - tagMaskRegister
62
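// Because the stubs in this file cannot rely on tagTypeNumberRegister or tagMaskRegister
// holding their usual values, they rematerialize the tag constants inline. A minimal
// illustrative sketch (not part of the original file) of the boxing pattern used later
// in this file when returning an int32 length on JSVALUE64:
#if 0
    stubJit.load32(MacroAssembler::Address(baseGPR, JSString::offsetOfLength()), resultGPR);
    stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR); // box without tagTypeNumberRegister
#endif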
63static FunctionPtr readCallTarget(RepatchBuffer& repatchBuffer, CodeLocationCall call)
64{
65 FunctionPtr result = MacroAssembler::readCallTarget(call);
66#if ENABLE(FTL_JIT)
67 CodeBlock* codeBlock = repatchBuffer.codeBlock();
68 if (codeBlock->jitType() == JITCode::FTLJIT) {
69 return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
70 MacroAssemblerCodePtr::createFromExecutableAddress(
71 result.executableAddress())).callTarget());
72 }
73#else
74 UNUSED_PARAM(repatchBuffer);
75#endif // ENABLE(FTL_JIT)
76 return result;
77}
78
79static void repatchCall(RepatchBuffer& repatchBuffer, CodeLocationCall call, FunctionPtr newCalleeFunction)
80{
81#if ENABLE(FTL_JIT)
82 CodeBlock* codeBlock = repatchBuffer.codeBlock();
83 if (codeBlock->jitType() == JITCode::FTLJIT) {
84 VM& vm = *codeBlock->vm();
85 FTL::Thunks& thunks = *vm.ftlThunks;
86 FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
87 MacroAssemblerCodePtr::createFromExecutableAddress(
88 MacroAssembler::readCallTarget(call).executableAddress()));
89 key = key.withCallTarget(newCalleeFunction.executableAddress());
90 newCalleeFunction = FunctionPtr(
91 thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
92 }
93#endif // ENABLE(FTL_JIT)
94 repatchBuffer.relink(call, newCalleeFunction);
95}
96
97static void repatchCall(CodeBlock* codeblock, CodeLocationCall call, FunctionPtr newCalleeFunction)
98{
99 RepatchBuffer repatchBuffer(codeblock);
100 repatchCall(repatchBuffer, call, newCalleeFunction);
101}
102
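// A minimal usage sketch (illustration only, mirroring calls made later in this file):
// callers use repatchCall() to swap the slow-path operation attached to an inline cache,
// for example to move a get_by_id site from the generic path to the list-building path.
#if 0
    repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdBuildList);
#endif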
103static void repatchByIdSelfAccess(
104 VM& vm, CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure,
105 const Identifier& propertyName, PropertyOffset offset, const FunctionPtr &slowPathFunction,
106 bool compact)
107{
108 if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
109 vm.registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
110
111 RepatchBuffer repatchBuffer(codeBlock);
112
113 // Only optimize once!
114 repatchCall(repatchBuffer, stubInfo.callReturnLocation, slowPathFunction);
115
116 // Patch the structure check & the offset of the load.
117 repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall), bitwise_cast<int32_t>(structure->id()));
118 repatchBuffer.setLoadInstructionIsActive(stubInfo.callReturnLocation.convertibleLoadAtOffset(stubInfo.patch.deltaCallToStorageLoad), isOutOfLineOffset(offset));
119#if USE(JSVALUE64)
120 if (compact)
121 repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
122 else
123 repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
124#elif USE(JSVALUE32_64)
125 if (compact) {
126 repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
127 repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
128 } else {
129 repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
130 repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
131 }
132#endif
133}
134
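// In effect, the fast path that repatchByIdSelfAccess() rewires behaves like the
// following pseudo-C++ (a sketch for orientation, not the emitted code):
//     if (base->structureID() != patchedStructureID)
//         goto slowCase;                                   // slow case ends in the repatched slow-path call
//     storage = outOfLine ? base->butterfly() : base;      // the "convertible load" is toggled accordingly
//     value = storage[patchedOffset];                      // displacement patched for this property's offset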
135static void addStructureTransitionCheck(
136 JSCell* object, Structure* structure, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
137 MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
138{
139 if (object->structure() == structure && structure->transitionWatchpointSetIsStillValid()) {
140 structure->addTransitionWatchpoint(stubInfo.addWatchpoint(codeBlock));
141 if (!ASSERT_DISABLED) {
142 // If we execute this code, the object must have the structure we expect. Assert
143 // this in debug modes.
144 jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
145 MacroAssembler::Jump ok = branchStructure(
146 jit,
147 MacroAssembler::Equal,
148 MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
149 structure);
150 jit.abortWithReason(RepatchIneffectiveWatchpoint);
151 ok.link(&jit);
152 }
153 return;
154 }
155
156 jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
157 failureCases.append(
158 branchStructure(jit,
159 MacroAssembler::NotEqual,
160 MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
161 structure));
162}
163
164static void addStructureTransitionCheck(
165 JSValue prototype, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
166 MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
167{
168 if (prototype.isNull())
169 return;
170
171 ASSERT(prototype.isCell());
172
173 addStructureTransitionCheck(
174 prototype.asCell(), prototype.asCell()->structure(), codeBlock, stubInfo, jit,
175 failureCases, scratchGPR);
176}
177
178static void replaceWithJump(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
179{
180 if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
181 repatchBuffer.replaceWithJump(
182 RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(
183 stubInfo.callReturnLocation.dataLabel32AtOffset(
184 -(intptr_t)stubInfo.patch.deltaCheckImmToCall)),
185 CodeLocationLabel(target));
186 return;
187 }
188
189 repatchBuffer.relink(
190 stubInfo.callReturnLocation.jumpAtOffset(
191 stubInfo.patch.deltaCallToJump),
192 CodeLocationLabel(target));
193}
194
195static void emitRestoreScratch(MacroAssembler& stubJit, bool needToRestoreScratch, GPRReg scratchGPR, MacroAssembler::Jump& success, MacroAssembler::Jump& fail, MacroAssembler::JumpList failureCases)
196{
197 if (needToRestoreScratch) {
198 stubJit.popToRestore(scratchGPR);
199
200 success = stubJit.jump();
201
202 // link failure cases here, so we can pop scratchGPR, and then jump back.
203 failureCases.link(&stubJit);
204
205 stubJit.popToRestore(scratchGPR);
206
207 fail = stubJit.jump();
208 return;
209 }
210
211 success = stubJit.jump();
212}
213
214static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases, CodeLocationLabel successLabel, CodeLocationLabel slowCaseBegin)
215{
216 patchBuffer.link(success, successLabel);
217
218 if (needToRestoreScratch) {
219 patchBuffer.link(fail, slowCaseBegin);
220 return;
221 }
222
223 // link failure cases directly back to normal path
224 patchBuffer.link(failureCases, slowCaseBegin);
225}
226
227static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, StructureStubInfo& stubInfo, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases)
228{
229 linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
230}
231
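// The emit/link helpers above are always used as a pair: emitRestoreScratch() while
// assembling the stub, linkRestoreScratch() once a LinkBuffer exists. A condensed
// sketch of the pattern (illustration only, mirroring tryCacheGetByID below):
#if 0
    MacroAssembler::Jump success, fail;
    emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
    LinkBuffer patchBuffer(*vm, stubJit, codeBlock, JITCompilationCanFail);
    if (!patchBuffer.didFailToAllocate())
        linkRestoreScratch(patchBuffer, needToRestoreScratch, stubInfo, success, fail, failureCases);
#endif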
232enum ByIdStubKind {
233 GetValue,
234 GetUndefined,
235 CallGetter,
236 CallCustomGetter,
237 CallSetter,
238 CallCustomSetter
239};
240
241static const char* toString(ByIdStubKind kind)
242{
243 switch (kind) {
244 case GetValue:
245 return "GetValue";
246 case GetUndefined:
247 return "GetUndefined";
248 case CallGetter:
249 return "CallGetter";
250 case CallCustomGetter:
251 return "CallCustomGetter";
252 case CallSetter:
253 return "CallSetter";
254 case CallCustomSetter:
255 return "CallCustomSetter";
256 default:
257 RELEASE_ASSERT_NOT_REACHED();
258 return nullptr;
259 }
260}
261
262static ByIdStubKind kindFor(const PropertySlot& slot)
263{
264 if (slot.isCacheableValue())
265 return GetValue;
266 if (slot.isUnset())
267 return GetUndefined;
268 if (slot.isCacheableCustom())
269 return CallCustomGetter;
270 RELEASE_ASSERT(slot.isCacheableGetter());
271 return CallGetter;
272}
273
274static FunctionPtr customFor(const PropertySlot& slot)
275{
276 if (!slot.isCacheableCustom())
277 return FunctionPtr();
278 return FunctionPtr(slot.customGetter());
279}
280
281static ByIdStubKind kindFor(const PutPropertySlot& slot)
282{
283 RELEASE_ASSERT(!slot.isCacheablePut());
284 if (slot.isCacheableSetter())
285 return CallSetter;
286 RELEASE_ASSERT(slot.isCacheableCustom());
287 return CallCustomSetter;
288}
289
290static FunctionPtr customFor(const PutPropertySlot& slot)
291{
292 if (!slot.isCacheableCustom())
293 return FunctionPtr();
294 return FunctionPtr(slot.customSetter());
295}
296
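// Taken together, kindFor()/customFor() classify a property slot before a stub is
// built. A condensed sketch of how the callers below combine them (illustration only;
// prototypeChain, count, offset and the labels come from the calling context):
#if 0
    RefPtr<JITStubRoutine> stubRoutine;
    bool ok = generateByIdStub(
        exec, kindFor(slot), ident, customFor(slot), stubInfo, prototypeChain, count,
        offset, structure, loadTargetFromProxy, slot.watchpointSet(),
        successLabel, slowCaseLabel, stubRoutine);
#endif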
297static bool generateByIdStub(
298 ExecState* exec, ByIdStubKind kind, const Identifier& propertyName,
299 FunctionPtr custom, StructureStubInfo& stubInfo, StructureChain* chain, size_t count,
300 PropertyOffset offset, Structure* structure, bool loadTargetFromProxy, WatchpointSet* watchpointSet,
301 CodeLocationLabel successLabel, CodeLocationLabel slowCaseLabel, RefPtr<JITStubRoutine>& stubRoutine)
302{
303
304 VM* vm = &exec->vm();
305 GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
306 JSValueRegs valueRegs = JSValueRegs(
307#if USE(JSVALUE32_64)
308 static_cast<GPRReg>(stubInfo.patch.valueTagGPR),
309#endif
310 static_cast<GPRReg>(stubInfo.patch.valueGPR));
311 GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
312 bool needToRestoreScratch = scratchGPR == InvalidGPRReg;
313 RELEASE_ASSERT(!needToRestoreScratch || (kind == GetValue || kind == GetUndefined));
314
315 CCallHelpers stubJit(&exec->vm(), exec->codeBlock());
316 if (needToRestoreScratch) {
317 scratchGPR = AssemblyHelpers::selectScratchGPR(
318 baseGPR, valueRegs.tagGPR(), valueRegs.payloadGPR());
319 stubJit.pushToSave(scratchGPR);
320 needToRestoreScratch = true;
321 }
322
323 MacroAssembler::JumpList failureCases;
324
325 GPRReg baseForGetGPR;
326 if (loadTargetFromProxy) {
327 baseForGetGPR = valueRegs.payloadGPR();
328 failureCases.append(stubJit.branch8(
329 MacroAssembler::NotEqual,
330 MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()),
331 MacroAssembler::TrustedImm32(PureForwardingProxyType)));
332
333 stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSProxy::targetOffset()), scratchGPR);
334
335 failureCases.append(branchStructure(stubJit,
336 MacroAssembler::NotEqual,
337 MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
338 structure));
339 } else {
340 baseForGetGPR = baseGPR;
341
342 failureCases.append(branchStructure(stubJit,
343 MacroAssembler::NotEqual,
344 MacroAssembler::Address(baseForGetGPR, JSCell::structureIDOffset()),
345 structure));
346 }
347
348 CodeBlock* codeBlock = exec->codeBlock();
349 if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
350 vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
351
352 if (watchpointSet)
353 watchpointSet->add(stubInfo.addWatchpoint(codeBlock));
354
355 Structure* currStructure = structure;
356 JSObject* protoObject = 0;
357 if (chain) {
358 WriteBarrier<Structure>* it = chain->head();
359 for (unsigned i = 0; i < count; ++i, ++it) {
360 protoObject = asObject(currStructure->prototypeForLookup(exec));
361 Structure* protoStructure = protoObject->structure();
362 if (protoStructure->typeInfo().newImpurePropertyFiresWatchpoints())
363 vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
364 addStructureTransitionCheck(
365 protoObject, protoStructure, codeBlock, stubInfo, stubJit,
366 failureCases, scratchGPR);
367 currStructure = it->get();
368 }
369 ASSERT(!protoObject || protoObject->structure() == currStructure);
370 }
371
372 currStructure->startWatchingPropertyForReplacements(*vm, offset);
373 GPRReg baseForAccessGPR = InvalidGPRReg;
374 if (kind != GetUndefined) {
375 if (chain) {
376 // We could have clobbered scratchGPR earlier, so we have to reload from baseGPR to get the target.
377 if (loadTargetFromProxy)
378 stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSProxy::targetOffset()), baseForGetGPR);
379 stubJit.move(MacroAssembler::TrustedImmPtr(protoObject), scratchGPR);
380 baseForAccessGPR = scratchGPR;
381 } else {
382 // For proxy objects, we need to do all the Structure checks before moving the baseGPR into
383 // baseForGetGPR because if we fail any of the checks then we would have the wrong value in baseGPR
384 // on the slow path.
385 if (loadTargetFromProxy)
386 stubJit.move(scratchGPR, baseForGetGPR);
387 baseForAccessGPR = baseForGetGPR;
388 }
389 }
390
391 GPRReg loadedValueGPR = InvalidGPRReg;
392 if (kind == GetUndefined)
393 stubJit.moveTrustedValue(jsUndefined(), valueRegs);
394 else if (kind != CallCustomGetter && kind != CallCustomSetter) {
395 if (kind == GetValue)
396 loadedValueGPR = valueRegs.payloadGPR();
397 else
398 loadedValueGPR = scratchGPR;
399
400 GPRReg storageGPR;
401 if (isInlineOffset(offset))
402 storageGPR = baseForAccessGPR;
403 else {
404 stubJit.loadPtr(MacroAssembler::Address(baseForAccessGPR, JSObject::butterflyOffset()), loadedValueGPR);
405 storageGPR = loadedValueGPR;
406 }
407
408#if USE(JSVALUE64)
409 stubJit.load64(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset)), loadedValueGPR);
410#else
411 if (kind == GetValue)
412 stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + TagOffset), valueRegs.tagGPR());
413 stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + PayloadOffset), loadedValueGPR);
414#endif
415 }
416
417 // Stuff for custom getters.
418 MacroAssembler::Call operationCall;
419 MacroAssembler::Call handlerCall;
420
421 // Stuff for JS getters.
422 MacroAssembler::DataLabelPtr addressOfLinkFunctionCheck;
423 MacroAssembler::Call fastPathCall;
424 MacroAssembler::Call slowPathCall;
425 std::unique_ptr<CallLinkInfo> callLinkInfo;
426
427 MacroAssembler::Jump success, fail;
428 if (kind != GetValue && kind != GetUndefined) {
429 // Need to make sure that whenever this call is made in the future, we remember the
430 // place that we made it from. It just so happens to be the place that we are at
431 // right now!
432 stubJit.store32(MacroAssembler::TrustedImm32(exec->locationAsRawBits()),
433 CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));
434
435 if (kind == CallGetter || kind == CallSetter) {
436 // Create a JS call using a JS call inline cache. Assume that:
437 //
438 // - SP is aligned and represents the extent of the calling compiler's stack usage.
439 //
440 // - FP is set correctly (i.e. it points to the caller's call frame header).
441 //
442 // - SP - FP is an aligned difference.
443 //
444 // - Any byte between FP (exclusive) and SP (inclusive) could be live in the calling
445 // code.
446 //
447 // Therefore, we temporarily grow the stack for the purpose of the call and then
448 // shrink it after.
449
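            // Worked example of the sizing below (illustrative; the exact constants are
            // platform-dependent): with only the 'this' argument, numberOfParameters == 1.
            // If JSStack::CallFrameHeaderSize is 5 slots, sizeof(Register) is 8 and the
            // stack alignment is 16 on a 64-bit target, then
            //   numberOfRegsForCall  = 5 + 1 = 6
            //   numberOfBytesForCall = 6 * 8 - sizeof(CallerFrameAndPC) = 48 - 16 = 32
            //   alignedNumberOfBytesForCall = roundUpToMultipleOf(16, 32) = 32
            // so the stub grows the stack by 32 bytes for the call and shrinks it afterwards.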
450 callLinkInfo = std::make_unique<CallLinkInfo>();
451 callLinkInfo->setUpCall(CallLinkInfo::Call, stubInfo.codeOrigin, loadedValueGPR);
452
453 MacroAssembler::JumpList done;
454
455 // There is a 'this' argument but nothing else.
456 unsigned numberOfParameters = 1;
457 // ... unless we're calling a setter.
458 if (kind == CallSetter)
459 numberOfParameters++;
460
461 // Get the accessor; if there ain't one then the result is jsUndefined().
462 if (kind == CallSetter) {
463 stubJit.loadPtr(
464 MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfSetter()),
465 loadedValueGPR);
466 } else {
467 stubJit.loadPtr(
468 MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfGetter()),
469 loadedValueGPR);
470 }
471 MacroAssembler::Jump returnUndefined = stubJit.branchTestPtr(
472 MacroAssembler::Zero, loadedValueGPR);
473
474 unsigned numberOfRegsForCall =
475 JSStack::CallFrameHeaderSize + numberOfParameters;
476
477 unsigned numberOfBytesForCall =
478 numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);
479
480 unsigned alignedNumberOfBytesForCall =
481 WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);
482
483 stubJit.subPtr(
484 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
485 MacroAssembler::stackPointerRegister);
486
487 MacroAssembler::Address calleeFrame = MacroAssembler::Address(
488 MacroAssembler::stackPointerRegister,
489 -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));
490
491 stubJit.store32(
492 MacroAssembler::TrustedImm32(numberOfParameters),
493 calleeFrame.withOffset(
494 JSStack::ArgumentCount * sizeof(Register) + PayloadOffset));
495
496 stubJit.storeCell(
497 loadedValueGPR, calleeFrame.withOffset(JSStack::Callee * sizeof(Register)));
498
499 stubJit.storeCell(
500 baseForGetGPR,
501 calleeFrame.withOffset(
502 virtualRegisterForArgument(0).offset() * sizeof(Register)));
503
504 if (kind == CallSetter) {
505 stubJit.storeValue(
506 valueRegs,
507 calleeFrame.withOffset(
508 virtualRegisterForArgument(1).offset() * sizeof(Register)));
509 }
510
511 MacroAssembler::Jump slowCase = stubJit.branchPtrWithPatch(
512 MacroAssembler::NotEqual, loadedValueGPR, addressOfLinkFunctionCheck,
513 MacroAssembler::TrustedImmPtr(0));
514
515 fastPathCall = stubJit.nearCall();
516
517 stubJit.addPtr(
518 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
519 MacroAssembler::stackPointerRegister);
520 if (kind == CallGetter)
521 stubJit.setupResults(valueRegs);
522
523 done.append(stubJit.jump());
524 slowCase.link(&stubJit);
525
526 stubJit.move(loadedValueGPR, GPRInfo::regT0);
527#if USE(JSVALUE32_64)
528 stubJit.move(MacroAssembler::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
529#endif
530 stubJit.move(MacroAssembler::TrustedImmPtr(callLinkInfo.get()), GPRInfo::regT2);
531 slowPathCall = stubJit.nearCall();
532
533 stubJit.addPtr(
534 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
535 MacroAssembler::stackPointerRegister);
536 if (kind == CallGetter)
537 stubJit.setupResults(valueRegs);
538
539 done.append(stubJit.jump());
540 returnUndefined.link(&stubJit);
541
542 if (kind == CallGetter)
543 stubJit.moveTrustedValue(jsUndefined(), valueRegs);
544
545 done.link(&stubJit);
546 } else {
547 // getter: EncodedJSValue (*GetValueFunc)(ExecState*, JSObject* slotBase, EncodedJSValue thisValue, PropertyName);
548 // setter: void (*PutValueFunc)(ExecState*, JSObject* base, EncodedJSValue thisObject, EncodedJSValue value);
549#if USE(JSVALUE64)
550 if (kind == CallCustomGetter)
551 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImmPtr(propertyName.impl()));
552 else
553 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, valueRegs.gpr());
554#else
555 if (kind == CallCustomGetter)
556 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), MacroAssembler::TrustedImmPtr(propertyName.impl()));
557 else
558 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), valueRegs.payloadGPR(), valueRegs.tagGPR());
559#endif
560 stubJit.storePtr(GPRInfo::callFrameRegister, &vm->topCallFrame);
561
562 operationCall = stubJit.call();
563 if (kind == CallCustomGetter)
564 stubJit.setupResults(valueRegs);
565 MacroAssembler::Jump noException = stubJit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);
566
567 stubJit.setupArguments(CCallHelpers::TrustedImmPtr(vm), GPRInfo::callFrameRegister);
568 handlerCall = stubJit.call();
569 stubJit.jumpToExceptionHandler();
570
571 noException.link(&stubJit);
572 }
573 }
574 emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
575
576 LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock(), JITCompilationCanFail);
577 if (patchBuffer.didFailToAllocate())
578 return false;
579
580 linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
581 if (kind == CallCustomGetter || kind == CallCustomSetter) {
582 patchBuffer.link(operationCall, custom);
583 patchBuffer.link(handlerCall, lookupExceptionHandler);
584 } else if (kind == CallGetter || kind == CallSetter) {
585 callLinkInfo->setCallLocations(patchBuffer.locationOfNearCall(slowPathCall),
586 patchBuffer.locationOf(addressOfLinkFunctionCheck),
587 patchBuffer.locationOfNearCall(fastPathCall));
588
589 ThunkGenerator generator = linkThunkGeneratorFor(
590 CodeForCall, RegisterPreservationNotRequired);
591 patchBuffer.link(
592 slowPathCall, CodeLocationLabel(vm->getCTIStub(generator).code()));
593 }
594
595 MacroAssemblerCodeRef code = FINALIZE_CODE_FOR(
596 exec->codeBlock(), patchBuffer,
597 ("%s access stub for %s, return point %p",
598 toString(kind), toCString(*exec->codeBlock()).data(),
599 successLabel.executableAddress()));
600
601 if (kind == CallGetter || kind == CallSetter)
602 stubRoutine = adoptRef(new AccessorCallJITStubRoutine(code, *vm, WTF::move(callLinkInfo)));
603 else
604 stubRoutine = createJITStubRoutine(code, *vm, codeBlock->ownerExecutable(), true);
605
606 return true;
607}
608
609enum InlineCacheAction {
610 GiveUpOnCache,
611 RetryCacheLater,
612 AttemptToCache
613};
614
615static InlineCacheAction actionForCell(VM& vm, JSCell* cell)
616{
617 Structure* structure = cell->structure(vm);
618
619 TypeInfo typeInfo = structure->typeInfo();
620 if (typeInfo.prohibitsPropertyCaching())
621 return GiveUpOnCache;
622
623 if (structure->isUncacheableDictionary()) {
624 if (structure->hasBeenFlattenedBefore())
625 return GiveUpOnCache;
626 // Flattening could have changed the offset, so return early for another try.
627 asObject(cell)->flattenDictionaryObject(vm);
628 return RetryCacheLater;
629 }
630 ASSERT(!structure->isUncacheableDictionary());
631
632 if (typeInfo.hasImpureGetOwnPropertySlot() && !typeInfo.newImpurePropertyFiresWatchpoints())
633 return GiveUpOnCache;
634
635 return AttemptToCache;
636}
637
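// Callers treat the tri-state result as "stop", "try again later", or "go ahead".
// A condensed sketch of the pattern used by the tryCache* functions below
// (illustration only):
#if 0
    InlineCacheAction action = actionForCell(*vm, baseCell);
    if (action != AttemptToCache)
        return action; // GiveUpOnCache or RetryCacheLater propagates to the caller
#endif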
638static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
639{
640 if (Options::forceICFailure())
641 return GiveUpOnCache;
642
643 // FIXME: Write a test that proves we need to check for recursion here just
644 // like the interpreter does, then add a check for recursion.
645
646 CodeBlock* codeBlock = exec->codeBlock();
647 VM* vm = &exec->vm();
648
649 if ((isJSArray(baseValue) || isJSString(baseValue)) && propertyName == exec->propertyNames().length) {
650 GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
651#if USE(JSVALUE32_64)
652 GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
653#endif
654 GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
655
656 MacroAssembler stubJit;
657
658 if (isJSArray(baseValue)) {
659 GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
660 bool needToRestoreScratch = false;
661
662 if (scratchGPR == InvalidGPRReg) {
663#if USE(JSVALUE64)
664 scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
665#else
666 scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
667#endif
668 stubJit.pushToSave(scratchGPR);
669 needToRestoreScratch = true;
670 }
671
672 MacroAssembler::JumpList failureCases;
673
674 stubJit.load8(MacroAssembler::Address(baseGPR, JSCell::indexingTypeOffset()), scratchGPR);
675 failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IsArray)));
676 failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IndexingShapeMask)));
677
678 stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
679 stubJit.load32(MacroAssembler::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
680 failureCases.append(stubJit.branch32(MacroAssembler::LessThan, scratchGPR, MacroAssembler::TrustedImm32(0)));
681
682 stubJit.move(scratchGPR, resultGPR);
683#if USE(JSVALUE64)
684 stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
685#elif USE(JSVALUE32_64)
686 stubJit.move(AssemblyHelpers::TrustedImm32(JSValue::Int32Tag), resultTagGPR);
687#endif
688
689 MacroAssembler::Jump success, fail;
690
691 emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
692
693 LinkBuffer patchBuffer(*vm, stubJit, codeBlock, JITCompilationCanFail);
694 if (patchBuffer.didFailToAllocate())
695 return GiveUpOnCache;
696
697 linkRestoreScratch(patchBuffer, needToRestoreScratch, stubInfo, success, fail, failureCases);
698
699 stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
700 exec->codeBlock(), patchBuffer,
701 ("GetById array length stub for %s, return point %p",
702 toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
703 stubInfo.patch.deltaCallToDone).executableAddress()));
704
705 RepatchBuffer repatchBuffer(codeBlock);
706 replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
707 repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);
708
709 return RetryCacheLater;
710 }
711
712 // String.length case
713 MacroAssembler::Jump failure = stubJit.branch8(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()), MacroAssembler::TrustedImm32(StringType));
714
715 stubJit.load32(MacroAssembler::Address(baseGPR, JSString::offsetOfLength()), resultGPR);
716
717#if USE(JSVALUE64)
718 stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
719#elif USE(JSVALUE32_64)
720 stubJit.move(AssemblyHelpers::TrustedImm32(JSValue::Int32Tag), resultTagGPR);
721#endif
722
723 MacroAssembler::Jump success = stubJit.jump();
724
725 LinkBuffer patchBuffer(*vm, stubJit, codeBlock, JITCompilationCanFail);
726 if (patchBuffer.didFailToAllocate())
727 return GiveUpOnCache;
728
729 patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
730 patchBuffer.link(failure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
731
732 stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
733 exec->codeBlock(), patchBuffer,
734 ("GetById string length stub for %s, return point %p",
735 toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
736 stubInfo.patch.deltaCallToDone).executableAddress()));
737
738 RepatchBuffer repatchBuffer(codeBlock);
739 replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
740 repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);
741
742 return RetryCacheLater;
743 }
744
745 // FIXME: Cache property access for immediates.
746 if (!baseValue.isCell())
747 return GiveUpOnCache;
748
749 if (!slot.isCacheable() && !slot.isUnset())
750 return GiveUpOnCache;
751
752 JSCell* baseCell = baseValue.asCell();
753 Structure* structure = baseCell->structure(*vm);
754
755 InlineCacheAction action = actionForCell(*vm, baseCell);
756 if (action != AttemptToCache)
757 return action;
758
759 // Optimize self access.
760 if (slot.isCacheableValue()
761 && slot.slotBase() == baseValue
762 && !slot.watchpointSet()
763 && MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToPatchedStorage(slot.cachedOffset()))) {
764 structure->startWatchingPropertyForReplacements(*vm, slot.cachedOffset());
765 repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, propertyName, slot.cachedOffset(), operationGetByIdBuildList, true);
766 stubInfo.initGetByIdSelf(*vm, codeBlock->ownerExecutable(), structure);
767 return RetryCacheLater;
768 }
769
770 repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdBuildList);
771 return RetryCacheLater;
772}
773
774void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
775{
776 GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
777
778 if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo) == GiveUpOnCache)
779 repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
780}
781
782static void patchJumpToGetByIdStub(CodeBlock* codeBlock, StructureStubInfo& stubInfo, JITStubRoutine* stubRoutine)
783{
784 RELEASE_ASSERT(stubInfo.accessType == access_get_by_id_list);
785 RepatchBuffer repatchBuffer(codeBlock);
786 if (stubInfo.u.getByIdList.list->didSelfPatching()) {
787 repatchBuffer.relink(
788 stubInfo.callReturnLocation.jumpAtOffset(
789 stubInfo.patch.deltaCallToJump),
790 CodeLocationLabel(stubRoutine->code().code()));
791 return;
792 }
793
794 replaceWithJump(repatchBuffer, stubInfo, stubRoutine->code().code());
795}
796
797static InlineCacheAction tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& ident, const PropertySlot& slot, StructureStubInfo& stubInfo)
798{
799 if (!baseValue.isCell()
800 || (!slot.isCacheable() && !slot.isUnset()))
801 return GiveUpOnCache;
802
803 JSCell* baseCell = baseValue.asCell();
804 bool loadTargetFromProxy = false;
805 if (baseCell->type() == PureForwardingProxyType) {
806 baseValue = jsCast<JSProxy*>(baseCell)->target();
807 baseCell = baseValue.asCell();
808 loadTargetFromProxy = true;
809 }
810
811 VM* vm = &exec->vm();
812 CodeBlock* codeBlock = exec->codeBlock();
813
814 InlineCacheAction action = actionForCell(*vm, baseCell);
815 if (action != AttemptToCache)
816 return action;
817
818 Structure* structure = baseCell->structure(*vm);
819 TypeInfo typeInfo = structure->typeInfo();
820
821 if (stubInfo.patch.spillMode == NeedToSpill) {
822 // We cannot do as much inline caching if the registers were not flushed prior to this GetById. In particular,
823 // non-Value cached properties require planting calls, which requires registers to have been flushed. Thus,
824 // if registers were not flushed, don't do non-Value caching.
825 if (!slot.isCacheableValue() && !slot.isUnset())
826 return GiveUpOnCache;
827 }
828
829 PropertyOffset offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();
830 StructureChain* prototypeChain = 0;
831 size_t count = 0;
832
833 if (slot.isUnset() || slot.slotBase() != baseValue) {
834 if (typeInfo.prohibitsPropertyCaching() || structure->isDictionary())
835 return GiveUpOnCache;
836
837 if (slot.isUnset())
838 count = normalizePrototypeChain(exec, structure);
839 else
840 count = normalizePrototypeChainForChainAccess(
841 exec, structure, slot.slotBase(), ident, offset);
842 if (count == InvalidPrototypeChain)
843 return GiveUpOnCache;
844 prototypeChain = structure->prototypeChain(exec);
845 }
846
847 PolymorphicGetByIdList* list = PolymorphicGetByIdList::from(stubInfo);
848 if (list->isFull()) {
849 // We need this extra check because of recursion.
850 return GiveUpOnCache;
851 }
852
853 RefPtr<JITStubRoutine> stubRoutine;
854 bool result = generateByIdStub(
855 exec, kindFor(slot), ident, customFor(slot), stubInfo, prototypeChain, count, offset,
856 structure, loadTargetFromProxy, slot.watchpointSet(),
857 stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
858 CodeLocationLabel(list->currentSlowPathTarget(stubInfo)), stubRoutine);
859 if (!result)
860 return GiveUpOnCache;
861
862 GetByIdAccess::AccessType accessType;
863 if (slot.isCacheableValue())
864 accessType = slot.watchpointSet() ? GetByIdAccess::WatchedStub : GetByIdAccess::SimpleStub;
865 else if (slot.isUnset())
866 accessType = GetByIdAccess::SimpleMiss;
867 else if (slot.isCacheableGetter())
868 accessType = GetByIdAccess::Getter;
869 else
870 accessType = GetByIdAccess::CustomGetter;
871
872 list->addAccess(GetByIdAccess(
873 *vm, codeBlock->ownerExecutable(), accessType, stubRoutine, structure,
874 prototypeChain, count));
875
876 patchJumpToGetByIdStub(codeBlock, stubInfo, stubRoutine.get());
877
878 return list->isFull() ? GiveUpOnCache : RetryCacheLater;
879}
880
881void buildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
882{
883 GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
884
885 if (tryBuildGetByIDList(exec, baseValue, propertyName, slot, stubInfo) == GiveUpOnCache)
886 repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
887}
888
889static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
890{
891 if (slot.isStrictMode()) {
892 if (putKind == Direct)
893 return operationPutByIdDirectStrict;
894 return operationPutByIdStrict;
895 }
896 if (putKind == Direct)
897 return operationPutByIdDirectNonStrict;
898 return operationPutByIdNonStrict;
899}
900
901static V_JITOperation_ESsiJJI appropriateListBuildingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
902{
903 if (slot.isStrictMode()) {
904 if (putKind == Direct)
905 return operationPutByIdDirectStrictBuildList;
906 return operationPutByIdStrictBuildList;
907 }
908 if (putKind == Direct)
909 return operationPutByIdDirectNonStrictBuildList;
910 return operationPutByIdNonStrictBuildList;
911}
912
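// Each helper above picks one of four operations based on (strict mode x Direct/NotDirect);
// together the generic and list-building variants cover eight operations. For instance
// (illustration only):
#if 0
    // strict mode, ordinary (non-direct) put, still building the polymorphic list:
    V_JITOperation_ESsiJJI op = appropriateListBuildingPutByIdFunction(slot, NotDirect);
    // => operationPutByIdStrictBuildList
#endif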
913static bool emitPutReplaceStub(
914 ExecState* exec,
915 const Identifier&,
916 const PutPropertySlot& slot,
917 StructureStubInfo& stubInfo,
918 Structure* structure,
919 CodeLocationLabel failureLabel,
920 RefPtr<JITStubRoutine>& stubRoutine)
921{
922 VM* vm = &exec->vm();
923 GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
924#if USE(JSVALUE32_64)
925 GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
926#endif
927 GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
928
929 ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
930 allocator.lock(baseGPR);
931#if USE(JSVALUE32_64)
932 allocator.lock(valueTagGPR);
933#endif
934 allocator.lock(valueGPR);
935
936 GPRReg scratchGPR1 = allocator.allocateScratchGPR();
937
938 CCallHelpers stubJit(vm, exec->codeBlock());
939
940 allocator.preserveReusedRegistersByPushing(stubJit);
941
942 MacroAssembler::Jump badStructure = branchStructure(stubJit,
943 MacroAssembler::NotEqual,
944 MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
945 structure);
946
947#if USE(JSVALUE64)
948 if (isInlineOffset(slot.cachedOffset()))
949 stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
950 else {
951 stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
952 stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
953 }
954#elif USE(JSVALUE32_64)
955 if (isInlineOffset(slot.cachedOffset())) {
956 stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
957 stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
958 } else {
959 stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
960 stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
961 stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
962 }
963#endif
964
965 MacroAssembler::Jump success;
966 MacroAssembler::Jump failure;
967
968 if (allocator.didReuseRegisters()) {
969 allocator.restoreReusedRegistersByPopping(stubJit);
970 success = stubJit.jump();
971
972 badStructure.link(&stubJit);
973 allocator.restoreReusedRegistersByPopping(stubJit);
974 failure = stubJit.jump();
975 } else {
976 success = stubJit.jump();
977 failure = badStructure;
978 }
979
980 LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock(), JITCompilationCanFail);
981 if (patchBuffer.didFailToAllocate())
982 return false;
983
984 patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
985 patchBuffer.link(failure, failureLabel);
986
987 stubRoutine = FINALIZE_CODE_FOR_STUB(
988 exec->codeBlock(), patchBuffer,
989 ("PutById replace stub for %s, return point %p",
990 toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
991 stubInfo.patch.deltaCallToDone).executableAddress()));
992
993 return true;
994}
995
996static Structure* emitPutTransitionStubAndGetOldStructure(ExecState* exec, VM* vm, Structure*& structure, const Identifier& ident,
997 const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
998{
999 PropertyName pname(ident);
1000 Structure* oldStructure = structure;
1001 if (!oldStructure->isObject() || oldStructure->isDictionary() || parseIndex(pname))
1002 return nullptr;
1003
1004 PropertyOffset propertyOffset;
1005 structure = Structure::addPropertyTransitionToExistingStructureConcurrently(oldStructure, ident.impl(), 0, propertyOffset);
1006
1007 if (!structure || !structure->isObject() || structure->isDictionary() || !structure->propertyAccessesAreCacheable())
1008 return nullptr;
1009
1010 // Skip optimizing the case where we need a realloc, if we don't have
1011 // enough registers to make it happen.
1012 if (GPRInfo::numberOfRegisters < 6
1013 && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
1014 && oldStructure->outOfLineCapacity()) {
1015 return nullptr;
1016 }
1017
1018 // Skip optimizing the case where we need realloc, and the structure has
1019 // indexing storage.
1020 // FIXME: We shouldn't skip this! Implement it!
1021 // https://bugs.webkit.org/show_bug.cgi?id=130914
1022 if (oldStructure->couldHaveIndexingHeader())
1023 return nullptr;
1024
1025 if (normalizePrototypeChain(exec, structure) == InvalidPrototypeChain)
1026 return nullptr;
1027
1028 StructureChain* prototypeChain = structure->prototypeChain(exec);
1029
1030 // emitPutTransitionStub
1031
1032 CodeLocationLabel failureLabel = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase);
1033 RefPtr<JITStubRoutine>& stubRoutine = stubInfo.stubRoutine;
1034
1035 GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
1036#if USE(JSVALUE32_64)
1037 GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
1038#endif
1039 GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
1040
1041 ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
1042 allocator.lock(baseGPR);
1043#if USE(JSVALUE32_64)
1044 allocator.lock(valueTagGPR);
1045#endif
1046 allocator.lock(valueGPR);
1047
1048 CCallHelpers stubJit(vm);
1049
1050 bool needThirdScratch = false;
1051 if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()
1052 && oldStructure->outOfLineCapacity()) {
1053 needThirdScratch = true;
1054 }
1055
1056 GPRReg scratchGPR1 = allocator.allocateScratchGPR();
1057 ASSERT(scratchGPR1 != baseGPR);
1058 ASSERT(scratchGPR1 != valueGPR);
1059
1060 GPRReg scratchGPR2 = allocator.allocateScratchGPR();
1061 ASSERT(scratchGPR2 != baseGPR);
1062 ASSERT(scratchGPR2 != valueGPR);
1063 ASSERT(scratchGPR2 != scratchGPR1);
1064
1065 GPRReg scratchGPR3;
1066 if (needThirdScratch) {
1067 scratchGPR3 = allocator.allocateScratchGPR();
1068 ASSERT(scratchGPR3 != baseGPR);
1069 ASSERT(scratchGPR3 != valueGPR);
1070 ASSERT(scratchGPR3 != scratchGPR1);
1071 ASSERT(scratchGPR3 != scratchGPR2);
1072 } else
1073 scratchGPR3 = InvalidGPRReg;
1074
1075 allocator.preserveReusedRegistersByPushing(stubJit);
1076
1077 MacroAssembler::JumpList failureCases;
1078
1079 ASSERT(oldStructure->transitionWatchpointSetHasBeenInvalidated());
1080
1081 failureCases.append(branchStructure(stubJit,
1082 MacroAssembler::NotEqual,
1083 MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
1084 oldStructure));
1085
1086 addStructureTransitionCheck(
1087 oldStructure->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
1088 scratchGPR1);
1089
1090 if (putKind == NotDirect) {
1091 for (WriteBarrier<Structure>* it = prototypeChain->head(); *it; ++it) {
1092 addStructureTransitionCheck(
1093 (*it)->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
1094 scratchGPR1);
1095 }
1096 }
1097
1098 MacroAssembler::JumpList slowPath;
1099
1100 bool scratchGPR1HasStorage = false;
1101
1102 if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1103 size_t newSize = structure->outOfLineCapacity() * sizeof(JSValue);
1104 CopiedAllocator* copiedAllocator = &vm->heap.storageAllocator();
1105
1106 if (!oldStructure->outOfLineCapacity()) {
1107 stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
1108 slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
1109 stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
1110 stubJit.negPtr(scratchGPR1);
1111 stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
1112 stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
1113 } else {
1114 size_t oldSize = oldStructure->outOfLineCapacity() * sizeof(JSValue);
1115 ASSERT(newSize > oldSize);
1116
1117 stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);
1118 stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
1119 slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
1120 stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
1121 stubJit.negPtr(scratchGPR1);
1122 stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
1123 stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
1124 // We have scratchGPR1 = new storage, scratchGPR3 = old storage, scratchGPR2 = available
1125 for (size_t offset = 0; offset < oldSize; offset += sizeof(void*)) {
1126 stubJit.loadPtr(MacroAssembler::Address(scratchGPR3, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))), scratchGPR2);
1127 stubJit.storePtr(scratchGPR2, MacroAssembler::Address(scratchGPR1, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
1128 }
1129 }
1130
1131 stubJit.storePtr(scratchGPR1, MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()));
1132 scratchGPR1HasStorage = true;
1133 }
1134
1135 ASSERT(oldStructure->typeInfo().type() == structure->typeInfo().type());
1136 ASSERT(oldStructure->typeInfo().inlineTypeFlags() == structure->typeInfo().inlineTypeFlags());
1137 ASSERT(oldStructure->indexingType() == structure->indexingType());
1138#if USE(JSVALUE64)
1139 uint32_t val = structure->id();
1140#else
1141 uint32_t val = reinterpret_cast<uint32_t>(structure->id());
1142#endif
1143 stubJit.store32(MacroAssembler::TrustedImm32(val), MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()));
1144#if USE(JSVALUE64)
1145 if (isInlineOffset(slot.cachedOffset()))
1146 stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
1147 else {
1148 if (!scratchGPR1HasStorage)
1149 stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
1150 stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
1151 }
1152#elif USE(JSVALUE32_64)
1153 if (isInlineOffset(slot.cachedOffset())) {
1154 stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
1155 stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
1156 } else {
1157 if (!scratchGPR1HasStorage)
1158 stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
1159 stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
1160 stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
1161 }
1162#endif
1163
1164 ScratchBuffer* scratchBuffer = nullptr;
1165
1166#if ENABLE(GGC)
1167 MacroAssembler::Call callFlushWriteBarrierBuffer;
1168 MacroAssembler::Jump ownerIsRememberedOrInEden = stubJit.jumpIfIsRememberedOrInEden(baseGPR);
1169 {
1170 WriteBarrierBuffer& writeBarrierBuffer = stubJit.vm()->heap.writeBarrierBuffer();
1171 stubJit.load32(writeBarrierBuffer.currentIndexAddress(), scratchGPR2);
1172 MacroAssembler::Jump needToFlush =
1173 stubJit.branch32(MacroAssembler::AboveOrEqual, scratchGPR2, MacroAssembler::TrustedImm32(writeBarrierBuffer.capacity()));
1174
1175 stubJit.add32(MacroAssembler::TrustedImm32(1), scratchGPR2);
1176 stubJit.store32(scratchGPR2, writeBarrierBuffer.currentIndexAddress());
1177
1178 stubJit.move(MacroAssembler::TrustedImmPtr(writeBarrierBuffer.buffer()), scratchGPR1);
1179 // We use an offset of -sizeof(void*) because we already added 1 to scratchGPR2.
1180 stubJit.storePtr(baseGPR, MacroAssembler::BaseIndex(scratchGPR1, scratchGPR2, MacroAssembler::ScalePtr, static_cast<int32_t>(-sizeof(void*))));
1181
1182 MacroAssembler::Jump doneWithBarrier = stubJit.jump();
1183 needToFlush.link(&stubJit);
1184
1185 scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSizeForCall());
1186 allocator.preserveUsedRegistersToScratchBufferForCall(stubJit, scratchBuffer, scratchGPR2);
1187 stubJit.setupArgumentsWithExecState(baseGPR);
1188 callFlushWriteBarrierBuffer = stubJit.call();
1189 allocator.restoreUsedRegistersFromScratchBufferForCall(stubJit, scratchBuffer, scratchGPR2);
1190
1191 doneWithBarrier.link(&stubJit);
1192 }
1193 ownerIsRememberedOrInEden.link(&stubJit);
1194#endif
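    // The GGC block above is an inlined write barrier: it skips the buffer entirely when the
    // base object is already remembered or in eden, appends the object to the WriteBarrierBuffer
    // when there is room, and only calls out to flush when the buffer is full. In pseudo-C++
    // (a sketch for orientation, not the emitted code):
    //     if (!isRememberedOrInEden(base)) {
    //         if (buffer.currentIndex() < buffer.capacity())
    //             buffer[buffer.currentIndex()++] = base;
    //         else
    //             operationFlushWriteBarrierBuffer(exec, base); // live registers preserved around the call
    //     }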
1195
1196 MacroAssembler::Jump success;
1197 MacroAssembler::Jump failure;
1198
1199 if (allocator.didReuseRegisters()) {
1200 allocator.restoreReusedRegistersByPopping(stubJit);
1201 success = stubJit.jump();
1202
1203 failureCases.link(&stubJit);
1204 allocator.restoreReusedRegistersByPopping(stubJit);
1205 failure = stubJit.jump();
1206 } else
1207 success = stubJit.jump();
1208
1209 MacroAssembler::Call operationCall;
1210 MacroAssembler::Jump successInSlowPath;
1211
1212 if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1213 slowPath.link(&stubJit);
1214
1215 allocator.restoreReusedRegistersByPopping(stubJit);
1216 if (!scratchBuffer)
1217 scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSizeForCall());
1218 allocator.preserveUsedRegistersToScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
1219#if USE(JSVALUE64)
1220 stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR);
1221#else
1222 stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR, valueTagGPR);
1223#endif
1224 operationCall = stubJit.call();
1225 allocator.restoreUsedRegistersFromScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
1226 successInSlowPath = stubJit.jump();
1227 }
1228
1229 LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock(), JITCompilationCanFail);
1230 if (patchBuffer.didFailToAllocate())
1231 return nullptr;
1232
1233 patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
1234 if (allocator.didReuseRegisters())
1235 patchBuffer.link(failure, failureLabel);
1236 else
1237 patchBuffer.link(failureCases, failureLabel);
1238#if ENABLE(GGC)
1239 patchBuffer.link(callFlushWriteBarrierBuffer, operationFlushWriteBarrierBuffer);
1240#endif
1241 if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1242 patchBuffer.link(operationCall, operationReallocateStorageAndFinishPut);
1243 patchBuffer.link(successInSlowPath, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
1244 }
1245
1246 stubRoutine =
1247 createJITStubRoutine(
1248 FINALIZE_CODE_FOR(
1249 exec->codeBlock(), patchBuffer,
1250 ("PutById %stransition stub (%p -> %p) for %s, return point %p",
1251 structure->outOfLineCapacity() != oldStructure->outOfLineCapacity() ? "reallocating " : "",
1252 oldStructure, structure,
1253 toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
1254 stubInfo.patch.deltaCallToDone).executableAddress())),
1255 *vm,
1256 exec->codeBlock()->ownerExecutable(),
1257 structure->outOfLineCapacity() != oldStructure->outOfLineCapacity(),
1258 structure);
1259
1260 return oldStructure;
1261}
1262
1263static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1264{
1265 if (Options::forceICFailure())
1266 return GiveUpOnCache;
1267
1268 CodeBlock* codeBlock = exec->codeBlock();
1269 VM* vm = &exec->vm();
1270
1271 if (!baseValue.isCell())
1272 return GiveUpOnCache;
1273
1274 if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
1275 return GiveUpOnCache;
1276
1277 if (!structure->propertyAccessesAreCacheable())
1278 return GiveUpOnCache;
1279
1280 // Optimize self access.
1281 if (slot.base() == baseValue && slot.isCacheablePut()) {
1282 if (slot.type() == PutPropertySlot::NewProperty) {
1283
1284 Structure* oldStructure = emitPutTransitionStubAndGetOldStructure(exec, vm, structure, ident, slot, stubInfo, putKind);
1285 if (!oldStructure)
1286 return GiveUpOnCache;
1287
1288 StructureChain* prototypeChain = structure->prototypeChain(exec);
1289
1290 RepatchBuffer repatchBuffer(codeBlock);
1291 repatchBuffer.relink(
1292 stubInfo.callReturnLocation.jumpAtOffset(
1293 stubInfo.patch.deltaCallToJump),
1294 CodeLocationLabel(stubInfo.stubRoutine->code().code()));
1295 repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
1296
1297 stubInfo.initPutByIdTransition(*vm, codeBlock->ownerExecutable(), oldStructure, structure, prototypeChain, putKind == Direct);
1298
1299 return RetryCacheLater;
1300 }
1301
1302 if (!MacroAssembler::isPtrAlignedAddressOffset(offsetRelativeToPatchedStorage(slot.cachedOffset())))
1303 return GiveUpOnCache;
1304
1305 structure->didCachePropertyReplacement(*vm, slot.cachedOffset());
1306 repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, ident, slot.cachedOffset(), appropriateListBuildingPutByIdFunction(slot, putKind), false);
1307 stubInfo.initPutByIdReplace(*vm, codeBlock->ownerExecutable(), structure);
1308 return RetryCacheLater;
1309 }
1310
1311 if ((slot.isCacheableCustom() || slot.isCacheableSetter())
1312 && stubInfo.patch.spillMode == DontSpill) {
1313 RefPtr<JITStubRoutine> stubRoutine;
1314
1315 StructureChain* prototypeChain = 0;
1316 PropertyOffset offset = slot.cachedOffset();
1317 size_t count = 0;
1318 if (baseValue != slot.base()) {
1319 count = normalizePrototypeChainForChainAccess(exec, structure, slot.base(), ident, offset);
1320 if (count == InvalidPrototypeChain)
1321 return GiveUpOnCache;
1322 prototypeChain = structure->prototypeChain(exec);
1323 }
1324 PolymorphicPutByIdList* list;
1325 list = PolymorphicPutByIdList::from(putKind, stubInfo);
1326
1327 bool result = generateByIdStub(
1328 exec, kindFor(slot), ident, customFor(slot), stubInfo, prototypeChain, count,
1329 offset, structure, false, nullptr,
1330 stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
1331 stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase),
1332 stubRoutine);
1333 if (!result)
1334 return GiveUpOnCache;
1335
1336 list->addAccess(PutByIdAccess::setter(
1337 *vm, codeBlock->ownerExecutable(),
1338 slot.isCacheableSetter() ? PutByIdAccess::Setter : PutByIdAccess::CustomSetter,
1339 structure, prototypeChain, count, slot.customSetter(), stubRoutine));
1340
1341 RepatchBuffer repatchBuffer(codeBlock);
1342 repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1343 repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
1344 RELEASE_ASSERT(!list->isFull());
1345 return RetryCacheLater;
1346 }
1347
1348 return GiveUpOnCache;
1349}
1350
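// Editorial note on the flow below: repatchPutByID drives the put_by_id inline cache from the
// slow path. It first tries to install a one-case stub via tryCachePutByID; on success the
// slow call is left pointing at the list-building operation so later misses can grow a
// PolymorphicPutByIdList, and only if caching gives up entirely is the slow call repointed at
// the fully generic operation.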
void repatchPutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryCachePutByID(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
}

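// tryBuildPutByIdList is the second-chance path: instead of overwriting the single inline
// cache, it appends transition, replace, or setter cases to a PolymorphicPutByIdList and only
// falls back to the generic operation once that list fills up.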
static InlineCacheAction tryBuildPutByIdList(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    CodeBlock* codeBlock = exec->codeBlock();
    VM* vm = &exec->vm();

    if (!baseValue.isCell())
        return GiveUpOnCache;

    if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
        return GiveUpOnCache;

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    // Optimize self access.
    if (slot.base() == baseValue && slot.isCacheablePut()) {
        PolymorphicPutByIdList* list;
        RefPtr<JITStubRoutine> stubRoutine;

        if (slot.type() == PutPropertySlot::NewProperty) {
            list = PolymorphicPutByIdList::from(putKind, stubInfo);
            if (list->isFull())
                return GiveUpOnCache; // Will get here due to recursion.

            Structure* oldStructure = emitPutTransitionStubAndGetOldStructure(exec, vm, structure, propertyName, slot, stubInfo, putKind);

            if (!oldStructure)
                return GiveUpOnCache;

            StructureChain* prototypeChain = structure->prototypeChain(exec);
            stubRoutine = stubInfo.stubRoutine;
            list->addAccess(
                PutByIdAccess::transition(
                    *vm, codeBlock->ownerExecutable(),
                    oldStructure, structure, prototypeChain,
                    stubRoutine));

        } else {
            list = PolymorphicPutByIdList::from(putKind, stubInfo);
            if (list->isFull())
                return GiveUpOnCache; // Will get here due to recursion.

            structure->didCachePropertyReplacement(*vm, slot.cachedOffset());

            // We're now committed to creating the stub. Mogrify the meta-data accordingly.
            bool result = emitPutReplaceStub(
                exec, propertyName, slot, stubInfo,
                structure, CodeLocationLabel(list->currentSlowPathTarget()), stubRoutine);
            if (!result)
                return GiveUpOnCache;

            list->addAccess(
                PutByIdAccess::replace(
                    *vm, codeBlock->ownerExecutable(),
                    structure, stubRoutine));
        }
        RepatchBuffer repatchBuffer(codeBlock);
        repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
        if (list->isFull())
            repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));

        return RetryCacheLater;
    }

    if ((slot.isCacheableCustom() || slot.isCacheableSetter())
        && stubInfo.patch.spillMode == DontSpill) {
        RefPtr<JITStubRoutine> stubRoutine;
        StructureChain* prototypeChain = 0;
        PropertyOffset offset = slot.cachedOffset();
        size_t count = 0;
        if (baseValue != slot.base()) {
            count = normalizePrototypeChainForChainAccess(exec, structure, slot.base(), propertyName, offset);
            if (count == InvalidPrototypeChain)
                return GiveUpOnCache;
            prototypeChain = structure->prototypeChain(exec);
        }

        PolymorphicPutByIdList* list;
        list = PolymorphicPutByIdList::from(putKind, stubInfo);

        bool result = generateByIdStub(
            exec, kindFor(slot), propertyName, customFor(slot), stubInfo, prototypeChain, count,
            offset, structure, false, nullptr,
            stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
            CodeLocationLabel(list->currentSlowPathTarget()),
            stubRoutine);
        if (!result)
            return GiveUpOnCache;

        list->addAccess(PutByIdAccess::setter(
            *vm, codeBlock->ownerExecutable(),
            slot.isCacheableSetter() ? PutByIdAccess::Setter : PutByIdAccess::CustomSetter,
            structure, prototypeChain, count, slot.customSetter(), stubRoutine));

        RepatchBuffer repatchBuffer(codeBlock);
        repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
        if (list->isFull())
            repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));

        return RetryCacheLater;
    }
    return GiveUpOnCache;
}

void buildPutByIdList(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryBuildPutByIdList(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
}

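// The "in" inline cache is a list of structure-checking stubs. Each stub verifies the base's
// structure (and, when the lookup went through the prototype chain, every structure along that
// chain), then materializes the known boolean answer. A failed check jumps to the previously
// generated stub, so the stubs form a chain that ends at the original slow case.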
static InlineCacheAction tryRepatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (Options::forceICFailure())
        return GiveUpOnCache;

    if (!base->structure()->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    if (wasFound) {
        if (!slot.isCacheable())
            return GiveUpOnCache;
    }

    CodeBlock* codeBlock = exec->codeBlock();
    VM* vm = &exec->vm();
    Structure* structure = base->structure(*vm);

    PropertyOffset offsetIgnored;
    JSValue foundSlotBase = wasFound ? slot.slotBase() : JSValue();
    size_t count = !foundSlotBase || foundSlotBase != base ?
        normalizePrototypeChainForChainAccess(exec, structure, foundSlotBase, ident, offsetIgnored) : 0;
    if (count == InvalidPrototypeChain)
        return GiveUpOnCache;

    PolymorphicAccessStructureList* polymorphicStructureList;
    int listIndex;

    CodeLocationLabel successLabel = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone);
    CodeLocationLabel slowCaseLabel;

    if (stubInfo.accessType == access_unset) {
        polymorphicStructureList = new PolymorphicAccessStructureList();
        stubInfo.initInList(polymorphicStructureList, 0);
        slowCaseLabel = stubInfo.callReturnLocation.labelAtOffset(
            stubInfo.patch.deltaCallToSlowCase);
        listIndex = 0;
    } else {
        RELEASE_ASSERT(stubInfo.accessType == access_in_list);
        polymorphicStructureList = stubInfo.u.inList.structureList;
        listIndex = stubInfo.u.inList.listSize;
        slowCaseLabel = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());

        if (listIndex == POLYMORPHIC_LIST_CACHE_SIZE)
            return GiveUpOnCache;
    }

    StructureChain* chain = structure->prototypeChain(exec);
    RefPtr<JITStubRoutine> stubRoutine;

    {
        GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
        GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
        GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();

        CCallHelpers stubJit(vm);

        bool needToRestoreScratch;
        if (scratchGPR == InvalidGPRReg) {
            scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
            stubJit.pushToSave(scratchGPR);
            needToRestoreScratch = true;
        } else
            needToRestoreScratch = false;

        MacroAssembler::JumpList failureCases;
        failureCases.append(branchStructure(stubJit,
            MacroAssembler::NotEqual,
            MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
            structure));

        CodeBlock* codeBlock = exec->codeBlock();
        if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
            vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));

        if (slot.watchpointSet())
            slot.watchpointSet()->add(stubInfo.addWatchpoint(codeBlock));

        Structure* currStructure = structure;
        WriteBarrier<Structure>* it = chain->head();
        for (unsigned i = 0; i < count; ++i, ++it) {
            JSObject* prototype = asObject(currStructure->prototypeForLookup(exec));
            Structure* protoStructure = prototype->structure();
            addStructureTransitionCheck(
                prototype, protoStructure, exec->codeBlock(), stubInfo, stubJit,
                failureCases, scratchGPR);
            if (protoStructure->typeInfo().newImpurePropertyFiresWatchpoints())
                vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));
            currStructure = it->get();
        }
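        // With all structure checks emitted, the answer is a compile-time constant: the stub
        // materializes the encoded boolean JSValue directly (on 32-bit, only the payload is
        // written here).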

#if USE(JSVALUE64)
        stubJit.move(MacroAssembler::TrustedImm64(JSValue::encode(jsBoolean(wasFound))), resultGPR);
#else
        stubJit.move(MacroAssembler::TrustedImm32(wasFound), resultGPR);
#endif

        MacroAssembler::Jump success, fail;

        emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);

        LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock(), JITCompilationCanFail);
        if (patchBuffer.didFailToAllocate())
            return GiveUpOnCache;

        linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);

        stubRoutine = FINALIZE_CODE_FOR_STUB(
            exec->codeBlock(), patchBuffer,
            ("In (found = %s) stub for %s, return point %p",
                wasFound ? "yes" : "no", toCString(*exec->codeBlock()).data(),
                successLabel.executableAddress()));
    }

    polymorphicStructureList->list[listIndex].set(*vm, codeBlock->ownerExecutable(), stubRoutine, structure, true);
    stubInfo.u.inList.listSize++;

    RepatchBuffer repatchBuffer(codeBlock);
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));

    return listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1) ? RetryCacheLater : GiveUpOnCache;
}

void repatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationIn);
}

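// Call linking. The helpers below repatch a call site's slow-path call to one of the VM's
// shared thunks (link, virtual, or polymorphic-call thunks) and, on the fast path, bind the
// hot call directly to a callee's entrypoint.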
static void linkSlowFor(
    RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
{
    repatchBuffer.relink(
        callLinkInfo.callReturnLocation(), vm->getCTIStub(generator).code());
}

static void linkSlowFor(
    RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo,
    CodeSpecializationKind kind, RegisterPreservationMode registers)
{
    linkSlowFor(repatchBuffer, vm, callLinkInfo, virtualThunkGeneratorFor(kind, registers));
}

void linkFor(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
    JSFunction* callee, MacroAssemblerCodePtr codePtr, CodeSpecializationKind kind,
    RegisterPreservationMode registers)
{
    ASSERT(!callLinkInfo.stub());

    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();

    VM* vm = callerCodeBlock->vm();

    RepatchBuffer repatchBuffer(callerCodeBlock);

    ASSERT(!callLinkInfo.isLinked());
    callLinkInfo.setCallee(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin(), callerCodeBlock->ownerExecutable(), callee);
    callLinkInfo.setLastSeenCallee(exec->callerFrame()->vm(), callerCodeBlock->ownerExecutable(), callee);
    if (shouldShowDisassemblyFor(callerCodeBlock))
        dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin(), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
    repatchBuffer.relink(callLinkInfo.hotPathOther(), codePtr);

    if (calleeCodeBlock)
        calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);

    if (kind == CodeForCall) {
        linkSlowFor(
            repatchBuffer, vm, callLinkInfo, linkPolymorphicCallThunkGeneratorFor(registers));
        return;
    }

    ASSERT(kind == CodeForConstruct);
    linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForConstruct, registers);
}

void linkSlowFor(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeSpecializationKind kind,
    RegisterPreservationMode registers)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    RepatchBuffer repatchBuffer(callerCodeBlock);

    linkSlowFor(repatchBuffer, vm, callLinkInfo, kind, registers);
}

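// revertCall returns a call site to its unlinked state: the patched jump (if any) at
// hotPathBegin is reverted to the original branch-on-callee, the slow path is pointed at the
// given thunk, and the CallLinkInfo's callee, stub, and "seen" state are cleared.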
static void revertCall(
    RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
{
    repatchBuffer.revertJumpReplacementToBranchPtrWithPatch(
        RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
        static_cast<MacroAssembler::RegisterID>(callLinkInfo.calleeGPR()), 0);
    linkSlowFor(repatchBuffer, vm, callLinkInfo, generator);
    callLinkInfo.clearSeen();
    callLinkInfo.clearCallee();
    callLinkInfo.clearStub();
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}

void unlinkFor(
    RepatchBuffer& repatchBuffer, CallLinkInfo& callLinkInfo,
    CodeSpecializationKind kind, RegisterPreservationMode registers)
{
    if (Options::showDisassembly())
        dataLog("Unlinking call from ", callLinkInfo.callReturnLocation(), " in request from ", pointerDump(repatchBuffer.codeBlock()), "\n");

    revertCall(
        repatchBuffer, repatchBuffer.codeBlock()->vm(), callLinkInfo,
        linkThunkGeneratorFor(kind, registers));
}

void linkVirtualFor(
    ExecState* exec, CallLinkInfo& callLinkInfo,
    CodeSpecializationKind kind, RegisterPreservationMode registers)
{
    // FIXME: We could generate a virtual call stub here. This would lead to faster virtual calls
    // by eliminating the branch prediction bottleneck inside the shared virtual call thunk.

    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    if (shouldShowDisassemblyFor(callerCodeBlock))
        dataLog("Linking virtual call at ", *callerCodeBlock, " ", exec->callerFrame()->codeOrigin(), "\n");

    RepatchBuffer repatchBuffer(callerCodeBlock);
    revertCall(repatchBuffer, vm, callLinkInfo, virtualThunkGeneratorFor(kind, registers));
}

namespace {
struct CallToCodePtr {
    CCallHelpers::Call call;
    MacroAssemblerCodePtr codePtr;
};
} // anonymous namespace

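// linkPolymorphicCall builds a custom stub that switches on the callee (or, for closure calls,
// on the callee's executable) and near-calls the matching target directly. Callers below the
// top tier also get a fastCounts array so each case's hit count stays available as call-edge
// profiling for the higher tiers; if the variant list grows too large or a case can't be
// handled, we fall back to a virtual call.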
void linkPolymorphicCall(
    ExecState* exec, CallLinkInfo& callLinkInfo, CallVariant newVariant,
    RegisterPreservationMode registers)
{
    // Currently we can't do anything for non-function callees.
    // https://bugs.webkit.org/show_bug.cgi?id=140685
    if (!newVariant || !newVariant.executable()) {
        linkVirtualFor(exec, callLinkInfo, CodeForCall, registers);
        return;
    }

    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    CallVariantList list;
    if (PolymorphicCallStubRoutine* stub = callLinkInfo.stub())
        list = stub->variants();
    else if (JSFunction* oldCallee = callLinkInfo.callee())
        list = CallVariantList{ CallVariant(oldCallee) };

    list = variantListWithVariant(list, newVariant);

    // If there are any closure calls then it makes sense to treat all of them as closure calls.
    // This makes switching on callee cheaper. It also produces profiling that's easier on the DFG;
    // the DFG doesn't really want to deal with a combination of closure and non-closure callees.
    bool isClosureCall = false;
    for (CallVariant variant : list) {
        if (variant.isClosureCall()) {
            list = despecifiedVariantList(list);
            isClosureCall = true;
            break;
        }
    }

    if (isClosureCall)
        callLinkInfo.setHasSeenClosure();

    Vector<PolymorphicCallCase> callCases;

    // Figure out what our cases are.
    for (CallVariant variant : list) {
        CodeBlock* codeBlock;
        if (variant.executable()->isHostFunction())
            codeBlock = nullptr;
        else {
            codeBlock = jsCast<FunctionExecutable*>(variant.executable())->codeBlockForCall();

            // If we cannot handle a callee, assume that it's better for this whole thing to be a
            // virtual call.
            if (exec->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo.callType() == CallLinkInfo::CallVarargs || callLinkInfo.callType() == CallLinkInfo::ConstructVarargs) {
                linkVirtualFor(exec, callLinkInfo, CodeForCall, registers);
                return;
            }
        }

        callCases.append(PolymorphicCallCase(variant, codeBlock));
    }

    // If we are over the limit, just use a normal virtual call.
    unsigned maxPolymorphicCallVariantListSize;
    if (callerCodeBlock->jitType() == JITCode::topTierJIT())
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForTopTier();
    else
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSize();
    if (list.size() > maxPolymorphicCallVariantListSize) {
        linkVirtualFor(exec, callLinkInfo, CodeForCall, registers);
        return;
    }

    GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR());

    CCallHelpers stubJit(vm, callerCodeBlock);

    CCallHelpers::JumpList slowPath;

    ptrdiff_t offsetToFrame = -sizeof(CallerFrameAndPC);

    if (!ASSERT_DISABLED) {
        CCallHelpers::Jump okArgumentCount = stubJit.branch32(
            CCallHelpers::Below, CCallHelpers::Address(CCallHelpers::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ArgumentCount) + offsetToFrame + PayloadOffset), CCallHelpers::TrustedImm32(10000000));
        stubJit.abortWithReason(RepatchInsaneArgumentCount);
        okArgumentCount.link(&stubJit);
    }

    GPRReg scratch = AssemblyHelpers::selectScratchGPR(calleeGPR);
    GPRReg comparisonValueGPR;

    if (isClosureCall) {
        // Verify that we have a function and stash the executable in scratch.

#if USE(JSVALUE64)
        // We can safely clobber everything except the calleeGPR. We can't rely on tagMaskRegister
        // being set. So we do this the hard way.
        stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratch);
        slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratch));
#else
        // We would have already checked that the callee is a cell.
#endif

        slowPath.append(
            stubJit.branch8(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(calleeGPR, JSCell::typeInfoTypeOffset()),
                CCallHelpers::TrustedImm32(JSFunctionType)));

        stubJit.loadPtr(
            CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
            scratch);

        comparisonValueGPR = scratch;
    } else
        comparisonValueGPR = calleeGPR;

    Vector<int64_t> caseValues(callCases.size());
    Vector<CallToCodePtr> calls(callCases.size());
    std::unique_ptr<uint32_t[]> fastCounts;

    if (callerCodeBlock->jitType() != JITCode::topTierJIT())
        fastCounts = std::make_unique<uint32_t[]>(callCases.size());

    for (size_t i = 0; i < callCases.size(); ++i) {
        if (fastCounts)
            fastCounts[i] = 0;

        CallVariant variant = callCases[i].variant();
        int64_t newCaseValue;
        if (isClosureCall)
            newCaseValue = bitwise_cast<intptr_t>(variant.executable());
        else
            newCaseValue = bitwise_cast<intptr_t>(variant.function());

        if (!ASSERT_DISABLED) {
            for (size_t j = 0; j < i; ++j) {
                if (caseValues[j] != newCaseValue)
                    continue;

                dataLog("ERROR: Attempt to add duplicate case value.\n");
                dataLog("Existing case values: ");
                CommaPrinter comma;
                for (size_t k = 0; k < i; ++k)
                    dataLog(comma, caseValues[k]);
                dataLog("\n");
                dataLog("Attempting to add: ", newCaseValue, "\n");
                dataLog("Variant list: ", listDump(callCases), "\n");
                RELEASE_ASSERT_NOT_REACHED();
            }
        }

        caseValues[i] = newCaseValue;
    }
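    // BinarySwitch emits a branch tree over the case values; advance() visits one case at a
    // time so each arm can bump its fastCounts slot and near-call its target, while the
    // fall-through joins the slow path below.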

    GPRReg fastCountsBaseGPR =
        AssemblyHelpers::selectScratchGPR(calleeGPR, comparisonValueGPR, GPRInfo::regT3);
    stubJit.move(CCallHelpers::TrustedImmPtr(fastCounts.get()), fastCountsBaseGPR);

    BinarySwitch binarySwitch(comparisonValueGPR, caseValues, BinarySwitch::IntPtr);
    CCallHelpers::JumpList done;
    while (binarySwitch.advance(stubJit)) {
        size_t caseIndex = binarySwitch.caseIndex();

        CallVariant variant = callCases[caseIndex].variant();

        ASSERT(variant.executable()->hasJITCodeForCall());
        MacroAssemblerCodePtr codePtr =
            variant.executable()->generatedJITCodeForCall()->addressForCall(
                *vm, variant.executable(), ArityCheckNotRequired, registers);

        if (fastCounts) {
            stubJit.add32(
                CCallHelpers::TrustedImm32(1),
                CCallHelpers::Address(fastCountsBaseGPR, caseIndex * sizeof(uint32_t)));
        }
        calls[caseIndex].call = stubJit.nearCall();
        calls[caseIndex].codePtr = codePtr;
        done.append(stubJit.jump());
    }

    slowPath.link(&stubJit);
    binarySwitch.fallThrough().link(&stubJit);
    stubJit.move(calleeGPR, GPRInfo::regT0);
#if USE(JSVALUE32_64)
    stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
#endif
    stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
    stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation().executableAddress()), GPRInfo::regT4);

    stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
    AssemblyHelpers::Jump slow = stubJit.jump();

    LinkBuffer patchBuffer(*vm, stubJit, callerCodeBlock, JITCompilationCanFail);
    if (patchBuffer.didFailToAllocate()) {
        linkVirtualFor(exec, callLinkInfo, CodeForCall, registers);
        return;
    }

    RELEASE_ASSERT(callCases.size() == calls.size());
    for (CallToCodePtr callToCodePtr : calls) {
        patchBuffer.link(
            callToCodePtr.call, FunctionPtr(callToCodePtr.codePtr.executableAddress()));
    }
    if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
        patchBuffer.link(done, callLinkInfo.callReturnLocation().labelAtOffset(0));
    else
        patchBuffer.link(done, callLinkInfo.hotPathOther().labelAtOffset(0));
    patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(linkPolymorphicCallThunkGeneratorFor(registers)).code()));

    RefPtr<PolymorphicCallStubRoutine> stubRoutine = adoptRef(new PolymorphicCallStubRoutine(
        FINALIZE_CODE_FOR(
            callerCodeBlock, patchBuffer,
            ("Polymorphic call stub for %s, return point %p, targets %s",
                toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation().labelAtOffset(0).executableAddress(),
                toCString(listDump(callCases)).data())),
        *vm, callerCodeBlock->ownerExecutable(), exec->callerFrame(), callLinkInfo, callCases,
        WTF::move(fastCounts)));

    RepatchBuffer repatchBuffer(callerCodeBlock);

    repatchBuffer.replaceWithJump(
        RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
        CodeLocationLabel(stubRoutine->code().code()));
    // This is weird. The original slow path should no longer be reachable.
    linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForCall, registers);

    // If there had been a previous stub routine, that one will die as soon as the GC runs and sees
    // that it's no longer on stack.
    callLinkInfo.setStub(stubRoutine.release());

    // The call link info no longer has a call cache apart from the jump to the polymorphic call
    // stub.
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}

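// The reset functions below return an inline cache to its unoptimized state: resetGetByID and
// resetPutByID repoint the slow call at the corresponding *Optimize operation, revert the
// inlined structure check to the unusedPointer sentinel, and relink the patchable jump to the
// slow case; resetIn only needs the jump relinked.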
void resetGetByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
{
    repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetByIdOptimize);
    CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
            RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
                JSCell::structureIDOffset()),
            static_cast<int32_t>(unusedPointer));
    }
    repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
#if USE(JSVALUE64)
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
#else
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
#endif
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

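// resetPutByID has to preserve the flavor of the original operation (strict vs. non-strict,
// direct vs. ordinary), so it inspects the currently patched call target, which may already be
// one of the *BuildList variants, and maps it back to the matching *Optimize entry point.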
void resetPutByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
{
    V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(repatchBuffer, stubInfo.callReturnLocation).executableAddress());
    V_JITOperation_ESsiJJI optimizedFunction;
    if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictBuildList)
        optimizedFunction = operationPutByIdStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictBuildList)
        optimizedFunction = operationPutByIdNonStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictBuildList)
        optimizedFunction = operationPutByIdDirectStrictOptimize;
    else {
        ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictBuildList);
        optimizedFunction = operationPutByIdDirectNonStrictOptimize;
    }
    repatchCall(repatchBuffer, stubInfo.callReturnLocation, optimizedFunction);
    CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
            RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
                JSCell::structureIDOffset()),
            static_cast<int32_t>(unusedPointer));
    }
    repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
#if USE(JSVALUE64)
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
#else
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
#endif
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

void resetIn(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
{
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

} // namespace JSC

#endif