X-Git-Url: https://git.saurik.com/apple/javascriptcore.git/blobdiff_plain/a253471d7f8e4d91bf6ebabab00155c3b387d3d0..93a3786624b2768d89bfa27e46598dc64e2fb70a:/dfg/DFGRepatch.cpp?ds=sidebyside

diff --git a/dfg/DFGRepatch.cpp b/dfg/DFGRepatch.cpp
index 7945381..d803b33 100644
--- a/dfg/DFGRepatch.cpp
+++ b/dfg/DFGRepatch.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2011 Apple Inc. All rights reserved.
+ * Copyright (C) 2011, 2012 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -29,11 +29,16 @@
 #if ENABLE(DFG_JIT)
 
 #include "DFGCCallHelpers.h"
+#include "DFGScratchRegisterAllocator.h"
 #include "DFGSpeculativeJIT.h"
+#include "DFGThunks.h"
+#include "GCAwareJITStubRoutine.h"
 #include "LinkBuffer.h"
 #include "Operations.h"
 #include "PolymorphicPutByIdList.h"
 #include "RepatchBuffer.h"
+#include "StructureRareDataInlines.h"
+#include <wtf/StringPrintStream.h>
 
 namespace JSC { namespace DFG {
 
@@ -43,7 +48,7 @@ static void dfgRepatchCall(CodeBlock* codeblock, CodeLocationCall call, Function
     repatchBuffer.relink(call, newCalleeFunction);
 }
 
-static void dfgRepatchByIdSelfAccess(CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure, size_t offset, const FunctionPtr &slowPathFunction, bool compact)
+static void dfgRepatchByIdSelfAccess(CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure, PropertyOffset offset, const FunctionPtr &slowPathFunction, bool compact)
 {
     RepatchBuffer repatchBuffer(codeBlock);
 
@@ -52,33 +57,93 @@ static void dfgRepatchByIdSelfAccess(CodeBlock* codeBlock, StructureStubInfo& st
     // Patch the structure check & the offset of the load.
     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(intptr_t)stubInfo.patch.dfg.deltaCheckImmToCall), structure);
+    repatchBuffer.setLoadInstructionIsActive(stubInfo.callReturnLocation.convertibleLoadAtOffset(stubInfo.patch.dfg.deltaCallToStorageLoad), isOutOfLineOffset(offset));
 #if USE(JSVALUE64)
     if (compact)
-        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), sizeof(JSValue) * offset);
+        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
     else
-        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), sizeof(JSValue) * offset);
+        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
 #elif USE(JSVALUE32_64)
     if (compact) {
-        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), sizeof(JSValue) * offset + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
-        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), sizeof(JSValue) * offset + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
+        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
+        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
     }
     else {
-        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), sizeof(JSValue) * offset + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
-        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), sizeof(JSValue) * offset + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
+        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
+        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
     }
 #endif
 }
 
+static void addStructureTransitionCheck(
+    JSCell* object, Structure* structure, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
+    MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
+{
+    if (object->structure() == structure && structure->transitionWatchpointSetIsStillValid()) {
+        structure->addTransitionWatchpoint(stubInfo.addWatchpoint(codeBlock));
+#if DFG_ENABLE(JIT_ASSERT)
+        // If we execute this code, the object must have the structure we expect. Assert
+        // this in debug modes.
+        jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
+        MacroAssembler::Jump ok = jit.branchPtr(
+            MacroAssembler::Equal,
+            MacroAssembler::Address(scratchGPR, JSCell::structureOffset()),
+            MacroAssembler::TrustedImmPtr(structure));
+        jit.breakpoint();
+        ok.link(&jit);
+#endif
+        return;
+    }
+    
+    jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
+    failureCases.append(
+        jit.branchPtr(
+            MacroAssembler::NotEqual,
+            MacroAssembler::Address(scratchGPR, JSCell::structureOffset()),
+            MacroAssembler::TrustedImmPtr(structure)));
+}
+
+static void addStructureTransitionCheck(
+    JSValue prototype, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
+    MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
+{
+    if (prototype.isNull())
+        return;
+    
+    ASSERT(prototype.isCell());
+    
+    addStructureTransitionCheck(
+        prototype.asCell(), prototype.asCell()->structure(), codeBlock, stubInfo, jit,
+        failureCases, scratchGPR);
+}
+
+static void replaceWithJump(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
+{
+    if (MacroAssembler::canJumpReplacePatchableBranchPtrWithPatch()) {
+        repatchBuffer.replaceWithJump(
+            RepatchBuffer::startOfPatchableBranchPtrWithPatchOnAddress(
+                stubInfo.callReturnLocation.dataLabelPtrAtOffset(
+                    -(intptr_t)stubInfo.patch.dfg.deltaCheckImmToCall)),
+            CodeLocationLabel(target));
+        return;
+    }
+    
+    repatchBuffer.relink(
+        stubInfo.callReturnLocation.jumpAtOffset(
+            stubInfo.patch.dfg.deltaCallToStructCheck),
+        CodeLocationLabel(target));
+}
+
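The addStructureTransitionCheck() helpers added above prefer a watchpoint over an emitted compare: when the watched object already has the expected structure and that structure's transition watchpoint set is still valid, the stub registers a watchpoint and emits no per-hit check at all; only otherwise does it emit the branchPtr. A standalone sketch of that idea follows (illustrative types only; this WatchpointSet is a stand-in, not JSC's Watchpoint API):

    #include <cstdio>
    #include <functional>
    #include <utility>
    #include <vector>

    // Stand-in for a structure's transition watchpoint set.
    struct WatchpointSet {
        bool stillValid = true;
        std::vector<std::function<void()>> watchpoints;

        void add(std::function<void()> onFire) { watchpoints.push_back(std::move(onFire)); }

        void fireAll() // a transition happened
        {
            stillValid = false;
            for (auto& fire : watchpoints)
                fire();
            watchpoints.clear();
        }
    };

    int main()
    {
        WatchpointSet transitions;
        bool stubStillValid = true;

        // Fast case from addStructureTransitionCheck: no compare is emitted;
        // the stub just registers interest in the structure staying put.
        if (transitions.stillValid)
            transitions.add([&] { stubStillValid = false; });

        transitions.fireAll(); // e.g. the prototype gained a property
        std::printf("stub valid: %d\n", stubStillValid); // prints 0
        return 0;
    }

The trade-off is that firing the watchpoint must jettison the stub, but the common case (no transition ever happens) costs zero instructions per access.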
 static void emitRestoreScratch(MacroAssembler& stubJit, bool needToRestoreScratch, GPRReg scratchGPR, MacroAssembler::Jump& success, MacroAssembler::Jump& fail, MacroAssembler::JumpList failureCases)
 {
     if (needToRestoreScratch) {
-        stubJit.pop(scratchGPR);
+        stubJit.popToRestore(scratchGPR);
         
         success = stubJit.jump();
         
         // link failure cases here, so we can pop scratchGPR, and then jump back.
         failureCases.link(&stubJit);
-        stubJit.pop(scratchGPR);
+        stubJit.popToRestore(scratchGPR);
         
         fail = stubJit.jump();
         return;
@@ -105,9 +170,9 @@ static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratc
     linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
 }
 
-static void generateProtoChainAccessStub(ExecState* exec, StructureStubInfo& stubInfo, StructureChain* chain, size_t count, size_t offset, Structure* structure, CodeLocationLabel successLabel, CodeLocationLabel slowCaseLabel, MacroAssemblerCodeRef& stubRoutine)
+static void generateProtoChainAccessStub(ExecState* exec, StructureStubInfo& stubInfo, StructureChain* chain, size_t count, PropertyOffset offset, Structure* structure, CodeLocationLabel successLabel, CodeLocationLabel slowCaseLabel, RefPtr<JITStubRoutine>& stubRoutine)
 {
-    JSGlobalData* globalData = &exec->globalData();
+    VM* vm = &exec->vm();
     
     MacroAssembler stubJit;
     
@@ -116,12 +181,16 @@ static void generateProtoChainAccessStub(ExecState* exec, StructureStubInfo& stu
     GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
 #endif
     GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
-    GPRReg scratchGPR = static_cast<GPRReg>(stubInfo.patch.dfg.scratchGPR);
+    GPRReg scratchGPR = RegisterSet(stubInfo.patch.dfg.usedRegisters).getFreeGPR();
     bool needToRestoreScratch = false;
     
     if (scratchGPR == InvalidGPRReg) {
+#if USE(JSVALUE64)
         scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, resultGPR);
-        stubJit.push(scratchGPR);
+#else
+        scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
+#endif
+        stubJit.pushToSave(scratchGPR);
         needToRestoreScratch = true;
     }
     
@@ -134,28 +203,42 @@ static void generateProtoChainAccessStub(ExecState* exec, StructureStubInfo& stu
     JSObject* protoObject = 0;
     for (unsigned i = 0; i < count; ++i, ++it) {
         protoObject = asObject(currStructure->prototypeForLookup(exec));
-        stubJit.move(MacroAssembler::TrustedImmPtr(protoObject), scratchGPR);
-        failureCases.append(stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(scratchGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(protoObject->structure())));
+        addStructureTransitionCheck(
+            protoObject, protoObject->structure(), exec->codeBlock(), stubInfo, stubJit,
+            failureCases, scratchGPR);
         currStructure = it->get();
     }
     
-    stubJit.loadPtr(protoObject->addressOfPropertyStorage(), resultGPR);
+    if (isInlineOffset(offset)) {
+#if USE(JSVALUE64)
+        stubJit.load64(protoObject->locationForOffset(offset), resultGPR);
+#elif USE(JSVALUE32_64)
+        stubJit.move(MacroAssembler::TrustedImmPtr(protoObject->locationForOffset(offset)), resultGPR);
+        stubJit.load32(MacroAssembler::Address(resultGPR, OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
+        stubJit.load32(MacroAssembler::Address(resultGPR, OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
+#endif
+    } else {
+        stubJit.loadPtr(protoObject->butterflyAddress(), resultGPR);
 #if USE(JSVALUE64)
-    stubJit.loadPtr(MacroAssembler::Address(resultGPR, offset * sizeof(WriteBarrier<Unknown>)), resultGPR);
+        stubJit.load64(MacroAssembler::Address(resultGPR, offsetInButterfly(offset) * sizeof(WriteBarrier<Unknown>)), resultGPR);
 #elif USE(JSVALUE32_64)
-    stubJit.load32(MacroAssembler::Address(resultGPR, offset * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
-    stubJit.load32(MacroAssembler::Address(resultGPR, offset * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
+        stubJit.load32(MacroAssembler::Address(resultGPR, offsetInButterfly(offset) * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
+        stubJit.load32(MacroAssembler::Address(resultGPR, offsetInButterfly(offset) * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
 #endif
+    }
     
     MacroAssembler::Jump success, fail;
     
     emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
     
-    LinkBuffer patchBuffer(*globalData, &stubJit, exec->codeBlock());
+    LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
     
     linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
     
-    stubRoutine = patchBuffer.finalizeCode();
+    stubRoutine = FINALIZE_CODE_FOR_DFG_STUB(
+        patchBuffer,
+        ("DFG prototype chain access stub for %s, return point %p",
+            toCString(*exec->codeBlock()).data(), successLabel.executableAddress()));
 }
 
 static bool tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
@@ -164,7 +247,7 @@ static bool tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier
     // like the interpreter does, then add a check for recursion.
     
     CodeBlock* codeBlock = exec->codeBlock();
-    JSGlobalData* globalData = &exec->globalData();
+    VM* vm = &exec->vm();
     
     if (isJSArray(baseValue) && propertyName == exec->propertyNames().length) {
         GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
@@ -172,27 +255,34 @@ static bool tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier
         GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
 #endif
         GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
-        GPRReg scratchGPR = static_cast<GPRReg>(stubInfo.patch.dfg.scratchGPR);
+        GPRReg scratchGPR = RegisterSet(stubInfo.patch.dfg.usedRegisters).getFreeGPR();
         bool needToRestoreScratch = false;
         
         MacroAssembler stubJit;
         
         if (scratchGPR == InvalidGPRReg) {
+#if USE(JSVALUE64)
             scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, resultGPR);
-            stubJit.push(scratchGPR);
+#else
+            scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
+#endif
+            stubJit.pushToSave(scratchGPR);
             needToRestoreScratch = true;
         }
         
         MacroAssembler::JumpList failureCases;
+        
+        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSCell::structureOffset()), scratchGPR);
+        stubJit.load8(MacroAssembler::Address(scratchGPR, Structure::indexingTypeOffset()), scratchGPR);
+        failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IsArray)));
+        failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IndexingShapeMask)));
         
-        failureCases.append(stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::classInfoOffset()), MacroAssembler::TrustedImmPtr(&JSArray::s_info)));
-        
-        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSArray::storageOffset()), scratchGPR);
-        stubJit.load32(MacroAssembler::Address(scratchGPR, OBJECT_OFFSETOF(ArrayStorage, m_length)), scratchGPR);
+        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
+        stubJit.load32(MacroAssembler::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
         failureCases.append(stubJit.branch32(MacroAssembler::LessThan, scratchGPR, MacroAssembler::TrustedImm32(0)));
 
 #if USE(JSVALUE64)
-        stubJit.orPtr(GPRInfo::tagTypeNumberRegister, scratchGPR, resultGPR);
+        stubJit.or64(GPRInfo::tagTypeNumberRegister, scratchGPR, resultGPR);
 #elif USE(JSVALUE32_64)
         stubJit.move(scratchGPR, resultGPR);
         stubJit.move(JITCompiler::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
@@ -202,14 +292,18 @@ static bool tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier
         
         emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
         
-        LinkBuffer patchBuffer(*globalData, &stubJit, codeBlock);
+        LinkBuffer patchBuffer(*vm, &stubJit, codeBlock);
         
         linkRestoreScratch(patchBuffer, needToRestoreScratch, stubInfo, success, fail, failureCases);
        
-        stubInfo.stubRoutine = patchBuffer.finalizeCode();
+        stubInfo.stubRoutine = FINALIZE_CODE_FOR_DFG_STUB(
+            patchBuffer,
+            ("DFG GetById array length stub for %s, return point %p",
+                toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
+                    stubInfo.patch.dfg.deltaCallToDone).executableAddress()));
         
         RepatchBuffer repatchBuffer(codeBlock);
-        repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), CodeLocationLabel(stubInfo.stubRoutine.code()));
+        replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
         repatchBuffer.relink(stubInfo.callReturnLocation, operationGetById);
         
         return true;
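The reworked array-length stub above no longer compares the cell's ClassInfo pointer; it loads the structure's indexing-type byte and applies two branchTest32 checks before trusting the butterfly's length word. A self-contained sketch of the equivalent predicate, with made-up bit values standing in for JSC's IndexingType constants:

    #include <cstdint>
    #include <cstdio>

    // Illustrative bit layout only; JSC's real values live in IndexingType.h.
    typedef uint8_t IndexingType;
    static const IndexingType IsArray           = 0x01;
    static const IndexingType IndexingShapeMask = 0x1e;
    static const IndexingType SomeShape         = 0x08; // hypothetical shape bits

    // Mirrors the two branchTest32 checks in the stub: take the slow path
    // unless the cell is an array and it has some indexing shape whose
    // storage carries a length field.
    static bool arrayLengthStubApplies(IndexingType t)
    {
        if (!(t & IsArray))
            return false;
        if (!(t & IndexingShapeMask))
            return false;
        return true;
    }

    int main()
    {
        std::printf("%d\n", arrayLengthStubApplies(IsArray | SomeShape)); // 1
        std::printf("%d\n", arrayLengthStubApplies(IsArray));             // 0: no shape
        return 0;
    }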
@@ -224,18 +318,19 @@ static bool tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier
     Structure* structure = baseCell->structure();
     if (!slot.isCacheable())
         return false;
-    if (structure->isUncacheableDictionary() || structure->typeInfo().prohibitsPropertyCaching())
+    if (!structure->propertyAccessesAreCacheable())
         return false;
 
     // Optimize self access.
     if (slot.slotBase() == baseValue) {
-        if ((slot.cachedPropertyType() != PropertySlot::Value) || ((slot.cachedOffset() * sizeof(JSValue)) > (unsigned)MacroAssembler::MaximumCompactPtrAlignedAddressOffset)) {
+        if ((slot.cachedPropertyType() != PropertySlot::Value)
+            || !MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToPatchedStorage(slot.cachedOffset()))) {
             dfgRepatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdBuildList);
             return true;
         }
 
         dfgRepatchByIdSelfAccess(codeBlock, stubInfo, structure, slot.cachedOffset(), operationGetByIdBuildList, true);
-        stubInfo.initGetByIdSelf(*globalData, codeBlock->ownerExecutable(), structure);
+        stubInfo.initGetByIdSelf(*vm, codeBlock->ownerExecutable(), structure);
         return true;
     }
 
@@ -246,9 +341,9 @@ static bool tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier
     if (slot.cachedPropertyType() != PropertySlot::Value)
         return false;
     
-    size_t offset = slot.cachedOffset();
-    size_t count = normalizePrototypeChain(exec, baseValue, slot.slotBase(), propertyName, offset);
-    if (!count)
+    PropertyOffset offset = slot.cachedOffset();
+    size_t count = normalizePrototypeChainForChainAccess(exec, baseValue, slot.slotBase(), propertyName, offset);
+    if (count == InvalidPrototypeChain)
         return false;
     
     StructureChain* prototypeChain = structure->prototypeChain(exec);
@@ -258,10 +353,10 @@ static bool tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier
     generateProtoChainAccessStub(exec, stubInfo, prototypeChain, count, offset, structure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase), stubInfo.stubRoutine);
     
     RepatchBuffer repatchBuffer(codeBlock);
-    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), CodeLocationLabel(stubInfo.stubRoutine.code()));
+    replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
     repatchBuffer.relink(stubInfo.callReturnLocation, operationGetByIdProtoBuildList);
     
-    stubInfo.initGetByIdChain(*globalData, codeBlock->ownerExecutable(), structure, prototypeChain);
+    stubInfo.initGetByIdChain(*vm, codeBlock->ownerExecutable(), structure, prototypeChain, count, true);
     return true;
 }
 
@@ -291,7 +386,7 @@ static bool tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identi
     CodeBlock* codeBlock = exec->codeBlock();
     JSCell* baseCell = baseValue.asCell();
     Structure* structure = baseCell->structure();
-    JSGlobalData* globalData = &exec->globalData();
+    VM* vm = &exec->vm();
     
     ASSERT(slot.slotBase().isObject());
     
@@ -305,7 +400,7 @@ static bool tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identi
         listIndex = 0;
     } else if (stubInfo.accessType == access_get_by_id_self) {
         ASSERT(!stubInfo.stubRoutine);
-        polymorphicStructureList = new PolymorphicAccessStructureList(*globalData, codeBlock->ownerExecutable(), MacroAssemblerCodeRef::createSelfManagedCodeRef(stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase)), stubInfo.u.getByIdSelf.baseObjectStructure.get(), true);
+        polymorphicStructureList = new PolymorphicAccessStructureList(*vm, codeBlock->ownerExecutable(), JITStubRoutine::createSelfManagedRoutine(stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase)), stubInfo.u.getByIdSelf.baseObjectStructure.get(), true);
         stubInfo.initGetByIdSelfList(polymorphicStructureList, 1);
         listIndex = 1;
     } else {
@@ -321,9 +416,9 @@ static bool tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identi
         GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
 #endif
         GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
-        GPRReg scratchGPR = static_cast<GPRReg>(stubInfo.patch.dfg.scratchGPR);
+        GPRReg scratchGPR = RegisterSet(stubInfo.patch.dfg.usedRegisters).getFreeGPR();
         
-        CCallHelpers stubJit(globalData, codeBlock);
+        CCallHelpers stubJit(vm, codeBlock);
         
         MacroAssembler::Jump wrongStruct = stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(structure));
         
@@ -341,13 +436,22 @@ static bool tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identi
         if (slot.cachedPropertyType() == PropertySlot::Getter
             || slot.cachedPropertyType() == PropertySlot::Custom) {
             if (slot.cachedPropertyType() == PropertySlot::Getter) {
+                ASSERT(scratchGPR != InvalidGPRReg);
                 ASSERT(baseGPR != scratchGPR);
-                stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::offsetOfPropertyStorage()), scratchGPR);
+                if (isInlineOffset(slot.cachedOffset())) {
 #if USE(JSVALUE64)
-                stubJit.loadPtr(MacroAssembler::Address(scratchGPR, slot.cachedOffset() * sizeof(JSValue)), scratchGPR);
-#elif USE(JSVALUE32_64)
-                stubJit.load32(MacroAssembler::Address(scratchGPR, slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), scratchGPR);
+                    stubJit.load64(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
+#else
+                    stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
+#endif
+                } else {
+                    stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
+#if USE(JSVALUE64)
+                    stubJit.load64(MacroAssembler::Address(scratchGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
+#else
+                    stubJit.load32(MacroAssembler::Address(scratchGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
 #endif
+                }
                 stubJit.setupArgumentsWithExecState(baseGPR, scratchGPR);
                 operationFunction = operationCallGetter;
             } else {
@@ -363,7 +467,7 @@ static bool tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identi
             // right now!
             stubJit.store32(
                 MacroAssembler::TrustedImm32(exec->codeOriginIndexForDFG()),
-                CCallHelpers::tagFor(static_cast<VirtualRegister>(RegisterFile::ArgumentCount)));
+                CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));
             
             operationCall = stubJit.call();
 #if USE(JSVALUE64)
@@ -378,22 +482,36 @@ static bool tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identi
             handlerCall = stubJit.call();
             stubJit.jump(GPRInfo::returnValueGPR2);
         } else {
-            stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::offsetOfPropertyStorage()), resultGPR);
+            if (isInlineOffset(slot.cachedOffset())) {
 #if USE(JSVALUE64)
-            stubJit.loadPtr(MacroAssembler::Address(resultGPR, slot.cachedOffset() * sizeof(JSValue)), resultGPR);
-#elif USE(JSVALUE32_64)
-            stubJit.load32(MacroAssembler::Address(resultGPR, slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
-            stubJit.load32(MacroAssembler::Address(resultGPR, slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
+                stubJit.load64(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset())), resultGPR);
+#else
+                if (baseGPR == resultTagGPR) {
+                    stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
+                    stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
+                } else {
+                    stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
+                    stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
+                }
+#endif
+            } else {
+                stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), resultGPR);
+#if USE(JSVALUE64)
+                stubJit.load64(MacroAssembler::Address(resultGPR, offsetRelativeToBase(slot.cachedOffset())), resultGPR);
+#else
+                stubJit.load32(MacroAssembler::Address(resultGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
+                stubJit.load32(MacroAssembler::Address(resultGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
 #endif
+            }
             success = stubJit.jump();
             isDirect = true;
         }
 
-        LinkBuffer patchBuffer(*globalData, &stubJit, codeBlock);
+        LinkBuffer patchBuffer(*vm, &stubJit, codeBlock);
         
         CodeLocationLabel lastProtoBegin;
         if (listIndex)
-            lastProtoBegin = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine.code());
+            lastProtoBegin = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());
         else
            lastProtoBegin = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase);
         ASSERT(!!lastProtoBegin);
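Throughout this diff, finalized stubs change type from MacroAssemblerCodeRef to RefPtr<JITStubRoutine> (see createJITStubRoutine and GCAwareJITStubRoutine.h in the includes). The point is lifetime: a stub that can make calls, as the getter/custom path above can, may still be on the stack when it is replaced or when the GC runs. A minimal ref-counting stand-in, assuming nothing about the real class beyond that idea:

    #include <cstddef>
    #include <cstdio>

    // Stand-in for JITStubRoutine: generated code becomes a ref-counted
    // object, so it cannot be freed out from under a still-running caller.
    class JITStubRoutine {
    public:
        explicit JITStubRoutine(void* code) : m_code(code), m_refCount(1) {}
        void* code() const { return m_code; }
        void ref() { ++m_refCount; }
        void deref()
        {
            if (!--m_refCount) {
                std::printf("releasing stub executable memory\n");
                delete this;
            }
        }
    private:
        ~JITStubRoutine() {}
        void* m_code;
        size_t m_refCount;
    };

    int main()
    {
        JITStubRoutine* stub = new JITStubRoutine(nullptr);
        stub->ref();   // e.g. a polymorphic list entry takes a reference
        stub->deref(); // the stub info drops its reference; stub survives
        stub->deref(); // last owner gone; only now is the code freed
        return 0;
    }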
@@ -405,13 +523,25 @@ static bool tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identi
             patchBuffer.link(handlerCall, lookupExceptionHandlerInStub);
         }
         
-        MacroAssemblerCodeRef stubRoutine = patchBuffer.finalizeCode();
+        RefPtr<JITStubRoutine> stubRoutine =
+            createJITStubRoutine(
+                FINALIZE_DFG_CODE(
+                    patchBuffer,
+                    ("DFG GetById polymorphic list access for %s, return point %p",
+                        toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
+                            stubInfo.patch.dfg.deltaCallToDone).executableAddress())),
+                *vm,
+                codeBlock->ownerExecutable(),
+                slot.cachedPropertyType() == PropertySlot::Getter
+                    || slot.cachedPropertyType() == PropertySlot::Custom);
         
-        polymorphicStructureList->list[listIndex].set(*globalData, codeBlock->ownerExecutable(), stubRoutine, structure, isDirect);
+        polymorphicStructureList->list[listIndex].set(*vm, codeBlock->ownerExecutable(), stubRoutine, structure, isDirect);
        
-        CodeLocationJump jumpLocation = stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck);
         RepatchBuffer repatchBuffer(codeBlock);
-        repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine.code()));
+        repatchBuffer.relink(
+            stubInfo.callReturnLocation.jumpAtOffset(
+                stubInfo.patch.dfg.deltaCallToStructCheck),
+            CodeLocationLabel(stubRoutine->code().code()));
         
         if (listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1))
             return true;
@@ -439,23 +569,23 @@ static bool tryBuildGetByIDProtoList(ExecState* exec, JSValue baseValue, const I
     
     ASSERT(slot.slotBase().isObject());
     
-    size_t offset = slot.cachedOffset();
-    size_t count = normalizePrototypeChain(exec, baseValue, slot.slotBase(), propertyName, offset);
-    if (!count)
+    PropertyOffset offset = slot.cachedOffset();
+    size_t count = normalizePrototypeChainForChainAccess(exec, baseValue, slot.slotBase(), propertyName, offset);
+    if (count == InvalidPrototypeChain)
         return false;
     
     Structure* structure = baseValue.asCell()->structure();
     StructureChain* prototypeChain = structure->prototypeChain(exec);
     CodeBlock* codeBlock = exec->codeBlock();
-    JSGlobalData* globalData = &exec->globalData();
+    VM* vm = &exec->vm();
     
     PolymorphicAccessStructureList* polymorphicStructureList;
     int listIndex = 1;
     
     if (stubInfo.accessType == access_get_by_id_chain) {
         ASSERT(!!stubInfo.stubRoutine);
-        polymorphicStructureList = new PolymorphicAccessStructureList(*globalData, codeBlock->ownerExecutable(), stubInfo.stubRoutine, stubInfo.u.getByIdChain.baseObjectStructure.get(), stubInfo.u.getByIdChain.chain.get(), true);
-        stubInfo.stubRoutine = MacroAssemblerCodeRef();
+        polymorphicStructureList = new PolymorphicAccessStructureList(*vm, codeBlock->ownerExecutable(), stubInfo.stubRoutine, stubInfo.u.getByIdChain.baseObjectStructure.get(), stubInfo.u.getByIdChain.chain.get(), true);
+        stubInfo.stubRoutine.clear();
         stubInfo.initGetByIdProtoList(polymorphicStructureList, 1);
     } else {
         ASSERT(stubInfo.accessType == access_get_by_id_proto_list);
@@ -466,18 +596,17 @@ static bool tryBuildGetByIDProtoList(ExecState* exec, JSValue baseValue, const I
     if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
         stubInfo.u.getByIdProtoList.listSize++;
         
-        CodeLocationLabel lastProtoBegin = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine.code());
+        CodeLocationLabel lastProtoBegin = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());
         ASSERT(!!lastProtoBegin);
         
-        MacroAssemblerCodeRef stubRoutine;
+        RefPtr<JITStubRoutine> stubRoutine;
         
         generateProtoChainAccessStub(exec, stubInfo, prototypeChain, count, offset, structure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone), lastProtoBegin, stubRoutine);
         
-        polymorphicStructureList->list[listIndex].set(*globalData, codeBlock->ownerExecutable(), stubRoutine, structure, true);
+        polymorphicStructureList->list[listIndex].set(*vm, codeBlock->ownerExecutable(), stubRoutine, structure, true);
         
-        CodeLocationJump jumpLocation = stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck);
         RepatchBuffer repatchBuffer(codeBlock);
-        repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine.code()));
+        replaceWithJump(repatchBuffer, stubInfo, stubRoutine->code().code());
         
         if (listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1))
             return true;
@@ -517,17 +646,6 @@ static V_DFGOperation_EJCI appropriateListBuildingPutByIdFunction(const PutPrope
     return operationPutByIdNonStrictBuildList;
 }
 
-static void testPrototype(MacroAssembler &stubJit, GPRReg scratchGPR, JSValue prototype, MacroAssembler::JumpList& failureCases)
-{
-    if (prototype.isNull())
-        return;
-    
-    ASSERT(prototype.isCell());
-    
-    stubJit.move(MacroAssembler::TrustedImmPtr(prototype.asCell()), scratchGPR);
-    failureCases.append(stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(scratchGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(prototype.asCell()->structure())));
-}
-
 static void emitPutReplaceStub(
     ExecState* exec,
     JSValue,
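The put stubs below (like the get stubs above) now take a PropertyOffset and split every access into isInlineOffset()/offsetInButterfly() cases instead of a single property-storage load. A standalone sketch of the offset scheme; the boundary constant mirrors JSC's firstOutOfLineOffset, but its exact value should be treated as an assumption here:

    #include <cassert>
    #include <cstdio>

    // One integer names a property slot whether it lives inline in the
    // object cell or out of line in the "butterfly" storage.
    typedef int PropertyOffset;
    static const PropertyOffset firstOutOfLineOffset = 100;

    static bool isInlineOffset(PropertyOffset offset) { return offset < firstOutOfLineOffset; }
    static bool isOutOfLineOffset(PropertyOffset offset) { return !isInlineOffset(offset); }
    static int offsetInInlineStorage(PropertyOffset offset) { return offset; }
    static int offsetInButterfly(PropertyOffset offset) { return offset - firstOutOfLineOffset; }

    int main()
    {
        PropertyOffset a = 2;   // inline: addressed off the object base
        PropertyOffset b = 103; // out of line: addressed off the butterfly pointer
        assert(isInlineOffset(a) && isOutOfLineOffset(b));
        std::printf("inline slot %d, butterfly slot %d\n",
            offsetInInlineStorage(a), offsetInButterfly(b));
        return 0;
    }

This is why dfgRepatchByIdSelfAccess above patches both the load offset and, via setLoadInstructionIsActive, whether the butterfly indirection executes at all.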
@@ -537,17 +655,17 @@ static void emitPutReplaceStub(
     PutKind,
     Structure* structure,
     CodeLocationLabel failureLabel,
-    MacroAssemblerCodeRef& stubRoutine)
+    RefPtr<JITStubRoutine>& stubRoutine)
 {
-    JSGlobalData* globalData = &exec->globalData();
+    VM* vm = &exec->vm();
     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
 #if USE(JSVALUE32_64)
     GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
 #endif
     GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
-    GPRReg scratchGPR = static_cast<GPRReg>(stubInfo.patch.dfg.scratchGPR);
+    GPRReg scratchGPR = RegisterSet(stubInfo.patch.dfg.usedRegisters).getFreeGPR();
     bool needToRestoreScratch = false;
-#if ENABLE(GGC) || ENABLE(WRITE_BARRIER_PROFILING)
+#if ENABLE(WRITE_BARRIER_PROFILING)
     GPRReg scratchGPR2;
     const bool writeBarrierNeeded = true;
 #else
@@ -556,10 +674,14 @@ static void emitPutReplaceStub(
     
     MacroAssembler stubJit;
     
-    if (scratchGPR == InvalidGPRReg && (writeBarrierNeeded || !structure->isUsingInlineStorage())) {
+    if (scratchGPR == InvalidGPRReg && (writeBarrierNeeded || isOutOfLineOffset(slot.cachedOffset()))) {
+#if USE(JSVALUE64)
         scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR);
+#else
+        scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR, valueTagGPR);
+#endif
         needToRestoreScratch = true;
-        stubJit.push(scratchGPR);
+        stubJit.pushToSave(scratchGPR);
     }
     
     MacroAssembler::Jump badStructure = stubJit.branchPtr(
@@ -567,28 +689,32 @@ static void emitPutReplaceStub(
         MacroAssembler::Address(baseGPR, JSCell::structureOffset()),
         MacroAssembler::TrustedImmPtr(structure));
     
-#if ENABLE(GGC) || ENABLE(WRITE_BARRIER_PROFILING)
+#if ENABLE(WRITE_BARRIER_PROFILING)
+#if USE(JSVALUE64)
     scratchGPR2 = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR, scratchGPR);
-    stubJit.push(scratchGPR2);
+#else
+    scratchGPR2 = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR, valueTagGPR, scratchGPR);
+#endif
+    stubJit.pushToSave(scratchGPR2);
     SpeculativeJIT::writeBarrier(stubJit, baseGPR, scratchGPR, scratchGPR2, WriteBarrierForPropertyAccess);
-    stubJit.pop(scratchGPR2);
+    stubJit.popToRestore(scratchGPR2);
 #endif
     
 #if USE(JSVALUE64)
-    if (structure->isUsingInlineStorage())
-        stubJit.storePtr(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + slot.cachedOffset() * sizeof(JSValue)));
+    if (isInlineOffset(slot.cachedOffset()))
+        stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
     else {
-        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::offsetOfPropertyStorage()), scratchGPR);
-        stubJit.storePtr(valueGPR, MacroAssembler::Address(scratchGPR, slot.cachedOffset() * sizeof(JSValue)));
+        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
+        stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
     }
 #elif USE(JSVALUE32_64)
-    if (structure->isUsingInlineStorage()) {
-        stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
-        stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
+    if (isInlineOffset(slot.cachedOffset())) {
+        stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
+        stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
     } else {
-        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::offsetOfPropertyStorage()), scratchGPR);
-        stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR, slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
-        stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR, slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
+        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
+        stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
+        stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
     }
 #endif
     
@@ -596,22 +722,26 @@ static void emitPutReplaceStub(
     MacroAssembler::Jump failure;
     
     if (needToRestoreScratch) {
-        stubJit.pop(scratchGPR);
+        stubJit.popToRestore(scratchGPR);
         success = stubJit.jump();
         
         badStructure.link(&stubJit);
-        stubJit.pop(scratchGPR);
+        stubJit.popToRestore(scratchGPR);
         failure = stubJit.jump();
     } else {
         success = stubJit.jump();
         failure = badStructure;
     }
     
-    LinkBuffer patchBuffer(*globalData, &stubJit, exec->codeBlock());
+    LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
     patchBuffer.link(failure, failureLabel);
     
-    stubRoutine = patchBuffer.finalizeCode();
+    stubRoutine = FINALIZE_CODE_FOR_DFG_STUB(
+        patchBuffer,
+        ("DFG PutById replace stub for %s, return point %p",
+            toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
+                stubInfo.patch.dfg.deltaCallToDone).executableAddress()));
 }
 
 static void emitPutTransitionStub(
@@ -625,93 +755,205 @@ static void emitPutTransitionStub(
     Structure* oldStructure,
     StructureChain* prototypeChain,
     CodeLocationLabel failureLabel,
-    MacroAssemblerCodeRef& stubRoutine)
+    RefPtr<JITStubRoutine>& stubRoutine)
 {
-    JSGlobalData* globalData = &exec->globalData();
+    VM* vm = &exec->vm();
     
     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
 #if USE(JSVALUE32_64)
     GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
 #endif
     GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
-    GPRReg scratchGPR = static_cast<GPRReg>(stubInfo.patch.dfg.scratchGPR);
-    bool needToRestoreScratch = false;
-    
-    ASSERT(scratchGPR != baseGPR);
+    
+    ScratchRegisterAllocator allocator(stubInfo.patch.dfg.usedRegisters);
+    allocator.lock(baseGPR);
+#if USE(JSVALUE32_64)
+    allocator.lock(valueTagGPR);
+#endif
+    allocator.lock(valueGPR);
+    
+    CCallHelpers stubJit(vm);
     
-    MacroAssembler stubJit;
+    GPRReg scratchGPR1 = allocator.allocateScratchGPR();
+    ASSERT(scratchGPR1 != baseGPR);
+    ASSERT(scratchGPR1 != valueGPR);
+    
+    bool needSecondScratch = false;
+    bool needThirdScratch = false;
+#if ENABLE(WRITE_BARRIER_PROFILING)
+    needSecondScratch = true;
+#endif
+    if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()
+        && oldStructure->outOfLineCapacity()) {
+        needSecondScratch = true;
+        needThirdScratch = true;
+    }
+    
+    GPRReg scratchGPR2;
+    if (needSecondScratch) {
+        scratchGPR2 = allocator.allocateScratchGPR();
+        ASSERT(scratchGPR2 != baseGPR);
+        ASSERT(scratchGPR2 != valueGPR);
+        ASSERT(scratchGPR2 != scratchGPR1);
+    } else
+        scratchGPR2 = InvalidGPRReg;
+    GPRReg scratchGPR3;
+    if (needThirdScratch) {
+        scratchGPR3 = allocator.allocateScratchGPR();
+        ASSERT(scratchGPR3 != baseGPR);
+        ASSERT(scratchGPR3 != valueGPR);
+        ASSERT(scratchGPR3 != scratchGPR1);
+        ASSERT(scratchGPR3 != scratchGPR2);
+    } else
+        scratchGPR3 = InvalidGPRReg;
+    
+    allocator.preserveReusedRegistersByPushing(stubJit);
     
     MacroAssembler::JumpList failureCases;
     
-    if (scratchGPR == InvalidGPRReg) {
-        scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR);
-        stubJit.push(scratchGPR);
-        needToRestoreScratch = true;
-    }
+    ASSERT(oldStructure->transitionWatchpointSetHasBeenInvalidated());
     
     failureCases.append(stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(oldStructure)));
-    
-    testPrototype(stubJit, scratchGPR, oldStructure->storedPrototype(), failureCases);
+    
+    addStructureTransitionCheck(
+        oldStructure->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
+        scratchGPR1);
     
     if (putKind == NotDirect) {
-        for (WriteBarrier<Structure>* it = prototypeChain->head(); *it; ++it)
-            testPrototype(stubJit, scratchGPR, (*it)->storedPrototype(), failureCases);
+        for (WriteBarrier<Structure>* it = prototypeChain->head(); *it; ++it) {
+            addStructureTransitionCheck(
+                (*it)->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
+                scratchGPR1);
+        }
     }
     
-#if ENABLE(GGC) || ENABLE(WRITE_BARRIER_PROFILING)
+#if ENABLE(WRITE_BARRIER_PROFILING)
+    ASSERT(needSecondScratch);
+    ASSERT(scratchGPR2 != InvalidGPRReg);
     // Must always emit this write barrier as the structure transition itself requires it
-    GPRReg scratch2 = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR, scratchGPR);
-    stubJit.push(scratch2);
-    SpeculativeJIT::writeBarrier(stubJit, baseGPR, scratchGPR, scratch2, WriteBarrierForPropertyAccess);
-    stubJit.pop(scratch2);
+    SpeculativeJIT::writeBarrier(stubJit, baseGPR, scratchGPR1, scratchGPR2, WriteBarrierForPropertyAccess);
 #endif
+    
+    MacroAssembler::JumpList slowPath;
+    
+    bool scratchGPR1HasStorage = false;
+    
+    if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
+        size_t newSize = structure->outOfLineCapacity() * sizeof(JSValue);
+        CopiedAllocator* copiedAllocator = &vm->heap.storageAllocator();
+        
+        if (!oldStructure->outOfLineCapacity()) {
+            stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
+            slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
+            stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
+            stubJit.negPtr(scratchGPR1);
+            stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
+            stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
+        } else {
+            size_t oldSize = oldStructure->outOfLineCapacity() * sizeof(JSValue);
+            ASSERT(newSize > oldSize);
+            
+            stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);
+            stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
+            slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
+            stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
+            stubJit.negPtr(scratchGPR1);
+            stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
+            stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
+            // We have scratchGPR1 = new storage, scratchGPR3 = old storage, scratchGPR2 = available
+            for (ptrdiff_t offset = 0; offset < static_cast<ptrdiff_t>(oldSize); offset += sizeof(void*)) {
+                stubJit.loadPtr(MacroAssembler::Address(scratchGPR3, -(offset + sizeof(JSValue) + sizeof(void*))), scratchGPR2);
+                stubJit.storePtr(scratchGPR2, MacroAssembler::Address(scratchGPR1, -(offset + sizeof(JSValue) + sizeof(void*))));
+            }
+        }
+        
+        stubJit.storePtr(scratchGPR1, MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()));
+        scratchGPR1HasStorage = true;
+    }
     
     stubJit.storePtr(MacroAssembler::TrustedImmPtr(structure), MacroAssembler::Address(baseGPR, JSCell::structureOffset()));
 #if USE(JSVALUE64)
-    if (structure->isUsingInlineStorage())
-        stubJit.storePtr(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + slot.cachedOffset() * sizeof(JSValue)));
+    if (isInlineOffset(slot.cachedOffset()))
+        stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
     else {
-        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::offsetOfPropertyStorage()), scratchGPR);
-        stubJit.storePtr(valueGPR, MacroAssembler::Address(scratchGPR, slot.cachedOffset() * sizeof(JSValue)));
+        if (!scratchGPR1HasStorage)
+            stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
+        stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
     }
 #elif USE(JSVALUE32_64)
-    if (structure->isUsingInlineStorage()) {
-        stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
-        stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
+    if (isInlineOffset(slot.cachedOffset())) {
+        stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
+        stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
     } else {
-        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::offsetOfPropertyStorage()), scratchGPR);
-        stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR, slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
-        stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR, slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
+        if (!scratchGPR1HasStorage)
+            stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
+        stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
+        stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
     }
 #endif
     
     MacroAssembler::Jump success;
     MacroAssembler::Jump failure;
     
-    if (needToRestoreScratch) {
-        stubJit.pop(scratchGPR);
+    if (allocator.didReuseRegisters()) {
+        allocator.restoreReusedRegistersByPopping(stubJit);
         success = stubJit.jump();
         
         failureCases.link(&stubJit);
-        stubJit.pop(scratchGPR);
+        allocator.restoreReusedRegistersByPopping(stubJit);
         failure = stubJit.jump();
     } else
         success = stubJit.jump();
-    
-    LinkBuffer patchBuffer(*globalData, &stubJit, exec->codeBlock());
+    
+    MacroAssembler::Call operationCall;
+    MacroAssembler::Jump successInSlowPath;
+    
+    if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
+        slowPath.link(&stubJit);
+        
+        allocator.restoreReusedRegistersByPopping(stubJit);
+        ScratchBuffer* scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSize());
+        allocator.preserveUsedRegistersToScratchBuffer(stubJit, scratchBuffer, scratchGPR1);
+#if USE(JSVALUE64)
+        stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR);
+#else
+        stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR, valueTagGPR);
+#endif
+        operationCall = stubJit.call();
+        allocator.restoreUsedRegistersFromScratchBuffer(stubJit, scratchBuffer, scratchGPR1);
+        successInSlowPath = stubJit.jump();
+    }
+    
+    LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
-    if (needToRestoreScratch)
+    if (allocator.didReuseRegisters())
         patchBuffer.link(failure, failureLabel);
     else
         patchBuffer.link(failureCases, failureLabel);
-    
-    stubRoutine = patchBuffer.finalizeCode();
+    if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
+        patchBuffer.link(operationCall, operationReallocateStorageAndFinishPut);
+        patchBuffer.link(successInSlowPath, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
+    }
+    
+    stubRoutine =
+        createJITStubRoutine(
+            FINALIZE_DFG_CODE(
                patchBuffer,
+                ("DFG PutById %stransition stub (%p -> %p) for %s, return point %p",
+                    structure->outOfLineCapacity() != oldStructure->outOfLineCapacity() ? "reallocating " : "",
+                    oldStructure, structure,
+                    toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
+                        stubInfo.patch.dfg.deltaCallToDone).executableAddress())),
+            *vm,
+            exec->codeBlock()->ownerExecutable(),
+            structure->outOfLineCapacity() != oldStructure->outOfLineCapacity(),
+            structure);
 }
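emitPutTransitionStub() above now inlines a fast path for growing out-of-line storage: the emitted loadPtr/branchSubPtr/storePtr/negPtr/addPtr sequence bump-allocates newSize bytes from the VM's CopiedAllocator and falls back to operationReallocateStorageAndFinishPut on underflow. A runnable stand-in for that allocation discipline (field names mimic the ones used above; the struct itself is illustrative):

    #include <cstdio>
    #include <cstdlib>

    struct CopiedAllocator {
        size_t currentRemaining; // bytes still free in the current block
        char* currentPayloadEnd; // one past the end of the block
    };

    static char* tryBumpAllocate(CopiedAllocator& a, size_t bytes)
    {
        if (a.currentRemaining < bytes)
            return nullptr; // the emitted branchSubPtr jumps to the slow path here
        a.currentRemaining -= bytes;
        // Free space is the high end of the block; allocations are carved
        // off its low end, exactly as the negPtr/addPtr arithmetic computes.
        return a.currentPayloadEnd - a.currentRemaining - bytes;
    }

    int main()
    {
        char block[1024];
        CopiedAllocator a = { sizeof(block), block + sizeof(block) };
        char* storage = tryBumpAllocate(a, 4 * 8); // e.g. four 8-byte JSValues
        std::printf("allocated %p, %zu bytes left\n", (void*)storage, a.currentRemaining);
        return 0;
    }

The real stub additionally offsets the result by sizeof(JSValue) to form the butterfly pointer and, when old storage exists, copies it over with the scratchGPR2/scratchGPR3 loop shown above.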
"reallocating " : "", + oldStructure, structure, + toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset( + stubInfo.patch.dfg.deltaCallToDone).executableAddress())), + *vm, + exec->codeBlock()->ownerExecutable(), + structure->outOfLineCapacity() != oldStructure->outOfLineCapacity(), + structure); } static bool tryCachePutByID(ExecState* exec, JSValue baseValue, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind) { CodeBlock* codeBlock = exec->codeBlock(); - JSGlobalData* globalData = &exec->globalData(); + VM* vm = &exec->vm(); if (!baseValue.isCell()) return false; @@ -730,11 +972,20 @@ static bool tryCachePutByID(ExecState* exec, JSValue baseValue, const Identifier if (structure->isDictionary()) return false; - // skip optimizing the case where we need a realloc - if (oldStructure->propertyStorageCapacity() != structure->propertyStorageCapacity()) + // Skip optimizing the case where we need a realloc, if we don't have + // enough registers to make it happen. + if (GPRInfo::numberOfRegisters < 6 + && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity() + && oldStructure->outOfLineCapacity()) return false; - normalizePrototypeChain(exec, baseCell); + // Skip optimizing the case where we need realloc, and the structure has + // indexing storage. + if (hasIndexingHeader(oldStructure->indexingType())) + return false; + + if (normalizePrototypeChain(exec, baseCell) == InvalidPrototypeChain) + return false; StructureChain* prototypeChain = structure->prototypeChain(exec); @@ -745,16 +996,22 @@ static bool tryCachePutByID(ExecState* exec, JSValue baseValue, const Identifier stubInfo.stubRoutine); RepatchBuffer repatchBuffer(codeBlock); - repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), CodeLocationLabel(stubInfo.stubRoutine.code())); + repatchBuffer.relink( + stubInfo.callReturnLocation.jumpAtOffset( + stubInfo.patch.dfg.deltaCallToStructCheck), + CodeLocationLabel(stubInfo.stubRoutine->code().code())); repatchBuffer.relink(stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind)); - stubInfo.initPutByIdTransition(*globalData, codeBlock->ownerExecutable(), oldStructure, structure, prototypeChain, putKind == Direct); + stubInfo.initPutByIdTransition(*vm, codeBlock->ownerExecutable(), oldStructure, structure, prototypeChain, putKind == Direct); return true; } - + + if (!MacroAssembler::isPtrAlignedAddressOffset(offsetRelativeToPatchedStorage(slot.cachedOffset()))) + return false; + dfgRepatchByIdSelfAccess(codeBlock, stubInfo, structure, slot.cachedOffset(), appropriateListBuildingPutByIdFunction(slot, putKind), false); - stubInfo.initPutByIdReplace(*globalData, codeBlock->ownerExecutable(), structure); + stubInfo.initPutByIdReplace(*vm, codeBlock->ownerExecutable(), structure); return true; } @@ -771,7 +1028,7 @@ void dfgRepatchPutByID(ExecState* exec, JSValue baseValue, const Identifier& pro static bool tryBuildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind) { CodeBlock* codeBlock = exec->codeBlock(); - JSGlobalData* globalData = &exec->globalData(); + VM* vm = &exec->vm(); if (!baseValue.isCell()) return false; @@ -787,17 +1044,26 @@ static bool tryBuildPutByIdList(ExecState* exec, JSValue baseValue, const Identi // Optimize self access. 
     if (slot.base() == baseValue) {
         PolymorphicPutByIdList* list;
-        MacroAssemblerCodeRef stubRoutine;
+        RefPtr<JITStubRoutine> stubRoutine;
         
         if (slot.type() == PutPropertySlot::NewProperty) {
             if (structure->isDictionary())
                 return false;
             
-            // skip optimizing the case where we need a realloc
-            if (oldStructure->propertyStorageCapacity() != structure->propertyStorageCapacity())
+            // Skip optimizing the case where we need a realloc, if we don't have
+            // enough registers to make it happen.
+            if (GPRInfo::numberOfRegisters < 6
+                && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
+                && oldStructure->outOfLineCapacity())
+                return false;
+            
+            // Skip optimizing the case where we need realloc, and the structure has
+            // indexing storage.
+            if (hasIndexingHeader(oldStructure->indexingType()))
                 return false;
             
-            normalizePrototypeChain(exec, baseCell);
+            if (normalizePrototypeChain(exec, baseCell) == InvalidPrototypeChain)
+                return false;
             
             StructureChain* prototypeChain = structure->prototypeChain(exec);
             
@@ -814,7 +1080,7 @@ static bool tryBuildPutByIdList(ExecState* exec, JSValue baseValue, const Identi
             
             list->addAccess(
                 PutByIdAccess::transition(
-                    *globalData, codeBlock->ownerExecutable(),
+                    *vm, codeBlock->ownerExecutable(),
                     oldStructure, structure, prototypeChain,
                     stubRoutine));
         } else {
@@ -829,12 +1095,12 @@ static bool tryBuildPutByIdList(ExecState* exec, JSValue baseValue, const Identi
             
             list->addAccess(
                 PutByIdAccess::replace(
-                    *globalData, codeBlock->ownerExecutable(),
+                    *vm, codeBlock->ownerExecutable(),
                     structure, stubRoutine));
         }
         
         RepatchBuffer repatchBuffer(codeBlock);
-        repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), CodeLocationLabel(stubRoutine.code()));
+        repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), CodeLocationLabel(stubRoutine->code().code()));
         
         if (list->isFull())
             repatchBuffer.relink(stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
@@ -852,32 +1118,153 @@ void dfgBuildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& p
         dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
 }
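dfgLinkFor() now links the call slow path to the closure-call machinery added below, and dfgLinkClosureCall() emits a stub keyed on the callee's structure and executable rather than on one JSFunction identity. A plain-C++ sketch of the acceptance check, with illustrative stand-in types (not JSC's):

    #include <cstdio>

    struct Structure {};
    struct ExecutableBase {};
    struct ScopeChain {};

    struct JSFunction {
        Structure* structure;
        ExecutableBase* executable;
        ScopeChain* scope;
    };

    // The stub's two branchPtr checks (plus, on 64-bit, a cell check). On
    // success the stub installs the callee's own scope chain into the frame
    // and jumps to the shared machine code.
    static bool closureCallStubAccepts(
        const JSFunction& callee, Structure* cachedStructure, ExecutableBase* cachedExecutable)
    {
        return callee.structure == cachedStructure && callee.executable == cachedExecutable;
    }

    int main()
    {
        Structure s;
        ExecutableBase e;
        ScopeChain outer1, outer2;
        JSFunction f1 = { &s, &e, &outer1 };
        JSFunction f2 = { &s, &e, &outer2 }; // different closure, same code
        std::printf("%d %d\n",
            closureCallStubAccepts(f1, &s, &e),
            closureCallStubAccepts(f2, &s, &e)); // both 1: one stub serves both
        return 0;
    }

This is why many closures created from the same function body no longer thrash a call site back to the virtual-call thunk.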
 
+static void linkSlowFor(RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo, CodeSpecializationKind kind)
+{
+    if (kind == CodeForCall) {
+        repatchBuffer.relink(callLinkInfo.callReturnLocation, vm->getCTIStub(virtualCallThunkGenerator).code());
+        return;
+    }
+    ASSERT(kind == CodeForConstruct);
+    repatchBuffer.relink(callLinkInfo.callReturnLocation, vm->getCTIStub(virtualConstructThunkGenerator).code());
+}
+
 void dfgLinkFor(ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock, JSFunction* callee, MacroAssemblerCodePtr codePtr, CodeSpecializationKind kind)
 {
+    ASSERT(!callLinkInfo.stub);
+    
     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
+    VM* vm = callerCodeBlock->vm();
     
     RepatchBuffer repatchBuffer(callerCodeBlock);
     
     ASSERT(!callLinkInfo.isLinked());
-    callLinkInfo.callee.set(exec->callerFrame()->globalData(), callLinkInfo.hotPathBegin, callerCodeBlock->ownerExecutable(), callee);
-    callLinkInfo.lastSeenCallee.set(exec->callerFrame()->globalData(), callerCodeBlock->ownerExecutable(), callee);
+    callLinkInfo.callee.set(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin, callerCodeBlock->ownerExecutable(), callee);
+    callLinkInfo.lastSeenCallee.set(exec->callerFrame()->vm(), callerCodeBlock->ownerExecutable(), callee);
     repatchBuffer.relink(callLinkInfo.hotPathOther, codePtr);
     
     if (calleeCodeBlock)
         calleeCodeBlock->linkIncomingCall(&callLinkInfo);
     
     if (kind == CodeForCall) {
-        repatchBuffer.relink(CodeLocationCall(callLinkInfo.callReturnLocation), operationVirtualCall);
+        repatchBuffer.relink(callLinkInfo.callReturnLocation, vm->getCTIStub(linkClosureCallThunkGenerator).code());
         return;
     }
+    
     ASSERT(kind == CodeForConstruct);
-    repatchBuffer.relink(CodeLocationCall(callLinkInfo.callReturnLocation), operationVirtualConstruct);
+    linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForConstruct);
+}
+
+void dfgLinkSlowFor(ExecState* exec, CallLinkInfo& callLinkInfo, CodeSpecializationKind kind)
+{
+    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
+    VM* vm = callerCodeBlock->vm();
+    
+    RepatchBuffer repatchBuffer(callerCodeBlock);
+    
+    linkSlowFor(repatchBuffer, vm, callLinkInfo, kind);
+}
+
+void dfgLinkClosureCall(ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock, Structure* structure, ExecutableBase* executable, MacroAssemblerCodePtr codePtr)
+{
+    ASSERT(!callLinkInfo.stub);
+    
+    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
+    VM* vm = callerCodeBlock->vm();
+    
+    GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR);
+    
+    CCallHelpers stubJit(vm, callerCodeBlock);
+    
+    CCallHelpers::JumpList slowPath;
+    
+#if USE(JSVALUE64)
+    slowPath.append(
+        stubJit.branchTest64(
+            CCallHelpers::NonZero, calleeGPR, GPRInfo::tagMaskRegister));
+#else
+    // We would have already checked that the callee is a cell.
+#endif
+    
+    slowPath.append(
+        stubJit.branchPtr(
+            CCallHelpers::NotEqual,
+            CCallHelpers::Address(calleeGPR, JSCell::structureOffset()),
+            CCallHelpers::TrustedImmPtr(structure)));
+    
+    slowPath.append(
+        stubJit.branchPtr(
+            CCallHelpers::NotEqual,
+            CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
+            CCallHelpers::TrustedImmPtr(executable)));
+    
+    stubJit.loadPtr(
+        CCallHelpers::Address(calleeGPR, JSFunction::offsetOfScopeChain()),
+        GPRInfo::returnValueGPR);
+    
+#if USE(JSVALUE64)
+    stubJit.store64(
+        GPRInfo::returnValueGPR,
+        CCallHelpers::Address(GPRInfo::callFrameRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain)));
+#else
+    stubJit.storePtr(
+        GPRInfo::returnValueGPR,
+        CCallHelpers::Address(GPRInfo::callFrameRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
+    stubJit.store32(
+        CCallHelpers::TrustedImm32(JSValue::CellTag),
+        CCallHelpers::Address(GPRInfo::callFrameRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
+#endif
+    
+    JITCompiler::Call call = stubJit.nearCall();
+    JITCompiler::Jump done = stubJit.jump();
+    
+    slowPath.link(&stubJit);
+    stubJit.move(calleeGPR, GPRInfo::nonArgGPR0);
+#if USE(JSVALUE32_64)
+    stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::nonArgGPR1);
+#endif
+    stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation.executableAddress()), GPRInfo::nonArgGPR2);
+    stubJit.restoreReturnAddressBeforeReturn(GPRInfo::nonArgGPR2);
+    JITCompiler::Jump slow = stubJit.jump();
+    
+    LinkBuffer patchBuffer(*vm, &stubJit, callerCodeBlock);
+    
+    patchBuffer.link(call, FunctionPtr(codePtr.executableAddress()));
+    patchBuffer.link(done, callLinkInfo.callReturnLocation.labelAtOffset(0));
+    patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(virtualCallThunkGenerator).code()));
+    
+    RefPtr<ClosureCallStubRoutine> stubRoutine = adoptRef(new ClosureCallStubRoutine(
+        FINALIZE_DFG_CODE(
+            patchBuffer,
+            ("DFG closure call stub for %s, return point %p, target %p (%s)",
+                toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation.labelAtOffset(0).executableAddress(),
+                codePtr.executableAddress(), toCString(pointerDump(calleeCodeBlock)).data())),
+        *vm, callerCodeBlock->ownerExecutable(), structure, executable, callLinkInfo.codeOrigin));
+    
+    RepatchBuffer repatchBuffer(callerCodeBlock);
+    
+    repatchBuffer.replaceWithJump(
+        RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin),
+        CodeLocationLabel(stubRoutine->code().code()));
+    linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForCall);
+    
+    callLinkInfo.stub = stubRoutine.release();
+    
+    ASSERT(!calleeCodeBlock || calleeCodeBlock->isIncomingCallAlreadyLinked(&callLinkInfo));
+}
 
 void dfgResetGetByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
 {
     repatchBuffer.relink(stubInfo.callReturnLocation, operationGetByIdOptimize);
-    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(uintptr_t)stubInfo.patch.dfg.deltaCheckImmToCall), reinterpret_cast<void*>(-1));
+    CodeLocationDataLabelPtr structureLabel = stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(intptr_t)stubInfo.patch.dfg.deltaCheckImmToCall);
+    if (MacroAssembler::canJumpReplacePatchableBranchPtrWithPatch()) {
+        repatchBuffer.revertJumpReplacementToPatchableBranchPtrWithPatch(
+            RepatchBuffer::startOfPatchableBranchPtrWithPatchOnAddress(structureLabel),
+            MacroAssembler::Address(
+                static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR),
+                JSCell::structureOffset()),
+            reinterpret_cast<void*>(unusedPointer));
+    }
+    repatchBuffer.repatch(structureLabel, reinterpret_cast<void*>(unusedPointer));
 #if USE(JSVALUE64)
     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), 0);
 #else
@@ -902,7 +1289,16 @@ void dfgResetPutByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
         optimizedFunction = operationPutByIdDirectNonStrictOptimize;
     }
     repatchBuffer.relink(stubInfo.callReturnLocation, optimizedFunction);
-    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(uintptr_t)stubInfo.patch.dfg.deltaCheckImmToCall), reinterpret_cast<void*>(-1));
+    CodeLocationDataLabelPtr structureLabel = stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(intptr_t)stubInfo.patch.dfg.deltaCheckImmToCall);
+    if (MacroAssembler::canJumpReplacePatchableBranchPtrWithPatch()) {
+        repatchBuffer.revertJumpReplacementToPatchableBranchPtrWithPatch(
+            RepatchBuffer::startOfPatchableBranchPtrWithPatchOnAddress(structureLabel),
+            MacroAssembler::Address(
+                static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR),
+                JSCell::structureOffset()),
+            reinterpret_cast<void*>(unusedPointer));
+    }
+    repatchBuffer.repatch(structureLabel, reinterpret_cast<void*>(unusedPointer));
 #if USE(JSVALUE64)
     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), 0);
 #else