- // 7) Dump all doubles onto the stack, or into the scratch buffer if the
- // destination virtual register is poisoned (its stack slot may still be
- // read by another recovery, so the value is staged in scratch until the
- // reshuffling below completes).
- if (haveFPRs) {
- for (size_t index = 0; index < operands.size(); ++index) {
- const ValueRecovery& recovery = operands[index];
- if (recovery.technique() != InFPR)
- continue;
- if (operands.isVariable(index) && poisonedVirtualRegisters[operands.variableForIndex(index)]) {
- m_jit.storeDouble(recovery.fpr(), scratchDataBuffer + currentPoisonIndex);
- m_poisonScratchIndices[operands.variableForIndex(index)] = currentPoisonIndex;
- currentPoisonIndex++;
- } else
- m_jit.storeDouble(recovery.fpr(), AssemblyHelpers::addressFor((VirtualRegister)operands.operandForIndex(index)));
- }
- }
-
- // At this point all GPRs are available for scratch use.
-
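- // Sanity check: by now every poisoned virtual register should have
- // claimed a scratch slot.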
- ASSERT(currentPoisonIndex == numberOfPoisonedVirtualRegisters);
-
- // 8) Reshuffle displaced virtual registers. Optimize for the case that
- // the displaced values all fit in the available GPRs; on this 32-bit
- // path each value needs two registers, one for the payload and one for
- // the tag.
-
- if (numberOfDisplacedVirtualRegisters) {
- if (numberOfDisplacedVirtualRegisters * 2 <= GPRInfo::numberOfRegisters) {
- // So far this appears to be the case that triggers all the time, but
- // that is far from guaranteed.
-
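- // Pass 1: lift every displaced value into GPRs first (payload and tag
- // each take a register), so that the stores in the second pass cannot
- // clobber a stack slot that has not been read yet.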
- unsigned displacementIndex = 0;
- for (size_t index = 0; index < operands.size(); ++index) {
- const ValueRecovery& recovery = operands[index];
- switch (recovery.technique()) {
- case DisplacedInJSStack:
- m_jit.load32(AssemblyHelpers::payloadFor(recovery.virtualRegister()), GPRInfo::toRegister(displacementIndex++));
- m_jit.load32(AssemblyHelpers::tagFor(recovery.virtualRegister()), GPRInfo::toRegister(displacementIndex++));
- break;
- case Int32DisplacedInJSStack:
- m_jit.load32(AssemblyHelpers::payloadFor(recovery.virtualRegister()), GPRInfo::toRegister(displacementIndex++));
- m_jit.move(AssemblyHelpers::TrustedImm32(JSValue::Int32Tag), GPRInfo::toRegister(displacementIndex++));
- break;
- case CellDisplacedInJSStack:
- m_jit.load32(AssemblyHelpers::payloadFor(recovery.virtualRegister()), GPRInfo::toRegister(displacementIndex++));
- m_jit.move(AssemblyHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::toRegister(displacementIndex++));
- break;
- case BooleanDisplacedInJSStack:
- m_jit.load32(AssemblyHelpers::payloadFor(recovery.virtualRegister()), GPRInfo::toRegister(displacementIndex++));
- m_jit.move(AssemblyHelpers::TrustedImm32(JSValue::BooleanTag), GPRInfo::toRegister(displacementIndex++));
- break;
- default:
- break;
- }
- }
-
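- // Pass 2: every displaced value is now held in registers, so the stores
- // cannot interfere with each other; spill each value to its new location.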
- displacementIndex = 0;
- for (size_t index = 0; index < operands.size(); ++index) {
- const ValueRecovery& recovery = operands[index];
- switch (recovery.technique()) {
- case DisplacedInJSStack:
- case Int32DisplacedInJSStack:
- case CellDisplacedInJSStack:
- case BooleanDisplacedInJSStack:
- m_jit.store32(GPRInfo::toRegister(displacementIndex++), AssemblyHelpers::payloadFor((VirtualRegister)operands.operandForIndex(index)));
- m_jit.store32(GPRInfo::toRegister(displacementIndex++), AssemblyHelpers::tagFor((VirtualRegister)operands.operandForIndex(index)));
- break;
- default:
- break;
- }
- }
- } else {
- // FIXME: This should use the shuffling algorithm that we use
- // for speculative->non-speculative jumps, if we ever discover that
- // some hot code with lots of live values that get displaced and
- // spilled really enjoys frequently failing speculation.
-
- // For now this code is engineered to be correct but probably not fast.
- // In particular, it correctly handles cases where, for example,
- // the displacements are a permutation of the destination values, like
- //
- // 1 -> 2
- // 2 -> 1
- //
- // It accomplishes this by lifting all of the virtual registers from
- // their old (DFG JIT) locations into scratch memory, and then
- // transferring them from scratch memory to their new (old JIT)
- // locations.
-
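- // Stage displaced values in the scratch buffer, in the slots immediately
- // after those already reserved for poisoned virtual registers.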
- unsigned scratchIndex = numberOfPoisonedVirtualRegisters;
- for (size_t index = 0; index < operands.size(); ++index) {
- const ValueRecovery& recovery = operands[index];
- switch (recovery.technique()) {
- case DisplacedInJSStack:
- m_jit.load32(AssemblyHelpers::payloadFor(recovery.virtualRegister()), GPRInfo::regT0);
- m_jit.load32(AssemblyHelpers::tagFor(recovery.virtualRegister()), GPRInfo::regT1);
- m_jit.store32(GPRInfo::regT0, reinterpret_cast<char*>(scratchDataBuffer + scratchIndex) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
- m_jit.store32(GPRInfo::regT1, reinterpret_cast<char*>(scratchDataBuffer + scratchIndex) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
- scratchIndex++;
- break;
- case Int32DisplacedInJSStack:
- case CellDisplacedInJSStack:
- case BooleanDisplacedInJSStack:
- m_jit.load32(AssemblyHelpers::payloadFor(recovery.virtualRegister()), GPRInfo::regT0);
- m_jit.store32(GPRInfo::regT0, reinterpret_cast<char*>(scratchDataBuffer + scratchIndex++) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
- break;
- default:
- break;
- }
- }
-
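- // Second pass: copy each value from its scratch slot to its final stack
- // slot, materializing the statically known tag for the unboxed cases.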
- scratchIndex = numberOfPoisonedVirtualRegisters;
- for (size_t index = 0; index < operands.size(); ++index) {
- const ValueRecovery& recovery = operands[index];
- switch (recovery.technique()) {
- case DisplacedInJSStack:
- m_jit.load32(reinterpret_cast<char*>(scratchDataBuffer + scratchIndex) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload), GPRInfo::regT0);
- m_jit.load32(reinterpret_cast<char*>(scratchDataBuffer + scratchIndex) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag), GPRInfo::regT1);
- m_jit.store32(GPRInfo::regT0, AssemblyHelpers::payloadFor((VirtualRegister)operands.operandForIndex(index)));
- m_jit.store32(GPRInfo::regT1, AssemblyHelpers::tagFor((VirtualRegister)operands.operandForIndex(index)));
- scratchIndex++;
- break;
- case Int32DisplacedInJSStack:
- m_jit.load32(reinterpret_cast<char*>(scratchDataBuffer + scratchIndex++) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload), GPRInfo::regT0);
- m_jit.store32(AssemblyHelpers::TrustedImm32(JSValue::Int32Tag), AssemblyHelpers::tagFor((VirtualRegister)operands.operandForIndex(index)));
- m_jit.store32(GPRInfo::regT0, AssemblyHelpers::payloadFor((VirtualRegister)operands.operandForIndex(index)));
- break;
- case CellDisplacedInJSStack:
- m_jit.load32(reinterpret_cast<char*>(scratchDataBuffer + scratchIndex++) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload), GPRInfo::regT0);
- m_jit.store32(AssemblyHelpers::TrustedImm32(JSValue::CellTag), AssemblyHelpers::tagFor((VirtualRegister)operands.operandForIndex(index)));
- m_jit.store32(GPRInfo::regT0, AssemblyHelpers::payloadFor((VirtualRegister)operands.operandForIndex(index)));
- break;
- case BooleanDisplacedInJSStack:
- m_jit.load32(reinterpret_cast<char*>(scratchDataBuffer + scratchIndex++) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload), GPRInfo::regT0);
- m_jit.store32(AssemblyHelpers::TrustedImm32(JSValue::BooleanTag), AssemblyHelpers::tagFor((VirtualRegister)operands.operandForIndex(index)));
- m_jit.store32(GPRInfo::regT0, AssemblyHelpers::payloadFor((VirtualRegister)operands.operandForIndex(index)));
- break;
- default:
- break;
- }
- }
-
- ASSERT(scratchIndex == numberOfPoisonedVirtualRegisters + numberOfDisplacedVirtualRegisters);
- }
- }
-
- // 9) Dump all poisoned virtual registers.
-
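- // Each poisoned value was staged in the scratch buffer earlier; now that
- // the displaced reshuffle is complete, it is safe to write it to its real
- // stack slot, reconstructing the tag when the recovery technique implies one.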
- if (numberOfPoisonedVirtualRegisters) {
- for (int virtualRegister = 0; virtualRegister < (int)operands.numberOfLocals(); ++virtualRegister) {
- if (!poisonedVirtualRegisters[virtualRegister])
- continue;
-
- const ValueRecovery& recovery = operands.local(virtualRegister);
- switch (recovery.technique()) {
- case InGPR:
- case UnboxedInt32InGPR:
- case UnboxedBooleanInGPR: {
- m_jit.load32(reinterpret_cast<char*>(scratchDataBuffer + poisonIndex(virtualRegister)) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload), GPRInfo::regT0);
- m_jit.store32(GPRInfo::regT0, AssemblyHelpers::payloadFor((VirtualRegister)virtualRegister));
- uint32_t tag;
- if (recovery.technique() == InGPR)
- tag = JSValue::CellTag;
- else if (recovery.technique() == UnboxedInt32InGPR)
- tag = JSValue::Int32Tag;
- else
- tag = JSValue::BooleanTag;
- m_jit.store32(AssemblyHelpers::TrustedImm32(tag), AssemblyHelpers::tagFor((VirtualRegister)virtualRegister));
- break;
- }
-
- case InFPR:
- case InPair:
- case UInt32InGPR:
- m_jit.load32(reinterpret_cast<char*>(scratchDataBuffer + poisonIndex(virtualRegister)) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload), GPRInfo::regT0);
- m_jit.load32(reinterpret_cast<char*>(scratchDataBuffer + poisonIndex(virtualRegister)) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag), GPRInfo::regT1);
- m_jit.store32(GPRInfo::regT0, AssemblyHelpers::payloadFor((VirtualRegister)virtualRegister));
- m_jit.store32(GPRInfo::regT1, AssemblyHelpers::tagFor((VirtualRegister)virtualRegister));
- break;
-
- default:
- break;
- }
- }
- }
-
- // 10) Dump all constants. Optimize for Undefined, since that's a constant we see
- // often.
-
- if (haveConstants) {
- if (haveUndefined) {
- m_jit.move(AssemblyHelpers::TrustedImm32(jsUndefined().payload()), GPRInfo::regT0);
- m_jit.move(AssemblyHelpers::TrustedImm32(jsUndefined().tag()), GPRInfo::regT1);
- }
-
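- // Undefined reuses the payload/tag cached in regT0/regT1 above; any
- // other constant is stored as a pair of immediates.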
- for (size_t index = 0; index < operands.size(); ++index) {
- const ValueRecovery& recovery = operands[index];
- if (recovery.technique() != Constant)
- continue;
- if (recovery.constant().isUndefined()) {
- m_jit.store32(GPRInfo::regT0, AssemblyHelpers::payloadFor((VirtualRegister)operands.operandForIndex(index)));
- m_jit.store32(GPRInfo::regT1, AssemblyHelpers::tagFor((VirtualRegister)operands.operandForIndex(index)));
- } else {
- m_jit.store32(AssemblyHelpers::TrustedImm32(recovery.constant().payload()), AssemblyHelpers::payloadFor((VirtualRegister)operands.operandForIndex(index)));
- m_jit.store32(AssemblyHelpers::TrustedImm32(recovery.constant().tag()), AssemblyHelpers::tagFor((VirtualRegister)operands.operandForIndex(index)));
- }
- }
- }
-
- // 11) Adjust the old JIT's execute counter. Since we are performing an OSR
- // exit, we know that all new calls into this code will go to the new JIT,
- // so the execute counter only affects call frames that performed OSR exit
- // and call frames that were still executing the old JIT at the time of
- // another call frame's OSR exit. We want to ensure that the following is true: