+ case GetEnumerableLength: {
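+ // The indexed length is cached on the JSPropertyNameEnumerator, so this is a plain load with no call into the runtime.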
+ SpeculateCellOperand enumerator(this, node->child1());
+ GPRFlushedCallResult result(this);
+ GPRReg resultGPR = result.gpr();
+
+ m_jit.load32(MacroAssembler::Address(enumerator.gpr(), JSPropertyNameEnumerator::indexedLengthOffset()), resultGPR);
+ int32Result(resultGPR, node);
+ break;
+ }
+ case HasGenericProperty: {
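+ // There is no fast path for a generic property query: always call into the runtime to do the lookup.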
+ JSValueOperand base(this, node->child1());
+ SpeculateCellOperand property(this, node->child2());
+ GPRFlushedCallResult result(this);
+ GPRReg resultGPR = result.gpr();
+
+ flushRegisters();
+ callOperation(operationHasGenericProperty, resultGPR, base.gpr(), property.gpr());
+ jsValueResult(resultGPR, node, DataFormatJSBoolean);
+ break;
+ }
+ case HasStructureProperty: {
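+ // Fast path: if the base's structure still matches the enumerator's cached structure, then a
+ // property taken from the enumerator's structure name list must be present, so the result is true.
+ // A structure mismatch falls back to the generic runtime lookup.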
+ JSValueOperand base(this, node->child1());
+ SpeculateCellOperand property(this, node->child2());
+ SpeculateCellOperand enumerator(this, node->child3());
+ GPRTemporary result(this);
+
+ GPRReg baseGPR = base.gpr();
+ GPRReg propertyGPR = property.gpr();
+ GPRReg resultGPR = result.gpr();
+
+ m_jit.load32(MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()), resultGPR);
+ MacroAssembler::Jump wrongStructure = m_jit.branch32(MacroAssembler::NotEqual,
+ resultGPR,
+ MacroAssembler::Address(enumerator.gpr(), JSPropertyNameEnumerator::cachedStructureIDOffset()));
+
+ moveTrueTo(resultGPR);
+
+ addSlowPathGenerator(slowPathCall(wrongStructure, this, operationHasGenericProperty, resultGPR, baseGPR, propertyGPR));
+ jsValueResult(resultGPR, node, DataFormatJSBoolean);
+ break;
+ }
+ case HasIndexedProperty: {
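+ // For the array modes handled below, a present (non-hole) in-bounds entry yields true.
+ // Everything else either triggers a speculation failure or defers to operationHasIndexedProperty.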
+ SpeculateCellOperand base(this, node->child1());
+ SpeculateStrictInt32Operand index(this, node->child2());
+ GPRTemporary result(this);
+
+ GPRReg baseGPR = base.gpr();
+ GPRReg indexGPR = index.gpr();
+ GPRReg resultGPR = result.gpr();
+
+ MacroAssembler::JumpList slowCases;
+ ArrayMode mode = node->arrayMode();
+ switch (mode.type()) {
+ case Array::Int32:
+ case Array::Contiguous: {
+ ASSERT(!!node->child3());
+ StorageOperand storage(this, node->child3());
+ GPRTemporary scratch(this);
+
+ GPRReg storageGPR = storage.gpr();
+ GPRReg scratchGPR = scratch.gpr();
+
+ MacroAssembler::Jump outOfBounds = m_jit.branch32(MacroAssembler::AboveOrEqual, indexGPR, MacroAssembler::Address(storageGPR, Butterfly::offsetOfPublicLength()));
+ if (mode.isInBounds())
+ speculationCheck(OutOfBounds, JSValueRegs(), 0, outOfBounds);
+ else
+ slowCases.append(outOfBounds);
+
+ m_jit.load64(MacroAssembler::BaseIndex(storageGPR, indexGPR, MacroAssembler::TimesEight), scratchGPR);
+ slowCases.append(m_jit.branchTest64(MacroAssembler::Zero, scratchGPR));
+ moveTrueTo(resultGPR);
+ break;
+ }
+ case Array::Double: {
+ ASSERT(!!node->child3());
+ StorageOperand storage(this, node->child3());
+ FPRTemporary scratch(this);
+ FPRReg scratchFPR = scratch.fpr();
+ GPRReg storageGPR = storage.gpr();
+
+ MacroAssembler::Jump outOfBounds = m_jit.branch32(MacroAssembler::AboveOrEqual, indexGPR, MacroAssembler::Address(storageGPR, Butterfly::offsetOfPublicLength()));
+ if (mode.isInBounds())
+ speculationCheck(OutOfBounds, JSValueRegs(), 0, outOfBounds);
+ else
+ slowCases.append(outOfBounds);
+
+ m_jit.loadDouble(MacroAssembler::BaseIndex(storageGPR, indexGPR, MacroAssembler::TimesEight), scratchFPR);
+ slowCases.append(m_jit.branchDouble(MacroAssembler::DoubleNotEqualOrUnordered, scratchFPR, scratchFPR));
+ moveTrueTo(resultGPR);
+ break;
+ }
+ case Array::ArrayStorage: {
+ ASSERT(!!node->child3());
+ StorageOperand storage(this, node->child3());
+ GPRTemporary scratch(this);
+
+ GPRReg storageGPR = storage.gpr();
+ GPRReg scratchGPR = scratch.gpr();
+
+ MacroAssembler::Jump outOfBounds = m_jit.branch32(MacroAssembler::AboveOrEqual, indexGPR, MacroAssembler::Address(storageGPR, ArrayStorage::vectorLengthOffset()));
+ if (mode.isInBounds())
+ speculationCheck(OutOfBounds, JSValueRegs(), 0, outOfBounds);
+ else
+ slowCases.append(outOfBounds);
+
+ m_jit.load64(MacroAssembler::BaseIndex(storageGPR, indexGPR, MacroAssembler::TimesEight, ArrayStorage::vectorOffset()), scratchGPR);
+ slowCases.append(m_jit.branchTest64(MacroAssembler::Zero, scratchGPR));
+ moveTrueTo(resultGPR);
+ break;
+ }
+ default: {
+ slowCases.append(m_jit.jump());
+ break;
+ }
+ }
+
+ addSlowPathGenerator(slowPathCall(slowCases, this, operationHasIndexedProperty, resultGPR, baseGPR, indexGPR));
+
+ jsValueResult(resultGPR, node, DataFormatJSBoolean);
+ break;
+ }
+ case GetDirectPname: {
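+ // Load a property directly by its index in the enumerator's name list, provided the base's structure
+ // still matches the enumerator's cached structure; otherwise fall back to a generic get-by-val.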
+ Edge& baseEdge = m_jit.graph().varArgChild(node, 0);
+ Edge& propertyEdge = m_jit.graph().varArgChild(node, 1);
+ Edge& indexEdge = m_jit.graph().varArgChild(node, 2);
+ Edge& enumeratorEdge = m_jit.graph().varArgChild(node, 3);
+
+ SpeculateCellOperand base(this, baseEdge);
+ SpeculateCellOperand property(this, propertyEdge);
+ SpeculateStrictInt32Operand index(this, indexEdge);
+ SpeculateCellOperand enumerator(this, enumeratorEdge);
+ GPRTemporary result(this);
+ GPRTemporary scratch1(this);
+ GPRTemporary scratch2(this);
+
+ GPRReg baseGPR = base.gpr();
+ GPRReg propertyGPR = property.gpr();
+ GPRReg indexGPR = index.gpr();
+ GPRReg enumeratorGPR = enumerator.gpr();
+ GPRReg resultGPR = result.gpr();
+ GPRReg scratch1GPR = scratch1.gpr();
+ GPRReg scratch2GPR = scratch2.gpr();
+
+ // Check the structure
+ m_jit.load32(MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()), scratch1GPR);
+ MacroAssembler::Jump wrongStructure = m_jit.branch32(MacroAssembler::NotEqual,
+ scratch1GPR, MacroAssembler::Address(enumeratorGPR, JSPropertyNameEnumerator::cachedStructureIDOffset()));
+
+ // Compute the offset
+ // If the index is less than the enumerator's cached inline capacity, then it's an inline access.
+ MacroAssembler::Jump outOfLineAccess = m_jit.branch32(MacroAssembler::AboveOrEqual,
+ indexGPR, MacroAssembler::Address(enumeratorGPR, JSPropertyNameEnumerator::cachedInlineCapacityOffset()));
+
+ m_jit.load64(MacroAssembler::BaseIndex(baseGPR, indexGPR, MacroAssembler::TimesEight, JSObject::offsetOfInlineStorage()), resultGPR);
+
+ MacroAssembler::Jump done = m_jit.jump();
+
+ // Otherwise it's out of line
+ outOfLineAccess.link(&m_jit);
+ m_jit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratch2GPR);
+ m_jit.move(indexGPR, scratch1GPR);
+ m_jit.sub32(MacroAssembler::Address(enumeratorGPR, JSPropertyNameEnumerator::cachedInlineCapacityOffset()), scratch1GPR);
+ m_jit.neg32(scratch1GPR);
+ m_jit.signExtend32ToPtr(scratch1GPR, scratch1GPR);
+ int32_t offsetOfFirstProperty = static_cast<int32_t>(offsetInButterfly(firstOutOfLineOffset)) * sizeof(EncodedJSValue);
+ m_jit.load64(MacroAssembler::BaseIndex(scratch2GPR, scratch1GPR, MacroAssembler::TimesEight, offsetOfFirstProperty), resultGPR);
+
+ done.link(&m_jit);
+
+ addSlowPathGenerator(slowPathCall(wrongStructure, this, operationGetByVal, resultGPR, baseGPR, propertyGPR));
+
+ jsValueResult(resultGPR, node);
+ break;
+ }
+ case GetPropertyEnumerator: {
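+ // Building the property name enumerator is handled entirely by the runtime.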
+ SpeculateCellOperand base(this, node->child1());
+ GPRFlushedCallResult result(this);
+ GPRReg resultGPR = result.gpr();
+
+ flushRegisters();
+ callOperation(operationGetPropertyEnumerator, resultGPR, base.gpr());
+ cellResult(resultGPR, node);
+ break;
+ }
+ case GetEnumeratorStructurePname:
+ case GetEnumeratorGenericPname: {
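+ // Return the cached property name at the given index, or jsNull() once the index has passed the
+ // end of the structure (respectively generic) portion of the enumerator's name vector.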
+ SpeculateCellOperand enumerator(this, node->child1());
+ SpeculateStrictInt32Operand index(this, node->child2());
+ GPRTemporary scratch1(this);
+ GPRTemporary result(this);
+
+ GPRReg enumeratorGPR = enumerator.gpr();
+ GPRReg indexGPR = index.gpr();
+ GPRReg scratch1GPR = scratch1.gpr();
+ GPRReg resultGPR = result.gpr();
+
+ MacroAssembler::Jump inBounds = m_jit.branch32(MacroAssembler::Below, indexGPR,
+ MacroAssembler::Address(enumeratorGPR, (op == GetEnumeratorStructurePname)
+ ? JSPropertyNameEnumerator::endStructurePropertyIndexOffset()
+ : JSPropertyNameEnumerator::endGenericPropertyIndexOffset()));
+
+ m_jit.move(MacroAssembler::TrustedImm64(JSValue::encode(jsNull())), resultGPR);
+
+ MacroAssembler::Jump done = m_jit.jump();
+ inBounds.link(&m_jit);
+
+ m_jit.loadPtr(MacroAssembler::Address(enumeratorGPR, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), scratch1GPR);
+ m_jit.load64(MacroAssembler::BaseIndex(scratch1GPR, indexGPR, MacroAssembler::TimesEight), resultGPR);
+
+ done.link(&m_jit);
+ jsValueResult(resultGPR, node);
+ break;
+ }
+ case ToIndexString: {
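+ // Index-to-string conversion is handled by the runtime.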
+ SpeculateInt32Operand index(this, node->child1());
+ GPRFlushedCallResult result(this);
+ GPRReg resultGPR = result.gpr();
+
+ flushRegisters();
+ callOperation(operationToIndexString, resultGPR, index.gpr());
+ cellResult(resultGPR, node);
+ break;
+ }
+ case ProfileType: {
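+ // Record the value, its structureID (if it is a cell), and the TypeLocation in the type profiler
+ // log, skipping the write entirely when the value still matches the location's last seen type.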
+ JSValueOperand value(this, node->child1());
+ GPRTemporary scratch1(this);
+ GPRTemporary scratch2(this);
+ GPRTemporary scratch3(this);
+
+ GPRReg scratch1GPR = scratch1.gpr();
+ GPRReg scratch2GPR = scratch2.gpr();
+ GPRReg scratch3GPR = scratch3.gpr();
+ GPRReg valueGPR = value.gpr();
+
+ MacroAssembler::JumpList jumpToEnd;
+
+ TypeLocation* cachedTypeLocation = node->typeLocation();
+ // Compile in a predictive type check, if possible, to see if we can skip writing to the log.
+ // These type checks are inlined to match those of the 64-bit JSValue type checks.
+ if (cachedTypeLocation->m_lastSeenType == TypeUndefined)
+ jumpToEnd.append(m_jit.branch64(MacroAssembler::Equal, valueGPR, MacroAssembler::TrustedImm64(JSValue::encode(jsUndefined()))));
+ else if (cachedTypeLocation->m_lastSeenType == TypeNull)
+ jumpToEnd.append(m_jit.branch64(MacroAssembler::Equal, valueGPR, MacroAssembler::TrustedImm64(JSValue::encode(jsNull()))));
+ else if (cachedTypeLocation->m_lastSeenType == TypeBoolean) {
+ m_jit.move(valueGPR, scratch2GPR);
+ m_jit.and64(TrustedImm32(~1), scratch2GPR);
+ jumpToEnd.append(m_jit.branch64(MacroAssembler::Equal, scratch2GPR, MacroAssembler::TrustedImm64(ValueFalse)));
+ } else if (cachedTypeLocation->m_lastSeenType == TypeMachineInt)
+ jumpToEnd.append(m_jit.branch64(MacroAssembler::AboveOrEqual, valueGPR, GPRInfo::tagTypeNumberRegister));
+ else if (cachedTypeLocation->m_lastSeenType == TypeNumber)
+ jumpToEnd.append(m_jit.branchTest64(MacroAssembler::NonZero, valueGPR, GPRInfo::tagTypeNumberRegister));
+ else if (cachedTypeLocation->m_lastSeenType == TypeString) {
+ MacroAssembler::Jump isNotCell = m_jit.branchIfNotCell(JSValueRegs(valueGPR));
+ jumpToEnd.append(m_jit.branchIfString(valueGPR));
+ isNotCell.link(&m_jit);
+ }
+
+ // Load the TypeProfilerLog into Scratch2.
+ TypeProfilerLog* cachedTypeProfilerLog = m_jit.vm()->typeProfilerLog();
+ m_jit.move(TrustedImmPtr(cachedTypeProfilerLog), scratch2GPR);
+
+ // Load the next LogEntry into Scratch1.
+ m_jit.loadPtr(MacroAssembler::Address(scratch2GPR, TypeProfilerLog::currentLogEntryOffset()), scratch1GPR);
+
+ // Store the JSValue onto the log entry.
+ m_jit.store64(valueGPR, MacroAssembler::Address(scratch1GPR, TypeProfilerLog::LogEntry::valueOffset()));
+
+ // Store the structureID of the cell if valueGPR is a cell; otherwise store 0 on the log entry.
+ MacroAssembler::Jump isNotCell = m_jit.branchIfNotCell(JSValueRegs(valueGPR));
+ m_jit.load32(MacroAssembler::Address(valueGPR, JSCell::structureIDOffset()), scratch3GPR);
+ m_jit.store32(scratch3GPR, MacroAssembler::Address(scratch1GPR, TypeProfilerLog::LogEntry::structureIDOffset()));
+ MacroAssembler::Jump skipIsCell = m_jit.jump();
+ isNotCell.link(&m_jit);
+ m_jit.store32(TrustedImm32(0), MacroAssembler::Address(scratch1GPR, TypeProfilerLog::LogEntry::structureIDOffset()));
+ skipIsCell.link(&m_jit);
+
+ // Store the typeLocation on the log entry.
+ m_jit.move(TrustedImmPtr(cachedTypeLocation), scratch3GPR);
+ m_jit.storePtr(scratch3GPR, MacroAssembler::Address(scratch1GPR, TypeProfilerLog::LogEntry::locationOffset()));
+
+ // Increment the current log entry.
+ m_jit.addPtr(TrustedImm32(sizeof(TypeProfilerLog::LogEntry)), scratch1GPR);
+ m_jit.storePtr(scratch1GPR, MacroAssembler::Address(scratch2GPR, TypeProfilerLog::currentLogEntryOffset()));
+ MacroAssembler::Jump clearLog = m_jit.branchPtr(MacroAssembler::Equal, scratch1GPR, TrustedImmPtr(cachedTypeProfilerLog->logEndPtr()));
+ addSlowPathGenerator(
+ slowPathCall(clearLog, this, operationProcessTypeProfilerLogDFG, NoResult));
+
+ jumpToEnd.link(&m_jit);
+
+ noResult(node);
+ break;
+ }
+ case ProfileControlFlow: {
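+ // If profiling has already observed this block executing, there is nothing left to record, so no code is emitted.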
+ BasicBlockLocation* basicBlockLocation = node->basicBlockLocation();
+ if (!basicBlockLocation->hasExecuted()) {
+ GPRTemporary scratch1(this);
+ basicBlockLocation->emitExecuteCode(m_jit, scratch1.gpr());
+ }
+ noResult(node);
+ break;
+ }
+