+ }
+
+ if (verbose) {
+ dataLog("Doing hard inlining...\n");
+ dataLog("Stack: ", currentCodeOrigin(), "\n");
+ }
+
+ int registerOffset = registerOffsetOrFirstFreeReg;
+
+ // This makes me wish that we were in SSA all the time. We need to pick a variable into which to
+ // store the callee so that it will be accessible to all of the blocks we're about to create. We
+ // get away with doing an immediate-set here because we haven't performed any side effects
+ // yet.
+ if (verbose)
+ dataLog("Register offset: ", registerOffset, "\n");
+ VirtualRegister calleeReg(registerOffset + JSStack::Callee);
+ calleeReg = m_inlineStackTop->remapOperand(calleeReg);
+ if (verbose)
+ dataLog("Callee is going to be ", calleeReg, "\n");
+ setDirect(calleeReg, callTargetNode, ImmediateSetWithFlush);
+
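+ // Dispatch on the callee: the switch cases added below key off either the callee cell itself
+ // (direct calls) or its executable (closure calls), and each case jumps to its inlined body.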
+ SwitchData& data = *m_graph.m_switchData.add();
+ data.kind = SwitchCell;
+ addToGraph(Switch, OpInfo(&data), thingToSwitchOn);
+
+ BasicBlock* originBlock = m_currentBlock;
+ if (verbose)
+ dataLog("Marking ", RawPointer(originBlock), " as linked (origin of poly inline)\n");
+ originBlock->didLink();
+ cancelLinkingForBlock(m_inlineStackTop, originBlock);
+
+ // Each inlined callee will have a landing block that it returns to. They should all have jumps
+ // to the continuation block, which we create last.
+ Vector<BasicBlock*> landingBlocks;
+
+ // We may force this to true if we give up on inlining any of the edges.
+ bool couldTakeSlowPath = callLinkStatus.couldTakeSlowPath();
+
+ if (verbose)
+ dataLog("About to loop over functions at ", currentCodeOrigin(), ".\n");
+
+ for (unsigned i = 0; i < callLinkStatus.size(); ++i) {
+ m_currentIndex = oldOffset;
+ RefPtr<BasicBlock> block = adoptRef(new BasicBlock(UINT_MAX, m_numArguments, m_numLocals, PNaN));
+ m_currentBlock = block.get();
+ m_graph.appendBlock(block);
+ prepareToParseBlock();
+
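+ // Reload the callee from the slot we flushed it to above, so that this block has its own
+ // use of the value.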
+ Node* myCallTargetNode = getDirect(calleeReg);
+
+ bool inliningResult = attemptToInlineCall(
+ myCallTargetNode, resultOperand, callLinkStatus[i], registerOffset,
+ argumentCountIncludingThis, nextOffset, kind, CallerLinksManually, prediction,
+ inliningBalance, [&] (CodeBlock*) { });
+
+ if (!inliningResult) {
+ // That failed, so we let the block die. Nothing interesting should have been added to
+ // the block. We also give up on inlining any of the (less frequent) callees.
+ ASSERT(m_currentBlock == block.get());
+ ASSERT(m_graph.m_blocks.last() == block);
+ m_graph.killBlockAndItsContents(block.get());
+ m_graph.m_blocks.removeLast();
+
+ // The fact that inlining failed means we need a slow path.
+ couldTakeSlowPath = true;
+ break;
+ }
+
+ JSCell* thingToCaseOn;
+ if (allAreDirectCalls)
+ thingToCaseOn = callLinkStatus[i].nonExecutableCallee();
+ else {
+ ASSERT(allAreClosureCalls);
+ thingToCaseOn = callLinkStatus[i].executable();
+ }
+ data.cases.append(SwitchCase(m_graph.freeze(thingToCaseOn), block.get()));
+ m_currentIndex = nextOffset;
+ processSetLocalQueue(); // This only comes into play for intrinsics, since normal inlined code will leave an empty queue.
+ addToGraph(Jump);
+ if (verbose)
+ dataLog("Marking ", RawPointer(m_currentBlock), " as linked (tail of poly inlinee)\n");
+ m_currentBlock->didLink();
+ landingBlocks.append(m_currentBlock);
+
+ if (verbose)
+ dataLog("Finished inlining ", callLinkStatus[i], " at ", currentCodeOrigin(), ".\n");
+ }
+
+ RefPtr<BasicBlock> slowPathBlock = adoptRef(
+ new BasicBlock(UINT_MAX, m_numArguments, m_numLocals, PNaN));
+ m_currentIndex = oldOffset;
+ data.fallThrough = BranchTarget(slowPathBlock.get());
+ m_graph.appendBlock(slowPathBlock);
+ if (verbose)
+ dataLog("Marking ", RawPointer(slowPathBlock.get()), " as linked (slow path block)\n");
+ slowPathBlock->didLink();
+ prepareToParseBlock();
+ m_currentBlock = slowPathBlock.get();
+ Node* myCallTargetNode = getDirect(calleeReg);
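+ // If profiling saw a slow path (or we gave up on inlining one of the callees), emit a real
+ // call here. Otherwise the switch covers every callee we expect, so this block ought to be
+ // unreachable and we force an exit if it is ever taken.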
+ if (couldTakeSlowPath) {
+ addCall(
+ resultOperand, callOp, OpInfo(), myCallTargetNode, argumentCountIncludingThis,
+ registerOffset, prediction);
+ } else {
+ addToGraph(CheckBadCell);
+ addToGraph(Phantom, myCallTargetNode);
+ emitArgumentPhantoms(registerOffset, argumentCountIncludingThis);
+
+ set(VirtualRegister(resultOperand), addToGraph(BottomValue));
+ }
+
+ m_currentIndex = nextOffset;
+ processSetLocalQueue();
+ addToGraph(Jump);
+ landingBlocks.append(m_currentBlock);
+
+ RefPtr<BasicBlock> continuationBlock = adoptRef(
+ new BasicBlock(UINT_MAX, m_numArguments, m_numLocals, PNaN));
+ m_graph.appendBlock(continuationBlock);
+ if (verbose)
+ dataLog("Adding unlinked block ", RawPointer(continuationBlock.get()), " (continuation)\n");
+ m_inlineStackTop->m_unlinkedBlocks.append(UnlinkedBlock(continuationBlock.get()));
+ prepareToParseBlock();
+ m_currentBlock = continuationBlock.get();
+
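+ // Now that the continuation block exists, point every landing block's terminal Jump at it.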
+ for (unsigned i = landingBlocks.size(); i--;)
+ landingBlocks[i]->terminal()->targetBlock() = continuationBlock.get();
+
+ m_currentIndex = oldOffset;
+
+ if (verbose) {
+ dataLog("Done inlining (hard).\n");
+ dataLog("Stack: ", currentCodeOrigin(), "\n");
+ }
+ return true;
+}
+
+template<typename ChecksFunctor>
+bool ByteCodeParser::handleMinMax(int resultOperand, NodeType op, int registerOffset, int argumentCountIncludingThis, const ChecksFunctor& insertChecks)
+{
+ if (argumentCountIncludingThis == 1) { // Math.min()
+ insertChecks();
+ set(VirtualRegister(resultOperand), addToGraph(JSConstant, OpInfo(m_constantNaN)));
+ return true;
+ }
+
+ if (argumentCountIncludingThis == 2) { // Math.min(x)
+ insertChecks();
+ Node* result = get(VirtualRegister(virtualRegisterForArgument(1, registerOffset)));
+ addToGraph(Phantom, Edge(result, NumberUse));
+ set(VirtualRegister(resultOperand), result);
+ return true;
+ }
+
+ if (argumentCountIncludingThis == 3) { // Math.min(x, y)
+ insertChecks();
+ set(VirtualRegister(resultOperand), addToGraph(op, get(virtualRegisterForArgument(1, registerOffset)), get(virtualRegisterForArgument(2, registerOffset))));
+ return true;
+ }
+
+ // Don't handle >=3 arguments for now.
+ return false;
+}
+
+template<typename ChecksFunctor>
+bool ByteCodeParser::handleIntrinsic(int resultOperand, Intrinsic intrinsic, int registerOffset, int argumentCountIncludingThis, SpeculatedType prediction, const ChecksFunctor& insertChecks)
+{
+ switch (intrinsic) {
+ case AbsIntrinsic: {
+ if (argumentCountIncludingThis == 1) { // Math.abs()
+ insertChecks();
+ set(VirtualRegister(resultOperand), addToGraph(JSConstant, OpInfo(m_constantNaN)));
+ return true;
+ }
+
+ if (!MacroAssembler::supportsFloatingPointAbs())
+ return false;
+
+ insertChecks();
+ Node* node = addToGraph(ArithAbs, get(virtualRegisterForArgument(1, registerOffset)));
+ if (m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow))
+ node->mergeFlags(NodeMayOverflowInDFG);
+ set(VirtualRegister(resultOperand), node);
+ return true;
+ }
+
+ case MinIntrinsic:
+ return handleMinMax(resultOperand, ArithMin, registerOffset, argumentCountIncludingThis, insertChecks);
+
+ case MaxIntrinsic:
+ return handleMinMax(resultOperand, ArithMax, registerOffset, argumentCountIncludingThis, insertChecks);
+
+ case SqrtIntrinsic:
+ case CosIntrinsic:
+ case SinIntrinsic:
+ case LogIntrinsic: {
+ if (argumentCountIncludingThis == 1) {
+ insertChecks();
+ set(VirtualRegister(resultOperand), addToGraph(JSConstant, OpInfo(m_constantNaN)));
+ return true;
+ }
+
+ switch (intrinsic) {
+ case SqrtIntrinsic:
+ insertChecks();
+ set(VirtualRegister(resultOperand), addToGraph(ArithSqrt, get(virtualRegisterForArgument(1, registerOffset))));
+ return true;
+
+ case CosIntrinsic:
+ insertChecks();
+ set(VirtualRegister(resultOperand), addToGraph(ArithCos, get(virtualRegisterForArgument(1, registerOffset))));
+ return true;
+
+ case SinIntrinsic:
+ insertChecks();
+ set(VirtualRegister(resultOperand), addToGraph(ArithSin, get(virtualRegisterForArgument(1, registerOffset))));
+ return true;
+
+ case LogIntrinsic:
+ insertChecks();
+ set(VirtualRegister(resultOperand), addToGraph(ArithLog, get(virtualRegisterForArgument(1, registerOffset))));
+ return true;
+
+ default:
+ RELEASE_ASSERT_NOT_REACHED();
+ return false;
+ }
+ }
+
+ case PowIntrinsic: {
+ if (argumentCountIncludingThis < 3) {
+ // Math.pow() and Math.pow(x) return NaN.
+ insertChecks();
+ set(VirtualRegister(resultOperand), addToGraph(JSConstant, OpInfo(m_constantNaN)));
+ return true;
+ }
+ insertChecks();
+ VirtualRegister xOperand = virtualRegisterForArgument(1, registerOffset);
+ VirtualRegister yOperand = virtualRegisterForArgument(2, registerOffset);
+ set(VirtualRegister(resultOperand), addToGraph(ArithPow, get(xOperand), get(yOperand)));
+ return true;
+ }
+
+ case ArrayPushIntrinsic: {
+ if (argumentCountIncludingThis != 2)
+ return false;
+
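+ // Pull the array profile off the op_call instruction we are inlining through, so we know
+ // what kind of array to expect.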
+ ArrayMode arrayMode = getArrayMode(m_currentInstruction[OPCODE_LENGTH(op_call) - 2].u.arrayProfile);
+ if (!arrayMode.isJSArray())
+ return false;
+ switch (arrayMode.type()) {
+ case Array::Undecided:
+ case Array::Int32:
+ case Array::Double:
+ case Array::Contiguous:
+ case Array::ArrayStorage: {
+ insertChecks();
+ Node* arrayPush = addToGraph(ArrayPush, OpInfo(arrayMode.asWord()), OpInfo(prediction), get(virtualRegisterForArgument(0, registerOffset)), get(virtualRegisterForArgument(1, registerOffset)));
+ set(VirtualRegister(resultOperand), arrayPush);
+
+ return true;
+ }
+
+ default:
+ return false;
+ }
+ }
+
+ case ArrayPopIntrinsic: {
+ if (argumentCountIncludingThis != 1)
+ return false;
+
+ ArrayMode arrayMode = getArrayMode(m_currentInstruction[OPCODE_LENGTH(op_call) - 2].u.arrayProfile);
+ if (!arrayMode.isJSArray())
+ return false;
+ switch (arrayMode.type()) {
+ case Array::Int32:
+ case Array::Double:
+ case Array::Contiguous:
+ case Array::ArrayStorage: {
+ insertChecks();
+ Node* arrayPop = addToGraph(ArrayPop, OpInfo(arrayMode.asWord()), OpInfo(prediction), get(virtualRegisterForArgument(0, registerOffset)));
+ set(VirtualRegister(resultOperand), arrayPop);
+ return true;
+ }
+
+ default:
+ return false;
+ }
+ }
+
+ case CharCodeAtIntrinsic: {
+ if (argumentCountIncludingThis != 2)
+ return false;
+
+ insertChecks();
+ VirtualRegister thisOperand = virtualRegisterForArgument(0, registerOffset);
+ VirtualRegister indexOperand = virtualRegisterForArgument(1, registerOffset);
+ Node* charCode = addToGraph(StringCharCodeAt, OpInfo(ArrayMode(Array::String).asWord()), get(thisOperand), get(indexOperand));
+
+ set(VirtualRegister(resultOperand), charCode);
+ return true;
+ }
+
+ case CharAtIntrinsic: {
+ if (argumentCountIncludingThis != 2)
+ return false;
+
+ insertChecks();
+ VirtualRegister thisOperand = virtualRegisterForArgument(0, registerOffset);
+ VirtualRegister indexOperand = virtualRegisterForArgument(1, registerOffset);
+ Node* charCode = addToGraph(StringCharAt, OpInfo(ArrayMode(Array::String).asWord()), get(thisOperand), get(indexOperand));
+
+ set(VirtualRegister(resultOperand), charCode);
+ return true;
+ }
+ case Clz32Intrinsic: {
+ insertChecks();
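+ // Math.clz32() with no argument coerces undefined to 0, and clz32(0) is 32.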
+ if (argumentCountIncludingThis == 1)
+ set(VirtualRegister(resultOperand), addToGraph(JSConstant, OpInfo(m_graph.freeze(jsNumber(32)))));
+ else {
+ Node* operand = get(virtualRegisterForArgument(1, registerOffset));
+ set(VirtualRegister(resultOperand), addToGraph(ArithClz32, operand));
+ }
+ return true;
+ }
+ case FromCharCodeIntrinsic: {
+ if (argumentCountIncludingThis != 2)
+ return false;
+
+ insertChecks();
+ VirtualRegister indexOperand = virtualRegisterForArgument(1, registerOffset);
+ Node* charCode = addToGraph(StringFromCharCode, get(indexOperand));
+
+ set(VirtualRegister(resultOperand), charCode);
+
+ return true;
+ }
+
+ case RegExpExecIntrinsic: {
+ if (argumentCountIncludingThis != 2)
+ return false;
+
+ insertChecks();
+ Node* regExpExec = addToGraph(RegExpExec, OpInfo(0), OpInfo(prediction), get(virtualRegisterForArgument(0, registerOffset)), get(virtualRegisterForArgument(1, registerOffset)));
+ set(VirtualRegister(resultOperand), regExpExec);
+
+ return true;
+ }
+
+ case RegExpTestIntrinsic: {
+ if (argumentCountIncludingThis != 2)
+ return false;
+
+ insertChecks();
+ Node* regExpExec = addToGraph(RegExpTest, OpInfo(0), OpInfo(prediction), get(virtualRegisterForArgument(0, registerOffset)), get(virtualRegisterForArgument(1, registerOffset)));
+ set(VirtualRegister(resultOperand), regExpExec);
+
+ return true;
+ }
+ case RoundIntrinsic: {
+ if (argumentCountIncludingThis == 1) {
+ insertChecks();
+ set(VirtualRegister(resultOperand), addToGraph(JSConstant, OpInfo(m_constantNaN)));
+ return true;
+ }
+ if (argumentCountIncludingThis == 2) {
+ insertChecks();
+ Node* operand = get(virtualRegisterForArgument(1, registerOffset));
+ Node* roundNode = addToGraph(ArithRound, OpInfo(0), OpInfo(prediction), operand);
+ set(VirtualRegister(resultOperand), roundNode);
+ return true;
+ }
+ return false;
+ }
+ case IMulIntrinsic: {
+ if (argumentCountIncludingThis != 3)
+ return false;
+ insertChecks();
+ VirtualRegister leftOperand = virtualRegisterForArgument(1, registerOffset);
+ VirtualRegister rightOperand = virtualRegisterForArgument(2, registerOffset);
+ Node* left = get(leftOperand);
+ Node* right = get(rightOperand);
+ set(VirtualRegister(resultOperand), addToGraph(ArithIMul, left, right));
+ return true;
+ }
+
+ case FRoundIntrinsic: {
+ if (argumentCountIncludingThis != 2)
+ return false;
+ insertChecks();
+ VirtualRegister operand = virtualRegisterForArgument(1, registerOffset);
+ set(VirtualRegister(resultOperand), addToGraph(ArithFRound, get(operand)));
+ return true;
+ }
+
+ case DFGTrueIntrinsic: {
+ insertChecks();
+ set(VirtualRegister(resultOperand), jsConstant(jsBoolean(true)));
+ return true;
+ }
+
+ case OSRExitIntrinsic: {
+ insertChecks();
+ addToGraph(ForceOSRExit);
+ set(VirtualRegister(resultOperand), addToGraph(JSConstant, OpInfo(m_constantUndefined)));
+ return true;
+ }
+
+ case IsFinalTierIntrinsic: {
+ insertChecks();
+ set(VirtualRegister(resultOperand),
+ jsConstant(jsBoolean(Options::useFTLJIT() ? isFTL(m_graph.m_plan.mode) : true)));
+ return true;
+ }
+
+ case SetInt32HeapPredictionIntrinsic: {
+ insertChecks();
+ for (int i = 1; i < argumentCountIncludingThis; ++i) {
+ Node* node = get(virtualRegisterForArgument(i, registerOffset));
+ if (node->hasHeapPrediction())
+ node->setHeapPrediction(SpecInt32);
+ }
+ set(VirtualRegister(resultOperand), addToGraph(JSConstant, OpInfo(m_constantUndefined)));
+ return true;
+ }
+
+ case CheckInt32Intrinsic: {
+ insertChecks();
+ for (int i = 1; i < argumentCountIncludingThis; ++i) {
+ Node* node = get(virtualRegisterForArgument(i, registerOffset));
+ addToGraph(Phantom, Edge(node, Int32Use));
+ }
+ set(VirtualRegister(resultOperand), jsConstant(jsBoolean(true)));
+ return true;
+ }
+
+ case FiatInt52Intrinsic: {
+ if (argumentCountIncludingThis != 2)
+ return false;
+ insertChecks();
+ VirtualRegister operand = virtualRegisterForArgument(1, registerOffset);
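+ // When Int52 is supported, force the value into the Int52 representation; otherwise this
+ // intrinsic is just a pass-through.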
+ if (enableInt52())
+ set(VirtualRegister(resultOperand), addToGraph(FiatInt52, get(operand)));
+ else
+ set(VirtualRegister(resultOperand), get(operand));
+ return true;
+ }
+
+ default:
+ return false;
+ }
+}
+
+template<typename ChecksFunctor>
+bool ByteCodeParser::handleTypedArrayConstructor(
+ int resultOperand, InternalFunction* function, int registerOffset,
+ int argumentCountIncludingThis, TypedArrayType type, const ChecksFunctor& insertChecks)
+{
+ if (!isTypedView(type))
+ return false;
+
+ if (function->classInfo() != constructorClassInfoForType(type))
+ return false;
+
+ if (function->globalObject() != m_inlineStackTop->m_codeBlock->globalObject())
+ return false;
+
+ // We only have an intrinsic for the case where you say:
+ //
+ // new FooArray(blah);
+ //
+ // Of course, 'blah' could be any of the following:
+ //
+ // - Integer, indicating that you want to allocate an array of that length.
+ // This is the thing we're hoping for, and what we can actually do meaningful
+ // optimizations for.
+ //
+ // - Array buffer, indicating that you want to create a view onto that _entire_
+ // buffer.
+ //
+ // - Non-buffer object, indicating that you want to create a copy of that
+ // object by pretending that it quacks like an array.
+ //
+ // - Anything else, indicating that you want to have an exception thrown at
+ // you.
+ //
+ // The intrinsic, NewTypedArray, will behave as if it could do any of these
+ // things up until we do Fixup. Thereafter, if child1 (i.e. 'blah') is
+ // predicted Int32, then we lock it in as a normal typed array allocation.
+ // Otherwise, NewTypedArray turns into a totally opaque function call that
+ // may clobber the world - by virtue of it accessing properties on what could
+ // be an object.
+ //
+ // Note that although the generic form of NewTypedArray sounds sort of awful,
+ // it is actually quite likely to be more efficient than a fully generic
+ // Construct. So, we might want to think about making NewTypedArray variadic,
+ // or else making Construct not super slow.
+
+ if (argumentCountIncludingThis != 2)
+ return false;
+
+ insertChecks();
+ set(VirtualRegister(resultOperand),
+ addToGraph(NewTypedArray, OpInfo(type), get(virtualRegisterForArgument(1, registerOffset))));
+ return true;
+}
+
+template<typename ChecksFunctor>
+bool ByteCodeParser::handleConstantInternalFunction(
+ int resultOperand, InternalFunction* function, int registerOffset,
+ int argumentCountIncludingThis, CodeSpecializationKind kind, const ChecksFunctor& insertChecks)
+{
+ if (verbose)
+ dataLog(" Handling constant internal function ", JSValue(function), "\n");
+
+ // If we ever find that we have a lot of internal functions that we specialize for,
+ // then we should probably have some sort of hashtable dispatch, or maybe even
+ // dispatch straight through the MethodTable of the InternalFunction. But for now,
+ // it seems that this case is hit infrequently enough, and the number of functions
+ // we know about is small enough, that having just a linear cascade of if statements
+ // is good enough.
+
+ if (function->classInfo() == ArrayConstructor::info()) {
+ if (function->globalObject() != m_inlineStackTop->m_codeBlock->globalObject())
+ return false;
+
+ insertChecks();
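+ // new Array(n) with a single argument allocates an array of length n; any other arity
+ // creates an array out of the arguments themselves.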
+ if (argumentCountIncludingThis == 2) {
+ set(VirtualRegister(resultOperand),
+ addToGraph(NewArrayWithSize, OpInfo(ArrayWithUndecided), get(virtualRegisterForArgument(1, registerOffset))));
+ return true;
+ }
+
+ // FIXME: Array constructor should use "this" as newTarget.
+ for (int i = 1; i < argumentCountIncludingThis; ++i)
+ addVarArgChild(get(virtualRegisterForArgument(i, registerOffset)));
+ set(VirtualRegister(resultOperand),
+ addToGraph(Node::VarArg, NewArray, OpInfo(ArrayWithUndecided), OpInfo(0)));
+ return true;
+ }
+
+ if (function->classInfo() == StringConstructor::info()) {
+ insertChecks();
+
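+ // String() with no argument yields the empty string, String(x) converts x, and for
+ // 'new String(...)' we additionally wrap the result in a String object.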
+ Node* result;
+
+ if (argumentCountIncludingThis <= 1)
+ result = jsConstant(m_vm->smallStrings.emptyString());
+ else
+ result = addToGraph(CallStringConstructor, get(virtualRegisterForArgument(1, registerOffset)));
+
+ if (kind == CodeForConstruct)
+ result = addToGraph(NewStringObject, OpInfo(function->globalObject()->stringObjectStructure()), result);
+
+ set(VirtualRegister(resultOperand), result);
+ return true;
+ }
+
+ for (unsigned typeIndex = 0; typeIndex < NUMBER_OF_TYPED_ARRAY_TYPES; ++typeIndex) {
+ bool result = handleTypedArrayConstructor(
+ resultOperand, function, registerOffset, argumentCountIncludingThis,
+ indexToTypedArrayType(typeIndex), insertChecks);
+ if (result)
+ return true;
+ }
+
+ return false;
+}
+
+Node* ByteCodeParser::handleGetByOffset(SpeculatedType prediction, Node* base, const StructureSet& structureSet, unsigned identifierNumber, PropertyOffset offset, NodeType op)
+{
+ if (base->hasConstant()) {
+ if (JSValue constant = m_graph.tryGetConstantProperty(base->asJSValue(), structureSet, offset)) {
+ addToGraph(Phantom, base);
+ return weakJSConstant(constant);
+ }
+ }
+
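+ // Inline offsets live directly in the object cell, so the base itself acts as the property
+ // storage; out-of-line offsets require loading the butterfly first.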
+ Node* propertyStorage;
+ if (isInlineOffset(offset))
+ propertyStorage = base;
+ else
+ propertyStorage = addToGraph(GetButterfly, base);
+
+ StorageAccessData* data = m_graph.m_storageAccessData.add();
+ data->offset = offset;
+ data->identifierNumber = identifierNumber;
+
+ Node* getByOffset = addToGraph(op, OpInfo(data), OpInfo(prediction), propertyStorage, base);
+
+ return getByOffset;
+}
+
+Node* ByteCodeParser::handlePutByOffset(Node* base, unsigned identifier, PropertyOffset offset, Node* value)
+{
+ Node* propertyStorage;
+ if (isInlineOffset(offset))
+ propertyStorage = base;
+ else
+ propertyStorage = addToGraph(GetButterfly, base);
+
+ StorageAccessData* data = m_graph.m_storageAccessData.add();
+ data->offset = offset;
+ data->identifierNumber = identifier;
+
+ Node* result = addToGraph(PutByOffset, OpInfo(data), propertyStorage, base, value);
+
+ return result;
+}
+
+void ByteCodeParser::emitChecks(const ConstantStructureCheckVector& vector)
+{
+ for (unsigned i = 0; i < vector.size(); ++i)
+ cellConstantWithStructureCheck(vector[i].constant(), vector[i].structure());
+}
+
+void ByteCodeParser::handleGetById(
+ int destinationOperand, SpeculatedType prediction, Node* base, unsigned identifierNumber,
+ const GetByIdStatus& getByIdStatus)
+{
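+ // If the access can make calls (e.g. it has a getter), use the Flush variant so that state
+ // is flushed to the stack across the call.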
+ NodeType getById = getByIdStatus.makesCalls() ? GetByIdFlush : GetById;
+
+ if (!getByIdStatus.isSimple() || !getByIdStatus.numVariants() || !Options::enableAccessInlining()) {
+ set(VirtualRegister(destinationOperand),
+ addToGraph(getById, OpInfo(identifierNumber), OpInfo(prediction), base));
+ return;
+ }
+
+ if (getByIdStatus.numVariants() > 1) {
+ if (getByIdStatus.makesCalls() || !isFTL(m_graph.m_plan.mode)
+ || !Options::enablePolymorphicAccessInlining()) {
+ set(VirtualRegister(destinationOperand),
+ addToGraph(getById, OpInfo(identifierNumber), OpInfo(prediction), base));
+ return;
+ }
+
+ if (m_graph.compilation())
+ m_graph.compilation()->noticeInlinedGetById();