    // Grows the method-call link-info list to n entries. Only valid when the
    // JIT is usable (asserted), since these records exist for JIT-linked calls.
    void addMethodCallLinkInfos(unsigned n) { ASSERT(m_globalData->canUseJIT()); m_methodCallLinkInfos.grow(n); }
    // Indexed access to a method-call link-info record.
    MethodCallLinkInfo& methodCallLinkInfo(int index) { return m_methodCallLinkInfos[index]; }
    // Number of method-call link-info records currently allocated.
    size_t numberOfMethodCallLinkInfos() { return m_methodCallLinkInfos.size(); }
-#endif
-
-#if ENABLE(VALUE_PROFILER)
- unsigned numberOfArgumentValueProfiles()
- {
- ASSERT(m_numParameters >= 0);
- ASSERT(m_argumentValueProfiles.size() == static_cast<unsigned>(m_numParameters));
- return m_argumentValueProfiles.size();
- }
- ValueProfile* valueProfileForArgument(unsigned argumentIndex)
- {
- ValueProfile* result = &m_argumentValueProfiles[argumentIndex];
- ASSERT(result->m_bytecodeOffset == -1);
- return result;
- }
-
- ValueProfile* addValueProfile(int bytecodeOffset)
- {
- ASSERT(bytecodeOffset != -1);
- ASSERT(m_valueProfiles.isEmpty() || m_valueProfiles.last().m_bytecodeOffset < bytecodeOffset);
- m_valueProfiles.append(ValueProfile(bytecodeOffset));
- return &m_valueProfiles.last();
- }
    // Count of instruction value profiles (argument profiles excluded).
    unsigned numberOfValueProfiles() { return m_valueProfiles.size(); }
- ValueProfile* valueProfile(int index)
- {
- ValueProfile* result = &m_valueProfiles[index];
- ASSERT(result->m_bytecodeOffset != -1);
- return result;
- }
    // Finds the ValueProfile for the instruction at the given bytecode offset
    // by binary search (m_valueProfiles is kept offset-sorted by
    // addValueProfile). Debug-only checks confirm the result is an
    // instruction profile (offset != -1) and that it matches the profile
    // pointer stored in the instruction's last operand slot
    // (offset + opcodeLength - 1).
    ValueProfile* valueProfileForBytecodeOffset(int bytecodeOffset)
    {
        ValueProfile* result = WTF::genericBinarySearch<ValueProfile, int, getValueProfileBytecodeOffset>(m_valueProfiles, m_valueProfiles.size(), bytecodeOffset);
        ASSERT(result->m_bytecodeOffset != -1);
        ASSERT(instructions()[bytecodeOffset + opcodeLength(
            m_globalData->interpreter->getOpcodeID(
                instructions()[
                    bytecodeOffset].u.opcode)) - 1].u.profile == result);
        return result;
    }
- PredictedType valueProfilePredictionForBytecodeOffset(int bytecodeOffset)
- {
- return valueProfileForBytecodeOffset(bytecodeOffset)->computeUpdatedPrediction();
- }
-
- unsigned totalNumberOfValueProfiles()
- {
- return numberOfArgumentValueProfiles() + numberOfValueProfiles();
- }
- ValueProfile* getFromAllValueProfiles(unsigned index)
- {
- if (index < numberOfArgumentValueProfiles())
- return valueProfileForArgument(index);
- return valueProfile(index - numberOfArgumentValueProfiles());
- }
-
- RareCaseProfile* addRareCaseProfile(int bytecodeOffset)
- {
- m_rareCaseProfiles.append(RareCaseProfile(bytecodeOffset));
- return &m_rareCaseProfiles.last();
- }
    // Count of rare-case (slow-path) counters recorded so far.
    unsigned numberOfRareCaseProfiles() { return m_rareCaseProfiles.size(); }
    // Direct indexed access into the rare-case profile list.
    RareCaseProfile* rareCaseProfile(int index) { return &m_rareCaseProfiles[index]; }
    // Binary-searches the rare-case profiles for the given bytecode offset.
    // Assumes m_rareCaseProfiles is non-empty (callers guard with
    // numberOfRareCaseProfiles()) and offset-sorted — presumably guaranteed
    // by append order in addRareCaseProfile; TODO confirm.
    RareCaseProfile* rareCaseProfileForBytecodeOffset(int bytecodeOffset)
    {
        return WTF::genericBinarySearch<RareCaseProfile, int, getRareCaseProfileBytecodeOffset>(m_rareCaseProfiles, m_rareCaseProfiles.size(), bytecodeOffset);
    }
-
- bool likelyToTakeSlowCase(int bytecodeOffset)
- {
- if (!numberOfRareCaseProfiles())
- return false;
- unsigned value = rareCaseProfileForBytecodeOffset(bytecodeOffset)->m_counter;
- return value >= Options::likelyToTakeSlowCaseMinimumCount && static_cast<double>(value) / m_executionEntryCount >= Options::likelyToTakeSlowCaseThreshold;
- }
-
- bool couldTakeSlowCase(int bytecodeOffset)
- {
- if (!numberOfRareCaseProfiles())
- return false;
- unsigned value = rareCaseProfileForBytecodeOffset(bytecodeOffset)->m_counter;
- return value >= Options::couldTakeSlowCaseMinimumCount && static_cast<double>(value) / m_executionEntryCount >= Options::couldTakeSlowCaseThreshold;
- }
-
- RareCaseProfile* addSpecialFastCaseProfile(int bytecodeOffset)
- {
- m_specialFastCaseProfiles.append(RareCaseProfile(bytecodeOffset));
- return &m_specialFastCaseProfiles.last();
- }
    // Count of special-fast-case counters recorded so far.
    unsigned numberOfSpecialFastCaseProfiles() { return m_specialFastCaseProfiles.size(); }
    // Direct indexed access into the special-fast-case profile list.
    RareCaseProfile* specialFastCaseProfile(int index) { return &m_specialFastCaseProfiles[index]; }
    // Binary-searches the special-fast-case profiles for the given bytecode
    // offset. Assumes a non-empty, offset-sorted m_specialFastCaseProfiles —
    // presumably guaranteed by append order in addSpecialFastCaseProfile;
    // TODO confirm.
    RareCaseProfile* specialFastCaseProfileForBytecodeOffset(int bytecodeOffset)
    {
        return WTF::genericBinarySearch<RareCaseProfile, int, getRareCaseProfileBytecodeOffset>(m_specialFastCaseProfiles, m_specialFastCaseProfiles.size(), bytecodeOffset);
    }
-
- bool likelyToTakeSpecialFastCase(int bytecodeOffset)
- {
- if (!numberOfRareCaseProfiles())
- return false;
- unsigned specialFastCaseCount = specialFastCaseProfileForBytecodeOffset(bytecodeOffset)->m_counter;
- return specialFastCaseCount >= Options::likelyToTakeSlowCaseMinimumCount && static_cast<double>(specialFastCaseCount) / m_executionEntryCount >= Options::likelyToTakeSlowCaseThreshold;
- }
-
- bool likelyToTakeDeepestSlowCase(int bytecodeOffset)
- {
- if (!numberOfRareCaseProfiles())
- return false;
- unsigned slowCaseCount = rareCaseProfileForBytecodeOffset(bytecodeOffset)->m_counter;
- unsigned specialFastCaseCount = specialFastCaseProfileForBytecodeOffset(bytecodeOffset)->m_counter;
- unsigned value = slowCaseCount - specialFastCaseCount;
- return value >= Options::likelyToTakeSlowCaseMinimumCount && static_cast<double>(value) / m_executionEntryCount >= Options::likelyToTakeSlowCaseThreshold;
- }
-
- bool likelyToTakeAnySlowCase(int bytecodeOffset)
- {
- if (!numberOfRareCaseProfiles())
- return false;
- unsigned slowCaseCount = rareCaseProfileForBytecodeOffset(bytecodeOffset)->m_counter;
- unsigned specialFastCaseCount = specialFastCaseProfileForBytecodeOffset(bytecodeOffset)->m_counter;
- unsigned value = slowCaseCount + specialFastCaseCount;
- return value >= Options::likelyToTakeSlowCaseMinimumCount && static_cast<double>(value) / m_executionEntryCount >= Options::likelyToTakeSlowCaseThreshold;
- }
-
    // Number of recorded entries into this code block — the denominator used
    // by the slow-case ratio heuristics above.
    unsigned executionEntryCount() const { return m_executionEntryCount; }
-#endif