+
+ default:
+ RELEASE_ASSERT_NOT_REACHED();
+ break;
+ }
+}
+#endif // ENABLE(DFG_JIT)
+
+#if ENABLE(VERBOSE_VALUE_PROFILE)
+void CodeBlock::dumpValueProfiles()
+{
+ dataLog("ValueProfile for ", *this, ":\n");
+ for (unsigned i = 0; i < totalNumberOfValueProfiles(); ++i) {
+ ValueProfile* profile = getFromAllValueProfiles(i);
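+ // A negative bytecode offset (always -1) marks a profile for an incoming argument rather than a bytecode op.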
+ if (profile->m_bytecodeOffset < 0) {
+ ASSERT(profile->m_bytecodeOffset == -1);
+ dataLogF(" arg = %u: ", i);
+ } else
+ dataLogF(" bc = %d: ", profile->m_bytecodeOffset);
+ if (!profile->numberOfSamples() && profile->m_prediction == SpecNone) {
+ dataLogF("<empty>\n");
+ continue;
+ }
+ profile->dump(WTF::dataFile());
+ dataLogF("\n");
+ }
+ dataLog("RareCaseProfile for ", *this, ":\n");
+ for (unsigned i = 0; i < numberOfRareCaseProfiles(); ++i) {
+ RareCaseProfile* profile = rareCaseProfile(i);
+ dataLogF(" bc = %d: %u\n", profile->m_bytecodeOffset, profile->m_counter);
+ }
+ dataLog("SpecialFastCaseProfile for ", *this, ":\n");
+ for (unsigned i = 0; i < numberOfSpecialFastCaseProfiles(); ++i) {
+ RareCaseProfile* profile = specialFastCaseProfile(i);
+ dataLogF(" bc = %d: %u\n", profile->m_bytecodeOffset, profile->m_counter);
+ }
+}
+#endif // ENABLE(VERBOSE_VALUE_PROFILE)
+
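+ // How many registers the frame needs depends on the tier that produced this code: the LLInt and baseline JIT compute the count from the code block, while the DFG and FTL record it at compile time.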
+unsigned CodeBlock::frameRegisterCount()
+{
+ switch (jitType()) {
+ case JITCode::InterpreterThunk:
+ return LLInt::frameRegisterCountFor(this);
+
+#if ENABLE(JIT)
+ case JITCode::BaselineJIT:
+ return JIT::frameRegisterCountFor(this);
+#endif // ENABLE(JIT)
+
+#if ENABLE(DFG_JIT)
+ case JITCode::DFGJIT:
+ case JITCode::FTLJIT:
+ return jitCode()->dfgCommon()->frameRegisterCount;
+#endif // ENABLE(DFG_JIT)
+
+ default:
+ RELEASE_ASSERT_NOT_REACHED();
+ return 0;
+ }
+}
+
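+ // Locals occupy negative offsets from the frame pointer, so the last local marks the low end of the frame; its offset is where the stack pointer should point.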
+int CodeBlock::stackPointerOffset()
+{
+ return virtualRegisterForLocal(frameRegisterCount() - 1).offset();
+}
+
+size_t CodeBlock::predictedMachineCodeSize()
+{
+ // This will be called from CodeBlock::CodeBlock before either m_vm or the
+ // instructions have been initialized. It's OK to return 0 because what will really
+ // matter is the recomputation of this value when the slow path is triggered.
+ if (!m_vm)
+ return 0;
+
+ if (!m_vm->machineCodeBytesPerBytecodeWordForBaselineJIT)
+ return 0; // It's as good a prediction as we'll get.
+
+ // Be conservative: return a size that will be an overestimation 84% of the time.
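+ // (If the bytes-per-bytecode-word ratio is roughly normally distributed, the mean plus one standard deviation exceeds about 84% of samples, since Phi(1) is approximately 0.8413.)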
+ double multiplier = m_vm->machineCodeBytesPerBytecodeWordForBaselineJIT.mean() +
+ m_vm->machineCodeBytesPerBytecodeWordForBaselineJIT.standardDeviation();
+
+ // Be paranoid: silently reject bogus multipliers. Silently doing the "wrong" thing
+ // here is OK, since this whole method is just a heuristic.
+ if (multiplier < 0 || multiplier > 1000)
+ return 0;
+
+ double doubleResult = multiplier * m_instructions.size();
+
+ // Be even more paranoid: silently reject values that won't fit into a size_t. If
+ // the function is so huge that we can't even fit it into virtual memory, then we
+ // should probably have some other guards in place to prevent us from even getting
+ // to this point.
+ if (doubleResult > std::numeric_limits<size_t>::max())
+ return 0;
+
+ return static_cast<size_t>(doubleResult);
+}
+
+bool CodeBlock::usesOpcode(OpcodeID opcodeID)
+{
+ Interpreter* interpreter = vm()->interpreter;
+ Instruction* instructionsBegin = instructions().begin();
+ unsigned instructionCount = instructions().size();
+
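+ // Bytecode instructions are variable-length, so walk them by advancing by each opcode's length rather than stepping one slot at a time.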
+ for (unsigned bytecodeOffset = 0; bytecodeOffset < instructionCount; ) {
+ switch (interpreter->getOpcodeID(instructionsBegin[bytecodeOffset].u.opcode)) {
+#define DEFINE_OP(curOpcode, length) \
+ case curOpcode: \
+ if (curOpcode == opcodeID) \
+ return true; \
+ bytecodeOffset += length; \
+ break;
+ FOR_EACH_OPCODE_ID(DEFINE_OP)
+#undef DEFINE_OP
+ default:
+ RELEASE_ASSERT_NOT_REACHED();
+ break;
+ }
+ }
+
+ return false;
+}
+
+String CodeBlock::nameForRegister(VirtualRegister virtualRegister)
+{
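+ // First look for a named variable in the symbol table; fall back to the well-known special registers below.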
+ ConcurrentJITLocker locker(symbolTable()->m_lock);
+ SymbolTable::Map::iterator end = symbolTable()->end(locker);
+ for (SymbolTable::Map::iterator ptr = symbolTable()->begin(locker); ptr != end; ++ptr) {
+ if (ptr->value.getIndex() == virtualRegister.offset()) {
+ // FIXME: This won't work from the compilation thread.
+ // https://bugs.webkit.org/show_bug.cgi?id=115300
+ return String(ptr->key);
+ }
+ }
+ if (needsActivation() && virtualRegister == activationRegister())
+ return ASCIILiteral("activation");
+ if (virtualRegister == thisRegister())
+ return ASCIILiteral("this");
+ if (usesArguments()) {
+ if (virtualRegister == argumentsRegister())
+ return ASCIILiteral("arguments");
+ if (unmodifiedArgumentsRegister(argumentsRegister()) == virtualRegister)
+ return ASCIILiteral("real arguments");
+ }
+ if (virtualRegister.isArgument())
+ return String::format("arguments[%3d]", virtualRegister.toArgument()).impl();
+
+ return "";
+}
+
+namespace {
+
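+ // Functor for bytecode def/use analysis: flags any bytecode that defines a constant register, or that writes to a captured variable from an opcode not expected to do so.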
+struct VerifyCapturedDef {
+ void operator()(CodeBlock* codeBlock, Instruction* instruction, OpcodeID opcodeID, int operand)
+ {
+ unsigned bytecodeOffset = instruction - codeBlock->instructions().begin();
+
+ if (codeBlock->isConstantRegisterIndex(operand)) {
+ codeBlock->beginValidationDidFail();
+ dataLog(" At bc#", bytecodeOffset, " encountered a definition of a constant.\n");
+ codeBlock->endValidationDidFail();
+ return;
+ }
+
+ switch (opcodeID) {
+ case op_enter:
+ case op_captured_mov:
+ case op_init_lazy_reg:
+ case op_create_arguments:
+ case op_new_captured_func:
+ return;
+ default:
+ break;
+ }
+
+ VirtualRegister virtualReg(operand);
+ if (!virtualReg.isLocal())
+ return;
+
+ if (codeBlock->captureCount() && codeBlock->symbolTable()->isCaptured(operand)) {
+ codeBlock->beginValidationDidFail();
+ dataLog(" At bc#", bytecodeOffset, " encountered invalid assignment to captured variable loc", virtualReg.toLocal(), ".\n");
+ codeBlock->endValidationDidFail();
+ return;
+ }
+ }
+};
+
+} // anonymous namespace
+
+void CodeBlock::validate()
+{
+ BytecodeLivenessAnalysis liveness(this); // Compute directly from scratch so it doesn't affect the CodeBlock's footprint.
+
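+ // At bytecode offset 0 nothing has been defined yet, so only captured variables should be live at the head.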
+ FastBitVector liveAtHead = liveness.getLivenessInfoAtBytecodeOffset(0);
+
+ if (liveAtHead.numBits() != static_cast<size_t>(m_numCalleeRegisters)) {
+ beginValidationDidFail();
+ dataLog(" Wrong number of bits in result!\n");
+ dataLog(" Result: ", liveAtHead, "\n");
+ dataLog(" Bit count: ", liveAtHead.numBits(), "\n");
+ endValidationDidFail();
+ }
+
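+ // Check every local: captured variables must be live at the head, and everything else must be dead.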
+ for (unsigned i = m_numCalleeRegisters; i--;) {
+ bool isCaptured = false;
+ VirtualRegister reg = virtualRegisterForLocal(i);
+
+ if (captureCount())
+ isCaptured = reg.offset() <= captureStart() && reg.offset() > captureEnd();
+
+ if (isCaptured) {
+ if (!liveAtHead.get(i)) {
+ beginValidationDidFail();
+ dataLog(" Variable loc", i, " is expected to be live because it is captured, but it isn't live.\n");
+ dataLog(" Result: ", liveAtHead, "\n");
+ endValidationDidFail();
+ }
+ } else {
+ if (liveAtHead.get(i)) {
+ beginValidationDidFail();
+ dataLog(" Variable loc", i, " is expected to be dead.\n");
+ dataLog(" Result: ", liveAtHead, "\n");
+ endValidationDidFail();
+ }
+ }