/*
- * Copyright (C) 2008 Apple Inc. All rights reserved.
+ * Copyright (C) 2008, 2012 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
#ifndef JITCode_h
#define JITCode_h
-#include <wtf/Platform.h>
-
-#if ENABLE(JIT)
-
+#if ENABLE(JIT) || ENABLE(LLINT)
#include "CallFrame.h"
-#include "JSValue.h"
+#include "Disassembler.h"
+#include "JITStubs.h"
+#include "JSCJSValue.h"
+#include "LegacyProfiler.h"
#include "MacroAssemblerCodeRef.h"
-#include "Profiler.h"
+#endif
namespace JSC {
- class JSGlobalData;
- class RegisterFile;
-
+#if ENABLE(JIT)
+ class VM;
+ class JSStack;
+#endif
+
class JITCode {
+#if ENABLE(JIT) || ENABLE(LLINT)
typedef MacroAssemblerCodeRef CodeRef;
typedef MacroAssemblerCodePtr CodePtr;
+#else
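+ // With neither the JIT nor the LLINT enabled there is no generated code to wrap, so the constructor stays private and the class cannot be instantiated.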
+ JITCode() { }
+#endif
public:
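+ // Records which engine produced a piece of code. InterpreterThunk, BaselineJIT and DFGJIT form the tiering ladder; HostCallThunk marks thunks for native (host) calls.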
+ enum JITType { None, HostCallThunk, InterpreterThunk, BaselineJIT, DFGJIT };
+
+ static JITType bottomTierJIT()
+ {
+ return BaselineJIT;
+ }
+
+ static JITType topTierJIT()
+ {
+ return DFGJIT;
+ }
+
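+ // With only two JIT tiers, the tier above BaselineJIT (and above DFGJIT itself) is always the DFG.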
+ static JITType nextTierJIT(JITType jitType)
+ {
+ ASSERT_UNUSED(jitType, jitType == BaselineJIT || jitType == DFGJIT);
+ return DFGJIT;
+ }
+
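+ // Only the DFG counts as an optimizing JIT; interpreter and baseline code are both "baseline" for tiering purposes.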
+ static bool isOptimizingJIT(JITType jitType)
+ {
+ return jitType == DFGJIT;
+ }
+
+ static bool isBaselineCode(JITType jitType)
+ {
+ return jitType == InterpreterThunk || jitType == BaselineJIT;
+ }
+
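+ // Everything below wraps a real CodeRef, so it exists only when the JIT or the LLINT is enabled.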
+#if ENABLE(JIT) || ENABLE(LLINT)
JITCode()
+ : m_jitType(None)
{
}
- JITCode(const CodeRef ref)
+ JITCode(const CodeRef ref, JITType jitType)
: m_ref(ref)
+ , m_jitType(jitType)
{
+ ASSERT(jitType != None);
}
-
+
bool operator !() const
{
- return !m_ref.m_code.executableAddress();
+ return !m_ref;
}
CodePtr addressForCall()
{
- return m_ref.m_code;
+ return m_ref.code();
+ }
+
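+ // The executable address of a piece of code can differ from its data address (e.g. the Thumb bit on ARMv7), so offsets are applied to each view separately.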
+ void* executableAddressAtOffset(size_t offset) const
+ {
+ ASSERT(offset < size());
+ return reinterpret_cast<char*>(m_ref.code().executableAddress()) + offset;
+ }
+
+ void* executableAddress() const
+ {
+ return executableAddressAtOffset(0);
+ }
+
+ void* dataAddressAtOffset(size_t offset) const
+ {
+ ASSERT(offset <= size()); // use <= instead of < because it is valid to ask for an address at the exclusive end of the code.
+ return reinterpret_cast<char*>(m_ref.code().dataLocation()) + offset;
}
- // This function returns the offset in bytes of 'pointerIntoCode' into
- // block of code. It is ASSERTed that no codeblock >4gb in size.
+ // This function returns the offset in bytes of 'pointerIntoCode' into this
+ // block of code. It is ASSERTed that no code block is >4GB in size.
unsigned offsetOf(void* pointerIntoCode)
{
- intptr_t result = reinterpret_cast<intptr_t>(pointerIntoCode) - reinterpret_cast<intptr_t>(m_ref.m_code.executableAddress());
+ intptr_t result = reinterpret_cast<intptr_t>(pointerIntoCode) - reinterpret_cast<intptr_t>(m_ref.code().executableAddress());
ASSERT(static_cast<intptr_t>(static_cast<unsigned>(result)) == result);
return static_cast<unsigned>(result);
}
+#if ENABLE(JIT)
// Execute the code!
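+ // ctiTrampoline enters the generated code; if the call leaves a pending exception on the VM, the result is discarded and null is returned instead.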
- inline JSValue execute(RegisterFile* registerFile, CallFrame* callFrame, JSGlobalData* globalData, JSValue* exception)
+ inline JSValue execute(JSStack* stack, CallFrame* callFrame, VM* vm)
{
- return JSValue::decode(ctiTrampoline(m_ref.m_code.executableAddress(), registerFile, callFrame, exception, Profiler::enabledProfilerReference(), globalData));
+ JSValue result = JSValue::decode(ctiTrampoline(m_ref.code().executableAddress(), stack, callFrame, 0, 0, vm));
+ return vm->exception ? jsNull() : result;
}
+#endif
- void* start()
+ void* start() const
{
- return m_ref.m_code.dataLocation();
+ return m_ref.code().dataLocation();
}
- size_t size()
+ size_t size() const
{
- ASSERT(m_ref.m_code.executableAddress());
- return m_ref.m_size;
+ ASSERT(m_ref.code().executableAddress());
+ return m_ref.size();
+ }
+
+ bool tryToDisassemble(const char* prefix) const
+ {
+ return m_ref.tryToDisassemble(prefix);
}
- ExecutablePool* getExecutablePool()
+ ExecutableMemoryHandle* getExecutableMemory()
+ {
+ return m_ref.executableMemory();
+ }
+
+ JITType jitType() const
{
- return m_ref.m_executablePool.get();
+ return m_jitType;
}
- // Host functions are a bit special; they have a m_code pointer but they
- // do not individully ref the executable pool containing the trampoline.
+ // Host functions are a bit special; they have a code pointer but they do
+ // not individually ref the executable memory containing the trampoline.
- static JITCode HostFunction(CodePtr code)
+ static JITCode HostFunction(CodeRef code)
{
- return JITCode(code.dataLocation(), 0, 0);
+ return JITCode(code, HostCallThunk);
+ }
+
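+ // Destroy m_ref in place and re-create an empty CodeRef, dropping this JITCode's reference to its executable memory.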
+ void clear()
+ {
+ m_ref.~CodeRef();
+ new (NotNull, &m_ref) CodeRef();
}
private:
- JITCode(void* code, PassRefPtr<ExecutablePool> executablePool, size_t size)
- : m_ref(code, executablePool, size)
+ JITCode(PassRefPtr<ExecutableMemoryHandle> executableMemory, JITType jitType)
+ : m_ref(executableMemory)
+ , m_jitType(jitType)
{
}
CodeRef m_ref;
+ JITType m_jitType;
+#endif // ENABLE(JIT) || ENABLE(LLINT)
};
-};
+} // namespace JSC
-#endif
+namespace WTF {
+
+class PrintStream;
+void printInternal(PrintStream&, JSC::JITCode::JITType);
+
+} // namespace WTF
-#endif
+#endif // JITCode_h