git.saurik.com Git - apple/javascriptcore.git/blobdiff - dfg/DFGJITCompiler.h
JavaScriptCore-1218.35.tar.gz
[apple/javascriptcore.git] / dfg / DFGJITCompiler.h
index 7ed9a2fe4044f22b28468bfc9a0188e4783c7324..85a752ef38d9eb6a5c2a3b21e3dc941b7730dde6 100644 (file)
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2011 Apple Inc. All rights reserved.
+ * Copyright (C) 2011, 2013 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
 
 #if ENABLE(DFG_JIT)
 
-#include <assembler/MacroAssembler.h>
-#include <bytecode/CodeBlock.h>
-#include <dfg/DFGGraph.h>
-#include <dfg/DFGRegisterBank.h>
-#include <jit/JITCode.h>
-
-#include <dfg/DFGFPRInfo.h>
-#include <dfg/DFGGPRInfo.h>
+#include "CodeBlock.h"
+#include "DFGCCallHelpers.h"
+#include "DFGDisassembler.h"
+#include "DFGFPRInfo.h"
+#include "DFGGPRInfo.h"
+#include "DFGGraph.h"
+#include "DFGOSRExitCompilationInfo.h"
+#include "DFGRegisterBank.h"
+#include "DFGRegisterSet.h"
+#include "JITCode.h"
+#include "LinkBuffer.h"
+#include "MacroAssembler.h"
 
 namespace JSC {
 
 class AbstractSamplingCounter;
 class CodeBlock;
-class JSGlobalData;
+class VM;
 
 namespace DFG {
 
 class JITCodeGenerator;
-class NonSpeculativeJIT;
+class NodeToRegisterMap;
+class OSRExitJumpPlaceholder;
+class SlowPathGenerator;
 class SpeculativeJIT;
 class SpeculationRecovery;
 
 struct EntryLocation;
-struct SpeculationCheck;
+struct OSRExit;
 
-// === CallRecord ===
+// === CallLinkRecord ===
 //
-// A record of a call out from JIT code to a helper function.
-// Every CallRecord contains a reference to the call instruction & the function
-// that it needs to be linked to. Calls that might throw an exception also record
-// the Jump taken on exception (unset if not present), and ExceptionInfo (presently
-// an unsigned, bytecode index) used to recover handler/source info.
-struct CallRecord {
-    // Constructor for a call with no exception handler.
-    CallRecord(MacroAssembler::Call call, FunctionPtr function)
+// A record of a call out from JIT code that needs linking to a helper function.
+// Every CallLinkRecord contains a reference to the call instruction & the function
+// that it needs to be linked to.
+struct CallLinkRecord {
+    CallLinkRecord(MacroAssembler::Call call, FunctionPtr function)
         : m_call(call)
         , m_function(function)
     {
     }
 
-    // Constructor for a call with an exception handler.
-    CallRecord(MacroAssembler::Call call, FunctionPtr function, MacroAssembler::Jump exceptionCheck, ExceptionInfo exceptionInfo)
+    MacroAssembler::Call m_call;
+    FunctionPtr m_function;
+};
+
+class CallBeginToken {
+public:
+    CallBeginToken()
+#if !ASSERT_DISABLED
+        : m_registered(false)
+        , m_exceptionCheckIndex(std::numeric_limits<unsigned>::max())
+#endif
+    {
+    }
+    
+    ~CallBeginToken()
+    {
+        ASSERT(m_registered || !m_codeOrigin.isSet());
+        ASSERT(m_codeOrigin.isSet() == (m_exceptionCheckIndex != std::numeric_limits<unsigned>::max()));
+    }
+    
+    void set(CodeOrigin codeOrigin, unsigned index)
+    {
+#if !ASSERT_DISABLED
+        ASSERT(m_registered || !m_codeOrigin.isSet());
+        ASSERT(m_codeOrigin.isSet() == (m_exceptionCheckIndex != std::numeric_limits<unsigned>::max()));
+        m_codeOrigin = codeOrigin;
+        m_registered = false;
+        m_exceptionCheckIndex = index;
+#else
+        UNUSED_PARAM(codeOrigin);
+        UNUSED_PARAM(index);
+#endif
+    }
+    
+    void registerWithExceptionCheck(CodeOrigin codeOrigin, unsigned index)
+    {
+#if !ASSERT_DISABLED
+        ASSERT(m_codeOrigin == codeOrigin);
+        if (m_registered)
+            return;
+        ASSERT(m_exceptionCheckIndex == index);
+        m_registered = true;
+#else
+        UNUSED_PARAM(codeOrigin);
+        UNUSED_PARAM(index);
+#endif
+    }
+
+#if !ASSERT_DISABLED
+    const CodeOrigin& codeOrigin() const
+    {
+        return m_codeOrigin;
+    }
+#endif
+    
+private:
+#if !ASSERT_DISABLED
+    CodeOrigin m_codeOrigin;
+    bool m_registered;
+    unsigned m_exceptionCheckIndex;
+#endif
+};
+
+// === CallExceptionRecord ===
+//
+// A record of a call out from JIT code that might throw an exception.
+// Calls that might throw an exception also record the Jump taken on exception
+// (unset if not present) and code origin used to recover handler/source info.
+struct CallExceptionRecord {
+    CallExceptionRecord(MacroAssembler::Call call, CodeOrigin codeOrigin)
+        : m_call(call)
+        , m_codeOrigin(codeOrigin)
+    {
+    }
+
+    CallExceptionRecord(MacroAssembler::Call call, MacroAssembler::Jump exceptionCheck, CodeOrigin codeOrigin)
         : m_call(call)
-        , m_function(function)
         , m_exceptionCheck(exceptionCheck)
-        , m_exceptionInfo(exceptionInfo)
+        , m_codeOrigin(codeOrigin)
     {
     }
 
     MacroAssembler::Call m_call;
-    FunctionPtr m_function;
     MacroAssembler::Jump m_exceptionCheck;
-    ExceptionInfo m_exceptionInfo;
+    CodeOrigin m_codeOrigin;
+};
+
+struct PropertyAccessRecord {
+    enum RegisterMode { RegistersFlushed, RegistersInUse };
+    
+#if USE(JSVALUE64)
+    PropertyAccessRecord(
+        CodeOrigin codeOrigin,
+        MacroAssembler::DataLabelPtr structureImm,
+        MacroAssembler::PatchableJump structureCheck,
+        MacroAssembler::ConvertibleLoadLabel propertyStorageLoad,
+        MacroAssembler::DataLabelCompact loadOrStore,
+        SlowPathGenerator* slowPathGenerator,
+        MacroAssembler::Label done,
+        int8_t baseGPR,
+        int8_t valueGPR,
+        const RegisterSet& usedRegisters,
+        RegisterMode registerMode = RegistersInUse)
+#elif USE(JSVALUE32_64)
+    PropertyAccessRecord(
+        CodeOrigin codeOrigin,
+        MacroAssembler::DataLabelPtr structureImm,
+        MacroAssembler::PatchableJump structureCheck,
+        MacroAssembler::ConvertibleLoadLabel propertyStorageLoad,
+        MacroAssembler::DataLabelCompact tagLoadOrStore,
+        MacroAssembler::DataLabelCompact payloadLoadOrStore,
+        SlowPathGenerator* slowPathGenerator,
+        MacroAssembler::Label done,
+        int8_t baseGPR,
+        int8_t valueTagGPR,
+        int8_t valueGPR,
+        const RegisterSet& usedRegisters,
+        RegisterMode registerMode = RegistersInUse)
+#endif
+        : m_codeOrigin(codeOrigin)
+        , m_structureImm(structureImm)
+        , m_structureCheck(structureCheck)
+        , m_propertyStorageLoad(propertyStorageLoad)
+#if USE(JSVALUE64)
+        , m_loadOrStore(loadOrStore)
+#elif USE(JSVALUE32_64)
+        , m_tagLoadOrStore(tagLoadOrStore)
+        , m_payloadLoadOrStore(payloadLoadOrStore)
+#endif
+        , m_slowPathGenerator(slowPathGenerator)
+        , m_done(done)
+        , m_baseGPR(baseGPR)
+#if USE(JSVALUE32_64)
+        , m_valueTagGPR(valueTagGPR)
+#endif
+        , m_valueGPR(valueGPR)
+        , m_usedRegisters(usedRegisters)
+        , m_registerMode(registerMode)
+    {
+    }
+
+    CodeOrigin m_codeOrigin;
+    MacroAssembler::DataLabelPtr m_structureImm;
+    MacroAssembler::PatchableJump m_structureCheck;
+    MacroAssembler::ConvertibleLoadLabel m_propertyStorageLoad;
+#if USE(JSVALUE64)
+    MacroAssembler::DataLabelCompact m_loadOrStore;
+#elif USE(JSVALUE32_64)
+    MacroAssembler::DataLabelCompact m_tagLoadOrStore;
+    MacroAssembler::DataLabelCompact m_payloadLoadOrStore;
+#endif
+    SlowPathGenerator* m_slowPathGenerator;
+    MacroAssembler::Label m_done;
+    int8_t m_baseGPR;
+#if USE(JSVALUE32_64)
+    int8_t m_valueTagGPR;
+#endif
+    int8_t m_valueGPR;
+    RegisterSet m_usedRegisters;
+    RegisterMode m_registerMode;
 };
 
 // === JITCompiler ===
@@ -91,166 +241,234 @@ struct CallRecord {
 // relationship). The JITCompiler holds references to information required during
 // compilation, and also records information used in linking (e.g. a list of all
 // call to be linked).
-class JITCompiler : public MacroAssembler {
+class JITCompiler : public CCallHelpers {
 public:
-    JITCompiler(JSGlobalData* globalData, Graph& dfg, CodeBlock* codeBlock)
-        : m_globalData(globalData)
-        , m_graph(dfg)
-        , m_codeBlock(codeBlock)
-    {
-    }
-
-    void compileFunction(JITCode& entry, MacroAssemblerCodePtr& entryWithArityCheck);
+    JITCompiler(Graph& dfg);
+    
+    bool compile(JITCode& entry);
+    bool compileFunction(JITCode& entry, MacroAssemblerCodePtr& entryWithArityCheck);
 
     // Accessors for properties.
     Graph& graph() { return m_graph; }
-    CodeBlock* codeBlock() { return m_codeBlock; }
-    JSGlobalData* globalData() { return m_globalData; }
-
-#if CPU(X86_64)
-    void preserveReturnAddressAfterCall(GPRReg reg)
+    
+    // Methods to set labels for the disassembler.
+    void setStartOfCode()
     {
-        pop(reg);
+        if (LIKELY(!m_disassembler))
+            return;
+        m_disassembler->setStartOfCode(labelIgnoringWatchpoints());
     }
-
-    void restoreReturnAddressBeforeReturn(GPRReg reg)
+    
+    void setForBlock(BlockIndex blockIndex)
     {
-        push(reg);
+        if (LIKELY(!m_disassembler))
+            return;
+        m_disassembler->setForBlock(blockIndex, labelIgnoringWatchpoints());
     }
-
-    void restoreReturnAddressBeforeReturn(Address address)
+    
+    void setForNode(Node* node)
     {
-        push(address);
+        if (LIKELY(!m_disassembler))
+            return;
+        m_disassembler->setForNode(node, labelIgnoringWatchpoints());
     }
-
-    void emitGetFromCallFrameHeaderPtr(RegisterFile::CallFrameHeaderEntry entry, GPRReg to)
+    
+    void setEndOfMainPath()
     {
-        loadPtr(Address(GPRInfo::callFrameRegister, entry * sizeof(Register)), to);
+        if (LIKELY(!m_disassembler))
+            return;
+        m_disassembler->setEndOfMainPath(labelIgnoringWatchpoints());
     }
-    void emitPutToCallFrameHeader(GPRReg from, RegisterFile::CallFrameHeaderEntry entry)
+    
+    void setEndOfCode()
     {
-        storePtr(from, Address(GPRInfo::callFrameRegister, entry * sizeof(Register)));
+        if (LIKELY(!m_disassembler))
+            return;
+        m_disassembler->setEndOfCode(labelIgnoringWatchpoints());
     }
-
-    void emitPutImmediateToCallFrameHeader(void* value, RegisterFile::CallFrameHeaderEntry entry)
+    
+    unsigned currentCodeOriginIndex() const
     {
-        storePtr(TrustedImmPtr(value), Address(GPRInfo::callFrameRegister, entry * sizeof(Register)));
+        return m_currentCodeOriginIndex;
     }
-#endif
-
-    static Address addressForGlobalVar(GPRReg global, int32_t varNumber)
+    
+    // Get a token for beginning a call, and set the current code origin index in
+    // the call frame. For each beginCall() there must be at least one exception
+    // check, and all of the exception checks must have the same CodeOrigin as the
+    // beginCall().
+    void beginCall(CodeOrigin codeOrigin, CallBeginToken& token)
     {
-        return Address(global, varNumber * sizeof(Register));
+        unsigned index = m_exceptionChecks.size();
+        store32(TrustedImm32(index), tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));
+        token.set(codeOrigin, index);
     }
 
-    static Address addressFor(VirtualRegister virtualRegister)
+    // Notify the JIT of a call that does not require linking.
+    void notifyCall(Call functionCall, CodeOrigin codeOrigin, CallBeginToken& token)
     {
-        return Address(GPRInfo::callFrameRegister, virtualRegister * sizeof(Register));
+        token.registerWithExceptionCheck(codeOrigin, m_exceptionChecks.size());
+        m_exceptionChecks.append(CallExceptionRecord(functionCall, codeOrigin));
     }
 
-    static Address tagFor(VirtualRegister virtualRegister)
+    // Add a call out from JIT code, without an exception check.
+    Call appendCall(const FunctionPtr& function)
     {
-        return Address(GPRInfo::callFrameRegister, virtualRegister * sizeof(Register) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
+        Call functionCall = call();
+        m_calls.append(CallLinkRecord(functionCall, function));
+        return functionCall;
     }
-
-    static Address payloadFor(VirtualRegister virtualRegister)
+    
+    void prepareForExceptionCheck()
     {
-        return Address(GPRInfo::callFrameRegister, virtualRegister * sizeof(Register) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
+        move(TrustedImm32(m_exceptionChecks.size()), GPRInfo::nonPreservedNonReturnGPR);
     }
 
-    // Add a call out from JIT code, without an exception check.
-    void appendCall(const FunctionPtr& function)
+    // Add a call out from JIT code, with an exception check.
+    void addExceptionCheck(Call functionCall, CodeOrigin codeOrigin, CallBeginToken& token)
     {
-        m_calls.append(CallRecord(call(), function));
-        // FIXME: should be able to JIT_ASSERT here that globalData->exception is null on return back to JIT code.
+        prepareForExceptionCheck();
+        token.registerWithExceptionCheck(codeOrigin, m_exceptionChecks.size());
+        m_exceptionChecks.append(CallExceptionRecord(functionCall, emitExceptionCheck(), codeOrigin));
     }
-
-    // Add a call out from JIT code, with an exception check.
-    void appendCallWithExceptionCheck(const FunctionPtr& function, unsigned exceptionInfo)
+    
+    // Add a call out from JIT code, with a fast exception check that tests if the return value is zero.
+    void addFastExceptionCheck(Call functionCall, CodeOrigin codeOrigin, CallBeginToken& token)
     {
-        Call functionCall = call();
-        Jump exceptionCheck = branchTestPtr(NonZero, AbsoluteAddress(&globalData()->exception));
-        m_calls.append(CallRecord(functionCall, function, exceptionCheck, exceptionInfo));
+        prepareForExceptionCheck();
+        Jump exceptionCheck = branchTestPtr(Zero, GPRInfo::returnValueGPR);
+        token.registerWithExceptionCheck(codeOrigin, m_exceptionChecks.size());
+        m_exceptionChecks.append(CallExceptionRecord(functionCall, exceptionCheck, codeOrigin));
+    }
+    
+    void appendExitInfo(MacroAssembler::JumpList jumpsToFail = MacroAssembler::JumpList())
+    {
+        OSRExitCompilationInfo info;
+        info.m_failureJumps = jumpsToFail;
+        m_exitCompilationInfo.append(info);
     }
 
-    // Helper methods to check nodes for constants.
-    bool isConstant(NodeIndex nodeIndex)
+#if USE(JSVALUE32_64)
+    void* addressOfDoubleConstant(Node* node)
     {
-        return graph()[nodeIndex].isConstant();
+        ASSERT(m_graph.isNumberConstant(node));
+        unsigned constantIndex = node->constantNumber();
+        return &(codeBlock()->constantRegister(FirstConstantRegisterIndex + constantIndex));
     }
-    bool isInt32Constant(NodeIndex nodeIndex)
+#endif
+
+    void addPropertyAccess(const PropertyAccessRecord& record)
     {
-        return graph()[nodeIndex].op == Int32Constant;
+        m_propertyAccesses.append(record);
     }
-    bool isDoubleConstant(NodeIndex nodeIndex)
+
+    void addJSCall(Call fastCall, Call slowCall, DataLabelPtr targetToCheck, CallLinkInfo::CallType callType, GPRReg callee, CodeOrigin codeOrigin)
     {
-        return graph()[nodeIndex].op == DoubleConstant;
+        m_jsCalls.append(JSCallRecord(fastCall, slowCall, targetToCheck, callType, callee, codeOrigin));
     }
-    bool isJSConstant(NodeIndex nodeIndex)
+    
+    void addWeakReference(JSCell* target)
     {
-        return graph()[nodeIndex].op == JSConstant;
+        m_codeBlock->appendWeakReference(target);
     }
-
-    // Helper methods get constant values from nodes.
-    int32_t valueOfInt32Constant(NodeIndex nodeIndex)
+    
+    void addWeakReferences(const StructureSet& structureSet)
     {
-        ASSERT(isInt32Constant(nodeIndex));
-        return graph()[nodeIndex].int32Constant();
+        for (unsigned i = structureSet.size(); i--;)
+            addWeakReference(structureSet[i]);
     }
-    double valueOfDoubleConstant(NodeIndex nodeIndex)
+    
+    void addWeakReferenceTransition(JSCell* codeOrigin, JSCell* from, JSCell* to)
     {
-        ASSERT(isDoubleConstant(nodeIndex));
-        return graph()[nodeIndex].numericConstant();
+        m_codeBlock->appendWeakReferenceTransition(codeOrigin, from, to);
     }
-    JSValue valueOfJSConstant(NodeIndex nodeIndex)
+    
+    template<typename T>
+    Jump branchWeakPtr(RelationalCondition cond, T left, JSCell* weakPtr)
     {
-        ASSERT(isJSConstant(nodeIndex));
-        unsigned constantIndex = graph()[nodeIndex].constantNumber();
-        return codeBlock()->constantRegister(FirstConstantRegisterIndex + constantIndex).get();
+        Jump result = branchPtr(cond, left, TrustedImmPtr(weakPtr));
+        addWeakReference(weakPtr);
+        return result;
     }
-
-    // These methods JIT generate dynamic, debug-only checks - akin to ASSERTs.
-#if DFG_JIT_ASSERT
-    void jitAssertIsInt32(GPRReg);
-    void jitAssertIsJSInt32(GPRReg);
-    void jitAssertIsJSNumber(GPRReg);
-    void jitAssertIsJSDouble(GPRReg);
+    
+    void noticeOSREntry(BasicBlock& basicBlock, JITCompiler::Label blockHead, LinkBuffer& linkBuffer)
+    {
+#if DFG_ENABLE(OSR_ENTRY)
+        // OSR entry is not allowed into blocks deemed unreachable by control flow analysis.
+        if (!basicBlock.cfaHasVisited)
+            return;
+        
+        OSREntryData* entry = codeBlock()->appendDFGOSREntryData(basicBlock.bytecodeBegin, linkBuffer.offsetOf(blockHead));
+        
+        entry->m_expectedValues = basicBlock.valuesAtHead;
+        
+        // Fix the expected values: in our protocol, a dead variable will have an expected
+        // value of (None, []). But the old JIT may stash some values there. So we really
+        // need (Top, TOP).
+        for (size_t argument = 0; argument < basicBlock.variablesAtHead.numberOfArguments(); ++argument) {
+            Node* node = basicBlock.variablesAtHead.argument(argument);
+            if (!node || !node->shouldGenerate())
+                entry->m_expectedValues.argument(argument).makeTop();
+        }
+        for (size_t local = 0; local < basicBlock.variablesAtHead.numberOfLocals(); ++local) {
+            Node* node = basicBlock.variablesAtHead.local(local);
+            if (!node || !node->shouldGenerate())
+                entry->m_expectedValues.local(local).makeTop();
+            else if (node->variableAccessData()->shouldUseDoubleFormat())
+                entry->m_localsForcedDouble.set(local);
+        }
 #else
-    void jitAssertIsInt32(GPRReg) {}
-    void jitAssertIsJSInt32(GPRReg) {}
-    void jitAssertIsJSNumber(GPRReg) {}
-    void jitAssertIsJSDouble(GPRReg) {}
-#endif
-
-#if ENABLE(SAMPLING_COUNTERS)
-    // Debug profiling tool.
-    void emitCount(AbstractSamplingCounter&, uint32_t increment = 1);
-#endif
-
-#if ENABLE(SAMPLING_FLAGS)
-    void setSamplingFlag(int32_t flag);
-    void clearSamplingFlag(int32_t flag);
+        UNUSED_PARAM(basicBlock);
+        UNUSED_PARAM(blockHead);
+        UNUSED_PARAM(linkBuffer);
 #endif
+    }
 
 private:
-    // These methods used in linking the speculative & non-speculative paths together.
-    void fillNumericToDouble(NodeIndex, FPRReg, GPRReg temporary);
-    void fillInt32ToInteger(NodeIndex, GPRReg);
-    void fillToJS(NodeIndex, GPRReg);
-    void jumpFromSpeculativeToNonSpeculative(const SpeculationCheck&, const EntryLocation&, SpeculationRecovery*);
-    void linkSpeculationChecks(SpeculativeJIT&, NonSpeculativeJIT&);
-
-    // The globalData, used to access constants such as the vPtrs.
-    JSGlobalData* m_globalData;
-
+    friend class OSRExitJumpPlaceholder;
+    
+    // Internal implementation to compile.
+    void compileEntry();
+    void compileBody(SpeculativeJIT&);
+    void link(LinkBuffer&);
+
+    void exitSpeculativeWithOSR(const OSRExit&, SpeculationRecovery*);
+    void compileExceptionHandlers();
+    void linkOSRExits();
+    
     // The dataflow graph currently being generated.
     Graph& m_graph;
 
-    // The codeBlock currently being generated, used to access information such as constant values, immediates.
-    CodeBlock* m_codeBlock;
-
+    OwnPtr<Disassembler> m_disassembler;
+    
     // Vector of calls out from JIT code, including exception handler information.
-    Vector<CallRecord> m_calls;
+// m_exceptionChecks records the subset of calls that require exception checks.
+    Vector<CallLinkRecord> m_calls;
+    Vector<CallExceptionRecord> m_exceptionChecks;
+    
+    struct JSCallRecord {
+        JSCallRecord(Call fastCall, Call slowCall, DataLabelPtr targetToCheck, CallLinkInfo::CallType callType, GPRReg callee, CodeOrigin codeOrigin)
+            : m_fastCall(fastCall)
+            , m_slowCall(slowCall)
+            , m_targetToCheck(targetToCheck)
+            , m_callType(callType)
+            , m_callee(callee)
+            , m_codeOrigin(codeOrigin)
+        {
+        }
+        
+        Call m_fastCall;
+        Call m_slowCall;
+        DataLabelPtr m_targetToCheck;
+        CallLinkInfo::CallType m_callType;
+        GPRReg m_callee;
+        CodeOrigin m_codeOrigin;
+    };
+    
+    Vector<PropertyAccessRecord, 4> m_propertyAccesses;
+    Vector<JSCallRecord, 4> m_jsCalls;
+    Vector<OSRExitCompilationInfo> m_exitCompilationInfo;
+    Vector<Vector<Label> > m_exitSiteLabels;
+    unsigned m_currentCodeOriginIndex;
 };
 
 } } // namespace JSC::DFG