X-Git-Url: https://git.saurik.com/apple/javascriptcore.git/blobdiff_plain/6fe7ccc865dc7d7541b93c5bcaf6368d2c98a174..ed1e77d3adeb83d26fd1dfb16dd84cabdcefd250:/bytecode/CodeOrigin.h?ds=sidebyside

diff --git a/bytecode/CodeOrigin.h b/bytecode/CodeOrigin.h
index eda1764..d1879a3 100644
--- a/bytecode/CodeOrigin.h
+++ b/bytecode/CodeOrigin.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2011 Apple Inc. All rights reserved.
+ * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -26,47 +26,59 @@
 #ifndef CodeOrigin_h
 #define CodeOrigin_h
 
+#include "CodeBlockHash.h"
+#include "CodeSpecializationKind.h"
 #include "ValueRecovery.h"
 #include "WriteBarrier.h"
+#include 
+#include 
+#include 
 #include 
 #include 
 
 namespace JSC {
 
 struct InlineCallFrame;
-class ExecutableBase;
+class ExecState;
+class ScriptExecutable;
 class JSFunction;
 
 struct CodeOrigin {
-    // Bytecode offset that you'd use to re-execute this instruction.
-    unsigned bytecodeIndex : 29;
-    // Bytecode offset corresponding to the opcode that gives the result (needed to handle
-    // op_call/op_call_put_result and op_method_check/op_get_by_id).
-    unsigned valueProfileOffset : 3;
+    static const unsigned invalidBytecodeIndex = UINT_MAX;
+
+    // Bytecode offset that you'd use to re-execute this instruction, and the
+    // bytecode index of the bytecode instruction that produces some result that
+    // you're interested in (used for mapping Nodes whose values you're using
+    // to bytecode instructions that have the appropriate value profile).
+    unsigned bytecodeIndex;
 
     InlineCallFrame* inlineCallFrame;
 
     CodeOrigin()
-        : bytecodeIndex(std::numeric_limits<uint32_t>::max())
-        , valueProfileOffset(0)
+        : bytecodeIndex(invalidBytecodeIndex)
         , inlineCallFrame(0)
     {
     }
 
-    explicit CodeOrigin(unsigned bytecodeIndex, InlineCallFrame* inlineCallFrame = 0, unsigned valueProfileOffset = 0)
+    CodeOrigin(WTF::HashTableDeletedValueType)
+        : bytecodeIndex(invalidBytecodeIndex)
+        , inlineCallFrame(deletedMarker())
+    {
+    }
+
+    explicit CodeOrigin(unsigned bytecodeIndex, InlineCallFrame* inlineCallFrame = 0)
         : bytecodeIndex(bytecodeIndex)
-        , valueProfileOffset(valueProfileOffset)
         , inlineCallFrame(inlineCallFrame)
     {
-        ASSERT(bytecodeIndex < (1u << 29));
-        ASSERT(valueProfileOffset < (1u << 3));
+        ASSERT(bytecodeIndex < invalidBytecodeIndex);
    }
 
-    bool isSet() const { return bytecodeIndex != std::numeric_limits<uint32_t>::max(); }
+    bool isSet() const { return bytecodeIndex != invalidBytecodeIndex; }
+    bool operator!() const { return !isSet(); }
 
-    unsigned bytecodeIndexForValueProfile() const
+    bool isHashTableDeletedValue() const
     {
-        return bytecodeIndex + valueProfileOffset;
+        return bytecodeIndex == invalidBytecodeIndex && !!inlineCallFrame;
     }
 
     // The inline depth is the depth of the inline stack, so 1 = not inlined,
@@ -75,75 +87,209 @@ struct CodeOrigin {
     // If the code origin corresponds to inlined code, gives you the heap object that
     // would have owned the code if it had not been inlined. Otherwise returns 0.
-    ExecutableBase* codeOriginOwner() const;
+    ScriptExecutable* codeOriginOwner() const;
+
+    int stackOffset() const;
 
     static unsigned inlineDepthForCallFrame(InlineCallFrame*);
 
+    unsigned hash() const;
     bool operator==(const CodeOrigin& other) const;
-    bool operator!=(const CodeOrigin& other) const { return !(*this == other); }
 
+    // This checks if the two code origins correspond to the same stack trace snippets,
+    // but ignore whether the InlineCallFrame's are identical.
+    bool isApproximatelyEqualTo(const CodeOrigin& other) const;
+
+    unsigned approximateHash() const;
 
+    // Get the inline stack. This is slow, and is intended for debugging only.
     Vector<CodeOrigin> inlineStack() const;
+
+    void dump(PrintStream&) const;
+    void dumpInContext(PrintStream&, DumpContext*) const;
+
+private:
+    static InlineCallFrame* deletedMarker()
+    {
+        return bitwise_cast<InlineCallFrame*>(static_cast<uintptr_t>(1));
+    }
 };
 
 struct InlineCallFrame {
-    Vector<ValueRecovery> arguments;
-    WriteBarrier<ExecutableBase> executable;
-    WriteBarrier<JSFunction> callee;
+    enum Kind {
+        Call,
+        Construct,
+        CallVarargs,
+        ConstructVarargs,
+
+        // For these, the stackOffset incorporates the argument count plus the true return PC
+        // slot.
+        GetterCall,
+        SetterCall
+    };
+
+    static Kind kindFor(CodeSpecializationKind kind)
+    {
+        switch (kind) {
+        case CodeForCall:
+            return Call;
+        case CodeForConstruct:
+            return Construct;
+        }
+        RELEASE_ASSERT_NOT_REACHED();
+        return Call;
+    }
+
+    static Kind varargsKindFor(CodeSpecializationKind kind)
+    {
+        switch (kind) {
+        case CodeForCall:
+            return CallVarargs;
+        case CodeForConstruct:
+            return ConstructVarargs;
+        }
+        RELEASE_ASSERT_NOT_REACHED();
+        return Call;
+    }
+
+    static CodeSpecializationKind specializationKindFor(Kind kind)
+    {
+        switch (kind) {
+        case Call:
+        case CallVarargs:
+        case GetterCall:
+        case SetterCall:
+            return CodeForCall;
+        case Construct:
+        case ConstructVarargs:
+            return CodeForConstruct;
+        }
+        RELEASE_ASSERT_NOT_REACHED();
+        return CodeForCall;
+    }
+
+    static bool isVarargs(Kind kind)
+    {
+        switch (kind) {
+        case CallVarargs:
+        case ConstructVarargs:
+            return true;
+        default:
+            return false;
+        }
+    }
+    bool isVarargs() const
+    {
+        return isVarargs(static_cast<Kind>(kind));
+    }
+
+    Vector<ValueRecovery> arguments; // Includes 'this'.
+    WriteBarrier<ScriptExecutable> executable;
+    ValueRecovery calleeRecovery;
     CodeOrigin caller;
-    unsigned stackOffset : 31;
-    bool isCall : 1;
-};
-
-struct CodeOriginAtCallReturnOffset {
-    CodeOrigin codeOrigin;
-    unsigned callReturnOffset;
+    signed stackOffset : 28;
+    unsigned kind : 3; // real type is Kind
+    bool isClosureCall : 1; // If false then we know that callee/scope are constants and the DFG won't treat them as variables, i.e. they have to be recovered manually.
+    VirtualRegister argumentCountRegister; // Only set when we inline a varargs call.
+
+    // There is really no good notion of a "default" set of values for
+    // InlineCallFrame's fields. This constructor is here just to reduce confusion if
+    // we forgot to initialize explicitly.
+    InlineCallFrame()
+        : stackOffset(0)
+        , kind(Call)
+        , isClosureCall(false)
+    {
+    }
+
+    CodeSpecializationKind specializationKind() const { return specializationKindFor(static_cast<Kind>(kind)); }
+
+    JSFunction* calleeConstant() const;
+    void visitAggregate(SlotVisitor&);
+
+    // Get the callee given a machine call frame to which this InlineCallFrame belongs.
+    JSFunction* calleeForCallFrame(ExecState*) const;
+
+    CString inferredName() const;
+    CodeBlockHash hash() const;
+    CString hashAsStringIfPossible() const;
+
+    CodeBlock* baselineCodeBlock() const;
+
+    void setStackOffset(signed offset)
+    {
+        stackOffset = offset;
+        RELEASE_ASSERT(static_cast<signed>(stackOffset) == offset);
+    }
+
+    ptrdiff_t callerFrameOffset() const { return stackOffset * sizeof(Register) + CallFrame::callerFrameOffset(); }
+    ptrdiff_t returnPCOffset() const { return stackOffset * sizeof(Register) + CallFrame::returnPCOffset(); }
+
+    void dumpBriefFunctionInformation(PrintStream&) const;
+    void dump(PrintStream&) const;
+    void dumpInContext(PrintStream&, DumpContext*) const;
+
+    MAKE_PRINT_METHOD(InlineCallFrame, dumpBriefFunctionInformation, briefFunctionInformation);
 };
 
-inline unsigned CodeOrigin::inlineDepthForCallFrame(InlineCallFrame* inlineCallFrame)
+inline int CodeOrigin::stackOffset() const
 {
-    unsigned result = 1;
-    for (InlineCallFrame* current = inlineCallFrame; current; current = current->caller.inlineCallFrame)
-        result++;
-    return result;
+    if (!inlineCallFrame)
+        return 0;
+
+    return inlineCallFrame->stackOffset;
 }
 
-inline unsigned CodeOrigin::inlineDepth() const
+inline unsigned CodeOrigin::hash() const
 {
-    return inlineDepthForCallFrame(inlineCallFrame);
+    return WTF::IntHash<unsigned>::hash(bytecodeIndex) +
+        WTF::PtrHash<InlineCallFrame*>::hash(inlineCallFrame);
 }
-
+
 inline bool CodeOrigin::operator==(const CodeOrigin& other) const
 {
     return bytecodeIndex == other.bytecodeIndex
         && inlineCallFrame == other.inlineCallFrame;
 }
 
-// Get the inline stack. This is slow, and is intended for debugging only.
-inline Vector<CodeOrigin> CodeOrigin::inlineStack() const
-{
-    Vector<CodeOrigin> result(inlineDepth());
-    result.last() = *this;
-    unsigned index = result.size() - 2;
-    for (InlineCallFrame* current = inlineCallFrame; current; current = current->caller.inlineCallFrame)
-        result[index--] = current->caller;
-    return result;
-}
-
-inline unsigned getCallReturnOffsetForCodeOrigin(CodeOriginAtCallReturnOffset* data)
-{
-    return data->callReturnOffset;
-}
-
-inline ExecutableBase* CodeOrigin::codeOriginOwner() const
+inline ScriptExecutable* CodeOrigin::codeOriginOwner() const
 {
     if (!inlineCallFrame)
         return 0;
     return inlineCallFrame->executable.get();
 }
 
+struct CodeOriginHash {
+    static unsigned hash(const CodeOrigin& key) { return key.hash(); }
+    static bool equal(const CodeOrigin& a, const CodeOrigin& b) { return a == b; }
+    static const bool safeToCompareToEmptyOrDeleted = true;
+};
+
+struct CodeOriginApproximateHash {
+    static unsigned hash(const CodeOrigin& key) { return key.approximateHash(); }
+    static bool equal(const CodeOrigin& a, const CodeOrigin& b) { return a.isApproximatelyEqualTo(b); }
+    static const bool safeToCompareToEmptyOrDeleted = true;
+};
+
 } // namespace JSC
 
+namespace WTF {
+
+void printInternal(PrintStream&, JSC::InlineCallFrame::Kind);
+
+template<typename T> struct DefaultHash;
+template<> struct DefaultHash<JSC::CodeOrigin> {
+    typedef JSC::CodeOriginHash Hash;
+};
+
+template<typename T> struct HashTraits;
+template<> struct HashTraits<JSC::CodeOrigin> : SimpleClassHashTraits<JSC::CodeOrigin> {
+    static const bool emptyValueIsZero = false;
+};
+
+} // namespace WTF
+
 #endif // CodeOrigin_h
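
Note on the new hashing support (illustrative, not part of the diff): the CodeOriginHash functor together with the DefaultHash and HashTraits specializations added at the bottom of the header lets a CodeOrigin be used directly as a WTF::HashMap key, with the default-constructed (unset) origin acting as the empty value and the WTF::HashTableDeletedValueType constructor supplying the deleted value. The sketch below assumes it is compiled inside a JavaScriptCore build where this header and WTF's HashMap are available; recordOrigin is a hypothetical helper, not code from the tree.

// Illustrative sketch only: assumes a JavaScriptCore build where CodeOrigin.h and
// WTF's HashMap are available. recordOrigin is a hypothetical helper, not tree code.
#include "CodeOrigin.h"
#include <wtf/HashMap.h>

namespace JSC {

// Tally how often each CodeOrigin is seen. HashMap<CodeOrigin, unsigned> works
// because DefaultHash<CodeOrigin> resolves to CodeOriginHash, and
// HashTraits<CodeOrigin> uses the default-constructed (unset) origin as the empty
// value and the deletedMarker() origin as the deleted value.
static void recordOrigin(HashMap<CodeOrigin, unsigned>& counts, CodeOrigin origin)
{
    if (!origin.isSet())
        return; // The unset origin is the table's empty value, so it cannot be used as a key.

    auto result = counts.add(origin, 0); // add() returns an AddResult holding an iterator.
    result.iterator->value++;
}

} // namespace JSC

CodeOriginApproximateHash follows the same functor shape and could presumably be passed as the hash-functor template argument of a HashMap when origins that differ only in their InlineCallFrame pointers should collapse to one key.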