diff --git a/bytecode/GetByIdStatus.cpp b/bytecode/GetByIdStatus.cpp
index db4aa9b99f1cecff01b64f424c4f86408485638c..235cdbd7247667e31a017572d802ef012217b7bf 100644
--- a/bytecode/GetByIdStatus.cpp
+++ b/bytecode/GetByIdStatus.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2012 Apple Inc. All rights reserved.
+ * Copyright (C) 2012, 2013, 2014 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
 #include "GetByIdStatus.h"
 
 #include "CodeBlock.h"
+#include "JSCInlines.h"
 #include "JSScope.h"
 #include "LLIntData.h"
 #include "LowLevelInterpreter.h"
-#include "Operations.h"
+#include "PolymorphicGetByIdList.h"
+#include <wtf/ListDump.h>
 
 namespace JSC {
 
-GetByIdStatus GetByIdStatus::computeFromLLInt(CodeBlock* profiledBlock, unsigned bytecodeIndex, Identifier& ident)
+bool GetByIdStatus::appendVariant(const GetByIdVariant& variant)
+{
+    for (unsigned i = 0; i < m_variants.size(); ++i) {
+        if (m_variants[i].structureSet().overlaps(variant.structureSet()))
+            return false;
+    }
+    m_variants.append(variant);
+    return true;
+}
+
+#if ENABLE(DFG_JIT)
+bool GetByIdStatus::hasExitSite(const ConcurrentJITLocker& locker, CodeBlock* profiledBlock, unsigned bytecodeIndex, ExitingJITType jitType)
+{
+    return profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadCache, jitType))
+        || profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadCacheWatchpoint, jitType))
+        || profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadWeakConstantCache, jitType))
+        || profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadWeakConstantCacheWatchpoint, jitType));
+}
+#endif
+
+GetByIdStatus GetByIdStatus::computeFromLLInt(CodeBlock* profiledBlock, unsigned bytecodeIndex, StringImpl* uid)
 {
     UNUSED_PARAM(profiledBlock);
     UNUSED_PARAM(bytecodeIndex);
-    UNUSED_PARAM(ident);
-#if ENABLE(LLINT)
+    UNUSED_PARAM(uid);
     Instruction* instruction = profiledBlock->instructions().begin() + bytecodeIndex;
     
-    if (instruction[0].u.opcode == LLInt::getOpcode(llint_op_get_array_length))
+    if (instruction[0].u.opcode == LLInt::getOpcode(op_get_array_length))
         return GetByIdStatus(NoInformation, false);
 
     Structure* structure = instruction[4].u.structure.get();
     if (!structure)
         return GetByIdStatus(NoInformation, false);
-    
+
+    if (structure->takesSlowPathInDFGForImpureProperty())
+        return GetByIdStatus(NoInformation, false);
+
     unsigned attributesIgnored;
     JSCell* specificValue;
-    PropertyOffset offset = structure->get(
-        *profiledBlock->vm(), ident, attributesIgnored, specificValue);
+    PropertyOffset offset = structure->getConcurrently(
+        *profiledBlock->vm(), uid, attributesIgnored, specificValue);
     if (structure->isDictionary())
         specificValue = 0;
     if (!isValidOffset(offset))
         return GetByIdStatus(NoInformation, false);
     
-    return GetByIdStatus(Simple, false, StructureSet(structure), offset, specificValue);
-#else
-    return GetByIdStatus(NoInformation, false);
-#endif
+    return GetByIdStatus(Simple, false, GetByIdVariant(StructureSet(structure), offset, specificValue));
 }
 
-void GetByIdStatus::computeForChain(GetByIdStatus& result, CodeBlock* profiledBlock, Identifier& ident, Structure* structure)
+bool GetByIdStatus::computeForChain(CodeBlock* profiledBlock, StringImpl* uid, PassRefPtr<IntendedStructureChain> passedChain)
 {
-#if ENABLE(JIT) && ENABLE(VALUE_PROFILER)
+#if ENABLE(JIT)
+    RefPtr<IntendedStructureChain> chain = passedChain;
+    
     // Validate the chain. If the chain is invalid, then currently the best thing
     // we can do is to assume that TakesSlow is true. In the future, it might be
     // worth exploring reifying the structure chain from the structure we've got
@@ -77,213 +100,294 @@ void GetByIdStatus::computeForChain(GetByIdStatus& result, CodeBlock* profiledBl
     // have now is that if the structure chain has changed between when it was
     // cached on in the baseline JIT and when the DFG tried to inline the access,
     // then we fall back on a polymorphic access.
-    Structure* currentStructure = structure;
-    JSObject* currentObject = 0;
-    for (unsigned i = 0; i < result.m_chain.size(); ++i) {
-        ASSERT(!currentStructure->isDictionary());
-        currentObject = asObject(currentStructure->prototypeForLookup(profiledBlock));
-        currentStructure = result.m_chain[i];
-        if (currentObject->structure() != currentStructure)
-            return;
+    if (!chain->isStillValid())
+        return false;
+
+    if (chain->head()->takesSlowPathInDFGForImpureProperty())
+        return false;
+    size_t chainSize = chain->size();
+    for (size_t i = 0; i < chainSize; i++) {
+        if (chain->at(i)->takesSlowPathInDFGForImpureProperty())
+            return false;
     }
+
+    JSObject* currentObject = chain->terminalPrototype();
+    Structure* currentStructure = chain->last();
+    
+    ASSERT_UNUSED(currentObject, currentObject);
     
-    ASSERT(currentObject);
-        
     unsigned attributesIgnored;
     JSCell* specificValue;
-        
-    result.m_offset = currentStructure->get(
-        *profiledBlock->vm(), ident, attributesIgnored, specificValue);
+    
+    PropertyOffset offset = currentStructure->getConcurrently(
+        *profiledBlock->vm(), uid, attributesIgnored, specificValue);
     if (currentStructure->isDictionary())
         specificValue = 0;
-    if (!isValidOffset(result.m_offset))
-        return;
-        
-    result.m_structureSet.add(structure);
-    result.m_specificValue = JSValue(specificValue);
-#else
-    UNUSED_PARAM(result);
+    if (!isValidOffset(offset))
+        return false;
+    
+    return appendVariant(GetByIdVariant(StructureSet(chain->head()), offset, specificValue, chain));
+#else // ENABLE(JIT)
     UNUSED_PARAM(profiledBlock);
-    UNUSED_PARAM(ident);
-    UNUSED_PARAM(structure);
+    UNUSED_PARAM(uid);
+    UNUSED_PARAM(passedChain);
     UNREACHABLE_FOR_PLATFORM();
-#endif
+    return false;
+#endif // ENABLE(JIT)
 }
 
-GetByIdStatus GetByIdStatus::computeFor(CodeBlock* profiledBlock, unsigned bytecodeIndex, Identifier& ident)
+GetByIdStatus GetByIdStatus::computeFor(CodeBlock* profiledBlock, StubInfoMap& map, unsigned bytecodeIndex, StringImpl* uid)
 {
-    UNUSED_PARAM(profiledBlock);
-    UNUSED_PARAM(bytecodeIndex);
-    UNUSED_PARAM(ident);
-#if ENABLE(JIT) && ENABLE(VALUE_PROFILER)
-    if (!profiledBlock->numberOfStructureStubInfos())
-        return computeFromLLInt(profiledBlock, bytecodeIndex, ident);
+    ConcurrentJITLocker locker(profiledBlock->m_lock);
+
+    GetByIdStatus result;
+
+#if ENABLE(DFG_JIT)
+    result = computeForStubInfo(
+        locker, profiledBlock, map.get(CodeOrigin(bytecodeIndex)), uid);
     
-    // First check if it makes either calls, in which case we want to be super careful, or
-    // if it's not set at all, in which case we punt.
-    StructureStubInfo& stubInfo = profiledBlock->getStubInfo(bytecodeIndex);
-    if (!stubInfo.seen)
-        return computeFromLLInt(profiledBlock, bytecodeIndex, ident);
+    if (!result.takesSlowPath()
+        && (hasExitSite(locker, profiledBlock, bytecodeIndex)
+            || profiledBlock->likelyToTakeSlowCase(bytecodeIndex)))
+        return GetByIdStatus(TakesSlowPath, true);
+#else
+    UNUSED_PARAM(map);
+#endif
+
+    if (!result)
+        return computeFromLLInt(profiledBlock, bytecodeIndex, uid);
+    
+    return result;
+}
+
+#if ENABLE(JIT)
+GetByIdStatus GetByIdStatus::computeForStubInfo(
+    const ConcurrentJITLocker&, CodeBlock* profiledBlock, StructureStubInfo* stubInfo,
+    StringImpl* uid)
+{
+    if (!stubInfo || !stubInfo->seen)
+        return GetByIdStatus(NoInformation);
     
-    if (stubInfo.resetByGC)
+    if (stubInfo->resetByGC)
         return GetByIdStatus(TakesSlowPath, true);
 
-    PolymorphicAccessStructureList* list;
-    int listSize;
-    switch (stubInfo.accessType) {
-    case access_get_by_id_self_list:
-        list = stubInfo.u.getByIdSelfList.structureList;
-        listSize = stubInfo.u.getByIdSelfList.listSize;
-        break;
-    case access_get_by_id_proto_list:
-        list = stubInfo.u.getByIdProtoList.structureList;
-        listSize = stubInfo.u.getByIdProtoList.listSize;
-        break;
-    default:
-        list = 0;
-        listSize = 0;
-        break;
-    }
-    for (int i = 0; i < listSize; ++i) {
-        if (!list->list[i].isDirect)
+    PolymorphicGetByIdList* list = 0;
+    if (stubInfo->accessType == access_get_by_id_list) {
+        list = stubInfo->u.getByIdList.list;
+        bool makesCalls = false;
+        bool isWatched = false;
+        for (unsigned i = 0; i < list->size(); ++i) {
+            const GetByIdAccess& access = list->at(i);
+            if (access.doesCalls()) {
+                makesCalls = true;
+                break;
+            }
+            if (access.isWatched()) {
+                isWatched = true;
+                continue;
+            }
+        }
+        if (makesCalls)
             return GetByIdStatus(MakesCalls, true);
+        if (isWatched)
+            return GetByIdStatus(TakesSlowPath, true);
     }
     
-    // Next check if it takes slow case, in which case we want to be kind of careful.
-    if (profiledBlock->likelyToTakeSlowCase(bytecodeIndex))
-        return GetByIdStatus(TakesSlowPath, true);
-    
     // Finally figure out if we can derive an access strategy.
     GetByIdStatus result;
+    result.m_state = Simple;
     result.m_wasSeenInJIT = true; // This is interesting for bytecode dumping only.
-    switch (stubInfo.accessType) {
+    switch (stubInfo->accessType) {
     case access_unset:
-        return computeFromLLInt(profiledBlock, bytecodeIndex, ident);
+        return GetByIdStatus(NoInformation);
         
     case access_get_by_id_self: {
-        Structure* structure = stubInfo.u.getByIdSelf.baseObjectStructure.get();
+        Structure* structure = stubInfo->u.getByIdSelf.baseObjectStructure.get();
+        if (structure->takesSlowPathInDFGForImpureProperty())
+            return GetByIdStatus(TakesSlowPath, true);
         unsigned attributesIgnored;
         JSCell* specificValue;
-        result.m_offset = structure->get(
-            *profiledBlock->vm(), ident, attributesIgnored, specificValue);
+        GetByIdVariant variant;
+        variant.m_offset = structure->getConcurrently(
+            *profiledBlock->vm(), uid, attributesIgnored, specificValue);
+        if (!isValidOffset(variant.m_offset))
+            return GetByIdStatus(TakesSlowPath, true);
+        
         if (structure->isDictionary())
             specificValue = 0;
         
-        if (isValidOffset(result.m_offset)) {
-            result.m_structureSet.add(structure);
-            result.m_specificValue = JSValue(specificValue);
-        }
-        
-        if (isValidOffset(result.m_offset))
-            ASSERT(result.m_structureSet.size());
-        break;
+        variant.m_structureSet.add(structure);
+        variant.m_specificValue = JSValue(specificValue);
+        result.appendVariant(variant);
+        return result;
     }
         
-    case access_get_by_id_self_list: {
-        for (int i = 0; i < listSize; ++i) {
-            ASSERT(list->list[i].isDirect);
+    case access_get_by_id_list: {
+        for (unsigned listIndex = 0; listIndex < list->size(); ++listIndex) {
+            ASSERT(list->at(listIndex).isSimple());
+            
+            Structure* structure = list->at(listIndex).structure();
+            
+            // FIXME: We should assert that we never see a structure that
+            // hasImpureGetOwnPropertySlot() but for which we don't
+            // newImpurePropertyFiresWatchpoints(). We're not at a point where we can do
+            // that, yet.
+            // https://bugs.webkit.org/show_bug.cgi?id=131810
             
-            Structure* structure = list->list[i].base.get();
-            if (result.m_structureSet.contains(structure))
+            if (structure->takesSlowPathInDFGForImpureProperty())
+                return GetByIdStatus(TakesSlowPath, true);
+            
+            if (list->at(listIndex).chain()) {
+                RefPtr<IntendedStructureChain> chain = adoptRef(new IntendedStructureChain(
+                    profiledBlock, structure, list->at(listIndex).chain(),
+                    list->at(listIndex).chainCount()));
+                if (!result.computeForChain(profiledBlock, uid, chain))
+                    return GetByIdStatus(TakesSlowPath, true);
                 continue;
+            }
             
             unsigned attributesIgnored;
             JSCell* specificValue;
-            PropertyOffset myOffset = structure->get(
-                *profiledBlock->vm(), ident, attributesIgnored, specificValue);
+            PropertyOffset myOffset = structure->getConcurrently(
+                *profiledBlock->vm(), uid, attributesIgnored, specificValue);
             if (structure->isDictionary())
                 specificValue = 0;
             
-            if (!isValidOffset(myOffset)) {
-                result.m_offset = invalidOffset;
+            if (!isValidOffset(myOffset))
+                return GetByIdStatus(TakesSlowPath, true);
+
+            bool found = false;
+            for (unsigned variantIndex = 0; variantIndex < result.m_variants.size(); ++variantIndex) {
+                GetByIdVariant& variant = result.m_variants[variantIndex];
+                if (variant.m_chain)
+                    continue;
+                
+                if (variant.m_offset != myOffset)
+                    continue;
+
+                found = true;
+                if (variant.m_structureSet.contains(structure))
+                    break;
+                
+                if (variant.m_specificValue != JSValue(specificValue))
+                    variant.m_specificValue = JSValue();
+                
+                variant.m_structureSet.add(structure);
                 break;
             }
-                    
-            if (!i) {
-                result.m_offset = myOffset;
-                result.m_specificValue = JSValue(specificValue);
-            } else if (result.m_offset != myOffset) {
-                result.m_offset = invalidOffset;
-                break;
-            } else if (result.m_specificValue != JSValue(specificValue))
-                result.m_specificValue = JSValue();
             
-            result.m_structureSet.add(structure);
+            if (found)
+                continue;
+            
+            if (!result.appendVariant(GetByIdVariant(StructureSet(structure), myOffset, specificValue)))
+                return GetByIdStatus(TakesSlowPath, true);
         }
-                    
-        if (isValidOffset(result.m_offset))
-            ASSERT(result.m_structureSet.size());
-        break;
-    }
         
-    case access_get_by_id_proto: {
-        if (!stubInfo.u.getByIdProto.isDirect)
-            return GetByIdStatus(MakesCalls, true);
-        result.m_chain.append(stubInfo.u.getByIdProto.prototypeStructure.get());
-        computeForChain(
-            result, profiledBlock, ident,
-            stubInfo.u.getByIdProto.baseObjectStructure.get());
-        break;
+        return result;
     }
         
     case access_get_by_id_chain: {
-        if (!stubInfo.u.getByIdChain.isDirect)
+        if (!stubInfo->u.getByIdChain.isDirect)
             return GetByIdStatus(MakesCalls, true);
-        for (unsigned i = 0; i < stubInfo.u.getByIdChain.count; ++i)
-            result.m_chain.append(stubInfo.u.getByIdChain.chain->head()[i].get());
-        computeForChain(
-            result, profiledBlock, ident,
-            stubInfo.u.getByIdChain.baseObjectStructure.get());
-        break;
+        RefPtr<IntendedStructureChain> chain = adoptRef(new IntendedStructureChain(
+            profiledBlock,
+            stubInfo->u.getByIdChain.baseObjectStructure.get(),
+            stubInfo->u.getByIdChain.chain.get(),
+            stubInfo->u.getByIdChain.count));
+        if (result.computeForChain(profiledBlock, uid, chain))
+            return result;
+        return GetByIdStatus(TakesSlowPath, true);
     }
         
     default:
-        ASSERT(!isValidOffset(result.m_offset));
-        break;
+        return GetByIdStatus(TakesSlowPath, true);
     }
     
-    if (!isValidOffset(result.m_offset)) {
-        result.m_state = TakesSlowPath;
-        result.m_structureSet.clear();
-        result.m_chain.clear();
-        result.m_specificValue = JSValue();
-    } else
-        result.m_state = Simple;
-    
-    return result;
-#else // ENABLE(JIT)
-    return GetByIdStatus(NoInformation, false);
+    RELEASE_ASSERT_NOT_REACHED();
+    return GetByIdStatus();
+}
 #endif // ENABLE(JIT)
+
+GetByIdStatus GetByIdStatus::computeFor(
+    CodeBlock* profiledBlock, CodeBlock* dfgBlock, StubInfoMap& baselineMap,
+    StubInfoMap& dfgMap, CodeOrigin codeOrigin, StringImpl* uid)
+{
+#if ENABLE(DFG_JIT)
+    if (dfgBlock) {
+        GetByIdStatus result;
+        {
+            ConcurrentJITLocker locker(dfgBlock->m_lock);
+            result = computeForStubInfo(locker, dfgBlock, dfgMap.get(codeOrigin), uid);
+        }
+        
+        if (result.takesSlowPath())
+            return result;
+    
+        {
+            ConcurrentJITLocker locker(profiledBlock->m_lock);
+            if (hasExitSite(locker, profiledBlock, codeOrigin.bytecodeIndex, ExitFromFTL))
+                return GetByIdStatus(TakesSlowPath, true);
+        }
+        
+        if (result.isSet())
+            return result;
+    }
+#else
+    UNUSED_PARAM(dfgBlock);
+    UNUSED_PARAM(dfgMap);
+#endif
+
+    return computeFor(profiledBlock, baselineMap, codeOrigin.bytecodeIndex, uid);
 }
 
-GetByIdStatus GetByIdStatus::computeFor(VM& vm, Structure* structure, Identifier& ident)
+GetByIdStatus GetByIdStatus::computeFor(VM& vm, Structure* structure, StringImpl* uid)
 {
     // For now we only handle the super simple self access case. We could handle the
     // prototype case in the future.
     
-    if (PropertyName(ident).asIndex() != PropertyName::NotAnIndex)
+    if (!structure)
+        return GetByIdStatus(TakesSlowPath);
+
+    if (toUInt32FromStringImpl(uid) != PropertyName::NotAnIndex)
         return GetByIdStatus(TakesSlowPath);
     
-    if (structure->typeInfo().overridesGetOwnPropertySlot())
+    if (structure->typeInfo().overridesGetOwnPropertySlot() && structure->typeInfo().type() != GlobalObjectType)
         return GetByIdStatus(TakesSlowPath);
     
     if (!structure->propertyAccessesAreCacheable())
         return GetByIdStatus(TakesSlowPath);
-    
-    GetByIdStatus result;
-    result.m_wasSeenInJIT = false; // To my knowledge nobody that uses computeFor(VM&, Structure*, Identifier&) reads this field, but I might as well be honest: no, it wasn't seen in the JIT, since I computed it statically.
+
     unsigned attributes;
     JSCell* specificValue;
-    result.m_offset = structure->get(vm, ident, attributes, specificValue);
-    if (!isValidOffset(result.m_offset))
+    PropertyOffset offset = structure->getConcurrently(vm, uid, attributes, specificValue);
+    if (!isValidOffset(offset))
         return GetByIdStatus(TakesSlowPath); // It's probably a prototype lookup. Give up on life for now, even though we could totally be way smarter about it.
     if (attributes & Accessor)
         return GetByIdStatus(MakesCalls);
     if (structure->isDictionary())
         specificValue = 0;
-    result.m_structureSet.add(structure);
-    result.m_specificValue = JSValue(specificValue);
-    return result;
+    return GetByIdStatus(
+        Simple, false, GetByIdVariant(StructureSet(structure), offset, specificValue));
+}
+
+void GetByIdStatus::dump(PrintStream& out) const
+{
+    out.print("(");
+    switch (m_state) {
+    case NoInformation:
+        out.print("NoInformation");
+        break;
+    case Simple:
+        out.print("Simple");
+        break;
+    case TakesSlowPath:
+        out.print("TakesSlowPath");
+        break;
+    case MakesCalls:
+        out.print("MakesCalls");
+        break;
+    }
+    out.print(", ", listDump(m_variants), ", seenInJIT = ", m_wasSeenInJIT, ")");
 }
 
 } // namespace JSC
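
The core shape of the new code above: a GetByIdStatus now carries a list of GetByIdVariants, and appendVariant() keeps that list canonical by rejecting any variant whose structure set overlaps one already recorded; callers that see the rejection fall back to TakesSlowPath. Below is a minimal, self-contained C++ sketch of that invariant only, using hypothetical SimpleStructureSet/SimpleVariant stand-ins rather than the real JSC classes.

    // Minimal sketch of the disjoint-variant invariant enforced by appendVariant().
    // SimpleStructureSet and SimpleVariant are hypothetical stand-ins, not JSC types.
    #include <algorithm>
    #include <cstdio>
    #include <vector>

    struct SimpleStructureSet {
        std::vector<int> ids; // stand-in for a set of Structure* pointers

        bool overlaps(const SimpleStructureSet& other) const
        {
            for (int id : ids) {
                if (std::find(other.ids.begin(), other.ids.end(), id) != other.ids.end())
                    return true;
            }
            return false;
        }
    };

    struct SimpleVariant {
        SimpleStructureSet structures;
        int offset;
    };

    struct SimpleGetByIdStatus {
        std::vector<SimpleVariant> variants;

        // Mirrors the idea of GetByIdStatus::appendVariant(): refuse any variant
        // whose structure set intersects a variant we already recorded.
        bool appendVariant(const SimpleVariant& variant)
        {
            for (const SimpleVariant& existing : variants) {
                if (existing.structures.overlaps(variant.structures))
                    return false;
            }
            variants.push_back(variant);
            return true;
        }
    };

    int main()
    {
        SimpleGetByIdStatus status;
        bool ok1 = status.appendVariant({ { { 1, 2 } }, 16 }); // accepted
        bool ok2 = status.appendVariant({ { { 3 } }, 24 });    // accepted: disjoint sets
        bool ok3 = status.appendVariant({ { { 2, 4 } }, 32 }); // rejected: structure 2 already covered
        std::printf("%d %d %d\n", ok1, ok2, ok3);              // prints "1 1 0"
        return 0;
    }

Keeping the structure sets disjoint means each observed structure maps to at most one variant, so a compiler consuming the status can dispatch on structure checks without ambiguity about which offset (or prototype chain) applies.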