#ifndef CopiedBlock_h
#define CopiedBlock_h
+#include "BlockAllocator.h"
+#include "CopyWorkList.h"
#include "HeapBlock.h"
-#include "JSValue.h"
-#include "JSValueInlineMethods.h"
+#include "JSCJSValue.h"
+#include "Options.h"
+#include <wtf/Atomics.h>
+#include <wtf/OwnPtr.h>
+#include <wtf/PassOwnPtr.h>
namespace JSC {
class CopiedSpace;
-class CopiedBlock : public HeapBlock {
+class CopiedBlock : public HeapBlock<CopiedBlock> {
friend class CopiedSpace;
friend class CopiedAllocator;
public:
- CopiedBlock(PageAllocationAligned& allocation)
- : HeapBlock(allocation)
- , m_offset(payload())
- , m_isPinned(false)
- {
- ASSERT(is8ByteAligned(static_cast<void*>(m_offset)));
-#if USE(JSVALUE64)
- char* offset = static_cast<char*>(m_offset);
- memset(static_cast<void*>(offset), 0, static_cast<size_t>((reinterpret_cast<char*>(this) + allocation.size()) - offset));
-#else
- JSValue emptyValue;
- JSValue* limit = reinterpret_cast_ptr<JSValue*>(reinterpret_cast<char*>(this) + allocation.size());
- for (JSValue* currentValue = reinterpret_cast<JSValue*>(m_offset); currentValue < limit; currentValue++)
- *currentValue = emptyValue;
-#endif
- }
+ static CopiedBlock* create(DeadBlock*);
+ static CopiedBlock* createNoZeroFill(DeadBlock*);
+
+ void pin();
+ bool isPinned();
+
+ bool isOversize();
+
+ unsigned liveBytes();
+ void reportLiveBytes(JSCell*, unsigned);
+ void didSurviveGC();
+ void didEvacuateBytes(unsigned);
+ bool shouldEvacuate();
+ bool canBeRecycled();
+ // The payload is the region of the block that is usable for allocations.
char* payload();
+ char* payloadEnd();
+ size_t payloadCapacity();
+
+ // The data is the region of the block that has been used for allocations.
+ char* data();
+ char* dataEnd();
+ size_t dataSize();
+
+ // The wilderness is the region of the block that is usable for allocations
+ // but has not been so used.
+ char* wilderness();
+ char* wildernessEnd();
+ size_t wildernessSize();
+
size_t size();
size_t capacity();
+ static const size_t blockSize = 32 * KB;
+
+ bool hasWorkList();
+ CopyWorkList& workList();
+
private:
- void* m_offset;
+ CopiedBlock(Region*);
+ void zeroFillWilderness(); // Can be called at any time to zero-fill to the end of the block.
+
+#if ENABLE(PARALLEL_GC)
+ SpinLock m_workListLock;
+#endif
+ OwnPtr<CopyWorkList> m_workList;
+
+ size_t m_remaining;
uintptr_t m_isPinned;
+ unsigned m_liveBytes;
};
+inline CopiedBlock* CopiedBlock::createNoZeroFill(DeadBlock* block)
+{
+ Region* region = block->region();
+ return new(NotNull, block) CopiedBlock(region);
+}
+
+inline CopiedBlock* CopiedBlock::create(DeadBlock* block)
+{
+ CopiedBlock* newBlock = createNoZeroFill(block);
+ newBlock->zeroFillWilderness();
+ return newBlock;
+}
+
+inline void CopiedBlock::zeroFillWilderness()
+{
+#if USE(JSVALUE64)
+ memset(wilderness(), 0, wildernessSize());
+#else
+ JSValue emptyValue;
+ JSValue* limit = reinterpret_cast_ptr<JSValue*>(wildernessEnd());
+ for (JSValue* currentValue = reinterpret_cast_ptr<JSValue*>(wilderness()); currentValue < limit; currentValue++)
+ *currentValue = emptyValue;
+#endif
+}
+
+inline CopiedBlock::CopiedBlock(Region* region)
+ : HeapBlock<CopiedBlock>(region)
+ , m_remaining(payloadCapacity())
+ , m_isPinned(false)
+ , m_liveBytes(0)
+{
+#if ENABLE(PARALLEL_GC)
+ m_workListLock.Init();
+#endif
+ ASSERT(is8ByteAligned(reinterpret_cast<void*>(m_remaining)));
+}
+
+inline void CopiedBlock::didSurviveGC()
+{
+ m_liveBytes = 0;
+ m_isPinned = false;
+ if (m_workList)
+ m_workList.clear();
+}
+
+inline void CopiedBlock::didEvacuateBytes(unsigned bytes)
+{
+ ASSERT(m_liveBytes >= bytes);
+ m_liveBytes -= bytes;
+}
+
+inline bool CopiedBlock::canBeRecycled()
+{
+ return !m_liveBytes;
+}
+
+inline bool CopiedBlock::shouldEvacuate()
+{
+ return static_cast<double>(m_liveBytes) / static_cast<double>(payloadCapacity()) <= Options::minCopiedBlockUtilization();
+}
+
+inline void CopiedBlock::pin()
+{
+ m_isPinned = true;
+ if (m_workList)
+ m_workList.clear();
+}
+
+inline bool CopiedBlock::isPinned()
+{
+ return m_isPinned;
+}
+
+inline bool CopiedBlock::isOversize()
+{
+ return region()->isCustomSize();
+}
+
+inline unsigned CopiedBlock::liveBytes()
+{
+ return m_liveBytes;
+}
+
// First usable byte: just past the block header, rounded up to 8-byte
// alignment.
inline char* CopiedBlock::payload()
{
    return reinterpret_cast<char*>(this) + ((sizeof(CopiedBlock) + 7) & ~7);
}
+inline char* CopiedBlock::payloadEnd()
+{
+ return reinterpret_cast<char*>(this) + region()->blockSize();
+}
+
+inline size_t CopiedBlock::payloadCapacity()
+{
+ return payloadEnd() - payload();
+}
+
+inline char* CopiedBlock::data()
+{
+ return payload();
+}
+
+inline char* CopiedBlock::dataEnd()
+{
+ return payloadEnd() - m_remaining;
+}
+
+inline size_t CopiedBlock::dataSize()
+{
+ return dataEnd() - data();
+}
+
+inline char* CopiedBlock::wilderness()
+{
+ return dataEnd();
+}
+
+inline char* CopiedBlock::wildernessEnd()
+{
+ return payloadEnd();
+}
+
+inline size_t CopiedBlock::wildernessSize()
+{
+ return wildernessEnd() - wilderness();
+}
+
// Bytes in use; kept as an alias of dataSize() for callers of the older
// name.
inline size_t CopiedBlock::size()
{
    return dataSize();
}
// Total block footprint, header included (contrast with payloadCapacity()).
inline size_t CopiedBlock::capacity()
{
    return region()->blockSize();
}
+
+inline bool CopiedBlock::hasWorkList()
+{
+ return !!m_workList;
+}
+
+inline CopyWorkList& CopiedBlock::workList()
+{
+ return *m_workList;
}
} // namespace JSC