/*
- * Copyright (C) 2012 Apple Inc. All rights reserved.
+ * Copyright (C) 2012, 2013 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
ALWAYS_INLINE void SlotVisitor::append(JSValue* slot, size_t count)
{
for (size_t i = 0; i < count; ++i) {
JSValue& value = slot[i];
- internalAppend(value);
+ internalAppend(&value, value);
}
}
template<typename T>
inline void SlotVisitor::appendUnbarrieredPointer(T** slot)
{
ASSERT(slot);
JSCell* cell = *slot;
- internalAppend(cell);
+ internalAppend(slot, cell);
+}
+
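+// The ReadOnly variants below pass a null "from" pointer to internalAppend,
+// since a read-only reference has no writable slot to report as the source of
+// the reference.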
+template<typename T>
+inline void SlotVisitor::appendUnbarrieredReadOnlyPointer(T* cell)
+{
+ internalAppend(0, cell);
}
ALWAYS_INLINE void SlotVisitor::append(JSValue* slot)
{
ASSERT(slot);
- internalAppend(*slot);
+ internalAppend(slot, *slot);
}
ALWAYS_INLINE void SlotVisitor::appendUnbarrieredValue(JSValue* slot)
{
ASSERT(slot);
- internalAppend(*slot);
+ internalAppend(slot, *slot);
+}
+
+ALWAYS_INLINE void SlotVisitor::appendUnbarrieredReadOnlyValue(JSValue value)
+{
+ internalAppend(0, value);
}
ALWAYS_INLINE void SlotVisitor::append(JSCell** slot)
{
ASSERT(slot);
- internalAppend(*slot);
+ internalAppend(slot, *slot);
}
template<typename T>
inline void SlotVisitor::appendUnbarrieredWeak(Weak<T>* weak)
{
ASSERT(weak);
if (weak->get())
- internalAppend(weak->get());
+ internalAppend(0, weak->get());
}
-ALWAYS_INLINE void SlotVisitor::internalAppend(JSValue value)
+ALWAYS_INLINE void SlotVisitor::internalAppend(void* from, JSValue value)
{
if (!value || !value.isCell())
return;
- internalAppend(value.asCell());
+ internalAppend(from, value.asCell());
+}
+
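+// Visits a single cell reference: the edge from "from" to "cell" is logged when
+// ALLOCATION_LOGGING is enabled, and if the cell has not been marked yet it is
+// marked, its size is accounted in m_bytesVisited, and it is pushed onto the
+// mark stack.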
+ALWAYS_INLINE void SlotVisitor::internalAppend(void* from, JSCell* cell)
+{
+ ASSERT(!m_isCheckingForDefaultMarkViolation);
+ if (!cell)
+ return;
+#if ENABLE(ALLOCATION_LOGGING)
+ dataLogF("JSC GC noticing reference from %p to %p.\n", from, cell);
+#else
+ UNUSED_PARAM(from);
+#endif
+#if ENABLE(GC_VALIDATION)
+ validate(cell);
+#endif
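+ // testAndSetMarked returns true if the cell was already marked, so each live
+ // cell is appended to the mark stack at most once.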
+ if (Heap::testAndSetMarked(cell) || !cell->structure()) {
+ ASSERT(cell->structure());
+ return;
+ }
+
+ cell->setMarked();
+ m_bytesVisited += MarkedBlock::blockFor(cell)->cellSize();
+
+ unconditionallyAppend(cell);
+}
+
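+// Pushes a cell that is already known to be marked directly onto the mark stack,
+// bypassing the mark-bit check in internalAppend.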
+ALWAYS_INLINE void SlotVisitor::unconditionallyAppend(JSCell* cell)
+{
+ ASSERT(Heap::isMarked(cell));
+ m_visitCount++;
+
+ // Should never attempt to mark something that is zapped.
+ ASSERT(!cell->isZapped());
+
+ m_stack.append(cell);
+}
+
+template<typename T> inline void SlotVisitor::append(WriteBarrierBase<T>* slot)
+{
+ internalAppend(slot, *slot->slot());
+}
+
+template<typename Iterator> inline void SlotVisitor::append(Iterator begin, Iterator end)
+{
+ for (auto it = begin; it != end; ++it)
+ append(&*it);
+}
+
+ALWAYS_INLINE void SlotVisitor::appendValues(WriteBarrierBase<Unknown>* barriers, size_t count)
+{
+ append(barriers->slot(), count);
}
inline void SlotVisitor::addWeakReferenceHarvester(WeakReferenceHarvester* weakReferenceHarvester)
#endif
}
-inline bool SlotVisitor::containsOpaqueRoot(void* root)
+inline bool SlotVisitor::containsOpaqueRoot(void* root) const
{
ASSERT(!m_isInParallelMode);
#if ENABLE(PARALLEL_GC)
#endif
}
-inline TriState SlotVisitor::containsOpaqueRootTriState(void* root)
+inline TriState SlotVisitor::containsOpaqueRootTriState(void* root) const
{
if (m_opaqueRoots.contains(root))
return TrueTriState;
- MutexLocker locker(m_shared.m_opaqueRootsLock);
+ std::lock_guard<std::mutex> lock(m_shared.m_opaqueRootsMutex);
if (m_shared.m_opaqueRoots.contains(root))
return TrueTriState;
return MixedTriState;
drain();
}
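+// Defers copying of the storage at "ptr" until the copying phase: oversize
+// blocks are pinned in place, and the owner's live bytes are reported to the
+// block under its work-list lock.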
-inline void SlotVisitor::copyLater(JSCell* owner, void* ptr, size_t bytes)
+inline void SlotVisitor::copyLater(JSCell* owner, CopyToken token, void* ptr, size_t bytes)
{
+ ASSERT(bytes);
CopiedBlock* block = CopiedSpace::blockFor(ptr);
if (block->isOversize()) {
+ ASSERT(bytes <= block->size());
+ // FIXME: We should be able to shrink the allocation if bytes went below the block size.
+ // For now, we just make sure that our accounting of how much memory we are actually using
+ // is correct.
+ // https://bugs.webkit.org/show_bug.cgi?id=144749
+ bytes = block->size();
m_shared.m_copiedSpace->pin(block);
- return;
}
- if (block->isPinned())
- return;
+ ASSERT(heap()->m_storageSpace.contains(block));
- block->reportLiveBytes(owner, bytes);
+ SpinLockHolder locker(&block->workListLock());
+ if (heap()->operationInProgress() == FullCollection || block->shouldReportLiveBytes(locker, owner)) {
+ m_bytesCopied += bytes;
+ block->reportLiveBytes(locker, owner, token, bytes);
+ }
}
+inline void SlotVisitor::reportExtraMemoryVisited(JSCell* owner, size_t size)
+{
+ heap()->reportExtraMemoryVisited(owner, size);
+}
+
+inline Heap* SlotVisitor::heap() const
+{
+ return &sharedData().m_vm->heap;
+}
+
+inline VM& SlotVisitor::vm()
+{
+ return *sharedData().m_vm;
+}
+
+inline const VM& SlotVisitor::vm() const
+{
+ return *sharedData().m_vm;
+}
+
} // namespace JSC
#endif // SlotVisitorInlines_h