+ ~PageMapMemoryUsageRecorder()
+ {
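+ // All coalesced spans should have been flushed via recordPendingRegions() by now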
+ ASSERT(!m_coalescedSpans.size());
+ }
+
+ void recordPendingRegions()
+ {
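+ ASSERT(m_coalescedSpans.size());
+
+ // The coalesced spans are contiguous, so the whole run can be described by a
+ // single range from the first span's start page to the end of the last span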
+ Span* lastSpan = m_coalescedSpans[m_coalescedSpans.size() - 1];
+ vm_range_t ptrRange = { m_coalescedSpans[0]->start << kPageShift, 0 };
+ ptrRange.size = (lastSpan->start << kPageShift) - ptrRange.address + (lastSpan->length * kPageSize);
+
+ // Mark the memory region the spans represent as a candidate for containing pointers
+ if (m_typeMask & MALLOC_PTR_REGION_RANGE_TYPE)
+ (*m_recorder)(m_task, m_context, MALLOC_PTR_REGION_RANGE_TYPE, &ptrRange, 1);
+
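+ // If the caller didn't ask for in-use ranges, there is nothing more to record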
+ if (!(m_typeMask & MALLOC_PTR_IN_USE_RANGE_TYPE)) {
+ m_coalescedSpans.clear();
+ return;
+ }
+
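+ // Collect the in-use allocations from every pending span into a single batch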
+ Vector<vm_range_t, 1024> allocatedPointers;
+ for (size_t i = 0; i < m_coalescedSpans.size(); ++i) {
+ Span* theSpan = m_coalescedSpans[i];
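+ // Free spans contain no live objects, so they contribute no in-use ranges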
+ if (theSpan->free)
+ continue;
+
+ vm_address_t spanStartAddress = theSpan->start << kPageShift;
+ vm_size_t spanSizeInBytes = theSpan->length * kPageSize;
+
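+ // A zero sizeclass denotes a large-object span; otherwise the span is carved
+ // into equal-sized small objects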
+ if (!theSpan->sizeclass) {
+ // If it's an allocated large object span, mark it as in use
+ if (!m_freeObjectFinder.isFreeObject(spanStartAddress))
+ allocatedPointers.append((vm_range_t){ spanStartAddress, spanSizeInBytes });
+ } else {
+ const size_t objectSize = ByteSizeForClass(theSpan->sizeclass);
+
+ // Mark each allocated small object within the span as in use
+ const vm_address_t endOfSpan = spanStartAddress + spanSizeInBytes;
+ for (vm_address_t object = spanStartAddress; object + objectSize <= endOfSpan; object += objectSize) {
+ if (!m_freeObjectFinder.isFreeObject(object))
+ allocatedPointers.append((vm_range_t){ object, objectSize });
+ }
+ }
+ }
+
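+ // Hand the batched in-use ranges to the enumeration callback in one call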
+ (*m_recorder)(m_task, m_context, MALLOC_PTR_IN_USE_RANGE_TYPE, allocatedPointers.data(), allocatedPointers.size());
+
+ m_coalescedSpans.clear();
+ }
+
+ int visit(void* ptr)