*/
#include "config.h"
#include "ExecutableAllocator.h"
#include "JSCInlines.h"

#if ENABLE(EXECUTABLE_ALLOCATOR_DEMAND)
#include "CodeProfiling.h"
#include <wtf/HashSet.h>
#include <wtf/MetaAllocator.h>
#include <wtf/NeverDestroyed.h>
#include <wtf/PageReservation.h>
#include <wtf/ThreadingPrimitives.h>
#include <wtf/VMTags.h>
#endif
DemandExecutableAllocator()
: MetaAllocator(jitAllocationGranule)
{
- MutexLocker lock(allocatorsMutex());
+ std::lock_guard<std::mutex> lock(allocatorsMutex());
allocators().add(this);
// Don't preallocate any memory here.
}
// Unregisters this allocator, then releases its page reservations.
// The lock is scoped to the set removal only; deallocation happens outside it.
// Defect fixed: unresolved unified-diff markers resolved to the post-change
// side. NOTE(review): the loop body/closing brace lie past this hunk's
// visible span and are intentionally not reproduced here.
virtual ~DemandExecutableAllocator()
{
    {
        std::lock_guard<std::mutex> lock(allocatorsMutex());
        allocators().remove(this);
    }
    for (unsigned i = 0; i < reservations.size(); ++i)
static size_t bytesAllocatedByAllAllocators()
{
size_t total = 0;
- MutexLocker lock(allocatorsMutex());
+ std::lock_guard<std::mutex> lock(allocatorsMutex());
for (HashSet<DemandExecutableAllocator*>::const_iterator allocator = allocators().begin(); allocator != allocators().end(); ++allocator)
total += (*allocator)->bytesAllocated();
return total;
static size_t bytesCommittedByAllocactors()
{
size_t total = 0;
- MutexLocker lock(allocatorsMutex());
+ std::lock_guard<std::mutex> lock(allocatorsMutex());
for (HashSet<DemandExecutableAllocator*>::const_iterator allocator = allocators().begin(); allocator != allocators().end(); ++allocator)
total += (*allocator)->bytesCommitted();
return total;
#if ENABLE(META_ALLOCATOR_PROFILE)
// Dumps the MetaAllocator profile of every live allocator under the global
// registry lock. Defect fixed: unresolved unified-diff markers resolved to
// the post-change side. The matching #endif lies past this hunk's visible span.
static void dumpProfileFromAllAllocators()
{
    std::lock_guard<std::mutex> lock(allocatorsMutex());
    for (HashSet<DemandExecutableAllocator*>::const_iterator allocator = allocators().begin(); allocator != allocators().end(); ++allocator)
        (*allocator)->dumpProfile();
}
Vector<PageReservation, 16> reservations;
// Process-global set of live DemandExecutableAllocators; guarded by
// allocatorsMutex(). Defect fixed: unresolved unified-diff markers resolved
// to the post-change side (DEFINE_STATIC_LOCAL → DEPRECATED_DEFINE_STATIC_LOCAL).
static HashSet<DemandExecutableAllocator*>& allocators()
{
    DEPRECATED_DEFINE_STATIC_LOCAL(HashSet<DemandExecutableAllocator*>, sAllocators, ());
    return sAllocators;
}
- static Mutex& allocatorsMutex()
+
+ static std::mutex& allocatorsMutex()
{
- DEFINE_STATIC_LOCAL(Mutex, mutex, ());
+ static NeverDestroyed<std::mutex> mutex;
+
return mutex;
}
};
// Constructs the per-VM executable allocator; in WX-exclusive builds it owns a
// DemandExecutableAllocator via unique_ptr. Defect fixed: unresolved
// unified-diff markers resolved to the post-change side
// (adoptPtr(new ...) → std::make_unique).
ExecutableAllocator::ExecutableAllocator(VM&)
#if ENABLE(ASSEMBLER_WX_EXCLUSIVE)
    : m_allocator(std::make_unique<DemandExecutableAllocator>())
#endif
{
    ASSERT(allocator());
}
-PassRefPtr<ExecutableMemoryHandle> ExecutableAllocator::allocate(VM&, size_t sizeInBytes, void* ownerUID, JITCompilationEffort effort)
+RefPtr<ExecutableMemoryHandle> ExecutableAllocator::allocate(VM&, size_t sizeInBytes, void* ownerUID, JITCompilationEffort effort)
{
RefPtr<ExecutableMemoryHandle> result = allocator()->allocate(sizeInBytes, ownerUID);
RELEASE_ASSERT(result || effort != JITCompilationMustSucceed);
- return result.release();
+ return result;
}
size_t ExecutableAllocator::committedByteCount()