#ifdef HAVE_MALLOC_SIZE
#define PREFIX_SIZE (0)
#else
-#if defined(__sun)
+#if defined(__sun) || defined(__sparc) || defined(__sparc__)
#define PREFIX_SIZE (sizeof(long long))
#else
#define PREFIX_SIZE (sizeof(size_t))
#define calloc(count,size) tc_calloc(count,size)
#define realloc(ptr,size) tc_realloc(ptr,size)
#define free(ptr) tc_free(ptr)
+#elif defined(USE_JEMALLOC)
+#define malloc(size) je_malloc(size)
+#define calloc(count,size) je_calloc(count,size)
+#define realloc(ptr,size) je_realloc(ptr,size)
+#define free(ptr) je_free(ptr)
+#endif
+
+#ifdef HAVE_ATOMIC
+/* GCC __sync builtins available: update the used_memory counter with
+ * lock-free atomic add/sub instead of taking a mutex. */
+#define update_zmalloc_stat_add(__n) __sync_add_and_fetch(&used_memory, (__n))
+#define update_zmalloc_stat_sub(__n) __sync_sub_and_fetch(&used_memory, (__n))
+#else
+/* No atomic builtins: serialize every counter update with the
+ * used_memory_mutex (these macros are only reached when
+ * zmalloc_thread_safe is set). */
+#define update_zmalloc_stat_add(__n) do { \
+ pthread_mutex_lock(&used_memory_mutex); \
+ used_memory += (__n); \
+ pthread_mutex_unlock(&used_memory_mutex); \
+} while(0)
+
+#define update_zmalloc_stat_sub(__n) do { \
+ pthread_mutex_lock(&used_memory_mutex); \
+ used_memory -= (__n); \
+ pthread_mutex_unlock(&used_memory_mutex); \
+} while(0)
+
#endif
#define update_zmalloc_stat_alloc(__n,__size) do { \
size_t _n = (__n); \
- size_t _stat_slot = (__size < ZMALLOC_MAX_ALLOC_STAT) ? __size : ZMALLOC_MAX_ALLOC_STAT; \
if (_n&(sizeof(long)-1)) _n += sizeof(long)-(_n&(sizeof(long)-1)); \
if (zmalloc_thread_safe) { \
- pthread_mutex_lock(&used_memory_mutex); \
- used_memory += _n; \
- zmalloc_allocations[_stat_slot]++; \
- pthread_mutex_unlock(&used_memory_mutex); \
+ update_zmalloc_stat_add(_n); \
} else { \
used_memory += _n; \
- zmalloc_allocations[_stat_slot]++; \
} \
} while(0)
size_t _n = (__n); \
if (_n&(sizeof(long)-1)) _n += sizeof(long)-(_n&(sizeof(long)-1)); \
if (zmalloc_thread_safe) { \
- pthread_mutex_lock(&used_memory_mutex); \
- used_memory -= _n; \
- pthread_mutex_unlock(&used_memory_mutex); \
+ update_zmalloc_stat_sub(_n); \
} else { \
used_memory -= _n; \
} \
static size_t used_memory = 0;
static int zmalloc_thread_safe = 0;
pthread_mutex_t used_memory_mutex = PTHREAD_MUTEX_INITIALIZER;
-/* Note that malloc_allocations elements are initialized to zero by C */
-size_t zmalloc_allocations[ZMALLOC_MAX_ALLOC_STAT+1];
static void zmalloc_oom(size_t size) {
fprintf(stderr, "zmalloc: Out of memory trying to allocate %zu bytes\n",
if (!ptr) zmalloc_oom(size);
#ifdef HAVE_MALLOC_SIZE
- update_zmalloc_stat_alloc(redis_malloc_size(ptr),size);
+ update_zmalloc_stat_alloc(zmalloc_size(ptr),size);
return ptr;
#else
*((size_t*)ptr) = size;
if (!ptr) zmalloc_oom(size);
#ifdef HAVE_MALLOC_SIZE
- update_zmalloc_stat_alloc(redis_malloc_size(ptr),size);
+ update_zmalloc_stat_alloc(zmalloc_size(ptr),size);
return ptr;
#else
*((size_t*)ptr) = size;
if (ptr == NULL) return zmalloc(size);
#ifdef HAVE_MALLOC_SIZE
- oldsize = redis_malloc_size(ptr);
+ oldsize = zmalloc_size(ptr);
newptr = realloc(ptr,size);
if (!newptr) zmalloc_oom(size);
update_zmalloc_stat_free(oldsize);
- update_zmalloc_stat_alloc(redis_malloc_size(newptr),size);
+ update_zmalloc_stat_alloc(zmalloc_size(newptr),size);
return newptr;
#else
realptr = (char*)ptr-PREFIX_SIZE;
#endif
}
+/* Provide zmalloc_size() for systems where this function is not provided by
+ * malloc itself, given that in that case we store a header with this
+ * information as the first bytes of every allocation. */
+#ifndef HAVE_MALLOC_SIZE
+size_t zmalloc_size(void *ptr) {
+ void *realptr = (char*)ptr-PREFIX_SIZE;
+ size_t size = *((size_t*)realptr);
+ /* Assume at least that all the allocations are padded to a multiple of
+ * sizeof(long) by the underlying allocator. */
+ if (size&(sizeof(long)-1)) size += sizeof(long)-(size&(sizeof(long)-1));
+ /* The reported size accounts for the PREFIX_SIZE header as well. */
+ return size+PREFIX_SIZE;
+}
+#endif
+
void zfree(void *ptr) {
#ifndef HAVE_MALLOC_SIZE
void *realptr;
if (ptr == NULL) return;
#ifdef HAVE_MALLOC_SIZE
- update_zmalloc_stat_free(redis_malloc_size(ptr));
+ update_zmalloc_stat_free(zmalloc_size(ptr));
free(ptr);
#else
realptr = (char*)ptr-PREFIX_SIZE;
size_t zmalloc_used_memory(void) {
size_t um;
- if (zmalloc_thread_safe) pthread_mutex_lock(&used_memory_mutex);
- um = used_memory;
- if (zmalloc_thread_safe) pthread_mutex_unlock(&used_memory_mutex);
- return um;
-}
+ if (zmalloc_thread_safe) {
+#ifdef HAVE_ATOMIC
+ um = __sync_add_and_fetch(&used_memory, 0);
+#else
+ pthread_mutex_lock(&used_memory_mutex);
+ um = used_memory;
+ pthread_mutex_unlock(&used_memory_mutex);
+#endif
+ }
+ else {
+ um = used_memory;
+ }
-size_t zmalloc_allocations_for_size(size_t size) {
- if (size > ZMALLOC_MAX_ALLOC_STAT) return 0;
- return zmalloc_allocations[size];
+ return um;
}
void zmalloc_enable_thread_safeness(void) {