#define free(ptr) je_free(ptr)
#endif
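+/* Update the used_memory counter with GCC atomic builtins when they are
+ * available (HAVE_ATOMIC); otherwise fall back to a pthread mutex around
+ * each update. */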
+#ifdef HAVE_ATOMIC
+#define update_zmalloc_stat_add(__n) __sync_add_and_fetch(&used_memory, (__n))
+#define update_zmalloc_stat_sub(__n) __sync_sub_and_fetch(&used_memory, (__n))
+#else
+#define update_zmalloc_stat_add(__n) do { \
+    pthread_mutex_lock(&used_memory_mutex); \
+    used_memory += (__n); \
+    pthread_mutex_unlock(&used_memory_mutex); \
+} while(0)
+
+#define update_zmalloc_stat_sub(__n) do { \
+    pthread_mutex_lock(&used_memory_mutex); \
+    used_memory -= (__n); \
+    pthread_mutex_unlock(&used_memory_mutex); \
+} while(0)
+
+#endif
+
#define update_zmalloc_stat_alloc(__n,__size) do { \
    size_t _n = (__n); \
    if (_n&(sizeof(long)-1)) _n += sizeof(long)-(_n&(sizeof(long)-1)); \
    if (zmalloc_thread_safe) { \
-        pthread_mutex_lock(&used_memory_mutex); \
-        used_memory += _n; \
-        pthread_mutex_unlock(&used_memory_mutex); \
+        update_zmalloc_stat_add(_n); \
    } else { \
        used_memory += _n; \
    } \
} while(0)

#define update_zmalloc_stat_free(__n) do { \
    size_t _n = (__n); \
    if (_n&(sizeof(long)-1)) _n += sizeof(long)-(_n&(sizeof(long)-1)); \
    if (zmalloc_thread_safe) { \
-        pthread_mutex_lock(&used_memory_mutex); \
-        used_memory -= _n; \
-        pthread_mutex_unlock(&used_memory_mutex); \
+        update_zmalloc_stat_sub(_n); \
    } else { \
        used_memory -= _n; \
    } \
} while(0)

size_t zmalloc_used_memory(void) {
    size_t um;
-    if (zmalloc_thread_safe) pthread_mutex_lock(&used_memory_mutex);
-    um = used_memory;
-    if (zmalloc_thread_safe) pthread_mutex_unlock(&used_memory_mutex);
+    if (zmalloc_thread_safe) {
+#ifdef HAVE_ATOMIC
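+        /* Atomic read: the builtin returns the post-add value, so adding
+         * zero fetches the current value of used_memory. */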
+        um = __sync_add_and_fetch(&used_memory, 0);
+#else
+        pthread_mutex_lock(&used_memory_mutex);
+        um = used_memory;
+        pthread_mutex_unlock(&used_memory_mutex);
+#endif
+    }
+    else {
+        um = used_memory;
+    }
+
    return um;
}
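---

For context, here is a minimal standalone sketch of the same pattern. It is illustration only, not part of the patch: the worker function, loop counts, and 16-byte increment are made up, and only the add macro is reproduced. Built with -DHAVE_ATOMIC (e.g. cc -DHAVE_ATOMIC demo.c -pthread) it takes the builtin path; built without, the mutex fallback. Either way, two threads each adding 16 bytes 100000 times leave the counter at exactly 3200000.

#include <pthread.h>
#include <stdio.h>
#include <stddef.h>

static size_t used_memory = 0;
static pthread_mutex_t used_memory_mutex = PTHREAD_MUTEX_INITIALIZER;

#ifdef HAVE_ATOMIC
/* Lock-free path: the GCC __sync builtin is an atomic read-modify-write
 * with a full memory barrier. */
#define update_zmalloc_stat_add(__n) __sync_add_and_fetch(&used_memory, (__n))
#else
/* Fallback path: serialize the read-modify-write with a mutex. */
#define update_zmalloc_stat_add(__n) do { \
    pthread_mutex_lock(&used_memory_mutex); \
    used_memory += (__n); \
    pthread_mutex_unlock(&used_memory_mutex); \
} while(0)
#endif

static void *worker(void *arg) {
    (void)arg;
    for (int i = 0; i < 100000; i++)
        update_zmalloc_stat_add(16); /* pretend we accounted a 16-byte allocation */
    return NULL;
}

int main(void) {
    pthread_t t1, t2;
    pthread_create(&t1, NULL, worker, NULL);
    pthread_create(&t2, NULL, worker, NULL);
    pthread_join(t1, NULL);
    pthread_join(t2, NULL);
    /* Prints used_memory = 3200000 on both paths; an unsynchronized
     * "used_memory += 16" would race and lose updates. */
    printf("used_memory = %zu\n", used_memory);
    return 0;
}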