+/* Function prototypes for non-inline static functions. */
+
+static void	*chunk_recycle(extent_tree_t *chunks_szad,
+    extent_tree_t *chunks_ad, size_t size, size_t alignment, bool base,
+    bool *zero);
+static void	chunk_record(extent_tree_t *chunks_szad,
+    extent_tree_t *chunks_ad, void *chunk, size_t size);
+
+/******************************************************************************/
+
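+/*
+ * Attempt to satisfy a chunk request by splitting a previously recorded
+ * extent.  chunks_szad is ordered by (size, address) so that a best-fit
+ * extent can be found; chunks_ad is ordered by address, which allows
+ * chunk_record() to coalesce adjacent extents.
+ */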
+static void *
+chunk_recycle(extent_tree_t *chunks_szad, extent_tree_t *chunks_ad, size_t size,
+    size_t alignment, bool base, bool *zero)
+{
+	void *ret;
+	extent_node_t *node;
+	extent_node_t key;
+	size_t alloc_size, leadsize, trailsize;
+	bool zeroed;
+
+	if (base) {
+		/*
+		 * This function may need to call base_node_{,de}alloc(), but
+		 * the current chunk allocation request is on behalf of the
+		 * base allocator.  Avoid deadlock (and if that weren't an
+		 * issue, potential for infinite recursion) by returning NULL.
+		 */
+		return (NULL);
+	}
+
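+	/*
+	 * Over-size the request: recycled extents are chunksize-aligned, so
+	 * at most (alignment - chunksize) leading bytes are needed to reach
+	 * an alignment boundary within the extent.
+	 */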
+	alloc_size = size + alignment - chunksize;
+	/* Beware size_t wrap-around. */
+	if (alloc_size < size)
+		return (NULL);
+	key.addr = NULL;
+	key.size = alloc_size;
+	malloc_mutex_lock(&chunks_mtx);
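+	/* Best fit: smallest extent with size >= alloc_size, lowest address. */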
+	node = extent_tree_szad_nsearch(chunks_szad, &key);
+	if (node == NULL) {
+		malloc_mutex_unlock(&chunks_mtx);
+		return (NULL);
+	}
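+	/*
+	 * Carve the extent into an (optional) leading fragment, the aligned
+	 * result, and an (optional) trailing fragment.
+	 */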
+	leadsize = ALIGNMENT_CEILING((uintptr_t)node->addr, alignment) -
+	    (uintptr_t)node->addr;
+	assert(node->size >= leadsize + size);
+	trailsize = node->size - leadsize - size;
+	ret = (void *)((uintptr_t)node->addr + leadsize);
+	/* Remove node from the trees. */
+	extent_tree_szad_remove(chunks_szad, node);
+	extent_tree_ad_remove(chunks_ad, node);
+	if (leadsize != 0) {
+		/* Insert the leading space as a smaller chunk. */
+		node->size = leadsize;
+		extent_tree_szad_insert(chunks_szad, node);
+		extent_tree_ad_insert(chunks_ad, node);
+		node = NULL;
+	}
+	if (trailsize != 0) {
+		/* Insert the trailing space as a smaller chunk. */
+		if (node == NULL) {
+			/*
+			 * An additional node is required, but
+			 * base_node_alloc() can cause a new base chunk to be
+			 * allocated.  Drop chunks_mtx in order to avoid
+			 * deadlock, and if node allocation fails, deallocate
+			 * the result before returning an error.
+			 */
+			malloc_mutex_unlock(&chunks_mtx);
+			node = base_node_alloc();
+			if (node == NULL) {
+				chunk_dealloc(ret, size, true);
+				return (NULL);
+			}
+			malloc_mutex_lock(&chunks_mtx);
+		}
+		node->addr = (void *)((uintptr_t)(ret) + size);
+		node->size = trailsize;
+		extent_tree_szad_insert(chunks_szad, node);
+		extent_tree_ad_insert(chunks_ad, node);
+		node = NULL;
+	}
+	malloc_mutex_unlock(&chunks_mtx);
+
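+	/*
+	 * node is still non-NULL only if the extent was an exact fit (no
+	 * leading or trailing split), in which case its zeroed flag is
+	 * trustworthy and the node itself is no longer needed.
+	 */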
+	zeroed = false;
+	if (node != NULL) {
+		if (node->zeroed) {
+			zeroed = true;
+			*zero = true;
+		}
+		base_node_dealloc(node);
+	}
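+	/*
+	 * The caller requested zeroed memory, but the recycled extent is not
+	 * known to be zeroed; zero it here.  The Valgrind annotation marks
+	 * the range as addressable before the memset writes to it.
+	 */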
+	if (zeroed == false && *zero) {
+		VALGRIND_MAKE_MEM_UNDEFINED(ret, size);
+		memset(ret, 0, size);
+	}
+	return (ret);
+}