2 * Copyright (c) 2007 Apple Inc. All Rights Reserved.
4 * @APPLE_LICENSE_HEADER_START@
6 * This file contains Original Code and/or Modifications of Original Code
7 * as defined in and that are subject to the Apple Public Source License
8 * Version 2.0 (the 'License'). You may not use this file except in
9 * compliance with the License. Please obtain a copy of the License at
10 * http://www.opensource.apple.com/apsl/ and read it before using this file.
13 * The Original Code and all software distributed under the License are
14 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
15 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
16 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
18 * Please see the License for the specific language governing rights and
19 * limitations under the License.
21 * @APPLE_LICENSE_HEADER_END@
27 * "zone allocator" for objc.
29 * Provides packed allocation for data structures the runtime
33 #include "objc-private.h"
34 #include "objc-zalloc.h"
// Pop one entry off the front of the lock-free queue via a CAS retry loop.
//
// NOTE(review): this chunk is missing several interior lines (the `do {`
// opener, the empty-queue early return, and the closing braces are not
// visible here); the comments below describe only what is shown.
// `l1`/`l2` appear to be unions overlaying a double-word `pair` with a
// `head` pointer -- their declarations are outside this view; confirm.
38 void *AtomicQueue::pop()
// Snapshot the current {head, counter} pair. The load is deliberately
// non-atomic: any torn read is caught and retried by the CAS below.
42 l1.pair = pair; // non atomic on purpose
// Empty queue -- presumably returns nullptr inside this branch (the
// branch body is not visible in this chunk).
45 if (l1.head == nullptr) {
// Proposed new state: advance head to the next entry in the list.
48 l2.head = l1.head->next;
// Relaxed ordering on both success and failure: pop publishes nothing;
// on failure l1 is reloaded with the current value and the loop retries.
50 } while (!atomic_pair.compare_exchange_weak(l1.pair, l2.pair, relaxed, relaxed));
// The popped entry, returned as raw storage for the caller to reuse.
52 return reinterpret_cast<void *>(l1.head);
// Push an already-linked chain of entries [_head .. _tail] onto the front
// of the queue in one CAS.
//
// NOTE(review): interior lines are missing from this view (the `do {`
// opener, the splice of `tail->next` onto the old head, and the setup of
// `l2` are not visible); comments describe only what is shown.
55 void AtomicQueue::push_list(void *_head, void *_tail)
// Reinterpret the raw storage as queue entries; first and last node of
// the pre-linked chain being donated to the freelist.
57 Entry *head = reinterpret_cast<Entry *>(_head);
58 Entry *tail = reinterpret_cast<Entry *>(_tail);
// Snapshot of the current pair; non-atomic on purpose -- the CAS below
// detects a torn or stale read and retries.
61 l1.pair = pair; // non atomic load on purpose
// `release` on success so the links written above are visible to any
// thread that subsequently pops; `relaxed` on failure (retry only).
66 } while (!atomic_pair.compare_exchange_weak(l1.pair, l2.pair, release, relaxed));
// Euclid's recursive GCD; terminates because the second argument strictly
// decreases. (The enclosing function's signature -- presumably a
// constexpr helper, given its use in a `constexpr` initializer below --
// is not visible in this chunk.)
73 return b == 0 ? a : gcd(b, a % b);
// Shared freelist for the packed (non-malloc-aligned) Zone
// specialization; one instance per instantiated T. (The preceding
// `template <...>` line is not visible in this chunk.)
77 AtomicQueue Zone<T, false>::_freelist;
// Slow path: carve a fresh slab into elements, keep element 0 for the
// caller, and donate the rest to the shared freelist.
// (The `template <...>` line and some braces are not visible in this
// chunk; comments describe only what is shown.)
80 T *Zone<T, false>::alloc_slow()
82 // our malloc aligns to 16 bytes and this code should be used for sizes
83 // small enough that this should always be an actual malloc bucket.
85 // The point of this code is *NOT* speed but optimal density
// n_elem * sizeof(T) == lcm(sizeof(T), MALLOC_ALIGNMENT): the smallest
// element count whose total size is an exact multiple of the malloc
// bucket size, so no tail padding is wasted.
86 constexpr size_t n_elem = MALLOC_ALIGNMENT / gcd(sizeof(T), size_t{MALLOC_ALIGNMENT});
// NOTE(review): ::calloc's return value is not checked for nullptr
// before the stores below dereference it -- confirm the runtime treats
// allocation failure as fatal elsewhere.
87 Element *slab = reinterpret_cast<Element *>(::calloc(n_elem, sizeof(T)));
// Link elements 1..n_elem-2 into a chain; the last element's `next`
// stays as the zero calloc wrote, terminating the list.
88 for (size_t i = 1; i < n_elem - 1; i++) {
89 slab[i].next = &slab[i + 1];
// Donate the pre-linked chain [1 .. n_elem-1] to the freelist in one
// push_list call.
91 _freelist.push_list(reinterpret_cast<void *>(&slab[1]),
92 reinterpret_cast<void *>(&slab[n_elem - 1]));
// Element 0 is the caller's allocation (already zeroed by calloc).
93 return reinterpret_cast<T *>(&slab[0]);
// Fast path: pop a recycled element off the shared freelist.
// (A line is missing between the pop and the bzero -- presumably the
// empty-freelist branch that falls back to alloc_slow(); confirm.)
97 T *Zone<T, false>::alloc()
99 void *e = _freelist.pop();
// Clear the first word, which held the intrusive freelist `next`
// pointer while the element sat on the queue. Only one pointer's worth
// needs zeroing: free() below zeroes the rest before enqueueing.
101 __builtin_bzero(e, sizeof(void *));
102 return reinterpret_cast<T *>(e);
// Return an element to the zone: scrub its payload, then (in lines not
// visible in this chunk) presumably push it onto _freelist. A null check
// on `ptr`, if any, is also outside this view -- confirm before relying
// on free(nullptr) being safe.
108 void Zone<T, false>::free(T *ptr)
111 Element *e = reinterpret_cast<Element *>(ptr);
// Zero the whole payload now so alloc() only has to clear the one word
// used as the freelist link.
112 __builtin_bzero(e->buf, sizeof(e->buf));
// Explicitly instantiate Zone for `type`, selecting the specialization
// by whether sizeof(type) is already a multiple of MALLOC_ALIGNMENT.
// The `false` (packed) specialization is the one implemented above;
// the `true` variant is presumably defined elsewhere -- confirm.
118 #define ZoneInstantiate(type) \
119 template class Zone<type, sizeof(type) % MALLOC_ALIGNMENT == 0>
// The two runtime structures allocated through this zone allocator.
121 ZoneInstantiate(class_rw_t);
122 ZoneInstantiate(class_rw_ext_t);