2 * Copyright (c) 2010-2011 Apple Inc. All rights reserved.
4 * @APPLE_LICENSE_HEADER_START@
6 * This file contains Original Code and/or Modifications of Original Code
7 * as defined in and that are subject to the Apple Public Source License
8 * Version 2.0 (the 'License'). You may not use this file except in
9 * compliance with the License. Please obtain a copy of the License at
10 * http://www.opensource.apple.com/apsl/ and read it before using this
13 * The Original Code and all software distributed under the License are
14 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
15 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
16 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
18 * Please see the License for the specific language governing rights and
19 * limitations under the License.
21 * @APPLE_LICENSE_HEADER_END@
24 #include "objc-private.h"
26 #include "objc-weak.h"
30 #include <sys/types.h>
31 #include <libkern/OSAtomic.h>
// Number of buckets in a hash array: mask + 1 when allocated, 0 when empty.
// The argument is fully parenthesized so the macro expands safely for any
// pointer expression (e.g. TABLE_SIZE(&table)).
#define TABLE_SIZE(entry) ((entry)->mask ? (entry)->mask + 1 : 0)
35 static void append_referrer(weak_entry_t *entry, id *new_referrer);
/** 
 * Unique hash function for object pointers only.
 * 
 * @param key The object pointer
 * 
 * @return Size unrestricted hash of pointer.
 */
static inline uintptr_t hash_pointer(id key) {
    uintptr_t k = (uintptr_t)key;
    // Mix the address down: object pointers are at least 16-byte aligned,
    // so the low 4 bits carry no entropy; fold in higher bits as well.
    return (k >> 4) ^ (k >> 9);
}
/** 
 * Unique hash function for weak object pointers only.
 * 
 * @param key The weak object pointer.
 * 
 * @return Size unrestricted hash of pointer.
 */
static inline uintptr_t w_hash_pointer(id *key) {
    uintptr_t k = (uintptr_t)key;
    // Referrer slots are pointer-aligned: discard the always-zero
    // low bits (3 on 64-bit, 2 on 32-bit).
    return (sizeof(size_t) == 8) ? (k >> 3) : (k >> 2);
}
/** 
 * Grow the entry's hash table of referrers. Rehashes each
 * of the referrers into the new table, then inserts new_referrer.
 * 
 * @param entry Weak pointer hash set for a particular object.
 * @param new_referrer The weak pointer to insert after growing.
 */
__attribute__((noinline, used))
static void grow_refs_and_insert(weak_entry_t *entry, id *new_referrer)
{
    assert(entry->out_of_line);

    size_t old_size = TABLE_SIZE(entry);
    size_t new_size = old_size ? old_size * 2 : 8;

    // Remember how many live refs must be moved; the counters are
    // rebuilt from zero by append_referrer() below.
    size_t num_refs = entry->num_refs;
    weak_referrer_t *old_refs = entry->referrers;
    entry->mask = new_size - 1;

    entry->referrers = (weak_referrer_t *)
        _calloc_internal(TABLE_SIZE(entry), sizeof(weak_referrer_t));
    entry->num_refs = 0;
    entry->max_hash_displacement = 0;

    // Rehash every old slot; stop early once all live refs are moved.
    for (size_t i = 0; i < old_size && num_refs > 0; i++) {
        if (old_refs[i] != nil) {
            append_referrer(entry, old_refs[i]);
            num_refs--;
        }
    }
    // Insert the referrer that triggered the grow.
    append_referrer(entry, new_referrer);
    if (old_refs) _free_internal(old_refs);
}
/** 
 * Add the given referrer to set of weak pointers in this entry.
 * Does not perform duplicate checking (b/c weak pointers are never
 * added to a set twice).
 *
 * @param entry The entry holding the set of weak pointers. 
 * @param new_referrer The new weak pointer to be added.
 */
static void append_referrer(weak_entry_t *entry, id *new_referrer)
{
    if (! entry->out_of_line) {
        // Try to insert inline.
        for (size_t i = 0; i < WEAK_INLINE_COUNT; i++) {
            if (entry->inline_referrers[i] == nil) {
                entry->inline_referrers[i] = new_referrer;
                return;
            }
        }

        // Couldn't insert inline. Allocate out of line.
        weak_referrer_t *new_referrers = (weak_referrer_t *)
            _calloc_internal(WEAK_INLINE_COUNT, sizeof(weak_referrer_t));
        // This constructed table is invalid, but grow_refs_and_insert
        // will fix it and rehash it.
        for (size_t i = 0; i < WEAK_INLINE_COUNT; i++) {
            new_referrers[i] = entry->inline_referrers[i];
        }
        entry->referrers = new_referrers;
        entry->num_refs = WEAK_INLINE_COUNT;
        entry->out_of_line = 1;
        entry->mask = WEAK_INLINE_COUNT-1;
        entry->max_hash_displacement = 0;
    }

    assert(entry->out_of_line);

    // Grow at 3/4 occupancy to keep linear probing cheap.
    if (entry->num_refs >= TABLE_SIZE(entry) * 3/4) {
        return grow_refs_and_insert(entry, new_referrer);
    }

    // Linear probe for an empty slot, tracking the worst-case
    // displacement so lookups know when to give up.
    size_t index = w_hash_pointer(new_referrer) & (entry->mask);
    size_t hash_displacement = 0;
    while (entry->referrers[index] != NULL) {
        index = (index+1) & entry->mask;
        hash_displacement++;
    }
    if (hash_displacement > entry->max_hash_displacement) {
        entry->max_hash_displacement = hash_displacement;
    }
    weak_referrer_t &ref = entry->referrers[index];
    ref = new_referrer;
    entry->num_refs++;
}
/** 
 * Remove old_referrer from set of referrers, if it's present.
 * Does not remove duplicates, because duplicates should not exist. 
 * 
 * @todo this is slow if old_referrer is not present. But, is this ever the case?
 *
 * @param entry The entry holding the referrers.
 * @param old_referrer The referrer to remove. 
 */
static void remove_referrer(weak_entry_t *entry, id *old_referrer)
{
    if (! entry->out_of_line) {
        // Inline storage: simple scan of the fixed-size array.
        for (size_t i = 0; i < WEAK_INLINE_COUNT; i++) {
            if (entry->inline_referrers[i] == old_referrer) {
                entry->inline_referrers[i] = nil;
                return;
            }
        }
        _objc_inform("attempted to remove unregistered weak referrer %p\n",
                     old_referrer);
        return;
    }

    // Out-of-line storage: linear probe, bounded by the maximum
    // displacement ever observed during insertion.
    size_t index = w_hash_pointer(old_referrer) & (entry->mask);
    size_t hash_displacement = 0;
    while (entry->referrers[index] != old_referrer) {
        index = (index+1) & entry->mask;
        hash_displacement++;
        if (hash_displacement > entry->max_hash_displacement) {
            // Probed past every possible location: not registered.
            _objc_inform("attempted to remove unregistered weak referrer %p\n",
                         old_referrer);
            return;
        }
    }
    entry->referrers[index] = nil;
    entry->num_refs--;
}
/** 
 * Add new_entry to the object's table of weak references.
 * Does not check whether the referent is already in the table.
 * Caller must ensure there is room (see weak_grow_maybe).
 */
static void weak_entry_insert(weak_table_t *weak_table, weak_entry_t *new_entry)
{
    weak_entry_t *weak_entries = weak_table->weak_entries;
    assert(weak_entries != nil);

    // Linear probe for an empty bucket.
    size_t index = hash_pointer(new_entry->referent) & (weak_table->mask);
    size_t hash_displacement = 0;
    while (weak_entries[index].referent != nil) {
        index = (index+1) & weak_table->mask;
        hash_displacement++;
    }

    weak_entries[index] = *new_entry;
    weak_table->num_entries++;

    // Record the worst-case displacement so lookups can bail out early.
    if (hash_displacement > weak_table->max_hash_displacement) {
        weak_table->max_hash_displacement = hash_displacement;
    }
}
// Resize the table of weak entries to new_size buckets,
// rehashing every live entry into the new storage.
static void weak_resize(weak_table_t *weak_table, size_t new_size)
{
    size_t old_size = TABLE_SIZE(weak_table);

    weak_entry_t *old_entries = weak_table->weak_entries;
    weak_entry_t *new_entries = (weak_entry_t *)
        _calloc_internal(new_size, sizeof(weak_entry_t));

    weak_table->mask = new_size - 1;
    weak_table->weak_entries = new_entries;
    weak_table->max_hash_displacement = 0;
    weak_table->num_entries = 0;  // restored by weak_entry_insert below

    if (old_entries) {
        weak_entry_t *entry;
        weak_entry_t *end = old_entries + old_size;
        for (entry = old_entries; entry < end; entry++) {
            if (entry->referent) {
                weak_entry_insert(weak_table, entry);
            }
        }
        _free_internal(old_entries);
    }
}
// Grow the given zone's table of weak references if it is full.
static void weak_grow_maybe(weak_table_t *weak_table)
{
    size_t old_size = TABLE_SIZE(weak_table);

    // Grow if at least 3/4 full.
    if (weak_table->num_entries >= old_size * 3 / 4) {
        weak_resize(weak_table, old_size ? old_size*2 : 64);
    }
}
// Shrink the table if it is mostly empty.
static void weak_compact_maybe(weak_table_t *weak_table)
{
    size_t old_size = TABLE_SIZE(weak_table);

    // Shrink if larger than 1024 buckets and at most 1/16 full.
    if (old_size >= 1024  && old_size / 16 >= weak_table->num_entries) {
        weak_resize(weak_table, old_size / 8);
        // leaves new table no more than 1/2 full
    }
}
/**
 * Remove entry from the zone's table of weak references.
 * Frees the entry's out-of-line referrer storage (if any),
 * clears the bucket, and compacts the table if it became sparse.
 */
static void weak_entry_remove(weak_table_t *weak_table, weak_entry_t *entry)
{
    // remove entry
    if (entry->out_of_line) _free_internal(entry->referrers);
    bzero(entry, sizeof(*entry));

    weak_table->num_entries--;

    weak_compact_maybe(weak_table);
}
/** 
 * Return the weak reference table entry for the given referent. 
 * If there is no entry for referent, return NULL. 
 * 
 * @param weak_table The table to look in.
 * @param referent The object. Must not be nil.
 * 
 * @return The table of weak referrers to this object. 
 */
static weak_entry_t *weak_entry_for_referent(weak_table_t *weak_table, id referent)
{
    assert(referent);

    weak_entry_t *weak_entries = weak_table->weak_entries;

    if (!weak_entries) return nil;

    // Linear probe; a displacement larger than the recorded maximum
    // means the referent cannot be present.
    size_t index = hash_pointer(referent) & weak_table->mask;
    size_t hash_displacement = 0;
    while (weak_table->weak_entries[index].referent != referent) {
        index = (index+1) & weak_table->mask;
        hash_displacement++;
        if (hash_displacement > weak_table->max_hash_displacement) {
            return nil;
        }
    }

    return &weak_table->weak_entries[index];
}
/** 
 * Unregister an already-registered weak reference.
 * This is used when referrer's storage is about to go away, but referent
 * isn't dead yet. (Otherwise, zeroing referrer later would be a
 * bad memory access.)
 * Does nothing if referent/referrer is not a currently active weak reference.
 * Does not zero referrer.
 * 
 * FIXME currently requires old referent value to be passed in (lame)
 * FIXME unregistration should be automatic if referrer is collected
 * 
 * @param weak_table The global weak table.
 * @param referent The object.
 * @param referrer The weak reference.
 */
void
weak_unregister_no_lock(weak_table_t *weak_table, id referent, id *referrer)
{
    weak_entry_t *entry;

    if (!referent) return;

    if ((entry = weak_entry_for_referent(weak_table, referent))) {
        remove_referrer(entry, referrer);

        // If that was the last referrer, drop the whole entry.
        bool empty = true;
        if (entry->out_of_line  &&  entry->num_refs != 0) {
            empty = false;
        }
        else {
            for (size_t i = 0; i < WEAK_INLINE_COUNT; i++) {
                if (entry->inline_referrers[i]) {
                    empty = false;
                    break;
                }
            }
        }

        if (empty) {
            weak_entry_remove(weak_table, entry);
        }
    }

    // Do not set *referrer = nil. objc_storeWeak() requires that the
    // value not change.
}
/** 
 * Registers a new (object, weak pointer) pair. Creates a new weak
 * object entry if it does not exist.
 * 
 * @param weak_table The global weak table.
 * @param referent The object pointed to by the weak reference.
 * @param referrer The weak pointer address.
 *
 * @return The referent, or nil if the object refuses weak references
 *         (e.g. it is deallocating).
 */
id
weak_register_no_lock(weak_table_t *weak_table, id referent, id *referrer)
{
    if (referent  &&  !referent->isTaggedPointer()) {
        // ensure that the referenced object is viable
        BOOL (*allowsWeakReference)(id, SEL) = (BOOL(*)(id, SEL))
            object_getMethodImplementation(referent,
                                           @selector(allowsWeakReference));
        if ((IMP)allowsWeakReference != _objc_msgForward) {
            if (! (*allowsWeakReference)(referent, @selector(allowsWeakReference))) {
                _objc_fatal("Cannot form weak reference to instance (%p) of class %s. It is possible that this object was over-released, or is in the process of deallocation.", (void*)referent, object_getClassName(referent));
            }
        }
        else {
            // Class does not implement allowsWeakReference: refuse.
            return nil;
        }
    }

    // now remember it and where it is being stored
    weak_entry_t *entry;
    if ((entry = weak_entry_for_referent(weak_table, referent))) {
        append_referrer(entry, referrer);
    }
    else {
        // First weak reference to this object: build an inline entry.
        weak_entry_t new_entry;
        new_entry.referent = referent;
        new_entry.out_of_line = 0;
        new_entry.inline_referrers[0] = referrer;
        for (size_t i = 1; i < WEAK_INLINE_COUNT; i++) {
            new_entry.inline_referrers[i] = nil;
        }

        weak_grow_maybe(weak_table);
        weak_entry_insert(weak_table, &new_entry);
    }

    // Do not set *referrer. objc_storeWeak() requires that the
    // value not change.

    return referent;
}
/** 
 * Called by dealloc; nils out all weak pointers that point to the 
 * provided object so that they can no longer be used.
 * 
 * @param weak_table The global weak table.
 * @param referent The object being deallocated. 
 */
void
weak_clear_no_lock(weak_table_t *weak_table, id referent)
{
    weak_entry_t *entry = weak_entry_for_referent(weak_table, referent);
    if (entry == nil) {
        /// XXX shouldn't happen, but does with mismatched CF/objc
        //printf("XXX no entry for clear deallocating %p\n", referent);
        return;
    }

    // zero out references
    weak_referrer_t *referrers;
    size_t count;

    if (entry->out_of_line) {
        referrers = entry->referrers;
        count = TABLE_SIZE(entry);
    }
    else {
        referrers = entry->inline_referrers;
        count = WEAK_INLINE_COUNT;
    }

    for (size_t i = 0; i < count; ++i) {
        id *referrer = referrers[i];
        if (referrer) {
            if (*referrer == referent) {
                *referrer = nil;
            }
            else if (*referrer) {
                // The slot was repointed without unregistering: a
                // weak-reference bookkeeping bug somewhere else.
                _objc_inform("__weak variable @ %p holds %p instead of %p\n", referrer, (void*)*referrer, (void*)referent);
            }
        }
    }

    weak_entry_remove(weak_table, entry);
}
447 * This function gets called when the value of a weak pointer is being
448 * used in an expression. Called by objc_loadWeakRetained() which is
449 * ultimately called by objc_loadWeak(). The objective is to assert that
450 * there is in fact a weak pointer(s) entry for this particular object being
451 * stored in the weak-table, and to retain that object so it is not deallocated
452 * during the weak pointer's usage.
455 * @param referrer The weak pointer address.
458 weak_read_no_lock(weak_table_t *weak_table, id *referrer)
460 id referent = *referrer;
461 if (referent->isTaggedPointer()) return referent;
464 if (referent == nil || !(entry = weak_entry_for_referent(weak_table, referent))) {
469 BOOL (*tryRetain)(id, SEL) = (BOOL(*)(id, SEL))
470 object_getMethodImplementation(referent,
471 @selector(retainWeakReference));
472 if ((IMP)tryRetain == _objc_msgForward) {
476 if (! (*tryRetain)(referent, @selector(retainWeakReference))) {