// runtime/objc-weak.mm (from the objc4-493.9 source release)
/*
 * Copyright (c) 2010-2011 Apple Inc. All rights reserved.
 *
 * @APPLE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this
 * file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_LICENSE_HEADER_END@
 */
23
24 #import "objc-weak.h"
25 #import "objc-os.h"
26 #import "objc-private.h"
27
28 #import <stdint.h>
29 #import <stdbool.h>
30 #import <sys/types.h>
31 #import <libkern/OSAtomic.h>
32
33
34 template <typename T> struct WeakAllocator {
35 typedef T value_type;
36 typedef value_type* pointer;
37 typedef const value_type *const_pointer;
38 typedef value_type& reference;
39 typedef const value_type& const_reference;
40 typedef size_t size_type;
41 typedef ptrdiff_t difference_type;
42
43 template <typename U> struct rebind { typedef WeakAllocator<U> other; };
44
45 template <typename U> WeakAllocator(const WeakAllocator<U>&) {}
46 WeakAllocator() {}
47 WeakAllocator(const WeakAllocator&) {}
48 ~WeakAllocator() {}
49
50 pointer address(reference x) const { return &x; }
51 const_pointer address(const_reference x) const {
52 return x;
53 }
54
55 pointer allocate(size_type n, const_pointer = 0) {
56 return static_cast<pointer>(::_malloc_internal(n * sizeof(T)));
57 }
58
59 void deallocate(pointer p, size_type) { ::_free_internal(p); }
60
61 size_type max_size() const {
62 return static_cast<size_type>(-1) / sizeof(T);
63 }
64
65 void construct(pointer p, const value_type& x) {
66 new(p) value_type(x);
67 }
68
69 void destroy(pointer p) { p->~value_type(); }
70
71 void operator=(const WeakAllocator&);
72
73 };
74
// Range: a half-open span of memory [address, end).
class Range {
private:
    void *_address;    // start of range
    void *_end;        // end of the range (one byte beyond last usable space)
public:
    // Returns address shifted by offset bytes.
    static void *displace(void *address, ptrdiff_t offset) { return (void *)((char *)address + offset); }

    //
    // Constructors
    //
    Range() : _address(NULL), _end(NULL) {}
    Range(void *address) : _address(address), _end(address) {}
    Range(void *address, void *end) : _address(address), _end(end) {}
    Range(void *address, size_t size) : _address(address), _end(displace(address, size)) {}

    //
    // Accessors
    //
    // Note: meaningless top-level const on by-value return types removed
    // (it has no effect and triggers compiler warnings).
    inline Range& range() { return *this; }
    inline void *address() const { return _address; }
    inline void *end() const { return _end; }
    inline size_t size() const { return (uintptr_t)_end - (uintptr_t)_address; }
    inline void set_address(void *address) { _address = address; }
    inline void set_end(void *end) { _end = end; }
    inline void set_size(size_t size) { _end = displace(_address, size); }
    inline void set_range(void *address, void *end) { _address = address; _end = end; }
    inline void set_range(void *address, size_t size) { _address = address; _end = displace(address, size); }
    inline void set_range(Range range) { _address = range.address(); _end = range.end(); }
    inline void adjust_address(intptr_t delta) { _address = displace(_address, delta); }
    inline void adjust_end(intptr_t delta) { _end = displace(_end, delta); }
    inline void adjust(intptr_t delta) { _address = displace(_address, delta), _end = displace(_end, delta); }


    //
    // is_empty
    //
    // Returns true if the range is empty.
    //
    inline bool is_empty() { return _address == _end; }


    //
    // in_range
    //
    // Returns true if the specified address is in range.
    // This form reduces the number of branches. Works well with invariant lo and hi.
    //
    static inline bool in_range(void *lo, void *hi, void *address) {
        uintptr_t lo_as_int = (uintptr_t)lo;
        uintptr_t hi_as_int = (uintptr_t)hi;
        uintptr_t diff = hi_as_int - lo_as_int;
        uintptr_t address_as_int = (uintptr_t)address;
        // Single unsigned compare covers both bounds (relies on wraparound).
        return (address_as_int - lo_as_int) < diff;
    }
    inline bool in_range(void *address) const { return in_range(_address, _end, address); }


    //
    // operator ==
    //
    // Used to locate entry in list or hash table (use is_range for exact match.)
    inline bool operator==(const Range *range) const { return _address == range->_address; }
    inline bool operator==(const Range &range) const { return _address == range._address; }


    //
    // is_range
    //
    // Return true if the ranges are equivalent.
    //
    inline bool is_range(const Range& range) const { return _address == range._address && _end == range._end; }


    //
    // clear
    //
    // Initialize the range to zero.
    //
    inline void clear() { bzero(address(), size()); }

    //
    // expand_range
    //
    // Expand the bounds with the specified range.
    //
    inline void expand_range(void *address) {
        if (_address > address) _address = address;
        if (_end < address) _end = address;
    }
    inline void expand_range(Range& range) {
        expand_range(range.address());
        expand_range(range.end());
    }


    //
    // relative_address
    //
    // Converts an absolute address to an address relative to this address.
    //
    inline void *relative_address(void *address) const { return (void *)((uintptr_t)address - (uintptr_t)_address); }


    //
    // absolute_address
    //
    // Converts an address relative to this address to an absolute address.
    //
    inline void *absolute_address(void *address) const { return (void *)((uintptr_t)address + (uintptr_t)_address); }
};
185
186
187 template<> struct WeakAllocator<void> {
188 typedef void value_type;
189 typedef void* pointer;
190 typedef const void *const_pointer;
191 template <typename U> struct rebind { typedef WeakAllocator<U> other; };
192 };
193
194 typedef std::pair<id, id *> WeakPair;
195 typedef std::vector<WeakPair, WeakAllocator<WeakPair> > WeakPairVector;
196 typedef std::vector<weak_referrer_t, WeakAllocator<WeakPair> > WeakReferrerVector;
197
198 static void append_referrer_no_lock(weak_referrer_array_t *list, id *new_referrer);
199
// Hash a pointer value for use as a table key.
// Mixing function borrowed from CFSet.c (Bob Jenkins-style 3-word mix).
static inline uintptr_t hash_pointer(void *key) {
    uintptr_t x, y, z;

    // Code from CFSet.c
#if __LP64__
    x = 0x4368726973746F70ULL;
    y = 0x686572204B616E65ULL;
#else
    x = 0x4B616E65UL;
    y = 0x4B616E65UL;
#endif
    z = 1;
    x += (uintptr_t)key;
#if __LP64__
    x -= y; x -= z; x ^= (z >> 43);
    y -= z; y -= x; y ^= (x << 9);
    z -= x; z -= y; z ^= (y >> 8);
    x -= y; x -= z; x ^= (z >> 38);
    y -= z; y -= x; y ^= (x << 23);
    z -= x; z -= y; z ^= (y >> 5);
    x -= y; x -= z; x ^= (z >> 35);
    y -= z; y -= x; y ^= (x << 49);
    z -= x; z -= y; z ^= (y >> 11);
    x -= y; x -= z; x ^= (z >> 12);
    y -= z; y -= x; y ^= (x << 18);
    z -= x; z -= y; z ^= (y >> 22);
#else
    x -= y; x -= z; x ^= (z >> 13);
    y -= z; y -= x; y ^= (x << 8);
    z -= x; z -= y; z ^= (y >> 13);
    x -= y; x -= z; x ^= (z >> 12);
    y -= z; y -= x; y ^= (x << 16);
    z -= x; z -= y; z ^= (y >> 5);
    x -= y; x -= z; x ^= (z >> 3);
    y -= z; y -= x; y ^= (x << 10);
    z -= x; z -= y; z ^= (y >> 15);
#endif
    return z;
}
239
240 // Up until this size the weak referrer array grows one slot at a time. Above this size it grows by doubling.
241 #define WEAK_TABLE_DOUBLE_SIZE 8
242
243 // Grow the refs list. Rehashes the entries.
244 static void grow_refs(weak_referrer_array_t *list)
245 {
246 size_t old_num_allocated = list->num_allocated;
247 size_t num_refs = list->num_refs;
248 weak_referrer_t *old_refs = list->refs;
249 size_t new_allocated = old_num_allocated < WEAK_TABLE_DOUBLE_SIZE ? old_num_allocated + 1 : old_num_allocated + old_num_allocated;
250 list->refs = (weak_referrer_t *)_malloc_internal(new_allocated * sizeof(weak_referrer_t));
251 list->num_allocated = _malloc_size_internal(list->refs)/sizeof(weak_referrer_t);
252 bzero(list->refs, list->num_allocated * sizeof(weak_referrer_t));
253 // for larger tables drop one entry from the end to give an odd number of hash buckets for better hashing
254 if ((list->num_allocated > WEAK_TABLE_DOUBLE_SIZE) && !(list->num_allocated & 1)) list->num_allocated--;
255 list->num_refs = 0;
256 list->max_hash_displacement = 0;
257
258 size_t i;
259 for (i=0; i < old_num_allocated && num_refs > 0; i++) {
260 if (old_refs[i].referrer != NULL) {
261 append_referrer_no_lock(list, old_refs[i].referrer);
262 num_refs--;
263 }
264 }
265 if (old_refs)
266 _free_internal(old_refs);
267 }
268
269 // Add the given referrer to list
270 // Does not perform duplicate checking.
271 static void append_referrer_no_lock(weak_referrer_array_t *list, id *new_referrer)
272 {
273 if ((list->num_refs == list->num_allocated) || ((list->num_refs >= WEAK_TABLE_DOUBLE_SIZE) && (list->num_refs >= list->num_allocated * 2 / 3))) {
274 grow_refs(list);
275 }
276 size_t index = hash_pointer(new_referrer) % list->num_allocated, hash_displacement = 0;
277 while (list->refs[index].referrer != NULL) {
278 index++;
279 hash_displacement++;
280 if (index == list->num_allocated)
281 index = 0;
282 }
283 if (list->max_hash_displacement < hash_displacement) {
284 list->max_hash_displacement = hash_displacement;
285 //malloc_printf("max_hash_displacement: %d allocated: %d\n", list->max_hash_displacement, list->num_allocated);
286 }
287 weak_referrer_t &ref = list->refs[index];
288 ref.referrer = new_referrer;
289 list->num_refs++;
290 }
291
292
293 // Remove old_referrer from list, if it's present.
294 // Does not remove duplicates.
295 // fixme this is slow if old_referrer is not present.
296 static void remove_referrer_no_lock(weak_referrer_array_t *list, id *old_referrer)
297 {
298 size_t index = hash_pointer(old_referrer) % list->num_allocated;
299 size_t start_index = index, hash_displacement = 0;
300 while (list->refs[index].referrer != old_referrer) {
301 index++;
302 hash_displacement++;
303 if (index == list->num_allocated)
304 index = 0;
305 if (index == start_index || hash_displacement > list->max_hash_displacement) {
306 malloc_printf("attempted to remove unregistered weak referrer %p\n", old_referrer);
307 return;
308 }
309 }
310 list->refs[index].referrer = NULL;
311 list->num_refs--;
312 }
313
314
315 // Add new_entry to the zone's table of weak references.
316 // Does not check whether the referent is already in the table.
317 // Does not update num_weak_refs.
318 static void weak_entry_insert_no_lock(weak_table_t *weak_table, weak_entry_t *new_entry)
319 {
320 weak_entry_t *weak_entries = weak_table->weak_entries;
321 assert(weak_entries != NULL);
322
323 size_t table_size = weak_table->max_weak_refs;
324 size_t hash_index = hash_pointer(new_entry->referent) % table_size;
325 size_t index = hash_index;
326
327 do {
328 weak_entry_t *entry = weak_entries + index;
329 if (entry->referent == NULL) {
330 *entry = *new_entry;
331 return;
332 }
333 index++; if (index == table_size) index = 0;
334 } while (index != hash_index);
335 malloc_printf("no room for new entry in auto weak ref table!\n");
336 }
337
338
339 // Remove entry from the zone's table of weak references, and rehash
340 // Does not update num_weak_refs.
341 static void weak_entry_remove_no_lock(weak_table_t *weak_table, weak_entry_t *entry)
342 {
343 // remove entry
344 entry->referent = NULL;
345 if (entry->referrers.refs) _free_internal(entry->referrers.refs);
346 entry->referrers.refs = NULL;
347 entry->referrers.num_refs = 0;
348 entry->referrers.num_allocated = 0;
349
350 // rehash after entry
351 weak_entry_t *weak_entries = weak_table->weak_entries;
352 size_t table_size = weak_table->max_weak_refs;
353 size_t hash_index = entry - weak_entries;
354 size_t index = hash_index;
355
356 if (!weak_entries) return;
357
358 do {
359 index++; if (index == table_size) index = 0;
360 if (!weak_entries[index].referent) return;
361 weak_entry_t entry = weak_entries[index];
362 weak_entries[index].referent = NULL;
363 weak_entry_insert_no_lock(weak_table, &entry);
364 } while (index != hash_index);
365 }
366
367
368 // Grow the given zone's table of weak references if it is full.
369 static void weak_grow_maybe_no_lock(weak_table_t *weak_table)
370 {
371 if (weak_table->num_weak_refs >= weak_table->max_weak_refs * 3 / 4) {
372 // grow table
373 size_t old_max = weak_table->max_weak_refs;
374 size_t new_max = old_max ? old_max * 2 + 1 : 15;
375 weak_entry_t *old_entries = weak_table->weak_entries;
376 weak_entry_t *new_entries = (weak_entry_t *)_calloc_internal(new_max, sizeof(weak_entry_t));
377 weak_table->max_weak_refs = new_max;
378 weak_table->weak_entries = new_entries;
379
380 if (old_entries) {
381 weak_entry_t *entry;
382 weak_entry_t *end = old_entries + old_max;
383 for (entry = old_entries; entry < end; entry++) {
384 weak_entry_insert_no_lock(weak_table, entry);
385 }
386 _free_internal(old_entries);
387 }
388 }
389 }
390
391 // Return the weak reference table entry for the given referent.
392 // If there is no entry for referent, return NULL.
393 static weak_entry_t *weak_entry_for_referent(weak_table_t *weak_table, id referent)
394 {
395 weak_entry_t *weak_entries = weak_table->weak_entries;
396
397 if (!weak_entries) return NULL;
398
399 size_t table_size = weak_table->max_weak_refs;
400 size_t hash_index = hash_pointer(referent) % table_size;
401 size_t index = hash_index;
402
403 do {
404 weak_entry_t *entry = weak_entries + index;
405 if (entry->referent == referent) return entry;
406 if (entry->referent == NULL) return NULL;
407 index++; if (index == table_size) index = 0;
408 } while (index != hash_index);
409
410 return NULL;
411 }
412
// Unregister an already-registered weak reference.
// This is used when referrer's storage is about to go away, but referent
// isn't dead yet. (Otherwise, zeroing referrer later would be a
// bad memory access.)
// Does nothing if referent/referrer is not a currently active weak reference.
// Does not zero referrer.
// fixme currently requires old referent value to be passed in (lame)
// fixme unregistration should be automatic if referrer is collected
PRIVATE_EXTERN void weak_unregister_no_lock(weak_table_t *weak_table, id referent, id *referrer)
{
    weak_entry_t *entry = weak_entry_for_referent(weak_table, referent);
    if (entry) {
        remove_referrer_no_lock(&entry->referrers, referrer);
        // Discard the whole table entry once its last referrer is gone.
        if (entry->referrers.num_refs == 0) {
            weak_entry_remove_no_lock(weak_table, entry);
            weak_table->num_weak_refs--;
        }
    }

    // Do not set *referrer = NULL. objc_storeWeak() requires that the
    // value not change.
}
436
// Called while referent is deallocating: zero every __weak variable that
// still points at it, then remove its table entry.
PRIVATE_EXTERN void
arr_clear_deallocating(weak_table_t *weak_table, id referent) {
    weak_entry_t *entry = weak_entry_for_referent(weak_table, referent);
    if (entry == NULL) {
        /// XXX shouldn't happen, but does with mismatched CF/objc
        //printf("XXX no entry for clear deallocating %p\n", referent);
        return;
    }

    // zero out references
    // Fixed: index is size_t to match num_allocated's unsigned type
    // (the old `int` caused a signed/unsigned comparison).
    for (size_t i = 0; i < entry->referrers.num_allocated; ++i) {
        id *referrer = entry->referrers.refs[i].referrer;
        if (referrer) {
            if (*referrer == referent) {
                *referrer = nil;
            }
            else if (*referrer) {
                _objc_inform("__weak variable @ %p holds %p instead of %p\n", referrer, *referrer, referent);
            }
        }
    }

    weak_entry_remove_no_lock(weak_table, entry);
    weak_table->num_weak_refs--;
}
463
464
// Register the weak pair (referent, referrer) in the table.
// Returns referent, or NULL if the object does not implement
// allowsWeakReference (in which case nothing is registered).
// Dies if the object refuses weak references.
PRIVATE_EXTERN id weak_register_no_lock(weak_table_t *weak_table, id referent, id *referrer) {
    if (referent) {
        // ensure that the referenced object is viable
        BOOL (*allowsWeakReference)(id, SEL) = (BOOL(*)(id, SEL))
            class_getMethodImplementation(object_getClass(referent),
                                          @selector(allowsWeakReference));
        if ((IMP)allowsWeakReference == _objc_msgForward) {
            // Object does not respond; refuse to register.
            return NULL;
        }
        if (! (*allowsWeakReference)(referent, @selector(allowsWeakReference))) {
            _objc_fatal("cannot form weak reference to instance (%p) of class %s", referent, object_getClassName(referent));
        }

        // now remember it and where it is being stored
        weak_entry_t *entry = weak_entry_for_referent(weak_table, referent);
        if (entry) {
            append_referrer_no_lock(&entry->referrers, referrer);
        } else {
            // First weak reference to this referent: build a fresh entry.
            weak_entry_t new_entry;
            new_entry.referent = referent;
            new_entry.referrers.refs = NULL;
            new_entry.referrers.num_refs = 0;
            new_entry.referrers.num_allocated = 0;
            append_referrer_no_lock(&new_entry.referrers, referrer);
            weak_table->num_weak_refs++;
            weak_grow_maybe_no_lock(weak_table);
            weak_entry_insert_no_lock(weak_table, &new_entry);
        }
    }

    // Do not set *referrer. objc_storeWeak() requires that the
    // value not change.

    return referent;
}
502
503
504 // Automated Retain Release (ARR) support
505
// Read a __weak variable: load *referrer and attempt to retain the referent
// via retainWeakReference. Returns the referent on success. Returns NULL —
// zeroing *referrer — when there is no referent, no table entry, or the
// object does not implement retainWeakReference; returns NULL without
// touching *referrer when retainWeakReference itself fails.
PRIVATE_EXTERN id
arr_read_weak_reference(weak_table_t *weak_table, id *referrer) {
    // find entry and mark that it needs retaining
    id referent = *referrer;
    weak_entry_t *entry;
    if (referent == NULL || !(entry = weak_entry_for_referent(weak_table, referent))) {
        *referrer = NULL;
        return NULL;
    }

    BOOL (*tryRetain)(id, SEL) = (BOOL(*)(id, SEL))
        class_getMethodImplementation(object_getClass(referent),
                                      @selector(retainWeakReference));
    if ((IMP)tryRetain == _objc_msgForward) {
        *referrer = NULL;
        return NULL;
    }
    if (! (*tryRetain)(referent, @selector(retainWeakReference))) {
        // Referent is deallocating; the caller gets nothing.
        return NULL;
    }
    return referent;
}
535