apple/objc4.git: runtime/Accessors.subproj/objc-accessors.m (objc4-371)
/*
 * Copyright (c) 2006-2007 Apple Inc. All Rights Reserved.
 * 
 * @APPLE_LICENSE_HEADER_START@
 * 
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this
 * file.
 * 
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 * 
 * @APPLE_LICENSE_HEADER_END@
 */

#import <string.h>
#import <stddef.h>

#import <libkern/OSAtomic.h>

#import "objc-accessors.h"
#import <objc/objc-auto.h>
#import <objc/runtime.h>
#import "../objc-private.h"

#import "/usr/local/include/auto_zone.h"

#import "objc-accessors-table.h"

// stub interface declarations to make compiler happy.

@interface __NSCopyable
- (id)copyWithZone:(void *)zone;
@end

@interface __NSRetained
- (id)retain;
- (oneway void)release;
- (id)autorelease;
@end

static /*inline*/ IMP optimized_getter_for_gc(id self, SEL name, ptrdiff_t offset) {
    // replace this method with a faster version that does no message sends, and fewer tests.
    IMP getter = GETPROPERTY_IMP(offset);
    if (getter != NULL) {
        // HACK ALERT: replaces the IMP in the cache!
        Class cls = self->isa;
        Method method = class_getInstanceMethod(cls, name);
        if (method_getImplementation(method) != getter)
            method_setImplementation(method, getter);
    }
    return getter;
}

static /*inline*/ IMP optimized_setter_for_gc(id self, SEL name, ptrdiff_t offset) {
    // replace this method with a faster version that does no message sends.
    IMP setter = SETPROPERTY_IMP(offset);
    if (setter != NULL) {
        // HACK ALERT: replaces the IMP in the cache!
        Class cls = self->isa;
        Method method = class_getInstanceMethod(cls, name);
        if (method_getImplementation(method) != setter)
            method_setImplementation(method, setter);
    }
    return setter;
}
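
// Note (added, not part of the original source): GETPROPERTY_IMP/SETPROPERTY_IMP,
// presumably defined in objc-accessors-table.h, yield an implementation specialized
// for this ivar offset when one is available; the helpers above install it as the
// method's IMP so later calls under GC can skip the generic entry points. The newer
// objc_getProperty/objc_setProperty callers below currently compile this path out
// with "if (false)".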

// ATOMIC entry points

typedef uintptr_t spin_lock_t;
extern void _spin_lock(spin_lock_t *lockp);
extern int _spin_lock_try(spin_lock_t *lockp);
extern void _spin_unlock(spin_lock_t *lockp);

/* need to consider cache line contention - space locks out XXX */

#define GOODPOWER 7
#define GOODMASK ((1<<GOODPOWER)-1)
#define GOODHASH(x) (((long)x >> 5) & GOODMASK)
static spin_lock_t PropertyLocks[1 << GOODPOWER] = { 0 };

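// Note (added illustration, not part of the original source): GOODHASH drops the
// low 5 alignment bits of a slot address and folds the rest into one of the 128
// locks above, so unrelated atomic properties usually take different spin locks.
// For example, a hypothetical slot at 0x100a2f4e0 maps to lock index
// ((0x100a2f4e0 >> 5) & 0x7f) == 0x27.
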
id objc_getProperty(id self, SEL _cmd, ptrdiff_t offset, BOOL atomic) {
    if (UseGC) {
        // FIXME: we could optimize getters when a class is first initialized, then KVO won't get confused.
        if (false) {
            IMP getter = optimized_getter_for_gc(self, _cmd, offset);
            if (getter) return getter(self, _cmd);
        }
        return *(id*) ((char*)self + offset);
    }

    // Retain release world
    id *slot = (id*) ((char*)self + offset);
    if (!atomic) return *slot;

    // Atomic retain release world
    spin_lock_t *slotlock = &PropertyLocks[GOODHASH(slot)];
    _spin_lock(slotlock);
    id value = [*slot retain];
    _spin_unlock(slotlock);

    // for performance, we (safely) issue the autorelease OUTSIDE of the spinlock.
    return [value autorelease];
}

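// Usage sketch (added illustration, not part of the original source): the class
// and ivar names below are hypothetical, but a compiler-synthesized getter for an
// atomic retain property is expected to funnel through objc_getProperty() above,
// roughly like this:
#if 0
@interface MyObject : NSObject { NSString *_title; }
@property(retain) NSString *title;          // atomic by default
@end

@implementation MyObject
- (NSString *)title {
    ptrdiff_t offset = ivar_getOffset(class_getInstanceVariable([self class], "_title"));
    return objc_getProperty(self, _cmd, offset, YES /*atomic*/);
}
@end
#endif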

void objc_setProperty(id self, SEL _cmd, ptrdiff_t offset, id newValue, BOOL atomic, BOOL shouldCopy) {
    if (UseGC) {
        if (shouldCopy) {
            newValue = [newValue copyWithZone:NULL];
        }
        else if (false) {
            IMP setter = optimized_setter_for_gc(self, _cmd, offset);
            if (setter) {
                setter(self, _cmd, newValue);
                return;
            }
        }
        objc_assign_ivar_internal(newValue, self, offset);
        return;
    }

    // Retain release world
    id oldValue, *slot = (id*) ((char*)self + offset);

    // atomic or not, if slot would be unchanged, do nothing.
    if (!shouldCopy && *slot == newValue) return;

    newValue = (shouldCopy ? [newValue copyWithZone:NULL] : [newValue retain]);

    if (!atomic) {
        oldValue = *slot;
        *slot = newValue;
    } else {
        spin_lock_t *slotlock = &PropertyLocks[GOODHASH(slot)];
        _spin_lock(slotlock);
        oldValue = *slot;
        *slot = newValue;
        _spin_unlock(slotlock);
    }

    [oldValue release];
}

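// Usage sketch (added illustration, not part of the original source): for a
// copy property, the compiler-synthesized setter is expected to route through
// objc_setProperty() above with shouldCopy == YES; the names are hypothetical:
#if 0
- (void)setTitle:(NSString *)newTitle {
    ptrdiff_t offset = ivar_getOffset(class_getInstanceVariable([self class], "_title"));
    objc_setProperty(self, _cmd, offset, newTitle, YES /*atomic*/, YES /*shouldCopy*/);
}
#endif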

__private_extern__ auto_zone_t *gc_zone;

// This entry point was designed wrong. When it is used as a getter, src also needs
// to be locked, because a simultaneous setter call could be writing src at the
// same time. So we need two locks - one of which will be contended.
void objc_copyStruct(void *dest, const void *src, ptrdiff_t size, BOOL atomic, BOOL hasStrong) {
    static spin_lock_t StructLocks[1 << GOODPOWER] = { 0 };
    spin_lock_t *lockfirst = NULL;
    spin_lock_t *locksecond = NULL;
    if (atomic) {
        lockfirst = &StructLocks[GOODHASH(src)];
        locksecond = &StructLocks[GOODHASH(dest)];
        // order the locks by address so that we don't deadlock
        if (lockfirst > locksecond) {
            lockfirst = locksecond;
            locksecond = &StructLocks[GOODHASH(src)];
        }
        else if (lockfirst == locksecond) {
            // lucky - we only need one lock
            locksecond = NULL;
        }
        _spin_lock(lockfirst);
        if (locksecond) _spin_lock(locksecond);
    }
    if (UseGC && hasStrong) {
        auto_zone_write_barrier_memmove(gc_zone, dest, src, size);
    }
    else {
        memmove(dest, src, size);
    }
    if (atomic) {
        _spin_unlock(lockfirst);
        if (locksecond) _spin_unlock(locksecond);
    }
}

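// Usage sketch (added illustration, not part of the original source): accessors
// for an atomic struct-valued property can funnel through objc_copyStruct() above
// so the whole struct is read or written under the striped locks; the property
// and ivar names are hypothetical:
#if 0
- (NSRange)selectedRange {
    NSRange value;
    objc_copyStruct(&value, &_selectedRange, sizeof(NSRange), YES /*atomic*/, NO /*hasStrong*/);
    return value;
}

- (void)setSelectedRange:(NSRange)newRange {
    objc_copyStruct(&_selectedRange, &newRange, sizeof(NSRange), YES /*atomic*/, NO /*hasStrong*/);
}
#endif
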
// PRE-ATOMIC entry points

id <NSCopying> object_getProperty_bycopy(id self, SEL _cmd, ptrdiff_t offset) {
    if (UseGC) {
        IMP getter = optimized_getter_for_gc(self, _cmd, offset);
        if (getter) return getter(self, _cmd);
    }
    id *slot = (id*) ((char*)self + offset);
    return *slot;
}

void object_setProperty_bycopy(id self, SEL _cmd, id <NSCopying> value, ptrdiff_t offset) {
    id *slot = (id*) ((char*)self + offset);
    id oldValue = *slot;
    objc_assign_ivar_internal([value copyWithZone:NULL], self, offset);
    [oldValue release];
}

id object_getProperty_byref(id self, SEL _cmd, ptrdiff_t offset) {
    if (UseGC) {
        IMP getter = optimized_getter_for_gc(self, _cmd, offset);
        if (getter) return getter(self, _cmd);
    }
    id *slot = (id*) ((char*)self + offset);
    return *slot;
}

void object_setProperty_byref(id self, SEL _cmd, id value, ptrdiff_t offset) {
    if (UseGC) {
        IMP setter = optimized_setter_for_gc(self, _cmd, offset);
        if (setter) {
            setter(self, _cmd, value);
            return;
        }
    }
    id *slot = (id*) ((char*)self + offset);
    id oldValue = *slot;
    if (oldValue != value) {
        objc_assign_ivar_internal([value retain], self, offset);
        [oldValue release];
    }
}