]> git.saurik.com Git - apple/cf.git/blob - CFArray.c
CF-550.tar.gz
[apple/cf.git] / CFArray.c
1 /*
2 * Copyright (c) 2009 Apple Inc. All rights reserved.
3 *
4 * @APPLE_LICENSE_HEADER_START@
5 *
6 * This file contains Original Code and/or Modifications of Original Code
7 * as defined in and that are subject to the Apple Public Source License
8 * Version 2.0 (the 'License'). You may not use this file except in
9 * compliance with the License. Please obtain a copy of the License at
10 * http://www.opensource.apple.com/apsl/ and read it before using this
11 * file.
12 *
13 * The Original Code and all software distributed under the License are
14 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
15 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
16 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
18 * Please see the License for the specific language governing rights and
19 * limitations under the License.
20 *
21 * @APPLE_LICENSE_HEADER_END@
22 */
23 /* CFArray.c
24 Copyright (c) 1998-2009, Apple Inc. All rights reserved.
25 Responsibility: Christopher Kane
26 */
27
28 #include <CoreFoundation/CFArray.h>
29 #include "CFStorage.h"
30 #include <CoreFoundation/CFPriv.h>
31 #include "CFInternal.h"
32 #include <string.h>
33 #if DEPLOYMENT_TARGET_MACOSX || DEPLOYMENT_TARGET_EMBEDDED
34 #include <libkern/OSAtomic.h>
35 #endif
36
/* Forward declaration from CFStorage: marks a storage's contents as weak
   (unscanned) for the garbage collector. */
__private_extern__ void _CFStorageSetWeak(CFStorageRef storage);

/* Standard callbacks for arrays of CFTypeRefs: retain/release via the
   CF-type collection shims, description via CFCopyDescription, equality
   via CFEqual. */
const CFArrayCallBacks kCFTypeArrayCallBacks = {0, __CFTypeCollectionRetain, __CFTypeCollectionRelease, CFCopyDescription, CFEqual};
/* All-NULL callbacks: values are stored as raw pointers and are never
   retained, released, described, or compared through a callback. */
static const CFArrayCallBacks __kCFNullArrayCallBacks = {0, NULL, NULL, NULL, NULL};

/* One slot of array storage; holds a single value pointer. */
struct __CFArrayBucket {
    const void *_item;
};

enum {
    /* Upper bound on a deque's bucket capacity; see __CFArrayDequeRoundUpCapacity. */
    __CF_MAX_BUCKETS_PER_DEQUE = 262140
};
49
50 CF_INLINE CFIndex __CFArrayDequeRoundUpCapacity(CFIndex capacity) {
51 if (capacity < 4) return 4;
52 return __CFMin((1 << flsl(capacity)), __CF_MAX_BUCKETS_PER_DEQUE);
53 }
54
/* Header of the deque backing store used by small mutable arrays.
   Buckets follow this header in the same allocation; the live elements
   occupy buckets starting at _leftIdx. */
struct __CFArrayDeque {
    uint32_t _leftIdx;   /* index of the first live bucket after the header */
    uint32_t _capacity;  /* total buckets allocated */
    int32_t _bias;       /* NOTE(review): not read anywhere in this chunk; presumably insertion-bias bookkeeping — confirm against the rest of the file */
#if __LP64__
    uint32_t _pad;   // GC: pointers must be 8-byte aligned for the collector to find them.
#endif
    /* struct __CFArrayBucket buckets follow here */
};

/* CFArray instance layout.  Immutable arrays store buckets inline after
   this header; mutable arrays point at a deque or CFStorage via _store. */
struct __CFArray {
    CFRuntimeBase _base;
    CFIndex _count;         /* number of objects */
    CFIndex _mutations;     /* bumped on each mutation; fast enumeration watches it */
    int32_t _mutInProgress; /* nesting counter for the (compiled-out) mutation diagnostics */
    void *_store;           /* can be NULL when MutableDeque */
};

/* Flag bits */
enum { /* Bits 0-1: storage representation */
    __kCFArrayImmutable = 0,
    __kCFArrayDeque = 2,    /* mutable, deque-backed */
    __kCFArrayStorage = 3   /* mutable, CFStorage-backed */
};

enum { /* Bits 2-3: which callback table the array uses */
    __kCFArrayHasNullCallBacks = 0,
    __kCFArrayHasCFTypeCallBacks = 1,
    __kCFArrayHasCustomCallBacks = 3 /* callbacks are at end of header */
};

/*
 Bits 4 & 5 are reserved for GC use.
 Bit 4, if set, indicates that the array is weak.
 Bit 5 marks whether finalization has occured and, thus, whether to continue to do special retain/release processing of elements.
 */
91
/* GC: true when the array holds strong references (info bit 4 clear). */
CF_INLINE bool isStrongMemory(CFTypeRef collection) {
    return __CFBitfieldGetValue(((const CFRuntimeBase *)collection)->_cfinfo[CF_INFO_BITS], 4, 4) == 0;
}

/* GC: true when the array holds weak references (info bit 4 set). */
CF_INLINE bool isWeakMemory(CFTypeRef collection) {
    return __CFBitfieldGetValue(((const CFRuntimeBase *)collection)->_cfinfo[CF_INFO_BITS], 4, 4) != 0;
}

/* GC: true once finalization has begun (info bit 5 set); element
   retain/release processing is skipped after that. */
CF_INLINE bool hasBeenFinalized(CFTypeRef collection) {
    return __CFBitfieldGetValue(((const CFRuntimeBase *)collection)->_cfinfo[CF_INFO_BITS], 5, 5) != 0;
}

/* GC: records that finalization has begun (sets info bit 5). */
CF_INLINE void markFinalized(CFTypeRef collection) {
    __CFBitfieldSetValue(((CFRuntimeBase *)collection)->_cfinfo[CF_INFO_BITS], 5, 5, 1);
}

/* Returns the storage representation (bits 0-1): immutable, deque, or storage. */
CF_INLINE CFIndex __CFArrayGetType(CFArrayRef array) {
    return __CFBitfieldGetValue(((const CFRuntimeBase *)array)->_cfinfo[CF_INFO_BITS], 1, 0);
}
111
112 CF_INLINE CFIndex __CFArrayGetSizeOfType(CFIndex t) {
113 CFIndex size = 0;
114 size += sizeof(struct __CFArray);
115 if (__CFBitfieldGetValue(t, 3, 2) == __kCFArrayHasCustomCallBacks) {
116 size += sizeof(CFArrayCallBacks);
117 }
118 return size;
119 }
120
/* Raw count accessor: no type validation, no ObjC dispatch. */
CF_INLINE CFIndex __CFArrayGetCount(CFArrayRef array) {
    return array->_count;
}

/* Sets the count directly (casting away the parameter's const-ness). */
CF_INLINE void __CFArraySetCount(CFArrayRef array, CFIndex v) {
    ((struct __CFArray *)array)->_count = v;
}
128
129 /* Only applies to immutable and mutable-deque-using arrays;
130 * Returns the bucket holding the left-most real value in the latter case. */
131 CF_INLINE struct __CFArrayBucket *__CFArrayGetBucketsPtr(CFArrayRef array) {
132 switch (__CFArrayGetType(array)) {
133 case __kCFArrayImmutable:
134 return (struct __CFArrayBucket *)((uint8_t *)array + __CFArrayGetSizeOfType(((CFRuntimeBase *)array)->_cfinfo[CF_INFO_BITS]));
135 case __kCFArrayDeque: {
136 struct __CFArrayDeque *deque = (struct __CFArrayDeque *)array->_store;
137 return (struct __CFArrayBucket *)((uint8_t *)deque + sizeof(struct __CFArrayDeque) + deque->_leftIdx * sizeof(struct __CFArrayBucket));
138 }
139 }
140 return NULL;
141 }
142
143 /* This shouldn't be called if the array count is 0. */
144 CF_INLINE struct __CFArrayBucket *__CFArrayGetBucketAtIndex(CFArrayRef array, CFIndex idx) {
145 switch (__CFArrayGetType(array)) {
146 case __kCFArrayImmutable:
147 case __kCFArrayDeque:
148 return __CFArrayGetBucketsPtr(array) + idx;
149 case __kCFArrayStorage: {
150 CFStorageRef store = (CFStorageRef)array->_store;
151 return (struct __CFArrayBucket *)CFStorageGetValueAtIndex(store, idx, NULL);
152 }
153 }
154 return NULL;
155 }
156
157 CF_INLINE CFArrayCallBacks *__CFArrayGetCallBacks(CFArrayRef array) {
158 CFArrayCallBacks *result = NULL;
159 switch (__CFBitfieldGetValue(((const CFRuntimeBase *)array)->_cfinfo[CF_INFO_BITS], 3, 2)) {
160 case __kCFArrayHasNullCallBacks:
161 return (CFArrayCallBacks *)&__kCFNullArrayCallBacks;
162 case __kCFArrayHasCFTypeCallBacks:
163 return (CFArrayCallBacks *)&kCFTypeArrayCallBacks;
164 case __kCFArrayHasCustomCallBacks:
165 break;
166 }
167 switch (__CFArrayGetType(array)) {
168 case __kCFArrayImmutable:
169 result = (CFArrayCallBacks *)((uint8_t *)array + sizeof(struct __CFArray));
170 break;
171 case __kCFArrayDeque:
172 case __kCFArrayStorage:
173 result = (CFArrayCallBacks *)((uint8_t *)array + sizeof(struct __CFArray));
174 break;
175 }
176 return result;
177 }
178
179 CF_INLINE bool __CFArrayCallBacksMatchNull(const CFArrayCallBacks *c) {
180 return (NULL == c ||
181 (c->retain == __kCFNullArrayCallBacks.retain &&
182 c->release == __kCFNullArrayCallBacks.release &&
183 c->copyDescription == __kCFNullArrayCallBacks.copyDescription &&
184 c->equal == __kCFNullArrayCallBacks.equal));
185 }
186
187 CF_INLINE bool __CFArrayCallBacksMatchCFType(const CFArrayCallBacks *c) {
188 return (&kCFTypeArrayCallBacks == c ||
189 (c->retain == kCFTypeArrayCallBacks.retain &&
190 c->release == kCFTypeArrayCallBacks.release &&
191 c->copyDescription == kCFTypeArrayCallBacks.copyDescription &&
192 c->equal == kCFTypeArrayCallBacks.equal));
193 }
194
/* Mutation diagnostics, currently compiled out (#if 0): when enabled,
   BEGIN/END_MUTATION atomically track a nesting counter in _mutInProgress
   and CHECK_FOR_MUTATION logs API calls that overlap a mutation. */
#if 0
#define CHECK_FOR_MUTATION(A) do { if ((A)->_mutInProgress) CFLog(3, CFSTR("*** %s: function called while the array (%p) is being mutated in this or another thread"), __PRETTY_FUNCTION__, (A)); } while (0)
#define BEGIN_MUTATION(A) do { OSAtomicAdd32Barrier(1, &((struct __CFArray *)(A))->_mutInProgress); } while (0)
#define END_MUTATION(A) do { OSAtomicAdd32Barrier(-1, &((struct __CFArray *)(A))->_mutInProgress); } while (0)
#else
#define CHECK_FOR_MUTATION(A) do { } while (0)
#define BEGIN_MUTATION(A) do { } while (0)
#define END_MUTATION(A) do { } while (0)
#endif

/* Context handed to __CFArrayStorageRelease: the release callback plus the
   allocator to pass it. */
struct _releaseContext {
    void (*release)(CFAllocatorRef, const void *);
    CFAllocatorRef allocator;
};
209
/* CFStorage applier: releases one stored value and NULLs its slot. */
static void __CFArrayStorageRelease(const void *itemptr, void *context) {
    struct _releaseContext *rc = (struct _releaseContext *)context;
    INVOKE_CALLBACK2(rc->release, rc->allocator, *(const void **)itemptr);
    *(const void **)itemptr = NULL; // GC: clear item to break strong reference.
}
215
/* Releases (and NULLs) the values in 'range'.  When 'releaseStorageIfPossible'
   is set and the range covers the whole array, the backing store is freed as
   well: the count is zeroed, _store is cleared, and a storage-backed array
   drops back to the (empty) deque representation.  Once the array has been
   finalized (GC), element release/zeroing is skipped entirely. */
static void __CFArrayReleaseValues(CFArrayRef array, CFRange range, bool releaseStorageIfPossible) {
    const CFArrayCallBacks *cb = __CFArrayGetCallBacks(array);
    CFAllocatorRef allocator;
    CFIndex idx;
    switch (__CFArrayGetType(array)) {
    case __kCFArrayImmutable:
        if (NULL != cb->release && 0 < range.length && !hasBeenFinalized(array)) {
            // if we've been finalized then we know that
            //   1) we're using the standard callback on GC memory
            //   2) the slots don't' need to be zeroed
            struct __CFArrayBucket *buckets = __CFArrayGetBucketsPtr(array);
            allocator = __CFGetAllocator(array);
            for (idx = 0; idx < range.length; idx++) {
                INVOKE_CALLBACK2(cb->release, allocator, buckets[idx + range.location]._item);
                buckets[idx + range.location]._item = NULL; // GC: break strong reference.
            }
        }
        break;
    case __kCFArrayDeque: {
        struct __CFArrayDeque *deque = (struct __CFArrayDeque *)array->_store;
        if (0 < range.length && NULL != deque && !hasBeenFinalized(array)) {
            struct __CFArrayBucket *buckets = __CFArrayGetBucketsPtr(array);
            if (NULL != cb->release) {
                allocator = __CFGetAllocator(array);
                for (idx = 0; idx < range.length; idx++) {
                    INVOKE_CALLBACK2(cb->release, allocator, buckets[idx + range.location]._item);
                    buckets[idx + range.location]._item = NULL; // GC: break strong reference.
                }
            } else {
                /* No release callback: still zero the slots for the collector. */
                for (idx = 0; idx < range.length; idx++) {
                    buckets[idx + range.location]._item = NULL; // GC: break strong reference.
                }
            }
        }
        if (releaseStorageIfPossible && 0 == range.location && __CFArrayGetCount(array) == range.length) {
            allocator = __CFGetAllocator(array);
            if (NULL != deque) _CFAllocatorDeallocateGC(allocator, deque);
            __CFArraySetCount(array, 0);  // GC: _count == 0 ==> _store == NULL.
            ((struct __CFArray *)array)->_store = NULL;
        }
        break;
    }
    case __kCFArrayStorage: {
        CFStorageRef store = (CFStorageRef)array->_store;
        if (NULL != cb->release && 0 < range.length && !hasBeenFinalized(array)) {
            struct _releaseContext context;
            allocator = __CFGetAllocator(array);
            context.release = cb->release;
            context.allocator = allocator;
            CFStorageApplyFunction(store, range, __CFArrayStorageRelease, &context);
        }
        if (releaseStorageIfPossible && 0 == range.location && __CFArrayGetCount(array) == range.length) {
            _CFReleaseGC(store);
            __CFArraySetCount(array, 0);  // GC: _count == 0 ==> _store == NULL.
            ((struct __CFArray *)array)->_store = NULL;
            /* Empty again: revert to the deque representation. */
            __CFBitfieldSetValue(((CFRuntimeBase *)array)->_cfinfo[CF_INFO_BITS], 1, 0, __kCFArrayDeque);
        }
        break;
    }
    }
}
277
/* Debug-only range validation: asserts that 'range' lies within the array's
   bounds and has non-negative length.  Expands to nothing in release builds. */
#if defined(DEBUG)
CF_INLINE void __CFArrayValidateRange(CFArrayRef array, CFRange range, const char *func) {
    CFAssert3(0 <= range.location && range.location <= CFArrayGetCount(array), __kCFLogAssertion, "%s(): range.location index (%d) out of bounds (0, %d)", func, range.location, CFArrayGetCount(array));
    CFAssert2(0 <= range.length, __kCFLogAssertion, "%s(): range.length (%d) cannot be less than zero", func, range.length);
    CFAssert3(range.location + range.length <= CFArrayGetCount(array), __kCFLogAssertion, "%s(): ending index (%d) out of bounds (0, %d)", func, range.location + range.length, CFArrayGetCount(array));
}
#else
#define __CFArrayValidateRange(a,r,f)
#endif
287
288 static Boolean __CFArrayEqual(CFTypeRef cf1, CFTypeRef cf2) {
289 CFArrayRef array1 = (CFArrayRef)cf1;
290 CFArrayRef array2 = (CFArrayRef)cf2;
291 const CFArrayCallBacks *cb1, *cb2;
292 CFIndex idx, cnt;
293 if (array1 == array2) return true;
294 cnt = __CFArrayGetCount(array1);
295 if (cnt != __CFArrayGetCount(array2)) return false;
296 cb1 = __CFArrayGetCallBacks(array1);
297 cb2 = __CFArrayGetCallBacks(array2);
298 if (cb1->equal != cb2->equal) return false;
299 if (0 == cnt) return true; /* after function comparison! */
300 for (idx = 0; idx < cnt; idx++) {
301 const void *val1 = __CFArrayGetBucketAtIndex(array1, idx)->_item;
302 const void *val2 = __CFArrayGetBucketAtIndex(array2, idx)->_item;
303 if (val1 != val2) {
304 if (NULL == cb1->equal) return false;
305 if (!INVOKE_CALLBACK2(cb1->equal, val1, val2)) return false;
306 }
307 }
308 return true;
309 }
310
/* CFHash callback: arrays hash by count alone, so equal arrays hash equally
   without touching element hashes. */
static CFHashCode __CFArrayHash(CFTypeRef cf) {
    CFArrayRef array = (CFArrayRef)cf;
    return __CFArrayGetCount(array);
}
315
/* CFCopyDescription callback: builds a multi-line description naming the
   representation (immutable / mutable-small / mutable-large) followed by
   one line per element, using each value's copyDescription callback when
   available and the raw pointer otherwise. */
static CFStringRef __CFArrayCopyDescription(CFTypeRef cf) {
    CFArrayRef array = (CFArrayRef)cf;
    CFMutableStringRef result;
    const CFArrayCallBacks *cb;
    CFAllocatorRef allocator;
    CFIndex idx, cnt;
    cnt = __CFArrayGetCount(array);
    allocator = CFGetAllocator(array);
    result = CFStringCreateMutable(allocator, 0);
    switch (__CFArrayGetType(array)) {
    case __kCFArrayImmutable:
        CFStringAppendFormat(result, NULL, CFSTR("<CFArray %p [%p]>{type = immutable, count = %u, values = (%s"), cf, allocator, cnt, cnt ? "\n" : "");
        break;
    case __kCFArrayDeque:
        CFStringAppendFormat(result, NULL, CFSTR("<CFArray %p [%p]>{type = mutable-small, count = %u, values = (%s"), cf, allocator, cnt, cnt ? "\n" : "");
        break;
    case __kCFArrayStorage:
        CFStringAppendFormat(result, NULL, CFSTR("<CFArray %p [%p]>{type = mutable-large, count = %u, values = (%s"), cf, allocator, cnt, cnt ? "\n" : "");
        break;
    }
    cb = __CFArrayGetCallBacks(array);
    for (idx = 0; idx < cnt; idx++) {
        CFStringRef desc = NULL;
        const void *val = __CFArrayGetBucketAtIndex(array, idx)->_item;
        if (NULL != cb->copyDescription) {
            desc = (CFStringRef)INVOKE_CALLBACK1(cb->copyDescription, val);
        }
        if (NULL != desc) {
            CFStringAppendFormat(result, NULL, CFSTR("\t%u : %@\n"), idx, desc);
            CFRelease(desc);  /* copyDescription transfers ownership */
        } else {
            CFStringAppendFormat(result, NULL, CFSTR("\t%u : <%p>\n"), idx, val);
        }
    }
    CFStringAppend(result, CFSTR(")}"));
    return result;
}
353
354
/* Deallocation/finalization callback.  Under a collecting allocator:
   retain/release-free arrays leave their contents alone; arrays using the
   standard CFType release get each element released once and are marked
   finalized so later mutations skip retain/release.  All other cases fall
   through to a full __CFArrayReleaseValues, which also frees the store. */
static void __CFArrayDeallocate(CFTypeRef cf) {
    CFArrayRef array = (CFArrayRef)cf;
    BEGIN_MUTATION(array);
    // Under GC, keep contents alive when we know we can, either standard callbacks or NULL
    // if (__CFBitfieldGetValue(cf->info, 5, 4)) return; // bits only ever set under GC
    CFAllocatorRef allocator = __CFGetAllocator(array);
    if (CF_IS_COLLECTABLE_ALLOCATOR(allocator)) {
        // XXX_PCB keep array intact during finalization.
        const CFArrayCallBacks *cb = __CFArrayGetCallBacks(array);
        if (cb->retain == NULL && cb->release == NULL) {
            /* Nothing to release; the collector reclaims contents itself. */
            END_MUTATION(array);
            return;
        }
        if (cb == &kCFTypeArrayCallBacks || cb->release == kCFTypeArrayCallBacks.release) {
            /* Standard releases: release each element once, then stop special
               processing forever (markFinalized). */
            markFinalized(cf);
            for (CFIndex idx = 0; idx < __CFArrayGetCount(array); idx++) {
                const void *item = __CFArrayGetBucketAtIndex(array, 0 + idx)->_item;
                kCFTypeArrayCallBacks.release(kCFAllocatorSystemDefault, item);
            }
            END_MUTATION(array);
            return;
        }
    }
    __CFArrayReleaseValues(array, CFRangeMake(0, __CFArrayGetCount(array)), true);
    END_MUTATION(array);
}
381
/* Type ID assigned at registration time (see __CFArrayInitialize). */
static CFTypeID __kCFArrayTypeID = _kCFRuntimeNotATypeID;

/* Runtime class descriptor; _kCFRuntimeScannedObject asks the collector to
   scan instances for references. */
static const CFRuntimeClass __CFArrayClass = {
    _kCFRuntimeScannedObject,
    "CFArray",
    NULL, // init
    NULL, // copy
    __CFArrayDeallocate,
    __CFArrayEqual,
    __CFArrayHash,
    NULL, //
    __CFArrayCopyDescription
};
395
/* Registers the CFArray class with the CF runtime; called once at startup. */
__private_extern__ void __CFArrayInitialize(void) {
    __kCFArrayTypeID = _CFRuntimeRegisterClass(&__CFArrayClass);
}

/* Returns the type identifier recorded by __CFArrayInitialize. */
CFTypeID CFArrayGetTypeID(void) {
    return __kCFArrayTypeID;
}
403
/* Common constructor.  'flags' bits 0-1 select the representation; bits 2-3
   are filled in here to record which callback table applies.  Immutable
   arrays allocate their buckets inline, so 'capacity' sizes the instance;
   mutable arrays allocate their store lazily and ignore 'capacity' here.
   Returns NULL if instance creation fails. */
static CFArrayRef __CFArrayInit(CFAllocatorRef allocator, UInt32 flags, CFIndex capacity, const CFArrayCallBacks *callBacks) {
    struct __CFArray *memory;
    UInt32 size;
    __CFBitfieldSetValue(flags, 31, 2, 0);  /* keep only the representation bits */
    if (CF_IS_COLLECTABLE_ALLOCATOR(allocator)) {
        /* No retain/release callbacks under GC => store weak references. */
        if (!callBacks || (callBacks->retain == NULL && callBacks->release == NULL)) {
            __CFBitfieldSetValue(flags, 4, 4, 1); // setWeak
        }
    }
    if (__CFArrayCallBacksMatchNull(callBacks)) {
        __CFBitfieldSetValue(flags, 3, 2, __kCFArrayHasNullCallBacks);
    } else if (__CFArrayCallBacksMatchCFType(callBacks)) {
        __CFBitfieldSetValue(flags, 3, 2, __kCFArrayHasCFTypeCallBacks);
    } else {
        __CFBitfieldSetValue(flags, 3, 2, __kCFArrayHasCustomCallBacks);
    }
    size = __CFArrayGetSizeOfType(flags) - sizeof(CFRuntimeBase);
    switch (__CFBitfieldGetValue(flags, 1, 0)) {
    case __kCFArrayImmutable:
        size += capacity * sizeof(struct __CFArrayBucket);  /* inline buckets */
        break;
    case __kCFArrayDeque:
    case __kCFArrayStorage:
        break;
    }
    memory = (struct __CFArray*)_CFRuntimeCreateInstance(allocator, __kCFArrayTypeID, size, NULL);
    if (NULL == memory) {
        return NULL;
    }
    __CFBitfieldSetValue(memory->_base._cfinfo[CF_INFO_BITS], 6, 0, flags);
    __CFArraySetCount((CFArrayRef)memory, 0);
    switch (__CFBitfieldGetValue(flags, 1, 0)) {
    case __kCFArrayImmutable:
        if (isWeakMemory(memory)) {  // if weak, don't scan
            auto_zone_set_unscanned(auto_zone(), memory);
        }
        if (__CFOASafe) __CFSetLastAllocationEventName(memory, "CFArray (immutable)");
        break;
    case __kCFArrayDeque:
    case __kCFArrayStorage:
        if (__CFOASafe) __CFSetLastAllocationEventName(memory, "CFArray (mutable-variable)");
        ((struct __CFArray *)memory)->_mutations = 1;
        ((struct __CFArray *)memory)->_mutInProgress = 0;
        ((struct __CFArray*)memory)->_store = NULL;
        break;
    }
    if (__kCFArrayHasCustomCallBacks == __CFBitfieldGetValue(flags, 3, 2)) {
        /* Copy the caller's callbacks into the space reserved after the header. */
        CFArrayCallBacks *cb = (CFArrayCallBacks *)__CFArrayGetCallBacks((CFArrayRef)memory);
        *cb = *callBacks;
        FAULT_CALLBACK((void **)&(cb->retain));
        FAULT_CALLBACK((void **)&(cb->release));
        FAULT_CALLBACK((void **)&(cb->copyDescription));
        FAULT_CALLBACK((void **)&(cb->equal));
    }
    return (CFArrayRef)memory;
}
460
461 CFArrayRef CFArrayCreate(CFAllocatorRef allocator, const void **values, CFIndex numValues, const CFArrayCallBacks *callBacks) {
462 CFArrayRef result;
463 const CFArrayCallBacks *cb;
464 struct __CFArrayBucket *buckets;
465 CFAllocatorRef bucketsAllocator;
466 void* bucketsBase;
467 CFIndex idx;
468 CFAssert2(0 <= numValues, __kCFLogAssertion, "%s(): numValues (%d) cannot be less than zero", __PRETTY_FUNCTION__, numValues);
469 result = __CFArrayInit(allocator, __kCFArrayImmutable, numValues, callBacks);
470 cb = __CFArrayGetCallBacks(result);
471 buckets = __CFArrayGetBucketsPtr(result);
472 bucketsAllocator = isStrongMemory(result) ? allocator : kCFAllocatorNull;
473 bucketsBase = CF_IS_COLLECTABLE_ALLOCATOR(bucketsAllocator) ? (void *)auto_zone_base_pointer(auto_zone(), buckets) : NULL;
474 if (NULL != cb->retain) {
475 for (idx = 0; idx < numValues; idx++) {
476 __CFAssignWithWriteBarrier((void **)&buckets->_item, (void *)INVOKE_CALLBACK2(cb->retain, allocator, *values));
477 values++;
478 buckets++;
479 }
480 }
481 else {
482 for (idx = 0; idx < numValues; idx++) {
483 __CFAssignWithWriteBarrier((void **)&buckets->_item, (void *)*values);
484 values++;
485 buckets++;
486 }
487 }
488 __CFArraySetCount(result, numValues);
489 return result;
490 }
491
/* Creates an empty mutable array.  'capacity' is a hint; the deque store is
   allocated lazily, so the instance itself does not grow with capacity. */
CFMutableArrayRef CFArrayCreateMutable(CFAllocatorRef allocator, CFIndex capacity, const CFArrayCallBacks *callBacks) {
    CFAssert2(0 <= capacity, __kCFLogAssertion, "%s(): capacity (%d) cannot be less than zero", __PRETTY_FUNCTION__, capacity);
    CFAssert2(capacity <= LONG_MAX / sizeof(void *), __kCFLogAssertion, "%s(): capacity (%d) is too large for this architecture", __PRETTY_FUNCTION__, capacity);
    return (CFMutableArrayRef)__CFArrayInit(allocator, __kCFArrayDeque, capacity, callBacks);
}
497
498 CFArrayRef CFArrayCreateCopy(CFAllocatorRef allocator, CFArrayRef array) {
499 CFArrayRef result;
500 const CFArrayCallBacks *cb;
501 struct __CFArrayBucket *buckets;
502 CFAllocatorRef bucketsAllocator;
503 void* bucketsBase;
504 CFIndex numValues = CFArrayGetCount(array);
505 CFIndex idx;
506 if (CF_IS_OBJC(__kCFArrayTypeID, array)) {
507 cb = &kCFTypeArrayCallBacks;
508 } else {
509 cb = __CFArrayGetCallBacks(array);
510 }
511 result = __CFArrayInit(allocator, __kCFArrayImmutable, numValues, cb);
512 cb = __CFArrayGetCallBacks(result); // GC: use the new array's callbacks so we don't leak.
513 buckets = __CFArrayGetBucketsPtr(result);
514 bucketsAllocator = isStrongMemory(result) ? allocator : kCFAllocatorNull;
515 bucketsBase = CF_IS_COLLECTABLE_ALLOCATOR(bucketsAllocator) ? (void *)auto_zone_base_pointer(auto_zone(), buckets) : NULL;
516 for (idx = 0; idx < numValues; idx++) {
517 const void *value = CFArrayGetValueAtIndex(array, idx);
518 if (NULL != cb->retain) {
519 value = (void *)INVOKE_CALLBACK2(cb->retain, allocator, value);
520 }
521 __CFAssignWithWriteBarrier((void **)&buckets->_item, (void *)value);
522 buckets++;
523 }
524 __CFArraySetCount(result, numValues);
525 return result;
526 }
527
528 CFMutableArrayRef CFArrayCreateMutableCopy(CFAllocatorRef allocator, CFIndex capacity, CFArrayRef array) {
529 CFMutableArrayRef result;
530 const CFArrayCallBacks *cb;
531 CFIndex idx, numValues = CFArrayGetCount(array);
532 UInt32 flags;
533 if (CF_IS_OBJC(__kCFArrayTypeID, array)) {
534 cb = &kCFTypeArrayCallBacks;
535 }
536 else {
537 cb = __CFArrayGetCallBacks(array);
538 }
539 flags = __kCFArrayDeque;
540 result = (CFMutableArrayRef)__CFArrayInit(allocator, flags, capacity, cb);
541 if (0 == capacity) _CFArraySetCapacity(result, numValues);
542 for (idx = 0; idx < numValues; idx++) {
543 const void *value = CFArrayGetValueAtIndex(array, idx);
544 CFArrayAppendValue(result, value);
545 }
546 return result;
547 }
548
/* Public count accessor; dispatches to -count for bridged NSArrays. */
CFIndex CFArrayGetCount(CFArrayRef array) {
    CF_OBJC_FUNCDISPATCH0(__kCFArrayTypeID, CFIndex, array, "count");
    __CFGenericValidateType(array, __kCFArrayTypeID);
    CHECK_FOR_MUTATION(array);
    return __CFArrayGetCount(array);
}
555
556
557 CFIndex CFArrayGetCountOfValue(CFArrayRef array, CFRange range, const void *value) {
558 const CFArrayCallBacks *cb;
559 CFIndex idx, count = 0;
560 // CF: this ignores range
561 CF_OBJC_FUNCDISPATCH1(__kCFArrayTypeID, CFIndex, array, "_cfcountOccurrences:", value);
562 __CFGenericValidateType(array, __kCFArrayTypeID);
563 __CFArrayValidateRange(array, range, __PRETTY_FUNCTION__);
564 CHECK_FOR_MUTATION(array);
565 cb = __CFArrayGetCallBacks(array);
566 for (idx = 0; idx < range.length; idx++) {
567 const void *item = __CFArrayGetBucketAtIndex(array, range.location + idx)->_item;
568 if (value == item || (cb->equal && INVOKE_CALLBACK2(cb->equal, value, item))) {
569 count++;
570 }
571 }
572 return count;
573 }
574
575 Boolean CFArrayContainsValue(CFArrayRef array, CFRange range, const void *value) {
576 CFIndex idx;
577 CF_OBJC_FUNCDISPATCH2(__kCFArrayTypeID, char, array, "containsObject:inRange:", value, range);
578 __CFGenericValidateType(array, __kCFArrayTypeID);
579 __CFArrayValidateRange(array, range, __PRETTY_FUNCTION__);
580 CHECK_FOR_MUTATION(array);
581 const CFArrayCallBacks *cb = __CFArrayGetCallBacks(array);
582 for (idx = 0; idx < range.length; idx++) {
583 const void *item = __CFArrayGetBucketAtIndex(array, range.location + idx)->_item;
584 if (value == item || (cb->equal && INVOKE_CALLBACK2(cb->equal, value, item))) {
585 return true;
586 }
587 }
588 return false;
589 }
590
/* Returns the value at 'idx'; bounds are checked only by CFAssert2 here. */
const void *CFArrayGetValueAtIndex(CFArrayRef array, CFIndex idx) {
    CF_OBJC_FUNCDISPATCH1(__kCFArrayTypeID, void *, array, "objectAtIndex:", idx);
    __CFGenericValidateType(array, __kCFArrayTypeID);
    CFAssert2(0 <= idx && idx < __CFArrayGetCount(array), __kCFLogAssertion, "%s(): index (%d) out of bounds", __PRETTY_FUNCTION__, idx);
    CHECK_FOR_MUTATION(array);
    return __CFArrayGetBucketAtIndex(array, idx)->_item;
}
598
// This is for use by NSCFArray; it avoids ObjC dispatch, and checks for out of bounds
// Out-of-bounds is signalled with (void *)(-1) rather than NULL, since NULL
// can be a legitimately stored value.
const void *_CFArrayCheckAndGetValueAtIndex(CFArrayRef array, CFIndex idx) {
    CHECK_FOR_MUTATION(array);
    if (0 <= idx && idx < __CFArrayGetCount(array)) return __CFArrayGetBucketAtIndex(array, idx)->_item;
    return (void *)(-1);
}
605
606
/* Copies the value pointers in 'range' into the caller's buffer 'values'.
   Contiguous representations use objc_memmove_collectable (GC-aware copy);
   storage-backed arrays delegate to CFStorageGetValues. */
void CFArrayGetValues(CFArrayRef array, CFRange range, const void **values) {
    CF_OBJC_FUNCDISPATCH2(__kCFArrayTypeID, void, array, "getObjects:range:", values, range);
    __CFGenericValidateType(array, __kCFArrayTypeID);
    __CFArrayValidateRange(array, range, __PRETTY_FUNCTION__);
    CFAssert1(NULL != values, __kCFLogAssertion, "%s(): pointer to values may not be NULL", __PRETTY_FUNCTION__);
    CHECK_FOR_MUTATION(array);
    if (0 < range.length) {
        switch (__CFArrayGetType(array)) {
        case __kCFArrayImmutable:
        case __kCFArrayDeque:
            objc_memmove_collectable(values, __CFArrayGetBucketsPtr(array) + range.location, range.length * sizeof(struct __CFArrayBucket));
            break;
        case __kCFArrayStorage: {
            CFStorageRef store = (CFStorageRef)array->_store;
            CFStorageGetValues(store, range, values);
            break;
        }
        }
    }
}
627
628
/* Backing for NSFastEnumeration over CF arrays.  Contiguous representations
   hand out the whole bucket array in a single pass (state ATSTART -> ATEND);
   storage-backed arrays delegate chunking to _CFStorageFastEnumeration.
   Immutable arrays use a dummy constant mutations pointer since they cannot
   change; mutable arrays expose &_mutations so enumeration can detect
   concurrent mutation. */
CF_EXPORT unsigned long _CFArrayFastEnumeration(CFArrayRef array, struct __objcFastEnumerationStateEquivalent *state, void *stackbuffer, unsigned long count) {
    CHECK_FOR_MUTATION(array);
    if (array->_count == 0) return 0;
    enum { ATSTART = 0, ATEND = 1 };
    switch (__CFArrayGetType(array)) {
    case __kCFArrayImmutable:
        if (state->state == ATSTART) { /* first time */
            static const unsigned long const_mu = 1;
            state->state = ATEND;
            state->mutationsPtr = (unsigned long *)&const_mu;
            state->itemsPtr = (unsigned long *)__CFArrayGetBucketsPtr(array);
            return array->_count;
        }
        return 0;
    case __kCFArrayDeque:
        if (state->state == ATSTART) { /* first time */
            state->state = ATEND;
            state->mutationsPtr = (unsigned long *)&array->_mutations;
            state->itemsPtr = (unsigned long *)__CFArrayGetBucketsPtr(array);
            return array->_count;
        }
        return 0;
    case __kCFArrayStorage:
        state->mutationsPtr = (unsigned long *)&array->_mutations;
        return _CFStorageFastEnumeration((CFStorageRef)array->_store, state, stackbuffer, count);
    }
    return 0;
}
657
658
/* Calls 'applier(value, context)' once per value in 'range', in index order. */
void CFArrayApplyFunction(CFArrayRef array, CFRange range, CFArrayApplierFunction applier, void *context) {
    CFIndex idx;
    FAULT_CALLBACK((void **)&(applier));
    CF_OBJC_FUNCDISPATCH2(__kCFArrayTypeID, void, array, "_cfapply:context:", applier, context);
    __CFGenericValidateType(array, __kCFArrayTypeID);
    __CFArrayValidateRange(array, range, __PRETTY_FUNCTION__);
    CFAssert1(NULL != applier, __kCFLogAssertion, "%s(): pointer to applier function may not be NULL", __PRETTY_FUNCTION__);
    CHECK_FOR_MUTATION(array);
    for (idx = 0; idx < range.length; idx++) {
        const void *item = __CFArrayGetBucketAtIndex(array, range.location + idx)->_item;
        INVOKE_CALLBACK2(applier, item, context);
    }
}
672
673 CFIndex CFArrayGetFirstIndexOfValue(CFArrayRef array, CFRange range, const void *value) {
674 const CFArrayCallBacks *cb;
675 CFIndex idx;
676 CF_OBJC_FUNCDISPATCH2(__kCFArrayTypeID, CFIndex, array, "_cfindexOfObject:inRange:", value, range);
677 __CFGenericValidateType(array, __kCFArrayTypeID);
678 __CFArrayValidateRange(array, range, __PRETTY_FUNCTION__);
679 CHECK_FOR_MUTATION(array);
680 cb = __CFArrayGetCallBacks(array);
681 for (idx = 0; idx < range.length; idx++) {
682 const void *item = __CFArrayGetBucketAtIndex(array, range.location + idx)->_item;
683 if (value == item || (cb->equal && INVOKE_CALLBACK2(cb->equal, value, item)))
684 return idx + range.location;
685 }
686 return kCFNotFound;
687 }
688
689 CFIndex CFArrayGetLastIndexOfValue(CFArrayRef array, CFRange range, const void *value) {
690 const CFArrayCallBacks *cb;
691 CFIndex idx;
692 CF_OBJC_FUNCDISPATCH2(__kCFArrayTypeID, CFIndex, array, "_cflastIndexOfObject:inRange:", value, range);
693 __CFGenericValidateType(array, __kCFArrayTypeID);
694 __CFArrayValidateRange(array, range, __PRETTY_FUNCTION__);
695 CHECK_FOR_MUTATION(array);
696 cb = __CFArrayGetCallBacks(array);
697 for (idx = range.length; idx--;) {
698 const void *item = __CFArrayGetBucketAtIndex(array, range.location + idx)->_item;
699 if (value == item || (cb->equal && INVOKE_CALLBACK2(cb->equal, value, item)))
700 return idx + range.location;
701 }
702 return kCFNotFound;
703 }
704
/* Appends 'value' to the end of 'array' (an insertion at index == count). */
void CFArrayAppendValue(CFMutableArrayRef array, const void *value) {
    CF_OBJC_FUNCDISPATCH1(__kCFArrayTypeID, void, array, "addObject:", value);
    __CFGenericValidateType(array, __kCFArrayTypeID);
    CFAssert1(__CFArrayGetType(array) != __kCFArrayImmutable, __kCFLogAssertion, "%s(): array is immutable", __PRETTY_FUNCTION__);
    CHECK_FOR_MUTATION(array);
    _CFArrayReplaceValues(array, CFRangeMake(__CFArrayGetCount(array), 0), &value, 1);
}
712
/* Sets the value at 'idx', releasing the value it replaces; idx == count
   appends.  The new value is retained BEFORE the old one is released, so
   storing a value over itself is safe. */
void CFArraySetValueAtIndex(CFMutableArrayRef array, CFIndex idx, const void *value) {
    CF_OBJC_FUNCDISPATCH2(__kCFArrayTypeID, void, array, "setObject:atIndex:", value, idx);
    __CFGenericValidateType(array, __kCFArrayTypeID);
    CFAssert1(__CFArrayGetType(array) != __kCFArrayImmutable, __kCFLogAssertion, "%s(): array is immutable", __PRETTY_FUNCTION__);
    CFAssert2(0 <= idx && idx <= __CFArrayGetCount(array), __kCFLogAssertion, "%s(): index (%d) out of bounds", __PRETTY_FUNCTION__, idx);
    CHECK_FOR_MUTATION(array);
    if (idx == __CFArrayGetCount(array)) {
        /* Append case. */
        _CFArrayReplaceValues(array, CFRangeMake(idx, 0), &value, 1);
    } else {
        BEGIN_MUTATION(array);
        const void *old_value;
        const CFArrayCallBacks *cb = __CFArrayGetCallBacks(array);
        CFAllocatorRef allocator = __CFGetAllocator(array);
        struct __CFArrayBucket *bucket = __CFArrayGetBucketAtIndex(array, idx);
        if (NULL != cb->retain && !hasBeenFinalized(array)) {
            value = (void *)INVOKE_CALLBACK2(cb->retain, allocator, value);
        }
        old_value = bucket->_item;
        __CFAssignWithWriteBarrier((void **)&bucket->_item, (void *)value); // GC: handles deque/CFStorage cases.
        if (NULL != cb->release && !hasBeenFinalized(array)) {
            INVOKE_CALLBACK2(cb->release, allocator, old_value);
        }
        array->_mutations++;
        END_MUTATION(array);
    }
}
739
/* Inserts 'value' at 'idx' (0..count inclusive); later values shift up. */
void CFArrayInsertValueAtIndex(CFMutableArrayRef array, CFIndex idx, const void *value) {
    CF_OBJC_FUNCDISPATCH2(__kCFArrayTypeID, void, array, "insertObject:atIndex:", value, idx);
    __CFGenericValidateType(array, __kCFArrayTypeID);
    CFAssert1(__CFArrayGetType(array) != __kCFArrayImmutable, __kCFLogAssertion, "%s(): array is immutable", __PRETTY_FUNCTION__);
    CFAssert2(0 <= idx && idx <= __CFArrayGetCount(array), __kCFLogAssertion, "%s(): index (%d) out of bounds", __PRETTY_FUNCTION__, idx);
    CHECK_FOR_MUTATION(array);
    _CFArrayReplaceValues(array, CFRangeMake(idx, 0), &value, 1);
}
748
749 void CFArrayExchangeValuesAtIndices(CFMutableArrayRef array, CFIndex idx1, CFIndex idx2) {
750 const void *tmp;
751 struct __CFArrayBucket *bucket1, *bucket2;
752 CFAllocatorRef bucketsAllocator;
753 CF_OBJC_FUNCDISPATCH2(__kCFArrayTypeID, void, array, "_cfexchange::", idx1, idx2);
754 __CFGenericValidateType(array, __kCFArrayTypeID);
755 CFAssert2(0 <= idx1 && idx1 < __CFArrayGetCount(array), __kCFLogAssertion, "%s(): index #1 (%d) out of bounds", __PRETTY_FUNCTION__, idx1);
756 CFAssert2(0 <= idx2 && idx2 < __CFArrayGetCount(array), __kCFLogAssertion, "%s(): index #2 (%d) out of bounds", __PRETTY_FUNCTION__, idx2);
757 CFAssert1(__CFArrayGetType(array) != __kCFArrayImmutable, __kCFLogAssertion, "%s(): array is immutable", __PRETTY_FUNCTION__);
758 CHECK_FOR_MUTATION(array);
759 BEGIN_MUTATION(array);
760 bucket1 = __CFArrayGetBucketAtIndex(array, idx1);
761 bucket2 = __CFArrayGetBucketAtIndex(array, idx2);
762 tmp = bucket1->_item;
763 bucketsAllocator = isStrongMemory(array) ? __CFGetAllocator(array) : kCFAllocatorNull;
764 // XXX these aren't needed.
765 __CFAssignWithWriteBarrier((void **)&bucket1->_item, (void *)bucket2->_item);
766 __CFAssignWithWriteBarrier((void **)&bucket2->_item, (void *)tmp);
767 array->_mutations++;
768 END_MUTATION(array);
769 }
770
/* Removes the value at 'idx', shifting subsequent elements toward the front.
   Bridged ObjC instances are forwarded to -removeObjectAtIndex:. */
void CFArrayRemoveValueAtIndex(CFMutableArrayRef array, CFIndex idx) {
    CF_OBJC_FUNCDISPATCH1(__kCFArrayTypeID, void, array, "removeObjectAtIndex:", idx);
    __CFGenericValidateType(array, __kCFArrayTypeID);
    CFAssert1(__CFArrayGetType(array) != __kCFArrayImmutable, __kCFLogAssertion, "%s(): array is immutable", __PRETTY_FUNCTION__);
    CFAssert2(0 <= idx && idx < __CFArrayGetCount(array), __kCFLogAssertion, "%s(): index (%d) out of bounds", __PRETTY_FUNCTION__, idx);
    CHECK_FOR_MUTATION(array);
    // Removal is expressed as replacing a one-element range with nothing.
    _CFArrayReplaceValues(array, CFRangeMake(idx, 1), NULL, 0);
}
779
/* Empties the array: every current value is released via the array's
   release callback and the count is reset to zero.  Bridged ObjC instances
   are forwarded to -removeAllObjects. */
void CFArrayRemoveAllValues(CFMutableArrayRef array) {
    CF_OBJC_FUNCDISPATCH0(__kCFArrayTypeID, void, array, "removeAllObjects");
    __CFGenericValidateType(array, __kCFArrayTypeID);
    CFAssert1(__CFArrayGetType(array) != __kCFArrayImmutable, __kCFLogAssertion, "%s(): array is immutable", __PRETTY_FUNCTION__);
    CHECK_FOR_MUTATION(array);
    BEGIN_MUTATION(array);
    // Release the whole current range of values (third argument 'true' —
    // presumably also clears the buckets; confirm against __CFArrayReleaseValues).
    __CFArrayReleaseValues(array, CFRangeMake(0, __CFArrayGetCount(array)), true);
    __CFArraySetCount(array, 0);
    array->_mutations++;    // record the mutation, as all mutators here do
    END_MUTATION(array);
}
791
792 static void __CFArrayConvertDequeToStore(CFMutableArrayRef array) {
793 struct __CFArrayDeque *deque = (struct __CFArrayDeque *)array->_store;
794 struct __CFArrayBucket *raw_buckets = (struct __CFArrayBucket *)((uint8_t *)deque + sizeof(struct __CFArrayDeque));
795 CFStorageRef store;
796 CFIndex count = __CFArrayGetCount(array);
797 CFAllocatorRef allocator = __CFGetAllocator(array);
798 Boolean collectableMemory = CF_IS_COLLECTABLE_ALLOCATOR(allocator);
799 if (collectableMemory) auto_zone_retain(auto_zone(), deque);
800 store = CFStorageCreate(allocator, sizeof(const void *));
801 if (__CFOASafe) __CFSetLastAllocationEventName(store, "CFArray (store-storage)");
802 __CFAssignWithWriteBarrier((void **)&array->_store, (void *)store);
803 CFMakeCollectable(store); // GC: now safe to unroot the store.
804 CFStorageInsertValues(store, CFRangeMake(0, count));
805 CFStorageReplaceValues(store, CFRangeMake(0, count), raw_buckets + deque->_leftIdx);
806 CFAllocatorDeallocate(__CFGetAllocator(array), deque);
807 __CFBitfieldSetValue(((CFRuntimeBase *)array)->_cfinfo[CF_INFO_BITS], 1, 0, __kCFArrayStorage);
808 }
809
/* Migrates the array's backing from a CFStorage back to the flat deque
   representation.  A deque with some slack capacity is allocated, the store's
   values are copied into its center, the store is released, and the array's
   variant bits are flipped to __kCFArrayDeque. */
static void __CFArrayConvertStoreToDeque(CFMutableArrayRef array) {
    CFStorageRef store = (CFStorageRef)array->_store;
    struct __CFArrayDeque *deque;
    struct __CFArrayBucket *raw_buckets;
    CFIndex count = CFStorageGetCount(store);// storage, not array, has correct count at this point
    // do not resize down to a completely tight deque
    CFIndex capacity = __CFArrayDequeRoundUpCapacity(count + 6);
    CFIndex size = sizeof(struct __CFArrayDeque) + capacity * sizeof(struct __CFArrayBucket);
    CFAllocatorRef allocator = __CFGetAllocator(array);
    Boolean collectableMemory = CF_IS_COLLECTABLE_ALLOCATOR(allocator);
    if (collectableMemory) CFRetain(store); // GC: need to root the CFStorage
    // Scanned memory only when the array holds strong references.
    deque = (struct __CFArrayDeque *)CFAllocatorAllocate(allocator, size, isStrongMemory(array) ? __kCFAllocatorGCScannedMemory : 0);
    if (__CFOASafe) __CFSetLastAllocationEventName(deque, "CFArray (store-deque)");
    // Center the live elements so both ends have room to grow.
    deque->_leftIdx = (capacity - count) / 2;
    deque->_capacity = capacity;
    deque->_bias = 0;
    __CFAssignWithWriteBarrier((void **)&array->_store, (void *)deque);
    if (collectableMemory) auto_zone_release(auto_zone(), deque);
    raw_buckets = (struct __CFArrayBucket *)((uint8_t *)deque + sizeof(struct __CFArrayDeque));
    CFStorageGetValues(store, CFRangeMake(0, count), raw_buckets + deque->_leftIdx);
    CFRelease(store);   // drop the root taken above (or the array's reference)
    __CFBitfieldSetValue(((CFRuntimeBase *)array)->_cfinfo[CF_INFO_BITS], 1, 0, __kCFArrayDeque);
}
833
// may move deque storage, as it may need to grow deque
/* Prepares the deque for a replace of 'range' by 'newCount' elements: shifts
   the untouched regions (A, left of the range; C, right of it) so a gap of
   exactly newCount buckets sits at range.location.  Chooses the cheapest of:
   reallocating and re-centering, moving only C, moving only A, or re-centering
   within the existing buffer.  Newly exposed bucket space is zeroed for GC. */
static void __CFArrayRepositionDequeRegions(CFMutableArrayRef array, CFRange range, CFIndex newCount) {
    // newCount elements are going to replace the range, and the result will fit in the deque
    struct __CFArrayDeque *deque = (struct __CFArrayDeque *)array->_store;
    struct __CFArrayBucket *buckets;
    CFIndex cnt, futureCnt, numNewElems;
    CFIndex L, A, B, C, R;

    buckets = (struct __CFArrayBucket *)((uint8_t *)deque + sizeof(struct __CFArrayDeque));
    cnt = __CFArrayGetCount(array);
    futureCnt = cnt - range.length + newCount;

    L = deque->_leftIdx;            // length of region to left of deque
    A = range.location;             // length of region in deque to left of replaced range
    B = range.length;               // length of replaced range
    C = cnt - B - A;                // length of region in deque to right of replaced range
    R = deque->_capacity - cnt - L; // length of region to right of deque
    numNewElems = newCount - B;     // net growth (may be negative = shrink)

    // Minimum free slack to keep around; scales with capacity, floor of 4.
    CFIndex wiggle = deque->_capacity >> 17;
    if (wiggle < 4) wiggle = 4;
    if (deque->_capacity < (uint32_t)futureCnt || (cnt < futureCnt && L + R < wiggle)) {
        // must be inserting or space is tight, reallocate and re-center everything
        CFIndex capacity = __CFArrayDequeRoundUpCapacity(futureCnt + wiggle);
        CFIndex size = sizeof(struct __CFArrayDeque) + capacity * sizeof(struct __CFArrayBucket);
        CFAllocatorRef allocator = __CFGetAllocator(array);
        struct __CFArrayDeque *newDeque = (struct __CFArrayDeque *)CFAllocatorAllocate(allocator, size, isStrongMemory(array) ? __kCFAllocatorGCScannedMemory : 0);
        if (__CFOASafe) __CFSetLastAllocationEventName(newDeque, "CFArray (store-deque)");
        struct __CFArrayBucket *newBuckets = (struct __CFArrayBucket *)((uint8_t *)newDeque + sizeof(struct __CFArrayDeque));
        CFIndex oldL = L;
        CFIndex newL = (capacity - futureCnt) / 2;  // center the future contents
        CFIndex oldC0 = oldL + A + B;   // old start of region C
        CFIndex newC0 = newL + A + newCount;    // new start of region C
        newDeque->_leftIdx = newL;
        newDeque->_capacity = capacity;
        newDeque->_bias = 0;
        // Copy A and C into place; the gap between them is region B's new home.
        if (0 < A) objc_memmove_collectable(newBuckets + newL, buckets + oldL, A * sizeof(struct __CFArrayBucket));
        if (0 < C) objc_memmove_collectable(newBuckets + newC0, buckets + oldC0, C * sizeof(struct __CFArrayBucket));
        if (deque) _CFAllocatorDeallocateGC(allocator, deque);
        __CFAssignWithWriteBarrier((void **)&array->_store, (void *)newDeque);
        if (CF_IS_COLLECTABLE_ALLOCATOR(allocator)) auto_zone_release(auto_zone(), newDeque);
        return;
    }

    if ((numNewElems < 0 && C < A) || (numNewElems <= R && C < A)) { // move C
        // deleting: C is smaller
        // inserting: C is smaller and R has room
        CFIndex oldC0 = L + A + B;
        CFIndex newC0 = L + A + newCount;
        if (0 < C) objc_memmove_collectable(buckets + newC0, buckets + oldC0, C * sizeof(struct __CFArrayBucket));
        // GrP GC: zero-out newly exposed space on the right, if any
        if (oldC0 > newC0) memset(buckets + newC0 + C, 0, (oldC0 - newC0) * sizeof(struct __CFArrayBucket));
    } else if ((numNewElems < 0) || (numNewElems <= L && A <= C)) { // move A
        // deleting: A is smaller or equal (covers remaining delete cases)
        // inserting: A is smaller and L has room
        CFIndex oldL = L;
        CFIndex newL = L - numNewElems;
        deque->_leftIdx = newL;
        if (0 < A) objc_memmove_collectable(buckets + newL, buckets + oldL, A * sizeof(struct __CFArrayBucket));
        // GrP GC: zero-out newly exposed space on the left, if any
        if (newL > oldL) memset(buckets + oldL, 0, (newL - oldL) * sizeof(struct __CFArrayBucket));
    } else {
        // now, must be inserting, and either:
        //    A<=C, but L doesn't have room (R might have, but don't care)
        //    C<A, but R doesn't have room (L might have, but don't care)
        // re-center everything
        CFIndex oldL = L;
        CFIndex newL = (L + R - numNewElems) / 2;
        CFIndex oldBias = deque->_bias;
        // Bias remembers which way we last shifted, nudging the next
        // re-center to alternate and avoid repeated full moves.
        deque->_bias = (newL < oldL) ? -1 : 1;
        if (oldBias < 0) {
            newL = newL - newL / 2;
        } else if (0 < oldBias) {
            newL = newL + newL / 2;
        }
        CFIndex oldC0 = oldL + A + B;
        CFIndex newC0 = newL + A + newCount;
        deque->_leftIdx = newL;
        // Move the region nearest the destination first so the two
        // overlapping moves don't clobber each other.
        if (newL < oldL) {
            if (0 < A) objc_memmove_collectable(buckets + newL, buckets + oldL, A * sizeof(struct __CFArrayBucket));
            if (0 < C) objc_memmove_collectable(buckets + newC0, buckets + oldC0, C * sizeof(struct __CFArrayBucket));
            // GrP GC: zero-out newly exposed space on the right, if any
            if (oldC0 > newC0) memset(buckets + newC0 + C, 0, (oldC0 - newC0) * sizeof(struct __CFArrayBucket));
        } else {
            if (0 < C) objc_memmove_collectable(buckets + newC0, buckets + oldC0, C * sizeof(struct __CFArrayBucket));
            if (0 < A) objc_memmove_collectable(buckets + newL, buckets + oldL, A * sizeof(struct __CFArrayBucket));
            // GrP GC: zero-out newly exposed space on the left, if any
            if (newL > oldL) memset(buckets + oldL, 0, (newL - oldL) * sizeof(struct __CFArrayBucket));
        }
    }
}
925
926 static void __CFArrayHandleOutOfMemory(CFTypeRef obj, CFIndex numBytes) {
927 CFStringRef msg = CFStringCreateWithFormat(kCFAllocatorSystemDefault, NULL, CFSTR("Attempt to allocate %ld bytes for CFArray failed"), numBytes);
928 {
929 CFLog(kCFLogLevelCritical, CFSTR("%@"), msg);
930 HALT;
931 }
932 CFRelease(msg);
933 }
934
// This function is for Foundation's benefit; no one else should use it.
/* Pre-sizes the deque backing of 'array' to hold at least 'cap' elements,
   avoiding the incremental 4/8/16/... regrowth.  No-op for ObjC instances,
   CFStorage-backed arrays, and capacities beyond the deque limit. */
void _CFArraySetCapacity(CFMutableArrayRef array, CFIndex cap) {
    if (CF_IS_OBJC(__kCFArrayTypeID, array)) return;
    __CFGenericValidateType(array, __kCFArrayTypeID);
    CFAssert1(__CFArrayGetType(array) != __kCFArrayImmutable, __kCFLogAssertion, "%s(): array is immutable", __PRETTY_FUNCTION__);
    CFAssert3(__CFArrayGetCount(array) <= cap, __kCFLogAssertion, "%s(): desired capacity (%d) is less than count (%d)", __PRETTY_FUNCTION__, cap, __CFArrayGetCount(array));
    CHECK_FOR_MUTATION(array);
    BEGIN_MUTATION(array);
    // Currently, attempting to set the capacity of an array which is the CFStorage
    // variant, or set the capacity larger than __CF_MAX_BUCKETS_PER_DEQUE, has no
    // effect.  The primary purpose of this API is to help avoid a bunch of the
    // resizes at the small capacities 4, 8, 16, etc.
    if (__CFArrayGetType(array) == __kCFArrayDeque) {
        struct __CFArrayDeque *deque = (struct __CFArrayDeque *)array->_store;
        CFIndex capacity = __CFArrayDequeRoundUpCapacity(cap);
        CFIndex size = sizeof(struct __CFArrayDeque) + capacity * sizeof(struct __CFArrayBucket);
        CFAllocatorRef allocator = __CFGetAllocator(array);
        Boolean collectableMemory = CF_IS_COLLECTABLE_ALLOCATOR(allocator);
        if (NULL == deque) {
            // No backing yet: allocate an empty deque with the gap centered.
            deque = (struct __CFArrayDeque *)CFAllocatorAllocate(allocator, size, isStrongMemory(array) ? __kCFAllocatorGCScannedMemory : 0);
            if (NULL == deque) __CFArrayHandleOutOfMemory(array, size);
            if (__CFOASafe) __CFSetLastAllocationEventName(deque, "CFArray (store-deque)");
            deque->_leftIdx = capacity / 2;
        } else {
            // Existing deque: allocate larger storage and copy header + all
            // old buckets across (preserves _leftIdx and live elements).
            struct __CFArrayDeque *olddeque = deque;
            CFIndex oldcap = deque->_capacity;
            deque = (struct __CFArrayDeque *)CFAllocatorAllocate(allocator, size, isStrongMemory(array) ? __kCFAllocatorGCScannedMemory : 0);
            if (NULL == deque) __CFArrayHandleOutOfMemory(array, size);
            objc_memmove_collectable(deque, olddeque, sizeof(struct __CFArrayDeque) + oldcap * sizeof(struct __CFArrayBucket));
            _CFAllocatorDeallocateGC(allocator, olddeque);
            if (__CFOASafe) __CFSetLastAllocationEventName(deque, "CFArray (store-deque)");
        }
        deque->_capacity = capacity;
        deque->_bias = 0;
        __CFAssignWithWriteBarrier((void **)&array->_store, (void *)deque);
        // GC: array->_store now roots the deque; drop the allocation's root.
        if (collectableMemory) auto_zone_release(auto_zone(), deque);
    }
    END_MUTATION(array);
}
974
975
976 void CFArrayReplaceValues(CFMutableArrayRef array, CFRange range, const void **newValues, CFIndex newCount) {
977 CF_OBJC_FUNCDISPATCH3(__kCFArrayTypeID, void, array, "replaceObjectsInRange:withObjects:count:", range, (void **)newValues, newCount);
978 __CFGenericValidateType(array, __kCFArrayTypeID);
979 __CFArrayValidateRange(array, range, __PRETTY_FUNCTION__);
980 CFAssert1(__CFArrayGetType(array) != __kCFArrayImmutable, __kCFLogAssertion, "%s(): array is immutable", __PRETTY_FUNCTION__);
981 CFAssert2(0 <= newCount, __kCFLogAssertion, "%s(): newCount (%d) cannot be less than zero", __PRETTY_FUNCTION__, newCount);
982 CHECK_FOR_MUTATION(array);
983 return _CFArrayReplaceValues(array, range, newValues, newCount);
984 }
985
// This function does no ObjC dispatch or argument checking;
// It should only be called from places where that dispatch and check has already been done, or NSCFArray
/* Core mutation primitive: replaces 'range' of 'array' with the 'newCount'
   values at 'newValues'.  Retains incoming values, releases outgoing ones,
   repositions the untouched regions, and migrates between the deque and
   CFStorage representations when the future count crosses the threshold. */
void _CFArrayReplaceValues(CFMutableArrayRef array, CFRange range, const void **newValues, CFIndex newCount) {
    CHECK_FOR_MUTATION(array);
    BEGIN_MUTATION(array);
    const CFArrayCallBacks *cb;
    CFAllocatorRef allocator;
    CFIndex idx, cnt, futureCnt;
    const void **newv, *buffer[256];    // small stack buffer avoids heap traffic
    cnt = __CFArrayGetCount(array);
    futureCnt = cnt - range.length + newCount;
    CFAssert1(newCount <= futureCnt, __kCFLogAssertion, "%s(): internal error 1", __PRETTY_FUNCTION__);
    cb = __CFArrayGetCallBacks(array);
    allocator = __CFGetAllocator(array);

    /* Retain new values if needed, possibly allocating a temporary buffer for them */
    if (NULL != cb->retain && !hasBeenFinalized(array)) {
        newv = (newCount <= 256) ? (const void **)buffer : (const void **)CFAllocatorAllocate(allocator, newCount * sizeof(void *), 0); // GC OK
        if (newv != buffer && __CFOASafe) __CFSetLastAllocationEventName(newv, "CFArray (temp)");
        for (idx = 0; idx < newCount; idx++) {
            newv[idx] = (void *)INVOKE_CALLBACK2(cb->retain, allocator, (void *)newValues[idx]);
        }
    } else {
        // No retain callback: use the caller's values directly.
        newv = newValues;
    }
    array->_mutations++;

    /* Now, there are three regions of interest, each of which may be empty:
     *   A: the region from index 0 to one less than the range.location
     *   B: the region of the range
     *   C: the region from range.location + range.length to the end
     * Note that index 0 is not necessarily at the lowest-address edge
     * of the available storage. The values in region B need to get
     * released, and the values in regions A and C (depending) need
     * to get shifted if the number of new values is different from
     * the length of the range being replaced.
     */
    if (0 < range.length) {
        __CFArrayReleaseValues(array, range, false);
    }
    // region B elements are now "dead"
    if (__kCFArrayStorage == __CFArrayGetType(array)) {
        CFStorageRef store = (CFStorageRef)array->_store;
        // reposition regions A and C for new region B elements in gap
        if (range.length < newCount) {
            CFStorageInsertValues(store, CFRangeMake(range.location + range.length, newCount - range.length));
        } else if (newCount < range.length) {
            CFStorageDeleteValues(store, CFRangeMake(range.location + newCount, range.length - newCount));
        }
        // Shrink back to the deque representation once well under the limit.
        if (futureCnt <= __CF_MAX_BUCKETS_PER_DEQUE / 2) {
            __CFArrayConvertStoreToDeque(array);
        }
    } else if (NULL == array->_store) {
        // No backing yet: create whichever representation fits futureCnt.
        if (__CF_MAX_BUCKETS_PER_DEQUE <= futureCnt) {
            CFStorageRef store = CFStorageCreate(allocator, sizeof(const void *));
            if (! isStrongMemory(array)) _CFStorageSetWeak(store);
            if (__CFOASafe) __CFSetLastAllocationEventName(store, "CFArray (store-storage)");
            __CFAssignWithWriteBarrier((void **)&array->_store, (void *)store);
            CFMakeCollectable(store);
            CFStorageInsertValues(store, CFRangeMake(0, newCount));
            __CFBitfieldSetValue(((CFRuntimeBase *)array)->_cfinfo[CF_INFO_BITS], 1, 0, __kCFArrayStorage);
        } else if (0 <= futureCnt) {
            struct __CFArrayDeque *deque;
            CFIndex capacity = __CFArrayDequeRoundUpCapacity(futureCnt);
            CFIndex size = sizeof(struct __CFArrayDeque) + capacity * sizeof(struct __CFArrayBucket);
            deque = (struct __CFArrayDeque *)CFAllocatorAllocate(allocator, size, isStrongMemory(array) ? __kCFAllocatorGCScannedMemory : 0);
            if (__CFOASafe) __CFSetLastAllocationEventName(deque, "CFArray (store-deque)");
            deque->_leftIdx = (capacity - newCount) / 2;    // center the new elements
            deque->_capacity = capacity;
            deque->_bias = 0;
            __CFAssignWithWriteBarrier((void **)&array->_store, (void *)deque);
            if (CF_IS_COLLECTABLE_ALLOCATOR(allocator)) auto_zone_release(auto_zone(), deque); // GC: now safe to unroot the array body.
        }
    } else {        // Deque
        // reposition regions A and C for new region B elements in gap
        if (__CF_MAX_BUCKETS_PER_DEQUE <= futureCnt) {
            // Outgrowing the deque: convert, then adjust in the store.
            CFStorageRef store;
            __CFArrayConvertDequeToStore(array);
            store = (CFStorageRef)array->_store;
            if (range.length < newCount) {
                CFStorageInsertValues(store, CFRangeMake(range.location + range.length, newCount - range.length));
            } else if (newCount < range.length) { // this won't happen, but is here for completeness
                CFStorageDeleteValues(store, CFRangeMake(range.location + newCount, range.length - newCount));
            }
        } else if (range.length != newCount) {
            __CFArrayRepositionDequeRegions(array, range, newCount);
        }
    }
    // copy in new region B elements
    if (0 < newCount) {
        if (__kCFArrayStorage == __CFArrayGetType(array)) {
            CFStorageRef store = (CFStorageRef)array->_store;
            CFStorageReplaceValues(store, CFRangeMake(range.location, newCount), newv);
        } else {    // Deque
            struct __CFArrayDeque *deque = (struct __CFArrayDeque *)array->_store;
            struct __CFArrayBucket *raw_buckets = (struct __CFArrayBucket *)((uint8_t *)deque + sizeof(struct __CFArrayDeque));
            objc_memmove_collectable(raw_buckets + deque->_leftIdx + range.location, newv, newCount * sizeof(struct __CFArrayBucket));
        }
    }
    __CFArraySetCount(array, futureCnt);
    // Free the temporary retain buffer only if we heap-allocated one.
    if (newv != buffer && newv != newValues) CFAllocatorDeallocate(allocator, newv);
    END_MUTATION(array);
}
1089
/* Pairs a client CFComparatorFunction with its opaque context pointer, so
   the pair can be threaded through CFQSortArray/CFBSearch as one argument. */
struct _acompareContext {
    CFComparatorFunction func;  // client-supplied comparator
    void *context;              // passed through to 'func' unmodified
};
1094
1095 static CFComparisonResult __CFArrayCompareValues(const void *v1, const void *v2, struct _acompareContext *context) {
1096 const void **val1 = (const void **)v1;
1097 const void **val2 = (const void **)v2;
1098 return (CFComparisonResult)(INVOKE_CALLBACK3(context->func, *val1, *val2, context->context));
1099 }
1100
/* Sorts the elements in 'range' in place using 'comparator' (with 'context'
   passed through).  Deque-backed arrays are sorted directly in their bucket
   buffer; storage-backed arrays are copied out, sorted, and copied back.
   Bridged ObjC instances are forwarded to -sortUsingFunction:context:range:. */
void CFArraySortValues(CFMutableArrayRef array, CFRange range, CFComparatorFunction comparator, void *context) {
    FAULT_CALLBACK((void **)&(comparator));
    CF_OBJC_FUNCDISPATCH3(__kCFArrayTypeID, void, array, "sortUsingFunction:context:range:", comparator, context, range);
    __CFGenericValidateType(array, __kCFArrayTypeID);
    __CFArrayValidateRange(array, range, __PRETTY_FUNCTION__);
    CFAssert1(__CFArrayGetType(array) != __kCFArrayImmutable, __kCFLogAssertion, "%s(): array is immutable", __PRETTY_FUNCTION__);
    CFAssert1(NULL != comparator, __kCFLogAssertion, "%s(): pointer to comparator function may not be NULL", __PRETTY_FUNCTION__);
    CHECK_FOR_MUTATION(array);
    BEGIN_MUTATION(array);
    array->_mutations++;

    // Ranges of 0 or 1 elements are trivially sorted.
    if (1 < range.length) {
        struct _acompareContext ctx;
        struct __CFArrayBucket *bucket;
        ctx.func = comparator;
        ctx.context = context;
        switch (__CFArrayGetType(array)) {
        case __kCFArrayDeque:
            // Buckets are contiguous: sort them where they live.
            bucket = __CFArrayGetBucketsPtr(array) + range.location;
            CFQSortArray(bucket, range.length, sizeof(void *), (CFComparatorFunction)__CFArrayCompareValues, &ctx);
            break;
        case __kCFArrayStorage: {
            // CFStorage is not contiguous: copy out, sort, write back.
            CFStorageRef store = (CFStorageRef)array->_store;
            const void **values, *buffer[256];  // small ranges sort on the stack
            values = (range.length <= 256) ? (const void **)buffer : (const void **)CFAllocatorAllocate(kCFAllocatorSystemDefault, range.length * sizeof(void *), 0); // GC OK
            if (values != buffer && __CFOASafe) __CFSetLastAllocationEventName(values, "CFArray (temp)");
            CFStorageGetValues(store, range, values);
            CFQSortArray(values, range.length, sizeof(void *), (CFComparatorFunction)__CFArrayCompareValues, &ctx);
            CFStorageReplaceValues(store, range, values);
            if (values != buffer) CFAllocatorDeallocate(kCFAllocatorSystemDefault, values); // GC OK
            break;
        }
        }
    }
    END_MUTATION(array);
}
1137
1138 CFIndex CFArrayBSearchValues(CFArrayRef array, CFRange range, const void *value, CFComparatorFunction comparator, void *context) {
1139 __CFGenericValidateType(array, __kCFArrayTypeID);
1140 __CFArrayValidateRange(array, range, __PRETTY_FUNCTION__);
1141 CFAssert1(NULL != comparator, __kCFLogAssertion, "%s(): pointer to comparator function may not be NULL", __PRETTY_FUNCTION__);
1142 bool isObjC = CF_IS_OBJC(__kCFArrayTypeID, array);
1143 FAULT_CALLBACK((void **)&(comparator));
1144 if (!isObjC) CHECK_FOR_MUTATION(array);
1145 CFIndex idx = 0;
1146 if (range.length <= 0) return range.location;
1147 if (isObjC || __kCFArrayStorage == __CFArrayGetType(array)) {
1148 const void *item;
1149 SInt32 lg;
1150 item = CFArrayGetValueAtIndex(array, range.location + range.length - 1);
1151 if ((CFComparisonResult)(INVOKE_CALLBACK3(comparator, item, value, context)) < 0) {
1152 return range.location + range.length;
1153 }
1154 item = CFArrayGetValueAtIndex(array, range.location);
1155 if ((CFComparisonResult)(INVOKE_CALLBACK3(comparator, value, item, context)) < 0) {
1156 return range.location;
1157 }
1158 lg = flsl(range.length) - 1; // lg2(range.length)
1159 item = CFArrayGetValueAtIndex(array, range.location + -1 + (1 << lg));
1160 idx = range.location + ((CFComparisonResult)(INVOKE_CALLBACK3(comparator, item, value, context)) < 0) ? range.length - (1 << lg) : -1;
1161 while (lg--) {
1162 item = CFArrayGetValueAtIndex(array, range.location + idx + (1 << lg));
1163 if ((CFComparisonResult)(INVOKE_CALLBACK3(comparator, item, value, context)) < 0) {
1164 idx += (1 << lg);
1165 }
1166 }
1167 idx++;
1168 } else {
1169 struct _acompareContext ctx;
1170 ctx.func = comparator;
1171 ctx.context = context;
1172 idx = CFBSearch(&value, sizeof(void *), __CFArrayGetBucketsPtr(array) + range.location, range.length, (CFComparatorFunction)__CFArrayCompareValues, &ctx);
1173 }
1174 return idx + range.location;
1175 }
1176
1177 void CFArrayAppendArray(CFMutableArrayRef array, CFArrayRef otherArray, CFRange otherRange) {
1178 CFIndex idx;
1179 __CFGenericValidateType(array, __kCFArrayTypeID);
1180 __CFGenericValidateType(otherArray, __kCFArrayTypeID);
1181 CFAssert1(__CFArrayGetType(array) != __kCFArrayImmutable, __kCFLogAssertion, "%s(): array is immutable", __PRETTY_FUNCTION__);
1182 __CFArrayValidateRange(otherArray, otherRange, __PRETTY_FUNCTION__);
1183 CHECK_FOR_MUTATION(array);
1184 for (idx = otherRange.location; idx < otherRange.location + otherRange.length; idx++) {
1185 CFArrayAppendValue(array, CFArrayGetValueAtIndex(otherArray, idx));
1186 }
1187 }
1188
1189 // ----====---- ----====---- ----====---- ----====----
1190
/* Internal helper (the 6130 suffix is presumably a bug/radar number — TODO
   confirm).  Overwrites the array's backing slots in place with list[p[idx]]
   for each index, i.e. applies the permutation/index map 'p' over 'list'
   directly into the buckets, bypassing retain/release and the mutation
   counter.  Returns false (does nothing) for bridged ObjC instances.
   NOTE(review): assumes 'p' and 'list' each cover the array's count —
   no bounds are checked here. */
__private_extern__ Boolean __CFArray6130(CFMutableArrayRef array, CFIndex *p, void **list) {
    if (CF_IS_OBJC(__kCFArrayTypeID, array)) return false;
    CHECK_FOR_MUTATION(array);
    if (__kCFArrayStorage == __CFArrayGetType(array)) {
        CFStorageRef store = (CFStorageRef)array->_store;
        CFRange range = {0, 0};
        void *bytes = NULL;
        for (CFIndex idx = 0; idx < __CFArrayGetCount(array); idx++) {
            // Fetch the next storage leaf only when idx walks past the
            // cached contiguous range.
            if (range.location + range.length - 1 < idx) {
                bytes = CFStorageGetValueAtIndex(store, idx, &range);
            }
            ((void **)bytes)[idx - range.location] = list[p[idx]];
        }
    } else if (kCFUseCollectableAllocator) { // Deque
        // GC build: each slot write must go through the collectable memmove.
        struct __CFArrayDeque *deque = (struct __CFArrayDeque *)array->_store;
        struct __CFArrayBucket *raw_buckets = (struct __CFArrayBucket *)((uint8_t *)deque + sizeof(struct __CFArrayDeque)) + deque->_leftIdx;
        for (CFIndex idx = 0; idx < __CFArrayGetCount(array); idx++) {
            struct __CFArrayBucket *dest = raw_buckets + idx;
            objc_memmove_collectable(dest, list + p[idx], sizeof(struct __CFArrayBucket));
        }
    } else { // Deque
        // Non-GC build: plain stores into the contiguous buckets.
        struct __CFArrayDeque *deque = (struct __CFArrayDeque *)array->_store;
        struct __CFArrayBucket *raw_buckets = (struct __CFArrayBucket *)((uint8_t *)deque + sizeof(struct __CFArrayDeque)) + deque->_leftIdx;
        for (CFIndex idx = 0; idx < __CFArrayGetCount(array); idx++) {
            raw_buckets[idx]._item = list[p[idx]];
        }
    }
    return true;
}
1220