/*
 * Copyright (c) 2008 Apple Inc. All rights reserved.
 *
 * @APPLE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this
 * file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_LICENSE_HEADER_END@
 */
/*  CFArray.c
    Copyright 1998-2002, Apple, Inc. All rights reserved.
    Responsibility: Christopher Kane
*/
28 #include <CoreFoundation/CFArray.h>
29 #include "CFStorage.h"
31 #include "CFInternal.h"
34 __private_extern__
void _CFStorageSetWeak(CFStorageRef storage
);
36 const CFArrayCallBacks kCFTypeArrayCallBacks
= {0, __CFTypeCollectionRetain
, __CFTypeCollectionRelease
, CFCopyDescription
, CFEqual
};
37 static const CFArrayCallBacks __kCFNullArrayCallBacks
= {0, NULL
, NULL
, NULL
, NULL
};
/* One slot of array storage.
   NOTE(review): the struct body was lost in extraction; a single const void *
   item is reconstructed from how buckets are used elsewhere in this file
   (e.g. buckets[i]._item) — confirm against the original source. */
struct __CFArrayBucket {
    const void *_item;
};

enum {
    __CF_MAX_BUCKETS_PER_DEQUE = 262140  /* above this count, storage switches to CFStorage */
};
47 CF_INLINE CFIndex
__CFArrayDequeRoundUpCapacity(CFIndex capacity
) {
48 if (capacity
< 4) return 4;
49 return __CFMin((1 << flsl(capacity
)), __CF_MAX_BUCKETS_PER_DEQUE
);
52 struct __CFArrayDeque
{
57 uint32_t _pad
; // GC: pointers must be 8-byte aligned for the collector to find them.
59 /* struct __CFArrayBucket buckets follow here */
64 CFIndex _count
; /* number of objects */
66 void *_store
; /* can be NULL when MutableDeque */
/* Bits 0-1 of the CF info byte: array variant.
   NOTE(review): the deque/storage members were dropped by extraction; values
   2 and 3 are reconstructed from the switch statements in this file — confirm. */
enum {
    __kCFArrayImmutable = 0,
    __kCFArrayDeque = 2,
    __kCFArrayStorage = 3
};

/* Bits 2-3: which callbacks the array uses. */
enum {
    __kCFArrayHasNullCallBacks = 0,
    __kCFArrayHasCFTypeCallBacks = 1,
    __kCFArrayHasCustomCallBacks = 3 /* callbacks are at end of header */
};

/*
    Bits 4 & 5 are reserved for GC use.
    Bit 4, if set, indicates that the array is weak.
    Bit 5 marks whether finalization has occured and, thus, whether to continue to do special retain/release processing of elements.
*/
88 CF_INLINE
bool isStrongMemory(CFTypeRef collection
) {
89 return __CFBitfieldGetValue(((const CFRuntimeBase
*)collection
)->_cfinfo
[CF_INFO_BITS
], 4, 4) == 0;
92 CF_INLINE
bool isWeakMemory(CFTypeRef collection
) {
93 return __CFBitfieldGetValue(((const CFRuntimeBase
*)collection
)->_cfinfo
[CF_INFO_BITS
], 4, 4) != 0;
96 CF_INLINE
bool hasBeenFinalized(CFTypeRef collection
) {
97 return __CFBitfieldGetValue(((const CFRuntimeBase
*)collection
)->_cfinfo
[CF_INFO_BITS
], 5, 5) != 0;
100 CF_INLINE
void markFinalized(CFTypeRef collection
) {
101 __CFBitfieldSetValue(((CFRuntimeBase
*)collection
)->_cfinfo
[CF_INFO_BITS
], 5, 5, 1);
104 CF_INLINE CFIndex
__CFArrayGetType(CFArrayRef array
) {
105 return __CFBitfieldGetValue(((const CFRuntimeBase
*)array
)->_cfinfo
[CF_INFO_BITS
], 1, 0);
108 CF_INLINE CFIndex
__CFArrayGetSizeOfType(CFIndex t
) {
110 size
+= sizeof(struct __CFArray
);
111 if (__CFBitfieldGetValue(t
, 3, 2) == __kCFArrayHasCustomCallBacks
) {
112 size
+= sizeof(CFArrayCallBacks
);
117 CF_INLINE CFIndex
__CFArrayGetCount(CFArrayRef array
) {
118 return array
->_count
;
121 CF_INLINE
void __CFArraySetCount(CFArrayRef array
, CFIndex v
) {
122 ((struct __CFArray
*)array
)->_count
= v
;
125 /* Only applies to immutable and mutable-deque-using arrays;
126 * Returns the bucket holding the left-most real value in the latter case. */
127 CF_INLINE
struct __CFArrayBucket
*__CFArrayGetBucketsPtr(CFArrayRef array
) {
128 switch (__CFArrayGetType(array
)) {
129 case __kCFArrayImmutable
:
130 return (struct __CFArrayBucket
*)((uint8_t *)array
+ __CFArrayGetSizeOfType(((CFRuntimeBase
*)array
)->_cfinfo
[CF_INFO_BITS
]));
131 case __kCFArrayDeque
: {
132 struct __CFArrayDeque
*deque
= (struct __CFArrayDeque
*)array
->_store
;
133 return (struct __CFArrayBucket
*)((uint8_t *)deque
+ sizeof(struct __CFArrayDeque
) + deque
->_leftIdx
* sizeof(struct __CFArrayBucket
));
139 /* This shouldn't be called if the array count is 0. */
140 CF_INLINE
struct __CFArrayBucket
*__CFArrayGetBucketAtIndex(CFArrayRef array
, CFIndex idx
) {
141 switch (__CFArrayGetType(array
)) {
142 case __kCFArrayImmutable
:
143 case __kCFArrayDeque
:
144 return __CFArrayGetBucketsPtr(array
) + idx
;
145 case __kCFArrayStorage
: {
146 CFStorageRef store
= (CFStorageRef
)array
->_store
;
147 return (struct __CFArrayBucket
*)CFStorageGetValueAtIndex(store
, idx
, NULL
);
153 CF_INLINE CFArrayCallBacks
*__CFArrayGetCallBacks(CFArrayRef array
) {
154 CFArrayCallBacks
*result
= NULL
;
155 switch (__CFBitfieldGetValue(((const CFRuntimeBase
*)array
)->_cfinfo
[CF_INFO_BITS
], 3, 2)) {
156 case __kCFArrayHasNullCallBacks
:
157 return (CFArrayCallBacks
*)&__kCFNullArrayCallBacks
;
158 case __kCFArrayHasCFTypeCallBacks
:
159 return (CFArrayCallBacks
*)&kCFTypeArrayCallBacks
;
160 case __kCFArrayHasCustomCallBacks
:
163 switch (__CFArrayGetType(array
)) {
164 case __kCFArrayImmutable
:
165 result
= (CFArrayCallBacks
*)((uint8_t *)array
+ sizeof(struct __CFArray
));
167 case __kCFArrayDeque
:
168 case __kCFArrayStorage
:
169 result
= (CFArrayCallBacks
*)((uint8_t *)array
+ sizeof(struct __CFArray
));
175 CF_INLINE
bool __CFArrayCallBacksMatchNull(const CFArrayCallBacks
*c
) {
177 (c
->retain
== __kCFNullArrayCallBacks
.retain
&&
178 c
->release
== __kCFNullArrayCallBacks
.release
&&
179 c
->copyDescription
== __kCFNullArrayCallBacks
.copyDescription
&&
180 c
->equal
== __kCFNullArrayCallBacks
.equal
));
183 CF_INLINE
bool __CFArrayCallBacksMatchCFType(const CFArrayCallBacks
*c
) {
184 return (&kCFTypeArrayCallBacks
== c
||
185 (c
->retain
== kCFTypeArrayCallBacks
.retain
&&
186 c
->release
== kCFTypeArrayCallBacks
.release
&&
187 c
->copyDescription
== kCFTypeArrayCallBacks
.copyDescription
&&
188 c
->equal
== kCFTypeArrayCallBacks
.equal
));
191 struct _releaseContext
{
192 void (*release
)(CFAllocatorRef
, const void *);
193 CFAllocatorRef allocator
;
196 static void __CFArrayStorageRelease(const void *itemptr
, void *context
) {
197 struct _releaseContext
*rc
= (struct _releaseContext
*)context
;
198 INVOKE_CALLBACK2(rc
->release
, rc
->allocator
, *(const void **)itemptr
);
199 *(const void **)itemptr
= NULL
; // GC: clear item to break strong reference.
202 static void __CFArrayReleaseValues(CFArrayRef array
, CFRange range
, bool releaseStorageIfPossible
) {
203 const CFArrayCallBacks
*cb
= __CFArrayGetCallBacks(array
);
204 CFAllocatorRef allocator
;
206 switch (__CFArrayGetType(array
)) {
207 case __kCFArrayImmutable
:
208 if (NULL
!= cb
->release
&& 0 < range
.length
&& !hasBeenFinalized(array
)) {
209 // if we've been finalized then we know that
210 // 1) we're using the standard callback on GC memory
211 // 2) the slots don't' need to be zeroed
212 struct __CFArrayBucket
*buckets
= __CFArrayGetBucketsPtr(array
);
213 allocator
= __CFGetAllocator(array
);
214 for (idx
= 0; idx
< range
.length
; idx
++) {
215 INVOKE_CALLBACK2(cb
->release
, allocator
, buckets
[idx
+ range
.location
]._item
);
216 buckets
[idx
+ range
.location
]._item
= NULL
; // GC: break strong reference.
220 case __kCFArrayDeque
: {
221 struct __CFArrayDeque
*deque
= (struct __CFArrayDeque
*)array
->_store
;
222 if (0 < range
.length
&& NULL
!= deque
&& !hasBeenFinalized(array
)) {
223 struct __CFArrayBucket
*buckets
= __CFArrayGetBucketsPtr(array
);
224 if (NULL
!= cb
->release
) {
225 allocator
= __CFGetAllocator(array
);
226 for (idx
= 0; idx
< range
.length
; idx
++) {
227 INVOKE_CALLBACK2(cb
->release
, allocator
, buckets
[idx
+ range
.location
]._item
);
228 buckets
[idx
+ range
.location
]._item
= NULL
; // GC: break strong reference.
231 for (idx
= 0; idx
< range
.length
; idx
++) {
232 buckets
[idx
+ range
.location
]._item
= NULL
; // GC: break strong reference.
236 if (releaseStorageIfPossible
&& 0 == range
.location
&& __CFArrayGetCount(array
) == range
.length
) {
237 allocator
= __CFGetAllocator(array
);
238 if (NULL
!= deque
) _CFAllocatorDeallocateGC(allocator
, deque
);
239 __CFArraySetCount(array
, 0); // GC: _count == 0 ==> _store == NULL.
240 ((struct __CFArray
*)array
)->_store
= NULL
;
244 case __kCFArrayStorage
: {
245 CFStorageRef store
= (CFStorageRef
)array
->_store
;
246 if (NULL
!= cb
->release
&& 0 < range
.length
&& !hasBeenFinalized(array
)) {
247 struct _releaseContext context
;
248 allocator
= __CFGetAllocator(array
);
249 context
.release
= cb
->release
;
250 context
.allocator
= allocator
;
251 CFStorageApplyFunction(store
, range
, __CFArrayStorageRelease
, &context
);
253 if (releaseStorageIfPossible
&& 0 == range
.location
&& __CFArrayGetCount(array
) == range
.length
) {
255 __CFArraySetCount(array
, 0); // GC: _count == 0 ==> _store == NULL.
256 ((struct __CFArray
*)array
)->_store
= NULL
;
257 __CFBitfieldSetValue(((CFRuntimeBase
*)array
)->_cfinfo
[CF_INFO_BITS
], 1, 0, __kCFArrayDeque
);
/* Debug-only range validation; compiled out to a no-op macro otherwise.
 * NOTE(review): the #if/#else/#endif guard was dropped by extraction; a
 * function and a same-named macro cannot coexist unguarded, so the DEBUG
 * conditional has been reconstructed — confirm the exact guard symbol. */
#if defined(DEBUG)
CF_INLINE void __CFArrayValidateRange(CFArrayRef array, CFRange range, const char *func) {
    CFAssert3(0 <= range.location && range.location <= CFArrayGetCount(array), __kCFLogAssertion, "%s(): range.location index (%d) out of bounds (0, %d)", func, range.location, CFArrayGetCount(array));
    CFAssert2(0 <= range.length, __kCFLogAssertion, "%s(): range.length (%d) cannot be less than zero", func, range.length);
    CFAssert3(range.location + range.length <= CFArrayGetCount(array), __kCFLogAssertion, "%s(): ending index (%d) out of bounds (0, %d)", func, range.location + range.length, CFArrayGetCount(array));
}
#else
#define __CFArrayValidateRange(a,r,f)
#endif
274 static Boolean
__CFArrayEqual(CFTypeRef cf1
, CFTypeRef cf2
) {
275 CFArrayRef array1
= (CFArrayRef
)cf1
;
276 CFArrayRef array2
= (CFArrayRef
)cf2
;
277 const CFArrayCallBacks
*cb1
, *cb2
;
279 if (array1
== array2
) return true;
280 cnt
= __CFArrayGetCount(array1
);
281 if (cnt
!= __CFArrayGetCount(array2
)) return false;
282 cb1
= __CFArrayGetCallBacks(array1
);
283 cb2
= __CFArrayGetCallBacks(array2
);
284 if (cb1
->equal
!= cb2
->equal
) return false;
285 if (0 == cnt
) return true; /* after function comparison! */
286 for (idx
= 0; idx
< cnt
; idx
++) {
287 const void *val1
= __CFArrayGetBucketAtIndex(array1
, idx
)->_item
;
288 const void *val2
= __CFArrayGetBucketAtIndex(array2
, idx
)->_item
;
290 if (NULL
== cb1
->equal
) return false;
291 if (!INVOKE_CALLBACK2(cb1
->equal
, val1
, val2
)) return false;
297 static CFHashCode
__CFArrayHash(CFTypeRef cf
) {
298 CFArrayRef array
= (CFArrayRef
)cf
;
299 return __CFArrayGetCount(array
);
302 static CFStringRef
__CFArrayCopyDescription(CFTypeRef cf
) {
303 CFArrayRef array
= (CFArrayRef
)cf
;
304 CFMutableStringRef result
;
305 const CFArrayCallBacks
*cb
;
306 CFAllocatorRef allocator
;
308 cnt
= __CFArrayGetCount(array
);
309 allocator
= CFGetAllocator(array
);
310 result
= CFStringCreateMutable(allocator
, 0);
311 switch (__CFArrayGetType(array
)) {
312 case __kCFArrayImmutable
:
313 CFStringAppendFormat(result
, NULL
, CFSTR("<CFArray %p [%p]>{type = immutable, count = %u, values = (\n"), cf
, allocator
, cnt
);
315 case __kCFArrayDeque
:
316 CFStringAppendFormat(result
, NULL
, CFSTR("<CFArray %p [%p]>{type = mutable-small, count = %u, values = (\n"), cf
, allocator
, cnt
);
318 case __kCFArrayStorage
:
319 CFStringAppendFormat(result
, NULL
, CFSTR("<CFArray %p [%p]>{type = mutable-large, count = %u, values = (\n"), cf
, allocator
, cnt
);
322 cb
= __CFArrayGetCallBacks(array
);
323 for (idx
= 0; idx
< cnt
; idx
++) {
324 CFStringRef desc
= NULL
;
325 const void *val
= __CFArrayGetBucketAtIndex(array
, idx
)->_item
;
326 if (NULL
!= cb
->copyDescription
) {
327 desc
= (CFStringRef
)INVOKE_CALLBACK1(cb
->copyDescription
, val
);
330 CFStringAppendFormat(result
, NULL
, CFSTR("\t%u : %@\n"), idx
, desc
);
333 CFStringAppendFormat(result
, NULL
, CFSTR("\t%u : <%p>\n"), idx
, val
);
336 CFStringAppend(result
, CFSTR(")}"));
341 static void __CFResourceRelease(CFTypeRef cf
, const void *ignored
) {
342 kCFTypeArrayCallBacks
.release(kCFAllocatorSystemDefault
, cf
);
345 static void __CFArrayDeallocate(CFTypeRef cf
) {
346 CFArrayRef array
= (CFArrayRef
)cf
;
347 // Under GC, keep contents alive when we know we can, either standard callbacks or NULL
348 // if (__CFBitfieldGetValue(cf->info, 5, 4)) return; // bits only ever set under GC
349 CFAllocatorRef allocator
= __CFGetAllocator(array
);
350 if (CF_IS_COLLECTABLE_ALLOCATOR(allocator
)) {
351 // XXX_PCB keep array intact during finalization.
352 const CFArrayCallBacks
*cb
= __CFArrayGetCallBacks(array
);
353 if (cb
->retain
== NULL
&& cb
->release
== NULL
)
355 if (cb
== &kCFTypeArrayCallBacks
|| cb
->release
== kCFTypeArrayCallBacks
.release
) {
357 CFArrayApplyFunction((CFArrayRef
)cf
, CFRangeMake(0, __CFArrayGetCount(array
)), (CFArrayApplierFunction
)__CFResourceRelease
, 0);
361 __CFArrayReleaseValues(array
, CFRangeMake(0, __CFArrayGetCount(array
)), true);
364 static CFTypeID __kCFArrayTypeID
= _kCFRuntimeNotATypeID
;
366 static const CFRuntimeClass __CFArrayClass
= {
367 _kCFRuntimeScannedObject
,
375 __CFArrayCopyDescription
378 __private_extern__
void __CFArrayInitialize(void) {
379 __kCFArrayTypeID
= _CFRuntimeRegisterClass(&__CFArrayClass
);
382 CFTypeID
CFArrayGetTypeID(void) {
383 return __kCFArrayTypeID
;
386 static CFArrayRef
__CFArrayInit(CFAllocatorRef allocator
, UInt32 flags
, CFIndex capacity
, const CFArrayCallBacks
*callBacks
) {
387 struct __CFArray
*memory
;
389 __CFBitfieldSetValue(flags
, 31, 2, 0);
390 if (CF_IS_COLLECTABLE_ALLOCATOR(allocator
)) {
391 if (!callBacks
|| (callBacks
->retain
== NULL
&& callBacks
->release
== NULL
)) {
392 __CFBitfieldSetValue(flags
, 4, 4, 1); // setWeak
395 if (__CFArrayCallBacksMatchNull(callBacks
)) {
396 __CFBitfieldSetValue(flags
, 3, 2, __kCFArrayHasNullCallBacks
);
397 } else if (__CFArrayCallBacksMatchCFType(callBacks
)) {
398 __CFBitfieldSetValue(flags
, 3, 2, __kCFArrayHasCFTypeCallBacks
);
400 __CFBitfieldSetValue(flags
, 3, 2, __kCFArrayHasCustomCallBacks
);
402 size
= __CFArrayGetSizeOfType(flags
) - sizeof(CFRuntimeBase
);
403 switch (__CFBitfieldGetValue(flags
, 1, 0)) {
404 case __kCFArrayImmutable
:
405 size
+= capacity
* sizeof(struct __CFArrayBucket
);
407 case __kCFArrayDeque
:
408 case __kCFArrayStorage
:
411 memory
= (struct __CFArray
*)_CFRuntimeCreateInstance(allocator
, __kCFArrayTypeID
, size
, NULL
);
412 if (NULL
== memory
) {
415 __CFBitfieldSetValue(memory
->_base
._cfinfo
[CF_INFO_BITS
], 6, 0, flags
);
416 __CFArraySetCount((CFArrayRef
)memory
, 0);
417 switch (__CFBitfieldGetValue(flags
, 1, 0)) {
418 case __kCFArrayImmutable
:
419 if (isWeakMemory(memory
)) { // if weak, don't scan
420 auto_zone_set_layout_type(__CFCollectableZone
, memory
, AUTO_OBJECT_UNSCANNED
);
422 if (__CFOASafe
) __CFSetLastAllocationEventName(memory
, "CFArray (immutable)");
424 case __kCFArrayDeque
:
425 case __kCFArrayStorage
:
426 if (__CFOASafe
) __CFSetLastAllocationEventName(memory
, "CFArray (mutable-variable)");
427 ((struct __CFArray
*)memory
)->_mutations
= 1;
428 ((struct __CFArray
*)memory
)->_store
= NULL
;
431 if (__kCFArrayHasCustomCallBacks
== __CFBitfieldGetValue(flags
, 3, 2)) {
432 CFArrayCallBacks
*cb
= (CFArrayCallBacks
*)__CFArrayGetCallBacks((CFArrayRef
)memory
);
434 FAULT_CALLBACK((void **)&(cb
->retain
));
435 FAULT_CALLBACK((void **)&(cb
->release
));
436 FAULT_CALLBACK((void **)&(cb
->copyDescription
));
437 FAULT_CALLBACK((void **)&(cb
->equal
));
439 return (CFArrayRef
)memory
;
442 CFArrayRef
CFArrayCreate(CFAllocatorRef allocator
, const void **values
, CFIndex numValues
, const CFArrayCallBacks
*callBacks
) {
444 const CFArrayCallBacks
*cb
;
445 struct __CFArrayBucket
*buckets
;
446 CFAllocatorRef bucketsAllocator
;
449 CFAssert2(0 <= numValues
, __kCFLogAssertion
, "%s(): numValues (%d) cannot be less than zero", __PRETTY_FUNCTION__
, numValues
);
450 result
= __CFArrayInit(allocator
, __kCFArrayImmutable
, numValues
, callBacks
);
451 cb
= __CFArrayGetCallBacks(result
);
452 buckets
= __CFArrayGetBucketsPtr(result
);
453 bucketsAllocator
= isStrongMemory(result
) ? allocator
: kCFAllocatorNull
;
454 bucketsBase
= CF_IS_COLLECTABLE_ALLOCATOR(bucketsAllocator
) ? (void *)auto_zone_base_pointer(__CFCollectableZone
, buckets
) : NULL
;
455 if (NULL
!= cb
->retain
) {
456 for (idx
= 0; idx
< numValues
; idx
++) {
457 CF_WRITE_BARRIER_BASE_ASSIGN(bucketsAllocator
, bucketsBase
, buckets
->_item
, (void *)INVOKE_CALLBACK2(cb
->retain
, allocator
, *values
));
463 for (idx
= 0; idx
< numValues
; idx
++) {
464 CF_WRITE_BARRIER_BASE_ASSIGN(bucketsAllocator
, bucketsBase
, buckets
->_item
, *values
);
469 __CFArraySetCount(result
, numValues
);
473 CFMutableArrayRef
CFArrayCreateMutable(CFAllocatorRef allocator
, CFIndex capacity
, const CFArrayCallBacks
*callBacks
) {
474 CFAssert2(0 <= capacity
, __kCFLogAssertion
, "%s(): capacity (%d) cannot be less than zero", __PRETTY_FUNCTION__
, capacity
);
475 CFAssert2(capacity
<= LONG_MAX
/ sizeof(void *), __kCFLogAssertion
, "%s(): capacity (%d) is too large for this architecture", __PRETTY_FUNCTION__
, capacity
);
476 return (CFMutableArrayRef
)__CFArrayInit(allocator
, __kCFArrayDeque
, capacity
, callBacks
);
479 // This creates an array which is for CFTypes or NSObjects, with an ownership transfer --
480 // the array does not take a retain, and the caller does not need to release the inserted objects.
481 // The incoming objects must also be collectable if allocated out of a collectable allocator.
482 CFArrayRef
_CFArrayCreate_ex(CFAllocatorRef allocator
, Boolean isMutable
, const void **values
, CFIndex numValues
) {
484 result
= __CFArrayInit(allocator
, isMutable
? __kCFArrayDeque
: __kCFArrayImmutable
, numValues
, &kCFTypeArrayCallBacks
);
486 struct __CFArrayBucket
*buckets
= __CFArrayGetBucketsPtr(result
);
487 CF_WRITE_BARRIER_MEMMOVE(buckets
, values
, numValues
* sizeof(struct __CFArrayBucket
));
489 if (__CF_MAX_BUCKETS_PER_DEQUE
<= numValues
) {
490 CFStorageRef store
= (CFStorageRef
)CFMakeCollectable(CFStorageCreate(allocator
, sizeof(const void *)));
491 if (__CFOASafe
) __CFSetLastAllocationEventName(store
, "CFArray (store-storage)");
492 CF_WRITE_BARRIER_BASE_ASSIGN(allocator
, result
, result
->_store
, store
);
493 CFStorageInsertValues(store
, CFRangeMake(0, numValues
));
494 CFStorageReplaceValues(store
, CFRangeMake(0, numValues
), values
);
495 __CFBitfieldSetValue(((CFRuntimeBase
*)result
)->_cfinfo
[CF_INFO_BITS
], 1, 0, __kCFArrayStorage
);
496 } else if (0 <= numValues
) {
497 struct __CFArrayDeque
*deque
;
498 struct __CFArrayBucket
*raw_buckets
;
499 CFIndex capacity
= __CFArrayDequeRoundUpCapacity(numValues
);
500 CFIndex size
= sizeof(struct __CFArrayDeque
) + capacity
* sizeof(struct __CFArrayBucket
);
501 deque
= (struct __CFArrayDeque
*)_CFAllocatorAllocateGC(allocator
, size
, isStrongMemory(result
) ? __kCFAllocatorGCScannedMemory
: 0);
502 if (__CFOASafe
) __CFSetLastAllocationEventName(deque
, "CFArray (store-deque)");
503 deque
->_leftIdx
= (capacity
- numValues
) / 2;
504 deque
->_capacity
= capacity
;
506 CF_WRITE_BARRIER_BASE_ASSIGN(allocator
, result
, result
->_store
, deque
);
507 raw_buckets
= (struct __CFArrayBucket
*)((uint8_t *)deque
+ sizeof(struct __CFArrayDeque
));
508 CF_WRITE_BARRIER_MEMMOVE(raw_buckets
+ deque
->_leftIdx
+ 0, values
, numValues
* sizeof(struct __CFArrayBucket
));
509 __CFBitfieldSetValue(((CFRuntimeBase
*)result
)->_cfinfo
[CF_INFO_BITS
], 1, 0, __kCFArrayDeque
);
512 __CFArraySetCount(result
, numValues
);
516 CFArrayRef
CFArrayCreateCopy(CFAllocatorRef allocator
, CFArrayRef array
) {
518 const CFArrayCallBacks
*cb
;
519 struct __CFArrayBucket
*buckets
;
520 CFAllocatorRef bucketsAllocator
;
522 CFIndex numValues
= CFArrayGetCount(array
);
524 if (CF_IS_OBJC(__kCFArrayTypeID
, array
)) {
525 cb
= &kCFTypeArrayCallBacks
;
527 cb
= __CFArrayGetCallBacks(array
);
529 result
= __CFArrayInit(allocator
, __kCFArrayImmutable
, numValues
, cb
);
530 cb
= __CFArrayGetCallBacks(result
); // GC: use the new array's callbacks so we don't leak.
531 buckets
= __CFArrayGetBucketsPtr(result
);
532 bucketsAllocator
= isStrongMemory(result
) ? allocator
: kCFAllocatorNull
;
533 bucketsBase
= CF_IS_COLLECTABLE_ALLOCATOR(bucketsAllocator
) ? (void *)auto_zone_base_pointer(__CFCollectableZone
, buckets
) : NULL
;
534 for (idx
= 0; idx
< numValues
; idx
++) {
535 const void *value
= CFArrayGetValueAtIndex(array
, idx
);
536 if (NULL
!= cb
->retain
) {
537 value
= (void *)INVOKE_CALLBACK2(cb
->retain
, allocator
, value
);
539 CF_WRITE_BARRIER_BASE_ASSIGN(bucketsAllocator
, bucketsBase
, buckets
->_item
, value
);
542 __CFArraySetCount(result
, numValues
);
546 CFMutableArrayRef
CFArrayCreateMutableCopy(CFAllocatorRef allocator
, CFIndex capacity
, CFArrayRef array
) {
547 CFMutableArrayRef result
;
548 const CFArrayCallBacks
*cb
;
549 CFIndex idx
, numValues
= CFArrayGetCount(array
);
551 if (CF_IS_OBJC(__kCFArrayTypeID
, array
)) {
552 cb
= &kCFTypeArrayCallBacks
;
555 cb
= __CFArrayGetCallBacks(array
);
557 flags
= __kCFArrayDeque
;
558 result
= (CFMutableArrayRef
)__CFArrayInit(allocator
, flags
, capacity
, cb
);
559 if (0 == capacity
) _CFArraySetCapacity(result
, numValues
);
560 for (idx
= 0; idx
< numValues
; idx
++) {
561 const void *value
= CFArrayGetValueAtIndex(array
, idx
);
562 CFArrayAppendValue(result
, value
);
567 CFIndex
CFArrayGetCount(CFArrayRef array
) {
568 CF_OBJC_FUNCDISPATCH0(__kCFArrayTypeID
, CFIndex
, array
, "count");
569 __CFGenericValidateType(array
, __kCFArrayTypeID
);
570 return __CFArrayGetCount(array
);
574 CFIndex
CFArrayGetCountOfValue(CFArrayRef array
, CFRange range
, const void *value
) {
575 const CFArrayCallBacks
*cb
;
576 CFIndex idx
, count
= 0;
577 // CF: this ignores range
578 CF_OBJC_FUNCDISPATCH1(__kCFArrayTypeID
, CFIndex
, array
, "countOccurrences:", value
);
579 __CFGenericValidateType(array
, __kCFArrayTypeID
);
580 __CFArrayValidateRange(array
, range
, __PRETTY_FUNCTION__
);
581 cb
= __CFArrayGetCallBacks(array
);
582 for (idx
= 0; idx
< range
.length
; idx
++) {
583 const void *item
= __CFArrayGetBucketAtIndex(array
, range
.location
+ idx
)->_item
;
584 if (value
== item
|| (cb
->equal
&& INVOKE_CALLBACK2(cb
->equal
, value
, item
))) {
591 Boolean
CFArrayContainsValue(CFArrayRef array
, CFRange range
, const void *value
) {
593 CF_OBJC_FUNCDISPATCH2(__kCFArrayTypeID
, char, array
, "containsObject:inRange:", value
, range
);
594 __CFGenericValidateType(array
, __kCFArrayTypeID
);
595 __CFArrayValidateRange(array
, range
, __PRETTY_FUNCTION__
);
596 for (idx
= 0; idx
< range
.length
; idx
++) {
597 const void *item
= __CFArrayGetBucketAtIndex(array
, range
.location
+ idx
)->_item
;
602 const CFArrayCallBacks
*cb
= __CFArrayGetCallBacks(array
);
604 for (idx
= 0; idx
< range
.length
; idx
++) {
605 const void *item
= __CFArrayGetBucketAtIndex(array
, range
.location
+ idx
)->_item
;
606 if (INVOKE_CALLBACK2(cb
->equal
, value
, item
)) {
614 const void *CFArrayGetValueAtIndex(CFArrayRef array
, CFIndex idx
) {
615 CF_OBJC_FUNCDISPATCH1(__kCFArrayTypeID
, void *, array
, "objectAtIndex:", idx
);
616 __CFGenericValidateType(array
, __kCFArrayTypeID
);
617 CFAssert2(0 <= idx
&& idx
< __CFArrayGetCount(array
), __kCFLogAssertion
, "%s(): index (%d) out of bounds", __PRETTY_FUNCTION__
, idx
);
618 return __CFArrayGetBucketAtIndex(array
, idx
)->_item
;
621 // This is for use by NSCFArray; it avoids ObjC dispatch, and checks for out of bounds
622 const void *_CFArrayCheckAndGetValueAtIndex(CFArrayRef array
, CFIndex idx
) {
623 if (0 <= idx
&& idx
< __CFArrayGetCount(array
)) return __CFArrayGetBucketAtIndex(array
, idx
)->_item
;
628 void CFArrayGetValues(CFArrayRef array
, CFRange range
, const void **values
) {
629 CF_OBJC_FUNCDISPATCH2(__kCFArrayTypeID
, void, array
, "getObjects:range:", values
, range
);
630 __CFGenericValidateType(array
, __kCFArrayTypeID
);
631 __CFArrayValidateRange(array
, range
, __PRETTY_FUNCTION__
);
632 CFAssert1(NULL
!= values
, __kCFLogAssertion
, "%s(): pointer to values may not be NULL", __PRETTY_FUNCTION__
);
633 if (0 < range
.length
) {
634 switch (__CFArrayGetType(array
)) {
635 case __kCFArrayImmutable
:
636 case __kCFArrayDeque
:
637 CF_WRITE_BARRIER_MEMMOVE(values
, __CFArrayGetBucketsPtr(array
) + range
.location
, range
.length
* sizeof(struct __CFArrayBucket
));
639 case __kCFArrayStorage
: {
640 CFStorageRef store
= (CFStorageRef
)array
->_store
;
641 CFStorageGetValues(store
, range
, values
);
649 unsigned long _CFArrayFastEnumeration(CFArrayRef array
, struct __objcFastEnumerationStateEquivalent
*state
, void *stackbuffer
, unsigned long count
) {
650 if (array
->_count
== 0) return 0;
651 enum { ATSTART
= 0, ATEND
= 1 };
652 switch (__CFArrayGetType(array
)) {
653 case __kCFArrayImmutable
:
654 if (state
->state
== ATSTART
) { /* first time */
655 static const unsigned long const_mu
= 1;
656 state
->state
= ATEND
;
657 state
->mutationsPtr
= (unsigned long *)&const_mu
;
658 state
->itemsPtr
= (unsigned long *)__CFArrayGetBucketsPtr(array
);
659 return array
->_count
;
662 case __kCFArrayDeque
:
663 if (state
->state
== ATSTART
) { /* first time */
664 state
->state
= ATEND
;
665 state
->mutationsPtr
= (unsigned long *)&array
->_mutations
;
666 state
->itemsPtr
= (unsigned long *)__CFArrayGetBucketsPtr(array
);
667 return array
->_count
;
670 case __kCFArrayStorage
:
671 state
->mutationsPtr
= (unsigned long *)&array
->_mutations
;
672 return _CFStorageFastEnumeration((CFStorageRef
)array
->_store
, state
, stackbuffer
, count
);
678 void CFArrayApplyFunction(CFArrayRef array
, CFRange range
, CFArrayApplierFunction applier
, void *context
) {
680 FAULT_CALLBACK((void **)&(applier
));
681 CF_OBJC_FUNCDISPATCH2(__kCFArrayTypeID
, void, array
, "apply:context:", applier
, context
);
682 __CFGenericValidateType(array
, __kCFArrayTypeID
);
683 __CFArrayValidateRange(array
, range
, __PRETTY_FUNCTION__
);
684 CFAssert1(NULL
!= applier
, __kCFLogAssertion
, "%s(): pointer to applier function may not be NULL", __PRETTY_FUNCTION__
);
685 for (idx
= 0; idx
< range
.length
; idx
++) {
686 const void *item
= __CFArrayGetBucketAtIndex(array
, range
.location
+ idx
)->_item
;
687 INVOKE_CALLBACK2(applier
, item
, context
);
691 CFIndex
CFArrayGetFirstIndexOfValue(CFArrayRef array
, CFRange range
, const void *value
) {
692 const CFArrayCallBacks
*cb
;
694 CF_OBJC_FUNCDISPATCH2(__kCFArrayTypeID
, CFIndex
, array
, "_cfindexOfObject:inRange:", value
, range
);
695 __CFGenericValidateType(array
, __kCFArrayTypeID
);
696 __CFArrayValidateRange(array
, range
, __PRETTY_FUNCTION__
);
697 cb
= __CFArrayGetCallBacks(array
);
698 for (idx
= 0; idx
< range
.length
; idx
++) {
699 const void *item
= __CFArrayGetBucketAtIndex(array
, range
.location
+ idx
)->_item
;
700 if (value
== item
|| (cb
->equal
&& INVOKE_CALLBACK2(cb
->equal
, value
, item
)))
701 return idx
+ range
.location
;
706 CFIndex
CFArrayGetLastIndexOfValue(CFArrayRef array
, CFRange range
, const void *value
) {
707 const CFArrayCallBacks
*cb
;
709 CF_OBJC_FUNCDISPATCH2(__kCFArrayTypeID
, CFIndex
, array
, "_cflastIndexOfObject:inRange:", value
, range
);
710 __CFGenericValidateType(array
, __kCFArrayTypeID
);
711 __CFArrayValidateRange(array
, range
, __PRETTY_FUNCTION__
);
712 cb
= __CFArrayGetCallBacks(array
);
713 for (idx
= range
.length
; idx
--;) {
714 const void *item
= __CFArrayGetBucketAtIndex(array
, range
.location
+ idx
)->_item
;
715 if (value
== item
|| (cb
->equal
&& INVOKE_CALLBACK2(cb
->equal
, value
, item
)))
716 return idx
+ range
.location
;
721 void CFArrayAppendValue(CFMutableArrayRef array
, const void *value
) {
722 CF_OBJC_FUNCDISPATCH1(__kCFArrayTypeID
, void, array
, "addObject:", value
);
723 __CFGenericValidateType(array
, __kCFArrayTypeID
);
724 CFAssert1(__CFArrayGetType(array
) != __kCFArrayImmutable
, __kCFLogAssertion
, "%s(): array is immutable", __PRETTY_FUNCTION__
);
725 _CFArrayReplaceValues(array
, CFRangeMake(__CFArrayGetCount(array
), 0), &value
, 1);
728 void CFArraySetValueAtIndex(CFMutableArrayRef array
, CFIndex idx
, const void *value
) {
729 CF_OBJC_FUNCDISPATCH2(__kCFArrayTypeID
, void, array
, "setObject:atIndex:", value
, idx
);
730 __CFGenericValidateType(array
, __kCFArrayTypeID
);
731 CFAssert1(__CFArrayGetType(array
) != __kCFArrayImmutable
, __kCFLogAssertion
, "%s(): array is immutable", __PRETTY_FUNCTION__
);
732 CFAssert2(0 <= idx
&& idx
<= __CFArrayGetCount(array
), __kCFLogAssertion
, "%s(): index (%d) out of bounds", __PRETTY_FUNCTION__
, idx
);
733 if (idx
== __CFArrayGetCount(array
)) {
734 _CFArrayReplaceValues(array
, CFRangeMake(idx
, 0), &value
, 1);
736 const void *old_value
;
737 const CFArrayCallBacks
*cb
= __CFArrayGetCallBacks(array
);
738 CFAllocatorRef allocator
= __CFGetAllocator(array
);
739 CFAllocatorRef bucketsAllocator
= isStrongMemory(array
) ? allocator
: kCFAllocatorNull
;
740 struct __CFArrayBucket
*bucket
= __CFArrayGetBucketAtIndex(array
, idx
);
741 if (NULL
!= cb
->retain
&& !hasBeenFinalized(array
)) {
742 value
= (void *)INVOKE_CALLBACK2(cb
->retain
, allocator
, value
);
744 old_value
= bucket
->_item
;
745 CF_WRITE_BARRIER_ASSIGN(bucketsAllocator
, bucket
->_item
, value
); // GC: handles deque/CFStorage cases.
746 if (NULL
!= cb
->release
&& !hasBeenFinalized(array
)) {
747 INVOKE_CALLBACK2(cb
->release
, allocator
, old_value
);
753 void CFArrayInsertValueAtIndex(CFMutableArrayRef array
, CFIndex idx
, const void *value
) {
754 CF_OBJC_FUNCDISPATCH2(__kCFArrayTypeID
, void, array
, "insertObject:atIndex:", value
, idx
);
755 __CFGenericValidateType(array
, __kCFArrayTypeID
);
756 CFAssert1(__CFArrayGetType(array
) != __kCFArrayImmutable
, __kCFLogAssertion
, "%s(): array is immutable", __PRETTY_FUNCTION__
);
757 CFAssert2(0 <= idx
&& idx
<= __CFArrayGetCount(array
), __kCFLogAssertion
, "%s(): index (%d) out of bounds", __PRETTY_FUNCTION__
, idx
);
758 _CFArrayReplaceValues(array
, CFRangeMake(idx
, 0), &value
, 1);
761 void CFArrayExchangeValuesAtIndices(CFMutableArrayRef array
, CFIndex idx1
, CFIndex idx2
) {
763 struct __CFArrayBucket
*bucket1
, *bucket2
;
764 CFAllocatorRef bucketsAllocator
;
765 CF_OBJC_FUNCDISPATCH2(__kCFArrayTypeID
, void, array
, "exchange::", idx1
, idx2
);
766 __CFGenericValidateType(array
, __kCFArrayTypeID
);
767 CFAssert2(0 <= idx1
&& idx1
< __CFArrayGetCount(array
), __kCFLogAssertion
, "%s(): index #1 (%d) out of bounds", __PRETTY_FUNCTION__
, idx1
);
768 CFAssert2(0 <= idx2
&& idx2
< __CFArrayGetCount(array
), __kCFLogAssertion
, "%s(): index #2 (%d) out of bounds", __PRETTY_FUNCTION__
, idx2
);
769 CFAssert1(__CFArrayGetType(array
) != __kCFArrayImmutable
, __kCFLogAssertion
, "%s(): array is immutable", __PRETTY_FUNCTION__
);
770 bucket1
= __CFArrayGetBucketAtIndex(array
, idx1
);
771 bucket2
= __CFArrayGetBucketAtIndex(array
, idx2
);
772 tmp
= bucket1
->_item
;
773 bucketsAllocator
= isStrongMemory(array
) ? __CFGetAllocator(array
) : kCFAllocatorNull
;
774 // XXX these aren't needed.
775 CF_WRITE_BARRIER_ASSIGN(bucketsAllocator
, bucket1
->_item
, bucket2
->_item
);
776 CF_WRITE_BARRIER_ASSIGN(bucketsAllocator
, bucket2
->_item
, tmp
);
781 void CFArrayRemoveValueAtIndex(CFMutableArrayRef array
, CFIndex idx
) {
782 CF_OBJC_FUNCDISPATCH1(__kCFArrayTypeID
, void, array
, "removeObjectAtIndex:", idx
);
783 __CFGenericValidateType(array
, __kCFArrayTypeID
);
784 CFAssert1(__CFArrayGetType(array
) != __kCFArrayImmutable
, __kCFLogAssertion
, "%s(): array is immutable", __PRETTY_FUNCTION__
);
785 CFAssert2(0 <= idx
&& idx
< __CFArrayGetCount(array
), __kCFLogAssertion
, "%s(): index (%d) out of bounds", __PRETTY_FUNCTION__
, idx
);
786 _CFArrayReplaceValues(array
, CFRangeMake(idx
, 1), NULL
, 0);
/* Releases every value in the array and resets the count to zero.
 * The backing store itself is kept (see __CFArrayReleaseValues' third arg). */
void CFArrayRemoveAllValues(CFMutableArrayRef array) {
    // Bridge to the ObjC implementation when this is really an NSMutableArray.
    CF_OBJC_FUNCDISPATCH0(__kCFArrayTypeID, void, array, "removeAllObjects");
    __CFGenericValidateType(array, __kCFArrayTypeID);
    CFAssert1(__CFArrayGetType(array) != __kCFArrayImmutable, __kCFLogAssertion, "%s(): array is immutable", __PRETTY_FUNCTION__);
    __CFArrayReleaseValues(array, CFRangeMake(0, __CFArrayGetCount(array)), true);
    __CFArraySetCount(array, 0);
}
798 static void __CFArrayConvertDequeToStore(CFMutableArrayRef array
) {
799 struct __CFArrayDeque
*deque
= (struct __CFArrayDeque
*)array
->_store
;
800 struct __CFArrayBucket
*raw_buckets
= (struct __CFArrayBucket
*)((uint8_t *)deque
+ sizeof(struct __CFArrayDeque
));
802 CFIndex count
= CFArrayGetCount(array
);
803 CFAllocatorRef allocator
= __CFGetAllocator(array
);
804 store
= (CFStorageRef
)CFMakeCollectable(CFStorageCreate(allocator
, sizeof(const void *)));
805 if (__CFOASafe
) __CFSetLastAllocationEventName(store
, "CFArray (store-storage)");
806 CF_WRITE_BARRIER_BASE_ASSIGN(allocator
, array
, array
->_store
, store
);
807 CFStorageInsertValues(store
, CFRangeMake(0, count
));
808 CFStorageReplaceValues(store
, CFRangeMake(0, count
), raw_buckets
+ deque
->_leftIdx
);
809 _CFAllocatorDeallocateGC(__CFGetAllocator(array
), deque
);
810 __CFBitfieldSetValue(((CFRuntimeBase
*)array
)->_cfinfo
[CF_INFO_BITS
], 1, 0, __kCFArrayStorage
);
813 static void __CFArrayConvertStoreToDeque(CFMutableArrayRef array
) {
814 CFStorageRef store
= (CFStorageRef
)array
->_store
;
815 struct __CFArrayDeque
*deque
;
816 struct __CFArrayBucket
*raw_buckets
;
817 CFIndex count
= CFStorageGetCount(store
);// storage, not array, has correct count at this point
818 // do not resize down to a completely tight deque
819 CFIndex capacity
= __CFArrayDequeRoundUpCapacity(count
+ 6);
820 CFIndex size
= sizeof(struct __CFArrayDeque
) + capacity
* sizeof(struct __CFArrayBucket
);
821 CFAllocatorRef allocator
= __CFGetAllocator(array
);
822 deque
= (struct __CFArrayDeque
*)_CFAllocatorAllocateGC(allocator
, size
, isStrongMemory(array
) ? __kCFAllocatorGCScannedMemory
: 0);
823 if (__CFOASafe
) __CFSetLastAllocationEventName(deque
, "CFArray (store-deque)");
824 deque
->_leftIdx
= (capacity
- count
) / 2;
825 deque
->_capacity
= capacity
;
827 CF_WRITE_BARRIER_BASE_ASSIGN(allocator
, array
, array
->_store
, deque
);
828 raw_buckets
= (struct __CFArrayBucket
*)((uint8_t *)deque
+ sizeof(struct __CFArrayDeque
));
829 CFStorageGetValues(store
, CFRangeMake(0, count
), raw_buckets
+ deque
->_leftIdx
);
830 _CFReleaseGC(store
); // GC: balances CFMakeCollectable() above.
831 __CFBitfieldSetValue(((CFRuntimeBase
*)array
)->_cfinfo
[CF_INFO_BITS
], 1, 0, __kCFArrayDeque
);
834 // may move deque storage, as it may need to grow deque
835 static void __CFArrayRepositionDequeRegions(CFMutableArrayRef array
, CFRange range
, CFIndex newCount
) {
836 // newCount elements are going to replace the range, and the result will fit in the deque
837 struct __CFArrayDeque
*deque
= (struct __CFArrayDeque
*)array
->_store
;
838 struct __CFArrayBucket
*buckets
;
839 CFIndex cnt
, futureCnt
, numNewElems
;
840 CFIndex L
, A
, B
, C
, R
;
842 buckets
= (struct __CFArrayBucket
*)((uint8_t *)deque
+ sizeof(struct __CFArrayDeque
));
843 cnt
= __CFArrayGetCount(array
);
844 futureCnt
= cnt
- range
.length
+ newCount
;
846 L
= deque
->_leftIdx
; // length of region to left of deque
847 A
= range
.location
; // length of region in deque to left of replaced range
848 B
= range
.length
; // length of replaced range
849 C
= cnt
- B
- A
; // length of region in deque to right of replaced range
850 R
= deque
->_capacity
- cnt
- L
; // length of region to right of deque
851 numNewElems
= newCount
- B
;
853 CFIndex wiggle
= deque
->_capacity
>> 17;
854 if (wiggle
< 4) wiggle
= 4;
855 if (deque
->_capacity
< (uint32_t)futureCnt
|| (cnt
< futureCnt
&& L
+ R
< wiggle
)) {
856 // must be inserting or space is tight, reallocate and re-center everything
857 CFIndex capacity
= __CFArrayDequeRoundUpCapacity(futureCnt
+ wiggle
);
858 CFIndex size
= sizeof(struct __CFArrayDeque
) + capacity
* sizeof(struct __CFArrayBucket
);
859 CFAllocatorRef allocator
= __CFGetAllocator(array
);
860 struct __CFArrayDeque
*newDeque
= (struct __CFArrayDeque
*)_CFAllocatorAllocateGC(allocator
, size
, isStrongMemory(array
) ? __kCFAllocatorGCScannedMemory
: 0);
861 if (__CFOASafe
) __CFSetLastAllocationEventName(newDeque
, "CFArray (store-deque)");
862 struct __CFArrayBucket
*newBuckets
= (struct __CFArrayBucket
*)((uint8_t *)newDeque
+ sizeof(struct __CFArrayDeque
));
864 CFIndex newL
= (capacity
- futureCnt
) / 2;
865 CFIndex oldC0
= oldL
+ A
+ B
;
866 CFIndex newC0
= newL
+ A
+ newCount
;
867 newDeque
->_leftIdx
= newL
;
868 newDeque
->_capacity
= capacity
;
870 if (0 < A
) CF_WRITE_BARRIER_MEMMOVE(newBuckets
+ newL
, buckets
+ oldL
, A
* sizeof(struct __CFArrayBucket
));
871 if (0 < C
) CF_WRITE_BARRIER_MEMMOVE(newBuckets
+ newC0
, buckets
+ oldC0
, C
* sizeof(struct __CFArrayBucket
));
872 if (deque
) _CFAllocatorDeallocateGC(allocator
, deque
);
873 CF_WRITE_BARRIER_BASE_ASSIGN(allocator
, array
, array
->_store
, newDeque
);
877 if ((numNewElems
< 0 && C
< A
) || (numNewElems
<= R
&& C
< A
)) { // move C
878 // deleting: C is smaller
879 // inserting: C is smaller and R has room
880 CFIndex oldC0
= L
+ A
+ B
;
881 CFIndex newC0
= L
+ A
+ newCount
;
882 if (0 < C
) CF_WRITE_BARRIER_MEMMOVE(buckets
+ newC0
, buckets
+ oldC0
, C
* sizeof(struct __CFArrayBucket
));
883 // GrP GC: zero-out newly exposed space on the right, if any
884 if (oldC0
> newC0
) bzero(buckets
+ newC0
+ C
, (oldC0
- newC0
) * sizeof(struct __CFArrayBucket
));
885 } else if ((numNewElems
< 0) || (numNewElems
<= L
&& A
<= C
)) { // move A
886 // deleting: A is smaller or equal (covers remaining delete cases)
887 // inserting: A is smaller and L has room
889 CFIndex newL
= L
- numNewElems
;
890 deque
->_leftIdx
= newL
;
891 if (0 < A
) CF_WRITE_BARRIER_MEMMOVE(buckets
+ newL
, buckets
+ oldL
, A
* sizeof(struct __CFArrayBucket
));
892 // GrP GC: zero-out newly exposed space on the left, if any
893 if (newL
> oldL
) bzero(buckets
+ oldL
, (newL
- oldL
) * sizeof(struct __CFArrayBucket
));
895 // now, must be inserting, and either:
896 // A<=C, but L doesn't have room (R might have, but don't care)
897 // C<A, but R doesn't have room (L might have, but don't care)
898 // re-center everything
900 CFIndex newL
= (L
+ R
- numNewElems
) / 2;
901 CFIndex oldBias
= deque
->_bias
;
902 deque
->_bias
= (newL
< oldL
) ? -1 : 1;
904 newL
= newL
- newL
/ 2;
905 } else if (0 < oldBias
) {
906 newL
= newL
+ newL
/ 2;
908 CFIndex oldC0
= oldL
+ A
+ B
;
909 CFIndex newC0
= newL
+ A
+ newCount
;
910 deque
->_leftIdx
= newL
;
912 if (0 < A
) CF_WRITE_BARRIER_MEMMOVE(buckets
+ newL
, buckets
+ oldL
, A
* sizeof(struct __CFArrayBucket
));
913 if (0 < C
) CF_WRITE_BARRIER_MEMMOVE(buckets
+ newC0
, buckets
+ oldC0
, C
* sizeof(struct __CFArrayBucket
));
914 // GrP GC: zero-out newly exposed space on the right, if any
915 if (oldC0
> newC0
) bzero(buckets
+ newC0
+ C
, (oldC0
- newC0
) * sizeof(struct __CFArrayBucket
));
917 if (0 < C
) CF_WRITE_BARRIER_MEMMOVE(buckets
+ newC0
, buckets
+ oldC0
, C
* sizeof(struct __CFArrayBucket
));
918 if (0 < A
) CF_WRITE_BARRIER_MEMMOVE(buckets
+ newL
, buckets
+ oldL
, A
* sizeof(struct __CFArrayBucket
));
919 // GrP GC: zero-out newly exposed space on the left, if any
920 if (newL
> oldL
) bzero(buckets
+ oldL
, (newL
- oldL
) * sizeof(struct __CFArrayBucket
));
925 static void __CFArrayHandleOutOfMemory(CFTypeRef obj
, CFIndex numBytes
) {
926 CFStringRef msg
= CFStringCreateWithFormat(kCFAllocatorSystemDefault
, NULL
, CFSTR("Attempt to allocate %ld bytes for CFArray failed"), numBytes
);
927 CFBadErrorCallBack cb
= _CFGetOutOfMemoryErrorCallBack();
928 if (NULL
== cb
|| !cb(obj
, CFSTR("NS/CFArray"), msg
)) {
929 CFLog(kCFLogLevelCritical
, CFSTR("%@"), msg
);
935 // This function is for Foundation's benefit; no one else should use it.
936 void _CFArraySetCapacity(CFMutableArrayRef array
, CFIndex cap
) {
937 if (CF_IS_OBJC(__kCFArrayTypeID
, array
)) return;
938 __CFGenericValidateType(array
, __kCFArrayTypeID
);
939 CFAssert1(__CFArrayGetType(array
) != __kCFArrayImmutable
, __kCFLogAssertion
, "%s(): array is immutable", __PRETTY_FUNCTION__
);
940 CFAssert3(__CFArrayGetCount(array
) <= cap
, __kCFLogAssertion
, "%s(): desired capacity (%d) is less than count (%d)", __PRETTY_FUNCTION__
, cap
, __CFArrayGetCount(array
));
941 // Currently, attempting to set the capacity of an array which is the CFStorage
942 // variant, or set the capacity larger than __CF_MAX_BUCKETS_PER_DEQUE, has no
943 // effect. The primary purpose of this API is to help avoid a bunch of the
944 // resizes at the small capacities 4, 8, 16, etc.
945 if (__CFArrayGetType(array
) == __kCFArrayDeque
) {
946 struct __CFArrayDeque
*deque
= (struct __CFArrayDeque
*)array
->_store
;
947 CFIndex capacity
= __CFArrayDequeRoundUpCapacity(cap
);
948 CFIndex size
= sizeof(struct __CFArrayDeque
) + capacity
* sizeof(struct __CFArrayBucket
);
949 CFAllocatorRef allocator
= __CFGetAllocator(array
);
951 deque
= (struct __CFArrayDeque
*)_CFAllocatorAllocateGC(allocator
, size
, isStrongMemory(array
) ? __kCFAllocatorGCScannedMemory
: 0);
952 if (NULL
== deque
) __CFArrayHandleOutOfMemory(array
, size
);
953 if (__CFOASafe
) __CFSetLastAllocationEventName(deque
, "CFArray (store-deque)");
954 deque
->_leftIdx
= capacity
/ 2;
956 struct __CFArrayDeque
*olddeque
= deque
;
957 CFIndex oldcap
= deque
->_capacity
;
958 deque
= (struct __CFArrayDeque
*)_CFAllocatorAllocateGC(allocator
, size
, isStrongMemory(array
) ? __kCFAllocatorGCScannedMemory
: 0);
959 if (NULL
== deque
) __CFArrayHandleOutOfMemory(array
, size
);
960 CF_WRITE_BARRIER_MEMMOVE(deque
, olddeque
, sizeof(struct __CFArrayDeque
) + oldcap
* sizeof(struct __CFArrayBucket
));
961 _CFAllocatorDeallocateGC(allocator
, olddeque
);
962 if (__CFOASafe
) __CFSetLastAllocationEventName(deque
, "CFArray (store-deque)");
964 deque
->_capacity
= capacity
;
966 CF_WRITE_BARRIER_BASE_ASSIGN(allocator
, array
, array
->_store
, deque
);
/* Public entry: replaces the values in range with newCount values from
 * newValues, after validating the array, range, mutability and newCount.
 * The real work happens in _CFArrayReplaceValues. */
void CFArrayReplaceValues(CFMutableArrayRef array, CFRange range, const void **newValues, CFIndex newCount) {
    // Bridge to the ObjC implementation when this is really an NSMutableArray.
    CF_OBJC_FUNCDISPATCH3(__kCFArrayTypeID, void, array, "replaceObjectsInRange:withObjects:count:", range, (void **)newValues, newCount);
    __CFGenericValidateType(array, __kCFArrayTypeID);
    __CFArrayValidateRange(array, range, __PRETTY_FUNCTION__);
    CFAssert1(__CFArrayGetType(array) != __kCFArrayImmutable, __kCFLogAssertion, "%s(): array is immutable", __PRETTY_FUNCTION__);
    CFAssert2(0 <= newCount, __kCFLogAssertion, "%s(): newCount (%d) cannot be less than zero", __PRETTY_FUNCTION__, newCount);
    return _CFArrayReplaceValues(array, range, newValues, newCount);
}
980 // This function does no ObjC dispatch or argument checking;
981 // It should only be called from places where that dispatch and check has already been done, or NSCFArray
982 void _CFArrayReplaceValues(CFMutableArrayRef array
, CFRange range
, const void **newValues
, CFIndex newCount
) {
983 const CFArrayCallBacks
*cb
;
984 CFAllocatorRef allocator
;
985 CFIndex idx
, cnt
, futureCnt
;
986 const void **newv
, *buffer
[256];
987 cnt
= __CFArrayGetCount(array
);
988 futureCnt
= cnt
- range
.length
+ newCount
;
989 CFAssert1(newCount
<= futureCnt
, __kCFLogAssertion
, "%s(): internal error 1", __PRETTY_FUNCTION__
);
990 cb
= __CFArrayGetCallBacks(array
);
991 allocator
= __CFGetAllocator(array
);
992 /* Retain new values if needed, possibly allocating a temporary buffer for them */
993 if (NULL
!= cb
->retain
&& !hasBeenFinalized(array
)) {
994 newv
= (newCount
<= 256) ? (const void **)buffer
: (const void **)CFAllocatorAllocate(allocator
, newCount
* sizeof(void *), 0); // GC OK
995 if (newv
!= buffer
&& __CFOASafe
) __CFSetLastAllocationEventName(newv
, "CFArray (temp)");
996 for (idx
= 0; idx
< newCount
; idx
++) {
997 newv
[idx
] = (void *)INVOKE_CALLBACK2(cb
->retain
, allocator
, (void *)newValues
[idx
]);
1002 array
->_mutations
++;
1004 /* Now, there are three regions of interest, each of which may be empty:
1005 * A: the region from index 0 to one less than the range.location
1006 * B: the region of the range
1007 * C: the region from range.location + range.length to the end
1008 * Note that index 0 is not necessarily at the lowest-address edge
1009 * of the available storage. The values in region B need to get
1010 * released, and the values in regions A and C (depending) need
1011 * to get shifted if the number of new values is different from
1012 * the length of the range being replaced.
1014 if (0 < range
.length
) {
1015 __CFArrayReleaseValues(array
, range
, false);
1017 // region B elements are now "dead"
1018 if (__kCFArrayStorage
== __CFArrayGetType(array
)) {
1019 CFStorageRef store
= (CFStorageRef
)array
->_store
;
1020 // reposition regions A and C for new region B elements in gap
1021 if (range
.length
< newCount
) {
1022 CFStorageInsertValues(store
, CFRangeMake(range
.location
+ range
.length
, newCount
- range
.length
));
1023 } else if (newCount
< range
.length
) {
1024 CFStorageDeleteValues(store
, CFRangeMake(range
.location
+ newCount
, range
.length
- newCount
));
1026 if (futureCnt
<= __CF_MAX_BUCKETS_PER_DEQUE
/ 2) {
1027 __CFArrayConvertStoreToDeque(array
);
1029 } else if (NULL
== array
->_store
) {
1030 if (__CF_MAX_BUCKETS_PER_DEQUE
<= futureCnt
) {
1031 CFStorageRef store
= (CFStorageRef
)CFMakeCollectable(CFStorageCreate(allocator
, sizeof(const void *)));
1032 if (! isStrongMemory(array
)) _CFStorageSetWeak(store
);
1033 if (__CFOASafe
) __CFSetLastAllocationEventName(store
, "CFArray (store-storage)");
1034 CF_WRITE_BARRIER_BASE_ASSIGN(allocator
, array
, array
->_store
, store
);
1035 CFStorageInsertValues(store
, CFRangeMake(0, newCount
));
1036 __CFBitfieldSetValue(((CFRuntimeBase
*)array
)->_cfinfo
[CF_INFO_BITS
], 1, 0, __kCFArrayStorage
);
1037 } else if (0 <= futureCnt
) {
1038 struct __CFArrayDeque
*deque
;
1039 CFIndex capacity
= __CFArrayDequeRoundUpCapacity(futureCnt
);
1040 CFIndex size
= sizeof(struct __CFArrayDeque
) + capacity
* sizeof(struct __CFArrayBucket
);
1041 deque
= (struct __CFArrayDeque
*)_CFAllocatorAllocateGC(allocator
, size
, isStrongMemory(array
) ? __kCFAllocatorGCScannedMemory
: 0);
1042 if (__CFOASafe
) __CFSetLastAllocationEventName(deque
, "CFArray (store-deque)");
1043 deque
->_leftIdx
= (capacity
- newCount
) / 2;
1044 deque
->_capacity
= capacity
;
1046 CF_WRITE_BARRIER_BASE_ASSIGN(allocator
, array
, array
->_store
, deque
);
1049 // reposition regions A and C for new region B elements in gap
1050 if (__CF_MAX_BUCKETS_PER_DEQUE
<= futureCnt
) {
1052 __CFArrayConvertDequeToStore(array
);
1053 store
= (CFStorageRef
)array
->_store
;
1054 if (range
.length
< newCount
) {
1055 CFStorageInsertValues(store
, CFRangeMake(range
.location
+ range
.length
, newCount
- range
.length
));
1056 } else if (newCount
< range
.length
) { // this won't happen, but is here for completeness
1057 CFStorageDeleteValues(store
, CFRangeMake(range
.location
+ newCount
, range
.length
- newCount
));
1059 } else if (range
.length
!= newCount
) {
1060 __CFArrayRepositionDequeRegions(array
, range
, newCount
);
1063 // copy in new region B elements
1065 if (__kCFArrayStorage
== __CFArrayGetType(array
)) {
1066 CFStorageRef store
= (CFStorageRef
)array
->_store
;
1067 CFStorageReplaceValues(store
, CFRangeMake(range
.location
, newCount
), newv
);
1069 struct __CFArrayDeque
*deque
= (struct __CFArrayDeque
*)array
->_store
;
1070 struct __CFArrayBucket
*raw_buckets
= (struct __CFArrayBucket
*)((uint8_t *)deque
+ sizeof(struct __CFArrayDeque
));
1071 CFAllocatorRef bucketsAllocator
= isStrongMemory(array
) ? allocator
: kCFAllocatorNull
;
1073 CF_WRITE_BARRIER_ASSIGN(bucketsAllocator
, *((const void **)raw_buckets
+ deque
->_leftIdx
+ range
.location
), newv
[0]);
1075 CF_WRITE_BARRIER_MEMMOVE(raw_buckets
+ deque
->_leftIdx
+ range
.location
, newv
, newCount
* sizeof(struct __CFArrayBucket
));
1078 __CFArraySetCount(array
, futureCnt
);
1079 if (newv
!= buffer
&& newv
!= newValues
) CFAllocatorDeallocate(allocator
, newv
);
1082 struct _acompareContext
{
1083 CFComparatorFunction func
;
/* qsort/bsearch trampoline: v1 and v2 point at stored values, so unwrap one
 * level of indirection before invoking the client comparator. */
static CFComparisonResult __CFArrayCompareValues(const void *v1, const void *v2, struct _acompareContext *context) {
    const void **val1 = (const void **)v1;
    const void **val2 = (const void **)v2;
    return (CFComparisonResult)(INVOKE_CALLBACK3(context->func, *val1, *val2, context->context));
}
1093 void CFArraySortValues(CFMutableArrayRef array
, CFRange range
, CFComparatorFunction comparator
, void *context
) {
1094 FAULT_CALLBACK((void **)&(comparator
));
1095 CF_OBJC_FUNCDISPATCH3(__kCFArrayTypeID
, void, array
, "sortUsingFunction:context:range:", comparator
, context
, range
);
1096 __CFGenericValidateType(array
, __kCFArrayTypeID
);
1097 __CFArrayValidateRange(array
, range
, __PRETTY_FUNCTION__
);
1098 CFAssert1(__CFArrayGetType(array
) != __kCFArrayImmutable
, __kCFLogAssertion
, "%s(): array is immutable", __PRETTY_FUNCTION__
);
1099 CFAssert1(NULL
!= comparator
, __kCFLogAssertion
, "%s(): pointer to comparator function may not be NULL", __PRETTY_FUNCTION__
);
1100 array
->_mutations
++;
1102 if (1 < range
.length
) {
1103 struct _acompareContext ctx
;
1104 struct __CFArrayBucket
*bucket
;
1105 ctx
.func
= comparator
;
1106 ctx
.context
= context
;
1107 switch (__CFArrayGetType(array
)) {
1108 case __kCFArrayDeque
:
1109 bucket
= __CFArrayGetBucketsPtr(array
) + range
.location
;
1110 if (CF_USING_COLLECTABLE_MEMORY
&& isStrongMemory(array
)) {
1111 size_t size
= range
.length
* sizeof(void*);
1112 __CFObjCWriteBarrierRange(bucket
, size
);
1113 CFQSortArray(bucket
, range
.length
, sizeof(void *), (CFComparatorFunction
)__CFArrayCompareValues
, &ctx
);
1115 CFQSortArray(bucket
, range
.length
, sizeof(void *), (CFComparatorFunction
)__CFArrayCompareValues
, &ctx
);
1118 case __kCFArrayStorage
: {
1119 CFStorageRef store
= (CFStorageRef
)array
->_store
;
1120 CFAllocatorRef allocator
= __CFGetAllocator(array
);
1121 const void **values
, *buffer
[256];
1122 values
= (range
.length
<= 256) ? (const void **)buffer
: (const void **)CFAllocatorAllocate(allocator
, range
.length
* sizeof(void *), 0); // GC OK
1123 if (values
!= buffer
&& __CFOASafe
) __CFSetLastAllocationEventName(values
, "CFArray (temp)");
1124 CFStorageGetValues(store
, range
, values
);
1125 CFQSortArray(values
, range
.length
, sizeof(void *), (CFComparatorFunction
)__CFArrayCompareValues
, &ctx
);
1126 CFStorageReplaceValues(store
, range
, values
);
1127 if (values
!= buffer
) CFAllocatorDeallocate(allocator
, values
); // GC OK
1134 CFIndex
CFArrayBSearchValues(CFArrayRef array
, CFRange range
, const void *value
, CFComparatorFunction comparator
, void *context
) {
1135 __CFGenericValidateType(array
, __kCFArrayTypeID
);
1136 __CFArrayValidateRange(array
, range
, __PRETTY_FUNCTION__
);
1137 CFAssert1(NULL
!= comparator
, __kCFLogAssertion
, "%s(): pointer to comparator function may not be NULL", __PRETTY_FUNCTION__
);
1138 bool isObjC
= CF_IS_OBJC(__kCFArrayTypeID
, array
);
1139 FAULT_CALLBACK((void **)&(comparator
));
1141 if (range
.length
<= 0) return range
.location
;
1142 if (isObjC
|| __kCFArrayStorage
== __CFArrayGetType(array
)) {
1145 item
= CFArrayGetValueAtIndex(array
, range
.location
+ range
.length
- 1);
1146 if ((CFComparisonResult
)(INVOKE_CALLBACK3(comparator
, item
, value
, context
)) < 0) {
1147 return range
.location
+ range
.length
;
1149 item
= CFArrayGetValueAtIndex(array
, range
.location
);
1150 if ((CFComparisonResult
)(INVOKE_CALLBACK3(comparator
, value
, item
, context
)) < 0) {
1151 return range
.location
;
1153 lg
= flsl(range
.length
) - 1; // lg2(range.length)
1154 item
= CFArrayGetValueAtIndex(array
, range
.location
+ -1 + (1 << lg
));
1155 idx
= range
.location
+ ((CFComparisonResult
)(INVOKE_CALLBACK3(comparator
, item
, value
, context
)) < 0) ? range
.length
- (1 << lg
) : -1;
1157 item
= CFArrayGetValueAtIndex(array
, range
.location
+ idx
+ (1 << lg
));
1158 if ((CFComparisonResult
)(INVOKE_CALLBACK3(comparator
, item
, value
, context
)) < 0) {
1164 struct _acompareContext ctx
;
1165 ctx
.func
= comparator
;
1166 ctx
.context
= context
;
1167 idx
= CFBSearch(&value
, sizeof(void *), __CFArrayGetBucketsPtr(array
) + range
.location
, range
.length
, (CFComparatorFunction
)__CFArrayCompareValues
, &ctx
);
1169 return idx
+ range
.location
;
1172 void CFArrayAppendArray(CFMutableArrayRef array
, CFArrayRef otherArray
, CFRange otherRange
) {
1174 __CFGenericValidateType(array
, __kCFArrayTypeID
);
1175 __CFGenericValidateType(otherArray
, __kCFArrayTypeID
);
1176 CFAssert1(__CFArrayGetType(array
) != __kCFArrayImmutable
, __kCFLogAssertion
, "%s(): array is immutable", __PRETTY_FUNCTION__
);
1177 __CFArrayValidateRange(otherArray
, otherRange
, __PRETTY_FUNCTION__
);
1178 for (idx
= otherRange
.location
; idx
< otherRange
.location
+ otherRange
.length
; idx
++) {
1179 CFArrayAppendValue(array
, CFArrayGetValueAtIndex(otherArray
, idx
));