/*
 * Copyright (c) 2005 Apple Computer, Inc. All rights reserved.
 *
 * @APPLE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this
 * file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_LICENSE_HEADER_END@
 */
/*	CFBinaryHeap.c
	Copyright 1998-2002, Apple, Inc. All rights reserved.
	Responsibility: Christopher Kane
*/

#include <CoreFoundation/CFBinaryHeap.h>
#include "CFUtilitiesPriv.h"
#include "CFInternal.h"

const CFBinaryHeapCallBacks kCFStringBinaryHeapCallBacks = {0, __CFTypeCollectionRetain, __CFTypeCollectionRelease, CFCopyDescription, (CFComparisonResult (*)(const void *, const void *, void *))CFStringCompare};

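/*
 * Illustrative usage sketch (not part of the original source): client code can
 * pass kCFStringBinaryHeapCallBacks to CFBinaryHeapCreate to get a min-heap that
 * retains CFString values and orders them with CFStringCompare. The function
 * name PrintSortedStrings below is hypothetical.
 */
#if 0
#include <CoreFoundation/CoreFoundation.h>

static void PrintSortedStrings(void) {
    CFBinaryHeapRef heap = CFBinaryHeapCreate(kCFAllocatorDefault, 0,
                                              &kCFStringBinaryHeapCallBacks, NULL);
    CFBinaryHeapAddValue(heap, CFSTR("pear"));
    CFBinaryHeapAddValue(heap, CFSTR("apple"));
    CFBinaryHeapAddValue(heap, CFSTR("orange"));
    // Values come back smallest-first: "apple", "orange", "pear".
    while (0 < CFBinaryHeapGetCount(heap)) {
        CFShow(CFBinaryHeapGetMinimum(heap));
        CFBinaryHeapRemoveMinimumValue(heap);
    }
    CFRelease(heap);
}
#endif
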
struct __CFBinaryHeapBucket {
    void *_item;
};

CF_INLINE CFIndex __CFBinaryHeapRoundUpCapacity(CFIndex capacity) {
    if (capacity < 4) return 4;
    return (1 << (CFLog2(capacity) + 1));
}

CF_INLINE CFIndex __CFBinaryHeapNumBucketsForCapacity(CFIndex capacity) {
    return capacity;
}

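/*
 * Note on the helpers above (illustrative, not part of the original source):
 * __CFBinaryHeapRoundUpCapacity() rounds a requested capacity up to the next
 * power of two (minimum 4), and __CFBinaryHeapNumBucketsForCapacity() maps a
 * capacity to a bucket count one-for-one, e.g.:
 *
 *     __CFBinaryHeapRoundUpCapacity(1)  -> 4
 *     __CFBinaryHeapRoundUpCapacity(4)  -> 8     (CFLog2(4) == 2, so 1 << 3)
 *     __CFBinaryHeapRoundUpCapacity(5)  -> 8
 *     __CFBinaryHeapRoundUpCapacity(9)  -> 16
 *     __CFBinaryHeapNumBucketsForCapacity(n) -> n
 */
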
struct __CFBinaryHeap {
    CFRuntimeBase _base;
    CFIndex _count;         /* number of objects */
    CFIndex _capacity;      /* maximum number of objects */
    CFBinaryHeapCallBacks _callbacks;
    CFBinaryHeapCompareContext _context;
    struct __CFBinaryHeapBucket *_buckets;
};

CF_INLINE CFIndex __CFBinaryHeapCount(CFBinaryHeapRef heap) {
    return heap->_count;
}

CF_INLINE void __CFBinaryHeapSetCount(CFBinaryHeapRef heap, CFIndex v) {
    /* for a CFBinaryHeap, _bucketsUsed == _count */
}

CF_INLINE CFIndex __CFBinaryHeapCapacity(CFBinaryHeapRef heap) {
    return heap->_capacity;
}

CF_INLINE void __CFBinaryHeapSetCapacity(CFBinaryHeapRef heap, CFIndex v) {
    /* for a CFBinaryHeap, _bucketsNum == _capacity */
}

CF_INLINE CFIndex __CFBinaryHeapNumBucketsUsed(CFBinaryHeapRef heap) {
    return heap->_count;
}

CF_INLINE void __CFBinaryHeapSetNumBucketsUsed(CFBinaryHeapRef heap, CFIndex v) {
    heap->_count = v;
}

CF_INLINE CFIndex __CFBinaryHeapNumBuckets(CFBinaryHeapRef heap) {
    return heap->_capacity;
}

CF_INLINE void __CFBinaryHeapSetNumBuckets(CFBinaryHeapRef heap, CFIndex v) {
    heap->_capacity = v;
}

enum {      /* Bits 1-0 */
    kCFBinaryHeapImmutable = 0x0,       /* unchangeable and fixed capacity; default */
    kCFBinaryHeapMutable = 0x1,         /* changeable and variable capacity */
    kCFBinaryHeapFixedMutable = 0x3     /* changeable and fixed capacity */
};

/* Bits 4-5 are used by GC */

CF_INLINE bool isStrongMemory(CFTypeRef collection) {
    return !__CFBitfieldGetValue(((const CFRuntimeBase *)collection)->_info, 4, 4);
}

CF_INLINE bool needsRestore(CFTypeRef collection) {
    return __CFBitfieldGetValue(((const CFRuntimeBase *)collection)->_info, 5, 5);
}

CF_INLINE UInt32 __CFBinaryHeapMutableVariety(const void *cf) {
    return __CFBitfieldGetValue(((const CFRuntimeBase *)cf)->_info, 3, 2);
}

CF_INLINE void __CFBinaryHeapSetMutableVariety(void *cf, UInt32 v) {
    __CFBitfieldSetValue(((CFRuntimeBase *)cf)->_info, 3, 2, v);
}

CF_INLINE UInt32 __CFBinaryHeapMutableVarietyFromFlags(UInt32 flags) {
    return __CFBitfieldGetValue(flags, 1, 0);
}

static bool __CFBinaryHeapEqual(CFTypeRef cf1, CFTypeRef cf2) {
    CFBinaryHeapRef heap1 = (CFBinaryHeapRef)cf1;
    CFBinaryHeapRef heap2 = (CFBinaryHeapRef)cf2;
    CFComparisonResult (*compare)(const void *, const void *, void *);
    CFIndex idx, cnt;
    const void **list1, **list2, *buffer[256];
    cnt = __CFBinaryHeapCount(heap1);
    if (cnt != __CFBinaryHeapCount(heap2)) return false;
    compare = heap1->_callbacks.compare;
    if (compare != heap2->_callbacks.compare) return false;
    if (0 == cnt) return true;  /* after function comparison */
    list1 = (cnt <= 128) ? buffer : CFAllocatorAllocate(kCFAllocatorSystemDefault, 2 * cnt * sizeof(void *), 0); // GC OK
    if (__CFOASafe && list1 != buffer) __CFSetLastAllocationEventName(list1, "CFBinaryHeap (temp)");
    list2 = (cnt <= 128) ? buffer + 128 : list1 + cnt;
    CFBinaryHeapGetValues(heap1, list1);
    CFBinaryHeapGetValues(heap2, list2);
    for (idx = 0; idx < cnt; idx++) {
        const void *val1 = list1[idx];
        const void *val2 = list2[idx];
        // CF: which context info should be passed in? both?
        // CF: if the context infos are not equal, should the heaps not be equal?
        if (val1 != val2 && compare && !compare(val1, val2, heap1->_context.info)) return false;
    }
    if (list1 != buffer) CFAllocatorDeallocate(CFGetAllocator(heap1), list1); // GC OK
    return true;
}

static UInt32 __CFBinaryHeapHash(CFTypeRef cf) {
    CFBinaryHeapRef heap = (CFBinaryHeapRef)cf;
    return __CFBinaryHeapCount(heap);
}

static CFStringRef __CFBinaryHeapCopyDescription(CFTypeRef cf) {
    CFBinaryHeapRef heap = (CFBinaryHeapRef)cf;
    CFMutableStringRef result;
    CFIndex idx, cnt;
    const void **list, *buffer[256];
    cnt = __CFBinaryHeapCount(heap);
    result = CFStringCreateMutable(CFGetAllocator(heap), 0);
    CFStringAppendFormat(result, NULL, CFSTR("<CFBinaryHeap %p [%p]>{count = %u, capacity = %u, objects = (\n"), cf, CFGetAllocator(heap), cnt, __CFBinaryHeapCapacity(heap));
    list = (cnt <= 128) ? buffer : CFAllocatorAllocate(kCFAllocatorSystemDefault, cnt * sizeof(void *), 0); // GC OK
    if (__CFOASafe && list != buffer) __CFSetLastAllocationEventName(list, "CFBinaryHeap (temp)");
    CFBinaryHeapGetValues(heap, list);
    for (idx = 0; idx < cnt; idx++) {
        CFStringRef desc = NULL;
        const void *item = list[idx];
        if (NULL != heap->_callbacks.copyDescription) {
            desc = heap->_callbacks.copyDescription(item);
        }
        if (NULL != desc) {
            CFStringAppendFormat(result, NULL, CFSTR("\t%u : %@\n"), idx, desc);
            CFRelease(desc);
        } else {
            CFStringAppendFormat(result, NULL, CFSTR("\t%u : <0x%x>\n"), idx, (UInt32)item);
        }
    }
    CFStringAppend(result, CFSTR(")}"));
    if (list != buffer) CFAllocatorDeallocate(CFGetAllocator(heap), list); // GC OK
    return result;
}

static void __CFBinaryHeapDeallocate(CFTypeRef cf) {
    CFBinaryHeapRef heap = (CFBinaryHeapRef)cf;
    CFAllocatorRef allocator = CFGetAllocator(heap);
    if (CF_IS_COLLECTABLE_ALLOCATOR(allocator)) {
        if (heap->_callbacks.retain == NULL && heap->_callbacks.release == NULL)
            return; // GC: keep heap intact during finalization.
    }
    // CF: should make the heap mutable here first, a la CFArrayDeallocate
    CFBinaryHeapRemoveAllValues(heap);
    // CF: does not release the context info
    if (__CFBinaryHeapMutableVariety(heap) == kCFBinaryHeapMutable) {
        _CFAllocatorDeallocateGC(allocator, heap->_buckets);
    }
}

static CFTypeID __kCFBinaryHeapTypeID = _kCFRuntimeNotATypeID;

static const CFRuntimeClass __CFBinaryHeapClass = {
    _kCFRuntimeScannedObject,
    "CFBinaryHeap",
    NULL,   // init
    NULL,   // copy
    __CFBinaryHeapDeallocate,
    (void *)__CFBinaryHeapEqual,
    __CFBinaryHeapHash,
    NULL,   // copyFormattingDesc
    __CFBinaryHeapCopyDescription
};

__private_extern__ void __CFBinaryHeapInitialize(void) {
    __kCFBinaryHeapTypeID = _CFRuntimeRegisterClass(&__CFBinaryHeapClass);
}

CFTypeID CFBinaryHeapGetTypeID(void) {
    return __kCFBinaryHeapTypeID;
}

static CFBinaryHeapRef __CFBinaryHeapInit(CFAllocatorRef allocator, UInt32 flags, CFIndex capacity, const void **values, CFIndex numValues, const CFBinaryHeapCallBacks *callBacks, const CFBinaryHeapCompareContext *compareContext) {
    // CF: does not copy the compareContext argument into the object
    CFBinaryHeapRef memory;
    CFIndex idx;
    CFIndex size;
    CFAssert2(0 <= capacity, __kCFLogAssertion, "%s(): capacity (%d) cannot be less than zero", __PRETTY_FUNCTION__, capacity);
    CFAssert3(kCFBinaryHeapFixedMutable != __CFBinaryHeapMutableVarietyFromFlags(flags) || numValues <= capacity, __kCFLogAssertion, "%s(): for fixed-mutable type, capacity (%d) must be greater than or equal to number of initial elements (%d)", __PRETTY_FUNCTION__, capacity, numValues);
    CFAssert2(0 <= numValues, __kCFLogAssertion, "%s(): numValues (%d) cannot be less than zero", __PRETTY_FUNCTION__, numValues);
    size = sizeof(struct __CFBinaryHeap) - sizeof(CFRuntimeBase);
    if (__CFBinaryHeapMutableVarietyFromFlags(flags) != kCFBinaryHeapMutable)
        size += sizeof(struct __CFBinaryHeapBucket) * __CFBinaryHeapNumBucketsForCapacity(capacity);
    CFBinaryHeapCallBacks nonRetainingCallbacks;
    if (CF_IS_COLLECTABLE_ALLOCATOR(allocator)) {
        if (!callBacks || (callBacks->retain == NULL && callBacks->release == NULL)) {
            __CFBitfieldSetValue(flags, 4, 4, 1); // setWeak
        } else {
            if (callBacks->retain == __CFTypeCollectionRetain && callBacks->release == __CFTypeCollectionRelease) {
                nonRetainingCallbacks = *callBacks;
                nonRetainingCallbacks.retain = NULL;
                nonRetainingCallbacks.release = NULL;
                callBacks = &nonRetainingCallbacks;
                __CFBitfieldSetValue(flags, 5, 5, 1); // setNeedsRestore
            }
        }
    }
    memory = (CFBinaryHeapRef)_CFRuntimeCreateInstance(allocator, __kCFBinaryHeapTypeID, size, NULL);
    if (NULL == memory) {
        return NULL;
    }
    switch (__CFBinaryHeapMutableVarietyFromFlags(flags)) {
    case kCFBinaryHeapMutable:
        __CFBinaryHeapSetCapacity(memory, __CFBinaryHeapRoundUpCapacity(1));
        __CFBinaryHeapSetNumBuckets(memory, __CFBinaryHeapNumBucketsForCapacity(__CFBinaryHeapRoundUpCapacity(1)));
        void *buckets = _CFAllocatorAllocateGC(allocator, __CFBinaryHeapNumBuckets(memory) * sizeof(struct __CFBinaryHeapBucket), isStrongMemory(memory) ? AUTO_MEMORY_SCANNED : AUTO_MEMORY_UNSCANNED);
        CF_WRITE_BARRIER_BASE_ASSIGN(allocator, memory, memory->_buckets, buckets);
        if (__CFOASafe) __CFSetLastAllocationEventName(memory->_buckets, "CFBinaryHeap (store)");
        if (NULL == memory->_buckets) {
            CFRelease(memory);
            return NULL;
        }
        break;
    case kCFBinaryHeapFixedMutable:
    case kCFBinaryHeapImmutable:
        if (!isStrongMemory(memory)) { // if weak, don't scan
            auto_zone_set_layout_type(__CFCollectableZone, memory, AUTO_OBJECT_UNSCANNED);
        }
        /* Don't round up capacity */
        __CFBinaryHeapSetCapacity(memory, capacity);
        __CFBinaryHeapSetNumBuckets(memory, __CFBinaryHeapNumBucketsForCapacity(capacity));
        memory->_buckets = (struct __CFBinaryHeapBucket *)((int8_t *)memory + sizeof(struct __CFBinaryHeap));
        break;
    }
    __CFBinaryHeapSetNumBucketsUsed(memory, 0);
    __CFBinaryHeapSetCount(memory, 0);
    if (NULL != callBacks) {
        memory->_callbacks.retain = callBacks->retain;
        memory->_callbacks.release = callBacks->release;
        memory->_callbacks.copyDescription = callBacks->copyDescription;
        memory->_callbacks.compare = callBacks->compare;
    } else {
        memory->_callbacks.retain = 0;
        memory->_callbacks.release = 0;
        memory->_callbacks.copyDescription = 0;
        memory->_callbacks.compare = 0;
    }
    if (__CFBinaryHeapMutableVarietyFromFlags(flags) != kCFBinaryHeapMutable) {
        __CFBinaryHeapSetMutableVariety(memory, kCFBinaryHeapFixedMutable);
    } else {
        __CFBinaryHeapSetMutableVariety(memory, kCFBinaryHeapMutable);
    }
    for (idx = 0; idx < numValues; idx++) {
        CFBinaryHeapAddValue(memory, values[idx]);
    }
    __CFBinaryHeapSetMutableVariety(memory, __CFBinaryHeapMutableVarietyFromFlags(flags));
    return memory;
}

CFBinaryHeapRef CFBinaryHeapCreate(CFAllocatorRef allocator, CFIndex capacity, const CFBinaryHeapCallBacks *callBacks, const CFBinaryHeapCompareContext *compareContext) {
    return __CFBinaryHeapInit(allocator, (0 == capacity) ? kCFBinaryHeapMutable : kCFBinaryHeapFixedMutable, capacity, NULL, 0, callBacks, compareContext);
}

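/*
 * Illustrative sketch (not part of the original source): a client-defined
 * callback set for a heap of CFNumbers. The wrapper functions, the callbacks
 * constant, and CreateNumberHeap are hypothetical names, not CF API.
 */
#if 0
#include <CoreFoundation/CoreFoundation.h>

static const void *NumberRetain(CFAllocatorRef allocator, const void *ptr) { return CFRetain(ptr); }
static void NumberRelease(CFAllocatorRef allocator, const void *ptr) { CFRelease(ptr); }

static const CFBinaryHeapCallBacks kNumberHeapCallBacks = {
    0,                          // version
    NumberRetain,               // retain
    NumberRelease,              // release
    CFCopyDescription,          // copyDescription
    (CFComparisonResult (*)(const void *, const void *, void *))CFNumberCompare
};

static CFBinaryHeapRef CreateNumberHeap(void) {
    // capacity 0 => kCFBinaryHeapMutable: the heap grows as values are added
    return CFBinaryHeapCreate(kCFAllocatorDefault, 0, &kNumberHeapCallBacks, NULL);
}
#endif
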
CFBinaryHeapRef CFBinaryHeapCreateCopy(CFAllocatorRef allocator, CFIndex capacity, CFBinaryHeapRef heap) {
    __CFGenericValidateType(heap, __kCFBinaryHeapTypeID);
    return __CFBinaryHeapInit(allocator, (0 == capacity) ? kCFBinaryHeapMutable : kCFBinaryHeapFixedMutable, capacity, (const void **)heap->_buckets, __CFBinaryHeapCount(heap), &(heap->_callbacks), &(heap->_context));
}

CFIndex CFBinaryHeapGetCount(CFBinaryHeapRef heap) {
    __CFGenericValidateType(heap, __kCFBinaryHeapTypeID);
    return __CFBinaryHeapCount(heap);
}

CFIndex CFBinaryHeapGetCountOfValue(CFBinaryHeapRef heap, const void *value) {
    CFComparisonResult (*compare)(const void *, const void *, void *);
    CFIndex idx;
    CFIndex cnt = 0, length;
    __CFGenericValidateType(heap, __kCFBinaryHeapTypeID);
    compare = heap->_callbacks.compare;
    length = __CFBinaryHeapCount(heap);
    for (idx = 0; idx < length; idx++) {
        const void *item = heap->_buckets[idx]._item;
        if (value == item || (compare && kCFCompareEqualTo == compare(value, item, heap->_context.info))) {
            cnt++;
        }
    }
    return cnt;
}

Boolean CFBinaryHeapContainsValue(CFBinaryHeapRef heap, const void *value) {
    CFComparisonResult (*compare)(const void *, const void *, void *);
    CFIndex idx, length;
    __CFGenericValidateType(heap, __kCFBinaryHeapTypeID);
    compare = heap->_callbacks.compare;
    length = __CFBinaryHeapCount(heap);
    for (idx = 0; idx < length; idx++) {
        const void *item = heap->_buckets[idx]._item;
        if (value == item || (compare && kCFCompareEqualTo == compare(value, item, heap->_context.info))) {
            return true;
        }
    }
    return false;
}

const void *CFBinaryHeapGetMinimum(CFBinaryHeapRef heap) {
    __CFGenericValidateType(heap, __kCFBinaryHeapTypeID);
    CFAssert1(0 < __CFBinaryHeapCount(heap), __kCFLogAssertion, "%s(): binary heap is empty", __PRETTY_FUNCTION__);
    return (0 < __CFBinaryHeapCount(heap)) ? heap->_buckets[0]._item : NULL;
}

Boolean CFBinaryHeapGetMinimumIfPresent(CFBinaryHeapRef heap, const void **value) {
    __CFGenericValidateType(heap, __kCFBinaryHeapTypeID);
    if (0 == __CFBinaryHeapCount(heap)) return false;
    if (NULL != value) __CFObjCStrongAssign(heap->_buckets[0]._item, value);
    return true;
}

void CFBinaryHeapGetValues(CFBinaryHeapRef heap, const void **values) {
    CFBinaryHeapRef heapCopy;
    CFIndex idx, cnt;
    __CFGenericValidateType(heap, __kCFBinaryHeapTypeID);
    CFAssert1(NULL != values, __kCFLogAssertion, "%s(): pointer to values may not be NULL", __PRETTY_FUNCTION__);
    cnt = __CFBinaryHeapCount(heap);
    if (0 == cnt) return;
    if (CF_USING_COLLECTABLE_MEMORY) {
        // GC: speculatively issue a write-barrier on the copied to buffers (3743553).
        __CFObjCWriteBarrierRange(values, cnt * sizeof(void *));
    }
    heapCopy = CFBinaryHeapCreateCopy(CFGetAllocator(heap), cnt, heap);
    idx = 0;
    while (0 < __CFBinaryHeapCount(heapCopy)) {
        const void *value = CFBinaryHeapGetMinimum(heapCopy);
        CFBinaryHeapRemoveMinimumValue(heapCopy);
        values[idx++] = value;
    }
    CFRelease(heapCopy);
}

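/*
 * Illustrative sketch (not part of the original source): because the copy is
 * drained smallest-first above, CFBinaryHeapGetValues() fills the caller's
 * buffer in ascending order. Sizing the buffer is the caller's job; the helper
 * name CopySortedValues below is hypothetical.
 */
#if 0
#include <CoreFoundation/CoreFoundation.h>
#include <stdlib.h>

static const void **CopySortedValues(CFBinaryHeapRef heap, CFIndex *outCount) {
    CFIndex cnt = CFBinaryHeapGetCount(heap);
    const void **values = (const void **)malloc(cnt * sizeof(const void *));
    CFBinaryHeapGetValues(heap, values);    // values[0] is the minimum
    *outCount = cnt;
    return values;                          // caller free()s the buffer
}
#endif
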
void CFBinaryHeapApplyFunction(CFBinaryHeapRef heap, CFBinaryHeapApplierFunction applier, void *context) {
    CFBinaryHeapRef heapCopy;
    CFIndex cnt;
    __CFGenericValidateType(heap, __kCFBinaryHeapTypeID);
    CFAssert1(NULL != applier, __kCFLogAssertion, "%s(): pointer to applier function may not be NULL", __PRETTY_FUNCTION__);
    cnt = __CFBinaryHeapCount(heap);
    if (0 == cnt) return;
    heapCopy = CFBinaryHeapCreateCopy(CFGetAllocator(heap), cnt, heap);
    while (0 < __CFBinaryHeapCount(heapCopy)) {
        const void *value = CFBinaryHeapGetMinimum(heapCopy);
        CFBinaryHeapRemoveMinimumValue(heapCopy);
        applier(value, context);
    }
    CFRelease(heapCopy);
}

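/*
 * Illustrative sketch (not part of the original source): an applier is called
 * once per value, smallest-first, with the caller-supplied context pointer.
 * The counting applier below is a hypothetical example.
 */
#if 0
#include <CoreFoundation/CoreFoundation.h>

static void CountApplier(const void *value, void *context) {
    CFIndex *counter = (CFIndex *)context;
    (*counter)++;
}

static CFIndex CountValues(CFBinaryHeapRef heap) {
    CFIndex counter = 0;
    CFBinaryHeapApplyFunction(heap, CountApplier, &counter);
    return counter;
}
#endif
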
static void __CFBinaryHeapGrow(CFBinaryHeapRef heap, CFIndex numNewValues) {
    CFIndex oldCount = __CFBinaryHeapCount(heap);
    CFIndex capacity = __CFBinaryHeapRoundUpCapacity(oldCount + numNewValues);
    CFAllocatorRef allocator = CFGetAllocator(heap);
    __CFBinaryHeapSetCapacity(heap, capacity);
    __CFBinaryHeapSetNumBuckets(heap, __CFBinaryHeapNumBucketsForCapacity(capacity));
    void *buckets = _CFAllocatorReallocateGC(allocator, heap->_buckets, __CFBinaryHeapNumBuckets(heap) * sizeof(struct __CFBinaryHeapBucket), isStrongMemory(heap) ? AUTO_MEMORY_SCANNED : AUTO_MEMORY_UNSCANNED);
    CF_WRITE_BARRIER_BASE_ASSIGN(allocator, heap, heap->_buckets, buckets);
    if (__CFOASafe) __CFSetLastAllocationEventName(heap->_buckets, "CFBinaryHeap (store)");
    if (NULL == heap->_buckets) HALT;
}

void CFBinaryHeapAddValue(CFBinaryHeapRef heap, const void *value) {
    CFIndex idx, pidx;
    CFIndex cnt;
    CFAllocatorRef allocator = CFGetAllocator(heap);
    __CFGenericValidateType(heap, __kCFBinaryHeapTypeID);
    CFAssert1(__CFBinaryHeapMutableVariety(heap) == kCFBinaryHeapMutable || __CFBinaryHeapMutableVariety(heap) == kCFBinaryHeapFixedMutable, __kCFLogAssertion, "%s(): binary heap is immutable", __PRETTY_FUNCTION__);
    switch (__CFBinaryHeapMutableVariety(heap)) {
    case kCFBinaryHeapMutable:
        if (__CFBinaryHeapNumBucketsUsed(heap) == __CFBinaryHeapCapacity(heap))
            __CFBinaryHeapGrow(heap, 1);
        break;
    case kCFBinaryHeapFixedMutable:
        CFAssert1(__CFBinaryHeapCount(heap) < __CFBinaryHeapCapacity(heap), __kCFLogAssertion, "%s(): fixed-capacity binary heap is full", __PRETTY_FUNCTION__);
        break;
    }
    cnt = __CFBinaryHeapCount(heap);
    idx = cnt;
    __CFBinaryHeapSetNumBucketsUsed(heap, cnt + 1);
    __CFBinaryHeapSetCount(heap, cnt + 1);
    pidx = (idx - 1) >> 1;
    while (0 < idx) {
        void *item = heap->_buckets[pidx]._item;
        if (kCFCompareGreaterThan != heap->_callbacks.compare(item, value, heap->_context.info)) break;
        CF_WRITE_BARRIER_BASE_ASSIGN(allocator, heap->_buckets, heap->_buckets[idx]._item, item);
        idx = pidx;
        pidx = (idx - 1) >> 1;
    }
    if (heap->_callbacks.retain) {
        CF_WRITE_BARRIER_BASE_ASSIGN(allocator, heap->_buckets, heap->_buckets[idx]._item, (void *)heap->_callbacks.retain(allocator, (void *)value));
    } else {
        CF_WRITE_BARRIER_BASE_ASSIGN(allocator, heap->_buckets, heap->_buckets[idx]._item, (void *)value);
    }
}

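/*
 * Note on the insertion above (illustrative, not part of the original source):
 * the new value conceptually starts at index cnt and is sifted up while its
 * parent at (idx - 1) >> 1 compares greater than it, so the bucket array always
 * keeps each parent <= both children. For example, inserting into a 6-element
 * heap places the value at index 6, whose parent is index 2, whose parent is 0.
 */
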
void CFBinaryHeapRemoveMinimumValue(CFBinaryHeapRef heap) {
    void *val;
    CFIndex idx, cidx;
    CFIndex cnt;
    CFAllocatorRef allocator;
    __CFGenericValidateType(heap, __kCFBinaryHeapTypeID);
    CFAssert1(__CFBinaryHeapMutableVariety(heap) == kCFBinaryHeapMutable || __CFBinaryHeapMutableVariety(heap) == kCFBinaryHeapFixedMutable, __kCFLogAssertion, "%s(): binary heap is immutable", __PRETTY_FUNCTION__);
    cnt = __CFBinaryHeapCount(heap);
    if (0 == cnt) return;
    idx = 0;
    __CFBinaryHeapSetNumBucketsUsed(heap, cnt - 1);
    __CFBinaryHeapSetCount(heap, cnt - 1);
    allocator = CFGetAllocator(heap);
    if (heap->_callbacks.release)
        heap->_callbacks.release(allocator, heap->_buckets[idx]._item);
    val = heap->_buckets[cnt - 1]._item;
    cidx = (idx << 1) + 1;
    while (cidx < __CFBinaryHeapCount(heap)) {
        void *item = heap->_buckets[cidx]._item;
        if (cidx + 1 < __CFBinaryHeapCount(heap)) {
            void *item2 = heap->_buckets[cidx + 1]._item;
            if (kCFCompareGreaterThan == heap->_callbacks.compare(item, item2, heap->_context.info)) {
                cidx++;
                item = item2;
            }
        }
        if (kCFCompareGreaterThan == heap->_callbacks.compare(item, val, heap->_context.info)) break;
        CF_WRITE_BARRIER_BASE_ASSIGN(allocator, heap->_buckets, heap->_buckets[idx]._item, item);
        idx = cidx;
        cidx = (idx << 1) + 1;
    }
    CF_WRITE_BARRIER_BASE_ASSIGN(allocator, heap->_buckets, heap->_buckets[idx]._item, val);
}

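/*
 * Illustrative sketch (not part of the original source): removal releases the
 * root, moves the last bucket's value to the root, and sifts it down toward the
 * smaller child at (idx << 1) + 1 or (idx << 1) + 2. Draining a heap this way
 * visits its values in ascending order, as in the hypothetical helper below.
 */
#if 0
#include <CoreFoundation/CoreFoundation.h>

static void DrainSmallestFirst(CFBinaryHeapRef heap, CFBinaryHeapApplierFunction visit, void *context) {
    while (0 < CFBinaryHeapGetCount(heap)) {
        visit(CFBinaryHeapGetMinimum(heap), context);
        CFBinaryHeapRemoveMinimumValue(heap);   // destructive: empties the heap
    }
}
#endif
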
void CFBinaryHeapRemoveAllValues(CFBinaryHeapRef heap) {
    CFIndex idx;
    CFIndex cnt;
    __CFGenericValidateType(heap, __kCFBinaryHeapTypeID);
    CFAssert1(__CFBinaryHeapMutableVariety(heap) == kCFBinaryHeapMutable || __CFBinaryHeapMutableVariety(heap) == kCFBinaryHeapFixedMutable, __kCFLogAssertion, "%s(): binary heap is immutable", __PRETTY_FUNCTION__);
    cnt = __CFBinaryHeapCount(heap);
    if (heap->_callbacks.release)
        for (idx = 0; idx < cnt; idx++)
            heap->_callbacks.release(CFGetAllocator(heap), heap->_buckets[idx]._item);
    __CFBinaryHeapSetNumBucketsUsed(heap, 0);
    __CFBinaryHeapSetCount(heap, 0);
}