/*
 * Copyright (c) 2005 Apple Computer, Inc. All rights reserved.
 *
 * @APPLE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this
 * file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_LICENSE_HEADER_END@
 */
/*  CFBinaryHeap.c
    Copyright 1998-2002, Apple, Inc. All rights reserved.
    Responsibility: Christopher Kane
*/

#include <CoreFoundation/CFBinaryHeap.h>
#include "CFUtilitiesPriv.h"
#include "CFInternal.h"

const CFBinaryHeapCallBacks kCFStringBinaryHeapCallBacks = {0, __CFTypeCollectionRetain, __CFTypeCollectionRelease, CFCopyDescription, (CFComparisonResult (*)(const void *, const void *, void *))CFStringCompare};

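/* A CFBinaryHeap is a min-heap: CFBinaryHeapGetMinimum() returns the value that orders
   first under the compare callback.  A minimal usage sketch, using only the API
   implemented in this file plus the standard CF allocator and string constants
   (illustrative, not part of the original source):

       CFBinaryHeapRef heap = CFBinaryHeapCreate(kCFAllocatorDefault, 0, &kCFStringBinaryHeapCallBacks, NULL);
       CFBinaryHeapAddValue(heap, CFSTR("banana"));
       CFBinaryHeapAddValue(heap, CFSTR("apple"));
       CFStringRef min = (CFStringRef)CFBinaryHeapGetMinimum(heap);   // "apple"
       CFBinaryHeapRemoveMinimumValue(heap);
       CFRelease(heap);
*/
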
struct __CFBinaryHeapBucket {
    void *_item;
};

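/* Round a requested capacity up to the next power of two above it, with a floor of 4. */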
CF_INLINE CFIndex __CFBinaryHeapRoundUpCapacity(CFIndex capacity) {
    if (capacity < 4) return 4;
    return (1 << (CFLog2(capacity) + 1));
}

CF_INLINE CFIndex __CFBinaryHeapNumBucketsForCapacity(CFIndex capacity) {
    return capacity;
}

struct __CFBinaryHeap {
    CFRuntimeBase _base;
    CFIndex _count;        /* number of objects */
    CFIndex _capacity;     /* maximum number of objects */
    CFBinaryHeapCallBacks _callbacks;
    CFBinaryHeapCompareContext _context;
    struct __CFBinaryHeapBucket *_buckets;
};

CF_INLINE CFIndex __CFBinaryHeapCount(CFBinaryHeapRef heap) {
    return heap->_count;
}

CF_INLINE void __CFBinaryHeapSetCount(CFBinaryHeapRef heap, CFIndex v) {
    /* for a CFBinaryHeap, _bucketsUsed == _count */
}

CF_INLINE CFIndex __CFBinaryHeapCapacity(CFBinaryHeapRef heap) {
    return heap->_capacity;
}

CF_INLINE void __CFBinaryHeapSetCapacity(CFBinaryHeapRef heap, CFIndex v) {
    /* for a CFBinaryHeap, _bucketsNum == _capacity */
}

CF_INLINE CFIndex __CFBinaryHeapNumBucketsUsed(CFBinaryHeapRef heap) {
    return heap->_count;
}

CF_INLINE void __CFBinaryHeapSetNumBucketsUsed(CFBinaryHeapRef heap, CFIndex v) {
    heap->_count = v;
}

CF_INLINE CFIndex __CFBinaryHeapNumBuckets(CFBinaryHeapRef heap) {
    return heap->_capacity;
}

CF_INLINE void __CFBinaryHeapSetNumBuckets(CFBinaryHeapRef heap, CFIndex v) {
    heap->_capacity = v;
}

enum {    /* bits 1-0 */
    kCFBinaryHeapImmutable = 0x0,      /* unchangeable and fixed capacity; default */
    kCFBinaryHeapMutable = 0x1,        /* changeable and variable capacity */
    kCFBinaryHeapFixedMutable = 0x3    /* changeable and fixed capacity */
};

/* Bits 4-5 are used by GC */

CF_INLINE bool isStrongMemory(CFTypeRef collection) {
    return ! __CFBitfieldGetValue(((const CFRuntimeBase *)collection)->_info, 4, 4);
}

CF_INLINE bool needsRestore(CFTypeRef collection) {
    return __CFBitfieldGetValue(((const CFRuntimeBase *)collection)->_info, 5, 5);
}


CF_INLINE UInt32 __CFBinaryHeapMutableVariety(const void *cf) {
    return __CFBitfieldGetValue(((const CFRuntimeBase *)cf)->_info, 3, 2);
}

CF_INLINE void __CFBinaryHeapSetMutableVariety(void *cf, UInt32 v) {
    __CFBitfieldSetValue(((CFRuntimeBase *)cf)->_info, 3, 2, v);
}

CF_INLINE UInt32 __CFBinaryHeapMutableVarietyFromFlags(UInt32 flags) {
    return __CFBitfieldGetValue(flags, 1, 0);
}

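/* Two heaps compare equal when they have the same count, the same compare callback,
   and pairwise-equal values when both are enumerated in sorted order. */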
static bool __CFBinaryHeapEqual(CFTypeRef cf1, CFTypeRef cf2) {
    CFBinaryHeapRef heap1 = (CFBinaryHeapRef)cf1;
    CFBinaryHeapRef heap2 = (CFBinaryHeapRef)cf2;
    CFComparisonResult (*compare)(const void *, const void *, void *);
    CFIndex idx;
    CFIndex cnt;
    const void **list1, **list2, *buffer[256];
    cnt = __CFBinaryHeapCount(heap1);
    if (cnt != __CFBinaryHeapCount(heap2)) return false;
    compare = heap1->_callbacks.compare;
    if (compare != heap2->_callbacks.compare) return false;
    if (0 == cnt) return true;    /* after function comparison */
    list1 = (cnt <= 128) ? buffer : CFAllocatorAllocate(kCFAllocatorSystemDefault, 2 * cnt * sizeof(void *), 0); // GC OK
    if (__CFOASafe && list1 != buffer) __CFSetLastAllocationEventName(list1, "CFBinaryHeap (temp)");
    list2 = (cnt <= 128) ? buffer + 128 : list1 + cnt;
    CFBinaryHeapGetValues(heap1, list1);
    CFBinaryHeapGetValues(heap2, list2);
    for (idx = 0; idx < cnt; idx++) {
        const void *val1 = list1[idx];
        const void *val2 = list2[idx];
        // CF: which context info should be passed in? both?
        // CF: if the context infos are not equal, should the heaps not be equal?
        if (val1 != val2) {
            if (!compare) return false;
            if (kCFCompareEqualTo != compare(val1, val2, heap1->_context.info)) return false;
        }
    }
    if (list1 != buffer) CFAllocatorDeallocate(kCFAllocatorSystemDefault, list1); // GC OK
    return true;
}

static UInt32 __CFBinaryHeapHash(CFTypeRef cf) {
    CFBinaryHeapRef heap = (CFBinaryHeapRef)cf;
    return __CFBinaryHeapCount(heap);
}

static CFStringRef __CFBinaryHeapCopyDescription(CFTypeRef cf) {
    CFBinaryHeapRef heap = (CFBinaryHeapRef)cf;
    CFMutableStringRef result;
    CFIndex idx;
    CFIndex cnt;
    const void **list, *buffer[256];
    cnt = __CFBinaryHeapCount(heap);
    result = CFStringCreateMutable(CFGetAllocator(heap), 0);
    CFStringAppendFormat(result, NULL, CFSTR("<CFBinaryHeap %p [%p]>{count = %u, capacity = %u, objects = (\n"), cf, CFGetAllocator(heap), cnt, __CFBinaryHeapCapacity(heap));
    list = (cnt <= 128) ? buffer : CFAllocatorAllocate(kCFAllocatorSystemDefault, cnt * sizeof(void *), 0); // GC OK
    if (__CFOASafe && list != buffer) __CFSetLastAllocationEventName(list, "CFBinaryHeap (temp)");
    CFBinaryHeapGetValues(heap, list);
    for (idx = 0; idx < cnt; idx++) {
        CFStringRef desc = NULL;
        const void *item = list[idx];
        if (NULL != heap->_callbacks.copyDescription) {
            desc = heap->_callbacks.copyDescription(item);
        }
        if (NULL != desc) {
            CFStringAppendFormat(result, NULL, CFSTR("\t%u : %@\n"), idx, desc);
            CFRelease(desc);
        } else {
            CFStringAppendFormat(result, NULL, CFSTR("\t%u : <0x%x>\n"), idx, (UInt32)item);
        }
    }
    CFStringAppend(result, CFSTR(")}"));
    if (list != buffer) CFAllocatorDeallocate(kCFAllocatorSystemDefault, list); // GC OK
    return result;
}

static void __CFBinaryHeapDeallocate(CFTypeRef cf) {
    CFBinaryHeapRef heap = (CFBinaryHeapRef)cf;
    CFAllocatorRef allocator = CFGetAllocator(heap);
    if (CF_IS_COLLECTABLE_ALLOCATOR(allocator)) {
        if (heap->_callbacks.retain == NULL && heap->_callbacks.release == NULL)
            return; // GC: keep heap intact during finalization.
    }
    // CF: should make the heap mutable here first, a la CFArrayDeallocate
    CFBinaryHeapRemoveAllValues(heap);
    // CF: does not release the context info
    if (__CFBinaryHeapMutableVariety(heap) == kCFBinaryHeapMutable) {
        _CFAllocatorDeallocateGC(allocator, heap->_buckets);
    }
}

static CFTypeID __kCFBinaryHeapTypeID = _kCFRuntimeNotATypeID;

static const CFRuntimeClass __CFBinaryHeapClass = {
    _kCFRuntimeScannedObject,
    "CFBinaryHeap",
    NULL,    // init
    NULL,    // copy
    __CFBinaryHeapDeallocate,
    (void *)__CFBinaryHeapEqual,
    __CFBinaryHeapHash,
    NULL,    // copyFormattingDesc
    __CFBinaryHeapCopyDescription
};

__private_extern__ void __CFBinaryHeapInitialize(void) {
    __kCFBinaryHeapTypeID = _CFRuntimeRegisterClass(&__CFBinaryHeapClass);
}

CFTypeID CFBinaryHeapGetTypeID(void) {
    return __kCFBinaryHeapTypeID;
}

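/* Common creation path.  The low bits of flags select one of the three varieties above:
   a mutable heap gets a separately allocated, growable bucket array, while immutable and
   fixed-mutable heaps carry their buckets inline after the instance.  Note the existing
   "CF:" remark below: the compareContext argument is currently not copied into the object. */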
static CFBinaryHeapRef __CFBinaryHeapInit(CFAllocatorRef allocator, UInt32 flags, CFIndex capacity, const void **values, CFIndex numValues, const CFBinaryHeapCallBacks *callBacks, const CFBinaryHeapCompareContext *compareContext) {
    // CF: does not copy the compareContext argument into the object
    CFBinaryHeapRef memory;
    CFIndex idx;
    CFIndex size;

    CFAssert2(0 <= capacity, __kCFLogAssertion, "%s(): capacity (%d) cannot be less than zero", __PRETTY_FUNCTION__, capacity);
    CFAssert3(kCFBinaryHeapFixedMutable != __CFBinaryHeapMutableVarietyFromFlags(flags) || numValues <= capacity, __kCFLogAssertion, "%s(): for fixed-mutable type, capacity (%d) must be greater than or equal to number of initial elements (%d)", __PRETTY_FUNCTION__, capacity, numValues);
    CFAssert2(0 <= numValues, __kCFLogAssertion, "%s(): numValues (%d) cannot be less than zero", __PRETTY_FUNCTION__, numValues);
    size = sizeof(struct __CFBinaryHeap) - sizeof(CFRuntimeBase);
    if (__CFBinaryHeapMutableVarietyFromFlags(flags) != kCFBinaryHeapMutable)
        size += sizeof(struct __CFBinaryHeapBucket) * __CFBinaryHeapNumBucketsForCapacity(capacity);
    CFBinaryHeapCallBacks nonRetainingCallbacks;
    if (CF_IS_COLLECTABLE_ALLOCATOR(allocator)) {
        if (!callBacks || (callBacks->retain == NULL && callBacks->release == NULL)) {
            __CFBitfieldSetValue(flags, 4, 4, 1); // setWeak
        }
        else {
            if (callBacks->retain == __CFTypeCollectionRetain && callBacks->release == __CFTypeCollectionRelease) {
                nonRetainingCallbacks = *callBacks;
                nonRetainingCallbacks.retain = NULL;
                nonRetainingCallbacks.release = NULL;
                callBacks = &nonRetainingCallbacks;
                __CFBitfieldSetValue(flags, 5, 5, 1); // setNeedsRestore
            }
        }
    }

    memory = (CFBinaryHeapRef)_CFRuntimeCreateInstance(allocator, __kCFBinaryHeapTypeID, size, NULL);
    if (NULL == memory) {
        return NULL;
    }
    switch (__CFBinaryHeapMutableVarietyFromFlags(flags)) {
    case kCFBinaryHeapMutable:
        __CFBinaryHeapSetCapacity(memory, __CFBinaryHeapRoundUpCapacity(1));
        __CFBinaryHeapSetNumBuckets(memory, __CFBinaryHeapNumBucketsForCapacity(__CFBinaryHeapRoundUpCapacity(1)));
        void *buckets = _CFAllocatorAllocateGC(allocator, __CFBinaryHeapNumBuckets(memory) * sizeof(struct __CFBinaryHeapBucket), isStrongMemory(memory) ? AUTO_MEMORY_SCANNED : AUTO_MEMORY_UNSCANNED);
        CF_WRITE_BARRIER_BASE_ASSIGN(allocator, memory, memory->_buckets, buckets);
        if (__CFOASafe) __CFSetLastAllocationEventName(memory->_buckets, "CFBinaryHeap (store)");
        if (NULL == memory->_buckets) {
            CFRelease(memory);
            return NULL;
        }
        break;
    case kCFBinaryHeapFixedMutable:
    case kCFBinaryHeapImmutable:
        if (!isStrongMemory(memory)) {  // if weak, don't scan
            auto_zone_set_layout_type(__CFCollectableZone, memory, AUTO_OBJECT_UNSCANNED);
        }
        /* Don't round up capacity */
        __CFBinaryHeapSetCapacity(memory, capacity);
        __CFBinaryHeapSetNumBuckets(memory, __CFBinaryHeapNumBucketsForCapacity(capacity));
        memory->_buckets = (struct __CFBinaryHeapBucket *)((int8_t *)memory + sizeof(struct __CFBinaryHeap));
        break;
    }
    __CFBinaryHeapSetNumBucketsUsed(memory, 0);
    __CFBinaryHeapSetCount(memory, 0);
    if (NULL != callBacks) {
        memory->_callbacks.retain = callBacks->retain;
        memory->_callbacks.release = callBacks->release;
        memory->_callbacks.copyDescription = callBacks->copyDescription;
        memory->_callbacks.compare = callBacks->compare;
    } else {
        memory->_callbacks.retain = 0;
        memory->_callbacks.release = 0;
        memory->_callbacks.copyDescription = 0;
        memory->_callbacks.compare = 0;
    }
    if (__CFBinaryHeapMutableVarietyFromFlags(flags) != kCFBinaryHeapMutable) {
        __CFBinaryHeapSetMutableVariety(memory, kCFBinaryHeapFixedMutable);
    } else {
        __CFBinaryHeapSetMutableVariety(memory, kCFBinaryHeapMutable);
    }
    for (idx = 0; idx < numValues; idx++) {
        CFBinaryHeapAddValue(memory, values[idx]);
    }
    __CFBinaryHeapSetMutableVariety(memory, __CFBinaryHeapMutableVarietyFromFlags(flags));
    return memory;
}

CFBinaryHeapRef CFBinaryHeapCreate(CFAllocatorRef allocator, CFIndex capacity, const CFBinaryHeapCallBacks *callBacks, const CFBinaryHeapCompareContext *compareContext) {
    return __CFBinaryHeapInit(allocator, (0 == capacity) ? kCFBinaryHeapMutable : kCFBinaryHeapFixedMutable, capacity, NULL, 0, callBacks, compareContext);
}

CFBinaryHeapRef CFBinaryHeapCreateCopy(CFAllocatorRef allocator, CFIndex capacity, CFBinaryHeapRef heap) {
    __CFGenericValidateType(heap, __kCFBinaryHeapTypeID);
    return __CFBinaryHeapInit(allocator, (0 == capacity) ? kCFBinaryHeapMutable : kCFBinaryHeapFixedMutable, capacity, (const void **)heap->_buckets, __CFBinaryHeapCount(heap), &(heap->_callbacks), &(heap->_context));
}

CFIndex CFBinaryHeapGetCount(CFBinaryHeapRef heap) {
    __CFGenericValidateType(heap, __kCFBinaryHeapTypeID);
    return __CFBinaryHeapCount(heap);
}

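/* Value searches are a linear scan of the bucket array; the heap ordering does not
   speed up lookups of arbitrary values. */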
CFIndex CFBinaryHeapGetCountOfValue(CFBinaryHeapRef heap, const void *value) {
    CFComparisonResult (*compare)(const void *, const void *, void *);
    CFIndex idx;
    CFIndex cnt = 0, length;
    __CFGenericValidateType(heap, __kCFBinaryHeapTypeID);
    compare = heap->_callbacks.compare;
    length = __CFBinaryHeapCount(heap);
    for (idx = 0; idx < length; idx++) {
        const void *item = heap->_buckets[idx]._item;
        if (value == item || (compare && kCFCompareEqualTo == compare(value, item, heap->_context.info))) {
            cnt++;
        }
    }
    return cnt;
}

Boolean CFBinaryHeapContainsValue(CFBinaryHeapRef heap, const void *value) {
    CFComparisonResult (*compare)(const void *, const void *, void *);
    CFIndex idx;
    CFIndex length;
    __CFGenericValidateType(heap, __kCFBinaryHeapTypeID);
    compare = heap->_callbacks.compare;
    length = __CFBinaryHeapCount(heap);
    for (idx = 0; idx < length; idx++) {
        const void *item = heap->_buckets[idx]._item;
        if (value == item || (compare && kCFCompareEqualTo == compare(value, item, heap->_context.info))) {
            return true;
        }
    }
    return false;
}

const void *CFBinaryHeapGetMinimum(CFBinaryHeapRef heap) {
    __CFGenericValidateType(heap, __kCFBinaryHeapTypeID);
    CFAssert1(0 < __CFBinaryHeapCount(heap), __kCFLogAssertion, "%s(): binary heap is empty", __PRETTY_FUNCTION__);
    return (0 < __CFBinaryHeapCount(heap)) ? heap->_buckets[0]._item : NULL;
}

Boolean CFBinaryHeapGetMinimumIfPresent(CFBinaryHeapRef heap, const void **value) {
    __CFGenericValidateType(heap, __kCFBinaryHeapTypeID);
    if (0 == __CFBinaryHeapCount(heap)) return false;
    if (NULL != value) __CFObjCStrongAssign(heap->_buckets[0]._item, value);
    return true;
}

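/* Fills the caller's buffer with the heap's values in sorted (ascending) order by
   draining a temporary copy of the heap; the buffer must hold at least
   CFBinaryHeapGetCount(heap) pointers. */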
void CFBinaryHeapGetValues(CFBinaryHeapRef heap, const void **values) {
    CFBinaryHeapRef heapCopy;
    CFIndex idx;
    CFIndex cnt;
    __CFGenericValidateType(heap, __kCFBinaryHeapTypeID);
    CFAssert1(NULL != values, __kCFLogAssertion, "%s(): pointer to values may not be NULL", __PRETTY_FUNCTION__);
    cnt = __CFBinaryHeapCount(heap);
    if (0 == cnt) return;
    if (CF_USING_COLLECTABLE_MEMORY) {
        // GC: speculatively issue a write-barrier on the copied to buffers (3743553).
        __CFObjCWriteBarrierRange(values, cnt * sizeof(void *));
    }
    heapCopy = CFBinaryHeapCreateCopy(CFGetAllocator(heap), cnt, heap);
    idx = 0;
    while (0 < __CFBinaryHeapCount(heapCopy)) {
        const void *value = CFBinaryHeapGetMinimum(heapCopy);
        CFBinaryHeapRemoveMinimumValue(heapCopy);
        values[idx++] = value;
    }
    CFRelease(heapCopy);
}

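/* Calls the applier once per value, again in sorted order via a temporary copy. */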
void CFBinaryHeapApplyFunction(CFBinaryHeapRef heap, CFBinaryHeapApplierFunction applier, void *context) {
    CFBinaryHeapRef heapCopy;
    CFIndex cnt;
    __CFGenericValidateType(heap, __kCFBinaryHeapTypeID);
    CFAssert1(NULL != applier, __kCFLogAssertion, "%s(): pointer to applier function may not be NULL", __PRETTY_FUNCTION__);
    cnt = __CFBinaryHeapCount(heap);
    if (0 == cnt) return;
    heapCopy = CFBinaryHeapCreateCopy(CFGetAllocator(heap), cnt, heap);
    while (0 < __CFBinaryHeapCount(heapCopy)) {
        const void *value = CFBinaryHeapGetMinimum(heapCopy);
        CFBinaryHeapRemoveMinimumValue(heapCopy);
        applier(value, context);
    }
    CFRelease(heapCopy);
}

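/* Grows the bucket array so it can hold at least numNewValues more items, rounding the
   new capacity up to a power of two. */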
static void __CFBinaryHeapGrow(CFBinaryHeapRef heap, CFIndex numNewValues) {
    CFIndex oldCount = __CFBinaryHeapCount(heap);
    CFIndex capacity = __CFBinaryHeapRoundUpCapacity(oldCount + numNewValues);
    CFAllocatorRef allocator = CFGetAllocator(heap);
    __CFBinaryHeapSetCapacity(heap, capacity);
    __CFBinaryHeapSetNumBuckets(heap, __CFBinaryHeapNumBucketsForCapacity(capacity));
    void *buckets = _CFAllocatorReallocateGC(allocator, heap->_buckets, __CFBinaryHeapNumBuckets(heap) * sizeof(struct __CFBinaryHeapBucket), isStrongMemory(heap) ? AUTO_MEMORY_SCANNED : AUTO_MEMORY_UNSCANNED);
    CF_WRITE_BARRIER_BASE_ASSIGN(allocator, heap, heap->_buckets, buckets);
    if (__CFOASafe) __CFSetLastAllocationEventName(heap->_buckets, "CFBinaryHeap (store)");
    if (NULL == heap->_buckets) HALT;
}

void CFBinaryHeapAddValue(CFBinaryHeapRef heap, const void *value) {
    CFIndex idx, pidx;
    CFIndex cnt;
    CFAllocatorRef allocator = CFGetAllocator(heap);
    __CFGenericValidateType(heap, __kCFBinaryHeapTypeID);
    CFAssert1(__CFBinaryHeapMutableVariety(heap) == kCFBinaryHeapMutable || __CFBinaryHeapMutableVariety(heap) == kCFBinaryHeapFixedMutable, __kCFLogAssertion, "%s(): binary heap is immutable", __PRETTY_FUNCTION__);
    switch (__CFBinaryHeapMutableVariety(heap)) {
    case kCFBinaryHeapMutable:
        if (__CFBinaryHeapNumBucketsUsed(heap) == __CFBinaryHeapCapacity(heap))
            __CFBinaryHeapGrow(heap, 1);
        break;
    case kCFBinaryHeapFixedMutable:
        CFAssert1(__CFBinaryHeapCount(heap) < __CFBinaryHeapCapacity(heap), __kCFLogAssertion, "%s(): fixed-capacity binary heap is full", __PRETTY_FUNCTION__);
        break;
    }
    cnt = __CFBinaryHeapCount(heap);
    idx = cnt;
    __CFBinaryHeapSetNumBucketsUsed(heap, cnt + 1);
    __CFBinaryHeapSetCount(heap, cnt + 1);
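    /* Sift up: walk from the new slot toward the root, pulling each parent that
       compares greater than the new value down into the child slot, then place
       the (retained) value in the hole that remains. */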
    pidx = (idx - 1) >> 1;
    while (0 < idx) {
        void *item = heap->_buckets[pidx]._item;
        if (kCFCompareGreaterThan != heap->_callbacks.compare(item, value, heap->_context.info)) break;
        CF_WRITE_BARRIER_BASE_ASSIGN(allocator, heap->_buckets, heap->_buckets[idx]._item, item);
        idx = pidx;
        pidx = (idx - 1) >> 1;
    }
    if (heap->_callbacks.retain) {
        CF_WRITE_BARRIER_BASE_ASSIGN(allocator, heap->_buckets, heap->_buckets[idx]._item, (void *)heap->_callbacks.retain(allocator, (void *)value));
    } else {
        CF_WRITE_BARRIER_BASE_ASSIGN(allocator, heap->_buckets, heap->_buckets[idx]._item, (void *)value);
    }
}

void CFBinaryHeapRemoveMinimumValue(CFBinaryHeapRef heap) {
    void *val;
    CFIndex idx, cidx;
    CFIndex cnt;
    CFAllocatorRef allocator;
    __CFGenericValidateType(heap, __kCFBinaryHeapTypeID);
    CFAssert1(__CFBinaryHeapMutableVariety(heap) == kCFBinaryHeapMutable || __CFBinaryHeapMutableVariety(heap) == kCFBinaryHeapFixedMutable, __kCFLogAssertion, "%s(): binary heap is immutable", __PRETTY_FUNCTION__);
    cnt = __CFBinaryHeapCount(heap);
    if (0 == cnt) return;
    idx = 0;
    __CFBinaryHeapSetNumBucketsUsed(heap, cnt - 1);
    __CFBinaryHeapSetCount(heap, cnt - 1);
    allocator = CFGetAllocator(heap);
    if (heap->_callbacks.release)
        heap->_callbacks.release(allocator, heap->_buckets[idx]._item);
    val = heap->_buckets[cnt - 1]._item;
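    /* Sift down: the last value (val) logically replaces the released root.  At each
       step promote the smaller child into the hole, stopping once that child compares
       greater than val, then drop val into the hole. */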
    cidx = (idx << 1) + 1;
    while (cidx < __CFBinaryHeapCount(heap)) {
        void *item = heap->_buckets[cidx]._item;
        if (cidx + 1 < __CFBinaryHeapCount(heap)) {
            void *item2 = heap->_buckets[cidx + 1]._item;
            if (kCFCompareGreaterThan == heap->_callbacks.compare(item, item2, heap->_context.info)) {
                cidx++;
                item = item2;
            }
        }
        if (kCFCompareGreaterThan == heap->_callbacks.compare(item, val, heap->_context.info)) break;
        CF_WRITE_BARRIER_BASE_ASSIGN(allocator, heap->_buckets, heap->_buckets[idx]._item, item);
        idx = cidx;
        cidx = (idx << 1) + 1;
    }
    CF_WRITE_BARRIER_BASE_ASSIGN(allocator, heap->_buckets, heap->_buckets[idx]._item, val);
}

void CFBinaryHeapRemoveAllValues(CFBinaryHeapRef heap) {
    CFIndex idx;
    CFIndex cnt;
    __CFGenericValidateType(heap, __kCFBinaryHeapTypeID);
    CFAssert1(__CFBinaryHeapMutableVariety(heap) == kCFBinaryHeapMutable || __CFBinaryHeapMutableVariety(heap) == kCFBinaryHeapFixedMutable, __kCFLogAssertion, "%s(): binary heap is immutable", __PRETTY_FUNCTION__);
    cnt = __CFBinaryHeapCount(heap);
    if (heap->_callbacks.release)
        for (idx = 0; idx < cnt; idx++)
            heap->_callbacks.release(CFGetAllocator(heap), heap->_buckets[idx]._item);
    __CFBinaryHeapSetNumBucketsUsed(heap, 0);
    __CFBinaryHeapSetCount(heap, 0);
}