/*
 * Copyright (c) 2011 Apple Inc. All rights reserved.
 *
 * @APPLE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this
 * file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_LICENSE_HEADER_END@
 */

/*  CFData.c
    Copyright (c) 1998-2011, Apple Inc. All rights reserved.
    Responsibility: Kevin Perry
*/
#include <CoreFoundation/CFData.h>
#include <CoreFoundation/CFPriv.h>
#include "CFInternal.h"

#if DEPLOYMENT_TARGET_MACOSX || DEPLOYMENT_TARGET_EMBEDDED
#include <mach/mach_init.h>
#elif DEPLOYMENT_TARGET_WINDOWS
#include <windows.h> // For GetSystemInfo()
#endif

#if __LP64__
#define CFDATA_MAX_SIZE ((1ULL << 42) - 1)
#else
#define CFDATA_MAX_SIZE ((1ULL << 31) - 1)
#endif
#if DEPLOYMENT_TARGET_MACOSX || DEPLOYMENT_TARGET_EMBEDDED
CF_INLINE unsigned long __CFPageSize() { return vm_page_size; }
#elif DEPLOYMENT_TARGET_WINDOWS
CF_INLINE unsigned long __CFPageSize() {
    SYSTEM_INFO sysInfo;
    GetSystemInfo(&sysInfo);
    return sysInfo.dwPageSize;
}
#elif DEPLOYMENT_TARGET_LINUX
#include <unistd.h>
CF_INLINE unsigned long __CFPageSize() {
    return (unsigned long)getpagesize();
}
#endif
#define INLINE_BYTES_THRESHOLD ((4 * __CFPageSize()) - sizeof(struct __CFData) - 15)
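/* Data objects whose requested capacity fits under this threshold keep their
   bytes inline, directly after the struct __CFData header (see
   __CFDataInlineBytesPtr below). For illustration only, assuming 4 KB pages:
   the threshold is 4 * 4096 bytes minus the header and the 15-byte alignment
   slack, i.e. a little under 16 KB. */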
struct __CFData {
    CFRuntimeBase _base;
    CFIndex _length;                    /* number of bytes */
    CFIndex _capacity;                  /* maximum number of bytes */
    CFAllocatorRef _bytesDeallocator;   /* used only for immutable; if NULL, no deallocation */
    uint8_t *_bytes;                    /* compaction: direct access to _bytes is only valid when data is not inline */
};
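/* When the bytes are stored inline (__kCFBytesInline), _bytes is not used; the
   storage lives immediately after the struct, 16-byte aligned, and is
   recomputed on every access via __CFDataInlineBytesPtr(). */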
/* Bits in the CF info field:
   Bits 1-0 are used for mutability variation
   Bit 2 = bytes are stored inline
   Bit 3 = use given CFAllocator
   Bit 5 = allocate collectable memory
   Bit 6 = not all bytes have been zeroed yet (mutable)
*/

enum {
    __kCFMutable = 0x01,
    __kCFGrowable = 0x02,
    __kCFMutableVarietyMask = 0x03,
    __kCFBytesInline = 0x04,
    __kCFUseAllocator = 0x08,
    __kCFAllocatesCollectable = 0x20,
};

enum {
    kCFImmutable = 0x0,     /* unchangeable and fixed capacity; default */
    kCFFixedMutable = 0x1,  /* changeable and fixed capacity */
    kCFMutable = 0x3        /* changeable and variable capacity */
};
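/* Note that kCFMutable is (__kCFMutable | __kCFGrowable) and kCFFixedMutable is
   __kCFMutable alone, so the low two bits encode both whether the object is
   mutable and whether its backing store may grow. */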
CF_INLINE void __CFDataSetInfoBits(CFDataRef data, UInt32 v) {__CFBitfieldSetValue(((CFRuntimeBase *)data)->_cfinfo[CF_INFO_BITS], 5, 0, v);}
CF_INLINE Boolean __CFDataGetInfoBit(CFDataRef data, UInt32 b) {return ((((const CFRuntimeBase *)data)->_cfinfo[CF_INFO_BITS] & b) != 0);}
CF_INLINE Boolean __CFDataIsMutable(CFDataRef data) {return __CFDataGetInfoBit(data, __kCFMutable);}
CF_INLINE Boolean __CFDataIsGrowable(CFDataRef data) {return __CFDataGetInfoBit(data, __kCFGrowable);}
CF_INLINE Boolean __CFDataBytesInline(CFDataRef data) {return __CFDataGetInfoBit(data, __kCFBytesInline);}
CF_INLINE Boolean __CFDataUseAllocator(CFDataRef data) {return __CFDataGetInfoBit(data, __kCFUseAllocator);}
CF_INLINE Boolean __CFDataAllocatesCollectable(CFDataRef data) {return __CFDataGetInfoBit(data, __kCFAllocatesCollectable);}
CF_INLINE UInt32 __CFMutableVariety(const void *cf) {
    return __CFBitfieldGetValue(((const CFRuntimeBase *)cf)->_cfinfo[CF_INFO_BITS], 1, 0);
}
CF_INLINE void __CFSetMutableVariety(void *cf, UInt32 v) {
    __CFBitfieldSetValue(((CFRuntimeBase *)cf)->_cfinfo[CF_INFO_BITS], 1, 0, v);
}
CF_INLINE UInt32 __CFMutableVarietyFromFlags(UInt32 flags) {
    return (flags & __kCFMutableVarietyMask);
}
#define __CFGenericValidateMutabilityFlags(flags) \
    CFAssert2(__CFMutableVarietyFromFlags(flags) != 0x2, __kCFLogAssertion, "%s(): flags 0x%x do not correctly specify the mutable variety", __PRETTY_FUNCTION__, flags);
CF_INLINE void __CFDataSetInline(CFDataRef data, Boolean flag) {
    __CFBitfieldSetValue(((CFRuntimeBase *)data)->_cfinfo[CF_INFO_BITS], 2, 2, (flag ? 1 : 0));
}
CF_INLINE Boolean __CFDataNeedsToZero(CFDataRef data) {
    return __CFBitfieldGetValue(((CFRuntimeBase *)data)->_cfinfo[CF_INFO_BITS], 6, 6);
}
CF_INLINE void __CFDataSetNeedsToZero(CFDataRef data, Boolean zero) {
    __CFBitfieldSetValue(((CFRuntimeBase *)data)->_cfinfo[CF_INFO_BITS], 6, 6, (zero ? 1 : 0));
}
CF_INLINE CFIndex __CFDataLength(CFDataRef data) {
    return data->_length;
}

CF_INLINE void __CFDataSetLength(CFMutableDataRef data, CFIndex v) {
    /* for a CFData, _bytesUsed == _length */
}

CF_INLINE CFIndex __CFDataCapacity(CFDataRef data) {
    return data->_capacity;
}

CF_INLINE void __CFDataSetCapacity(CFMutableDataRef data, CFIndex v) {
    /* for a CFData, _bytesNum == _capacity */
}
CF_INLINE CFIndex __CFDataNumBytesUsed(CFDataRef data) {
    return data->_length;
}

CF_INLINE void __CFDataSetNumBytesUsed(CFMutableDataRef data, CFIndex v) {
    data->_length = v;
}

CF_INLINE CFIndex __CFDataNumBytes(CFDataRef data) {
    return data->_capacity;
}

CF_INLINE void __CFDataSetNumBytes(CFMutableDataRef data, CFIndex v) {
    data->_capacity = v;
}
#if __LP64__
#define CHUNK_SIZE (1ULL << 29)
#define LOW_THRESHOLD (1ULL << 20)
#define HIGH_THRESHOLD (1ULL << 32)
#else
#define CHUNK_SIZE (1ULL << 26)
#define LOW_THRESHOLD (1ULL << 20)
#define HIGH_THRESHOLD (1ULL << 29)
#endif
CF_INLINE CFIndex __CFDataRoundUpCapacity(CFIndex capacity) {
    if (capacity < 16) {
        return 16;
    } else if (capacity < LOW_THRESHOLD) {
        /* Up to 4x */
        int idx = flsl(capacity);
        return (1 << (idx + ((idx % 2 == 0) ? 0 : 1)));
    } else if (capacity < HIGH_THRESHOLD) {
        /* Up to 2x */
        return (1 << flsl(capacity));
    } else {
        /* Round up to next multiple of CHUNK_SIZE */
        unsigned long newCapacity = CHUNK_SIZE * (1 + (capacity >> (flsl(CHUNK_SIZE) - 1)));
        return __CFMin(newCapacity, CFDATA_MAX_SIZE);
    }
}
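/* Illustrative examples of the rounding policy above (flsl() returns the
   1-based index of the highest set bit):
     capacity 1000    -> flsl = 10 (even) -> 1 << 10 = 1024
     capacity 1500    -> flsl = 11 (odd)  -> 1 << 12 = 4096  (up to ~4x below LOW_THRESHOLD)
     capacity 3 << 20 -> flsl = 22        -> 1 << 22         (next power of two below HIGH_THRESHOLD)
   At or above HIGH_THRESHOLD the capacity is rounded up to the next multiple of
   CHUNK_SIZE and clamped to CFDATA_MAX_SIZE. */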
CF_INLINE CFIndex __CFDataNumBytesForCapacity(CFIndex capacity) {
    return capacity;
}
static void __CFDataHandleOutOfMemory(CFTypeRef obj, CFIndex numBytes) {
    CFStringRef msg;
    if (0 < numBytes && numBytes <= CFDATA_MAX_SIZE) {
        msg = CFStringCreateWithFormat(kCFAllocatorSystemDefault, NULL, CFSTR("Attempt to allocate %ld bytes for NS/CFData failed"), numBytes);
    } else {
        msg = CFStringCreateWithFormat(kCFAllocatorSystemDefault, NULL, CFSTR("Attempt to allocate %ld bytes for NS/CFData failed. Maximum size: %ld"), numBytes, CFDATA_MAX_SIZE);
    }
    CFLog(kCFLogLevelCritical, CFSTR("%@"), msg);
    HALT;
}
#if defined(DEBUG)
CF_INLINE void __CFDataValidateRange(CFDataRef data, CFRange range, const char *func) {
    CFAssert2(0 <= range.location && range.location <= __CFDataLength(data), __kCFLogAssertion, "%s(): range.location index (%d) out of bounds", func, range.location);
    CFAssert2(0 <= range.length, __kCFLogAssertion, "%s(): length (%d) cannot be less than zero", func, range.length);
    CFAssert2(range.location + range.length <= __CFDataLength(data), __kCFLogAssertion, "%s(): ending index (%d) out of bounds", func, range.location + range.length);
}
#else
#define __CFDataValidateRange(a,r,f)
#endif
static Boolean __CFDataEqual(CFTypeRef cf1, CFTypeRef cf2) {
    CFDataRef data1 = (CFDataRef)cf1;
    CFDataRef data2 = (CFDataRef)cf2;
    CFIndex length;
    length = __CFDataLength(data1);
    if (length != __CFDataLength(data2)) return false;
    const uint8_t *bytePtr1 = CFDataGetBytePtr(data1);
    const uint8_t *bytePtr2 = CFDataGetBytePtr(data2);
    return 0 == memcmp(bytePtr1, bytePtr2, length);
}
static CFHashCode __CFDataHash(CFTypeRef cf) {
    CFDataRef data = (CFDataRef)cf;
    return CFHashBytes((uint8_t *)CFDataGetBytePtr(data), __CFMin(__CFDataLength(data), 80));
}
static CFStringRef __CFDataCopyDescription(CFTypeRef cf) {
    CFDataRef data = (CFDataRef)cf;
    CFMutableStringRef result;
    CFIndex idx;
    CFIndex len;
    const uint8_t *bytes;
    len = __CFDataLength(data);
    bytes = CFDataGetBytePtr(data);
    result = CFStringCreateMutable(CFGetAllocator(data), 0);
    CFStringAppendFormat(result, NULL, CFSTR("<CFData %p [%p]>{length = %u, capacity = %u, bytes = 0x"), cf, CFGetAllocator(data), len, __CFDataCapacity(data));
    if (24 < len) {
        for (idx = 0; idx < 16; idx += 4) {
            CFStringAppendFormat(result, NULL, CFSTR("%02x%02x%02x%02x"), bytes[idx], bytes[idx + 1], bytes[idx + 2], bytes[idx + 3]);
        }
        CFStringAppend(result, CFSTR(" ... "));
        for (idx = len - 8; idx < len; idx += 4) {
            CFStringAppendFormat(result, NULL, CFSTR("%02x%02x%02x%02x"), bytes[idx], bytes[idx + 1], bytes[idx + 2], bytes[idx + 3]);
        }
    } else {
        for (idx = 0; idx < len; idx++) {
            CFStringAppendFormat(result, NULL, CFSTR("%02x"), bytes[idx]);
        }
    }
    CFStringAppend(result, CFSTR("}"));
    return result;
}
static void *__CFDataInlineBytesPtr(CFDataRef data) {
    return (void *)((uintptr_t)((int8_t *)data + sizeof(struct __CFData) + 15) & ~0xF);	// 16-byte align
}
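/* Adding 15 and masking with ~0xF rounds the address up to the next multiple of
   16, so the inline bytes start at the first 16-byte boundary at or after the
   end of the struct. This is why __CFDataInit reserves an extra 15 bytes of
   slack when sizing an inline instance. */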
static Boolean __CFDataShouldAllocateCleared(CFDataRef data, CFIndex size) {
    Boolean result;
    if (__CFDataUseAllocator(data)) {
        result = false;
    } else {
        if (__CFDataAllocatesCollectable(data)) {
#if __LP64__
            result = false;
#else
            result = (size > (64 * 1024));
#endif
        } else {
            result = (size > (128 * 1024));
        }
    }
    return result;
}
// Check __CFDataShouldAllocateCleared before passing true.
static void *__CFDataAllocate(CFDataRef data, CFIndex size, Boolean clear) {
    void *bytes = NULL;
    if (__CFDataUseAllocator(data)) {
        CFAllocatorRef allocator = __CFGetAllocator(data);
        bytes = CFAllocatorAllocate(allocator, size, 0);
        if (clear) memset((uint8_t *)bytes, 0, size);
    } else {
        if (__CFDataAllocatesCollectable(data)) {
            bytes = auto_zone_allocate_object(objc_collectableZone(), size, AUTO_MEMORY_UNSCANNED, 0, clear);
        } else {
            if (clear) {
                bytes = calloc(1, size);
            } else {
                bytes = malloc(size);
            }
        }
    }
    return bytes;
}
static void __CFDataDeallocate(CFTypeRef cf) {
    CFMutableDataRef data = (CFMutableDataRef)cf;
    if (!__CFDataBytesInline(data)) {
        CFAllocatorRef deallocator = data->_bytesDeallocator;
        if (deallocator != NULL) {
            _CFAllocatorDeallocateGC(deallocator, data->_bytes);
            if (!_CFAllocatorIsGCRefZero(deallocator)) CFRelease(deallocator);
            data->_bytes = NULL;
        } else {
            if (__CFDataUseAllocator(data)) {
                _CFAllocatorDeallocateGC(__CFGetAllocator(data), data->_bytes);
            } else if (!__CFDataAllocatesCollectable(data) && data->_bytes) {
                free(data->_bytes);
            }
            data->_bytes = NULL;
        }
    }
}
static CFTypeID __kCFDataTypeID = _kCFRuntimeNotATypeID;

static const CFRuntimeClass __CFDataClass = {
    _kCFRuntimeScannedObject,
    "CFData",
    NULL,	// init
    NULL,	// copy
    __CFDataDeallocate,
    __CFDataEqual,
    __CFDataHash,
    NULL,	// copyFormattingDesc
    __CFDataCopyDescription
};
__private_extern__ void __CFDataInitialize(void) {
    __kCFDataTypeID = _CFRuntimeRegisterClass(&__CFDataClass);
}

CFTypeID CFDataGetTypeID(void) {
    return __kCFDataTypeID;
}
// NULL bytesDeallocator to this function does not mean the default allocator, it means
// that there should be no deallocator, and the bytes should be copied.
static CFMutableDataRef __CFDataInit(CFAllocatorRef allocator, CFOptionFlags flags, CFIndex capacity, const uint8_t *bytes, CFIndex length, CFAllocatorRef bytesDeallocator) {
    CFMutableDataRef memory;
    __CFGenericValidateMutabilityFlags(flags);
    CFAssert2(0 <= capacity, __kCFLogAssertion, "%s(): capacity (%d) cannot be less than zero", __PRETTY_FUNCTION__, capacity);
    CFAssert3(kCFFixedMutable != __CFMutableVarietyFromFlags(flags) || length <= capacity, __kCFLogAssertion, "%s(): for kCFFixedMutable type, capacity (%d) must be greater than or equal to number of initial elements (%d)", __PRETTY_FUNCTION__, capacity, length);
    CFAssert2(0 <= length, __kCFLogAssertion, "%s(): length (%d) cannot be less than zero", __PRETTY_FUNCTION__, length);

    Boolean collectableMemory = CF_IS_COLLECTABLE_ALLOCATOR(allocator);
    Boolean noCopy = bytesDeallocator != NULL;
    Boolean isMutable = ((flags & __kCFMutable) != 0);
    Boolean isGrowable = ((flags & __kCFGrowable) != 0);
    Boolean allocateInline = !isGrowable && !noCopy && capacity < INLINE_BYTES_THRESHOLD;
    allocator = (allocator == NULL) ? __CFGetDefaultAllocator() : allocator;
    Boolean useAllocator = (allocator != kCFAllocatorSystemDefault && allocator != kCFAllocatorMalloc && allocator != kCFAllocatorMallocZone);

    CFIndex size = sizeof(struct __CFData) - sizeof(CFRuntimeBase);
    if (allocateInline) {
        size += sizeof(uint8_t) * __CFDataNumBytesForCapacity(capacity) + sizeof(uint8_t) * 15;	// for 16-byte alignment fixup
    }
    memory = (CFMutableDataRef)_CFRuntimeCreateInstance(allocator, __kCFDataTypeID, size, NULL);
    if (NULL == memory) {
        return NULL;
    }
    __CFDataSetNumBytesUsed(memory, 0);
    __CFDataSetLength(memory, 0);
    __CFDataSetInfoBits(memory,
                        (allocateInline ? __kCFBytesInline : 0) |
                        (useAllocator ? __kCFUseAllocator : 0) |
                        (collectableMemory ? __kCFAllocatesCollectable : 0));

    Boolean finalize = true;
    Boolean scan = true;
    if (collectableMemory) {
        if (allocateInline) {
            // We have no pointer to anything that needs to be reclaimed, so don't scan or finalize.
            scan = false;
            finalize = false;
        } else if (noCopy) {
            if (CF_IS_COLLECTABLE_ALLOCATOR(bytesDeallocator)) {
                // We're taking responsibility for externally GC-allocated memory, so scan us, but we don't need to finalize.
                finalize = false;
            } else if (bytesDeallocator == kCFAllocatorNull) {
                // We don't have responsibility for these bytes, so there's no need to be scanned and we don't need to finalize.
                scan = false;
                finalize = false;
            } else {
                // We have a pointer to non-GC-allocated memory, so don't scan, but do finalize.
                scan = false;
            }
        }
        if (!scan) auto_zone_set_unscanned(objc_collectableZone(), memory);
        if (!finalize) auto_zone_set_nofinalize(objc_collectableZone(), memory);
    }
    if (isMutable && isGrowable) {
        __CFDataSetCapacity(memory, __CFDataRoundUpCapacity(1));
        __CFDataSetNumBytes(memory, __CFDataNumBytesForCapacity(__CFDataRoundUpCapacity(1)));
        __CFSetMutableVariety(memory, kCFMutable);
    } else {
        /* Don't round up capacity */
        __CFDataSetCapacity(memory, capacity);
        __CFDataSetNumBytes(memory, __CFDataNumBytesForCapacity(capacity));
        __CFSetMutableVariety(memory, kCFFixedMutable);
    }
    if (noCopy) {
        __CFAssignWithWriteBarrier((void **)&memory->_bytes, (uint8_t *)bytes);
        if (_CFAllocatorIsGCRefZero(bytesDeallocator)) {
            memory->_bytesDeallocator = bytesDeallocator;
        } else {
            memory->_bytesDeallocator = (CFAllocatorRef)CFRetain(_CFConvertAllocatorToNonGCRefZeroEquivalent(bytesDeallocator));
        }
        if (CF_IS_COLLECTABLE_ALLOCATOR(bytesDeallocator) && !_CFAllocatorIsGCRefZero(bytesDeallocator)) {
            // When given a GC allocator which is not one of the GCRefZero ones as the deallocator, we assume that the no-copy memory is GC-allocated with a retain count of (at least) 1 and we should release it now instead of waiting until __CFDataDeallocate.
            auto_zone_release(objc_collectableZone(), memory->_bytes);
        }
        __CFDataSetNumBytesUsed(memory, length);
        __CFDataSetLength(memory, length);
        // Mutable no-copy datas are not allowed, so don't bother setting needsToZero flag.
    } else {
        Boolean cleared = (isMutable && !isGrowable && !_CFExecutableLinkedOnOrAfter(CFSystemVersionSnowLeopard));
        if (!allocateInline) {
            // assume that allocators give 16-byte aligned memory back -- it is their responsibility
            __CFAssignWithWriteBarrier((void **)&memory->_bytes, __CFDataAllocate(memory, __CFDataNumBytes(memory) * sizeof(uint8_t), cleared));
            if (__CFOASafe) __CFSetLastAllocationEventName(memory->_bytes, "CFData (store)");
            if (NULL == memory->_bytes) {
                CFRelease(memory);
                return NULL;
            }
        } else {
            if (length == 0 && !isMutable) {
                // NSData sets its bytes pointer to NULL when its length is zero. Starting in 10.7 we do the same for CFData.
                memory->_bytes = NULL;
                // It is important to set this data as not inlined, so we do not recalculate a bytes pointer from null.
                __CFDataSetInline(memory, false);
            }
            cleared = true;
        }
        __CFDataSetNeedsToZero(memory, !cleared);
        memory->_bytesDeallocator = NULL;
        CFDataReplaceBytes(memory, CFRangeMake(0, 0), bytes, length);
    }
    __CFSetMutableVariety(memory, __CFMutableVarietyFromFlags(flags));
    return memory;
}
CFDataRef CFDataCreate(CFAllocatorRef allocator, const uint8_t *bytes, CFIndex length) {
    return __CFDataInit(allocator, kCFImmutable, length, bytes, length, NULL);
}
CFDataRef CFDataCreateWithBytesNoCopy(CFAllocatorRef allocator, const uint8_t *bytes, CFIndex length, CFAllocatorRef bytesDeallocator) {
    CFAssert1((0 == length || bytes != NULL), __kCFLogAssertion, "%s(): bytes pointer cannot be NULL if length is non-zero", __PRETTY_FUNCTION__);
    if (NULL == bytesDeallocator) bytesDeallocator = __CFGetDefaultAllocator();
    return __CFDataInit(allocator, kCFImmutable, length, bytes, length, bytesDeallocator);
}
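// Illustrative caller-side sketch (hypothetical caller code, not part of the
// implementation above): wrapping a buffer the caller owns and keeps alive,
// without copying and without the CFData ever freeing it, is done by passing
// kCFAllocatorNull as the bytesDeallocator:
//
//     static const uint8_t table[] = { 0x01, 0x02, 0x03, 0x04 };
//     CFDataRef d = CFDataCreateWithBytesNoCopy(kCFAllocatorDefault, table, sizeof(table), kCFAllocatorNull);
//
// Passing a NULL bytesDeallocator instead selects the default allocator, which
// is then used to deallocate the bytes when the CFData is destroyed.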
CFDataRef CFDataCreateCopy(CFAllocatorRef allocator, CFDataRef data) {
    CFIndex length = CFDataGetLength(data);
    return __CFDataInit(allocator, kCFImmutable, length, CFDataGetBytePtr(data), length, NULL);
}
CFMutableDataRef CFDataCreateMutable(CFAllocatorRef allocator, CFIndex capacity) {
    // Do not allow magic allocator for now for mutable datas, because it
    // isn't remembered for proper handling later when growth of the buffer
    // has to happen.
    Boolean wasMagic = _CFAllocatorIsGCRefZero(allocator);
    if (0 == capacity) allocator = _CFConvertAllocatorToNonGCRefZeroEquivalent(allocator);
    CFMutableDataRef r = (CFMutableDataRef)__CFDataInit(allocator, (0 == capacity) ? kCFMutable : kCFFixedMutable, capacity, NULL, 0, NULL);
    if (wasMagic) CFMakeCollectable(r);
    return r;
}
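// Note on the capacity argument, as implemented above: a capacity of 0 creates
// a fully growable object (kCFMutable), while a non-zero capacity creates a
// fixed-capacity object (kCFFixedMutable) that can hold at most that many bytes.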
CFMutableDataRef CFDataCreateMutableCopy(CFAllocatorRef allocator, CFIndex capacity, CFDataRef data) {
    // Do not allow magic allocator for now for mutable datas, because it
    // isn't remembered for proper handling later when growth of the buffer
    // has to happen.
    Boolean wasMagic = _CFAllocatorIsGCRefZero(allocator);
    if (0 == capacity) allocator = _CFConvertAllocatorToNonGCRefZeroEquivalent(allocator);
    CFMutableDataRef r = (CFMutableDataRef)__CFDataInit(allocator, (0 == capacity) ? kCFMutable : kCFFixedMutable, capacity, CFDataGetBytePtr(data), CFDataGetLength(data), NULL);
    if (wasMagic) CFMakeCollectable(r);
    return r;
}
CFIndex CFDataGetLength(CFDataRef data) {
    CF_OBJC_FUNCDISPATCH0(__kCFDataTypeID, CFIndex, data, "length");
    __CFGenericValidateType(data, __kCFDataTypeID);
    return __CFDataLength(data);
}
const uint8_t *CFDataGetBytePtr(CFDataRef data) {
    CF_OBJC_FUNCDISPATCH0(__kCFDataTypeID, const uint8_t *, data, "bytes");
    __CFGenericValidateType(data, __kCFDataTypeID);
    // compaction: if inline, always do the computation.
    return __CFDataBytesInline(data) ? (uint8_t *)__CFDataInlineBytesPtr(data) : data->_bytes;
}
uint8_t *CFDataGetMutableBytePtr(CFMutableDataRef data) {
    CF_OBJC_FUNCDISPATCH0(__kCFDataTypeID, uint8_t *, data, "mutableBytes");
    CFAssert1(__CFDataIsMutable(data), __kCFLogAssertion, "%s(): data is immutable", __PRETTY_FUNCTION__);
    // compaction: if inline, always do the computation.
    return __CFDataBytesInline(data) ? (uint8_t *)__CFDataInlineBytesPtr(data) : data->_bytes;
}
void CFDataGetBytes(CFDataRef data, CFRange range, uint8_t *buffer) {
    CF_OBJC_FUNCDISPATCH2(__kCFDataTypeID, void, data, "getBytes:range:", buffer, range);
    __CFDataValidateRange(data, range, __PRETTY_FUNCTION__);
    memmove(buffer, CFDataGetBytePtr(data) + range.location, range.length);
}
/* Allocates a new block of data with at least numNewValues more bytes than the current length. If clear is true, the new bytes up to at least the new length will be zeroed. */
static void __CFDataGrow(CFMutableDataRef data, CFIndex numNewValues, Boolean clear) {
    CFIndex oldLength = __CFDataLength(data);
    CFIndex newLength = oldLength + numNewValues;
    if (newLength > CFDATA_MAX_SIZE || newLength < 0) __CFDataHandleOutOfMemory(data, newLength * sizeof(uint8_t));
    CFIndex capacity = __CFDataRoundUpCapacity(newLength);
    CFIndex numBytes = __CFDataNumBytesForCapacity(capacity);
    CFAllocatorRef allocator = CFGetAllocator(data);
    void *bytes = NULL;
    void *oldBytes = data->_bytes;
    Boolean allocateCleared = clear && __CFDataShouldAllocateCleared(data, numBytes);
    if (allocateCleared && !__CFDataUseAllocator(data) && (oldLength == 0 || (newLength / oldLength) > 4)) {
        // If the length that needs to be zeroed is significantly greater than the length of the data, then calloc/memmove is probably more efficient than realloc/memset.
        bytes = __CFDataAllocate(data, numBytes * sizeof(uint8_t), true);
        if (NULL != bytes) {
            memmove(bytes, oldBytes, oldLength);
            __CFDataDeallocate(data);
        }
    }
    if (bytes == NULL) {
        // If the calloc/memmove approach either failed or was never attempted, then realloc.
        allocateCleared = false;
        if (__CFDataUseAllocator(data)) {
            bytes = CFAllocatorReallocate(allocator, oldBytes, numBytes * sizeof(uint8_t), 0);
        } else {
            bytes = realloc(oldBytes, numBytes * sizeof(uint8_t));
        }
    }
    if (NULL == bytes) __CFDataHandleOutOfMemory(data, numBytes * sizeof(uint8_t));
    __CFDataSetCapacity(data, capacity);
    __CFDataSetNumBytes(data, numBytes);
    if (clear && !allocateCleared && oldLength < newLength) memset((uint8_t *)bytes + oldLength, 0, newLength - oldLength);
    __CFDataSetNeedsToZero(data, !allocateCleared);
    __CFAssignWithWriteBarrier((void **)&data->_bytes, bytes);
    if (__CFOASafe) __CFSetLastAllocationEventName(data->_bytes, "CFData (store)");
}
void CFDataSetLength(CFMutableDataRef data, CFIndex newLength) {
    CFIndex oldLength, capacity;
    Boolean isGrowable;
    CF_OBJC_FUNCDISPATCH1(__kCFDataTypeID, void, data, "setLength:", newLength);
    CFAssert1(__CFDataIsMutable(data), __kCFLogAssertion, "%s(): data is immutable", __PRETTY_FUNCTION__);
    oldLength = __CFDataLength(data);
    capacity = __CFDataCapacity(data);
    isGrowable = __CFDataIsGrowable(data);
    if (__CFDataIsMutable(data)) {
        if (newLength < 0) {
            if (isGrowable) {
                __CFDataHandleOutOfMemory(data, newLength);
            } else {
                HALT;
            }
        } else if (capacity < newLength) {
            if (isGrowable) {
                __CFDataGrow(data, newLength - oldLength, true);
            } else {
                CFAssert1(newLength <= __CFDataCapacity(data), __kCFLogAssertion, "%s(): fixed-capacity data is full", __PRETTY_FUNCTION__);
            }
        } else if (oldLength < newLength && __CFDataNeedsToZero(data)) {
            memset(CFDataGetMutableBytePtr(data) + oldLength, 0, newLength - oldLength);
        } else if (newLength < oldLength) {
            __CFDataSetNeedsToZero(data, true);
        }
    }
    __CFDataSetLength(data, newLength);
    __CFDataSetNumBytesUsed(data, newLength);
}
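// As the code above shows, bytes added by extending a data's length read as
// zero: either __CFDataGrow() is asked to clear them, or they are memset()
// here when the buffer may still contain stale contents (__CFDataNeedsToZero).
// Shrinking only marks the object as needing zeroing again on the next
// extension.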
void CFDataIncreaseLength(CFMutableDataRef data, CFIndex extraLength) {
    CF_OBJC_FUNCDISPATCH1(__kCFDataTypeID, void, data, "increaseLengthBy:", extraLength);
    CFAssert1(__CFDataIsMutable(data), __kCFLogAssertion, "%s(): data is immutable", __PRETTY_FUNCTION__);
    if (extraLength < 0) HALT; // Avoid integer overflow.
    CFDataSetLength(data, __CFDataLength(data) + extraLength);
}
void CFDataAppendBytes(CFMutableDataRef data, const uint8_t *bytes, CFIndex length) {
    CF_OBJC_FUNCDISPATCH2(__kCFDataTypeID, void, data, "appendBytes:length:", bytes, length);
    CFAssert1(__CFDataIsMutable(data), __kCFLogAssertion, "%s(): data is immutable", __PRETTY_FUNCTION__);
    CFDataReplaceBytes(data, CFRangeMake(__CFDataLength(data), 0), bytes, length);
}
void CFDataDeleteBytes(CFMutableDataRef data, CFRange range) {
    CF_OBJC_FUNCDISPATCH3(__kCFDataTypeID, void, data, "replaceBytesInRange:withBytes:length:", range, NULL, 0);
    CFAssert1(__CFDataIsMutable(data), __kCFLogAssertion, "%s(): data is immutable", __PRETTY_FUNCTION__);
    CFDataReplaceBytes(data, range, NULL, 0);
}
void CFDataReplaceBytes(CFMutableDataRef data, CFRange range, const uint8_t *newBytes, CFIndex newLength) {
    CF_OBJC_FUNCDISPATCH3(__kCFDataTypeID, void, data, "replaceBytesInRange:withBytes:length:", range, newBytes, newLength);
    __CFGenericValidateType(data, __kCFDataTypeID);
    __CFDataValidateRange(data, range, __PRETTY_FUNCTION__);
    CFAssert1(__CFDataIsMutable(data), __kCFLogAssertion, "%s(): data is immutable", __PRETTY_FUNCTION__);
    CFAssert2(0 <= newLength, __kCFLogAssertion, "%s(): newLength (%d) cannot be less than zero", __PRETTY_FUNCTION__, newLength);

    CFIndex len = __CFDataLength(data);
    if (len < 0 || range.length < 0 || newLength < 0) HALT;
    CFIndex newCount = len - range.length + newLength;
    if (newCount < 0) HALT;

    uint8_t *bytePtr = (uint8_t *)CFDataGetMutableBytePtr(data);
    uint8_t *srcBuf = (uint8_t *)newBytes;
    switch (__CFMutableVariety(data)) {
    case kCFMutable:
        if (__CFDataNumBytes(data) < newCount) {
            if (bytePtr && newBytes && newBytes < bytePtr + __CFDataCapacity(data) && bytePtr < newBytes + newLength) {
                // The new bytes overlap the current storage; copy them aside before growing, since growth may reallocate.
                srcBuf = (uint8_t *)malloc(newLength * sizeof(uint8_t));
                memmove(srcBuf, newBytes, newLength * sizeof(uint8_t));
            }
            __CFDataGrow(data, newLength - range.length, false);
            bytePtr = (uint8_t *)CFDataGetMutableBytePtr(data);
        }
        break;
    case kCFFixedMutable:
        CFAssert1(newCount <= __CFDataCapacity(data), __kCFLogAssertion, "%s(): fixed-capacity data is full", __PRETTY_FUNCTION__);
        // Continuing after this could cause buffer overruns.
        if (newCount > __CFDataCapacity(data)) HALT;
        break;
    }
    if (newLength != range.length && range.location + range.length < len) {
        memmove(bytePtr + range.location + newLength, bytePtr + range.location + range.length, (len - range.location - range.length) * sizeof(uint8_t));
    }
    if (0 < newLength) {
        memmove(bytePtr + range.location, srcBuf, newLength * sizeof(uint8_t));
    }
    if (srcBuf != newBytes) free(srcBuf);
    __CFDataSetNumBytesUsed(data, newCount);
    __CFDataSetLength(data, newCount);
}
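// Illustrative semantics of the routine above (hypothetical caller values):
// with a mutable data holding {0x00, 0x11, 0x22, 0x33}, replacing the range
// (location 1, length 2) with a single byte 0xAA moves the trailing 0x33 down
// and leaves {0x00, 0xAA, 0x33} with the length reduced from 4 to 3. A NULL
// newBytes with newLength 0, as used by CFDataDeleteBytes(), simply closes the
// gap.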
#define REVERSE_BUFFER(type, buf, len) { \
    type tmp; \
    for(int i = 0; i < (len)/2; i++) { \
        tmp = (buf)[i]; \
        (buf)[i] = (buf)[(len) - i - 1]; \
        (buf)[(len) - i - 1] = tmp; \
    } \
}
static void _computeGoodSubstringShift(const uint8_t *needle, int needleLength, unsigned long shift[], unsigned long suff[]) {
    int f, g, i, j;

    // Compute suffix lengths
    suff[needleLength - 1] = needleLength;
    f = g = needleLength - 1;
    for (i = needleLength - 2; i >= 0; --i) {
        if (i > g && suff[i + needleLength - 1 - f] < i - g)
            suff[i] = suff[i + needleLength - 1 - f];
        else {
            if (i < g)
                g = i;
            f = i;
            while (g >= 0 && needle[g] == needle[g + needleLength - 1 - f])
                --g;
            suff[i] = f - g;
        }
    }

    // Compute shift table
    for (i = 0; i < needleLength; ++i)
        shift[i] = needleLength;
    j = 0;
    for (i = needleLength - 1; i >= 0; --i)
        if (suff[i] == i + 1)
            for (; j < needleLength - 1 - i; ++j)
                if (shift[j] == needleLength)
                    shift[j] = needleLength - 1 - i;
    // Set the amount of shift necessary to move each of the suffix matches found into a position where it overlaps with the suffix. If there are duplicate matches the latest one is the one that should take effect.
    for (i = 0; i <= needleLength - 2; ++i)
        shift[needleLength - 1 - suff[i]] = needleLength - 1 - i;
    // Since the Boyer-Moore algorithm moves the pointer back while scanning substrings, add the distance to the end of the potential substring.
    for (i = 0; i < needleLength - 1; ++i) {
        shift[i] += (needleLength - 1 - i);
    }
}
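// A brief note on the tables built above, for readers unfamiliar with
// Boyer-Moore: suff[i] is the length of the longest substring ending at
// position i of the needle that is also a suffix of the whole needle, and
// shift[i] is how far the haystack pointer may safely move after a mismatch at
// needle position i, i.e. when the suffix to the right of i has already
// matched. The search routine below takes the larger of this "good suffix"
// shift and the per-byte "bad character" shift on every mismatch.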
static const uint8_t * __CFDataSearchBoyerMoore(const CFDataRef data, const uint8_t *haystack, unsigned long haystackLength, const uint8_t *needle, unsigned long needleLength, Boolean backwards) {
    unsigned long badCharacterShift[UCHAR_MAX + 1] = {0};
    unsigned long *goodSubstringShift = (unsigned long *)malloc(needleLength * sizeof(unsigned long));
    unsigned long *suffixLengths = (unsigned long *)malloc(needleLength * sizeof(unsigned long));
    if (!goodSubstringShift || !suffixLengths) {
        __CFDataHandleOutOfMemory(data, needleLength * sizeof(unsigned long));
    }

    if (backwards) {
        for (int i = 0; i < sizeof(badCharacterShift) / sizeof(*badCharacterShift); i++)
            badCharacterShift[i] = needleLength;

        for (int i = needleLength - 1; i >= 0; i--)
            badCharacterShift[needle[i]] = i;

        // To get the correct shift table for backwards search reverse the needle, compute the forwards shift table, and then reverse the result.
        uint8_t *needleCopy = (uint8_t *)malloc(needleLength * sizeof(uint8_t));
        if (!needleCopy) {
            __CFDataHandleOutOfMemory(data, needleLength * sizeof(uint8_t));
        }
        memmove(needleCopy, needle, needleLength);
        REVERSE_BUFFER(uint8_t, needleCopy, needleLength);
        _computeGoodSubstringShift(needleCopy, needleLength, goodSubstringShift, suffixLengths);
        REVERSE_BUFFER(unsigned long, goodSubstringShift, needleLength);
        free(needleCopy);
    } else {
        for (int i = 0; i < sizeof(badCharacterShift) / sizeof(*badCharacterShift); i++)
            badCharacterShift[i] = needleLength;

        for (int i = 0; i < needleLength; i++)
            badCharacterShift[needle[i]] = needleLength - i - 1;

        _computeGoodSubstringShift(needle, needleLength, goodSubstringShift, suffixLengths);
    }

    const uint8_t *scan_needle;
    const uint8_t *scan_haystack;
    const uint8_t *result = NULL;
    if (backwards) {
        const uint8_t *const end_needle = needle + needleLength;
        scan_needle = needle;
        scan_haystack = haystack + haystackLength - needleLength;
        while (scan_haystack >= haystack && scan_needle < end_needle) {
            if (*scan_haystack == *scan_needle) {
                scan_haystack++;
                scan_needle++;
            } else {
                scan_haystack -= __CFMax(badCharacterShift[*scan_haystack], goodSubstringShift[scan_needle - needle]);
                scan_needle = needle;
            }
        }
        if (scan_needle == end_needle) {
            result = (scan_haystack - needleLength);
        }
    } else {
        const uint8_t *const end_haystack = haystack + haystackLength;
        scan_needle = needle + needleLength - 1;
        scan_haystack = haystack + needleLength - 1;
        while (scan_haystack < end_haystack && scan_needle >= needle) {
            if (*scan_haystack == *scan_needle) {
                scan_haystack--;
                scan_needle--;
            } else {
                scan_haystack += __CFMax(badCharacterShift[*scan_haystack], goodSubstringShift[scan_needle - needle]);
                scan_needle = needle + needleLength - 1;
            }
        }
        if (scan_needle < needle) {
            result = (scan_haystack + 1);
        }
    }

    free(goodSubstringShift);
    free(suffixLengths);
    return result;
}
CFRange _CFDataFindBytes(CFDataRef data, CFDataRef dataToFind, CFRange searchRange, CFDataSearchFlags compareOptions) {
    const uint8_t *fullHaystack = CFDataGetBytePtr(data);
    const uint8_t *needle = CFDataGetBytePtr(dataToFind);
    unsigned long fullHaystackLength = CFDataGetLength(data);
    unsigned long needleLength = CFDataGetLength(dataToFind);

    if (compareOptions & kCFDataSearchAnchored) {
        if (searchRange.length > needleLength) {
            if (compareOptions & kCFDataSearchBackwards) {
                searchRange.location += (searchRange.length - needleLength);
            }
            searchRange.length = needleLength;
        }
    }
    if (searchRange.length > fullHaystackLength - searchRange.location) {
        searchRange.length = fullHaystackLength - searchRange.location;
    }

    if (searchRange.length < needleLength || fullHaystackLength == 0 || needleLength == 0) {
        return CFRangeMake(kCFNotFound, 0);
    }

    const uint8_t *haystack = fullHaystack + searchRange.location;
    const uint8_t *searchResult = __CFDataSearchBoyerMoore(data, haystack, searchRange.length, needle, needleLength, (compareOptions & kCFDataSearchBackwards) != 0);
    CFIndex resultLocation = (searchResult == NULL) ? kCFNotFound : searchRange.location + (searchResult - haystack);

    return CFRangeMake(resultLocation, resultLocation == kCFNotFound ? 0 : needleLength);
}
CFRange CFDataFind(CFDataRef data, CFDataRef dataToFind, CFRange searchRange, CFDataSearchFlags compareOptions) {
    __CFGenericValidateType(data, __kCFDataTypeID);
    __CFGenericValidateType(dataToFind, __kCFDataTypeID);
    __CFDataValidateRange(data, searchRange, __PRETTY_FUNCTION__);

    return _CFDataFindBytes(data, dataToFind, searchRange, compareOptions);
}
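// Illustrative caller-side sketch (hypothetical caller code): finding the first
// occurrence of a two-byte pattern in an existing CFDataRef d.
//
//     const uint8_t pat[] = { 0x0D, 0x0A };
//     CFDataRef pattern = CFDataCreate(kCFAllocatorDefault, pat, sizeof(pat));
//     CFRange found = CFDataFind(d, pattern, CFRangeMake(0, CFDataGetLength(d)), 0);
//     if (found.location != kCFNotFound) {
//         // found.length == 2 here, i.e. the length of the pattern
//     }
//     CFRelease(pattern);
//
// kCFDataSearchBackwards returns the last occurrence instead, and
// kCFDataSearchAnchored restricts the match to the start (or, with Backwards,
// the end) of the search range.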
#undef __CFDataValidateRange
#undef __CFGenericValidateMutabilityFlags
#undef INLINE_BYTES_THRESHOLD
#undef CFDATA_MAX_SIZE
#undef REVERSE_BUFFER