/*
 * Copyright (c) 2014 Apple Inc. All rights reserved.
 *
 * @APPLE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this
 * file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_LICENSE_HEADER_END@
 */
/*  CFData.c
    Copyright (c) 1998-2013, Apple Inc. All rights reserved.
    Responsibility: Kevin Perry
*/
#include <CoreFoundation/CFData.h>
#include <CoreFoundation/CFPriv.h>
#include "CFInternal.h"
#include <string.h>
#if __LP64__
#define CFDATA_MAX_SIZE ((1ULL << 42) - 1)
#else
#define CFDATA_MAX_SIZE ((1ULL << 31) - 1)
#endif
#if DEPLOYMENT_TARGET_MACOSX || DEPLOYMENT_TARGET_EMBEDDED || DEPLOYMENT_TARGET_EMBEDDED_MINI
CF_INLINE unsigned long __CFPageSize() { return vm_page_size; }
#elif DEPLOYMENT_TARGET_WINDOWS
CF_INLINE unsigned long __CFPageSize() {
    SYSTEM_INFO sysInfo;
    GetSystemInfo(&sysInfo);
    return sysInfo.dwPageSize;
}
#elif DEPLOYMENT_TARGET_LINUX
CF_INLINE unsigned long __CFPageSize() {
    return (unsigned long)getpagesize();
}
#endif

#define INLINE_BYTES_THRESHOLD ((4 * __CFPageSize()) - sizeof(struct __CFData) - 15)
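/* Capacities below this threshold are stored inline, immediately after the struct __CFData
   header (see __CFDataInlineBytesPtr below); larger or growable buffers are allocated out of
   line and tracked via _bytes. The 15-byte slop leaves room for the 16-byte alignment fixup
   applied to the inline bytes pointer. */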
struct __CFData {
    CFRuntimeBase _base;
    CFIndex _length;                    /* number of bytes */
    CFIndex _capacity;                  /* maximum number of bytes */
    CFAllocatorRef _bytesDeallocator;   /* used only for immutable; if NULL, no deallocation */
    uint8_t *_bytes;                    /* compaction: direct access to _bytes is only valid when data is not inline */
};
/*
 Bit 2 = bytes are stored inline
 Bit 3 = use given CFAllocator
 Bit 5 = allocate collectable memory

 Bits 1-0 are used for mutability variation

 Bit 6 = not all bytes have been zeroed yet (mutable)
 */
enum {
    __kCFMutable = 0x01,
    __kCFGrowable = 0x02,
    __kCFMutableVarietyMask = 0x03,
    __kCFBytesInline = 0x04,
    __kCFUseAllocator = 0x08,
    __kCFAllocatesCollectable = 0x20,
};
enum {
    kCFImmutable = 0x0,     /* unchangeable and fixed capacity; default */
    kCFFixedMutable = 0x1,  /* changeable and fixed capacity */
    kCFMutable = 0x3        /* changeable and variable capacity */
};
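/* For example, a small fixed-capacity mutable data created with a custom allocator ends up
   with info bits (__kCFBytesInline | __kCFUseAllocator | kCFFixedMutable) == 0x0D, since the
   mutability variety occupies bits 1-0 of the same info byte. */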
CF_INLINE void __CFDataSetInfoBits(CFDataRef data, UInt32 v) {__CFBitfieldSetValue(((CFRuntimeBase *)data)->_cfinfo[CF_INFO_BITS], 5, 0, v);}
CF_INLINE Boolean __CFDataGetInfoBit(CFDataRef data, UInt32 b) {return ((((const CFRuntimeBase *)data)->_cfinfo[CF_INFO_BITS] & b) != 0);}
CF_INLINE Boolean __CFDataIsMutable(CFDataRef data) {return __CFDataGetInfoBit(data, __kCFMutable);}
CF_INLINE Boolean __CFDataIsGrowable(CFDataRef data) {return __CFDataGetInfoBit(data, __kCFGrowable);}
CF_INLINE Boolean __CFDataBytesInline(CFDataRef data) {return __CFDataGetInfoBit(data, __kCFBytesInline);}
CF_INLINE Boolean __CFDataUseAllocator(CFDataRef data) {return __CFDataGetInfoBit(data, __kCFUseAllocator);}
CF_INLINE Boolean __CFDataAllocatesCollectable(CFDataRef data) {return __CFDataGetInfoBit(data, __kCFAllocatesCollectable);}

CF_INLINE UInt32 __CFMutableVariety(const void *cf) {
    return __CFBitfieldGetValue(((const CFRuntimeBase *)cf)->_cfinfo[CF_INFO_BITS], 1, 0);
}

CF_INLINE void __CFSetMutableVariety(void *cf, UInt32 v) {
    __CFBitfieldSetValue(((CFRuntimeBase *)cf)->_cfinfo[CF_INFO_BITS], 1, 0, v);
}

CF_INLINE UInt32 __CFMutableVarietyFromFlags(UInt32 flags) {
    return (flags & __kCFMutableVarietyMask);
}

#define __CFGenericValidateMutabilityFlags(flags) \
    CFAssert2(__CFMutableVarietyFromFlags(flags) != 0x2, __kCFLogAssertion, "%s(): flags 0x%x do not correctly specify the mutable variety", __PRETTY_FUNCTION__, flags);
CF_INLINE void __CFDataSetInline(CFDataRef data, Boolean flag) {
    __CFBitfieldSetValue(((CFRuntimeBase *)data)->_cfinfo[CF_INFO_BITS], 2, 2, (flag ? 1 : 0));
}

CF_INLINE Boolean __CFDataNeedsToZero(CFDataRef data) {
    return __CFBitfieldGetValue(((CFRuntimeBase *)data)->_cfinfo[CF_INFO_BITS], 6, 6);
}

CF_INLINE void __CFDataSetNeedsToZero(CFDataRef data, Boolean zero) {
    __CFBitfieldSetValue(((CFRuntimeBase *)data)->_cfinfo[CF_INFO_BITS], 6, 6, (zero ? 1 : 0));
}

CF_INLINE CFIndex __CFDataLength(CFDataRef data) {
    return data->_length;
}
CF_INLINE void __CFDataSetLength(CFMutableDataRef data, CFIndex v) {
    /* for a CFData, _bytesUsed == _length */
}

CF_INLINE CFIndex __CFDataCapacity(CFDataRef data) {
    return data->_capacity;
}

CF_INLINE void __CFDataSetCapacity(CFMutableDataRef data, CFIndex v) {
    /* for a CFData, _bytesNum == _capacity */
}

CF_INLINE CFIndex __CFDataNumBytesUsed(CFDataRef data) {
    return data->_length;
}

CF_INLINE void __CFDataSetNumBytesUsed(CFMutableDataRef data, CFIndex v) {
    data->_length = v;
}

CF_INLINE CFIndex __CFDataNumBytes(CFDataRef data) {
    return data->_capacity;
}

CF_INLINE void __CFDataSetNumBytes(CFMutableDataRef data, CFIndex v) {
    data->_capacity = v;
}
#if __LP64__
#define CHUNK_SIZE (1ULL << 29)
#define LOW_THRESHOLD (1ULL << 20)
#define HIGH_THRESHOLD (1ULL << 32)
#else
#define CHUNK_SIZE (1ULL << 26)
#define LOW_THRESHOLD (1ULL << 20)
#define HIGH_THRESHOLD (1ULL << 29)
#endif
CF_INLINE CFIndex __CFDataRoundUpCapacity(CFIndex capacity) {
    if (capacity < 16) {
        return 16;
    } else if (capacity < LOW_THRESHOLD) {
        /* Up to 4x */
        long idx = flsl(capacity);
        return (1L << (long)(idx + ((idx % 2 == 0) ? 0 : 1)));
    } else if (capacity < HIGH_THRESHOLD) {
        /* Up to 2x */
        return (1L << (long)flsl(capacity));
    } else {
        /* Round up to next multiple of CHUNK_SIZE */
        unsigned long newCapacity = CHUNK_SIZE * (1 + (capacity >> ((long)flsl(CHUNK_SIZE) - 1)));
        return __CFMin(newCapacity, CFDATA_MAX_SIZE);
    }
}
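/* Worked examples of the policy above: flsl(1000) == 10 (even), so a requested capacity of
   1000 rounds up to 1 << 10 == 1024; flsl(1500) == 11 (odd), so 1500 rounds up to
   1 << 12 == 4096. Small capacities therefore land on powers of four, mid-range capacities
   on powers of two, and very large ones on multiples of CHUNK_SIZE, capped at CFDATA_MAX_SIZE. */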
CF_INLINE CFIndex __CFDataNumBytesForCapacity(CFIndex capacity) {
    return capacity;
}
static void __CFDataHandleOutOfMemory(CFTypeRef obj, CFIndex numBytes) {
    CFStringRef msg;
    if (0 < numBytes && numBytes <= CFDATA_MAX_SIZE) {
        msg = CFStringCreateWithFormat(kCFAllocatorSystemDefault, NULL, CFSTR("Attempt to allocate %ld bytes for NS/CFData failed"), numBytes);
    } else {
        msg = CFStringCreateWithFormat(kCFAllocatorSystemDefault, NULL, CFSTR("Attempt to allocate %ld bytes for NS/CFData failed. Maximum size: %lld"), numBytes, CFDATA_MAX_SIZE);
    }
    CFLog(kCFLogLevelCritical, CFSTR("%@"), msg);
    HALT;
}
#if defined(DEBUG)
CF_INLINE void __CFDataValidateRange(CFDataRef data, CFRange range, const char *func) {
    CFAssert2(0 <= range.location && range.location <= __CFDataLength(data), __kCFLogAssertion, "%s(): range.location index (%d) out of bounds", func, range.location);
    CFAssert2(0 <= range.length, __kCFLogAssertion, "%s(): length (%d) cannot be less than zero", func, range.length);
    CFAssert2(range.location + range.length <= __CFDataLength(data), __kCFLogAssertion, "%s(): ending index (%d) out of bounds", func, range.location + range.length);
}
#else
#define __CFDataValidateRange(a,r,f)
#endif
static Boolean __CFDataEqual(CFTypeRef cf1, CFTypeRef cf2) {
    CFDataRef data1 = (CFDataRef)cf1;
    CFDataRef data2 = (CFDataRef)cf2;
    CFIndex length = __CFDataLength(data1);
    if (length != __CFDataLength(data2)) return false;
    const uint8_t *bytePtr1 = CFDataGetBytePtr(data1);
    const uint8_t *bytePtr2 = CFDataGetBytePtr(data2);
    return 0 == memcmp(bytePtr1, bytePtr2, length);
}
static CFHashCode __CFDataHash(CFTypeRef cf) {
    CFDataRef data = (CFDataRef)cf;
    return CFHashBytes((uint8_t *)CFDataGetBytePtr(data), __CFMin(__CFDataLength(data), 80));
}
static CFStringRef __CFDataCopyDescription(CFTypeRef cf) {
    CFDataRef data = (CFDataRef)cf;
    CFMutableStringRef result;
    CFIndex idx;
    CFIndex len;
    const uint8_t *bytes;
    len = __CFDataLength(data);
    bytes = CFDataGetBytePtr(data);
    result = CFStringCreateMutable(CFGetAllocator(data), 0);
    CFStringAppendFormat(result, NULL, CFSTR("<CFData %p [%p]>{length = %lu, capacity = %lu, bytes = 0x"), cf, CFGetAllocator(data), (unsigned long)len, (unsigned long)__CFDataCapacity(data));
    if (24 < len) {
        for (idx = 0; idx < 16; idx += 4) {
            CFStringAppendFormat(result, NULL, CFSTR("%02x%02x%02x%02x"), bytes[idx], bytes[idx + 1], bytes[idx + 2], bytes[idx + 3]);
        }
        CFStringAppend(result, CFSTR(" ... "));
        for (idx = len - 8; idx < len; idx += 4) {
            CFStringAppendFormat(result, NULL, CFSTR("%02x%02x%02x%02x"), bytes[idx], bytes[idx + 1], bytes[idx + 2], bytes[idx + 3]);
        }
    } else {
        for (idx = 0; idx < len; idx++) {
            CFStringAppendFormat(result, NULL, CFSTR("%02x"), bytes[idx]);
        }
    }
    CFStringAppend(result, CFSTR("}"));
    return result;
}
static void *__CFDataInlineBytesPtr(CFDataRef data) {
    return (void *)((uintptr_t)((int8_t *)data + sizeof(struct __CFData) + 15) & ~0xF);    // 16-byte align
}
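/* Illustrative arithmetic: if the object were at 0x1008 and sizeof(struct __CFData) were 0x30,
   the inline bytes would start at (0x1008 + 0x30 + 15) & ~0xF == 0x1040, the first 16-byte
   boundary at or past the end of the header. */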
static Boolean __CFDataShouldAllocateCleared(CFDataRef data, CFIndex size) {
    Boolean result;
    if (__CFDataUseAllocator(data)) {
        result = false;
    } else if (__CFDataAllocatesCollectable(data)) {
        result = (size > (64 * 1024));
    } else {
        result = (size > (128 * 1024));
    }
    return result;
}
// Check __CFDataShouldAllocateCleared before passing true.
static void *__CFDataAllocate(CFDataRef data, CFIndex size, Boolean clear) {
    void *bytes = NULL;
    if (__CFDataUseAllocator(data)) {
        CFAllocatorRef allocator = __CFGetAllocator(data);
        bytes = CFAllocatorAllocate(allocator, size, 0);
        if (clear) memset((uint8_t *)bytes, 0, size);
    } else if (__CFDataAllocatesCollectable(data)) {
        bytes = auto_zone_allocate_object(objc_collectableZone(), size, AUTO_MEMORY_UNSCANNED, 0, clear);
    } else if (clear) {
        bytes = calloc(1, size);
    } else {
        bytes = malloc(size);
    }
    return bytes;
}
static void __CFDataDeallocate(CFTypeRef cf) {
    CFMutableDataRef data = (CFMutableDataRef)cf;
    if (!__CFDataBytesInline(data)) {
        CFAllocatorRef deallocator = data->_bytesDeallocator;
        if (deallocator != NULL) {
            _CFAllocatorDeallocateGC(deallocator, data->_bytes);
            CFRelease(deallocator);
            data->_bytes = NULL;
        } else {
            if (__CFDataUseAllocator(data)) {
                _CFAllocatorDeallocateGC(__CFGetAllocator(data), data->_bytes);
            } else if (!__CFDataAllocatesCollectable(data) && data->_bytes) {
                free(data->_bytes);
            }
            data->_bytes = NULL;
        }
    }
}
static CFTypeID __kCFDataTypeID = _kCFRuntimeNotATypeID;

static const CFRuntimeClass __CFDataClass = {
    _kCFRuntimeScannedObject,
    "CFData",
    NULL,    // init
    NULL,    // copy
    __CFDataDeallocate,
    __CFDataEqual,
    __CFDataHash,
    NULL,    // copyFormattingDesc
    __CFDataCopyDescription
};
CF_PRIVATE void __CFDataInitialize(void) {
    __kCFDataTypeID = _CFRuntimeRegisterClass(&__CFDataClass);
}
CFTypeID CFDataGetTypeID(void) {
    return __kCFDataTypeID;
}
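/* __CFDataInit below chooses among three storage strategies: inline bytes appended to the
   object (small, non-growable, copied data), caller-supplied bytes retained together with a
   bytesDeallocator (no-copy data), and a separately allocated buffer owned by the data
   (growable or allocator-backed data). */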
// NULL bytesDeallocator to this function does not mean the default allocator, it means
// that there should be no deallocator, and the bytes should be copied.
static CFMutableDataRef __CFDataInit(CFAllocatorRef allocator, CFOptionFlags flags, CFIndex capacity, const uint8_t *bytes, CFIndex length, CFAllocatorRef bytesDeallocator) {
    CFMutableDataRef memory;
    __CFGenericValidateMutabilityFlags(flags);
    CFAssert2(0 <= capacity, __kCFLogAssertion, "%s(): capacity (%d) cannot be less than zero", __PRETTY_FUNCTION__, capacity);
    CFAssert3(kCFFixedMutable != __CFMutableVarietyFromFlags(flags) || length <= capacity, __kCFLogAssertion, "%s(): for kCFFixedMutable type, capacity (%d) must be greater than or equal to number of initial elements (%d)", __PRETTY_FUNCTION__, capacity, length);
    CFAssert2(0 <= length, __kCFLogAssertion, "%s(): length (%d) cannot be less than zero", __PRETTY_FUNCTION__, length);

    Boolean collectableMemory = CF_IS_COLLECTABLE_ALLOCATOR(allocator);
    Boolean noCopy = bytesDeallocator != NULL;
    Boolean isMutable = ((flags & __kCFMutable) != 0);
    Boolean isGrowable = ((flags & __kCFGrowable) != 0);
    Boolean allocateInline = !isGrowable && !noCopy && capacity < INLINE_BYTES_THRESHOLD;
    allocator = (allocator == NULL) ? __CFGetDefaultAllocator() : allocator;
    Boolean useAllocator = (allocator != kCFAllocatorSystemDefault && allocator != kCFAllocatorMalloc && allocator != kCFAllocatorMallocZone);

    CFIndex size = sizeof(struct __CFData) - sizeof(CFRuntimeBase);
    if (allocateInline) {
        size += sizeof(uint8_t) * __CFDataNumBytesForCapacity(capacity) + sizeof(uint8_t) * 15;    // for 16-byte alignment fixup
    }
    memory = (CFMutableDataRef)_CFRuntimeCreateInstance(allocator, __kCFDataTypeID, size, NULL);
    if (NULL == memory) {
        return NULL;
    }
    __CFDataSetNumBytesUsed(memory, 0);
    __CFDataSetLength(memory, 0);
    __CFDataSetInfoBits(memory,
                        (allocateInline ? __kCFBytesInline : 0) |
                        (useAllocator ? __kCFUseAllocator : 0) |
                        (collectableMemory ? __kCFAllocatesCollectable : 0));

    Boolean finalize = true;
    Boolean scan = true;
    if (collectableMemory) {
        if (allocateInline) {
            // We have no pointer to anything that needs to be reclaimed, so don't scan or finalize.
            scan = false;
            finalize = false;
        } else if (noCopy) {
            if (CF_IS_COLLECTABLE_ALLOCATOR(bytesDeallocator)) {
                // We're taking responsibility for externally GC-allocated memory, so scan us, but we don't need to finalize.
                finalize = false;
            } else if (bytesDeallocator == kCFAllocatorNull) {
                // We don't have responsibility for these bytes, so there's no need to be scanned and we don't need to finalize.
                scan = false;
                finalize = false;
            } else {
                // We have a pointer to non-GC-allocated memory, so don't scan, but do finalize.
                scan = false;
            }
        }
        if (!scan) auto_zone_set_unscanned(objc_collectableZone(), memory);
        if (!finalize) auto_zone_set_nofinalize(objc_collectableZone(), memory);
    }
    if (isMutable && isGrowable) {
        __CFDataSetCapacity(memory, __CFDataRoundUpCapacity(1));
        __CFDataSetNumBytes(memory, __CFDataNumBytesForCapacity(__CFDataRoundUpCapacity(1)));
        __CFSetMutableVariety(memory, kCFMutable);
    } else {
        /* Don't round up capacity */
        __CFDataSetCapacity(memory, capacity);
        __CFDataSetNumBytes(memory, __CFDataNumBytesForCapacity(capacity));
        __CFSetMutableVariety(memory, kCFFixedMutable);
    }
    if (noCopy) {
        __CFAssignWithWriteBarrier((void **)&memory->_bytes, (uint8_t *)bytes);
        if (finalize) {
            if ((0)) {    // (0) stands in for the compiled-out GC ref-zero allocator check, as elsewhere in this file
                memory->_bytesDeallocator = bytesDeallocator;
            } else {
                memory->_bytesDeallocator = (CFAllocatorRef)CFRetain(bytesDeallocator);
            }
        }
        if (CF_IS_COLLECTABLE_ALLOCATOR(bytesDeallocator) && !(0)) {
            // we assume that the no-copy memory is GC-allocated with a retain count of (at least) 1 and we should release it now instead of waiting until __CFDataDeallocate.
            auto_zone_release(objc_collectableZone(), memory->_bytes);
        }
        __CFDataSetNumBytesUsed(memory, length);
        __CFDataSetLength(memory, length);
        // Mutable no-copy datas are not allowed, so don't bother setting needsToZero flag.
    } else {
        Boolean cleared = (isMutable && !isGrowable && !_CFExecutableLinkedOnOrAfter(CFSystemVersionSnowLeopard));
        if (!allocateInline) {
            // assume that allocators give 16-byte aligned memory back -- it is their responsibility
            __CFAssignWithWriteBarrier((void **)&memory->_bytes, __CFDataAllocate(memory, __CFDataNumBytes(memory) * sizeof(uint8_t), cleared));
            if (__CFOASafe) __CFSetLastAllocationEventName(memory->_bytes, "CFData (store)");
            if (NULL == memory->_bytes) {
                CFRelease(memory);
                return NULL;
            }
        } else {
            if (length == 0 && !isMutable) {
                // NSData sets its bytes pointer to NULL when its length is zero. Starting in 10.7 we do the same for CFData.
                memory->_bytes = NULL;
                // It is important to set this data as not inlined, so we do not recalculate a bytes pointer from null.
                __CFDataSetInline(memory, false);
            }
            cleared = true;
        }
        __CFDataSetNeedsToZero(memory, !cleared);
        memory->_bytesDeallocator = NULL;
        CFDataReplaceBytes(memory, CFRangeMake(0, 0), bytes, length);
    }
    __CFSetMutableVariety(memory, __CFMutableVarietyFromFlags(flags));
    return memory;
}
CFDataRef CFDataCreate(CFAllocatorRef allocator, const uint8_t *bytes, CFIndex length) {
    return __CFDataInit(allocator, kCFImmutable, length, bytes, length, NULL);
}
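/* Typical use (illustrative):
     const uint8_t payload[] = { 0xDE, 0xAD, 0xBE, 0xEF };
     CFDataRef d = CFDataCreate(kCFAllocatorDefault, payload, sizeof(payload));
     ...
     CFRelease(d);
   The bytes are copied, so payload may be a stack buffer. */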
CFDataRef CFDataCreateWithBytesNoCopy(CFAllocatorRef allocator, const uint8_t *bytes, CFIndex length, CFAllocatorRef bytesDeallocator) {
    CFAssert1((0 == length || bytes != NULL), __kCFLogAssertion, "%s(): bytes pointer cannot be NULL if length is non-zero", __PRETTY_FUNCTION__);
    if (NULL == bytesDeallocator) bytesDeallocator = __CFGetDefaultAllocator();
    return __CFDataInit(allocator, kCFImmutable, length, bytes, length, bytesDeallocator);
}
CFDataRef CFDataCreateCopy(CFAllocatorRef allocator, CFDataRef data) {
    CFIndex length = CFDataGetLength(data);
    return __CFDataInit(allocator, kCFImmutable, length, CFDataGetBytePtr(data), length, NULL);
}
CFMutableDataRef CFDataCreateMutable(CFAllocatorRef allocator, CFIndex capacity) {
    // Do not allow magic allocator for now for mutable datas, because it
    // isn't remembered for proper handling later when the buffer needs to grow.
    Boolean wasMagic = (0);
    CFMutableDataRef r = (CFMutableDataRef)__CFDataInit(allocator, (0 == capacity) ? kCFMutable : kCFFixedMutable, capacity, NULL, 0, NULL);
    if (wasMagic) CFMakeCollectable(r);
    return r;
}
CFMutableDataRef CFDataCreateMutableCopy(CFAllocatorRef allocator, CFIndex capacity, CFDataRef data) {
    // Do not allow magic allocator for now for mutable datas, because it
    // isn't remembered for proper handling later when the buffer needs to grow.
    Boolean wasMagic = (0);
    CFMutableDataRef r = (CFMutableDataRef)__CFDataInit(allocator, (0 == capacity) ? kCFMutable : kCFFixedMutable, capacity, CFDataGetBytePtr(data), CFDataGetLength(data), NULL);
    if (wasMagic) CFMakeCollectable(r);
    return r;
}
CFIndex CFDataGetLength(CFDataRef data) {
    CF_OBJC_FUNCDISPATCHV(__kCFDataTypeID, CFIndex, (NSData *)data, length);
    __CFGenericValidateType(data, __kCFDataTypeID);
    return __CFDataLength(data);
}
const uint8_t *CFDataGetBytePtr(CFDataRef data) {
    CF_OBJC_FUNCDISPATCHV(__kCFDataTypeID, const uint8_t *, (NSData *)data, bytes);
    __CFGenericValidateType(data, __kCFDataTypeID);
    // compaction: if inline, always do the computation.
    return __CFDataBytesInline(data) ? (uint8_t *)__CFDataInlineBytesPtr(data) : data->_bytes;
}
uint8_t *CFDataGetMutableBytePtr(CFMutableDataRef data) {
    CF_OBJC_FUNCDISPATCHV(__kCFDataTypeID, uint8_t *, (NSMutableData *)data, mutableBytes);
    CFAssert1(__CFDataIsMutable(data), __kCFLogAssertion, "%s(): data is immutable", __PRETTY_FUNCTION__);
    // compaction: if inline, always do the computation.
    return __CFDataBytesInline(data) ? (uint8_t *)__CFDataInlineBytesPtr(data) : data->_bytes;
}
void CFDataGetBytes(CFDataRef data, CFRange range, uint8_t *buffer) {
    CF_OBJC_FUNCDISPATCHV(__kCFDataTypeID, void, (NSData *)data, getBytes:(void *)buffer range:NSMakeRange(range.location, range.length));
    __CFDataValidateRange(data, range, __PRETTY_FUNCTION__);
    memmove(buffer, CFDataGetBytePtr(data) + range.location, range.length);
}
/* Allocates a new block of data with at least numNewValues more bytes than the current length. If clear is true, the new bytes up to at least the new length will be zeroed. */
static void __CFDataGrow(CFMutableDataRef data, CFIndex numNewValues, Boolean clear) {
    CFIndex oldLength = __CFDataLength(data);
    CFIndex newLength = oldLength + numNewValues;
    if (newLength > CFDATA_MAX_SIZE || newLength < 0) __CFDataHandleOutOfMemory(data, newLength * sizeof(uint8_t));
    CFIndex capacity = __CFDataRoundUpCapacity(newLength);
    CFIndex numBytes = __CFDataNumBytesForCapacity(capacity);
    CFAllocatorRef allocator = CFGetAllocator(data);
    void *bytes = NULL;
    void *oldBytes = data->_bytes;
    Boolean allocateCleared = clear && __CFDataShouldAllocateCleared(data, numBytes);
    if (allocateCleared && !__CFDataUseAllocator(data) && (oldLength == 0 || (newLength / oldLength) > 4)) {
        // If the length that needs to be zeroed is significantly greater than the length of the data, then calloc/memmove is probably more efficient than realloc/memset.
        bytes = __CFDataAllocate(data, numBytes * sizeof(uint8_t), true);
        if (NULL != bytes) {
            memmove(bytes, oldBytes, oldLength);
            __CFDataDeallocate(data);
        }
    }
    if (NULL == bytes) {
        // If the calloc/memmove approach either failed or was never attempted, then realloc.
        allocateCleared = false;
        if (__CFDataUseAllocator(data)) {
            bytes = CFAllocatorReallocate(allocator, oldBytes, numBytes * sizeof(uint8_t), 0);
        } else {
            bytes = realloc(oldBytes, numBytes * sizeof(uint8_t));
        }
    }
    if (NULL == bytes) __CFDataHandleOutOfMemory(data, numBytes * sizeof(uint8_t));
    __CFDataSetCapacity(data, capacity);
    __CFDataSetNumBytes(data, numBytes);
    if (clear && !allocateCleared && oldLength < newLength) memset((uint8_t *)bytes + oldLength, 0, newLength - oldLength);
    __CFDataSetNeedsToZero(data, !allocateCleared);
    __CFAssignWithWriteBarrier((void **)&data->_bytes, bytes);
    if (__CFOASafe) __CFSetLastAllocationEventName(data->_bytes, "CFData (store)");
}
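/* CFDataSetLength below relies on the needsToZero bit maintained here and in __CFDataGrow:
   when a data is shrunk and later re-extended, the re-exposed bytes are zero-filled so stale
   contents are never handed back to callers. */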
void CFDataSetLength(CFMutableDataRef data, CFIndex newLength) {
    CFIndex oldLength, capacity;
    Boolean isGrowable;
    CF_OBJC_FUNCDISPATCHV(__kCFDataTypeID, void, (NSMutableData *)data, setLength:(NSUInteger)newLength);
    CFAssert1(__CFDataIsMutable(data), __kCFLogAssertion, "%s(): data is immutable", __PRETTY_FUNCTION__);
    oldLength = __CFDataLength(data);
    capacity = __CFDataCapacity(data);
    isGrowable = __CFDataIsGrowable(data);
    if (__CFDataIsMutable(data)) {
        if (newLength < 0) {
            if (isGrowable) {
                __CFDataHandleOutOfMemory(data, newLength);
            } else {
                HALT;
            }
        } else if (capacity < newLength) {
            if (isGrowable) {
                __CFDataGrow(data, newLength - oldLength, true);
            } else {
                CFAssert1(newLength <= __CFDataCapacity(data), __kCFLogAssertion, "%s(): fixed-capacity data is full", __PRETTY_FUNCTION__);
            }
        } else if (oldLength < newLength && __CFDataNeedsToZero(data)) {
            memset(CFDataGetMutableBytePtr(data) + oldLength, 0, newLength - oldLength);
        } else if (newLength < oldLength) {
            __CFDataSetNeedsToZero(data, true);
        }
    }
    __CFDataSetLength(data, newLength);
    __CFDataSetNumBytesUsed(data, newLength);
}
void CFDataIncreaseLength(CFMutableDataRef data, CFIndex extraLength) {
    CF_OBJC_FUNCDISPATCHV(__kCFDataTypeID, void, (NSMutableData *)data, increaseLengthBy:(NSUInteger)extraLength);
    CFAssert1(__CFDataIsMutable(data), __kCFLogAssertion, "%s(): data is immutable", __PRETTY_FUNCTION__);
    if (extraLength < 0) HALT; // Avoid integer overflow.
    CFDataSetLength(data, __CFDataLength(data) + extraLength);
}
void CFDataAppendBytes(CFMutableDataRef data, const uint8_t *bytes, CFIndex length) {
    CF_OBJC_FUNCDISPATCHV(__kCFDataTypeID, void, (NSMutableData *)data, appendBytes:(const void *)bytes length:(NSUInteger)length);
    CFAssert1(__CFDataIsMutable(data), __kCFLogAssertion, "%s(): data is immutable", __PRETTY_FUNCTION__);
    CFDataReplaceBytes(data, CFRangeMake(__CFDataLength(data), 0), bytes, length);
}
void CFDataDeleteBytes(CFMutableDataRef data, CFRange range) {
    CF_OBJC_FUNCDISPATCHV(__kCFDataTypeID, void, (NSMutableData *)data, replaceBytesInRange:NSMakeRange(range.location, range.length) withBytes:NULL length:0);
    CFAssert1(__CFDataIsMutable(data), __kCFLogAssertion, "%s(): data is immutable", __PRETTY_FUNCTION__);
    CFDataReplaceBytes(data, range, NULL, 0);
}
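/* CFDataReplaceBytes below accepts newBytes that alias the data's own buffer: if growth is
   required and the source range overlaps the store, the incoming bytes are first copied into a
   temporary malloc'd buffer (srcBuf) so the subsequent memmoves stay well-defined. */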
void CFDataReplaceBytes(CFMutableDataRef data, CFRange range, const uint8_t *newBytes, CFIndex newLength) {
    CF_OBJC_FUNCDISPATCHV(__kCFDataTypeID, void, (NSMutableData *)data, replaceBytesInRange:NSMakeRange(range.location, range.length) withBytes:(const void *)newBytes length:(NSUInteger)newLength);
    __CFGenericValidateType(data, __kCFDataTypeID);
    __CFDataValidateRange(data, range, __PRETTY_FUNCTION__);
    CFAssert1(__CFDataIsMutable(data), __kCFLogAssertion, "%s(): data is immutable", __PRETTY_FUNCTION__);
    CFAssert2(0 <= newLength, __kCFLogAssertion, "%s(): newLength (%d) cannot be less than zero", __PRETTY_FUNCTION__, newLength);

    CFIndex len = __CFDataLength(data);
    if (len < 0 || range.length < 0 || newLength < 0) HALT;
    CFIndex newCount = len - range.length + newLength;
    if (newCount < 0) HALT;

    uint8_t *bytePtr = (uint8_t *)CFDataGetMutableBytePtr(data);
    uint8_t *srcBuf = (uint8_t *)newBytes;
    switch (__CFMutableVariety(data)) {
    case kCFMutable:
        if (__CFDataNumBytes(data) < newCount) {
            if (bytePtr && newBytes && newBytes < bytePtr + __CFDataCapacity(data) && bytePtr < newBytes + newLength) {
                srcBuf = (uint8_t *)malloc(newLength * sizeof(uint8_t));
                memmove(srcBuf, newBytes, newLength * sizeof(uint8_t));
            }
            __CFDataGrow(data, newLength - range.length, false);
            bytePtr = (uint8_t *)CFDataGetMutableBytePtr(data);
        }
        break;
    case kCFFixedMutable:
        CFAssert1(newCount <= __CFDataCapacity(data), __kCFLogAssertion, "%s(): fixed-capacity data is full", __PRETTY_FUNCTION__);
        // Continuing after this could cause buffer overruns.
        if (newCount > __CFDataCapacity(data)) HALT;
        break;
    }
    if (newLength != range.length && range.location + range.length < len) {
        memmove(bytePtr + range.location + newLength, bytePtr + range.location + range.length, (len - range.location - range.length) * sizeof(uint8_t));
    }
    if (0 < newLength) {
        memmove(bytePtr + range.location, srcBuf, newLength * sizeof(uint8_t));
    }
    if (srcBuf != newBytes) free(srcBuf);
    __CFDataSetNumBytesUsed(data, newCount);
    __CFDataSetLength(data, newCount);
}
#define REVERSE_BUFFER(type, buf, len) { \
    type tmp; \
    for(int i = 0; i < (len)/2; i++) { \
        tmp = (buf)[i]; \
        (buf)[i] = (buf)[(len) - i - 1]; \
        (buf)[(len) - i - 1] = tmp; \
    } \
}
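/* Boyer-Moore support: _computeGoodSubstringShift fills the classic "good suffix" shift table
   (plus a scratch suffix-length table), while __CFDataSearchBoyerMoore also builds a 256-entry
   "bad character" table so each mismatch can skip ahead by the larger of the two shifts. */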
static void _computeGoodSubstringShift(const uint8_t *needle, int needleLength, unsigned long shift[], unsigned long suff[]) {
    int f, g, i, j;

    // Compute suffix lengths

    suff[needleLength - 1] = needleLength;
    f = g = needleLength - 1;
    for (i = needleLength - 2; i >= 0; --i) {
        if (i > g && suff[i + needleLength - 1 - f] < i - g) {
            suff[i] = suff[i + needleLength - 1 - f];
        } else {
            if (i < g) g = i;
            f = i;
            while (g >= 0 && needle[g] == needle[g + needleLength - 1 - f])
                --g;
            suff[i] = f - g;
        }
    }

    // Compute shift table

    for (i = 0; i < needleLength; ++i)
        shift[i] = needleLength;
    j = 0;
    for (i = needleLength - 1; i >= 0; --i)
        if (suff[i] == i + 1)
            for (; j < needleLength - 1 - i; ++j)
                if (shift[j] == needleLength)
                    shift[j] = needleLength - 1 - i;
    // Set the amount of shift necessary to move each of the suffix matches found into a position where it overlaps with the suffix. If there are duplicate matches the latest one is the one that should take effect.
    for (i = 0; i <= needleLength - 2; ++i)
        shift[needleLength - 1 - suff[i]] = needleLength - 1 - i;
    // Since the Boyer-Moore algorithm moves the pointer back while scanning substrings, add the distance to the end of the potential substring.
    for (i = 0; i < needleLength - 1; ++i) {
        shift[i] += (needleLength - 1 - i);
    }
}
static const uint8_t * __CFDataSearchBoyerMoore(const CFDataRef data, const uint8_t *haystack, unsigned long haystackLength, const uint8_t *needle, unsigned long needleLength, Boolean backwards) {
    unsigned long badCharacterShift[UCHAR_MAX + 1] = {0};
    unsigned long *goodSubstringShift = (unsigned long *)malloc(needleLength * sizeof(unsigned long));
    unsigned long *suffixLengths = (unsigned long *)malloc(needleLength * sizeof(unsigned long));
    if (!goodSubstringShift || !suffixLengths) {
        __CFDataHandleOutOfMemory(data, needleLength * sizeof(unsigned long));
    }

    if (backwards) {
        for (int i = 0; i < sizeof(badCharacterShift) / sizeof(*badCharacterShift); i++)
            badCharacterShift[i] = needleLength;

        for (int i = needleLength - 1; i >= 0; i--)
            badCharacterShift[needle[i]] = i;

        // To get the correct shift table for backwards search reverse the needle, compute the forwards shift table, and then reverse the result.
        uint8_t *needleCopy = (uint8_t *)malloc(needleLength * sizeof(uint8_t));
        if (!needleCopy) {
            __CFDataHandleOutOfMemory(data, needleLength * sizeof(uint8_t));
        }
        memmove(needleCopy, needle, needleLength);
        REVERSE_BUFFER(uint8_t, needleCopy, needleLength);
        _computeGoodSubstringShift(needleCopy, needleLength, goodSubstringShift, suffixLengths);
        REVERSE_BUFFER(unsigned long, goodSubstringShift, needleLength);
        free(needleCopy);
    } else {
        for (int i = 0; i < sizeof(badCharacterShift) / sizeof(*badCharacterShift); i++)
            badCharacterShift[i] = needleLength;

        for (int i = 0; i < needleLength; i++)
            badCharacterShift[needle[i]] = needleLength - i - 1;

        _computeGoodSubstringShift(needle, needleLength, goodSubstringShift, suffixLengths);
    }

    const uint8_t *scan_needle;
    const uint8_t *scan_haystack;
    const uint8_t *result = NULL;
    if (backwards) {
        const uint8_t *const end_needle = needle + needleLength;
        scan_needle = needle;
        scan_haystack = haystack + haystackLength - needleLength;
        while (scan_haystack >= haystack && scan_needle < end_needle) {
            if (*scan_haystack == *scan_needle) {
                scan_haystack++;
                scan_needle++;
            } else {
                scan_haystack -= __CFMax(badCharacterShift[*scan_haystack], goodSubstringShift[scan_needle - needle]);
                scan_needle = needle;
            }
        }
        if (scan_needle == end_needle) {
            result = (scan_haystack - needleLength);
        }
    } else {
        const uint8_t *const end_haystack = haystack + haystackLength;
        scan_needle = needle + needleLength - 1;
        scan_haystack = haystack + needleLength - 1;
        while (scan_haystack < end_haystack && scan_needle >= needle) {
            if (*scan_haystack == *scan_needle) {
                scan_haystack--;
                scan_needle--;
            } else {
                scan_haystack += __CFMax(badCharacterShift[*scan_haystack], goodSubstringShift[scan_needle - needle]);
                scan_needle = needle + needleLength - 1;
            }
        }
        if (scan_needle < needle) {
            result = (scan_haystack + 1);
        }
    }

    free(goodSubstringShift);
    free(suffixLengths);
    return result;
}
CFRange _CFDataFindBytes(CFDataRef data, CFDataRef dataToFind, CFRange searchRange, CFDataSearchFlags compareOptions) {
    const uint8_t *fullHaystack = CFDataGetBytePtr(data);
    const uint8_t *needle = CFDataGetBytePtr(dataToFind);
    unsigned long fullHaystackLength = CFDataGetLength(data);
    unsigned long needleLength = CFDataGetLength(dataToFind);

    if (compareOptions & kCFDataSearchAnchored) {
        if (searchRange.length > needleLength) {
            if (compareOptions & kCFDataSearchBackwards) {
                searchRange.location += (searchRange.length - needleLength);
            }
            searchRange.length = needleLength;
        }
    }
    if (searchRange.length > fullHaystackLength - searchRange.location) {
        searchRange.length = fullHaystackLength - searchRange.location;
    }

    if (searchRange.length < needleLength || fullHaystackLength == 0 || needleLength == 0) {
        return CFRangeMake(kCFNotFound, 0);
    }

    const uint8_t *haystack = fullHaystack + searchRange.location;
    const uint8_t *searchResult = __CFDataSearchBoyerMoore(data, haystack, searchRange.length, needle, needleLength, (compareOptions & kCFDataSearchBackwards) != 0);
    CFIndex resultLocation = (searchResult == NULL) ? kCFNotFound : searchRange.location + (searchResult - haystack);

    return CFRangeMake(resultLocation, resultLocation == kCFNotFound ? 0 : needleLength);
}
CFRange CFDataFind(CFDataRef data, CFDataRef dataToFind, CFRange searchRange, CFDataSearchFlags compareOptions) {
    __CFGenericValidateType(data, __kCFDataTypeID);
    __CFGenericValidateType(dataToFind, __kCFDataTypeID);
    __CFDataValidateRange(data, searchRange, __PRETTY_FUNCTION__);

    return _CFDataFindBytes(data, dataToFind, searchRange, compareOptions);
}
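/* Example search (illustrative; haystackData is any CFDataRef):
     CFDataRef pattern = CFDataCreate(kCFAllocatorDefault, (const uint8_t *)"\r\n", 2);
     CFRange where = CFDataFind(haystackData, pattern, CFRangeMake(0, CFDataGetLength(haystackData)), 0);
     // where.location == kCFNotFound (and where.length == 0) when the pattern does not occur.
     CFRelease(pattern);
   Pass kCFDataSearchBackwards and/or kCFDataSearchAnchored in compareOptions to alter the scan. */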
#undef __CFDataValidateRange
#undef __CFGenericValidateMutabilityFlags
#undef INLINE_BYTES_THRESHOLD
#undef CFDATA_MAX_SIZE
#undef REVERSE_BUFFER