/*
 * Copyright (c) 2012 Apple Inc. All rights reserved.
 *
 * @APPLE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this
 * file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_LICENSE_HEADER_END@
 */

/*	CFData.c
	Copyright (c) 1998-2012, Apple Inc. All rights reserved.
	Responsibility: Kevin Perry
*/
#include <CoreFoundation/CFData.h>
#include <CoreFoundation/CFPriv.h>
#include <string.h>
#include <stdlib.h>
#include "CFInternal.h"
/* The maximum size of a CFData, dictated by the width of CFIndex.
   NOTE(review): the #if/#else/#endif structure was lost in extraction and is
   restored here on the usual __LP64__ split — verify against upstream. */
#if __LP64__
#define CFDATA_MAX_SIZE	    ((1ULL << 42) - 1)
#else
#define CFDATA_MAX_SIZE	    ((1ULL << 31) - 1)
#endif

/* Platform page size, used to size INLINE_BYTES_THRESHOLD below. */
#if DEPLOYMENT_TARGET_MACOSX || DEPLOYMENT_TARGET_EMBEDDED || DEPLOYMENT_TARGET_EMBEDDED_MINI
CF_INLINE unsigned long __CFPageSize() { return vm_page_size; }
#elif DEPLOYMENT_TARGET_WINDOWS
CF_INLINE unsigned long __CFPageSize() {
    SYSTEM_INFO sysInfo;
    GetSystemInfo(&sysInfo);
    return sysInfo.dwPageSize;
}
#elif DEPLOYMENT_TARGET_LINUX
CF_INLINE unsigned long __CFPageSize() {
    return (unsigned long)getpagesize();
}
#endif

/* Datas with capacity below roughly four pages (minus the object header and
   16-byte-alignment slop) store their bytes inline, right after the struct. */
#define INLINE_BYTES_THRESHOLD ((4 * __CFPageSize()) - sizeof(struct __CFData) - 15)
62 CFIndex _length
; /* number of bytes */
63 CFIndex _capacity
; /* maximum number of bytes */
64 CFAllocatorRef _bytesDeallocator
; /* used only for immutable; if NULL, no deallocation */
65 uint8_t *_bytes
; /* compaction: direct access to _bytes is only valid when data is not inline */
72 Bit 3 = use given CFAllocator
73 Bit 5 = allocate collectable memory
75 Bits 1-0 are used for mutability variation
77 Bit 6 = not all bytes have been zeroed yet (mutable)
83 __kCFMutableVarietyMask
= 0x03,
84 __kCFBytesInline
= 0x04,
85 __kCFUseAllocator
= 0x08,
86 __kCFAllocatesCollectable
= 0x20,
90 kCFImmutable
= 0x0, /* unchangable and fixed capacity; default */
91 kCFFixedMutable
= 0x1, /* changeable and fixed capacity */
92 kCFMutable
= 0x3 /* changeable and variable capacity */
95 CF_INLINE
void __CFDataSetInfoBits(CFDataRef data
, UInt32 v
) {__CFBitfieldSetValue(((CFRuntimeBase
*)data
)->_cfinfo
[CF_INFO_BITS
], 5, 0, v
);}
96 CF_INLINE Boolean
__CFDataGetInfoBit(CFDataRef data
, UInt32 b
) {return ((((const CFRuntimeBase
*)data
)->_cfinfo
[CF_INFO_BITS
] & b
) != 0);}
97 CF_INLINE Boolean
__CFDataIsMutable(CFDataRef data
) {return __CFDataGetInfoBit(data
, __kCFMutable
);}
98 CF_INLINE Boolean
__CFDataIsGrowable(CFDataRef data
) {return __CFDataGetInfoBit(data
, __kCFGrowable
);}
99 CF_INLINE Boolean
__CFDataBytesInline(CFDataRef data
) {return __CFDataGetInfoBit(data
, __kCFBytesInline
);}
100 CF_INLINE Boolean
__CFDataUseAllocator(CFDataRef data
) {return __CFDataGetInfoBit(data
, __kCFUseAllocator
);}
101 CF_INLINE Boolean
__CFDataAllocatesCollectable(CFDataRef data
) {return __CFDataGetInfoBit(data
, __kCFAllocatesCollectable
);}
103 CF_INLINE UInt32
__CFMutableVariety(const void *cf
) {
104 return __CFBitfieldGetValue(((const CFRuntimeBase
*)cf
)->_cfinfo
[CF_INFO_BITS
], 1, 0);
107 CF_INLINE
void __CFSetMutableVariety(void *cf
, UInt32 v
) {
108 __CFBitfieldSetValue(((CFRuntimeBase
*)cf
)->_cfinfo
[CF_INFO_BITS
], 1, 0, v
);
111 CF_INLINE UInt32
__CFMutableVarietyFromFlags(UInt32 flags
) {
112 return (flags
& __kCFMutableVarietyMask
);
/* 0x2 (growable but not mutable) is the one invalid mutability encoding. */
#define __CFGenericValidateMutabilityFlags(flags) \
    CFAssert2(__CFMutableVarietyFromFlags(flags) != 0x2, __kCFLogAssertion, "%s(): flags 0x%x do not correctly specify the mutable variety", __PRETTY_FUNCTION__, flags);
118 CF_INLINE
void __CFDataSetInline(CFDataRef data
, Boolean flag
) {
119 __CFBitfieldSetValue(((CFRuntimeBase
*)data
)->_cfinfo
[CF_INFO_BITS
], 2, 2, (flag
? 1 : 0));
122 CF_INLINE Boolean
__CFDataNeedsToZero(CFDataRef data
) {
123 return __CFBitfieldGetValue(((CFRuntimeBase
*)data
)->_cfinfo
[CF_INFO_BITS
], 6, 6);
126 CF_INLINE
void __CFDataSetNeedsToZero(CFDataRef data
, Boolean zero
) {
127 __CFBitfieldSetValue(((CFRuntimeBase
*)data
)->_cfinfo
[CF_INFO_BITS
], 6, 6, (zero
? 1 : 0));
130 CF_INLINE CFIndex
__CFDataLength(CFDataRef data
) {
131 return data
->_length
;
134 CF_INLINE
void __CFDataSetLength(CFMutableDataRef data
, CFIndex v
) {
135 /* for a CFData, _bytesUsed == _length */
138 CF_INLINE CFIndex
__CFDataCapacity(CFDataRef data
) {
139 return data
->_capacity
;
142 CF_INLINE
void __CFDataSetCapacity(CFMutableDataRef data
, CFIndex v
) {
143 /* for a CFData, _bytesNum == _capacity */
146 CF_INLINE CFIndex
__CFDataNumBytesUsed(CFDataRef data
) {
147 return data
->_length
;
150 CF_INLINE
void __CFDataSetNumBytesUsed(CFMutableDataRef data
, CFIndex v
) {
154 CF_INLINE CFIndex
__CFDataNumBytes(CFDataRef data
) {
155 return data
->_capacity
;
158 CF_INLINE
void __CFDataSetNumBytes(CFMutableDataRef data
, CFIndex v
) {
/* Capacity-growth tuning. Below LOW_THRESHOLD capacities round up to a power
   of four; between the thresholds, to a power of two; above HIGH_THRESHOLD,
   to the next multiple of CHUNK_SIZE.
   NOTE(review): the #if/#else/#endif was lost in extraction; restored on the
   __LP64__ split implied by the two value sets. */
#if __LP64__
#define CHUNK_SIZE (1ULL << 29)
#define LOW_THRESHOLD (1ULL << 20)
#define HIGH_THRESHOLD (1ULL << 32)
#else
#define CHUNK_SIZE (1ULL << 26)
#define LOW_THRESHOLD (1ULL << 20)
#define HIGH_THRESHOLD (1ULL << 29)
#endif
172 CF_INLINE CFIndex
__CFDataRoundUpCapacity(CFIndex capacity
) {
175 } else if (capacity
< LOW_THRESHOLD
) {
177 long idx
= flsl(capacity
);
178 return (1L << (long)(idx
+ ((idx
% 2 == 0) ? 0 : 1)));
179 } else if (capacity
< HIGH_THRESHOLD
) {
181 return (1L << (long)flsl(capacity
));
183 /* Round up to next multiple of CHUNK_SIZE */
184 unsigned long newCapacity
= CHUNK_SIZE
* (1+(capacity
>> ((long)flsl(CHUNK_SIZE
)-1)));
185 return __CFMin(newCapacity
, CFDATA_MAX_SIZE
);
189 CF_INLINE CFIndex
__CFDataNumBytesForCapacity(CFIndex capacity
) {
193 static void __CFDataHandleOutOfMemory(CFTypeRef obj
, CFIndex numBytes
) {
195 if(0 < numBytes
&& numBytes
<= CFDATA_MAX_SIZE
) {
196 msg
= CFStringCreateWithFormat(kCFAllocatorSystemDefault
, NULL
, CFSTR("Attempt to allocate %ld bytes for NS/CFData failed"), numBytes
);
198 msg
= CFStringCreateWithFormat(kCFAllocatorSystemDefault
, NULL
, CFSTR("Attempt to allocate %ld bytes for NS/CFData failed. Maximum size: %ld"), numBytes
, CFDATA_MAX_SIZE
);
201 CFLog(kCFLogLevelCritical
, CFSTR("%@"), msg
);
/* Debug-only range validation; compiles away to nothing in release builds. */
#if defined(DEBUG)
CF_INLINE void __CFDataValidateRange(CFDataRef data, CFRange range, const char *func) {
    CFAssert2(0 <= range.location && range.location <= __CFDataLength(data), __kCFLogAssertion, "%s(): range.location index (%d) out of bounds", func, range.location);
    CFAssert2(0 <= range.length, __kCFLogAssertion, "%s(): length (%d) cannot be less than zero", func, range.length);
    CFAssert2(range.location + range.length <= __CFDataLength(data), __kCFLogAssertion, "%s(): ending index (%d) out of bounds", func, range.location + range.length);
}
#else
#define __CFDataValidateRange(a,r,f)
#endif
217 static Boolean
__CFDataEqual(CFTypeRef cf1
, CFTypeRef cf2
) {
218 CFDataRef data1
= (CFDataRef
)cf1
;
219 CFDataRef data2
= (CFDataRef
)cf2
;
221 length
= __CFDataLength(data1
);
222 if (length
!= __CFDataLength(data2
)) return false;
223 const uint8_t *bytePtr1
= CFDataGetBytePtr(data1
);
224 const uint8_t *bytePtr2
= CFDataGetBytePtr(data2
);
225 return 0 == memcmp(bytePtr1
, bytePtr2
, length
);
228 static CFHashCode
__CFDataHash(CFTypeRef cf
) {
229 CFDataRef data
= (CFDataRef
)cf
;
230 return CFHashBytes((uint8_t *)CFDataGetBytePtr(data
), __CFMin(__CFDataLength(data
), 80));
233 static CFStringRef
__CFDataCopyDescription(CFTypeRef cf
) {
234 CFDataRef data
= (CFDataRef
)cf
;
235 CFMutableStringRef result
;
238 const uint8_t *bytes
;
239 len
= __CFDataLength(data
);
240 bytes
= CFDataGetBytePtr(data
);
241 result
= CFStringCreateMutable(CFGetAllocator(data
), 0);
242 CFStringAppendFormat(result
, NULL
, CFSTR("<CFData %p [%p]>{length = %u, capacity = %u, bytes = 0x"), cf
, CFGetAllocator(data
), len
, __CFDataCapacity(data
));
244 for (idx
= 0; idx
< 16; idx
+= 4) {
245 CFStringAppendFormat(result
, NULL
, CFSTR("%02x%02x%02x%02x"), bytes
[idx
], bytes
[idx
+ 1], bytes
[idx
+ 2], bytes
[idx
+ 3]);
247 CFStringAppend(result
, CFSTR(" ... "));
248 for (idx
= len
- 8; idx
< len
; idx
+= 4) {
249 CFStringAppendFormat(result
, NULL
, CFSTR("%02x%02x%02x%02x"), bytes
[idx
], bytes
[idx
+ 1], bytes
[idx
+ 2], bytes
[idx
+ 3]);
252 for (idx
= 0; idx
< len
; idx
++) {
253 CFStringAppendFormat(result
, NULL
, CFSTR("%02x"), bytes
[idx
]);
256 CFStringAppend(result
, CFSTR("}"));
260 static void *__CFDataInlineBytesPtr(CFDataRef data
) {
261 return (void *)((uintptr_t)((int8_t *)data
+ sizeof(struct __CFData
) + 15) & ~0xF); // 16-byte align
264 static Boolean
__CFDataShouldAllocateCleared(CFDataRef data
, CFIndex size
) {
266 if (__CFDataUseAllocator(data
)) {
269 if (__CFDataAllocatesCollectable(data
)) {
273 result
= (size
> (64 * 1024));
276 result
= (size
> (128 * 1024));
283 // Check __CFDataShouldAllocateCleared before passing true.
284 static void *__CFDataAllocate(CFDataRef data
, CFIndex size
, Boolean clear
) {
286 if (__CFDataUseAllocator(data
)) {
287 CFAllocatorRef allocator
= __CFGetAllocator(data
);
288 bytes
= CFAllocatorAllocate(allocator
, size
, 0);
289 if (clear
) memset((uint8_t *)bytes
, 0, size
);
291 if (__CFDataAllocatesCollectable(data
)) {
292 bytes
= auto_zone_allocate_object(objc_collectableZone(), size
, AUTO_MEMORY_UNSCANNED
, 0, clear
);
295 bytes
= calloc(1, size
);
297 bytes
= malloc(size
);
304 static void __CFDataDeallocate(CFTypeRef cf
) {
305 CFMutableDataRef data
= (CFMutableDataRef
)cf
;
306 if (!__CFDataBytesInline(data
)) {
307 CFAllocatorRef deallocator
= data
->_bytesDeallocator
;
308 if (deallocator
!= NULL
) {
309 _CFAllocatorDeallocateGC(deallocator
, data
->_bytes
);
310 if (!_CFAllocatorIsGCRefZero(deallocator
)) CFRelease(deallocator
);
313 if (__CFDataUseAllocator(data
)) {
314 _CFAllocatorDeallocateGC(__CFGetAllocator(data
), data
->_bytes
);
315 } else if (!__CFDataAllocatesCollectable(data
) && data
->_bytes
) {
323 static CFTypeID __kCFDataTypeID
= _kCFRuntimeNotATypeID
;
325 static const CFRuntimeClass __CFDataClass
= {
326 _kCFRuntimeScannedObject
,
334 __CFDataCopyDescription
337 __private_extern__
void __CFDataInitialize(void) {
338 __kCFDataTypeID
= _CFRuntimeRegisterClass(&__CFDataClass
);
341 CFTypeID
CFDataGetTypeID(void) {
342 return __kCFDataTypeID
;
346 // NULL bytesDeallocator to this function does not mean the default allocator, it means
347 // that there should be no deallocator, and the bytes should be copied.
348 static CFMutableDataRef
__CFDataInit(CFAllocatorRef allocator
, CFOptionFlags flags
, CFIndex capacity
, const uint8_t *bytes
, CFIndex length
, CFAllocatorRef bytesDeallocator
) {
349 CFMutableDataRef memory
;
350 __CFGenericValidateMutabilityFlags(flags
);
351 CFAssert2(0 <= capacity
, __kCFLogAssertion
, "%s(): capacity (%d) cannot be less than zero", __PRETTY_FUNCTION__
, capacity
);
352 CFAssert3(kCFFixedMutable
!= __CFMutableVarietyFromFlags(flags
) || length
<= capacity
, __kCFLogAssertion
, "%s(): for kCFFixedMutable type, capacity (%d) must be greater than or equal to number of initial elements (%d)", __PRETTY_FUNCTION__
, capacity
, length
);
353 CFAssert2(0 <= length
, __kCFLogAssertion
, "%s(): length (%d) cannot be less than zero", __PRETTY_FUNCTION__
, length
);
355 Boolean collectableMemory
= CF_IS_COLLECTABLE_ALLOCATOR(allocator
);
356 Boolean noCopy
= bytesDeallocator
!= NULL
;
357 Boolean isMutable
= ((flags
& __kCFMutable
) != 0);
358 Boolean isGrowable
= ((flags
& __kCFGrowable
) != 0);
359 Boolean allocateInline
= !isGrowable
&& !noCopy
&& capacity
< INLINE_BYTES_THRESHOLD
;
360 allocator
= (allocator
== NULL
) ? __CFGetDefaultAllocator() : allocator
;
361 Boolean useAllocator
= (allocator
!= kCFAllocatorSystemDefault
&& allocator
!= kCFAllocatorMalloc
&& allocator
!= kCFAllocatorMallocZone
);
363 CFIndex size
= sizeof(struct __CFData
) - sizeof(CFRuntimeBase
);
364 if (allocateInline
) {
365 size
+= sizeof(uint8_t) * __CFDataNumBytesForCapacity(capacity
) + sizeof(uint8_t) * 15; // for 16-byte alignment fixup
367 memory
= (CFMutableDataRef
)_CFRuntimeCreateInstance(allocator
, __kCFDataTypeID
, size
, NULL
);
368 if (NULL
== memory
) {
371 __CFDataSetNumBytesUsed(memory
, 0);
372 __CFDataSetLength(memory
, 0);
373 __CFDataSetInfoBits(memory
,
374 (allocateInline
? __kCFBytesInline
: 0) |
375 (useAllocator
? __kCFUseAllocator
: 0) |
376 (collectableMemory
? __kCFAllocatesCollectable
: 0));
380 if (collectableMemory
) {
381 if (allocateInline
) {
382 // We have no pointer to anything that needs to be reclaimed, so don't scan or finalize.
386 if (CF_IS_COLLECTABLE_ALLOCATOR(bytesDeallocator
)) {
387 // We're taking responsibility for externally GC-allocated memory, so scan us, but we don't need to finalize.
389 } else if (bytesDeallocator
== kCFAllocatorNull
) {
390 // We don't have responsibility for these bytes, so there's no need to be scanned and we don't need to finalize.
394 // We have a pointer to non-GC-allocated memory, so don't scan, but do finalize.
398 if (!scan
) auto_zone_set_unscanned(objc_collectableZone(), memory
);
399 if (!finalize
) auto_zone_set_nofinalize(objc_collectableZone(), memory
);
401 if (isMutable
&& isGrowable
) {
402 __CFDataSetCapacity(memory
, __CFDataRoundUpCapacity(1));
403 __CFDataSetNumBytes(memory
, __CFDataNumBytesForCapacity(__CFDataRoundUpCapacity(1)));
404 __CFSetMutableVariety(memory
, kCFMutable
);
406 /* Don't round up capacity */
407 __CFDataSetCapacity(memory
, capacity
);
408 __CFDataSetNumBytes(memory
, __CFDataNumBytesForCapacity(capacity
));
409 __CFSetMutableVariety(memory
, kCFFixedMutable
);
412 __CFAssignWithWriteBarrier((void **)&memory
->_bytes
, (uint8_t *)bytes
);
414 if (_CFAllocatorIsGCRefZero(bytesDeallocator
)) {
415 memory
->_bytesDeallocator
= bytesDeallocator
;
417 memory
->_bytesDeallocator
= (CFAllocatorRef
)CFRetain(_CFConvertAllocatorToNonGCRefZeroEquivalent(bytesDeallocator
));
420 if (CF_IS_COLLECTABLE_ALLOCATOR(bytesDeallocator
) && !_CFAllocatorIsGCRefZero(bytesDeallocator
)) {
421 // When given a GC allocator which is not one of the GCRefZero ones as the deallocator, we assume that the no-copy memory is GC-allocated with a retain count of (at least) 1 and we should release it now instead of waiting until __CFDataDeallocate.
422 auto_zone_release(objc_collectableZone(), memory
->_bytes
);
424 __CFDataSetNumBytesUsed(memory
, length
);
425 __CFDataSetLength(memory
, length
);
426 // Mutable no-copy datas are not allowed, so don't bother setting needsToZero flag.
428 Boolean cleared
= (isMutable
&& !isGrowable
&& !_CFExecutableLinkedOnOrAfter(CFSystemVersionSnowLeopard
));
429 if (!allocateInline
) {
430 // assume that allocators give 16-byte aligned memory back -- it is their responsibility
431 __CFAssignWithWriteBarrier((void **)&memory
->_bytes
, __CFDataAllocate(memory
, __CFDataNumBytes(memory
) * sizeof(uint8_t), cleared
));
432 if (__CFOASafe
) __CFSetLastAllocationEventName(memory
->_bytes
, "CFData (store)");
433 if (NULL
== memory
->_bytes
) {
438 if (length
== 0 && !isMutable
) {
439 // NSData sets its bytes pointer to NULL when its length is zero. Starting in 10.7 we do the same for CFData.
440 memory
->_bytes
= NULL
;
441 // It is important to set this data as not inlined, so we do not recalculate a bytes pointer from null.
442 __CFDataSetInline(memory
, false);
446 __CFDataSetNeedsToZero(memory
, !cleared
);
447 memory
->_bytesDeallocator
= NULL
;
448 CFDataReplaceBytes(memory
, CFRangeMake(0, 0), bytes
, length
);
450 __CFSetMutableVariety(memory
, __CFMutableVarietyFromFlags(flags
));
454 CFDataRef
CFDataCreate(CFAllocatorRef allocator
, const uint8_t *bytes
, CFIndex length
) {
455 return __CFDataInit(allocator
, kCFImmutable
, length
, bytes
, length
, NULL
);
458 CFDataRef
CFDataCreateWithBytesNoCopy(CFAllocatorRef allocator
, const uint8_t *bytes
, CFIndex length
, CFAllocatorRef bytesDeallocator
) {
459 CFAssert1((0 == length
|| bytes
!= NULL
), __kCFLogAssertion
, "%s(): bytes pointer cannot be NULL if length is non-zero", __PRETTY_FUNCTION__
);
460 if (NULL
== bytesDeallocator
) bytesDeallocator
= __CFGetDefaultAllocator();
461 return __CFDataInit(allocator
, kCFImmutable
, length
, bytes
, length
, bytesDeallocator
);
464 CFDataRef
CFDataCreateCopy(CFAllocatorRef allocator
, CFDataRef data
) {
465 CFIndex length
= CFDataGetLength(data
);
466 return __CFDataInit(allocator
, kCFImmutable
, length
, CFDataGetBytePtr(data
), length
, NULL
);
469 CFMutableDataRef
CFDataCreateMutable(CFAllocatorRef allocator
, CFIndex capacity
) {
470 // Do not allow magic allocator for now for mutable datas, because it
471 // isn't remembered for proper handling later when growth of the buffer
473 Boolean wasMagic
= _CFAllocatorIsGCRefZero(allocator
);
474 if (0 == capacity
) allocator
= _CFConvertAllocatorToNonGCRefZeroEquivalent(allocator
);
475 CFMutableDataRef r
= (CFMutableDataRef
)__CFDataInit(allocator
, (0 == capacity
) ? kCFMutable
: kCFFixedMutable
, capacity
, NULL
, 0, NULL
);
476 if (wasMagic
) CFMakeCollectable(r
);
480 CFMutableDataRef
CFDataCreateMutableCopy(CFAllocatorRef allocator
, CFIndex capacity
, CFDataRef data
) {
481 // Do not allow magic allocator for now for mutable datas, because it
482 // isn't remembered for proper handling later when growth of the buffer
484 Boolean wasMagic
= _CFAllocatorIsGCRefZero(allocator
);
485 if (0 == capacity
) allocator
= _CFConvertAllocatorToNonGCRefZeroEquivalent(allocator
);
486 CFMutableDataRef r
= (CFMutableDataRef
) __CFDataInit(allocator
, (0 == capacity
) ? kCFMutable
: kCFFixedMutable
, capacity
, CFDataGetBytePtr(data
), CFDataGetLength(data
), NULL
);
487 if (wasMagic
) CFMakeCollectable(r
);
491 CFIndex
CFDataGetLength(CFDataRef data
) {
492 CF_OBJC_FUNCDISPATCHV(__kCFDataTypeID
, CFIndex
, (NSData
*)data
, length
);
493 __CFGenericValidateType(data
, __kCFDataTypeID
);
494 return __CFDataLength(data
);
497 const uint8_t *CFDataGetBytePtr(CFDataRef data
) {
498 CF_OBJC_FUNCDISPATCHV(__kCFDataTypeID
, const uint8_t *, (NSData
*)data
, bytes
);
499 __CFGenericValidateType(data
, __kCFDataTypeID
);
500 // compaction: if inline, always do the computation.
501 return __CFDataBytesInline(data
) ? (uint8_t *)__CFDataInlineBytesPtr(data
) : data
->_bytes
;
504 uint8_t *CFDataGetMutableBytePtr(CFMutableDataRef data
) {
505 CF_OBJC_FUNCDISPATCHV(__kCFDataTypeID
, uint8_t *, (NSMutableData
*)data
, mutableBytes
);
506 CFAssert1(__CFDataIsMutable(data
), __kCFLogAssertion
, "%s(): data is immutable", __PRETTY_FUNCTION__
);
507 // compaction: if inline, always do the computation.
508 return __CFDataBytesInline(data
) ? (uint8_t *)__CFDataInlineBytesPtr(data
) : data
->_bytes
;
511 void CFDataGetBytes(CFDataRef data
, CFRange range
, uint8_t *buffer
) {
512 CF_OBJC_FUNCDISPATCHV(__kCFDataTypeID
, void, (NSData
*)data
, getBytes
:(void *)buffer range
:NSMakeRange(range
.location
, range
.length
));
513 __CFDataValidateRange(data
, range
, __PRETTY_FUNCTION__
);
514 memmove(buffer
, CFDataGetBytePtr(data
) + range
.location
, range
.length
);
517 /* Allocates new block of data with at least numNewValues more bytes than the current length. If clear is true, the new bytes up to at least the new length with be zeroed. */
518 static void __CFDataGrow(CFMutableDataRef data
, CFIndex numNewValues
, Boolean clear
) {
519 CFIndex oldLength
= __CFDataLength(data
);
520 CFIndex newLength
= oldLength
+ numNewValues
;
521 if (newLength
> CFDATA_MAX_SIZE
|| newLength
< 0) __CFDataHandleOutOfMemory(data
, newLength
* sizeof(uint8_t));
522 CFIndex capacity
= __CFDataRoundUpCapacity(newLength
);
523 CFIndex numBytes
= __CFDataNumBytesForCapacity(capacity
);
524 CFAllocatorRef allocator
= CFGetAllocator(data
);
526 void *oldBytes
= data
->_bytes
;
527 Boolean allocateCleared
= clear
&& __CFDataShouldAllocateCleared(data
, numBytes
);
528 if (allocateCleared
&& !__CFDataUseAllocator(data
) && (oldLength
== 0 || (newLength
/ oldLength
) > 4)) {
529 // If the length that needs to be zeroed is significantly greater than the length of the data, then calloc/memmove is probably more efficient than realloc/memset.
530 bytes
= __CFDataAllocate(data
, numBytes
* sizeof(uint8_t), true);
532 memmove(bytes
, oldBytes
, oldLength
);
533 __CFDataDeallocate(data
);
537 // If the calloc/memmove approach either failed or was never attempted, then realloc.
538 allocateCleared
= false;
539 if (__CFDataUseAllocator(data
)) {
540 bytes
= CFAllocatorReallocate(allocator
, oldBytes
, numBytes
* sizeof(uint8_t), 0);
542 bytes
= realloc(oldBytes
, numBytes
* sizeof(uint8_t));
545 if (NULL
== bytes
) __CFDataHandleOutOfMemory(data
, numBytes
* sizeof(uint8_t));
546 __CFDataSetCapacity(data
, capacity
);
547 __CFDataSetNumBytes(data
, numBytes
);
548 if (clear
&& !allocateCleared
&& oldLength
< newLength
) memset((uint8_t *)bytes
+ oldLength
, 0, newLength
- oldLength
);
549 __CFDataSetNeedsToZero(data
, !allocateCleared
);
550 __CFAssignWithWriteBarrier((void **)&data
->_bytes
, bytes
);
551 if (__CFOASafe
) __CFSetLastAllocationEventName(data
->_bytes
, "CFData (store)");
554 void CFDataSetLength(CFMutableDataRef data
, CFIndex newLength
) {
555 CFIndex oldLength
, capacity
;
557 CF_OBJC_FUNCDISPATCHV(__kCFDataTypeID
, void, (NSMutableData
*)data
, setLength
:(NSUInteger
)newLength
);
558 CFAssert1(__CFDataIsMutable(data
), __kCFLogAssertion
, "%s(): data is immutable", __PRETTY_FUNCTION__
);
559 oldLength
= __CFDataLength(data
);
560 capacity
= __CFDataCapacity(data
);
561 isGrowable
= __CFDataIsGrowable(data
);
562 if (__CFDataIsMutable(data
)) {
565 __CFDataHandleOutOfMemory(data
, newLength
);
569 } else if (capacity
< newLength
) {
571 __CFDataGrow(data
, newLength
- oldLength
, true);
573 CFAssert1(newLength
<= __CFDataCapacity(data
), __kCFLogAssertion
, "%s(): fixed-capacity data is full", __PRETTY_FUNCTION__
);
575 } else if (oldLength
< newLength
&& __CFDataNeedsToZero(data
)) {
576 memset(CFDataGetMutableBytePtr(data
) + oldLength
, 0, newLength
- oldLength
);
577 } else if (newLength
< oldLength
) {
578 __CFDataSetNeedsToZero(data
, true);
581 __CFDataSetLength(data
, newLength
);
582 __CFDataSetNumBytesUsed(data
, newLength
);
585 void CFDataIncreaseLength(CFMutableDataRef data
, CFIndex extraLength
) {
586 CF_OBJC_FUNCDISPATCHV(__kCFDataTypeID
, void, (NSMutableData
*)data
, increaseLengthBy
:(NSUInteger
)extraLength
);
587 CFAssert1(__CFDataIsMutable(data
), __kCFLogAssertion
, "%s(): data is immutable", __PRETTY_FUNCTION__
);
588 if (extraLength
< 0) HALT
; // Avoid integer overflow.
589 CFDataSetLength(data
, __CFDataLength(data
) + extraLength
);
592 void CFDataAppendBytes(CFMutableDataRef data
, const uint8_t *bytes
, CFIndex length
) {
593 CF_OBJC_FUNCDISPATCHV(__kCFDataTypeID
, void, (NSMutableData
*)data
, appendBytes
:(const void *)bytes length
:(NSUInteger
)length
);
594 CFAssert1(__CFDataIsMutable(data
), __kCFLogAssertion
, "%s(): data is immutable", __PRETTY_FUNCTION__
);
595 CFDataReplaceBytes(data
, CFRangeMake(__CFDataLength(data
), 0), bytes
, length
);
598 void CFDataDeleteBytes(CFMutableDataRef data
, CFRange range
) {
599 CF_OBJC_FUNCDISPATCHV(__kCFDataTypeID
, void, (NSMutableData
*)data
, replaceBytesInRange
:NSMakeRange(range
.location
, range
.length
) withBytes
:NULL length
:0);
600 CFAssert1(__CFDataIsMutable(data
), __kCFLogAssertion
, "%s(): data is immutable", __PRETTY_FUNCTION__
);
601 CFDataReplaceBytes(data
, range
, NULL
, 0);
604 void CFDataReplaceBytes(CFMutableDataRef data
, CFRange range
, const uint8_t *newBytes
, CFIndex newLength
) {
605 CF_OBJC_FUNCDISPATCHV(__kCFDataTypeID
, void, (NSMutableData
*)data
, replaceBytesInRange
:NSMakeRange(range
.location
, range
.length
) withBytes
:(const void *)newBytes length
:(NSUInteger
)newLength
);
606 __CFGenericValidateType(data
, __kCFDataTypeID
);
607 __CFDataValidateRange(data
, range
, __PRETTY_FUNCTION__
);
608 CFAssert1(__CFDataIsMutable(data
), __kCFLogAssertion
, "%s(): data is immutable", __PRETTY_FUNCTION__
);
609 CFAssert2(0 <= newLength
, __kCFLogAssertion
, "%s(): newLength (%d) cannot be less than zero", __PRETTY_FUNCTION__
, newLength
);
611 CFIndex len
= __CFDataLength(data
);
612 if (len
< 0 || range
.length
< 0 || newLength
< 0) HALT
;
613 CFIndex newCount
= len
- range
.length
+ newLength
;
614 if (newCount
< 0) HALT
;
616 uint8_t *bytePtr
= (uint8_t *)CFDataGetMutableBytePtr(data
);
617 uint8_t *srcBuf
= (uint8_t *)newBytes
;
618 switch (__CFMutableVariety(data
)) {
620 if (__CFDataNumBytes(data
) < newCount
) {
621 if (bytePtr
&& newBytes
&& newBytes
< bytePtr
+ __CFDataCapacity(data
) && bytePtr
< newBytes
+ newLength
) {
622 srcBuf
= (uint8_t *)malloc(newLength
* sizeof(uint8_t));
623 memmove(srcBuf
, newBytes
, newLength
* sizeof(uint8_t));
625 __CFDataGrow(data
, newLength
- range
.length
, false);
626 bytePtr
= (uint8_t *)CFDataGetMutableBytePtr(data
);
629 case kCFFixedMutable
:
630 CFAssert1(newCount
<= __CFDataCapacity(data
), __kCFLogAssertion
, "%s(): fixed-capacity data is full", __PRETTY_FUNCTION__
);
631 // Continuing after this could cause buffer overruns.
632 if (newCount
> __CFDataCapacity(data
)) HALT
;
635 if (newLength
!= range
.length
&& range
.location
+ range
.length
< len
) {
636 memmove(bytePtr
+ range
.location
+ newLength
, bytePtr
+ range
.location
+ range
.length
, (len
- range
.location
- range
.length
) * sizeof(uint8_t));
639 memmove(bytePtr
+ range
.location
, srcBuf
, newLength
* sizeof(uint8_t));
641 if (srcBuf
!= newBytes
) free(srcBuf
);
642 __CFDataSetNumBytesUsed(data
, newCount
);
643 __CFDataSetLength(data
, newCount
);
/* In-place reversal of a buffer of `len` elements of `type`. */
#define REVERSE_BUFFER(type, buf, len) { \
    type tmp; \
    for(int i = 0; i < (len)/2; i++) { \
        tmp = (buf)[i]; \
        (buf)[i] = (buf)[(len) - i - 1]; \
        (buf)[(len) - i - 1] = tmp; \
    } \
}
/* Boyer-Moore good-suffix preprocessing. Fills `shift` (indexed by the needle
   position where a mismatch occurred) and the scratch table `suff` (length of
   the longest suffix of the needle ending at each position). The final loop
   folds in the distance back to the end of the needle, since the scan below
   moves its pointer backwards through the candidate substring. */
static void _computeGoodSubstringShift(const uint8_t *needle, int needleLength, unsigned long shift[], unsigned long suff[]) {
    int f = 0, g, i, j;

    // Compute suffix lengths

    suff[needleLength - 1] = needleLength;
    f = g = needleLength - 1;
    for (i = needleLength - 2; i >= 0; --i) {
        if (i > g && suff[i + needleLength - 1 - f] < i - g)
            suff[i] = suff[i + needleLength - 1 - f];
        else {
            if (i < g)
                g = i;
            f = i;
            while (g >= 0 && needle[g] == needle[g + needleLength - 1 - f])
                --g;
            suff[i] = f - g;
        }
    }

    // Compute shift table

    for (i = 0; i < needleLength; ++i)
        shift[i] = needleLength;
    j = 0;
    for (i = needleLength - 1; i >= 0; --i)
        if (suff[i] == i + 1)
            for (; j < needleLength - 1 - i; ++j)
                if (shift[j] == needleLength)
                    shift[j] = needleLength - 1 - i;
    // Set the amount of shift necessary to move each of the suffix matches found into a position where it overlaps with the suffix. If there are duplicate matches the latest one is the one that should take effect.
    for (i = 0; i <= needleLength - 2; ++i)
        shift[needleLength - 1 - suff[i]] = needleLength - 1 - i;
    // Since the Boyer-Moore algorithm moves the pointer back while scanning substrings, add the distance to the end of the potential substring.
    for (i = 0; i < needleLength - 1; ++i) {
        shift[i] += (needleLength - 1 - i);
    }
}
694 static const uint8_t * __CFDataSearchBoyerMoore(const CFDataRef data
, const uint8_t *haystack
, unsigned long haystackLength
, const uint8_t *needle
, unsigned long needleLength
, Boolean backwards
) {
695 unsigned long badCharacterShift
[UCHAR_MAX
+ 1] = {0};
696 unsigned long *goodSubstringShift
= (unsigned long *)malloc(needleLength
* sizeof(unsigned long));
697 unsigned long *suffixLengths
= (unsigned long *)malloc(needleLength
* sizeof(unsigned long));
698 if (!goodSubstringShift
|| !suffixLengths
) {
699 __CFDataHandleOutOfMemory(data
, needleLength
* sizeof(unsigned long));
703 for (int i
= 0; i
< sizeof(badCharacterShift
) / sizeof(*badCharacterShift
); i
++)
704 badCharacterShift
[i
] = needleLength
;
706 for (int i
= needleLength
- 1; i
>= 0; i
--)
707 badCharacterShift
[needle
[i
]] = i
;
709 // To get the correct shift table for backwards search reverse the needle, compute the forwards shift table, and then reverse the result.
710 uint8_t *needleCopy
= (uint8_t *)malloc(needleLength
* sizeof(uint8_t));
712 __CFDataHandleOutOfMemory(data
, needleLength
* sizeof(uint8_t));
714 memmove(needleCopy
, needle
, needleLength
);
715 REVERSE_BUFFER(uint8_t, needleCopy
, needleLength
);
716 _computeGoodSubstringShift(needleCopy
, needleLength
, goodSubstringShift
, suffixLengths
);
717 REVERSE_BUFFER(unsigned long, goodSubstringShift
, needleLength
);
720 for (int i
= 0; i
< sizeof(badCharacterShift
) / sizeof(*badCharacterShift
); i
++)
721 badCharacterShift
[i
] = needleLength
;
723 for (int i
= 0; i
< needleLength
; i
++)
724 badCharacterShift
[needle
[i
]] = needleLength
- i
- 1;
726 _computeGoodSubstringShift(needle
, needleLength
, goodSubstringShift
, suffixLengths
);
729 const uint8_t *scan_needle
;
730 const uint8_t *scan_haystack
;
731 const uint8_t *result
= NULL
;
733 const uint8_t *const end_needle
= needle
+ needleLength
;
734 scan_needle
= needle
;
735 scan_haystack
= haystack
+ haystackLength
- needleLength
;
736 while (scan_haystack
>= haystack
&& scan_needle
< end_needle
) {
737 if (*scan_haystack
== *scan_needle
) {
741 scan_haystack
-= __CFMax(badCharacterShift
[*scan_haystack
], goodSubstringShift
[scan_needle
- needle
]);
742 scan_needle
= needle
;
745 if (scan_needle
== end_needle
) {
746 result
= (scan_haystack
- needleLength
);
749 const uint8_t *const end_haystack
= haystack
+ haystackLength
;
750 scan_needle
= needle
+ needleLength
- 1;
751 scan_haystack
= haystack
+ needleLength
- 1;
752 while (scan_haystack
< end_haystack
&& scan_needle
>= needle
) {
753 if (*scan_haystack
== *scan_needle
) {
757 scan_haystack
+= __CFMax(badCharacterShift
[*scan_haystack
], goodSubstringShift
[scan_needle
- needle
]);
758 scan_needle
= needle
+ needleLength
- 1;
761 if (scan_needle
< needle
) {
762 result
= (scan_haystack
+ 1);
766 free(goodSubstringShift
);
772 CFRange
_CFDataFindBytes(CFDataRef data
, CFDataRef dataToFind
, CFRange searchRange
, CFDataSearchFlags compareOptions
) {
773 const uint8_t *fullHaystack
= CFDataGetBytePtr(data
);
774 const uint8_t *needle
= CFDataGetBytePtr(dataToFind
);
775 unsigned long fullHaystackLength
= CFDataGetLength(data
);
776 unsigned long needleLength
= CFDataGetLength(dataToFind
);
778 if(compareOptions
& kCFDataSearchAnchored
) {
779 if(searchRange
.length
> needleLength
) {
780 if(compareOptions
& kCFDataSearchBackwards
) {
781 searchRange
.location
+= (searchRange
.length
- needleLength
);
783 searchRange
.length
= needleLength
;
786 if(searchRange
.length
> fullHaystackLength
- searchRange
.location
) {
787 searchRange
.length
= fullHaystackLength
- searchRange
.location
;
790 if(searchRange
.length
< needleLength
|| fullHaystackLength
== 0 || needleLength
== 0) {
791 return CFRangeMake(kCFNotFound
, 0);
794 const uint8_t *haystack
= fullHaystack
+ searchRange
.location
;
795 const uint8_t *searchResult
= __CFDataSearchBoyerMoore(data
, haystack
, searchRange
.length
, needle
, needleLength
, (compareOptions
& kCFDataSearchBackwards
) != 0);
796 CFIndex resultLocation
= (searchResult
== NULL
) ? kCFNotFound
: searchRange
.location
+ (searchResult
- haystack
);
798 return CFRangeMake(resultLocation
, resultLocation
== kCFNotFound
? 0: needleLength
);
801 CFRange
CFDataFind(CFDataRef data
, CFDataRef dataToFind
, CFRange searchRange
, CFDataSearchFlags compareOptions
) {
803 __CFGenericValidateType(data
, __kCFDataTypeID
);
804 __CFGenericValidateType(dataToFind
, __kCFDataTypeID
);
805 __CFDataValidateRange(data
, searchRange
, __PRETTY_FUNCTION__
);
807 return _CFDataFindBytes(data
, dataToFind
, searchRange
, compareOptions
);
/* Scrub file-local macros so they cannot leak into any following code. */
#undef __CFDataValidateRange
#undef __CFGenericValidateMutabilityFlags
#undef INLINE_BYTES_THRESHOLD
#undef CFDATA_MAX_SIZE
#undef REVERSE_BUFFER