/*
 * Copyright (c) 2015 Apple Inc. All rights reserved.
 *
 * @APPLE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this
 * file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_LICENSE_HEADER_END@
 */

/*  CFData.c
    Copyright (c) 1998-2014, Apple Inc. All rights reserved.
    Responsibility: Kevin Perry
*/

#include <CoreFoundation/CFData.h>
#include <CoreFoundation/CFPriv.h>
#include "CFInternal.h"
#include <string.h>



#if __LP64__
#define CFDATA_MAX_SIZE ((1ULL << 42) - 1)
#else
#define CFDATA_MAX_SIZE ((1ULL << 31) - 1)
#endif

#if DEPLOYMENT_TARGET_MACOSX || DEPLOYMENT_TARGET_EMBEDDED || DEPLOYMENT_TARGET_EMBEDDED_MINI
#import <mach/mach.h>
CF_INLINE unsigned long __CFPageSize() { return vm_page_size; }
#elif DEPLOYMENT_TARGET_WINDOWS
CF_INLINE unsigned long __CFPageSize() {
    SYSTEM_INFO sysInfo;
    GetSystemInfo(&sysInfo);
    return sysInfo.dwPageSize;
}
#elif DEPLOYMENT_TARGET_LINUX
#include <unistd.h>
CF_INLINE unsigned long __CFPageSize() {
    return (unsigned long)getpagesize();
}
#endif

#define INLINE_BYTES_THRESHOLD ((4 * __CFPageSize()) - sizeof(struct __CFData) - 15)

struct __CFData {
    CFRuntimeBase _base;
    CFIndex _length;                    /* number of bytes */
    CFIndex _capacity;                  /* maximum number of bytes */
    CFAllocatorRef _bytesDeallocator;   /* used only for immutable; if NULL, no deallocation */
    uint8_t *_bytes;                    /* compaction: direct access to _bytes is only valid when data is not inline */
};

/*
 Bit 0 = is mutable
 Bit 1 = growable
 Bit 2 = bytes inline
 Bit 3 = use given CFAllocator
 Bit 5 = allocate collectable memory

 Bits 1-0 are used for mutability variation

 Bit 6 = not all bytes have been zeroed yet (mutable)
*/

enum {
    __kCFMutable = 0x01,
    __kCFGrowable = 0x02,
    __kCFMutableVarietyMask = 0x03,
    __kCFBytesInline = 0x04,
    __kCFUseAllocator = 0x08,
    __kCFAllocatesCollectable = 0x20,
};

enum {
    kCFImmutable = 0x0,     /* unchangeable and fixed capacity; default */
    kCFFixedMutable = 0x1,  /* changeable and fixed capacity */
    kCFMutable = 0x3        /* changeable and variable capacity */
};

CF_INLINE void __CFDataSetInfoBits(CFDataRef data, UInt32 v) {__CFBitfieldSetValue(((CFRuntimeBase *)data)->_cfinfo[CF_INFO_BITS], 5, 0, v);}
CF_INLINE Boolean __CFDataGetInfoBit(CFDataRef data, UInt32 b) {return ((((const CFRuntimeBase *)data)->_cfinfo[CF_INFO_BITS] & b) != 0);}
CF_INLINE Boolean __CFDataIsMutable(CFDataRef data) {return __CFDataGetInfoBit(data, __kCFMutable);}
CF_INLINE Boolean __CFDataIsGrowable(CFDataRef data) {return __CFDataGetInfoBit(data, __kCFGrowable);}
CF_INLINE Boolean __CFDataBytesInline(CFDataRef data) {return __CFDataGetInfoBit(data, __kCFBytesInline);}
CF_INLINE Boolean __CFDataUseAllocator(CFDataRef data) {return __CFDataGetInfoBit(data, __kCFUseAllocator);}
CF_INLINE Boolean __CFDataAllocatesCollectable(CFDataRef data) {return __CFDataGetInfoBit(data, __kCFAllocatesCollectable);}

CF_INLINE UInt32 __CFMutableVariety(const void *cf) {
    return __CFBitfieldGetValue(((const CFRuntimeBase *)cf)->_cfinfo[CF_INFO_BITS], 1, 0);
}

CF_INLINE void __CFSetMutableVariety(void *cf, UInt32 v) {
    __CFBitfieldSetValue(((CFRuntimeBase *)cf)->_cfinfo[CF_INFO_BITS], 1, 0, v);
}

CF_INLINE UInt32 __CFMutableVarietyFromFlags(UInt32 flags) {
    return (flags & __kCFMutableVarietyMask);
}

#define __CFGenericValidateMutabilityFlags(flags) \
    CFAssert2(__CFMutableVarietyFromFlags(flags) != 0x2, __kCFLogAssertion, "%s(): flags 0x%x do not correctly specify the mutable variety", __PRETTY_FUNCTION__, flags);

CF_INLINE void __CFDataSetInline(CFDataRef data, Boolean flag) {
    __CFBitfieldSetValue(((CFRuntimeBase *)data)->_cfinfo[CF_INFO_BITS], 2, 2, (flag ? 1 : 0));
}

CF_INLINE Boolean __CFDataNeedsToZero(CFDataRef data) {
    return __CFBitfieldGetValue(((CFRuntimeBase *)data)->_cfinfo[CF_INFO_BITS], 6, 6);
}

CF_INLINE void __CFDataSetNeedsToZero(CFDataRef data, Boolean zero) {
    __CFBitfieldSetValue(((CFRuntimeBase *)data)->_cfinfo[CF_INFO_BITS], 6, 6, (zero ? 1 : 0));
}

CF_INLINE CFIndex __CFDataLength(CFDataRef data) {
    return data->_length;
}

CF_INLINE void __CFDataSetLength(CFMutableDataRef data, CFIndex v) {
    /* for a CFData, _bytesUsed == _length */
}

CF_INLINE CFIndex __CFDataCapacity(CFDataRef data) {
    return data->_capacity;
}

CF_INLINE void __CFDataSetCapacity(CFMutableDataRef data, CFIndex v) {
    /* for a CFData, _bytesNum == _capacity */
}

CF_INLINE void __CFDataSetNumBytesUsed(CFMutableDataRef data, CFIndex v) {
    data->_length = v;
}

CF_INLINE CFIndex __CFDataNumBytes(CFDataRef data) {
    return data->_capacity;
}

CF_INLINE void __CFDataSetNumBytes(CFMutableDataRef data, CFIndex v) {
    data->_capacity = v;
}

#if __LP64__
#define CHUNK_SIZE (1ULL << 29)
#define LOW_THRESHOLD (1ULL << 20)
#define HIGH_THRESHOLD (1ULL << 32)
#else
#define CHUNK_SIZE (1ULL << 26)
#define LOW_THRESHOLD (1ULL << 20)
#define HIGH_THRESHOLD (1ULL << 29)
#endif

CF_INLINE CFIndex __CFDataRoundUpCapacity(CFIndex capacity) {
    if (capacity < 16) {
        return 16;
    } else if (capacity < LOW_THRESHOLD) {
        /* Up to 4x */
        long idx = flsl(capacity);
        return (1L << (long)(idx + ((idx % 2 == 0) ? 0 : 1)));
    } else if (capacity < HIGH_THRESHOLD) {
        /* Up to 2x */
        return (1L << (long)flsl(capacity));
    } else {
        /* Round up to next multiple of CHUNK_SIZE */
        unsigned long newCapacity = CHUNK_SIZE * (1+(capacity >> ((long)flsl(CHUNK_SIZE)-1)));
        return __CFMin(newCapacity, CFDATA_MAX_SIZE);
    }
}
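
/*
 Worked examples for __CFDataRoundUpCapacity with the LP64 constants above
 (added as illustration; not part of the original source):
     capacity = 10        -> 16          (below the 16-byte minimum)
     capacity = 100       -> 256         (flsl(100) == 7, odd, so 1 << 8; the "up to 4x" branch rounds to a power of 4)
     capacity = 3,000,000 -> 4,194,304   (1 << flsl(capacity); the "up to 2x" branch rounds to the next power of 2)
     capacity >= 4 GiB    -> next multiple of CHUNK_SIZE (512 MiB), capped at CFDATA_MAX_SIZE
*/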

CF_INLINE CFIndex __CFDataNumBytesForCapacity(CFIndex capacity) {
    return capacity;
}

static void __CFDataHandleOutOfMemory(CFTypeRef obj, CFIndex numBytes) {
    CFStringRef msg;
    if(0 < numBytes && numBytes <= CFDATA_MAX_SIZE) {
        msg = CFStringCreateWithFormat(kCFAllocatorSystemDefault, NULL, CFSTR("Attempt to allocate %ld bytes for NS/CFData failed"), numBytes);
    } else {
        msg = CFStringCreateWithFormat(kCFAllocatorSystemDefault, NULL, CFSTR("Attempt to allocate %ld bytes for NS/CFData failed. Maximum size: %lld"), numBytes, CFDATA_MAX_SIZE);
    }
    {
        CFLog(kCFLogLevelCritical, CFSTR("%@"), msg);
        HALT;
    }
    CFRelease(msg);
}

#if defined(DEBUG)
CF_INLINE void __CFDataValidateRange(CFDataRef data, CFRange range, const char *func) {
    CFAssert2(0 <= range.location && range.location <= __CFDataLength(data), __kCFLogAssertion, "%s(): range.location index (%d) out of bounds", func, range.location);
    CFAssert2(0 <= range.length, __kCFLogAssertion, "%s(): length (%d) cannot be less than zero", func, range.length);
    CFAssert2(range.location + range.length <= __CFDataLength(data), __kCFLogAssertion, "%s(): ending index (%d) out of bounds", func, range.location + range.length);
}
#else
#define __CFDataValidateRange(a,r,f)
#endif

static Boolean __CFDataEqual(CFTypeRef cf1, CFTypeRef cf2) {
    CFDataRef data1 = (CFDataRef)cf1;
    CFDataRef data2 = (CFDataRef)cf2;
    CFIndex length;
    length = __CFDataLength(data1);
    if (length != __CFDataLength(data2)) return false;
    const uint8_t *bytePtr1 = CFDataGetBytePtr(data1);
    const uint8_t *bytePtr2 = CFDataGetBytePtr(data2);
    return 0 == memcmp(bytePtr1, bytePtr2, length);
}

static CFHashCode __CFDataHash(CFTypeRef cf) {
    CFDataRef data = (CFDataRef)cf;
    return CFHashBytes((uint8_t *)CFDataGetBytePtr(data), __CFMin(__CFDataLength(data), 80));
}

static CFStringRef __CFDataCopyDescription(CFTypeRef cf) {
    CFDataRef data = (CFDataRef)cf;
    CFMutableStringRef result;
    CFIndex idx;
    CFIndex len;
    const uint8_t *bytes;
    len = __CFDataLength(data);
    bytes = CFDataGetBytePtr(data);
    result = CFStringCreateMutable(CFGetAllocator(data), 0);
    CFStringAppendFormat(result, NULL, CFSTR("<CFData %p [%p]>{length = %lu, capacity = %lu, bytes = 0x"), cf, CFGetAllocator(data), (unsigned long)len, (unsigned long)__CFDataCapacity(data));
    if (24 < len) {
        for (idx = 0; idx < 16; idx += 4) {
            CFStringAppendFormat(result, NULL, CFSTR("%02x%02x%02x%02x"), bytes[idx], bytes[idx + 1], bytes[idx + 2], bytes[idx + 3]);
        }
        CFStringAppend(result, CFSTR(" ... "));
        for (idx = len - 8; idx < len; idx += 4) {
            CFStringAppendFormat(result, NULL, CFSTR("%02x%02x%02x%02x"), bytes[idx], bytes[idx + 1], bytes[idx + 2], bytes[idx + 3]);
        }
    } else {
        for (idx = 0; idx < len; idx++) {
            CFStringAppendFormat(result, NULL, CFSTR("%02x"), bytes[idx]);
        }
    }
    CFStringAppend(result, CFSTR("}"));
    return result;
}

static void *__CFDataInlineBytesPtr(CFDataRef data) {
    return (void *)((uintptr_t)((int8_t *)data + sizeof(struct __CFData) + 15) & ~0xF);    // 16-byte align
}
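
/*
 Illustration of the alignment math above (not part of the original source):
 the inline bytes start at the first 16-byte boundary at or after the end of
 the struct __CFData header. For example, assuming (uintptr_t)data +
 sizeof(struct __CFData) == 0x1034, then 0x1034 + 15 == 0x1043 and
 0x1043 & ~0xF == 0x1040. This is also why __CFDataInit below adds 15 bytes
 of slop when sizing an inline allocation.
*/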

static Boolean __CFDataShouldAllocateCleared(CFDataRef data, CFIndex size) {
    Boolean result;
    if (__CFDataUseAllocator(data)) {
        result = false;
    } else {
        if (__CFDataAllocatesCollectable(data)) {
#if __LP64__
            result = false;
#else
            result = (size > (64 * 1024));
#endif
        } else {
            result = (size > (128 * 1024));
        }
    }
    return result;
}


// Check __CFDataShouldAllocateCleared before passing true.
static void *__CFDataAllocate(CFDataRef data, CFIndex size, Boolean clear) {
    void *bytes = NULL;
    if (__CFDataUseAllocator(data)) {
        CFAllocatorRef allocator = __CFGetAllocator(data);
        bytes = CFAllocatorAllocate(allocator, size, 0);
        if (clear) memset((uint8_t *)bytes, 0, size);
    } else {
        if (__CFDataAllocatesCollectable(data)) {
            bytes = auto_zone_allocate_object(objc_collectableZone(), size, AUTO_MEMORY_UNSCANNED, 0, clear);
        } else {
            if (clear) {
                bytes = calloc(1, size);
            } else {
                bytes = malloc(size);
            }
        }
    }
    return bytes;
}

static void __CFDataDeallocate(CFTypeRef cf) {
    CFMutableDataRef data = (CFMutableDataRef)cf;
    if (!__CFDataBytesInline(data)) {
        CFAllocatorRef deallocator = data->_bytesDeallocator;
        if (deallocator != NULL) {
            _CFAllocatorDeallocateGC(deallocator, data->_bytes);
            CFRelease(deallocator);
            data->_bytes = NULL;
        } else {
            if (__CFDataUseAllocator(data)) {
                _CFAllocatorDeallocateGC(__CFGetAllocator(data), data->_bytes);
            } else if (!__CFDataAllocatesCollectable(data) && data->_bytes) {
                free(data->_bytes);
            }
            data->_bytes = NULL;
        }
    }
}

static CFTypeID __kCFDataTypeID = _kCFRuntimeNotATypeID;

static const CFRuntimeClass __CFDataClass = {
    _kCFRuntimeScannedObject,
    "CFData",
    NULL,   // init
    NULL,   // copy
    __CFDataDeallocate,
    __CFDataEqual,
    __CFDataHash,
    NULL,   // copyFormattingDesc
    __CFDataCopyDescription
};

CFTypeID CFDataGetTypeID(void) {
    static dispatch_once_t initOnce;
    dispatch_once(&initOnce, ^{ __kCFDataTypeID = _CFRuntimeRegisterClass(&__CFDataClass); });
    return __kCFDataTypeID;
}


// A NULL bytesDeallocator passed to this function does not mean the default allocator; it means
// that there should be no deallocator, and the bytes should be copied.
static CFMutableDataRef __CFDataInit(CFAllocatorRef allocator, CFOptionFlags flags, CFIndex capacity, const uint8_t *bytes, CFIndex length, CFAllocatorRef bytesDeallocator) {
    CFMutableDataRef memory;
    __CFGenericValidateMutabilityFlags(flags);
    CFAssert2(0 <= capacity, __kCFLogAssertion, "%s(): capacity (%d) cannot be less than zero", __PRETTY_FUNCTION__, capacity);
    CFAssert3(kCFFixedMutable != __CFMutableVarietyFromFlags(flags) || length <= capacity, __kCFLogAssertion, "%s(): for kCFFixedMutable type, capacity (%d) must be greater than or equal to number of initial elements (%d)", __PRETTY_FUNCTION__, capacity, length);
    CFAssert2(0 <= length, __kCFLogAssertion, "%s(): length (%d) cannot be less than zero", __PRETTY_FUNCTION__, length);

    Boolean collectableMemory = CF_IS_COLLECTABLE_ALLOCATOR(allocator);
    Boolean noCopy = bytesDeallocator != NULL;
    Boolean isMutable = ((flags & __kCFMutable) != 0);
    Boolean isGrowable = ((flags & __kCFGrowable) != 0);
    Boolean allocateInline = !isGrowable && !noCopy && capacity < INLINE_BYTES_THRESHOLD;
    allocator = (allocator == NULL) ? __CFGetDefaultAllocator() : allocator;
    Boolean useAllocator = (allocator != kCFAllocatorSystemDefault && allocator != kCFAllocatorMalloc && allocator != kCFAllocatorMallocZone);

    CFIndex size = sizeof(struct __CFData) - sizeof(CFRuntimeBase);
    if (allocateInline) {
        size += sizeof(uint8_t) * __CFDataNumBytesForCapacity(capacity) + sizeof(uint8_t) * 15; // for 16-byte alignment fixup
    }
    memory = (CFMutableDataRef)_CFRuntimeCreateInstance(allocator, CFDataGetTypeID(), size, NULL);
    if (NULL == memory) {
        return NULL;
    }
    __CFDataSetNumBytesUsed(memory, 0);
    __CFDataSetLength(memory, 0);
    __CFDataSetInfoBits(memory,
                        (allocateInline ? __kCFBytesInline : 0) |
                        (useAllocator ? __kCFUseAllocator : 0) |
                        (collectableMemory ? __kCFAllocatesCollectable : 0));

    BOOL finalize = YES;
    BOOL scan = YES;
    if (collectableMemory) {
        if (allocateInline) {
            // We have no pointer to anything that needs to be reclaimed, so don't scan or finalize.
            scan = NO;
            finalize = NO;
        } else if (noCopy) {
            if (CF_IS_COLLECTABLE_ALLOCATOR(bytesDeallocator)) {
                // We're taking responsibility for externally GC-allocated memory, so scan us, but we don't need to finalize.
                finalize = NO;
            } else if (bytesDeallocator == kCFAllocatorNull) {
                // We don't have responsibility for these bytes, so there's no need to be scanned and we don't need to finalize.
                scan = NO;
                finalize = NO;
            } else {
                // We have a pointer to non-GC-allocated memory, so don't scan, but do finalize.
                scan = NO;
            }
        }
        if (!scan) auto_zone_set_unscanned(objc_collectableZone(), memory);
        if (!finalize) auto_zone_set_nofinalize(objc_collectableZone(), memory);
    }
    if (isMutable && isGrowable) {
        __CFDataSetCapacity(memory, __CFDataRoundUpCapacity(1));
        __CFDataSetNumBytes(memory, __CFDataNumBytesForCapacity(__CFDataRoundUpCapacity(1)));
        __CFSetMutableVariety(memory, kCFMutable);
    } else {
        /* Don't round up capacity */
        __CFDataSetCapacity(memory, capacity);
        __CFDataSetNumBytes(memory, __CFDataNumBytesForCapacity(capacity));
        __CFSetMutableVariety(memory, kCFFixedMutable);
    }
    if (noCopy) {
        __CFAssignWithWriteBarrier((void **)&memory->_bytes, (uint8_t *)bytes);
        if (finalize) {
            if ((0)) {
                memory->_bytesDeallocator = bytesDeallocator;
            } else {
                memory->_bytesDeallocator = (CFAllocatorRef)CFRetain(bytesDeallocator);
            }
        }
        if (CF_IS_COLLECTABLE_ALLOCATOR(bytesDeallocator) && !(0)) {
            // we assume that the no-copy memory is GC-allocated with a retain count of (at least) 1 and we should release it now instead of waiting until __CFDataDeallocate.
            auto_zone_release(objc_collectableZone(), memory->_bytes);
        }
        __CFDataSetNumBytesUsed(memory, length);
        __CFDataSetLength(memory, length);
        // Mutable no-copy datas are not allowed, so don't bother setting needsToZero flag.
    } else {
        Boolean cleared = (isMutable && !isGrowable && !_CFExecutableLinkedOnOrAfter(CFSystemVersionSnowLeopard));
        if (!allocateInline) {
            // assume that allocators give 16-byte aligned memory back -- it is their responsibility
            __CFAssignWithWriteBarrier((void **)&memory->_bytes, __CFDataAllocate(memory, __CFDataNumBytes(memory) * sizeof(uint8_t), cleared));
            if (__CFOASafe) __CFSetLastAllocationEventName(memory->_bytes, "CFData (store)");
            if (NULL == memory->_bytes) {
                CFRelease(memory);
                return NULL;
            }
        } else {
            if (length == 0 && !isMutable) {
                // NSData sets its bytes pointer to NULL when its length is zero. Starting in 10.7 we do the same for CFData.
                memory->_bytes = NULL;
                // It is important to set this data as not inlined, so we do not recalculate a bytes pointer from null.
                __CFDataSetInline(memory, false);
            }
            cleared = true;
        }
        __CFDataSetNeedsToZero(memory, !cleared);
        memory->_bytesDeallocator = NULL;
        CFDataReplaceBytes(memory, CFRangeMake(0, 0), bytes, length);
    }
    __CFSetMutableVariety(memory, __CFMutableVarietyFromFlags(flags));
    return memory;
}

CFDataRef CFDataCreate(CFAllocatorRef allocator, const uint8_t *bytes, CFIndex length) {
    return __CFDataInit(allocator, kCFImmutable, length, bytes, length, NULL);
}

CFDataRef CFDataCreateWithBytesNoCopy(CFAllocatorRef allocator, const uint8_t *bytes, CFIndex length, CFAllocatorRef bytesDeallocator) {
    CFAssert1((0 == length || bytes != NULL), __kCFLogAssertion, "%s(): bytes pointer cannot be NULL if length is non-zero", __PRETTY_FUNCTION__);
    if (NULL == bytesDeallocator) bytesDeallocator = __CFGetDefaultAllocator();
    return __CFDataInit(allocator, kCFImmutable, length, bytes, length, bytesDeallocator);
}
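
/*
 Usage sketch for CFDataCreateWithBytesNoCopy (illustrative only; buffer and
 variable names are hypothetical): the bytesDeallocator decides who frees the
 backing buffer.

     static const uint8_t kHeader[4] = { 0xCA, 0xFE, 0xBA, 0xBE };
     // kCFAllocatorNull: the data neither copies nor frees the bytes; the
     // caller guarantees the buffer outlives the CFData.
     CFDataRef wrapper = CFDataCreateWithBytesNoCopy(kCFAllocatorDefault, kHeader, sizeof(kHeader), kCFAllocatorNull);

     uint8_t *heapBuf = (uint8_t *)malloc(1024);
     // kCFAllocatorMalloc: the data takes ownership and free()s the buffer
     // when the last reference is released.
     CFDataRef owner = CFDataCreateWithBytesNoCopy(kCFAllocatorDefault, heapBuf, 1024, kCFAllocatorMalloc);
*/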

CFDataRef CFDataCreateCopy(CFAllocatorRef allocator, CFDataRef data) {
    CFIndex length = CFDataGetLength(data);
    return __CFDataInit(allocator, kCFImmutable, length, CFDataGetBytePtr(data), length, NULL);
}

CFMutableDataRef CFDataCreateMutable(CFAllocatorRef allocator, CFIndex capacity) {
    // Do not allow magic allocator for now for mutable datas, because it
    // isn't remembered for proper handling later when growth of the buffer
    // has to occur.
    Boolean wasMagic = (0);
    CFMutableDataRef r = (CFMutableDataRef)__CFDataInit(allocator, (0 == capacity) ? kCFMutable : kCFFixedMutable, capacity, NULL, 0, NULL);
    if (wasMagic) CFMakeCollectable(r);
    return r;
}

CFMutableDataRef CFDataCreateMutableCopy(CFAllocatorRef allocator, CFIndex capacity, CFDataRef data) {
    // Do not allow magic allocator for now for mutable datas, because it
    // isn't remembered for proper handling later when growth of the buffer
    // has to occur.
    Boolean wasMagic = (0);
    CFMutableDataRef r = (CFMutableDataRef) __CFDataInit(allocator, (0 == capacity) ? kCFMutable : kCFFixedMutable, capacity, CFDataGetBytePtr(data), CFDataGetLength(data), NULL);
    if (wasMagic) CFMakeCollectable(r);
    return r;
}

CFIndex CFDataGetLength(CFDataRef data) {
    CF_OBJC_FUNCDISPATCHV(CFDataGetTypeID(), CFIndex, (NSData *)data, length);
    __CFGenericValidateType(data, CFDataGetTypeID());
    return __CFDataLength(data);
}

const uint8_t *CFDataGetBytePtr(CFDataRef data) {
    CF_OBJC_FUNCDISPATCHV(CFDataGetTypeID(), const uint8_t *, (NSData *)data, bytes);
    __CFGenericValidateType(data, CFDataGetTypeID());
    // compaction: if inline, always do the computation.
    return __CFDataBytesInline(data) ? (uint8_t *)__CFDataInlineBytesPtr(data) : data->_bytes;
}

uint8_t *CFDataGetMutableBytePtr(CFMutableDataRef data) {
    CF_OBJC_FUNCDISPATCHV(CFDataGetTypeID(), uint8_t *, (NSMutableData *)data, mutableBytes);
    CFAssert1(__CFDataIsMutable(data), __kCFLogAssertion, "%s(): data is immutable", __PRETTY_FUNCTION__);
    // compaction: if inline, always do the computation.
    return __CFDataBytesInline(data) ? (uint8_t *)__CFDataInlineBytesPtr(data) : data->_bytes;
}

void CFDataGetBytes(CFDataRef data, CFRange range, uint8_t *buffer) {
    CF_OBJC_FUNCDISPATCHV(CFDataGetTypeID(), void, (NSData *)data, getBytes:(void *)buffer range:NSMakeRange(range.location, range.length));
    __CFDataValidateRange(data, range, __PRETTY_FUNCTION__);
    memmove(buffer, CFDataGetBytePtr(data) + range.location, range.length);
}

/* Allocates a new block of data with at least numNewValues more bytes than the current length. If clear is true, the new bytes up to at least the new length will be zeroed. */
static void __CFDataGrow(CFMutableDataRef data, CFIndex numNewValues, Boolean clear) {
    CFIndex oldLength = __CFDataLength(data);
    CFIndex newLength = oldLength + numNewValues;
    if (newLength > CFDATA_MAX_SIZE || newLength < 0) __CFDataHandleOutOfMemory(data, newLength * sizeof(uint8_t));
    CFIndex capacity = __CFDataRoundUpCapacity(newLength);
    CFIndex numBytes = __CFDataNumBytesForCapacity(capacity);
    CFAllocatorRef allocator = CFGetAllocator(data);
    void *bytes = NULL;
    void *oldBytes = data->_bytes;
    Boolean allocateCleared = clear && __CFDataShouldAllocateCleared(data, numBytes);
    if (allocateCleared && !__CFDataUseAllocator(data) && (oldLength == 0 || (newLength / oldLength) > 4)) {
        // If the length that needs to be zeroed is significantly greater than the length of the data, then calloc/memmove is probably more efficient than realloc/memset.
        bytes = __CFDataAllocate(data, numBytes * sizeof(uint8_t), true);
        if (NULL != bytes) {
            memmove(bytes, oldBytes, oldLength);
            __CFDataDeallocate(data);
        }
    }
    if (bytes == NULL) {
        // If the calloc/memmove approach either failed or was never attempted, then realloc.
        allocateCleared = false;
        if (__CFDataUseAllocator(data)) {
            bytes = CFAllocatorReallocate(allocator, oldBytes, numBytes * sizeof(uint8_t), 0);
        } else {
            bytes = realloc(oldBytes, numBytes * sizeof(uint8_t));
        }
    }
    if (NULL == bytes) __CFDataHandleOutOfMemory(data, numBytes * sizeof(uint8_t));
    __CFDataSetCapacity(data, capacity);
    __CFDataSetNumBytes(data, numBytes);
    if (clear && !allocateCleared && oldLength < newLength) memset((uint8_t *)bytes + oldLength, 0, newLength - oldLength);
    __CFDataSetNeedsToZero(data, !allocateCleared);
    __CFAssignWithWriteBarrier((void **)&data->_bytes, bytes);
    if (__CFOASafe) __CFSetLastAllocationEventName(data->_bytes, "CFData (store)");
}

void CFDataSetLength(CFMutableDataRef data, CFIndex newLength) {
    CFIndex oldLength, capacity;
    Boolean isGrowable;
    CF_OBJC_FUNCDISPATCHV(CFDataGetTypeID(), void, (NSMutableData *)data, setLength:(NSUInteger)newLength);
    CFAssert1(__CFDataIsMutable(data), __kCFLogAssertion, "%s(): data is immutable", __PRETTY_FUNCTION__);
    oldLength = __CFDataLength(data);
    capacity = __CFDataCapacity(data);
    isGrowable = __CFDataIsGrowable(data);
    if (__CFDataIsMutable(data)) {
        if (newLength < 0) {
            if (isGrowable) {
                __CFDataHandleOutOfMemory(data, newLength);
            } else {
                HALT;
            }
        } else if (capacity < newLength) {
            if (isGrowable) {
                __CFDataGrow(data, newLength - oldLength, true);
            } else {
                CFAssert1(newLength <= __CFDataCapacity(data), __kCFLogAssertion, "%s(): fixed-capacity data is full", __PRETTY_FUNCTION__);
            }
        } else if (oldLength < newLength && __CFDataNeedsToZero(data)) {
            memset(CFDataGetMutableBytePtr(data) + oldLength, 0, newLength - oldLength);
        } else if (newLength < oldLength) {
            __CFDataSetNeedsToZero(data, true);
        }
    }
    __CFDataSetLength(data, newLength);
    __CFDataSetNumBytesUsed(data, newLength);
}

void CFDataIncreaseLength(CFMutableDataRef data, CFIndex extraLength) {
    CF_OBJC_FUNCDISPATCHV(CFDataGetTypeID(), void, (NSMutableData *)data, increaseLengthBy:(NSUInteger)extraLength);
    CFAssert1(__CFDataIsMutable(data), __kCFLogAssertion, "%s(): data is immutable", __PRETTY_FUNCTION__);
    if (extraLength < 0) HALT; // Avoid integer overflow.
    CFDataSetLength(data, __CFDataLength(data) + extraLength);
}

void CFDataAppendBytes(CFMutableDataRef data, const uint8_t *bytes, CFIndex length) {
    CF_OBJC_FUNCDISPATCHV(CFDataGetTypeID(), void, (NSMutableData *)data, appendBytes:(const void *)bytes length:(NSUInteger)length);
    CFAssert1(__CFDataIsMutable(data), __kCFLogAssertion, "%s(): data is immutable", __PRETTY_FUNCTION__);
    CFDataReplaceBytes(data, CFRangeMake(__CFDataLength(data), 0), bytes, length);
}

void CFDataDeleteBytes(CFMutableDataRef data, CFRange range) {
    CF_OBJC_FUNCDISPATCHV(CFDataGetTypeID(), void, (NSMutableData *)data, replaceBytesInRange:NSMakeRange(range.location, range.length) withBytes:NULL length:0);
    CFAssert1(__CFDataIsMutable(data), __kCFLogAssertion, "%s(): data is immutable", __PRETTY_FUNCTION__);
    CFDataReplaceBytes(data, range, NULL, 0);
}

void CFDataReplaceBytes(CFMutableDataRef data, CFRange range, const uint8_t *newBytes, CFIndex newLength) {
    CF_OBJC_FUNCDISPATCHV(CFDataGetTypeID(), void, (NSMutableData *)data, replaceBytesInRange:NSMakeRange(range.location, range.length) withBytes:(const void *)newBytes length:(NSUInteger)newLength);
    __CFGenericValidateType(data, CFDataGetTypeID());
    __CFDataValidateRange(data, range, __PRETTY_FUNCTION__);
    CFAssert1(__CFDataIsMutable(data), __kCFLogAssertion, "%s(): data is immutable", __PRETTY_FUNCTION__);
    CFAssert2(0 <= newLength, __kCFLogAssertion, "%s(): newLength (%d) cannot be less than zero", __PRETTY_FUNCTION__, newLength);

    CFIndex len = __CFDataLength(data);
    if (len < 0 || range.length < 0 || newLength < 0) HALT;
    CFIndex newCount = len - range.length + newLength;
    if (newCount < 0) HALT;

    uint8_t *bytePtr = (uint8_t *)CFDataGetMutableBytePtr(data);
    uint8_t *srcBuf = (uint8_t *)newBytes;
    switch (__CFMutableVariety(data)) {
        case kCFMutable:
            if (__CFDataNumBytes(data) < newCount) {
                if (bytePtr && newBytes && newBytes < bytePtr + __CFDataCapacity(data) && bytePtr < newBytes + newLength) {
                    srcBuf = (uint8_t *)malloc(newLength * sizeof(uint8_t));
                    memmove(srcBuf, newBytes, newLength * sizeof(uint8_t));
                }
                __CFDataGrow(data, newLength - range.length, false);
                bytePtr = (uint8_t *)CFDataGetMutableBytePtr(data);
            }
            break;
        case kCFFixedMutable:
            CFAssert1(newCount <= __CFDataCapacity(data), __kCFLogAssertion, "%s(): fixed-capacity data is full", __PRETTY_FUNCTION__);
            // Continuing after this could cause buffer overruns.
            if (newCount > __CFDataCapacity(data)) HALT;
            break;
    }
    if (newLength != range.length && range.location + range.length < len) {
        memmove(bytePtr + range.location + newLength, bytePtr + range.location + range.length, (len - range.location - range.length) * sizeof(uint8_t));
    }
    if (0 < newLength) {
        memmove(bytePtr + range.location, srcBuf, newLength * sizeof(uint8_t));
    }
    if (srcBuf != newBytes) free(srcBuf);
    __CFDataSetNumBytesUsed(data, newCount);
    __CFDataSetLength(data, newCount);
}
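
/*
 Illustration of the replace semantics above (not part of the original
 source). Starting from a growable data holding "ABCDEF", replacing the two
 middle bytes with three new bytes shifts the tail and grows the length by one:

     CFMutableDataRef d = CFDataCreateMutable(kCFAllocatorDefault, 0);
     CFDataAppendBytes(d, (const uint8_t *)"ABCDEF", 6);
     CFDataReplaceBytes(d, CFRangeMake(2, 2), (const uint8_t *)"xyz", 3);
     // d now holds "ABxyzEF" and CFDataGetLength(d) == 7.
     CFRelease(d);
*/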

#define REVERSE_BUFFER(type, buf, len) { \
    type tmp; \
    for(int i = 0; i < (len)/2; i++) { \
        tmp = (buf)[i]; \
        (buf)[i] = (buf)[(len) - i - 1]; \
        (buf)[(len) - i - 1] = tmp; \
    } \
}

static void _computeGoodSubstringShift(const uint8_t *needle, int needleLength, unsigned long shift[], unsigned long suff[]) {
    int f, g, i, j;

    // Compute suffix lengths

    suff[needleLength - 1] = needleLength;
    f = g = needleLength - 1;
    for (i = needleLength - 2; i >= 0; --i) {
        if (i > g && suff[i + needleLength - 1 - f] < i - g)
            suff[i] = suff[i + needleLength - 1 - f];
        else {
            if (i < g)
                g = i;
            f = i;
            while (g >= 0 && needle[g] == needle[g + needleLength - 1 - f])
                --g;
            suff[i] = f - g;
        }
    }

    // Compute shift table

    for (i = 0; i < needleLength; ++i)
        shift[i] = needleLength;
    j = 0;
    for (i = needleLength - 1; i >= 0; --i)
        if (suff[i] == i + 1)
            for (; j < needleLength - 1 - i; ++j)
                if (shift[j] == needleLength)
                    shift[j] = needleLength - 1 - i;
    // Set the amount of shift necessary to move each of the suffix matches found into a position where it overlaps with the suffix. If there are duplicate matches the latest one is the one that should take effect.
    for (i = 0; i <= needleLength - 2; ++i)
        shift[needleLength - 1 - suff[i]] = needleLength - 1 - i;
    // Since the Boyer-Moore algorithm moves the pointer back while scanning substrings, add the distance to the end of the potential substring.
    for (i = 0; i < needleLength - 1; ++i) {
        shift[i] += (needleLength - 1 - i);
    }
}

static const uint8_t * __CFDataSearchBoyerMoore(const CFDataRef data, const uint8_t *haystack, unsigned long haystackLength, const uint8_t *needle, unsigned long needleLength, Boolean backwards) {
    unsigned long badCharacterShift[UCHAR_MAX + 1] = {0};
    unsigned long *goodSubstringShift = (unsigned long *)malloc(needleLength * sizeof(unsigned long));
    unsigned long *suffixLengths = (unsigned long *)malloc(needleLength * sizeof(unsigned long));
    if (!goodSubstringShift || !suffixLengths) {
        __CFDataHandleOutOfMemory(data, needleLength * sizeof(unsigned long));
    }

    if(backwards) {
        for (int i = 0; i < sizeof(badCharacterShift) / sizeof(*badCharacterShift); i++)
            badCharacterShift[i] = needleLength;

        for (int i = needleLength - 1; i >= 0; i--)
            badCharacterShift[needle[i]] = i;

        // To get the correct shift table for backwards search reverse the needle, compute the forwards shift table, and then reverse the result.
        uint8_t *needleCopy = (uint8_t *)malloc(needleLength * sizeof(uint8_t));
        if (!needleCopy) {
            __CFDataHandleOutOfMemory(data, needleLength * sizeof(uint8_t));
        }
        memmove(needleCopy, needle, needleLength);
        REVERSE_BUFFER(uint8_t, needleCopy, needleLength);
        _computeGoodSubstringShift(needleCopy, needleLength, goodSubstringShift, suffixLengths);
        REVERSE_BUFFER(unsigned long, goodSubstringShift, needleLength);
        free(needleCopy);
    } else {
        for (int i = 0; i < sizeof(badCharacterShift) / sizeof(*badCharacterShift); i++)
            badCharacterShift[i] = needleLength;

        for (int i = 0; i < needleLength; i++)
            badCharacterShift[needle[i]] = needleLength - i - 1;

        _computeGoodSubstringShift(needle, needleLength, goodSubstringShift, suffixLengths);
    }

    const uint8_t *scan_needle;
    const uint8_t *scan_haystack;
    const uint8_t *result = NULL;
    if(backwards) {
        const uint8_t *const end_needle = needle + needleLength;
        scan_needle = needle;
        scan_haystack = haystack + haystackLength - needleLength;
        while (scan_haystack >= haystack && scan_needle < end_needle) {
            if (*scan_haystack == *scan_needle) {
                scan_haystack++;
                scan_needle++;
            } else {
                scan_haystack -= __CFMax(badCharacterShift[*scan_haystack], goodSubstringShift[scan_needle - needle]);
                scan_needle = needle;
            }
        }
        if (scan_needle == end_needle) {
            result = (scan_haystack - needleLength);
        }
    } else {
        const uint8_t *const end_haystack = haystack + haystackLength;
        scan_needle = needle + needleLength - 1;
        scan_haystack = haystack + needleLength - 1;
        while (scan_haystack < end_haystack && scan_needle >= needle) {
            if (*scan_haystack == *scan_needle) {
                scan_haystack--;
                scan_needle--;
            } else {
                scan_haystack += __CFMax(badCharacterShift[*scan_haystack], goodSubstringShift[scan_needle - needle]);
                scan_needle = needle + needleLength - 1;
            }
        }
        if (scan_needle < needle) {
            result = (scan_haystack + 1);
        }
    }

    free(goodSubstringShift);
    free(suffixLengths);

    return result;
}

CFRange _CFDataFindBytes(CFDataRef data, CFDataRef dataToFind, CFRange searchRange, CFDataSearchFlags compareOptions) {
    const uint8_t *fullHaystack = CFDataGetBytePtr(data);
    const uint8_t *needle = CFDataGetBytePtr(dataToFind);
    unsigned long fullHaystackLength = CFDataGetLength(data);
    unsigned long needleLength = CFDataGetLength(dataToFind);

    if(compareOptions & kCFDataSearchAnchored) {
        if(searchRange.length > needleLength) {
            if(compareOptions & kCFDataSearchBackwards) {
                searchRange.location += (searchRange.length - needleLength);
            }
            searchRange.length = needleLength;
        }
    }
    if(searchRange.length > fullHaystackLength - searchRange.location) {
        searchRange.length = fullHaystackLength - searchRange.location;
    }

    if(searchRange.length < needleLength || fullHaystackLength == 0 || needleLength == 0) {
        return CFRangeMake(kCFNotFound, 0);
    }

    const uint8_t *haystack = fullHaystack + searchRange.location;
    const uint8_t *searchResult = __CFDataSearchBoyerMoore(data, haystack, searchRange.length, needle, needleLength, (compareOptions & kCFDataSearchBackwards) != 0);
    CFIndex resultLocation = (searchResult == NULL) ? kCFNotFound : searchRange.location + (searchResult - haystack);

    return CFRangeMake(resultLocation, resultLocation == kCFNotFound ? 0 : needleLength);
}

CFRange CFDataFind(CFDataRef data, CFDataRef dataToFind, CFRange searchRange, CFDataSearchFlags compareOptions) {
    // No objc dispatch
    __CFGenericValidateType(data, CFDataGetTypeID());
    __CFGenericValidateType(dataToFind, CFDataGetTypeID());
    __CFDataValidateRange(data, searchRange, __PRETTY_FUNCTION__);

    return _CFDataFindBytes(data, dataToFind, searchRange, compareOptions);
}
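
/*
 Usage sketch for CFDataFind (illustrative only; not part of the original
 source):

     const uint8_t bytes[] = { 'a', 'b', 'c', 'a', 'b', 'c' };
     const uint8_t sub[]   = { 'a', 'b' };
     CFDataRef haystack = CFDataCreate(kCFAllocatorDefault, bytes, sizeof(bytes));
     CFDataRef needle = CFDataCreate(kCFAllocatorDefault, sub, sizeof(sub));

     CFRange forward = CFDataFind(haystack, needle, CFRangeMake(0, sizeof(bytes)), 0);
     // forward == {0, 2}: first occurrence, scanning from the front.
     CFRange backward = CFDataFind(haystack, needle, CFRangeMake(0, sizeof(bytes)), kCFDataSearchBackwards);
     // backward == {3, 2}: last occurrence within the search range.

     CFRelease(needle);
     CFRelease(haystack);
*/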

#undef __CFDataValidateRange
#undef __CFGenericValidateMutabilityFlags
#undef INLINE_BYTES_THRESHOLD
#undef CFDATA_MAX_SIZE
#undef REVERSE_BUFFER