/*
 * Copyright (c) 2013 Apple Inc. All rights reserved.
 *
 * @APPLE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this
 * file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_LICENSE_HEADER_END@
 */

/*  CFData.c
    Copyright (c) 1998-2013, Apple Inc. All rights reserved.
    Responsibility: Kevin Perry
*/

#include <CoreFoundation/CFData.h>
#include <CoreFoundation/CFPriv.h>
#include "CFInternal.h"
#include <string.h>



#if __LP64__
#define CFDATA_MAX_SIZE ((1ULL << 42) - 1)
#else
#define CFDATA_MAX_SIZE ((1ULL << 31) - 1)
#endif

#if DEPLOYMENT_TARGET_MACOSX || DEPLOYMENT_TARGET_EMBEDDED || DEPLOYMENT_TARGET_EMBEDDED_MINI
#import <mach/mach.h>
CF_INLINE unsigned long __CFPageSize() { return vm_page_size; }
#elif DEPLOYMENT_TARGET_WINDOWS
CF_INLINE unsigned long __CFPageSize() {
    SYSTEM_INFO sysInfo;
    GetSystemInfo(&sysInfo);
    return sysInfo.dwPageSize;
}
#elif DEPLOYMENT_TARGET_LINUX
#include <unistd.h>
CF_INLINE unsigned long __CFPageSize() {
    return (unsigned long)getpagesize();
}
#endif

#define INLINE_BYTES_THRESHOLD ((4 * __CFPageSize()) - sizeof(struct __CFData) - 15)
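/*
 Editor's illustration (not part of the original source): with a typical 4 KiB
 page size, INLINE_BYTES_THRESHOLD works out to
     (4 * 4096) - sizeof(struct __CFData) - 15
 i.e. a little under 16 KiB. Non-growable, copied data whose capacity is below
 this threshold is stored inline, directly after the object header, instead of
 in a separately allocated buffer. The exact value depends on the platform's
 page size and the struct layout.
 */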

struct __CFData {
    CFRuntimeBase _base;
    CFIndex _length;                    /* number of bytes */
    CFIndex _capacity;                  /* maximum number of bytes */
    CFAllocatorRef _bytesDeallocator;   /* used only for immutable; if NULL, no deallocation */
    uint8_t *_bytes;                    /* compaction: direct access to _bytes is only valid when data is not inline */
};

/*
 Bit 0 = is mutable
 Bit 1 = growable
 Bit 2 = bytes inline
 Bit 3 = use given CFAllocator
 Bit 5 = allocate collectable memory

 Bits 1-0 are used for mutability variation

 Bit 6 = not all bytes have been zeroed yet (mutable)
*/

enum {
    __kCFMutable = 0x01,
    __kCFGrowable = 0x02,
    __kCFMutableVarietyMask = 0x03,
    __kCFBytesInline = 0x04,
    __kCFUseAllocator = 0x08,
    __kCFAllocatesCollectable = 0x20,
};

enum {
    kCFImmutable = 0x0,     /* unchangeable and fixed capacity; default */
    kCFFixedMutable = 0x1,  /* changeable and fixed capacity */
    kCFMutable = 0x3        /* changeable and variable capacity */
};

CF_INLINE void __CFDataSetInfoBits(CFDataRef data, UInt32 v) {__CFBitfieldSetValue(((CFRuntimeBase *)data)->_cfinfo[CF_INFO_BITS], 5, 0, v);}
CF_INLINE Boolean __CFDataGetInfoBit(CFDataRef data, UInt32 b) {return ((((const CFRuntimeBase *)data)->_cfinfo[CF_INFO_BITS] & b) != 0);}
CF_INLINE Boolean __CFDataIsMutable(CFDataRef data) {return __CFDataGetInfoBit(data, __kCFMutable);}
CF_INLINE Boolean __CFDataIsGrowable(CFDataRef data) {return __CFDataGetInfoBit(data, __kCFGrowable);}
CF_INLINE Boolean __CFDataBytesInline(CFDataRef data) {return __CFDataGetInfoBit(data, __kCFBytesInline);}
CF_INLINE Boolean __CFDataUseAllocator(CFDataRef data) {return __CFDataGetInfoBit(data, __kCFUseAllocator);}
CF_INLINE Boolean __CFDataAllocatesCollectable(CFDataRef data) {return __CFDataGetInfoBit(data, __kCFAllocatesCollectable);}

CF_INLINE UInt32 __CFMutableVariety(const void *cf) {
    return __CFBitfieldGetValue(((const CFRuntimeBase *)cf)->_cfinfo[CF_INFO_BITS], 1, 0);
}

CF_INLINE void __CFSetMutableVariety(void *cf, UInt32 v) {
    __CFBitfieldSetValue(((CFRuntimeBase *)cf)->_cfinfo[CF_INFO_BITS], 1, 0, v);
}

CF_INLINE UInt32 __CFMutableVarietyFromFlags(UInt32 flags) {
    return (flags & __kCFMutableVarietyMask);
}

#define __CFGenericValidateMutabilityFlags(flags) \
    CFAssert2(__CFMutableVarietyFromFlags(flags) != 0x2, __kCFLogAssertion, "%s(): flags 0x%x do not correctly specify the mutable variety", __PRETTY_FUNCTION__, flags);

CF_INLINE void __CFDataSetInline(CFDataRef data, Boolean flag) {
    __CFBitfieldSetValue(((CFRuntimeBase *)data)->_cfinfo[CF_INFO_BITS], 2, 2, (flag ? 1 : 0));
}

CF_INLINE Boolean __CFDataNeedsToZero(CFDataRef data) {
    return __CFBitfieldGetValue(((CFRuntimeBase *)data)->_cfinfo[CF_INFO_BITS], 6, 6);
}

CF_INLINE void __CFDataSetNeedsToZero(CFDataRef data, Boolean zero) {
    __CFBitfieldSetValue(((CFRuntimeBase *)data)->_cfinfo[CF_INFO_BITS], 6, 6, (zero ? 1 : 0));
}

CF_INLINE CFIndex __CFDataLength(CFDataRef data) {
    return data->_length;
}

CF_INLINE void __CFDataSetLength(CFMutableDataRef data, CFIndex v) {
    /* for a CFData, _bytesUsed == _length */
}

CF_INLINE CFIndex __CFDataCapacity(CFDataRef data) {
    return data->_capacity;
}

CF_INLINE void __CFDataSetCapacity(CFMutableDataRef data, CFIndex v) {
    /* for a CFData, _bytesNum == _capacity */
}

CF_INLINE CFIndex __CFDataNumBytesUsed(CFDataRef data) {
    return data->_length;
}

CF_INLINE void __CFDataSetNumBytesUsed(CFMutableDataRef data, CFIndex v) {
    data->_length = v;
}

CF_INLINE CFIndex __CFDataNumBytes(CFDataRef data) {
    return data->_capacity;
}

CF_INLINE void __CFDataSetNumBytes(CFMutableDataRef data, CFIndex v) {
    data->_capacity = v;
}

#if __LP64__
#define CHUNK_SIZE (1ULL << 29)
#define LOW_THRESHOLD (1ULL << 20)
#define HIGH_THRESHOLD (1ULL << 32)
#else
#define CHUNK_SIZE (1ULL << 26)
#define LOW_THRESHOLD (1ULL << 20)
#define HIGH_THRESHOLD (1ULL << 29)
#endif

CF_INLINE CFIndex __CFDataRoundUpCapacity(CFIndex capacity) {
    if (capacity < 16) {
        return 16;
    } else if (capacity < LOW_THRESHOLD) {
        /* Up to 4x */
        long idx = flsl(capacity);
        return (1L << (long)(idx + ((idx % 2 == 0) ? 0 : 1)));
    } else if (capacity < HIGH_THRESHOLD) {
        /* Up to 2x */
        return (1L << (long)flsl(capacity));
    } else {
        /* Round up to next multiple of CHUNK_SIZE */
        unsigned long newCapacity = CHUNK_SIZE * (1 + (capacity >> ((long)flsl(CHUNK_SIZE) - 1)));
        return __CFMin(newCapacity, CFDATA_MAX_SIZE);
    }
}
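/*
 Worked example (editor's note, not original source): flsl() returns the 1-based
 index of the highest set bit, so below LOW_THRESHOLD the requested capacity is
 rounded up to the next power of two with an even exponent (16, 64, 256, 1024, ...):

     __CFDataRoundUpCapacity(5)    -> flsl(5)    == 3  (odd)  -> 1 << 4  == 16
     __CFDataRoundUpCapacity(100)  -> flsl(100)  == 7  (odd)  -> 1 << 8  == 256
     __CFDataRoundUpCapacity(1000) -> flsl(1000) == 10 (even) -> 1 << 10 == 1024

 Between LOW_THRESHOLD and HIGH_THRESHOLD it rounds up to the next power of two,
 and above HIGH_THRESHOLD it rounds up to a multiple of CHUNK_SIZE, capped at
 CFDATA_MAX_SIZE.
 */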

CF_INLINE CFIndex __CFDataNumBytesForCapacity(CFIndex capacity) {
    return capacity;
}

static void __CFDataHandleOutOfMemory(CFTypeRef obj, CFIndex numBytes) {
    CFStringRef msg;
    if (0 < numBytes && numBytes <= CFDATA_MAX_SIZE) {
        msg = CFStringCreateWithFormat(kCFAllocatorSystemDefault, NULL, CFSTR("Attempt to allocate %ld bytes for NS/CFData failed"), numBytes);
    } else {
        msg = CFStringCreateWithFormat(kCFAllocatorSystemDefault, NULL, CFSTR("Attempt to allocate %ld bytes for NS/CFData failed. Maximum size: %lld"), numBytes, CFDATA_MAX_SIZE);
    }
    {
        CFLog(kCFLogLevelCritical, CFSTR("%@"), msg);
        HALT;
    }
    CFRelease(msg);
}

#if defined(DEBUG)
CF_INLINE void __CFDataValidateRange(CFDataRef data, CFRange range, const char *func) {
    CFAssert2(0 <= range.location && range.location <= __CFDataLength(data), __kCFLogAssertion, "%s(): range.location index (%d) out of bounds", func, range.location);
    CFAssert2(0 <= range.length, __kCFLogAssertion, "%s(): length (%d) cannot be less than zero", func, range.length);
    CFAssert2(range.location + range.length <= __CFDataLength(data), __kCFLogAssertion, "%s(): ending index (%d) out of bounds", func, range.location + range.length);
}
#else
#define __CFDataValidateRange(a,r,f)
#endif

static Boolean __CFDataEqual(CFTypeRef cf1, CFTypeRef cf2) {
    CFDataRef data1 = (CFDataRef)cf1;
    CFDataRef data2 = (CFDataRef)cf2;
    CFIndex length;
    length = __CFDataLength(data1);
    if (length != __CFDataLength(data2)) return false;
    const uint8_t *bytePtr1 = CFDataGetBytePtr(data1);
    const uint8_t *bytePtr2 = CFDataGetBytePtr(data2);
    return 0 == memcmp(bytePtr1, bytePtr2, length);
}

static CFHashCode __CFDataHash(CFTypeRef cf) {
    CFDataRef data = (CFDataRef)cf;
    return CFHashBytes((uint8_t *)CFDataGetBytePtr(data), __CFMin(__CFDataLength(data), 80));
}

static CFStringRef __CFDataCopyDescription(CFTypeRef cf) {
    CFDataRef data = (CFDataRef)cf;
    CFMutableStringRef result;
    CFIndex idx;
    CFIndex len;
    const uint8_t *bytes;
    len = __CFDataLength(data);
    bytes = CFDataGetBytePtr(data);
    result = CFStringCreateMutable(CFGetAllocator(data), 0);
    CFStringAppendFormat(result, NULL, CFSTR("<CFData %p [%p]>{length = %lu, capacity = %lu, bytes = 0x"), cf, CFGetAllocator(data), (unsigned long)len, (unsigned long)__CFDataCapacity(data));
    if (24 < len) {
        for (idx = 0; idx < 16; idx += 4) {
            CFStringAppendFormat(result, NULL, CFSTR("%02x%02x%02x%02x"), bytes[idx], bytes[idx + 1], bytes[idx + 2], bytes[idx + 3]);
        }
        CFStringAppend(result, CFSTR(" ... "));
        for (idx = len - 8; idx < len; idx += 4) {
            CFStringAppendFormat(result, NULL, CFSTR("%02x%02x%02x%02x"), bytes[idx], bytes[idx + 1], bytes[idx + 2], bytes[idx + 3]);
        }
    } else {
        for (idx = 0; idx < len; idx++) {
            CFStringAppendFormat(result, NULL, CFSTR("%02x"), bytes[idx]);
        }
    }
    CFStringAppend(result, CFSTR("}"));
    return result;
}

static void *__CFDataInlineBytesPtr(CFDataRef data) {
    return (void *)((uintptr_t)((int8_t *)data + sizeof(struct __CFData) + 15) & ~0xF);    // 16-byte align
}
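/*
 Editor's illustration (not original source, sizes hypothetical): the inline-bytes
 pointer is the first 16-byte-aligned address past the __CFData header. If a data
 object lived at 0x1000 and sizeof(struct __CFData) were 0x28, then
     (0x1000 + 0x28 + 15) & ~0xF == 0x1030,
 so the inline payload would start 8 bytes past the end of the header. This is
 why the inline case in __CFDataInit reserves an extra 15 bytes of slop.
 */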

static Boolean __CFDataShouldAllocateCleared(CFDataRef data, CFIndex size) {
    Boolean result;
    if (__CFDataUseAllocator(data)) {
        result = false;
    } else {
        if (__CFDataAllocatesCollectable(data)) {
#if __LP64__
            result = false;
#else
            result = (size > (64 * 1024));
#endif
        } else {
            result = (size > (128 * 1024));
        }
    }
    return result;
}


// Check __CFDataShouldAllocateCleared before passing true.
static void *__CFDataAllocate(CFDataRef data, CFIndex size, Boolean clear) {
    void *bytes = NULL;
    if (__CFDataUseAllocator(data)) {
        CFAllocatorRef allocator = __CFGetAllocator(data);
        bytes = CFAllocatorAllocate(allocator, size, 0);
        if (clear) memset((uint8_t *)bytes, 0, size);
    } else {
        if (__CFDataAllocatesCollectable(data)) {
            bytes = auto_zone_allocate_object(objc_collectableZone(), size, AUTO_MEMORY_UNSCANNED, 0, clear);
        } else {
            if (clear) {
                bytes = calloc(1, size);
            } else {
                bytes = malloc(size);
            }
        }
    }
    return bytes;
}

static void __CFDataDeallocate(CFTypeRef cf) {
    CFMutableDataRef data = (CFMutableDataRef)cf;
    if (!__CFDataBytesInline(data)) {
        CFAllocatorRef deallocator = data->_bytesDeallocator;
        if (deallocator != NULL) {
            _CFAllocatorDeallocateGC(deallocator, data->_bytes);
            CFRelease(deallocator);
            data->_bytes = NULL;
        } else {
            if (__CFDataUseAllocator(data)) {
                _CFAllocatorDeallocateGC(__CFGetAllocator(data), data->_bytes);
            } else if (!__CFDataAllocatesCollectable(data) && data->_bytes) {
                free(data->_bytes);
            }
            data->_bytes = NULL;
        }
    }
}

static CFTypeID __kCFDataTypeID = _kCFRuntimeNotATypeID;

static const CFRuntimeClass __CFDataClass = {
    _kCFRuntimeScannedObject,
    "CFData",
    NULL,    // init
    NULL,    // copy
    __CFDataDeallocate,
    __CFDataEqual,
    __CFDataHash,
    NULL,    // copyFormattingDesc
    __CFDataCopyDescription
};

CF_PRIVATE void __CFDataInitialize(void) {
    __kCFDataTypeID = _CFRuntimeRegisterClass(&__CFDataClass);
}

CFTypeID CFDataGetTypeID(void) {
    return __kCFDataTypeID;
}


// A NULL bytesDeallocator passed to this function does not mean the default allocator; it means
// that there should be no deallocator, and that the bytes should be copied.
static CFMutableDataRef __CFDataInit(CFAllocatorRef allocator, CFOptionFlags flags, CFIndex capacity, const uint8_t *bytes, CFIndex length, CFAllocatorRef bytesDeallocator) {
    CFMutableDataRef memory;
    __CFGenericValidateMutabilityFlags(flags);
    CFAssert2(0 <= capacity, __kCFLogAssertion, "%s(): capacity (%d) cannot be less than zero", __PRETTY_FUNCTION__, capacity);
    CFAssert3(kCFFixedMutable != __CFMutableVarietyFromFlags(flags) || length <= capacity, __kCFLogAssertion, "%s(): for kCFFixedMutable type, capacity (%d) must be greater than or equal to number of initial elements (%d)", __PRETTY_FUNCTION__, capacity, length);
    CFAssert2(0 <= length, __kCFLogAssertion, "%s(): length (%d) cannot be less than zero", __PRETTY_FUNCTION__, length);

    Boolean collectableMemory = CF_IS_COLLECTABLE_ALLOCATOR(allocator);
    Boolean noCopy = bytesDeallocator != NULL;
    Boolean isMutable = ((flags & __kCFMutable) != 0);
    Boolean isGrowable = ((flags & __kCFGrowable) != 0);
    Boolean allocateInline = !isGrowable && !noCopy && capacity < INLINE_BYTES_THRESHOLD;
    allocator = (allocator == NULL) ? __CFGetDefaultAllocator() : allocator;
    Boolean useAllocator = (allocator != kCFAllocatorSystemDefault && allocator != kCFAllocatorMalloc && allocator != kCFAllocatorMallocZone);

    CFIndex size = sizeof(struct __CFData) - sizeof(CFRuntimeBase);
    if (allocateInline) {
        size += sizeof(uint8_t) * __CFDataNumBytesForCapacity(capacity) + sizeof(uint8_t) * 15;    // for 16-byte alignment fixup
    }
    memory = (CFMutableDataRef)_CFRuntimeCreateInstance(allocator, __kCFDataTypeID, size, NULL);
    if (NULL == memory) {
        return NULL;
    }
    __CFDataSetNumBytesUsed(memory, 0);
    __CFDataSetLength(memory, 0);
    __CFDataSetInfoBits(memory,
                        (allocateInline ? __kCFBytesInline : 0) |
                        (useAllocator ? __kCFUseAllocator : 0) |
                        (collectableMemory ? __kCFAllocatesCollectable : 0));

    BOOL finalize = YES;
    BOOL scan = YES;
    if (collectableMemory) {
        if (allocateInline) {
            // We have no pointer to anything that needs to be reclaimed, so don't scan or finalize.
            scan = NO;
            finalize = NO;
        } else if (noCopy) {
            if (CF_IS_COLLECTABLE_ALLOCATOR(bytesDeallocator)) {
                // We're taking responsibility for externally GC-allocated memory, so scan us, but we don't need to finalize.
                finalize = NO;
            } else if (bytesDeallocator == kCFAllocatorNull) {
                // We don't have responsibility for these bytes, so there's no need to be scanned and we don't need to finalize.
                scan = NO;
                finalize = NO;
            } else {
                // We have a pointer to non-GC-allocated memory, so don't scan, but do finalize.
                scan = NO;
            }
        }
        if (!scan) auto_zone_set_unscanned(objc_collectableZone(), memory);
        if (!finalize) auto_zone_set_nofinalize(objc_collectableZone(), memory);
    }
    if (isMutable && isGrowable) {
        __CFDataSetCapacity(memory, __CFDataRoundUpCapacity(1));
        __CFDataSetNumBytes(memory, __CFDataNumBytesForCapacity(__CFDataRoundUpCapacity(1)));
        __CFSetMutableVariety(memory, kCFMutable);
    } else {
        /* Don't round up capacity */
        __CFDataSetCapacity(memory, capacity);
        __CFDataSetNumBytes(memory, __CFDataNumBytesForCapacity(capacity));
        __CFSetMutableVariety(memory, kCFFixedMutable);
    }
    if (noCopy) {
        __CFAssignWithWriteBarrier((void **)&memory->_bytes, (uint8_t *)bytes);
        if (finalize) {
            if ((0)) {
                memory->_bytesDeallocator = bytesDeallocator;
            } else {
                memory->_bytesDeallocator = (CFAllocatorRef)CFRetain(bytesDeallocator);
            }
        }
        if (CF_IS_COLLECTABLE_ALLOCATOR(bytesDeallocator) && !(0)) {
            // We assume that the no-copy memory is GC-allocated with a retain count of (at least) 1 and we should release it now instead of waiting until __CFDataDeallocate.
            auto_zone_release(objc_collectableZone(), memory->_bytes);
        }
        __CFDataSetNumBytesUsed(memory, length);
        __CFDataSetLength(memory, length);
        // Mutable no-copy datas are not allowed, so don't bother setting needsToZero flag.
    } else {
        Boolean cleared = (isMutable && !isGrowable && !_CFExecutableLinkedOnOrAfter(CFSystemVersionSnowLeopard));
        if (!allocateInline) {
            // assume that allocators give 16-byte aligned memory back -- it is their responsibility
            __CFAssignWithWriteBarrier((void **)&memory->_bytes, __CFDataAllocate(memory, __CFDataNumBytes(memory) * sizeof(uint8_t), cleared));
            if (__CFOASafe) __CFSetLastAllocationEventName(memory->_bytes, "CFData (store)");
            if (NULL == memory->_bytes) {
                CFRelease(memory);
                return NULL;
            }
        } else {
            if (length == 0 && !isMutable) {
                // NSData sets its bytes pointer to NULL when its length is zero. Starting in 10.7 we do the same for CFData.
                memory->_bytes = NULL;
                // It is important to set this data as not inlined, so we do not recalculate a bytes pointer from null.
                __CFDataSetInline(memory, false);
            }
            cleared = true;
        }
        __CFDataSetNeedsToZero(memory, !cleared);
        memory->_bytesDeallocator = NULL;
        CFDataReplaceBytes(memory, CFRangeMake(0, 0), bytes, length);
    }
    __CFSetMutableVariety(memory, __CFMutableVarietyFromFlags(flags));
    return memory;
}

CFDataRef CFDataCreate(CFAllocatorRef allocator, const uint8_t *bytes, CFIndex length) {
    return __CFDataInit(allocator, kCFImmutable, length, bytes, length, NULL);
}

CFDataRef CFDataCreateWithBytesNoCopy(CFAllocatorRef allocator, const uint8_t *bytes, CFIndex length, CFAllocatorRef bytesDeallocator) {
    CFAssert1((0 == length || bytes != NULL), __kCFLogAssertion, "%s(): bytes pointer cannot be NULL if length is non-zero", __PRETTY_FUNCTION__);
    if (NULL == bytesDeallocator) bytesDeallocator = __CFGetDefaultAllocator();
    return __CFDataInit(allocator, kCFImmutable, length, bytes, length, bytesDeallocator);
}
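/*
 Usage sketch (editor's addition, not part of the original source): the creation
 paths above differ in ownership. CFDataCreate copies the bytes, while
 CFDataCreateWithBytesNoCopy adopts the caller's buffer and uses bytesDeallocator
 to free it later, or kCFAllocatorNull to mean "never free; the caller retains
 ownership". Buffer names and sizes below are illustrative only.
 */
#if 0
static void __CFDataCreationExample(void) {
    // Copied: the CFData owns an independent copy of 'stackBytes'.
    uint8_t stackBytes[4] = { 0xde, 0xad, 0xbe, 0xef };
    CFDataRef copied = CFDataCreate(kCFAllocatorDefault, stackBytes, sizeof(stackBytes));

    // No-copy, adopted: the CFData frees 'heapBytes' with the default allocator when released.
    uint8_t *heapBytes = (uint8_t *)malloc(1024);
    CFDataRef adopted = CFDataCreateWithBytesNoCopy(kCFAllocatorDefault, heapBytes, 1024, kCFAllocatorDefault);

    // No-copy, borrowed: the caller must keep 'stackBytes' alive for the data's lifetime.
    CFDataRef borrowed = CFDataCreateWithBytesNoCopy(kCFAllocatorDefault, stackBytes, sizeof(stackBytes), kCFAllocatorNull);

    CFRelease(copied);
    CFRelease(adopted);
    CFRelease(borrowed);
}
#endif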

CFDataRef CFDataCreateCopy(CFAllocatorRef allocator, CFDataRef data) {
    CFIndex length = CFDataGetLength(data);
    return __CFDataInit(allocator, kCFImmutable, length, CFDataGetBytePtr(data), length, NULL);
}

CFMutableDataRef CFDataCreateMutable(CFAllocatorRef allocator, CFIndex capacity) {
    // Do not allow magic allocator for now for mutable datas, because it
    // isn't remembered for proper handling later when growth of the buffer
    // has to occur.
    Boolean wasMagic = (0);
    CFMutableDataRef r = (CFMutableDataRef)__CFDataInit(allocator, (0 == capacity) ? kCFMutable : kCFFixedMutable, capacity, NULL, 0, NULL);
    if (wasMagic) CFMakeCollectable(r);
    return r;
}

CFMutableDataRef CFDataCreateMutableCopy(CFAllocatorRef allocator, CFIndex capacity, CFDataRef data) {
    // Do not allow magic allocator for now for mutable datas, because it
    // isn't remembered for proper handling later when growth of the buffer
    // has to occur.
    Boolean wasMagic = (0);
    CFMutableDataRef r = (CFMutableDataRef)__CFDataInit(allocator, (0 == capacity) ? kCFMutable : kCFFixedMutable, capacity, CFDataGetBytePtr(data), CFDataGetLength(data), NULL);
    if (wasMagic) CFMakeCollectable(r);
    return r;
}

CFIndex CFDataGetLength(CFDataRef data) {
    CF_OBJC_FUNCDISPATCHV(__kCFDataTypeID, CFIndex, (NSData *)data, length);
    __CFGenericValidateType(data, __kCFDataTypeID);
    return __CFDataLength(data);
}

const uint8_t *CFDataGetBytePtr(CFDataRef data) {
    CF_OBJC_FUNCDISPATCHV(__kCFDataTypeID, const uint8_t *, (NSData *)data, bytes);
    __CFGenericValidateType(data, __kCFDataTypeID);
    // compaction: if inline, always do the computation.
    return __CFDataBytesInline(data) ? (uint8_t *)__CFDataInlineBytesPtr(data) : data->_bytes;
}

uint8_t *CFDataGetMutableBytePtr(CFMutableDataRef data) {
    CF_OBJC_FUNCDISPATCHV(__kCFDataTypeID, uint8_t *, (NSMutableData *)data, mutableBytes);
    CFAssert1(__CFDataIsMutable(data), __kCFLogAssertion, "%s(): data is immutable", __PRETTY_FUNCTION__);
    // compaction: if inline, always do the computation.
    return __CFDataBytesInline(data) ? (uint8_t *)__CFDataInlineBytesPtr(data) : data->_bytes;
}

void CFDataGetBytes(CFDataRef data, CFRange range, uint8_t *buffer) {
    CF_OBJC_FUNCDISPATCHV(__kCFDataTypeID, void, (NSData *)data, getBytes:(void *)buffer range:NSMakeRange(range.location, range.length));
    __CFDataValidateRange(data, range, __PRETTY_FUNCTION__);
    memmove(buffer, CFDataGetBytePtr(data) + range.location, range.length);
}
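/*
 Usage sketch (editor's addition): CFDataGetBytes copies a sub-range into a
 caller-owned buffer, whereas CFDataGetBytePtr returns a pointer into the data's
 own storage that is only valid while the data is alive and unmodified. The
 variable names below are illustrative.
 */
#if 0
static void __CFDataGetBytesExample(CFDataRef d) {
    uint8_t header[4] = { 0 };
    if (CFDataGetLength(d) >= 4) {
        CFDataGetBytes(d, CFRangeMake(0, 4), header);    // copy out the first 4 bytes
    }
    const uint8_t *all = CFDataGetBytePtr(d);            // zero-copy view of the whole buffer
    (void)all;
}
#endif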

/* Allocates new block of data with at least numNewValues more bytes than the current length. If clear is true, the new bytes up to at least the new length will be zeroed. */
static void __CFDataGrow(CFMutableDataRef data, CFIndex numNewValues, Boolean clear) {
    CFIndex oldLength = __CFDataLength(data);
    CFIndex newLength = oldLength + numNewValues;
    if (newLength > CFDATA_MAX_SIZE || newLength < 0) __CFDataHandleOutOfMemory(data, newLength * sizeof(uint8_t));
    CFIndex capacity = __CFDataRoundUpCapacity(newLength);
    CFIndex numBytes = __CFDataNumBytesForCapacity(capacity);
    CFAllocatorRef allocator = CFGetAllocator(data);
    void *bytes = NULL;
    void *oldBytes = data->_bytes;
    Boolean allocateCleared = clear && __CFDataShouldAllocateCleared(data, numBytes);
    if (allocateCleared && !__CFDataUseAllocator(data) && (oldLength == 0 || (newLength / oldLength) > 4)) {
        // If the length that needs to be zeroed is significantly greater than the length of the data, then calloc/memmove is probably more efficient than realloc/memset.
        bytes = __CFDataAllocate(data, numBytes * sizeof(uint8_t), true);
        if (NULL != bytes) {
            memmove(bytes, oldBytes, oldLength);
            __CFDataDeallocate(data);
        }
    }
    if (bytes == NULL) {
        // If the calloc/memmove approach either failed or was never attempted, then realloc.
        allocateCleared = false;
        if (__CFDataUseAllocator(data)) {
            bytes = CFAllocatorReallocate(allocator, oldBytes, numBytes * sizeof(uint8_t), 0);
        } else {
            bytes = realloc(oldBytes, numBytes * sizeof(uint8_t));
        }
    }
    if (NULL == bytes) __CFDataHandleOutOfMemory(data, numBytes * sizeof(uint8_t));
    __CFDataSetCapacity(data, capacity);
    __CFDataSetNumBytes(data, numBytes);
    if (clear && !allocateCleared && oldLength < newLength) memset((uint8_t *)bytes + oldLength, 0, newLength - oldLength);
    __CFDataSetNeedsToZero(data, !allocateCleared);
    __CFAssignWithWriteBarrier((void **)&data->_bytes, bytes);
    if (__CFOASafe) __CFSetLastAllocationEventName(data->_bytes, "CFData (store)");
}

void CFDataSetLength(CFMutableDataRef data, CFIndex newLength) {
    CFIndex oldLength, capacity;
    Boolean isGrowable;
    CF_OBJC_FUNCDISPATCHV(__kCFDataTypeID, void, (NSMutableData *)data, setLength:(NSUInteger)newLength);
    CFAssert1(__CFDataIsMutable(data), __kCFLogAssertion, "%s(): data is immutable", __PRETTY_FUNCTION__);
    oldLength = __CFDataLength(data);
    capacity = __CFDataCapacity(data);
    isGrowable = __CFDataIsGrowable(data);
    if (__CFDataIsMutable(data)) {
        if (newLength < 0) {
            if (isGrowable) {
                __CFDataHandleOutOfMemory(data, newLength);
            } else {
                HALT;
            }
        } else if (capacity < newLength) {
            if (isGrowable) {
                __CFDataGrow(data, newLength - oldLength, true);
            } else {
                CFAssert1(newLength <= __CFDataCapacity(data), __kCFLogAssertion, "%s(): fixed-capacity data is full", __PRETTY_FUNCTION__);
            }
        } else if (oldLength < newLength && __CFDataNeedsToZero(data)) {
            memset(CFDataGetMutableBytePtr(data) + oldLength, 0, newLength - oldLength);
        } else if (newLength < oldLength) {
            __CFDataSetNeedsToZero(data, true);
        }
    }
    __CFDataSetLength(data, newLength);
    __CFDataSetNumBytesUsed(data, newLength);
}

void CFDataIncreaseLength(CFMutableDataRef data, CFIndex extraLength) {
    CF_OBJC_FUNCDISPATCHV(__kCFDataTypeID, void, (NSMutableData *)data, increaseLengthBy:(NSUInteger)extraLength);
    CFAssert1(__CFDataIsMutable(data), __kCFLogAssertion, "%s(): data is immutable", __PRETTY_FUNCTION__);
    if (extraLength < 0) HALT; // Avoid integer overflow.
    CFDataSetLength(data, __CFDataLength(data) + extraLength);
}

void CFDataAppendBytes(CFMutableDataRef data, const uint8_t *bytes, CFIndex length) {
    CF_OBJC_FUNCDISPATCHV(__kCFDataTypeID, void, (NSMutableData *)data, appendBytes:(const void *)bytes length:(NSUInteger)length);
    CFAssert1(__CFDataIsMutable(data), __kCFLogAssertion, "%s(): data is immutable", __PRETTY_FUNCTION__);
    CFDataReplaceBytes(data, CFRangeMake(__CFDataLength(data), 0), bytes, length);
}

void CFDataDeleteBytes(CFMutableDataRef data, CFRange range) {
    CF_OBJC_FUNCDISPATCHV(__kCFDataTypeID, void, (NSMutableData *)data, replaceBytesInRange:NSMakeRange(range.location, range.length) withBytes:NULL length:0);
    CFAssert1(__CFDataIsMutable(data), __kCFLogAssertion, "%s(): data is immutable", __PRETTY_FUNCTION__);
    CFDataReplaceBytes(data, range, NULL, 0);
}

void CFDataReplaceBytes(CFMutableDataRef data, CFRange range, const uint8_t *newBytes, CFIndex newLength) {
    CF_OBJC_FUNCDISPATCHV(__kCFDataTypeID, void, (NSMutableData *)data, replaceBytesInRange:NSMakeRange(range.location, range.length) withBytes:(const void *)newBytes length:(NSUInteger)newLength);
    __CFGenericValidateType(data, __kCFDataTypeID);
    __CFDataValidateRange(data, range, __PRETTY_FUNCTION__);
    CFAssert1(__CFDataIsMutable(data), __kCFLogAssertion, "%s(): data is immutable", __PRETTY_FUNCTION__);
    CFAssert2(0 <= newLength, __kCFLogAssertion, "%s(): newLength (%d) cannot be less than zero", __PRETTY_FUNCTION__, newLength);

    CFIndex len = __CFDataLength(data);
    if (len < 0 || range.length < 0 || newLength < 0) HALT;
    CFIndex newCount = len - range.length + newLength;
    if (newCount < 0) HALT;

    uint8_t *bytePtr = (uint8_t *)CFDataGetMutableBytePtr(data);
    uint8_t *srcBuf = (uint8_t *)newBytes;
    switch (__CFMutableVariety(data)) {
    case kCFMutable:
        if (__CFDataNumBytes(data) < newCount) {
            if (bytePtr && newBytes && newBytes < bytePtr + __CFDataCapacity(data) && bytePtr < newBytes + newLength) {
                srcBuf = (uint8_t *)malloc(newLength * sizeof(uint8_t));
                memmove(srcBuf, newBytes, newLength * sizeof(uint8_t));
            }
            __CFDataGrow(data, newLength - range.length, false);
            bytePtr = (uint8_t *)CFDataGetMutableBytePtr(data);
        }
        break;
    case kCFFixedMutable:
        CFAssert1(newCount <= __CFDataCapacity(data), __kCFLogAssertion, "%s(): fixed-capacity data is full", __PRETTY_FUNCTION__);
        // Continuing after this could cause buffer overruns.
        if (newCount > __CFDataCapacity(data)) HALT;
        break;
    }
    if (newLength != range.length && range.location + range.length < len) {
        memmove(bytePtr + range.location + newLength, bytePtr + range.location + range.length, (len - range.location - range.length) * sizeof(uint8_t));
    }
    if (0 < newLength) {
        memmove(bytePtr + range.location, srcBuf, newLength * sizeof(uint8_t));
    }
    if (srcBuf != newBytes) free(srcBuf);
    __CFDataSetNumBytesUsed(data, newCount);
    __CFDataSetLength(data, newCount);
}
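/*
 Usage sketch (editor's addition): CFDataAppendBytes, CFDataDeleteBytes and
 CFDataSetLength above all funnel through CFDataReplaceBytes; the sequence below
 shows how a growable mutable data is typically built up and edited. Names and
 values are illustrative only.
 */
#if 0
static void __CFMutableDataExample(void) {
    CFMutableDataRef md = CFDataCreateMutable(kCFAllocatorDefault, 0);   // capacity 0 => growable
    const uint8_t chunk[] = { 1, 2, 3, 4, 5, 6 };
    CFDataAppendBytes(md, chunk, sizeof(chunk));             // length 6: 1 2 3 4 5 6
    CFDataDeleteBytes(md, CFRangeMake(1, 2));                // length 4: 1 4 5 6
    const uint8_t patch[] = { 9, 9 };
    CFDataReplaceBytes(md, CFRangeMake(0, 1), patch, 2);     // length 5: 9 9 4 5 6
    CFDataSetLength(md, 8);                                  // grows and zero-fills: 9 9 4 5 6 0 0 0
    CFRelease(md);
}
#endif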

#define REVERSE_BUFFER(type, buf, len) { \
    type tmp; \
    for (int i = 0; i < (len) / 2; i++) { \
        tmp = (buf)[i]; \
        (buf)[i] = (buf)[(len) - i - 1]; \
        (buf)[(len) - i - 1] = tmp; \
    } \
}

static void _computeGoodSubstringShift(const uint8_t *needle, int needleLength, unsigned long shift[], unsigned long suff[]) {
    int f, g, i, j;

    // Compute suffix lengths

    suff[needleLength - 1] = needleLength;
    f = g = needleLength - 1;
    for (i = needleLength - 2; i >= 0; --i) {
        if (i > g && suff[i + needleLength - 1 - f] < i - g)
            suff[i] = suff[i + needleLength - 1 - f];
        else {
            if (i < g)
                g = i;
            f = i;
            while (g >= 0 && needle[g] == needle[g + needleLength - 1 - f])
                --g;
            suff[i] = f - g;
        }
    }

    // Compute shift table

    for (i = 0; i < needleLength; ++i)
        shift[i] = needleLength;
    j = 0;
    for (i = needleLength - 1; i >= 0; --i)
        if (suff[i] == i + 1)
            for (; j < needleLength - 1 - i; ++j)
                if (shift[j] == needleLength)
                    shift[j] = needleLength - 1 - i;
    // Set the amount of shift necessary to move each of the suffix matches found into a position where it overlaps with the suffix. If there are duplicate matches the latest one is the one that should take effect.
    for (i = 0; i <= needleLength - 2; ++i)
        shift[needleLength - 1 - suff[i]] = needleLength - 1 - i;
    // Since the Boyer-Moore algorithm moves the pointer back while scanning substrings, add the distance to the end of the potential substring.
    for (i = 0; i < needleLength - 1; ++i) {
        shift[i] += (needleLength - 1 - i);
    }
}

static const uint8_t *__CFDataSearchBoyerMoore(const CFDataRef data, const uint8_t *haystack, unsigned long haystackLength, const uint8_t *needle, unsigned long needleLength, Boolean backwards) {
    unsigned long badCharacterShift[UCHAR_MAX + 1] = {0};
    unsigned long *goodSubstringShift = (unsigned long *)malloc(needleLength * sizeof(unsigned long));
    unsigned long *suffixLengths = (unsigned long *)malloc(needleLength * sizeof(unsigned long));
    if (!goodSubstringShift || !suffixLengths) {
        __CFDataHandleOutOfMemory(data, needleLength * sizeof(unsigned long));
    }

    if (backwards) {
        for (int i = 0; i < sizeof(badCharacterShift) / sizeof(*badCharacterShift); i++)
            badCharacterShift[i] = needleLength;

        for (int i = needleLength - 1; i >= 0; i--)
            badCharacterShift[needle[i]] = i;

        // To get the correct shift table for backwards search reverse the needle, compute the forwards shift table, and then reverse the result.
        uint8_t *needleCopy = (uint8_t *)malloc(needleLength * sizeof(uint8_t));
        if (!needleCopy) {
            __CFDataHandleOutOfMemory(data, needleLength * sizeof(uint8_t));
        }
        memmove(needleCopy, needle, needleLength);
        REVERSE_BUFFER(uint8_t, needleCopy, needleLength);
        _computeGoodSubstringShift(needleCopy, needleLength, goodSubstringShift, suffixLengths);
        REVERSE_BUFFER(unsigned long, goodSubstringShift, needleLength);
        free(needleCopy);
    } else {
        for (int i = 0; i < sizeof(badCharacterShift) / sizeof(*badCharacterShift); i++)
            badCharacterShift[i] = needleLength;

        for (int i = 0; i < needleLength; i++)
            badCharacterShift[needle[i]] = needleLength - i - 1;

        _computeGoodSubstringShift(needle, needleLength, goodSubstringShift, suffixLengths);
    }
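    /*
     Worked example (editor's note, not original source): for a forward search with
     needle "abc" (needleLength == 3), the loop above produces
         badCharacterShift['a'] == 2, badCharacterShift['b'] == 1, badCharacterShift['c'] == 0,
     and every other byte keeps the default shift of 3, so a mismatch on a byte
     that does not occur in the needle skips past the whole needle at once.
     */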

    const uint8_t *scan_needle;
    const uint8_t *scan_haystack;
    const uint8_t *result = NULL;
    if (backwards) {
        const uint8_t *const end_needle = needle + needleLength;
        scan_needle = needle;
        scan_haystack = haystack + haystackLength - needleLength;
        while (scan_haystack >= haystack && scan_needle < end_needle) {
            if (*scan_haystack == *scan_needle) {
                scan_haystack++;
                scan_needle++;
            } else {
                scan_haystack -= __CFMax(badCharacterShift[*scan_haystack], goodSubstringShift[scan_needle - needle]);
                scan_needle = needle;
            }
        }
        if (scan_needle == end_needle) {
            result = (scan_haystack - needleLength);
        }
    } else {
        const uint8_t *const end_haystack = haystack + haystackLength;
        scan_needle = needle + needleLength - 1;
        scan_haystack = haystack + needleLength - 1;
        while (scan_haystack < end_haystack && scan_needle >= needle) {
            if (*scan_haystack == *scan_needle) {
                scan_haystack--;
                scan_needle--;
            } else {
                scan_haystack += __CFMax(badCharacterShift[*scan_haystack], goodSubstringShift[scan_needle - needle]);
                scan_needle = needle + needleLength - 1;
            }
        }
        if (scan_needle < needle) {
            result = (scan_haystack + 1);
        }
    }

    free(goodSubstringShift);
    free(suffixLengths);

    return result;
}

CFRange _CFDataFindBytes(CFDataRef data, CFDataRef dataToFind, CFRange searchRange, CFDataSearchFlags compareOptions) {
    const uint8_t *fullHaystack = CFDataGetBytePtr(data);
    const uint8_t *needle = CFDataGetBytePtr(dataToFind);
    unsigned long fullHaystackLength = CFDataGetLength(data);
    unsigned long needleLength = CFDataGetLength(dataToFind);

    if (compareOptions & kCFDataSearchAnchored) {
        if (searchRange.length > needleLength) {
            if (compareOptions & kCFDataSearchBackwards) {
                searchRange.location += (searchRange.length - needleLength);
            }
            searchRange.length = needleLength;
        }
    }
    if (searchRange.length > fullHaystackLength - searchRange.location) {
        searchRange.length = fullHaystackLength - searchRange.location;
    }

    if (searchRange.length < needleLength || fullHaystackLength == 0 || needleLength == 0) {
        return CFRangeMake(kCFNotFound, 0);
    }

    const uint8_t *haystack = fullHaystack + searchRange.location;
    const uint8_t *searchResult = __CFDataSearchBoyerMoore(data, haystack, searchRange.length, needle, needleLength, (compareOptions & kCFDataSearchBackwards) != 0);
    CFIndex resultLocation = (searchResult == NULL) ? kCFNotFound : searchRange.location + (searchResult - haystack);

    return CFRangeMake(resultLocation, resultLocation == kCFNotFound ? 0 : needleLength);
}

CFRange CFDataFind(CFDataRef data, CFDataRef dataToFind, CFRange searchRange, CFDataSearchFlags compareOptions) {
    // No objc dispatch
    __CFGenericValidateType(data, __kCFDataTypeID);
    __CFGenericValidateType(dataToFind, __kCFDataTypeID);
    __CFDataValidateRange(data, searchRange, __PRETTY_FUNCTION__);

    return _CFDataFindBytes(data, dataToFind, searchRange, compareOptions);
}
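/*
 Usage sketch (editor's addition): CFDataFind performs the Boyer-Moore byte search
 above within searchRange and returns the matched range, or {kCFNotFound, 0} when
 there is no match. The data contents below are illustrative only.
 */
#if 0
static void __CFDataFindExample(void) {
    const uint8_t hay[] = { 'a', 'b', 'r', 'a', 'c', 'a', 'd', 'a', 'b', 'r', 'a' };
    const uint8_t pin[] = { 'a', 'b', 'r' };
    CFDataRef haystack = CFDataCreate(kCFAllocatorDefault, hay, sizeof(hay));
    CFDataRef needle = CFDataCreate(kCFAllocatorDefault, pin, sizeof(pin));

    CFRange whole = CFRangeMake(0, CFDataGetLength(haystack));
    CFRange first = CFDataFind(haystack, needle, whole, 0);                       // {0, 3}
    CFRange last  = CFDataFind(haystack, needle, whole, kCFDataSearchBackwards);  // {7, 3}
    (void)first; (void)last;

    CFRelease(haystack);
    CFRelease(needle);
}
#endif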

#undef __CFDataValidateRange
#undef __CFGenericValidateMutabilityFlags
#undef INLINE_BYTES_THRESHOLD
#undef CFDATA_MAX_SIZE
#undef REVERSE_BUFFER