]>
Commit | Line | Data |
---|---|---|
b1ab9ed8 | 1 | /* |
d8f41ccd | 2 | * Copyright (c) 2000-2006,2011-2012,2014 Apple Inc. All Rights Reserved. |
b1ab9ed8 A |
3 | * |
4 | * @APPLE_LICENSE_HEADER_START@ | |
5 | * | |
6 | * This file contains Original Code and/or Modifications of Original Code | |
7 | * as defined in and that are subject to the Apple Public Source License | |
8 | * Version 2.0 (the 'License'). You may not use this file except in | |
9 | * compliance with the License. Please obtain a copy of the License at | |
10 | * http://www.opensource.apple.com/apsl/ and read it before using this | |
11 | * file. | |
12 | * | |
13 | * The Original Code and all software distributed under the License are | |
14 | * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER | |
15 | * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES, | |
16 | * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY, | |
17 | * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT. | |
18 | * Please see the License for the specific language governing rights and | |
19 | * limitations under the License. | |
20 | * | |
21 | * @APPLE_LICENSE_HEADER_END@ | |
22 | */ | |
23 | ||
24 | ||
25 | // | |
26 | // walkers - facilities for traversing and manipulating recursive data structures | |
27 | // | |
28 | // Very briefly, this facility allows for deep traversals of (potentially) recursive | |
29 | // data structures through templated structure "walkers." Standard operations include | |
30 | // deep copying to a contiguous memory buffer, size calculation, deep freeing, reconstitution | |
31 | // after relocation (e.g. via IPC), and others. You can add other operations (e.g. scattered deep | |
32 | // copy, debug dumping, etc.) by defining operations classes and applying them to the | |
33 | // existing walkers. You can also extend the reach of the facility to new data structures | |
34 | // by writing appropriate walker functions for them. | |
35 | // | |
36 | // NOTE: We no longer have a default walker for flat structures. You must define | |
37 | // a walk(operate, foo * &) function for every data type encountered during a walk | |
38 | // or you will get compile-time errors. | |
39 | // | |
40 | // For more detailed rules and regulations, see the accompanying documentation. | |
41 | // | |
42 | #ifndef _H_WALKERS | |
43 | #define _H_WALKERS | |
44 | ||
45 | #include <security_utilities/alloc.h> | |
46 | #include <security_utilities/memstreams.h> | |
47 | #include <security_cdsa_utilities/cssmdata.h> | |
48 | #include <security_utilities/debugging.h> | |
49 | #include <set> | |
50 | ||
51 | ||
52 | namespace Security { | |
53 | namespace DataWalkers { | |
54 | ||
55 | #define WALKERDEBUG 0 | |
56 | ||
57 | ||
58 | #if WALKERDEBUG | |
fa7225c8 | 59 | # define DEBUGWALK(who) secinfo("walkers", "walk " who " %s@%p (%ld)", \ |
b1ab9ed8 A |
60 | Debug::typeName(addr).c_str(), addr, size) |
61 | #else | |
62 | # define DEBUGWALK(who) /* nothing */ | |
63 | #endif | |
64 | ||
65 | ||
66 | // | |
67 | // SizeWalker simply walks a structure and calculates how many bytes | |
68 | // CopyWalker would use to make a flat copy. This is naturally at least | |
69 | // the sum of all relevant sizes, but can be more due to alignment and | |
70 | // counting overhead. | |
71 | // | |
class SizeWalker : public LowLevelMemoryUtilities::Writer::Counter {
public:
    // In-line (non-pointer) values live inside their enclosing node's
    // allocation and contribute no extra space of their own.
    template <class T>
    void operator () (T &obj, size_t size = sizeof(T)) { }

    // Each pointed-to node contributes its byte size to the running total
    // (Counter::insert applies any alignment policy — see memstreams.h).
    template <class T>
    void operator () (T *addr, size_t size = sizeof(T))
    { DEBUGWALK("size"); LowLevelMemoryUtilities::Writer::Counter::insert(size); }

    // Variable-length opaque data: counted like any other pointed-to node.
    void blob(void *addr, size_t size)
    { (*this)(addr, size); }

    // Pre-account for extra space (e.g. a leading array of element headers).
    void reserve(size_t space)
    { LowLevelMemoryUtilities::Writer::Counter::insert(space); }

    static const bool needsRelinking = false;	// size-only pass: pointers untouched
    static const bool needsSize = true;		// every node's size must be supplied
};
90 | ||
91 | ||
92 | // | |
93 | // CopyWalker makes a deep, flat copy of a structure. The result will work | |
94 | // just like the original (with all elements recursively copied), except that | |
95 | // it occupies contiguous memory. | |
96 | // | |
class CopyWalker : public LowLevelMemoryUtilities::Writer {
public:
    CopyWalker() { }
    // base: start of the (pre-sized) contiguous destination buffer.
    CopyWalker(void *base) : LowLevelMemoryUtilities::Writer(base) { }

public:
    // In-line values were already copied as part of their enclosing node.
    template <class T>
    void operator () (T &obj, size_t size = sizeof(T))
    { }

    // Pointer case: copy the pointed-to data into the write buffer and
    // relink addr to the new in-buffer location. Null pointers stay null.
    template <class T>
    void operator () (T * &addr, size_t size = sizeof(T))
    {
        DEBUGWALK("copy");
        if (addr)
            addr = reinterpret_cast<T *>(LowLevelMemoryUtilities::Writer::operator () (addr, size));
    }

    // Variable-length opaque data: treated like a sized pointer node.
    template <class T>
    void blob(T * &addr, size_t size)
    { (*this)(addr, size); }

    static const bool needsRelinking = true;	// pointers are rewritten during the walk
    static const bool needsSize = true;		// Writer needs each node's byte size
};
122 | ||
123 | ||
e3d460c9 | 124 | |
b1ab9ed8 A |
125 | // |
126 | // Walk a structure and apply a constant linear shift to all pointers | |
127 | // encountered. This is useful when a structure and its deep components | |
128 | // have been linearly shifted by something (say, an IPC transit). | |
129 | // | |
class ReconstituteWalker {
public:
    // offset: constant byte distance to add to every pointer encountered.
    ReconstituteWalker(off_t offset) : mOffset(offset) { }
    // Convenience: derive the shift from a (new, old) address pair.
    ReconstituteWalker(void *ptr, void *base)
        : mOffset(LowLevelMemoryUtilities::difference(ptr, base)) { }

    // In-line values need no adjustment.
    template <class T>
    void operator () (T &obj, size_t size = sizeof(T))
    { }

    // Shift each non-null pointer by the fixed offset; sizes are irrelevant.
    template <class T>
    void operator () (T * &addr, size_t size = 0)
    {
        DEBUGWALK("reconstitute");
        if (addr)
            addr = LowLevelMemoryUtilities::increment<T>(addr, (ptrdiff_t)mOffset);
    }

    template <class T>
    void blob(T * &addr, size_t size)
    { (*this)(addr, size); }

    static const bool needsRelinking = true;	// pointers are rewritten in place
    static const bool needsSize = false;	// pure pointer arithmetic, no sizes

private:
    off_t mOffset;	// signed linear shift applied to every pointer
};
158 | ||
159 | ||
160 | // | |
161 | // Make an element-by-element copy of a structure. Each pointer followed | |
162 | // uses a separate allocation for its pointed-to storage. | |
163 | // | |
164 | class ChunkCopyWalker { | |
165 | public: | |
166 | ChunkCopyWalker(Allocator &alloc = Allocator::standard()) : allocator(alloc) { } | |
167 | ||
168 | Allocator &allocator; | |
169 | ||
170 | template <class T> | |
171 | void operator () (T &obj, size_t size = sizeof(T)) | |
172 | { } | |
173 | ||
174 | template <class T> | |
175 | void operator () (T * &addr, size_t size = sizeof(T)) | |
176 | { | |
177 | DEBUGWALK("chunkcopy"); | |
178 | #if BUG_GCC | |
179 | T *copy = reinterpret_cast<T *>(allocator.malloc(size)); | |
180 | #else | |
181 | T *copy = allocator.malloc<T>(size); | |
182 | #endif | |
183 | memcpy(copy, addr, size); | |
184 | addr = copy; | |
185 | } | |
186 | ||
187 | template <class T> | |
188 | void blob(T * &addr, size_t size) | |
189 | { (*this)(addr, size); } | |
190 | ||
191 | static const bool needsRelinking = true; | |
192 | static const bool needsSize = true; | |
193 | }; | |
194 | ||
195 | ||
196 | // | |
197 | // Walk a structure and call an Allocator to separate free each node. | |
198 | // This is safe for non-trees (i.e. shared subsidiary nodes); such will | |
199 | // only be freed once. | |
200 | // | |
class ChunkFreeWalker {
public:
    ChunkFreeWalker(Allocator &alloc = Allocator::standard()) : allocator(alloc) { }

    Allocator &allocator;

    // In-line values have no allocation of their own; nothing to free.
    template <class T>
    void operator () (T &obj, size_t size = 0)
    { }

    // Pointed-to nodes are only *collected* here; the actual release happens
    // in free(). The set de-duplicates, so shared subsidiary nodes (non-tree
    // structures) are freed exactly once.
    template <class T>
    void operator () (T *addr, size_t size = 0)
    {
        DEBUGWALK("chunkfree");
        freeSet.insert(addr);
    }

    void blob(void *addr, size_t size)
    { (*this)(addr, 0); }

    // Release all collected nodes through allocator (defined out of line).
    void free();
    ~ChunkFreeWalker() { free(); }

    static const bool needsRelinking = false;	// structure is being destroyed, not fixed up
    static const bool needsSize = false;	// allocator frees by pointer alone

private:
    std::set<void *> freeSet;	// unique node pointers pending release
};
230 | ||
231 | ||
232 | // | |
233 | // Stand-alone operations for a single structure web. | |
234 | // These simply create, use, and discard their operator objects internally. | |
235 | // | |
236 | template <class T> | |
237 | size_t size(T obj) | |
238 | { | |
239 | SizeWalker w; | |
240 | walk(w, obj); | |
241 | return w; | |
242 | } | |
243 | ||
// Special version for const pointers: strip constness and delegate
// (the size walk does not modify the structure).
template <class T>
size_t size(const T *obj)
{ return size(const_cast<T *>(obj)); }
248 | ||
249 | ||
// Deep-copy *obj into the flat buffer at addr, which must hold at least
// size(obj) bytes. Returns the new root pointer inside the buffer
// (walk() relinks obj and all subsidiary pointers into the buffer).
template <class T>
T *copy(const T *obj, void *addr)
{
    if (obj == NULL)
        return NULL;
    CopyWalker w(addr);
    // const_cast: walk mutates the local pointer obj to point into the copy
    walk(w, const_cast<T * &>(obj));
    return const_cast<T *>(obj);
}
259 | ||
260 | template <class T> | |
261 | T *copy(const T *obj, Allocator &alloc, size_t size) | |
262 | { | |
263 | if (obj == NULL) | |
264 | return NULL; | |
265 | return copy(obj, alloc.malloc(size)); | |
266 | } | |
267 | ||
268 | template <class T> | |
269 | T *copy(const T *obj, Allocator &alloc = Allocator::standard()) | |
270 | { | |
271 | return obj ? copy(obj, alloc, size(obj)) : NULL; | |
272 | } | |
273 | ||
274 | ||
// Fix up a structure after a linear move: the structure now lives at obj
// but its internal pointers still reference the old location base.
// Walks from base, shifting every embedded pointer by (obj - base).
// NOTE(review): the walk starts at base (the old frame), not obj — confirm
// against callers before changing.
template <class T>
void relocate(T *obj, T *base)
{
    if (obj) {
        ReconstituteWalker w(LowLevelMemoryUtilities::difference(obj, base));
        walk(w, base);
    }
}
283 | ||
284 | ||
285 | // | |
286 | // chunkCopy and chunkFree can take pointer and non-pointer arguments. | |
287 | // Don't try to declare the T arguments const (overload resolution will | |
288 | // mess you over if you try). Just take const and nonconst Ts and take | |
289 | // the const away internally. | |
290 | // | |
// Deep-copy obj with one separate allocation per pointed-to node.
// Accepts const or non-const T (see comment above); constness is removed
// internally via unconst_ref_cast.
template <class T>
typename Nonconst<T>::Type *chunkCopy(T *obj, Allocator &alloc = Allocator::standard())
{
    if (obj) {
        ChunkCopyWalker w(alloc);
        return walk(w, unconst_ref_cast<T *>(obj));	// walk returns the copied root
    } else
        return NULL;
}
300 | ||
301 | template <class T> | |
302 | T chunkCopy(T obj, Allocator &alloc = Allocator::standard()) | |
303 | { | |
304 | ChunkCopyWalker w(alloc); | |
305 | walk(w, obj); | |
306 | return obj; | |
307 | } | |
308 | ||
// Deep-free a chunk-allocated structure: every node reached by the walk
// (including obj itself, via the pointer overload) is released through alloc.
template <class T>
void chunkFree(T *obj, Allocator &alloc = Allocator::standard())
{
    if (obj) {
        ChunkFreeWalker w(alloc);
        walk(w, unconst_ref_cast<T *>(obj));
    }
}
317 | ||
// Reference form: frees the subsidiary storage reached from obj; obj itself
// is not a pointer node here and so is not released by the walker's
// value overload (it does nothing for in-line values).
template <class T>
void chunkFree(T &obj, Allocator &alloc = Allocator::standard())
{
    ChunkFreeWalker w(alloc);
    walk(w, obj);
}
324 | ||
325 | ||
326 | // | |
327 | // Copier combines SizeWalker and CopyWalker into one operational package. | |
328 | // this is useful if you need both the copy and its size (and don't want | |
329 | // to re-run size()). Copier (like copy()) only applies to one object. | |
330 | // | |
template <class T>
class Copier {
public:
    // Flat-copy a single object; a NULL obj yields a NULL/zero-length Copier.
    Copier(const T *obj, Allocator &alloc = Allocator::standard()) : allocator(alloc)
    {
        if (obj == NULL) {
            mValue = NULL;
            mLength = 0;
        } else {
            mLength = size(const_cast<T *>(obj));	// one sizing pass...
#if BUG_GCC
            mValue = reinterpret_cast<T *>(alloc.malloc(mLength));
#else
            mValue = alloc.malloc<T>(mLength);
#endif
            mValue = copy(obj, mValue);			// ...then one copy pass into the buffer
        }
    }

    // Flat-copy a contiguous vector of count objects: the element array is
    // placed first in the buffer, followed by all subsidiary data.
    Copier(const T *obj, uint32 count, Allocator &alloc = Allocator::standard())
        : allocator(alloc)
    {
        if (obj == NULL) {
            mValue = NULL;
            mLength = 0;
        } else {
            SizeWalker sizer;
            sizer.reserve(sizeof(T) * count);	// initial vector size
            for (uint32 n = 0; n < count; n++)
                walk(sizer, const_cast<T &>(obj[n]));	// dependent data sizes
            mLength = sizer;
#if BUG_GCC
            mValue = reinterpret_cast<T *>(alloc.malloc(mLength));
#else
            mValue = alloc.malloc<T>(mLength);
#endif
            // Subsidiary data is written just past the element array.
            CopyWalker copier(LowLevelMemoryUtilities::increment(mValue, sizeof(T) * count));
            for (uint32 n = 0; n < count; n++) {
                mValue[n] = obj[n];		// shallow-copy the element...
                walk(copier, mValue[n]);	// ...then deep-copy/relink its pointers
            }
        }
    }

    Allocator &allocator;

    // Frees the buffer unless ownership was relinquished via keep().
    ~Copier() { allocator.free(mValue); }

    T *value() const { return mValue; }
    operator T *() const { return value(); }
    size_t length() const { return mLength; }

    // Relinquish ownership: caller takes the buffer; destructor frees nothing.
    T *keep() { T *result = mValue; mValue = NULL; return result; }

private:
    T *mValue;		// flat copy (NULL if none, or after keep())
    size_t mLength;	// total byte size of the flat copy
};
389 | ||
390 | ||
391 | } // end namespace DataWalkers | |
392 | } // end namespace Security | |
393 | ||
394 | #endif //_H_WALKERS |