Security/libsecurity_cdsa_utilities/lib/walkers.h
/*
 * Copyright (c) 2000-2006,2011-2012,2014 Apple Inc. All Rights Reserved.
 *
 * @APPLE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this
 * file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_LICENSE_HEADER_END@
 */


//
// walkers - facilities for traversing and manipulating recursive data structures
//
// Very briefly, this facility allows for deep traversals of (potentially) recursive
// data structures through templated structure "walkers." Standard operations include
// deep copying to a contiguous memory buffer, size calculation, deep freeing, reconstitution
// after relocation (e.g. via IPC), and others. You can add other operations (e.g. scattered deep
// copy, debug dumping, etc.) by defining operations classes and applying them to the
// existing walkers. You can also extend the reach of the facility to new data structures
// by writing appropriate walker functions for them.
//
// NOTE: We no longer have a default walker for flat structures. You must define
// a walk(operate, foo * &) function for every data type encountered during a walk,
// or you will get compile-time errors.
//
// For more detailed rules and regulations, see the accompanying documentation.
//
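// A comment-only sketch of such a walk function, for a hypothetical singly linked
// Node type (illustrative only, not part of this header). A walker function
// typically applies the operation to the node itself first, then recurses into
// each pointer the node contains, and returns the (possibly relinked) pointer:
//
//     struct Node {
//         uint32 value;
//         Node *next;
//     };
//
//     template <class Action>
//     Node *walk(Action &operate, Node * &node)
//     {
//         operate(node);                    // size, copy, or register this node
//         if (node->next)
//             walk(operate, node->next);    // then recurse into subsidiary storage
//         return node;
//     }
//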
#ifndef _H_WALKERS
#define _H_WALKERS

#include <security_utilities/alloc.h>
#include <security_utilities/memstreams.h>
#include <security_cdsa_utilities/cssmdata.h>
#include <security_utilities/debugging.h>
#include <set>


namespace Security {
namespace DataWalkers {

#define WALKERDEBUG 0


#if WALKERDEBUG
# define DEBUGWALK(who)	secdebug("walkers", "walk " who " %s@%p (%ld)", \
		Debug::typeName(addr).c_str(), addr, size)
#else
# define DEBUGWALK(who)	/* nothing */
#endif


//
// SizeWalker simply walks a structure and calculates how many bytes
// CopyWalker would use to make a flat copy. This is naturally at least
// the sum of all relevant sizes, but can be more due to alignment and
// counting overhead.
//
class SizeWalker : public LowLevelMemoryUtilities::Writer::Counter {
public:
    template <class T>
    void operator () (T &obj, size_t size = sizeof(T)) { }

    template <class T>
    void operator () (T *addr, size_t size = sizeof(T))
    { DEBUGWALK("size"); LowLevelMemoryUtilities::Writer::Counter::insert(size); }

    void blob(void *addr, size_t size)
    { (*this)(addr, size); }

    void reserve(size_t space)
    { LowLevelMemoryUtilities::Writer::Counter::insert(space); }

    static const bool needsRelinking = false;
    static const bool needsSize = true;
};
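
//
// A minimal usage sketch (hypothetical names; the size() helper below does exactly
// this): walk the structure with a SizeWalker, then read the accumulated byte count
// out of the Counter base class.
//
//     SizeWalker sizer;
//     walk(sizer, someNode);        // someNode is a Node * (see sketch above)
//     size_t flatBytes = sizer;     // Counter yields the total flat size
//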


//
// CopyWalker makes a deep, flat copy of a structure. The result will work
// just like the original (with all elements recursively copied), except that
// it occupies contiguous memory.
//
class CopyWalker : public LowLevelMemoryUtilities::Writer {
public:
    CopyWalker() { }
    CopyWalker(void *base) : LowLevelMemoryUtilities::Writer(base) { }

public:
    template <class T>
    void operator () (T &obj, size_t size = sizeof(T))
    { }

    template <class T>
    void operator () (T * &addr, size_t size = sizeof(T))
    {
        DEBUGWALK("copy");
        if (addr)
            addr = reinterpret_cast<T *>(LowLevelMemoryUtilities::Writer::operator () (addr, size));
    }

    template <class T>
    void blob(T * &addr, size_t size)
    { (*this)(addr, size); }

    static const bool needsRelinking = true;
    static const bool needsSize = true;
};
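
//
// A minimal usage sketch (hypothetical names; the copy() helpers below wrap this
// pattern): size the structure first, allocate one contiguous block, then walk it
// with a CopyWalker based at that block. Afterwards the walked pointer refers to
// the flat copy.
//
//     size_t flatBytes = size(someNode);        // see SizeWalker above
//     void *block = myAllocator.malloc(flatBytes);
//     CopyWalker copier(block);
//     walk(copier, someNode);                   // someNode now points into block
//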


//
// Walk a structure and apply a constant linear shift to all pointers
// encountered. This is useful when a structure and its deep components
// have been linearly shifted by something (say, an IPC transit).
//
class ReconstituteWalker {
public:
    ReconstituteWalker(off_t offset) : mOffset(offset) { }
    ReconstituteWalker(void *ptr, void *base)
        : mOffset(LowLevelMemoryUtilities::difference(ptr, base)) { }

    template <class T>
    void operator () (T &obj, size_t size = sizeof(T))
    { }

    template <class T>
    void operator () (T * &addr, size_t size = 0)
    {
        DEBUGWALK("reconstitute");
        if (addr)
            addr = LowLevelMemoryUtilities::increment<T>(addr, (ptrdiff_t)mOffset);
    }

    template <class T>
    void blob(T * &addr, size_t size)
    { (*this)(addr, size); }

    static const bool needsRelinking = true;
    static const bool needsSize = false;

private:
    off_t mOffset;
};
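
//
// A minimal usage sketch (hypothetical names; the relocate() helper below derives
// the shift from the old and new base addresses): assume the whole structure web
// was moved by a fixed distance and "oldRoot" still holds its pre-move address.
//
//     ReconstituteWalker shifter(newBase, oldBase);   // shift = newBase - oldBase
//     walk(shifter, oldRoot);   // oldRoot now points at the new location, and every
//                               // interior pointer has been shifted along with it
//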


//
// Make an element-by-element copy of a structure. Each pointer followed
// uses a separate allocation for its pointed-to storage.
//
class ChunkCopyWalker {
public:
    ChunkCopyWalker(Allocator &alloc = Allocator::standard()) : allocator(alloc) { }

    Allocator &allocator;

    template <class T>
    void operator () (T &obj, size_t size = sizeof(T))
    { }

    template <class T>
    void operator () (T * &addr, size_t size = sizeof(T))
    {
        DEBUGWALK("chunkcopy");
#if BUG_GCC
        T *copy = reinterpret_cast<T *>(allocator.malloc(size));
#else
        T *copy = allocator.malloc<T>(size);
#endif
        memcpy(copy, addr, size);
        addr = copy;
    }

    template <class T>
    void blob(T * &addr, size_t size)
    { (*this)(addr, size); }

    static const bool needsRelinking = true;
    static const bool needsSize = true;
};


//
// Walk a structure and call an Allocator to separately free each node.
// This is safe for non-trees (i.e. shared subsidiary nodes); such nodes
// will only be freed once.
//
class ChunkFreeWalker {
public:
    ChunkFreeWalker(Allocator &alloc = Allocator::standard()) : allocator(alloc) { }

    Allocator &allocator;

    template <class T>
    void operator () (T &obj, size_t size = 0)
    { }

    template <class T>
    void operator () (T *addr, size_t size = 0)
    {
        DEBUGWALK("chunkfree");
        freeSet.insert(addr);
    }

    void blob(void *addr, size_t size)
    { (*this)(addr, 0); }

    void free();
    ~ChunkFreeWalker() { free(); }

    static const bool needsRelinking = false;
    static const bool needsSize = false;

private:
    std::set<void *> freeSet;
};


//
// Stand-alone operations for a single structure web.
// These simply create, use, and discard their operator objects internally.
//
template <class T>
size_t size(T obj)
{
    SizeWalker w;
    walk(w, obj);
    return w;
}

// Special version for const pointers
template <class T>
size_t size(const T *obj)
{ return size(const_cast<T *>(obj)); }


template <class T>
T *copy(const T *obj, void *addr)
{
    if (obj == NULL)
        return NULL;
    CopyWalker w(addr);
    walk(w, const_cast<T * &>(obj));
    return const_cast<T *>(obj);
}

template <class T>
T *copy(const T *obj, Allocator &alloc, size_t size)
{
    if (obj == NULL)
        return NULL;
    return copy(obj, alloc.malloc(size));
}

template <class T>
T *copy(const T *obj, Allocator &alloc = Allocator::standard())
{
    return obj ? copy(obj, alloc, size(obj)) : NULL;
}


template <class T>
void relocate(T *obj, T *base)
{
    if (obj) {
        ReconstituteWalker w(LowLevelMemoryUtilities::difference(obj, base));
        walk(w, base);
    }
}
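
//
// A usage sketch for relocate() (hypothetical names): after a flat copy arrives over
// IPC, pass its local address together with the base address it had on the sending
// side (transmitted alongside the data); afterwards the interior pointers are valid
// in the local address space.
//
//     relocate(receivedNode, senderBase);
//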


//
// chunkCopy and chunkFree can take pointer and non-pointer arguments.
// Don't try to declare the T arguments const (overload resolution will
// mess you over if you try). Just take const and nonconst Ts and take
// the const away internally.
//
template <class T>
typename Nonconst<T>::Type *chunkCopy(T *obj, Allocator &alloc = Allocator::standard())
{
    if (obj) {
        ChunkCopyWalker w(alloc);
        return walk(w, unconst_ref_cast<T *>(obj));
    } else
        return NULL;
}

template <class T>
T chunkCopy(T obj, Allocator &alloc = Allocator::standard())
{
    ChunkCopyWalker w(alloc);
    walk(w, obj);
    return obj;
}

template <class T>
void chunkFree(T *obj, Allocator &alloc = Allocator::standard())
{
    if (obj) {
        ChunkFreeWalker w(alloc);
        walk(w, unconst_ref_cast<T *>(obj));
    }
}

template <class T>
void chunkFree(const T &obj, Allocator &alloc = Allocator::standard())
{
    ChunkFreeWalker w(alloc);
    walk(w, obj);
}
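
//
// A usage sketch (hypothetical names, reusing the Node type sketched at the top of
// this file): chunkCopy() deep-copies node by node out of the given Allocator, and
// chunkFree() later releases each of those nodes (each exactly once) through the
// same Allocator.
//
//     Node *mine = chunkCopy(theirs, myAllocator);
//     // ... use mine independently of theirs ...
//     chunkFree(mine, myAllocator);
//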


//
// Copier combines SizeWalker and CopyWalker into one operational package.
// This is useful if you need both the copy and its size (and don't want
// to re-run size()). Copier (like copy()) only applies to one object.
//
template <class T>
class Copier {
public:
    Copier(const T *obj, Allocator &alloc = Allocator::standard()) : allocator(alloc)
    {
        if (obj == NULL) {
            mValue = NULL;
            mLength = 0;
        } else {
            mLength = size(const_cast<T *>(obj));
#if BUG_GCC
            mValue = reinterpret_cast<T *>(alloc.malloc(mLength));
#else
            mValue = alloc.malloc<T>(mLength);
#endif
            mValue = copy(obj, mValue);
        }
    }

    Copier(const T *obj, uint32 count, Allocator &alloc = Allocator::standard())
        : allocator(alloc)
    {
        if (obj == NULL) {
            mValue = NULL;
            mLength = 0;
        } else {
            SizeWalker sizer;
            sizer.reserve(sizeof(T) * count);	// initial vector size
            for (uint32 n = 0; n < count; n++)
                walk(sizer, const_cast<T &>(obj[n]));	// dependent data sizes
            mLength = sizer;
#if BUG_GCC
            mValue = reinterpret_cast<T *>(alloc.malloc(mLength));
#else
            mValue = alloc.malloc<T>(mLength);
#endif
            CopyWalker copier(LowLevelMemoryUtilities::increment(mValue, sizeof(T) * count));
            for (uint32 n = 0; n < count; n++) {
                mValue[n] = obj[n];
                walk(copier, mValue[n]);
            }
        }
    }

    Allocator &allocator;

    ~Copier() { allocator.free(mValue); }

    T *value() const { return mValue; }
    operator T *() const { return value(); }
    size_t length() const { return mLength; }

    T *keep() { T *result = mValue; mValue = NULL; return result; }

private:
    T *mValue;
    size_t mLength;
};
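
//
// A usage sketch (hypothetical names, reusing the Node type sketched at the top of
// this file): Copier sizes and flat-copies in one step, reports the flat size via
// length(), and frees its buffer on destruction unless keep() is called first.
//
//     Copier<Node> flat(someNode, myAllocator);
//     transmit(flat.value(), flat.length());    // "transmit" stands in for any consumer
//     // the flat buffer is released when "flat" goes out of scope
//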


} // end namespace DataWalkers
} // end namespace Security

#endif //_H_WALKERS