/*
 * Copyright (C) 2005, 2006, 2007, 2008 Apple Inc. All rights reserved.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public License
 * along with this library; see the file COPYING.LIB. If not, write to
 * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 *
 */

#ifndef WTF_Vector_h
#define WTF_Vector_h

#include "Assertions.h"
#include "FastMalloc.h"
#include "Noncopyable.h"
#include "VectorTraits.h"
#include <algorithm>
#include <limits>
#include <stdlib.h>
#include <string.h>
#include <utility>

namespace WTF {

    using std::min;
    using std::max;

    // WTF_ALIGN_OF / WTF_ALIGNED
    #if COMPILER(GCC) || COMPILER(MINGW) || COMPILER(RVCT) || COMPILER(WINSCW)
        #define WTF_ALIGN_OF(type) __alignof__(type)
        #define WTF_ALIGNED(variable_type, variable, n) variable_type variable __attribute__((__aligned__(n)))
    #elif COMPILER(MSVC)
        #define WTF_ALIGN_OF(type) __alignof(type)
        #define WTF_ALIGNED(variable_type, variable, n) __declspec(align(n)) variable_type variable
    #else
        #error WTF_ALIGN macros need alignment control.
    #endif
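
    // Expansion sketch (illustrative only, derived from the definitions above):
    // on a GCC-compatible compiler,
    //     WTF_ALIGNED(char, buf[64], 16)
    // expands to
    //     char buf[64] __attribute__((__aligned__(16)))
    // while on MSVC it expands to
    //     __declspec(align(16)) char buf[64]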

    #if COMPILER(GCC) && (((__GNUC__ * 100) + __GNUC_MINOR__) >= 303)
        typedef char __attribute__((__may_alias__)) AlignedBufferChar;
    #else
        typedef char AlignedBufferChar;
    #endif

    template <size_t size, size_t alignment> struct AlignedBuffer;
    template <size_t size> struct AlignedBuffer<size, 1> { AlignedBufferChar buffer[size]; };
    template <size_t size> struct AlignedBuffer<size, 2> { WTF_ALIGNED(AlignedBufferChar, buffer[size], 2); };
    template <size_t size> struct AlignedBuffer<size, 4> { WTF_ALIGNED(AlignedBufferChar, buffer[size], 4); };
    template <size_t size> struct AlignedBuffer<size, 8> { WTF_ALIGNED(AlignedBufferChar, buffer[size], 8); };
    template <size_t size> struct AlignedBuffer<size, 16> { WTF_ALIGNED(AlignedBufferChar, buffer[size], 16); };
    template <size_t size> struct AlignedBuffer<size, 32> { WTF_ALIGNED(AlignedBufferChar, buffer[size], 32); };
    template <size_t size> struct AlignedBuffer<size, 64> { WTF_ALIGNED(AlignedBufferChar, buffer[size], 64); };

    template <bool needsDestruction, typename T>
    class VectorDestructor;

    template<typename T>
    struct VectorDestructor<false, T>
    {
        static void destruct(T*, T*) {}
    };

    template<typename T>
    struct VectorDestructor<true, T>
    {
        static void destruct(T* begin, T* end)
        {
            for (T* cur = begin; cur != end; ++cur)
                cur->~T();
        }
    };

    template <bool needsInitialization, bool canInitializeWithMemset, typename T>
    class VectorInitializer;

    template<bool ignore, typename T>
    struct VectorInitializer<false, ignore, T>
    {
        static void initialize(T*, T*) {}
    };

    template<typename T>
    struct VectorInitializer<true, false, T>
    {
        static void initialize(T* begin, T* end)
        {
            for (T* cur = begin; cur != end; ++cur)
                new (cur) T;
        }
    };

    template<typename T>
    struct VectorInitializer<true, true, T>
    {
        static void initialize(T* begin, T* end)
        {
            memset(begin, 0, reinterpret_cast<char*>(end) - reinterpret_cast<char*>(begin));
        }
    };

    template <bool canMoveWithMemcpy, typename T>
    class VectorMover;

    template<typename T>
    struct VectorMover<false, T>
    {
        static void move(const T* src, const T* srcEnd, T* dst)
        {
            while (src != srcEnd) {
                new (dst) T(*src);
                src->~T();
                ++dst;
                ++src;
            }
        }
        static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
        {
            if (src > dst)
                move(src, srcEnd, dst);
            else {
                T* dstEnd = dst + (srcEnd - src);
                while (src != srcEnd) {
                    --srcEnd;
                    --dstEnd;
                    new (dstEnd) T(*srcEnd);
                    srcEnd->~T();
                }
            }
        }
    };

    template<typename T>
    struct VectorMover<true, T>
    {
        static void move(const T* src, const T* srcEnd, T* dst)
        {
            memcpy(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
        }
        static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
        {
            memmove(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
        }
    };

    template <bool canCopyWithMemcpy, typename T>
    class VectorCopier;

    template<typename T>
    struct VectorCopier<false, T>
    {
        static void uninitializedCopy(const T* src, const T* srcEnd, T* dst)
        {
            while (src != srcEnd) {
                new (dst) T(*src);
                ++dst;
                ++src;
            }
        }
    };

    template<typename T>
    struct VectorCopier<true, T>
    {
        static void uninitializedCopy(const T* src, const T* srcEnd, T* dst)
        {
            memcpy(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
        }
    };

    template <bool canFillWithMemset, typename T>
    class VectorFiller;

    template<typename T>
    struct VectorFiller<false, T>
    {
        static void uninitializedFill(T* dst, T* dstEnd, const T& val)
        {
            while (dst != dstEnd) {
                new (dst) T(val);
                ++dst;
            }
        }
    };

    template<typename T>
    struct VectorFiller<true, T>
    {
        static void uninitializedFill(T* dst, T* dstEnd, const T& val)
        {
            ASSERT(sizeof(T) == sizeof(char));
            memset(dst, val, dstEnd - dst);
        }
    };

    template<bool canCompareWithMemcmp, typename T>
    class VectorComparer;

    template<typename T>
    struct VectorComparer<false, T>
    {
        static bool compare(const T* a, const T* b, size_t size)
        {
            for (size_t i = 0; i < size; ++i)
                if (a[i] != b[i])
                    return false;
            return true;
        }
    };

    template<typename T>
    struct VectorComparer<true, T>
    {
        static bool compare(const T* a, const T* b, size_t size)
        {
            return memcmp(a, b, sizeof(T) * size) == 0;
        }
    };

    template<typename T>
    struct VectorTypeOperations
    {
        static void destruct(T* begin, T* end)
        {
            VectorDestructor<VectorTraits<T>::needsDestruction, T>::destruct(begin, end);
        }

        static void initialize(T* begin, T* end)
        {
            VectorInitializer<VectorTraits<T>::needsInitialization, VectorTraits<T>::canInitializeWithMemset, T>::initialize(begin, end);
        }

        static void move(const T* src, const T* srcEnd, T* dst)
        {
            VectorMover<VectorTraits<T>::canMoveWithMemcpy, T>::move(src, srcEnd, dst);
        }

        static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
        {
            VectorMover<VectorTraits<T>::canMoveWithMemcpy, T>::moveOverlapping(src, srcEnd, dst);
        }

        static void uninitializedCopy(const T* src, const T* srcEnd, T* dst)
        {
            VectorCopier<VectorTraits<T>::canCopyWithMemcpy, T>::uninitializedCopy(src, srcEnd, dst);
        }

        static void uninitializedFill(T* dst, T* dstEnd, const T& val)
        {
            VectorFiller<VectorTraits<T>::canFillWithMemset, T>::uninitializedFill(dst, dstEnd, val);
        }

        static bool compare(const T* a, const T* b, size_t size)
        {
            return VectorComparer<VectorTraits<T>::canCompareWithMemcmp, T>::compare(a, b, size);
        }
    };
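
    // Dispatch sketch (illustrative; the boolean trait values come from VectorTraits.h):
    // a plain POD element type such as int reports canMoveWithMemcpy, so
    // VectorTypeOperations<int>::move() resolves to the single-memcpy
    // VectorMover<true, int>. A type whose traits do not make that guarantee falls
    // back to the loop-based VectorMover<false, T>, which copy-constructs into the
    // destination and then destroys each source element individually.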

    template<typename T>
    class VectorBufferBase : Noncopyable {
    public:
        void allocateBuffer(size_t newCapacity)
        {
            m_capacity = newCapacity;
            if (newCapacity > std::numeric_limits<size_t>::max() / sizeof(T))
                CRASH();
            m_buffer = static_cast<T*>(fastMalloc(newCapacity * sizeof(T)));
        }

        void deallocateBuffer(T* bufferToDeallocate)
        {
            if (m_buffer == bufferToDeallocate) {
                m_buffer = 0;
                m_capacity = 0;
            }
            fastFree(bufferToDeallocate);
        }

        T* buffer() { return m_buffer; }
        const T* buffer() const { return m_buffer; }
        T** bufferSlot() { return &m_buffer; }
        size_t capacity() const { return m_capacity; }

        T* releaseBuffer()
        {
            T* buffer = m_buffer;
            m_buffer = 0;
            m_capacity = 0;
            return buffer;
        }

    protected:
        VectorBufferBase()
            : m_buffer(0)
            , m_capacity(0)
        {
        }

        VectorBufferBase(T* buffer, size_t capacity)
            : m_buffer(buffer)
            , m_capacity(capacity)
        {
        }

        ~VectorBufferBase()
        {
            // FIXME: It would be nice to find a way to ASSERT that m_buffer hasn't leaked here.
        }

        T* m_buffer;
        size_t m_capacity;
    };

    template<typename T, size_t inlineCapacity>
    class VectorBuffer;

    template<typename T>
    class VectorBuffer<T, 0> : private VectorBufferBase<T> {
    private:
        typedef VectorBufferBase<T> Base;
    public:
        VectorBuffer()
        {
        }

        VectorBuffer(size_t capacity)
        {
            allocateBuffer(capacity);
        }

        ~VectorBuffer()
        {
            deallocateBuffer(buffer());
        }

        void swap(VectorBuffer<T, 0>& other)
        {
            std::swap(m_buffer, other.m_buffer);
            std::swap(m_capacity, other.m_capacity);
        }

        void restoreInlineBufferIfNeeded() { }

        using Base::allocateBuffer;
        using Base::deallocateBuffer;

        using Base::buffer;
        using Base::bufferSlot;
        using Base::capacity;

        using Base::releaseBuffer;
    private:
        using Base::m_buffer;
        using Base::m_capacity;
    };

    template<typename T, size_t inlineCapacity>
    class VectorBuffer : private VectorBufferBase<T> {
    private:
        typedef VectorBufferBase<T> Base;
    public:
        VectorBuffer()
            : Base(inlineBuffer(), inlineCapacity)
        {
        }

        VectorBuffer(size_t capacity)
            : Base(inlineBuffer(), inlineCapacity)
        {
            allocateBuffer(capacity);
        }

        ~VectorBuffer()
        {
            deallocateBuffer(buffer());
        }

        void allocateBuffer(size_t newCapacity)
        {
            if (newCapacity > inlineCapacity)
                Base::allocateBuffer(newCapacity);
        }

        void deallocateBuffer(T* bufferToDeallocate)
        {
            if (bufferToDeallocate == inlineBuffer())
                return;
            Base::deallocateBuffer(bufferToDeallocate);
        }

        void restoreInlineBufferIfNeeded()
        {
            if (m_buffer)
                return;
            m_buffer = inlineBuffer();
            m_capacity = inlineCapacity;
        }

        using Base::buffer;
        using Base::bufferSlot;
        using Base::capacity;

        T* releaseBuffer()
        {
            if (buffer() == inlineBuffer())
                return 0;
            return Base::releaseBuffer();
        }

    private:
        using Base::m_buffer;
        using Base::m_capacity;

        static const size_t m_inlineBufferSize = inlineCapacity * sizeof(T);
        T* inlineBuffer() { return reinterpret_cast<T*>(m_inlineBuffer.buffer); }

        AlignedBuffer<m_inlineBufferSize, WTF_ALIGN_OF(T)> m_inlineBuffer;
    };
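
    // Note on the inline-capacity case (illustrative): a buffer declared with a
    // non-zero inlineCapacity starts out pointing at m_inlineBuffer above, so no
    // heap allocation occurs until the capacity is exceeded. For example, with a
    // hypothetical call site:
    //
    //     Vector<int, 16> ids;  // storage for 16 ints lives inside the object itself
    //     ids.append(1);        // no fastMalloc call yet; the 17th append allocates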

    template<typename T, size_t inlineCapacity = 0>
    class Vector {
    private:
        typedef VectorBuffer<T, inlineCapacity> Buffer;
        typedef VectorTypeOperations<T> TypeOperations;

    public:
        typedef T ValueType;

        typedef T* iterator;
        typedef const T* const_iterator;

        Vector()
            : m_size(0)
        {
        }

        explicit Vector(size_t size)
            : m_size(size)
            , m_buffer(size)
        {
            if (begin())
                TypeOperations::initialize(begin(), end());
        }

        ~Vector()
        {
            if (m_size) shrink(0);
        }

        Vector(const Vector&);
        template<size_t otherCapacity>
        Vector(const Vector<T, otherCapacity>&);

        Vector& operator=(const Vector&);
        template<size_t otherCapacity>
        Vector& operator=(const Vector<T, otherCapacity>&);

        size_t size() const { return m_size; }
        size_t capacity() const { return m_buffer.capacity(); }
        bool isEmpty() const { return !size(); }

        T& at(size_t i)
        {
            ASSERT(i < size());
            return m_buffer.buffer()[i];
        }
        const T& at(size_t i) const
        {
            ASSERT(i < size());
            return m_buffer.buffer()[i];
        }

        T& operator[](size_t i) { return at(i); }
        const T& operator[](size_t i) const { return at(i); }

        T* data() { return m_buffer.buffer(); }
        const T* data() const { return m_buffer.buffer(); }
        T** dataSlot() { return m_buffer.bufferSlot(); }

        iterator begin() { return data(); }
        iterator end() { return begin() + m_size; }
        const_iterator begin() const { return data(); }
        const_iterator end() const { return begin() + m_size; }

        T& first() { return at(0); }
        const T& first() const { return at(0); }
        T& last() { return at(size() - 1); }
        const T& last() const { return at(size() - 1); }

        template<typename U> size_t find(const U&) const;

        void shrink(size_t size);
        void grow(size_t size);
        void resize(size_t size);
        void reserveCapacity(size_t newCapacity);
        void shrinkCapacity(size_t newCapacity);
        void shrinkToFit() { shrinkCapacity(size()); }

        void clear() { shrinkCapacity(0); }

        template<typename U> void append(const U*, size_t);
        template<typename U> void append(const U&);
        template<typename U> void uncheckedAppend(const U& val);
        template<size_t otherCapacity> void append(const Vector<T, otherCapacity>&);

        template<typename U> void insert(size_t position, const U*, size_t);
        template<typename U> void insert(size_t position, const U&);
        template<typename U, size_t c> void insert(size_t position, const Vector<U, c>&);

        template<typename U> void prepend(const U*, size_t);
        template<typename U> void prepend(const U&);
        template<typename U, size_t c> void prepend(const Vector<U, c>&);

        void remove(size_t position);
        void remove(size_t position, size_t length);

        void removeLast()
        {
            ASSERT(!isEmpty());
            shrink(size() - 1);
        }

        Vector(size_t size, const T& val)
            : m_size(size)
            , m_buffer(size)
        {
            if (begin())
                TypeOperations::uninitializedFill(begin(), end(), val);
        }

        void fill(const T&, size_t);
        void fill(const T& val) { fill(val, size()); }

        template<typename Iterator> void appendRange(Iterator start, Iterator end);

        T* releaseBuffer();

        void swap(Vector<T, inlineCapacity>& other)
        {
            std::swap(m_size, other.m_size);
            m_buffer.swap(other.m_buffer);
        }

    private:
        void expandCapacity(size_t newMinCapacity);
        const T* expandCapacity(size_t newMinCapacity, const T*);
        template<typename U> U* expandCapacity(size_t newMinCapacity, U*);

        size_t m_size;
        Buffer m_buffer;
    };

    template<typename T, size_t inlineCapacity>
    Vector<T, inlineCapacity>::Vector(const Vector& other)
        : m_size(other.size())
        , m_buffer(other.capacity())
    {
        if (begin())
            TypeOperations::uninitializedCopy(other.begin(), other.end(), begin());
    }

    template<typename T, size_t inlineCapacity>
    template<size_t otherCapacity>
    Vector<T, inlineCapacity>::Vector(const Vector<T, otherCapacity>& other)
        : m_size(other.size())
        , m_buffer(other.capacity())
    {
        if (begin())
            TypeOperations::uninitializedCopy(other.begin(), other.end(), begin());
    }

    template<typename T, size_t inlineCapacity>
    Vector<T, inlineCapacity>& Vector<T, inlineCapacity>::operator=(const Vector<T, inlineCapacity>& other)
    {
        if (&other == this)
            return *this;

        if (size() > other.size())
            shrink(other.size());
        else if (other.size() > capacity()) {
            clear();
            reserveCapacity(other.size());
            if (!begin())
                return *this;
        }

        std::copy(other.begin(), other.begin() + size(), begin());
        TypeOperations::uninitializedCopy(other.begin() + size(), other.end(), end());
        m_size = other.size();

        return *this;
    }

    template<typename T, size_t inlineCapacity>
    template<size_t otherCapacity>
    Vector<T, inlineCapacity>& Vector<T, inlineCapacity>::operator=(const Vector<T, otherCapacity>& other)
    {
        if (&other == this)
            return *this;

        if (size() > other.size())
            shrink(other.size());
        else if (other.size() > capacity()) {
            clear();
            reserveCapacity(other.size());
            if (!begin())
                return *this;
        }

        std::copy(other.begin(), other.begin() + size(), begin());
        TypeOperations::uninitializedCopy(other.begin() + size(), other.end(), end());
        m_size = other.size();

        return *this;
    }

    template<typename T, size_t inlineCapacity>
    void Vector<T, inlineCapacity>::fill(const T& val, size_t newSize)
    {
        if (size() > newSize)
            shrink(newSize);
        else if (newSize > capacity()) {
            clear();
            reserveCapacity(newSize);
            if (!begin())
                return;
        }

        std::fill(begin(), end(), val);
        TypeOperations::uninitializedFill(end(), begin() + newSize, val);
        m_size = newSize;
    }

    template<typename T, size_t inlineCapacity>
    template<typename Iterator>
    void Vector<T, inlineCapacity>::appendRange(Iterator start, Iterator end)
    {
        for (Iterator it = start; it != end; ++it)
            append(*it);
    }

    template<typename T, size_t inlineCapacity>
    void Vector<T, inlineCapacity>::expandCapacity(size_t newMinCapacity)
    {
        reserveCapacity(max(newMinCapacity, max(static_cast<size_t>(16), capacity() + capacity() / 4 + 1)));
    }
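
    // Growth policy note (illustrative): when a vector outgrows its capacity it is
    // expanded to at least 16 elements, and otherwise grows geometrically by about
    // 25% (capacity + capacity / 4 + 1). As a result, n consecutive append() calls
    // trigger only O(log n) reallocations, giving amortized O(1) work per element.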

    template<typename T, size_t inlineCapacity>
    const T* Vector<T, inlineCapacity>::expandCapacity(size_t newMinCapacity, const T* ptr)
    {
        if (ptr < begin() || ptr >= end()) {
            expandCapacity(newMinCapacity);
            return ptr;
        }
        size_t index = ptr - begin();
        expandCapacity(newMinCapacity);
        return begin() + index;
    }

    template<typename T, size_t inlineCapacity> template<typename U>
    inline U* Vector<T, inlineCapacity>::expandCapacity(size_t newMinCapacity, U* ptr)
    {
        expandCapacity(newMinCapacity);
        return ptr;
    }

    template<typename T, size_t inlineCapacity>
    void Vector<T, inlineCapacity>::resize(size_t size)
    {
        if (size <= m_size)
            TypeOperations::destruct(begin() + size, end());
        else {
            if (size > capacity())
                expandCapacity(size);
            if (begin())
                TypeOperations::initialize(end(), begin() + size);
        }

        m_size = size;
    }

    template<typename T, size_t inlineCapacity>
    void Vector<T, inlineCapacity>::shrink(size_t size)
    {
        ASSERT(size <= m_size);
        TypeOperations::destruct(begin() + size, end());
        m_size = size;
    }

    template<typename T, size_t inlineCapacity>
    void Vector<T, inlineCapacity>::grow(size_t size)
    {
        ASSERT(size >= m_size);
        if (size > capacity())
            expandCapacity(size);
        if (begin())
            TypeOperations::initialize(end(), begin() + size);
        m_size = size;
    }

    template<typename T, size_t inlineCapacity>
    void Vector<T, inlineCapacity>::reserveCapacity(size_t newCapacity)
    {
        if (newCapacity <= capacity())
            return;
        T* oldBuffer = begin();
        T* oldEnd = end();
        m_buffer.allocateBuffer(newCapacity);
        if (begin())
            TypeOperations::move(oldBuffer, oldEnd, begin());
        m_buffer.deallocateBuffer(oldBuffer);
    }

    template<typename T, size_t inlineCapacity>
    void Vector<T, inlineCapacity>::shrinkCapacity(size_t newCapacity)
    {
        if (newCapacity >= capacity())
            return;

        if (newCapacity < size())
            shrink(newCapacity);

        T* oldBuffer = begin();
        if (newCapacity > 0) {
            T* oldEnd = end();
            m_buffer.allocateBuffer(newCapacity);
            if (begin() != oldBuffer)
                TypeOperations::move(oldBuffer, oldEnd, begin());
        }

        m_buffer.deallocateBuffer(oldBuffer);
        m_buffer.restoreInlineBufferIfNeeded();
    }

    // Templatizing these is better than just letting the conversion happen implicitly,
    // because for instance it allows a PassRefPtr to be appended to a RefPtr vector
    // without refcount thrash.
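    //
    // Illustrative sketch (Node is a hypothetical RefCounted class, not something
    // defined in this header): because append(const U&) constructs the element
    // directly from the argument type, a PassRefPtr<Node> argument builds the
    // stored RefPtr<Node> in place, transferring the reference instead of
    // ref'ing and deref'ing through a temporary:
    //
    //     Vector<RefPtr<Node> > nodes;
    //     nodes.append(adoptRef(new Node));  // U = PassRefPtr<Node>, T = RefPtr<Node>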

    template<typename T, size_t inlineCapacity> template<typename U>
    void Vector<T, inlineCapacity>::append(const U* data, size_t dataSize)
    {
        size_t newSize = m_size + dataSize;
        if (newSize > capacity()) {
            data = expandCapacity(newSize, data);
            if (!begin())
                return;
        }
        T* dest = end();
        for (size_t i = 0; i < dataSize; ++i)
            new (&dest[i]) T(data[i]);
        m_size = newSize;
    }

    template<typename T, size_t inlineCapacity> template<typename U>
    inline void Vector<T, inlineCapacity>::append(const U& val)
    {
        const U* ptr = &val;
        if (size() == capacity()) {
            ptr = expandCapacity(size() + 1, ptr);
            if (!begin())
                return;
        }

#if COMPILER(MSVC7)
        // FIXME: MSVC7 generates compilation errors when trying to assign
        // a pointer to a Vector of its base class (i.e. can't downcast). So far
        // I've been unable to determine any logical reason for this, so I can
        // only assume it is a bug with the compiler. Casting is a bad solution,
        // however, because it subverts implicit conversions, so a better
        // one is needed.
        new (end()) T(static_cast<T>(*ptr));
#else
        new (end()) T(*ptr);
#endif
        ++m_size;
    }

    // This version of append saves a branch in the case where you know that the
    // vector's capacity is large enough for the append to succeed.
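    //
    // Minimal usage sketch (newValues and count are hypothetical): reserve once,
    // then append without the per-element capacity check:
    //
    //     vector.reserveCapacity(vector.size() + count);
    //     for (size_t i = 0; i < count; ++i)
    //         vector.uncheckedAppend(newValues[i]);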

    template<typename T, size_t inlineCapacity> template<typename U>
    inline void Vector<T, inlineCapacity>::uncheckedAppend(const U& val)
    {
        ASSERT(size() < capacity());
        const U* ptr = &val;
        new (end()) T(*ptr);
        ++m_size;
    }

    // This method should not be called append, a better name would be appendElements.
    // It could also be eliminated entirely, and call sites could just use
    // appendRange(val.begin(), val.end()).
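    //
    // In other words (illustrative), for vectors a and b of the same element type:
    //
    //     a.append(b);                        // this overload; expands capacity at most once
    //     a.appendRange(b.begin(), b.end());  // equivalent element-by-element form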
    template<typename T, size_t inlineCapacity> template<size_t otherCapacity>
    inline void Vector<T, inlineCapacity>::append(const Vector<T, otherCapacity>& val)
    {
        append(val.begin(), val.size());
    }

    template<typename T, size_t inlineCapacity> template<typename U>
    void Vector<T, inlineCapacity>::insert(size_t position, const U* data, size_t dataSize)
    {
        ASSERT(position <= size());
        size_t newSize = m_size + dataSize;
        if (newSize > capacity()) {
            data = expandCapacity(newSize, data);
            if (!begin())
                return;
        }
        T* spot = begin() + position;
        TypeOperations::moveOverlapping(spot, end(), spot + dataSize);
        for (size_t i = 0; i < dataSize; ++i)
            new (&spot[i]) T(data[i]);
        m_size = newSize;
    }

    template<typename T, size_t inlineCapacity> template<typename U>
    inline void Vector<T, inlineCapacity>::insert(size_t position, const U& val)
    {
        ASSERT(position <= size());
        const U* data = &val;
        if (size() == capacity()) {
            data = expandCapacity(size() + 1, data);
            if (!begin())
                return;
        }
        T* spot = begin() + position;
        TypeOperations::moveOverlapping(spot, end(), spot + 1);
        new (spot) T(*data);
        ++m_size;
    }

    template<typename T, size_t inlineCapacity> template<typename U, size_t c>
    inline void Vector<T, inlineCapacity>::insert(size_t position, const Vector<U, c>& val)
    {
        insert(position, val.begin(), val.size());
    }

    template<typename T, size_t inlineCapacity> template<typename U>
    void Vector<T, inlineCapacity>::prepend(const U* data, size_t dataSize)
    {
        insert(0, data, dataSize);
    }

    template<typename T, size_t inlineCapacity> template<typename U>
    inline void Vector<T, inlineCapacity>::prepend(const U& val)
    {
        insert(0, val);
    }

    template<typename T, size_t inlineCapacity> template<typename U, size_t c>
    inline void Vector<T, inlineCapacity>::prepend(const Vector<U, c>& val)
    {
        insert(0, val.begin(), val.size());
    }

    template<typename T, size_t inlineCapacity>
    inline void Vector<T, inlineCapacity>::remove(size_t position)
    {
        ASSERT(position < size());
        T* spot = begin() + position;
        spot->~T();
        TypeOperations::moveOverlapping(spot + 1, end(), spot);
        --m_size;
    }

    template<typename T, size_t inlineCapacity>
    inline void Vector<T, inlineCapacity>::remove(size_t position, size_t length)
    {
        ASSERT(position < size());
        ASSERT(position + length <= size());
        T* beginSpot = begin() + position;
        T* endSpot = beginSpot + length;
        TypeOperations::destruct(beginSpot, endSpot);
        TypeOperations::moveOverlapping(endSpot, end(), beginSpot);
        m_size -= length;
    }

    template<typename T, size_t inlineCapacity>
    inline T* Vector<T, inlineCapacity>::releaseBuffer()
    {
        T* buffer = m_buffer.releaseBuffer();
        if (inlineCapacity && !buffer && m_size) {
            // If the vector had some data, but no buffer to release,
            // that means it was using the inline buffer. In that case,
            // we create a brand new buffer so the caller always gets one.
            size_t bytes = m_size * sizeof(T);
            buffer = static_cast<T*>(fastMalloc(bytes));
            memcpy(buffer, data(), bytes);
        }
        m_size = 0;
        return buffer;
    }
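
    // Ownership note: the pointer returned by releaseBuffer() was obtained from
    // fastMalloc (either directly above or via VectorBufferBase::allocateBuffer),
    // so the caller is responsible for releasing it with fastFree; the vector
    // itself is left with size zero.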

    template<typename T, size_t inlineCapacity>
    void deleteAllValues(const Vector<T, inlineCapacity>& collection)
    {
        typedef typename Vector<T, inlineCapacity>::const_iterator iterator;
        iterator end = collection.end();
        for (iterator it = collection.begin(); it != end; ++it)
            delete *it;
    }
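
    // Usage sketch (Node is a hypothetical heap-allocated class): for a
    // Vector<Node*> that owns its pointers, deleteAllValues() deletes every
    // element but leaves the now-dangling pointers in the vector, so callers
    // typically clear() it afterwards:
    //
    //     deleteAllValues(nodes);
    //     nodes.clear();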

    template<typename T, size_t inlineCapacity>
    inline void swap(Vector<T, inlineCapacity>& a, Vector<T, inlineCapacity>& b)
    {
        a.swap(b);
    }

    template<typename T, size_t inlineCapacity>
    bool operator==(const Vector<T, inlineCapacity>& a, const Vector<T, inlineCapacity>& b)
    {
        if (a.size() != b.size())
            return false;

        return VectorTypeOperations<T>::compare(a.data(), b.data(), a.size());
    }

    template<typename T, size_t inlineCapacity>
    inline bool operator!=(const Vector<T, inlineCapacity>& a, const Vector<T, inlineCapacity>& b)
    {
        return !(a == b);
    }

} // namespace WTF

using WTF::Vector;

#endif // WTF_Vector_h