]>
git.saurik.com Git - apple/javascriptcore.git/blob - wtf/Vector.h
f8a5be2798bb4405172eeda933ca282cf45178d4
2 * Copyright (C) 2005, 2006, 2007, 2008 Apple Inc. All rights reserved.
4 * This library is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Library General Public
6 * License as published by the Free Software Foundation; either
7 * version 2 of the License, or (at your option) any later version.
9 * This library is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Library General Public License for more details.
14 * You should have received a copy of the GNU Library General Public License
15 * along with this library; see the file COPYING.LIB. If not, write to
16 * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
17 * Boston, MA 02110-1301, USA.
24 #include "FastAllocBase.h"
25 #include "Noncopyable.h"
27 #include "VectorTraits.h"
32 #include <QDataStream>
40 // WTF_ALIGN_OF / WTF_ALIGNED
41 #if COMPILER(GCC) || COMPILER(MINGW) || COMPILER(RVCT) || COMPILER(WINSCW)
42 #define WTF_ALIGN_OF(type) __alignof__(type)
43 #define WTF_ALIGNED(variable_type, variable, n) variable_type variable __attribute__((__aligned__(n)))
45 #define WTF_ALIGN_OF(type) __alignof(type)
46 #define WTF_ALIGNED(variable_type, variable, n) __declspec(align(n)) variable_type variable
48 #error WTF_ALIGN macros need alignment control.
51 #if COMPILER(GCC) && (((__GNUC__ * 100) + __GNUC_MINOR__) >= 303)
52 typedef char __attribute__((__may_alias__
)) AlignedBufferChar
;
54 typedef char AlignedBufferChar
;
57 template <size_t size
, size_t alignment
> struct AlignedBuffer
;
58 template <size_t size
> struct AlignedBuffer
<size
, 1> { AlignedBufferChar buffer
[size
]; };
59 template <size_t size
> struct AlignedBuffer
<size
, 2> { WTF_ALIGNED(AlignedBufferChar
, buffer
[size
], 2); };
60 template <size_t size
> struct AlignedBuffer
<size
, 4> { WTF_ALIGNED(AlignedBufferChar
, buffer
[size
], 4); };
61 template <size_t size
> struct AlignedBuffer
<size
, 8> { WTF_ALIGNED(AlignedBufferChar
, buffer
[size
], 8); };
62 template <size_t size
> struct AlignedBuffer
<size
, 16> { WTF_ALIGNED(AlignedBufferChar
, buffer
[size
], 16); };
63 template <size_t size
> struct AlignedBuffer
<size
, 32> { WTF_ALIGNED(AlignedBufferChar
, buffer
[size
], 32); };
64 template <size_t size
> struct AlignedBuffer
<size
, 64> { WTF_ALIGNED(AlignedBufferChar
, buffer
[size
], 64); };
// VectorDestructor: runs ~T() over a range when VectorTraits says the type
// needs destruction; the <false, T> specialization compiles to nothing.
template <bool needsDestruction, typename T>
class VectorDestructor;

template<typename T>
struct VectorDestructor<false, T>
{
    static void destruct(T*, T*) {}
};

template<typename T>
struct VectorDestructor<true, T>
{
    static void destruct(T* begin, T* end)
    {
        for (T* cur = begin; cur != end; ++cur)
            cur->~T(); // NOTE(review): loop body dropped by the paste; restored.
    }
};
// VectorInitializer: default-constructs a range of T when the traits require
// initialization, using memset(0) when the traits say that is equivalent.
template <bool needsInitialization, bool canInitializeWithMemset, typename T>
class VectorInitializer;

template<bool ignore, typename T>
struct VectorInitializer<false, ignore, T>
{
    static void initialize(T*, T*) {}
};

template<typename T>
struct VectorInitializer<true, false, T>
{
    static void initialize(T* begin, T* end)
    {
        for (T* cur = begin; cur != end; ++cur)
            new (cur) T; // NOTE(review): loop body dropped by the paste; restored.
    }
};

template<typename T>
struct VectorInitializer<true, true, T>
{
    static void initialize(T* begin, T* end)
    {
        // Byte-wise zero fill; only valid when traits guarantee all-zero bytes
        // are a valid default-initialized T.
        memset(begin, 0, reinterpret_cast<char*>(end) - reinterpret_cast<char*>(begin));
    }
};
// VectorMover: relocates a range of T (construct at destination, destroy at
// source). The <true, T> specialization uses memcpy/memmove when the traits
// say T is trivially movable.
template <bool canMoveWithMemcpy, typename T>
class VectorMover;

template<typename T>
struct VectorMover<false, T>
{
    static void move(const T* src, const T* srcEnd, T* dst)
    {
        while (src != srcEnd) {
            // NOTE(review): loop body dropped by the paste; restored:
            // copy-construct into dst, destroy the source, advance both.
            new (dst) T(*src);
            src->~T();
            ++dst;
            ++src;
        }
    }
    static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
    {
        if (src > dst)
            move(src, srcEnd, dst);
        else {
            // Destination overlaps the tail of the source: move backwards so
            // elements are not clobbered before they are read.
            T* dstEnd = dst + (srcEnd - src);
            while (src != srcEnd) {
                --srcEnd;
                --dstEnd;
                new (dstEnd) T(*srcEnd);
                srcEnd->~T();
            }
        }
    }
};

template<typename T>
struct VectorMover<true, T>
{
    static void move(const T* src, const T* srcEnd, T* dst)
    {
        memcpy(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
    }
    static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
    {
        // memmove handles overlapping ranges.
        memmove(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
    }
};
// VectorCopier: copy-constructs a range into uninitialized storage, with a
// memcpy fast path for trivially copyable T.
template <bool canCopyWithMemcpy, typename T>
class VectorCopier;

template<typename T>
struct VectorCopier<false, T>
{
    static void uninitializedCopy(const T* src, const T* srcEnd, T* dst)
    {
        while (src != srcEnd) {
            // NOTE(review): loop body dropped by the paste; restored.
            new (dst) T(*src);
            ++dst;
            ++src;
        }
    }
};

template<typename T>
struct VectorCopier<true, T>
{
    static void uninitializedCopy(const T* src, const T* srcEnd, T* dst)
    {
        memcpy(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
    }
};
182 template <bool canFillWithMemset
, typename T
>
186 struct VectorFiller
<false, T
>
188 static void uninitializedFill(T
* dst
, T
* dstEnd
, const T
& val
)
190 while (dst
!= dstEnd
) {
198 struct VectorFiller
<true, T
>
200 static void uninitializedFill(T
* dst
, T
* dstEnd
, const T
& val
)
202 ASSERT(sizeof(T
) == sizeof(char));
203 memset(dst
, val
, dstEnd
- dst
);
207 template<bool canCompareWithMemcmp
, typename T
>
208 class VectorComparer
;
211 struct VectorComparer
<false, T
>
213 static bool compare(const T
* a
, const T
* b
, size_t size
)
215 for (size_t i
= 0; i
< size
; ++i
)
223 struct VectorComparer
<true, T
>
225 static bool compare(const T
* a
, const T
* b
, size_t size
)
227 return memcmp(a
, b
, sizeof(T
) * size
) == 0;
232 struct VectorTypeOperations
234 static void destruct(T
* begin
, T
* end
)
236 VectorDestructor
<VectorTraits
<T
>::needsDestruction
, T
>::destruct(begin
, end
);
239 static void initialize(T
* begin
, T
* end
)
241 VectorInitializer
<VectorTraits
<T
>::needsInitialization
, VectorTraits
<T
>::canInitializeWithMemset
, T
>::initialize(begin
, end
);
244 static void move(const T
* src
, const T
* srcEnd
, T
* dst
)
246 VectorMover
<VectorTraits
<T
>::canMoveWithMemcpy
, T
>::move(src
, srcEnd
, dst
);
249 static void moveOverlapping(const T
* src
, const T
* srcEnd
, T
* dst
)
251 VectorMover
<VectorTraits
<T
>::canMoveWithMemcpy
, T
>::moveOverlapping(src
, srcEnd
, dst
);
254 static void uninitializedCopy(const T
* src
, const T
* srcEnd
, T
* dst
)
256 VectorCopier
<VectorTraits
<T
>::canCopyWithMemcpy
, T
>::uninitializedCopy(src
, srcEnd
, dst
);
259 static void uninitializedFill(T
* dst
, T
* dstEnd
, const T
& val
)
261 VectorFiller
<VectorTraits
<T
>::canFillWithMemset
, T
>::uninitializedFill(dst
, dstEnd
, val
);
264 static bool compare(const T
* a
, const T
* b
, size_t size
)
266 return VectorComparer
<VectorTraits
<T
>::canCompareWithMemcmp
, T
>::compare(a
, b
, size
);
271 class VectorBufferBase
: Noncopyable
{
273 void allocateBuffer(size_t newCapacity
)
275 m_capacity
= newCapacity
;
276 if (newCapacity
> std::numeric_limits
<size_t>::max() / sizeof(T
))
278 m_buffer
= static_cast<T
*>(fastMalloc(newCapacity
* sizeof(T
)));
281 void deallocateBuffer(T
* bufferToDeallocate
)
283 if (m_buffer
== bufferToDeallocate
) {
287 fastFree(bufferToDeallocate
);
290 T
* buffer() { return m_buffer
; }
291 const T
* buffer() const { return m_buffer
; }
292 T
** bufferSlot() { return &m_buffer
; }
293 size_t capacity() const { return m_capacity
; }
297 T
* buffer
= m_buffer
;
310 VectorBufferBase(T
* buffer
, size_t capacity
)
312 , m_capacity(capacity
)
318 // FIXME: It would be nice to find a way to ASSERT that m_buffer hasn't leaked here.
325 template<typename T
, size_t inlineCapacity
>
329 class VectorBuffer
<T
, 0> : private VectorBufferBase
<T
> {
331 typedef VectorBufferBase
<T
> Base
;
337 VectorBuffer(size_t capacity
)
339 allocateBuffer(capacity
);
344 deallocateBuffer(buffer());
347 void swap(VectorBuffer
<T
, 0>& other
)
349 std::swap(m_buffer
, other
.m_buffer
);
350 std::swap(m_capacity
, other
.m_capacity
);
353 void restoreInlineBufferIfNeeded() { }
355 using Base::allocateBuffer
;
356 using Base::deallocateBuffer
;
359 using Base::bufferSlot
;
360 using Base::capacity
;
362 using Base::releaseBuffer
;
364 using Base::m_buffer
;
365 using Base::m_capacity
;
368 template<typename T
, size_t inlineCapacity
>
369 class VectorBuffer
: private VectorBufferBase
<T
> {
371 typedef VectorBufferBase
<T
> Base
;
374 : Base(inlineBuffer(), inlineCapacity
)
378 VectorBuffer(size_t capacity
)
379 : Base(inlineBuffer(), inlineCapacity
)
381 if (capacity
> inlineCapacity
)
382 Base::allocateBuffer(capacity
);
387 deallocateBuffer(buffer());
390 void allocateBuffer(size_t newCapacity
)
392 if (newCapacity
> inlineCapacity
)
393 Base::allocateBuffer(newCapacity
);
395 m_buffer
= inlineBuffer();
396 m_capacity
= inlineCapacity
;
400 void deallocateBuffer(T
* bufferToDeallocate
)
402 if (bufferToDeallocate
== inlineBuffer())
404 Base::deallocateBuffer(bufferToDeallocate
);
407 void restoreInlineBufferIfNeeded()
411 m_buffer
= inlineBuffer();
412 m_capacity
= inlineCapacity
;
416 using Base::bufferSlot
;
417 using Base::capacity
;
421 if (buffer() == inlineBuffer())
423 return Base::releaseBuffer();
427 using Base::m_buffer
;
428 using Base::m_capacity
;
430 static const size_t m_inlineBufferSize
= inlineCapacity
* sizeof(T
);
432 // FIXME: <rdar://problem/6546253&6546260>
433 T
* inlineBuffer() { return reinterpret_cast<T
*>(static_cast<void*>(m_inlineBuffer
.buffer
)); }
435 T
* inlineBuffer() { return reinterpret_cast<T
*>(m_inlineBuffer
.buffer
); }
438 AlignedBuffer
<m_inlineBufferSize
, WTF_ALIGN_OF(T
)> m_inlineBuffer
;
441 template<typename T
, size_t inlineCapacity
= 0>
442 class Vector
: public FastAllocBase
{
444 typedef VectorBuffer
<T
, inlineCapacity
> Buffer
;
445 typedef VectorTypeOperations
<T
> TypeOperations
;
451 typedef const T
* const_iterator
;
458 explicit Vector(size_t size
)
463 TypeOperations::initialize(begin(), end());
468 if (m_size
) shrink(0);
471 Vector(const Vector
&);
472 template<size_t otherCapacity
>
473 Vector(const Vector
<T
, otherCapacity
>&);
475 Vector
& operator=(const Vector
&);
476 template<size_t otherCapacity
>
477 Vector
& operator=(const Vector
<T
, otherCapacity
>&);
479 size_t size() const { return m_size
; }
480 size_t capacity() const { return m_buffer
.capacity(); }
481 bool isEmpty() const { return !size(); }
486 return m_buffer
.buffer()[i
];
488 const T
& at(size_t i
) const
491 return m_buffer
.buffer()[i
];
494 T
& operator[](size_t i
) { return at(i
); }
495 const T
& operator[](size_t i
) const { return at(i
); }
497 T
* data() { return m_buffer
.buffer(); }
498 const T
* data() const { return m_buffer
.buffer(); }
499 T
** dataSlot() { return m_buffer
.bufferSlot(); }
501 iterator
begin() { return data(); }
502 iterator
end() { return begin() + m_size
; }
503 const_iterator
begin() const { return data(); }
504 const_iterator
end() const { return begin() + m_size
; }
506 T
& first() { return at(0); }
507 const T
& first() const { return at(0); }
508 T
& last() { return at(size() - 1); }
509 const T
& last() const { return at(size() - 1); }
511 template<typename U
> size_t find(const U
&) const;
513 void shrink(size_t size
);
514 void grow(size_t size
);
515 void resize(size_t size
);
516 void reserveCapacity(size_t newCapacity
);
517 void reserveInitialCapacity(size_t initialCapacity
);
518 void shrinkCapacity(size_t newCapacity
);
519 void shrinkToFit() { shrinkCapacity(size()); }
521 void clear() { shrinkCapacity(0); }
523 template<typename U
> void append(const U
*, size_t);
524 template<typename U
> void append(const U
&);
525 template<typename U
> void uncheckedAppend(const U
& val
);
526 template<size_t otherCapacity
> void append(const Vector
<T
, otherCapacity
>&);
528 template<typename U
> void insert(size_t position
, const U
*, size_t);
529 template<typename U
> void insert(size_t position
, const U
&);
530 template<typename U
, size_t c
> void insert(size_t position
, const Vector
<U
, c
>&);
532 template<typename U
> void prepend(const U
*, size_t);
533 template<typename U
> void prepend(const U
&);
534 template<typename U
, size_t c
> void prepend(const Vector
<U
, c
>&);
536 void remove(size_t position
);
537 void remove(size_t position
, size_t length
);
545 Vector(size_t size
, const T
& val
)
550 TypeOperations::uninitializedFill(begin(), end(), val
);
553 void fill(const T
&, size_t);
554 void fill(const T
& val
) { fill(val
, size()); }
556 template<typename Iterator
> void appendRange(Iterator start
, Iterator end
);
560 void swap(Vector
<T
, inlineCapacity
>& other
)
562 std::swap(m_size
, other
.m_size
);
563 m_buffer
.swap(other
.m_buffer
);
567 void expandCapacity(size_t newMinCapacity
);
568 const T
* expandCapacity(size_t newMinCapacity
, const T
*);
569 template<typename U
> U
* expandCapacity(size_t newMinCapacity
, U
*);
577 QDataStream
& operator<<(QDataStream
& stream
, const Vector
<T
>& data
)
579 stream
<< qint64(data
.size());
580 foreach (const T
& i
, data
)
586 QDataStream
& operator>>(QDataStream
& stream
, Vector
<T
>& data
)
592 data
.reserveCapacity(count
);
593 for (qint64 i
= 0; i
< count
; ++i
) {
601 template<typename T
, size_t inlineCapacity
>
602 Vector
<T
, inlineCapacity
>::Vector(const Vector
& other
)
603 : m_size(other
.size())
604 , m_buffer(other
.capacity())
607 TypeOperations::uninitializedCopy(other
.begin(), other
.end(), begin());
610 template<typename T
, size_t inlineCapacity
>
611 template<size_t otherCapacity
>
612 Vector
<T
, inlineCapacity
>::Vector(const Vector
<T
, otherCapacity
>& other
)
613 : m_size(other
.size())
614 , m_buffer(other
.capacity())
617 TypeOperations::uninitializedCopy(other
.begin(), other
.end(), begin());
620 template<typename T
, size_t inlineCapacity
>
621 Vector
<T
, inlineCapacity
>& Vector
<T
, inlineCapacity
>::operator=(const Vector
<T
, inlineCapacity
>& other
)
626 if (size() > other
.size())
627 shrink(other
.size());
628 else if (other
.size() > capacity()) {
630 reserveCapacity(other
.size());
635 std::copy(other
.begin(), other
.begin() + size(), begin());
636 TypeOperations::uninitializedCopy(other
.begin() + size(), other
.end(), end());
637 m_size
= other
.size();
642 template<typename T
, size_t inlineCapacity
>
643 template<size_t otherCapacity
>
644 Vector
<T
, inlineCapacity
>& Vector
<T
, inlineCapacity
>::operator=(const Vector
<T
, otherCapacity
>& other
)
649 if (size() > other
.size())
650 shrink(other
.size());
651 else if (other
.size() > capacity()) {
653 reserveCapacity(other
.size());
658 std::copy(other
.begin(), other
.begin() + size(), begin());
659 TypeOperations::uninitializedCopy(other
.begin() + size(), other
.end(), end());
660 m_size
= other
.size();
665 template<typename T
, size_t inlineCapacity
>
667 size_t Vector
<T
, inlineCapacity
>::find(const U
& value
) const
669 for (size_t i
= 0; i
< size(); ++i
) {
676 template<typename T
, size_t inlineCapacity
>
677 void Vector
<T
, inlineCapacity
>::fill(const T
& val
, size_t newSize
)
679 if (size() > newSize
)
681 else if (newSize
> capacity()) {
683 reserveCapacity(newSize
);
688 std::fill(begin(), end(), val
);
689 TypeOperations::uninitializedFill(end(), begin() + newSize
, val
);
693 template<typename T
, size_t inlineCapacity
>
694 template<typename Iterator
>
695 void Vector
<T
, inlineCapacity
>::appendRange(Iterator start
, Iterator end
)
697 for (Iterator it
= start
; it
!= end
; ++it
)
701 template<typename T
, size_t inlineCapacity
>
702 void Vector
<T
, inlineCapacity
>::expandCapacity(size_t newMinCapacity
)
704 reserveCapacity(max(newMinCapacity
, max(static_cast<size_t>(16), capacity() + capacity() / 4 + 1)));
707 template<typename T
, size_t inlineCapacity
>
708 const T
* Vector
<T
, inlineCapacity
>::expandCapacity(size_t newMinCapacity
, const T
* ptr
)
710 if (ptr
< begin() || ptr
>= end()) {
711 expandCapacity(newMinCapacity
);
714 size_t index
= ptr
- begin();
715 expandCapacity(newMinCapacity
);
716 return begin() + index
;
719 template<typename T
, size_t inlineCapacity
> template<typename U
>
720 inline U
* Vector
<T
, inlineCapacity
>::expandCapacity(size_t newMinCapacity
, U
* ptr
)
722 expandCapacity(newMinCapacity
);
726 template<typename T
, size_t inlineCapacity
>
727 inline void Vector
<T
, inlineCapacity
>::resize(size_t size
)
730 TypeOperations::destruct(begin() + size
, end());
732 if (size
> capacity())
733 expandCapacity(size
);
735 TypeOperations::initialize(end(), begin() + size
);
741 template<typename T
, size_t inlineCapacity
>
742 void Vector
<T
, inlineCapacity
>::shrink(size_t size
)
744 ASSERT(size
<= m_size
);
745 TypeOperations::destruct(begin() + size
, end());
749 template<typename T
, size_t inlineCapacity
>
750 void Vector
<T
, inlineCapacity
>::grow(size_t size
)
752 ASSERT(size
>= m_size
);
753 if (size
> capacity())
754 expandCapacity(size
);
756 TypeOperations::initialize(end(), begin() + size
);
760 template<typename T
, size_t inlineCapacity
>
761 void Vector
<T
, inlineCapacity
>::reserveCapacity(size_t newCapacity
)
763 if (newCapacity
<= capacity())
765 T
* oldBuffer
= begin();
767 m_buffer
.allocateBuffer(newCapacity
);
769 TypeOperations::move(oldBuffer
, oldEnd
, begin());
770 m_buffer
.deallocateBuffer(oldBuffer
);
773 template<typename T
, size_t inlineCapacity
>
774 inline void Vector
<T
, inlineCapacity
>::reserveInitialCapacity(size_t initialCapacity
)
777 ASSERT(capacity() == inlineCapacity
);
778 if (initialCapacity
> inlineCapacity
)
779 m_buffer
.allocateBuffer(initialCapacity
);
782 template<typename T
, size_t inlineCapacity
>
783 void Vector
<T
, inlineCapacity
>::shrinkCapacity(size_t newCapacity
)
785 if (newCapacity
>= capacity())
788 if (newCapacity
< size())
791 T
* oldBuffer
= begin();
792 if (newCapacity
> 0) {
794 m_buffer
.allocateBuffer(newCapacity
);
795 if (begin() != oldBuffer
)
796 TypeOperations::move(oldBuffer
, oldEnd
, begin());
799 m_buffer
.deallocateBuffer(oldBuffer
);
800 m_buffer
.restoreInlineBufferIfNeeded();
803 // Templatizing these is better than just letting the conversion happen implicitly,
804 // because for instance it allows a PassRefPtr to be appended to a RefPtr vector
805 // without refcount thrash.
807 template<typename T
, size_t inlineCapacity
> template<typename U
>
808 void Vector
<T
, inlineCapacity
>::append(const U
* data
, size_t dataSize
)
810 size_t newSize
= m_size
+ dataSize
;
811 if (newSize
> capacity()) {
812 data
= expandCapacity(newSize
, data
);
816 if (newSize
< m_size
)
819 for (size_t i
= 0; i
< dataSize
; ++i
)
820 new (&dest
[i
]) T(data
[i
]);
824 template<typename T
, size_t inlineCapacity
> template<typename U
>
825 ALWAYS_INLINE
void Vector
<T
, inlineCapacity
>::append(const U
& val
)
828 if (size() == capacity()) {
829 ptr
= expandCapacity(size() + 1, ptr
);
835 // FIXME: MSVC7 generates compilation errors when trying to assign
836 // a pointer to a Vector of its base class (i.e. can't downcast). So far
837 // I've been unable to determine any logical reason for this, so I can
838 // only assume it is a bug with the compiler. Casting is a bad solution,
839 // however, because it subverts implicit conversions, so a better
841 new (end()) T(static_cast<T
>(*ptr
));
848 // This version of append saves a branch in the case where you know that the
849 // vector's capacity is large enough for the append to succeed.
851 template<typename T
, size_t inlineCapacity
> template<typename U
>
852 inline void Vector
<T
, inlineCapacity
>::uncheckedAppend(const U
& val
)
854 ASSERT(size() < capacity());
860 // This method should not be called append, a better name would be appendElements.
861 // It could also be eliminated entirely, and call sites could just use
862 // appendRange(val.begin(), val.end()).
863 template<typename T
, size_t inlineCapacity
> template<size_t otherCapacity
>
864 inline void Vector
<T
, inlineCapacity
>::append(const Vector
<T
, otherCapacity
>& val
)
866 append(val
.begin(), val
.size());
869 template<typename T
, size_t inlineCapacity
> template<typename U
>
870 void Vector
<T
, inlineCapacity
>::insert(size_t position
, const U
* data
, size_t dataSize
)
872 ASSERT(position
<= size());
873 size_t newSize
= m_size
+ dataSize
;
874 if (newSize
> capacity()) {
875 data
= expandCapacity(newSize
, data
);
879 if (newSize
< m_size
)
881 T
* spot
= begin() + position
;
882 TypeOperations::moveOverlapping(spot
, end(), spot
+ dataSize
);
883 for (size_t i
= 0; i
< dataSize
; ++i
)
884 new (&spot
[i
]) T(data
[i
]);
888 template<typename T
, size_t inlineCapacity
> template<typename U
>
889 inline void Vector
<T
, inlineCapacity
>::insert(size_t position
, const U
& val
)
891 ASSERT(position
<= size());
892 const U
* data
= &val
;
893 if (size() == capacity()) {
894 data
= expandCapacity(size() + 1, data
);
898 T
* spot
= begin() + position
;
899 TypeOperations::moveOverlapping(spot
, end(), spot
+ 1);
904 template<typename T
, size_t inlineCapacity
> template<typename U
, size_t c
>
905 inline void Vector
<T
, inlineCapacity
>::insert(size_t position
, const Vector
<U
, c
>& val
)
907 insert(position
, val
.begin(), val
.size());
910 template<typename T
, size_t inlineCapacity
> template<typename U
>
911 void Vector
<T
, inlineCapacity
>::prepend(const U
* data
, size_t dataSize
)
913 insert(0, data
, dataSize
);
916 template<typename T
, size_t inlineCapacity
> template<typename U
>
917 inline void Vector
<T
, inlineCapacity
>::prepend(const U
& val
)
922 template<typename T
, size_t inlineCapacity
> template<typename U
, size_t c
>
923 inline void Vector
<T
, inlineCapacity
>::prepend(const Vector
<U
, c
>& val
)
925 insert(0, val
.begin(), val
.size());
928 template<typename T
, size_t inlineCapacity
>
929 inline void Vector
<T
, inlineCapacity
>::remove(size_t position
)
931 ASSERT(position
< size());
932 T
* spot
= begin() + position
;
934 TypeOperations::moveOverlapping(spot
+ 1, end(), spot
);
938 template<typename T
, size_t inlineCapacity
>
939 inline void Vector
<T
, inlineCapacity
>::remove(size_t position
, size_t length
)
941 ASSERT(position
< size());
942 ASSERT(position
+ length
< size());
943 T
* beginSpot
= begin() + position
;
944 T
* endSpot
= beginSpot
+ length
;
945 TypeOperations::destruct(beginSpot
, endSpot
);
946 TypeOperations::moveOverlapping(endSpot
, end(), beginSpot
);
950 template<typename T
, size_t inlineCapacity
>
951 inline T
* Vector
<T
, inlineCapacity
>::releaseBuffer()
953 T
* buffer
= m_buffer
.releaseBuffer();
954 if (inlineCapacity
&& !buffer
&& m_size
) {
955 // If the vector had some data, but no buffer to release,
956 // that means it was using the inline buffer. In that case,
957 // we create a brand new buffer so the caller always gets one.
958 size_t bytes
= m_size
* sizeof(T
);
959 buffer
= static_cast<T
*>(fastMalloc(bytes
));
960 memcpy(buffer
, data(), bytes
);
966 template<typename T
, size_t inlineCapacity
>
967 void deleteAllValues(const Vector
<T
, inlineCapacity
>& collection
)
969 typedef typename Vector
<T
, inlineCapacity
>::const_iterator iterator
;
970 iterator end
= collection
.end();
971 for (iterator it
= collection
.begin(); it
!= end
; ++it
)
975 template<typename T
, size_t inlineCapacity
>
976 inline void swap(Vector
<T
, inlineCapacity
>& a
, Vector
<T
, inlineCapacity
>& b
)
981 template<typename T
, size_t inlineCapacity
>
982 bool operator==(const Vector
<T
, inlineCapacity
>& a
, const Vector
<T
, inlineCapacity
>& b
)
984 if (a
.size() != b
.size())
987 return VectorTypeOperations
<T
>::compare(a
.data(), b
.data(), a
.size());
990 template<typename T
, size_t inlineCapacity
>
991 inline bool operator!=(const Vector
<T
, inlineCapacity
>& a
, const Vector
<T
, inlineCapacity
>& b
)
1001 #endif // WTF_Vector_h