/*
 * Copyright (C) 2005, 2006, 2007, 2008 Apple Inc. All rights reserved.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public License
 * along with this library; see the file COPYING.LIB.  If not, write to
 * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */
24 #include "Assertions.h"
25 #include "FastMalloc.h"
26 #include "Noncopyable.h"
27 #include "VectorTraits.h"

    // WTF_ALIGN_OF / WTF_ALIGNED
    #if COMPILER(GCC) || COMPILER(MINGW) || COMPILER(RVCT) || COMPILER(WINSCW)
    #define WTF_ALIGN_OF(type) __alignof__(type)
    #define WTF_ALIGNED(variable_type, variable, n) variable_type variable __attribute__((__aligned__(n)))
    #elif COMPILER(MSVC)
    #define WTF_ALIGN_OF(type) __alignof(type)
    #define WTF_ALIGNED(variable_type, variable, n) __declspec(align(n)) variable_type variable
    #else
    #error WTF_ALIGN macros need alignment control.
    #endif

    #if COMPILER(GCC) && (((__GNUC__ * 100) + __GNUC_MINOR__) >= 303)
    typedef char __attribute__((__may_alias__)) AlignedBufferChar;
    #else
    typedef char AlignedBufferChar;
    #endif

    template <size_t size, size_t alignment> struct AlignedBuffer;
    template <size_t size> struct AlignedBuffer<size, 1> { AlignedBufferChar buffer[size]; };
    template <size_t size> struct AlignedBuffer<size, 2> { WTF_ALIGNED(AlignedBufferChar, buffer[size], 2); };
    template <size_t size> struct AlignedBuffer<size, 4> { WTF_ALIGNED(AlignedBufferChar, buffer[size], 4); };
    template <size_t size> struct AlignedBuffer<size, 8> { WTF_ALIGNED(AlignedBufferChar, buffer[size], 8); };
    template <size_t size> struct AlignedBuffer<size, 16> { WTF_ALIGNED(AlignedBufferChar, buffer[size], 16); };
    template <size_t size> struct AlignedBuffer<size, 32> { WTF_ALIGNED(AlignedBufferChar, buffer[size], 32); };
    template <size_t size> struct AlignedBuffer<size, 64> { WTF_ALIGNED(AlignedBufferChar, buffer[size], 64); };
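
    // Usage sketch (illustrative only, not live code in this header): AlignedBuffer
    // provides raw storage whose alignment matches a given type, so a container can
    // construct elements into it with placement new. VectorBuffer below uses exactly
    // this pattern for its inline buffer.
#if 0
    AlignedBuffer<4 * sizeof(double), WTF_ALIGN_OF(double)> storage;
    double* slots = reinterpret_cast<double*>(storage.buffer);
    new (slots) double(1.0); // construct into the aligned raw storage
    slots[0] = 2.0;
#endif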

    template <bool needsDestruction, typename T>
    class VectorDestructor;

    template<typename T>
    struct VectorDestructor<false, T>
    {
        static void destruct(T*, T*) {}
    };

    template<typename T>
    struct VectorDestructor<true, T>
    {
        static void destruct(T* begin, T* end)
        {
            for (T* cur = begin; cur != end; ++cur)
                cur->~T();
        }
    };

    template <bool needsInitialization, bool canInitializeWithMemset, typename T>
    class VectorInitializer;

    template<bool ignore, typename T>
    struct VectorInitializer<false, ignore, T>
    {
        static void initialize(T*, T*) {}
    };

    template<typename T>
    struct VectorInitializer<true, false, T>
    {
        static void initialize(T* begin, T* end)
        {
            for (T* cur = begin; cur != end; ++cur)
                new (cur) T;
        }
    };

    template<typename T>
    struct VectorInitializer<true, true, T>
    {
        static void initialize(T* begin, T* end)
        {
            memset(begin, 0, reinterpret_cast<char*>(end) - reinterpret_cast<char*>(begin));
        }
    };

    template <bool canMoveWithMemcpy, typename T>
    class VectorMover;

    template<typename T>
    struct VectorMover<false, T>
    {
        static void move(const T* src, const T* srcEnd, T* dst)
        {
            while (src != srcEnd) {
                new (dst) T(*src);
                src->~T();
                ++dst;
                ++src;
            }
        }
        static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
        {
            if (src > dst)
                move(src, srcEnd, dst);
            else {
                T* dstEnd = dst + (srcEnd - src);
                while (src != srcEnd) {
                    --srcEnd;
                    --dstEnd;
                    new (dstEnd) T(*srcEnd);
                    srcEnd->~T();
                }
            }
        }
    };

    template<typename T>
    struct VectorMover<true, T>
    {
        static void move(const T* src, const T* srcEnd, T* dst)
        {
            memcpy(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
        }
        static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
        {
            memmove(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
        }
    };

    template <bool canCopyWithMemcpy, typename T>
    class VectorCopier;

    template<typename T>
    struct VectorCopier<false, T>
    {
        static void uninitializedCopy(const T* src, const T* srcEnd, T* dst)
        {
            while (src != srcEnd) {
                new (dst) T(*src);
                ++dst;
                ++src;
            }
        }
    };

    template<typename T>
    struct VectorCopier<true, T>
    {
        static void uninitializedCopy(const T* src, const T* srcEnd, T* dst)
        {
            memcpy(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
        }
    };

    template <bool canFillWithMemset, typename T>
    class VectorFiller;

    template<typename T>
    struct VectorFiller<false, T>
    {
        static void uninitializedFill(T* dst, T* dstEnd, const T& val)
        {
            while (dst != dstEnd) {
                new (dst) T(val);
                ++dst;
            }
        }
    };

    template<typename T>
    struct VectorFiller<true, T>
    {
        static void uninitializedFill(T* dst, T* dstEnd, const T& val)
        {
            ASSERT(sizeof(T) == sizeof(char));
            memset(dst, val, dstEnd - dst);
        }
    };

    template<bool canCompareWithMemcmp, typename T>
    class VectorComparer;

    template<typename T>
    struct VectorComparer<false, T>
    {
        static bool compare(const T* a, const T* b, size_t size)
        {
            for (size_t i = 0; i < size; ++i)
                if (a[i] != b[i])
                    return false;
            return true;
        }
    };

    template<typename T>
    struct VectorComparer<true, T>
    {
        static bool compare(const T* a, const T* b, size_t size)
        {
            return memcmp(a, b, sizeof(T) * size) == 0;
        }
    };

    template<typename T>
    struct VectorTypeOperations
    {
        static void destruct(T* begin, T* end)
        {
            VectorDestructor<VectorTraits<T>::needsDestruction, T>::destruct(begin, end);
        }

        static void initialize(T* begin, T* end)
        {
            VectorInitializer<VectorTraits<T>::needsInitialization, VectorTraits<T>::canInitializeWithMemset, T>::initialize(begin, end);
        }

        static void move(const T* src, const T* srcEnd, T* dst)
        {
            VectorMover<VectorTraits<T>::canMoveWithMemcpy, T>::move(src, srcEnd, dst);
        }

        static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
        {
            VectorMover<VectorTraits<T>::canMoveWithMemcpy, T>::moveOverlapping(src, srcEnd, dst);
        }

        static void uninitializedCopy(const T* src, const T* srcEnd, T* dst)
        {
            VectorCopier<VectorTraits<T>::canCopyWithMemcpy, T>::uninitializedCopy(src, srcEnd, dst);
        }

        static void uninitializedFill(T* dst, T* dstEnd, const T& val)
        {
            VectorFiller<VectorTraits<T>::canFillWithMemset, T>::uninitializedFill(dst, dstEnd, val);
        }

        static bool compare(const T* a, const T* b, size_t size)
        {
            return VectorComparer<VectorTraits<T>::canCompareWithMemcmp, T>::compare(a, b, size);
        }
    };
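
    // Illustrative sketch (hypothetical type; not part of this header): the helpers
    // above only take the memset/memcpy/memcmp fast paths when VectorTraits<T> says
    // they are safe. A specialization for a plain-old-data type could set the flags
    // that VectorTypeOperations reads like this:
#if 0
    struct IntPair { int first; int second; };

    template<> struct VectorTraits<IntPair> {
        static const bool needsDestruction = false;
        static const bool needsInitialization = false;
        static const bool canInitializeWithMemset = false;
        static const bool canMoveWithMemcpy = true;
        static const bool canCopyWithMemcpy = true;
        static const bool canFillWithMemset = false;
        static const bool canCompareWithMemcmp = true;
    };
#endif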

    template<typename T>
    class VectorBufferBase : Noncopyable {
    public:
        void allocateBuffer(size_t newCapacity)
        {
            m_capacity = newCapacity;
            if (newCapacity > std::numeric_limits<size_t>::max() / sizeof(T))
                CRASH();
            m_buffer = static_cast<T*>(fastMalloc(newCapacity * sizeof(T)));
        }

        void deallocateBuffer(T* bufferToDeallocate)
        {
            if (m_buffer == bufferToDeallocate) {
                m_buffer = 0;
                m_capacity = 0;
            }
            fastFree(bufferToDeallocate);
        }

        T* buffer() { return m_buffer; }
        const T* buffer() const { return m_buffer; }
        T** bufferSlot() { return &m_buffer; }
        size_t capacity() const { return m_capacity; }

        T* releaseBuffer()
        {
            T* buffer = m_buffer;
            m_buffer = 0;
            m_capacity = 0;
            return buffer;
        }

    protected:
        VectorBufferBase()
            : m_buffer(0)
            , m_capacity(0)
        {
        }

        VectorBufferBase(T* buffer, size_t capacity)
            : m_buffer(buffer)
            , m_capacity(capacity)
        {
        }

        ~VectorBufferBase()
        {
            // FIXME: It would be nice to find a way to ASSERT that m_buffer hasn't leaked here.
        }

        T* m_buffer;
        size_t m_capacity;
    };

    template<typename T, size_t inlineCapacity>
    class VectorBuffer;

    template<typename T>
    class VectorBuffer<T, 0> : private VectorBufferBase<T> {
    private:
        typedef VectorBufferBase<T> Base;
    public:
        VectorBuffer()
        {
        }

        VectorBuffer(size_t capacity)
        {
            allocateBuffer(capacity);
        }

        ~VectorBuffer()
        {
            deallocateBuffer(buffer());
        }

        void swap(VectorBuffer<T, 0>& other)
        {
            std::swap(m_buffer, other.m_buffer);
            std::swap(m_capacity, other.m_capacity);
        }

        void restoreInlineBufferIfNeeded() { }

        using Base::allocateBuffer;
        using Base::deallocateBuffer;

        using Base::buffer;
        using Base::bufferSlot;
        using Base::capacity;

        using Base::releaseBuffer;
    protected:
        using Base::m_buffer;
        using Base::m_capacity;
    };

    template<typename T, size_t inlineCapacity>
    class VectorBuffer : private VectorBufferBase<T> {
    private:
        typedef VectorBufferBase<T> Base;
    public:
        VectorBuffer()
            : Base(inlineBuffer(), inlineCapacity)
        {
        }

        VectorBuffer(size_t capacity)
            : Base(inlineBuffer(), inlineCapacity)
        {
            allocateBuffer(capacity);
        }

        ~VectorBuffer()
        {
            deallocateBuffer(buffer());
        }

        void allocateBuffer(size_t newCapacity)
        {
            if (newCapacity > inlineCapacity)
                Base::allocateBuffer(newCapacity);
        }

        void deallocateBuffer(T* bufferToDeallocate)
        {
            if (bufferToDeallocate == inlineBuffer())
                return;
            Base::deallocateBuffer(bufferToDeallocate);
        }

        void restoreInlineBufferIfNeeded()
        {
            if (m_buffer)
                return;
            m_buffer = inlineBuffer();
            m_capacity = inlineCapacity;
        }

        using Base::buffer;
        using Base::bufferSlot;
        using Base::capacity;

        T* releaseBuffer()
        {
            if (buffer() == inlineBuffer())
                return 0;
            return Base::releaseBuffer();
        }

    protected:
        using Base::m_buffer;
        using Base::m_capacity;

    private:
        static const size_t m_inlineBufferSize = inlineCapacity * sizeof(T);
        T* inlineBuffer() { return reinterpret_cast<T*>(m_inlineBuffer.buffer); }

        AlignedBuffer<m_inlineBufferSize, WTF_ALIGN_OF(T)> m_inlineBuffer;
    };

    template<typename T, size_t inlineCapacity = 0>
    class Vector {
    private:
        typedef VectorBuffer<T, inlineCapacity> Buffer;
        typedef VectorTypeOperations<T> TypeOperations;

    public:
        typedef T ValueType;

        typedef T* iterator;
        typedef const T* const_iterator;

        Vector()
            : m_size(0)
        {
        }

        explicit Vector(size_t size)
            : m_size(size)
            , m_buffer(size)
        {
            if (begin())
                TypeOperations::initialize(begin(), end());
        }

        ~Vector()
        {
            if (m_size) shrink(0);
        }

        Vector(const Vector&);
        template<size_t otherCapacity>
        Vector(const Vector<T, otherCapacity>&);

        Vector& operator=(const Vector&);
        template<size_t otherCapacity>
        Vector& operator=(const Vector<T, otherCapacity>&);

        size_t size() const { return m_size; }
        size_t capacity() const { return m_buffer.capacity(); }
        bool isEmpty() const { return !size(); }

        T& at(size_t i)
        {
            ASSERT(i < size());
            return m_buffer.buffer()[i];
        }
        const T& at(size_t i) const
        {
            ASSERT(i < size());
            return m_buffer.buffer()[i];
        }

        T& operator[](size_t i) { return at(i); }
        const T& operator[](size_t i) const { return at(i); }

        T* data() { return m_buffer.buffer(); }
        const T* data() const { return m_buffer.buffer(); }
        T** dataSlot() { return m_buffer.bufferSlot(); }

        iterator begin() { return data(); }
        iterator end() { return begin() + m_size; }
        const_iterator begin() const { return data(); }
        const_iterator end() const { return begin() + m_size; }

        T& first() { return at(0); }
        const T& first() const { return at(0); }
        T& last() { return at(size() - 1); }
        const T& last() const { return at(size() - 1); }

        template<typename U> size_t find(const U&) const;

        void shrink(size_t size);
        void grow(size_t size);
        void resize(size_t size);
        void reserveCapacity(size_t newCapacity);
        void shrinkCapacity(size_t newCapacity);
        void shrinkToFit() { shrinkCapacity(size()); }

        void clear() { shrinkCapacity(0); }

        template<typename U> void append(const U*, size_t);
        template<typename U> void append(const U&);
        template<typename U> void uncheckedAppend(const U& val);
        template<size_t otherCapacity> void append(const Vector<T, otherCapacity>&);

        template<typename U> void insert(size_t position, const U*, size_t);
        template<typename U> void insert(size_t position, const U&);
        template<typename U, size_t c> void insert(size_t position, const Vector<U, c>&);

        template<typename U> void prepend(const U*, size_t);
        template<typename U> void prepend(const U&);
        template<typename U, size_t c> void prepend(const Vector<U, c>&);

        void remove(size_t position);
        void remove(size_t position, size_t length);

        void removeLast()
        {
            ASSERT(!isEmpty());
            shrink(size() - 1);
        }

        Vector(size_t size, const T& val)
            : m_size(size)
            , m_buffer(size)
        {
            if (begin())
                TypeOperations::uninitializedFill(begin(), end(), val);
        }

        void fill(const T&, size_t);
        void fill(const T& val) { fill(val, size()); }

        template<typename Iterator> void appendRange(Iterator start, Iterator end);

        T* releaseBuffer();

        void swap(Vector<T, inlineCapacity>& other)
        {
            std::swap(m_size, other.m_size);
            m_buffer.swap(other.m_buffer);
        }

    private:
        void expandCapacity(size_t newMinCapacity);
        const T* expandCapacity(size_t newMinCapacity, const T*);
        template<typename U> U* expandCapacity(size_t newMinCapacity, U*);

        size_t m_size;
        Buffer m_buffer;
    };
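
    // Usage sketch (illustrative caller-side code, not part of this header):
    // Vector<T, inlineCapacity> keeps up to inlineCapacity elements in the object
    // itself and only calls fastMalloc once that inline buffer is exceeded.
#if 0
    Vector<int, 16> numbers;          // no heap allocation yet
    for (int i = 0; i < 32; ++i)
        numbers.append(i);            // spills to a heap buffer past 16 elements
    int lastValue = numbers.last();   // 31
    numbers.remove(0);                // shifts the remaining elements down one slot
    numbers.shrinkToFit();            // drops any unused capacity
#endif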

    template<typename T, size_t inlineCapacity>
    Vector<T, inlineCapacity>::Vector(const Vector& other)
        : m_size(other.size())
        , m_buffer(other.capacity())
    {
        if (begin())
            TypeOperations::uninitializedCopy(other.begin(), other.end(), begin());
    }

    template<typename T, size_t inlineCapacity>
    template<size_t otherCapacity>
    Vector<T, inlineCapacity>::Vector(const Vector<T, otherCapacity>& other)
        : m_size(other.size())
        , m_buffer(other.capacity())
    {
        if (begin())
            TypeOperations::uninitializedCopy(other.begin(), other.end(), begin());
    }

    template<typename T, size_t inlineCapacity>
    Vector<T, inlineCapacity>& Vector<T, inlineCapacity>::operator=(const Vector<T, inlineCapacity>& other)
    {
        if (&other == this)
            return *this;

        if (size() > other.size())
            shrink(other.size());
        else if (other.size() > capacity()) {
            clear();
            reserveCapacity(other.size());
            if (!begin())
                return *this;
        }

        std::copy(other.begin(), other.begin() + size(), begin());
        TypeOperations::uninitializedCopy(other.begin() + size(), other.end(), end());
        m_size = other.size();

        return *this;
    }

    template<typename T, size_t inlineCapacity>
    template<size_t otherCapacity>
    Vector<T, inlineCapacity>& Vector<T, inlineCapacity>::operator=(const Vector<T, otherCapacity>& other)
    {
        if (size() > other.size())
            shrink(other.size());
        else if (other.size() > capacity()) {
            clear();
            reserveCapacity(other.size());
            if (!begin())
                return *this;
        }

        std::copy(other.begin(), other.begin() + size(), begin());
        TypeOperations::uninitializedCopy(other.begin() + size(), other.end(), end());
        m_size = other.size();

        return *this;
    }

    template<typename T, size_t inlineCapacity>
    void Vector<T, inlineCapacity>::fill(const T& val, size_t newSize)
    {
        if (size() > newSize)
            shrink(newSize);
        else if (newSize > capacity()) {
            clear();
            reserveCapacity(newSize);
            if (!begin())
                return;
        }

        std::fill(begin(), end(), val);
        TypeOperations::uninitializedFill(end(), begin() + newSize, val);
        m_size = newSize;
    }

    template<typename T, size_t inlineCapacity>
    template<typename Iterator>
    void Vector<T, inlineCapacity>::appendRange(Iterator start, Iterator end)
    {
        for (Iterator it = start; it != end; ++it)
            append(*it);
    }

    template<typename T, size_t inlineCapacity>
    void Vector<T, inlineCapacity>::expandCapacity(size_t newMinCapacity)
    {
        reserveCapacity(max(newMinCapacity, max(static_cast<size_t>(16), capacity() + capacity() / 4 + 1)));
    }
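
    // Growth-policy sketch (illustrative, not live code): the expression above grows
    // the heap buffer by roughly 25% per reallocation, with a floor of 16 elements,
    // so a long run of append() calls triggers only O(log n) reallocations. Assuming
    // the requested minimum stays small, successive capacities come out as
    // 16, 21, 27, 34, 43, 54, 68, 86, ...
#if 0
    size_t capacity = 0;
    for (int i = 0; i < 8; ++i)
        capacity = max(static_cast<size_t>(16), capacity + capacity / 4 + 1);
#endif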

    template<typename T, size_t inlineCapacity>
    const T* Vector<T, inlineCapacity>::expandCapacity(size_t newMinCapacity, const T* ptr)
    {
        if (ptr < begin() || ptr >= end()) {
            expandCapacity(newMinCapacity);
            return ptr;
        }
        size_t index = ptr - begin();
        expandCapacity(newMinCapacity);
        return begin() + index;
    }

    template<typename T, size_t inlineCapacity> template<typename U>
    inline U* Vector<T, inlineCapacity>::expandCapacity(size_t newMinCapacity, U* ptr)
    {
        expandCapacity(newMinCapacity);
        return ptr;
    }

    template<typename T, size_t inlineCapacity>
    void Vector<T, inlineCapacity>::resize(size_t size)
    {
        if (size <= m_size)
            TypeOperations::destruct(begin() + size, end());
        else {
            if (size > capacity())
                expandCapacity(size);
            if (begin())
                TypeOperations::initialize(end(), begin() + size);
        }

        m_size = size;
    }

    template<typename T, size_t inlineCapacity>
    void Vector<T, inlineCapacity>::shrink(size_t size)
    {
        ASSERT(size <= m_size);
        TypeOperations::destruct(begin() + size, end());
        m_size = size;
    }

    template<typename T, size_t inlineCapacity>
    void Vector<T, inlineCapacity>::grow(size_t size)
    {
        ASSERT(size >= m_size);
        if (size > capacity())
            expandCapacity(size);
        if (begin())
            TypeOperations::initialize(end(), begin() + size);
        m_size = size;
    }

    template<typename T, size_t inlineCapacity>
    void Vector<T, inlineCapacity>::reserveCapacity(size_t newCapacity)
    {
        if (newCapacity <= capacity())
            return;
        T* oldBuffer = begin();
        T* oldEnd = end();
        m_buffer.allocateBuffer(newCapacity);
        if (begin())
            TypeOperations::move(oldBuffer, oldEnd, begin());
        m_buffer.deallocateBuffer(oldBuffer);
    }

    template<typename T, size_t inlineCapacity>
    void Vector<T, inlineCapacity>::shrinkCapacity(size_t newCapacity)
    {
        if (newCapacity >= capacity())
            return;

        if (newCapacity < size())
            shrink(newCapacity);

        T* oldBuffer = begin();
        if (newCapacity > 0) {
            T* oldEnd = end();
            m_buffer.allocateBuffer(newCapacity);
            if (begin() != oldBuffer)
                TypeOperations::move(oldBuffer, oldEnd, begin());
        }

        m_buffer.deallocateBuffer(oldBuffer);
        m_buffer.restoreInlineBufferIfNeeded();
    }

    // Templatizing these is better than just letting the conversion happen implicitly,
    // because for instance it allows a PassRefPtr to be appended to a RefPtr vector
    // without refcount thrash.

    template<typename T, size_t inlineCapacity> template<typename U>
    void Vector<T, inlineCapacity>::append(const U* data, size_t dataSize)
    {
        size_t newSize = m_size + dataSize;
        if (newSize > capacity()) {
            data = expandCapacity(newSize, data);
            if (!begin())
                return;
        }
        T* dest = end();
        for (size_t i = 0; i < dataSize; ++i)
            new (&dest[i]) T(data[i]);
        m_size = newSize;
    }

    template<typename T, size_t inlineCapacity> template<typename U>
    inline void Vector<T, inlineCapacity>::append(const U& val)
    {
        const U* ptr = &val;
        if (size() == capacity()) {
            ptr = expandCapacity(size() + 1, ptr);
            if (!begin())
                return;
        }

#if COMPILER(MSVC7)
        // FIXME: MSVC7 generates compilation errors when trying to assign
        // a pointer to a Vector of its base class (i.e. can't downcast). So far
        // I've been unable to determine any logical reason for this, so I can
        // only assume it is a bug with the compiler. Casting is a bad solution,
        // however, because it subverts implicit conversions, so a better
        // one is needed.
        new (end()) T(static_cast<T>(*ptr));
#else
        new (end()) T(*ptr);
#endif
        ++m_size;
    }
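
    // Illustrative sketch (caller-side; Widget and Widget::create() are hypothetical):
    // because append() is templated on U, the argument is converted directly into the
    // new slot with placement new, so appending a PassRefPtr<Widget> to a
    // Vector<RefPtr<Widget> > constructs the RefPtr element in place rather than
    // through a temporary RefPtr, which is the refcount thrash the comment above
    // refers to.
#if 0
    Vector<RefPtr<Widget> > widgets;
    widgets.append(Widget::create()); // create() assumed to return PassRefPtr<Widget>
#endif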

    // This version of append saves a branch in the case where you know that the
    // vector's capacity is large enough for the append to succeed.

    template<typename T, size_t inlineCapacity> template<typename U>
    inline void Vector<T, inlineCapacity>::uncheckedAppend(const U& val)
    {
        ASSERT(size() < capacity());
        const U* ptr = &val;
        new (end()) T(*ptr);
        ++m_size;
    }

    // This method should not be called append, a better name would be appendElements.
    // It could also be eliminated entirely, and call sites could just use
    // appendRange(val.begin(), val.end()).
    template<typename T, size_t inlineCapacity> template<size_t otherCapacity>
    inline void Vector<T, inlineCapacity>::append(const Vector<T, otherCapacity>& val)
    {
        append(val.begin(), val.size());
    }

    template<typename T, size_t inlineCapacity> template<typename U>
    void Vector<T, inlineCapacity>::insert(size_t position, const U* data, size_t dataSize)
    {
        ASSERT(position <= size());
        size_t newSize = m_size + dataSize;
        if (newSize > capacity()) {
            data = expandCapacity(newSize, data);
            if (!begin())
                return;
        }
        T* spot = begin() + position;
        TypeOperations::moveOverlapping(spot, end(), spot + dataSize);
        for (size_t i = 0; i < dataSize; ++i)
            new (&spot[i]) T(data[i]);
        m_size = newSize;
    }

    template<typename T, size_t inlineCapacity> template<typename U>
    inline void Vector<T, inlineCapacity>::insert(size_t position, const U& val)
    {
        ASSERT(position <= size());
        const U* data = &val;
        if (size() == capacity()) {
            data = expandCapacity(size() + 1, data);
            if (!begin())
                return;
        }
        T* spot = begin() + position;
        TypeOperations::moveOverlapping(spot, end(), spot + 1);
        new (spot) T(*data);
        ++m_size;
    }

    template<typename T, size_t inlineCapacity> template<typename U, size_t c>
    inline void Vector<T, inlineCapacity>::insert(size_t position, const Vector<U, c>& val)
    {
        insert(position, val.begin(), val.size());
    }

    template<typename T, size_t inlineCapacity> template<typename U>
    void Vector<T, inlineCapacity>::prepend(const U* data, size_t dataSize)
    {
        insert(0, data, dataSize);
    }

    template<typename T, size_t inlineCapacity> template<typename U>
    inline void Vector<T, inlineCapacity>::prepend(const U& val)
    {
        insert(0, val);
    }

    template<typename T, size_t inlineCapacity> template<typename U, size_t c>
    inline void Vector<T, inlineCapacity>::prepend(const Vector<U, c>& val)
    {
        insert(0, val.begin(), val.size());
    }

    template<typename T, size_t inlineCapacity>
    inline void Vector<T, inlineCapacity>::remove(size_t position)
    {
        ASSERT(position < size());
        T* spot = begin() + position;
        spot->~T();
        TypeOperations::moveOverlapping(spot + 1, end(), spot);
        --m_size;
    }

    template<typename T, size_t inlineCapacity>
    inline void Vector<T, inlineCapacity>::remove(size_t position, size_t length)
    {
        ASSERT(position < size());
        ASSERT(position + length < size());
        T* beginSpot = begin() + position;
        T* endSpot = beginSpot + length;
        TypeOperations::destruct(beginSpot, endSpot);
        TypeOperations::moveOverlapping(endSpot, end(), beginSpot);
        m_size -= length;
    }

    template<typename T, size_t inlineCapacity>
    inline T* Vector<T, inlineCapacity>::releaseBuffer()
    {
        T* buffer = m_buffer.releaseBuffer();
        if (inlineCapacity && !buffer && m_size) {
            // If the vector had some data, but no buffer to release,
            // that means it was using the inline buffer. In that case,
            // we create a brand new buffer so the caller always gets one.
            size_t bytes = m_size * sizeof(T);
            buffer = static_cast<T*>(fastMalloc(bytes));
            memcpy(buffer, data(), bytes);
        }
        m_size = 0;
        return buffer;
    }
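
    // Usage sketch (illustrative, not live code): releaseBuffer() transfers ownership
    // of the heap storage to the caller and leaves the vector empty. Since the memory
    // comes from fastMalloc (either the original buffer or the copy made above for
    // inline storage), the caller releases it with fastFree.
#if 0
    Vector<char> bytes;
    bytes.append("abc", 3);
    char* raw = bytes.releaseBuffer(); // bytes.size() is now 0
    fastFree(raw);
#endif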

    template<typename T, size_t inlineCapacity>
    void deleteAllValues(const Vector<T, inlineCapacity>& collection)
    {
        typedef typename Vector<T, inlineCapacity>::const_iterator iterator;
        iterator end = collection.end();
        for (iterator it = collection.begin(); it != end; ++it)
            delete *it;
    }

    template<typename T, size_t inlineCapacity>
    inline void swap(Vector<T, inlineCapacity>& a, Vector<T, inlineCapacity>& b)
    {
        a.swap(b);
    }

    template<typename T, size_t inlineCapacity>
    bool operator==(const Vector<T, inlineCapacity>& a, const Vector<T, inlineCapacity>& b)
    {
        if (a.size() != b.size())
            return false;

        return VectorTypeOperations<T>::compare(a.data(), b.data(), a.size());
    }

    template<typename T, size_t inlineCapacity>
    inline bool operator!=(const Vector<T, inlineCapacity>& a, const Vector<T, inlineCapacity>& b)
    {
        return !(a == b);
    }

} // namespace WTF

using WTF::Vector;

#endif // WTF_Vector_h