more test cases
[wxWidgets.git] / tests / strings / tokenizer.cpp
1 ///////////////////////////////////////////////////////////////////////////////
// Name:        tests/strings/tokenizer.cpp
3 // Purpose: wxStringTokenizer unit test
4 // Author: Vadim Zeitlin
// Created:     2005-12-20 (extracted from strings.cpp)
6 // RCS-ID: $Id$
7 // Copyright: (c) 2004-2005 Vadim Zeitlin
8 ///////////////////////////////////////////////////////////////////////////////
9
10 // ----------------------------------------------------------------------------
11 // headers
12 // ----------------------------------------------------------------------------
13
14 #include "testprec.h"
15
16 #ifdef __BORLANDC__
17 #pragma hdrstop
18 #endif
19
20 #ifndef WX_PRECOMP
21 #include "wx/wx.h"
22 #endif // WX_PRECOMP
23
24 #include "wx/tokenzr.h"
25
26 // ----------------------------------------------------------------------------
27 // test class
28 // ----------------------------------------------------------------------------
29
// Test case exercising wxStringTokenizer: each CPPUNIT_TEST() line below
// registers one of the private test methods with the suite.
class TokenizerTestCase : public CppUnit::TestCase
{
public:
    TokenizerTestCase() { }

private:
    CPPUNIT_TEST_SUITE( TokenizerTestCase );
        CPPUNIT_TEST( GetCount );
        CPPUNIT_TEST( GetPosition );
        CPPUNIT_TEST( StrtokCompat );
    CPPUNIT_TEST_SUITE_END();

    // checks that CountTokens() and manual enumeration agree with gs_testData
    void GetCount();
    // checks GetPosition() after each GetNextToken() call
    void GetPosition();
    // checks that wxTOKEN_STRTOK mode produces the same tokens as wxStrtok()
    void StrtokCompat();

    DECLARE_NO_COPY_CLASS(TokenizerTestCase)
};
48
// register in the unnamed registry so that these tests are run by default
CPPUNIT_TEST_SUITE_REGISTRATION( TokenizerTestCase );

// also include in its own registry so that these tests can be run alone
CPPUNIT_TEST_SUITE_NAMED_REGISTRATION( TokenizerTestCase, "TokenizerTestCase" );
54
55 // ----------------------------------------------------------------------------
56 // test data
57 // ----------------------------------------------------------------------------
58
// Table of tokenization scenarios shared by GetCount() and StrtokCompat():
// each row gives an input string, the delimiter set, the tokenizer mode and
// the number of tokens that mode is expected to produce.
static const struct TokenizerTestData
{
    // the string to tokenize
    const wxChar *str;

    // the delimiters to use
    const wxChar *delims;

    // the tokenizer mode
    wxStringTokenizerMode mode;

    // expected number of tokens
    size_t count;
}
gs_testData[] =
{
    // empty input yields no tokens in any mode
    { _T(""),                   _T(" "),              wxTOKEN_DEFAULT,       0 },
    { _T(""),                   _T(" "),              wxTOKEN_RET_EMPTY,     0 },
    { _T(""),                   _T(" "),              wxTOKEN_RET_EMPTY_ALL, 0 },

    { _T("Hello, world"),       _T(" "),              wxTOKEN_DEFAULT,       2 },
    { _T("Hello,   world  "),   _T(" "),              wxTOKEN_DEFAULT,       2 },
    { _T("Hello, world"),       _T(","),              wxTOKEN_DEFAULT,       2 },
    { _T("Hello, world!"),      _T(",!"),             wxTOKEN_DEFAULT,       2 },
    // the modes differ in how they treat consecutive/trailing delimiters
    { _T("Hello,, world!"),     _T(",!"),             wxTOKEN_DEFAULT,       3 },
    { _T("Hello,, world!"),     _T(",!"),             wxTOKEN_STRTOK,        2 },
    { _T("Hello, world!"),      _T(",!"),             wxTOKEN_RET_EMPTY_ALL, 3 },

    { _T("username:password:uid:gid:gecos:home:shell"),
                                _T(":"),              wxTOKEN_DEFAULT,       7 },

    // one trailing delimiter: counts differ per mode
    { _T("1:2::3:"),            _T(":"),              wxTOKEN_DEFAULT,       4 },
    { _T("1:2::3:"),            _T(":"),              wxTOKEN_RET_EMPTY,     4 },
    { _T("1:2::3:"),            _T(":"),              wxTOKEN_RET_EMPTY_ALL, 5 },
    { _T("1:2::3:"),            _T(":"),              wxTOKEN_RET_DELIMS,    4 },
    { _T("1:2::3:"),            _T(":"),              wxTOKEN_STRTOK,        3 },

    // two trailing delimiters
    { _T("1:2::3::"),           _T(":"),              wxTOKEN_DEFAULT,       5 },
    { _T("1:2::3::"),           _T(":"),              wxTOKEN_RET_EMPTY,     4 },
    { _T("1:2::3::"),           _T(":"),              wxTOKEN_RET_EMPTY_ALL, 6 },
    { _T("1:2::3::"),           _T(":"),              wxTOKEN_RET_DELIMS,    5 },
    { _T("1:2::3::"),           _T(":"),              wxTOKEN_STRTOK,        3 },

    // whitespace delimiters (spaces and tabs)
    { _T("1 \t3\t4  6   "),     wxDEFAULT_DELIMITERS, wxTOKEN_DEFAULT,       4 },
    { _T("1 \t3\t4  6   "),     wxDEFAULT_DELIMITERS, wxTOKEN_STRTOK,        4 },
    { _T("1 \t3\t4  6   "),     wxDEFAULT_DELIMITERS, wxTOKEN_RET_EMPTY,     6 },
    { _T("1 \t3\t4  6   "),     wxDEFAULT_DELIMITERS, wxTOKEN_RET_EMPTY_ALL, 9 },

    // multiple delimiter characters
    { _T("01/02/99"),           _T("/-"),             wxTOKEN_DEFAULT,       3 },
    { _T("01-02/99"),           _T("/-"),             wxTOKEN_RET_DELIMS,    3 },
};
110
111 // ----------------------------------------------------------------------------
112 // the tests
113 // ----------------------------------------------------------------------------
114
115 void TokenizerTestCase::GetCount()
116 {
117 for ( size_t n = 0; n < WXSIZEOF(gs_testData); n++ )
118 {
119 const TokenizerTestData& ttd = gs_testData[n];
120
121 wxStringTokenizer tkz(ttd.str, ttd.delims, ttd.mode);
122 CPPUNIT_ASSERT_EQUAL( ttd.count, tkz.CountTokens() );
123
124 size_t count = 0;
125 while ( tkz.HasMoreTokens() )
126 {
127 tkz.GetNextToken();
128 count++;
129 }
130
131 CPPUNIT_ASSERT_EQUAL( ttd.count, count );
132 }
133 }
134
135 // call this with the string to tokenize, delimeters to use and the expected
136 // positions (i.e. results of GetPosition()) after each GetNextToken() call,
137 // terminate positions with 0
138 static void
139 DoTestGetPosition(const wxChar *s, const wxChar *delims, int pos, ...)
140 {
141 wxStringTokenizer tkz(s, delims);
142
143 CPPUNIT_ASSERT_EQUAL( (size_t)0, tkz.GetPosition() );
144
145 va_list ap;
146 va_start(ap, pos);
147
148 for ( ;; )
149 {
150 if ( !pos )
151 {
152 CPPUNIT_ASSERT( !tkz.HasMoreTokens() );
153 break;
154 }
155
156 tkz.GetNextToken();
157
158 CPPUNIT_ASSERT_EQUAL( (size_t)pos, tkz.GetPosition() );
159
160 pos = va_arg(ap, int);
161 }
162
163 va_end(ap);
164 }
165
// Exercises DoTestGetPosition() with strings having no, one embedded, and
// one trailing delimiter; the expected positions point just past each
// extracted token (including its trailing delimiter, if any).
void TokenizerTestCase::GetPosition()
{
    DoTestGetPosition(_T("foo"), _T("_"), 3, 0);
    DoTestGetPosition(_T("foo_bar"), _T("_"), 4, 7, 0);
    DoTestGetPosition(_T("foo_bar_"), _T("_"), 4, 8, 0);
}
172
// Checks that wxStringTokenizer in wxTOKEN_STRTOK mode yields exactly the
// same sequence of tokens as the wxStrtok() C-style function for every
// wxTOKEN_STRTOK row of gs_testData.
void TokenizerTestCase::StrtokCompat()
{
    for ( size_t n = 0; n < WXSIZEOF(gs_testData); n++ )
    {
        const TokenizerTestData& ttd = gs_testData[n];
        if ( ttd.mode != wxTOKEN_STRTOK )
            continue;

        // wxStrtok() modifies the string it parses, so work on a writable
        // copy of the (const) test string; the buffer type depends on the
        // build's character width
#if wxUSE_UNICODE
        wxWCharBuffer
#else
        wxCharBuffer
#endif
            buf(ttd.str);
        wxChar *last;        // wxStrtok() iteration state
        wxChar *s = wxStrtok(buf.data(), ttd.delims, &last);

        // both enumerations must produce the same tokens in the same order
        wxStringTokenizer tkz(ttd.str, ttd.delims, ttd.mode);
        while ( tkz.HasMoreTokens() )
        {
            CPPUNIT_ASSERT_EQUAL( wxString(s), tkz.GetNextToken() );
            s = wxStrtok(NULL, ttd.delims, &last);
        }
    }
}
198
199