]> git.saurik.com Git - wxWidgets.git/blob - tests/strings/tokenizer.cpp
implement wxTreeCtrl::GetFocusedItem() for wxMSW too (see #10859)
[wxWidgets.git] / tests / strings / tokenizer.cpp
///////////////////////////////////////////////////////////////////////////////
// Name:        tests/strings/tokenizer.cpp
// Purpose:     wxStringTokenizer unit test
// Author:      Vadim Zeitlin
// Created:     2005-12-20 (extracted from strings.cpp)
// RCS-ID:      $Id$
// Copyright:   (c) 2004-2005 Vadim Zeitlin
///////////////////////////////////////////////////////////////////////////////
9
10 // ----------------------------------------------------------------------------
11 // headers
12 // ----------------------------------------------------------------------------
13
14 #include "testprec.h"
15
16 #ifdef __BORLANDC__
17 #pragma hdrstop
18 #endif
19
20 #ifndef WX_PRECOMP
21 #include "wx/wx.h"
22 #endif // WX_PRECOMP
23
24 #include "wx/tokenzr.h"
25
26 // ----------------------------------------------------------------------------
27 // test class
28 // ----------------------------------------------------------------------------
29
// The unit test suite exercising wxStringTokenizer.
//
// Each private method below is one test case; the CPPUNIT_TEST() entries
// register them with the CppUnit framework.
class TokenizerTestCase : public CppUnit::TestCase
{
public:
    TokenizerTestCase() { }

private:
    CPPUNIT_TEST_SUITE( TokenizerTestCase );
        CPPUNIT_TEST( GetCount );
        CPPUNIT_TEST( GetPosition );
        CPPUNIT_TEST( GetString );
        CPPUNIT_TEST( LastDelimiter );
        CPPUNIT_TEST( StrtokCompat );
    CPPUNIT_TEST_SUITE_END();

    // checks CountTokens() against the number of tokens actually returned
    void GetCount();

    // checks GetPosition() after each GetNextToken() call
    void GetPosition();

    // checks GetString(), i.e. the not yet tokenized remainder
    void GetString();

    // checks GetLastDelimiter() after each token and at the end
    void LastDelimiter();

    // checks that wxTOKEN_STRTOK mode matches the standard strtok()
    void StrtokCompat();

    DECLARE_NO_COPY_CLASS(TokenizerTestCase)
};
52
// register in the unnamed registry so that these tests are run by default
CPPUNIT_TEST_SUITE_REGISTRATION( TokenizerTestCase );

// also include in its own registry so that these tests can be run alone
CPPUNIT_TEST_SUITE_NAMED_REGISTRATION( TokenizerTestCase, "TokenizerTestCase" );
58
59 // ----------------------------------------------------------------------------
60 // test data
61 // ----------------------------------------------------------------------------
62
// Table of tokenizer inputs and the number of tokens expected for each
// combination of input string, delimiter set and tokenizer mode; shared by
// GetCount() and StrtokCompat().
//
// NOTE(review): some expected counts suggest the original literals contained
// runs of consecutive spaces which a web viewer may have collapsed -- verify
// the string contents against the repository before relying on them.
static const struct TokenizerTestData
{
    // the string to tokenize
    const wxChar *str;

    // the delimiters to use
    const wxChar *delims;

    // the tokenizer mode
    wxStringTokenizerMode mode;

    // expected number of tokens
    size_t count;
}
gs_testData[] =
{
    // empty input yields no tokens in any mode
    { _T(""), _T(" "), wxTOKEN_DEFAULT, 0 },
    { _T(""), _T(" "), wxTOKEN_RET_EMPTY, 0 },
    { _T(""), _T(" "), wxTOKEN_RET_EMPTY_ALL, 0 },
    { _T(""), _T(" "), wxTOKEN_RET_DELIMS, 0 },

    // delimiter-only strings: only RET_EMPTY_ALL counts the trailing empty token
    { _T(":"), _T(":"), wxTOKEN_RET_EMPTY, 1 },
    { _T(":"), _T(":"), wxTOKEN_RET_DELIMS, 1 },
    { _T(":"), _T(":"), wxTOKEN_RET_EMPTY_ALL, 2 },
    { _T("::"), _T(":"), wxTOKEN_RET_EMPTY, 1 },
    { _T("::"), _T(":"), wxTOKEN_RET_DELIMS, 1 },
    { _T("::"), _T(":"), wxTOKEN_RET_EMPTY_ALL, 3 },

    { _T("Hello, world"), _T(" "), wxTOKEN_DEFAULT, 2 },
    { _T("Hello, world "), _T(" "), wxTOKEN_DEFAULT, 2 },
    { _T("Hello, world"), _T(","), wxTOKEN_DEFAULT, 2 },
    { _T("Hello, world!"), _T(",!"), wxTOKEN_DEFAULT, 2 },
    // consecutive delimiters: DEFAULT keeps the empty token, STRTOK drops it
    { _T("Hello,, world!"), _T(",!"), wxTOKEN_DEFAULT, 3 },
    { _T("Hello,, world!"), _T(",!"), wxTOKEN_STRTOK, 2 },
    { _T("Hello, world!"), _T(",!"), wxTOKEN_RET_EMPTY_ALL, 3 },

    // a realistic /etc/passwd-style line
    { _T("username:password:uid:gid:gecos:home:shell"),
      _T(":"), wxTOKEN_DEFAULT, 7 },

    // inner empty field and trailing delimiter
    { _T("1:2::3:"), _T(":"), wxTOKEN_DEFAULT, 4 },
    { _T("1:2::3:"), _T(":"), wxTOKEN_RET_EMPTY, 4 },
    { _T("1:2::3:"), _T(":"), wxTOKEN_RET_EMPTY_ALL, 5 },
    { _T("1:2::3:"), _T(":"), wxTOKEN_RET_DELIMS, 4 },
    { _T("1:2::3:"), _T(":"), wxTOKEN_STRTOK, 3 },

    // as above but with two trailing delimiters
    { _T("1:2::3::"), _T(":"), wxTOKEN_DEFAULT, 4 },
    { _T("1:2::3::"), _T(":"), wxTOKEN_RET_EMPTY, 4 },
    { _T("1:2::3::"), _T(":"), wxTOKEN_RET_EMPTY_ALL, 6 },
    { _T("1:2::3::"), _T(":"), wxTOKEN_RET_DELIMS, 4 },
    { _T("1:2::3::"), _T(":"), wxTOKEN_STRTOK, 3 },

    // mixed whitespace with the default delimiter set
    { _T("1 \t3\t4 6 "), wxDEFAULT_DELIMITERS, wxTOKEN_DEFAULT, 4 },
    { _T("1 \t3\t4 6 "), wxDEFAULT_DELIMITERS, wxTOKEN_STRTOK, 4 },
    { _T("1 \t3\t4 6 "), wxDEFAULT_DELIMITERS, wxTOKEN_RET_EMPTY, 6 },
    { _T("1 \t3\t4 6 "), wxDEFAULT_DELIMITERS, wxTOKEN_RET_EMPTY_ALL, 9 },

    // multiple delimiter characters in one set
    { _T("01/02/99"), _T("/-"), wxTOKEN_DEFAULT, 3 },
    { _T("01-02/99"), _T("/-"), wxTOKEN_RET_DELIMS, 3 },
};
121
122 // helper function returning the string showing the index for which the test
123 // fails in the diagnostic message
124 static std::string Nth(size_t n)
125 {
126 return std::string(wxString::Format(_T("for loop index %lu"),
127 (unsigned long)n).mb_str());
128 }
129
130 // ----------------------------------------------------------------------------
131 // the tests
132 // ----------------------------------------------------------------------------
133
134 void TokenizerTestCase::GetCount()
135 {
136 for ( size_t n = 0; n < WXSIZEOF(gs_testData); n++ )
137 {
138 const TokenizerTestData& ttd = gs_testData[n];
139
140 wxStringTokenizer tkz(ttd.str, ttd.delims, ttd.mode);
141 CPPUNIT_ASSERT_EQUAL_MESSAGE( Nth(n), ttd.count, tkz.CountTokens() );
142
143 size_t count = 0;
144 while ( tkz.HasMoreTokens() )
145 {
146 tkz.GetNextToken();
147 count++;
148 }
149
150 CPPUNIT_ASSERT_EQUAL_MESSAGE( Nth(n), ttd.count, count );
151 }
152 }
153
// call this with the string to tokenize, the delimiters to use and the
// expected positions (i.e. results of GetPosition()) after each
// GetNextToken() call; terminate the list of positions with 0
157 static void
158 DoTestGetPosition(const wxChar *s, const wxChar *delims, int pos, ...)
159 {
160 wxStringTokenizer tkz(s, delims);
161
162 CPPUNIT_ASSERT_EQUAL( (size_t)0, tkz.GetPosition() );
163
164 va_list ap;
165 va_start(ap, pos);
166
167 for ( ;; )
168 {
169 if ( !pos )
170 {
171 CPPUNIT_ASSERT( !tkz.HasMoreTokens() );
172 break;
173 }
174
175 tkz.GetNextToken();
176
177 CPPUNIT_ASSERT_EQUAL( (size_t)pos, tkz.GetPosition() );
178
179 pos = va_arg(ap, int);
180 }
181
182 va_end(ap);
183 }
184
void TokenizerTestCase::GetPosition()
{
    // no delimiter present: a single token, position jumps to the end
    DoTestGetPosition(_T("foo"), _T("_"), 3, 0);
    // after each token the position is just past the consumed delimiter
    DoTestGetPosition(_T("foo_bar"), _T("_"), 4, 7, 0);
    // trailing delimiter: final position is the full string length
    DoTestGetPosition(_T("foo_bar_"), _T("_"), 4, 8, 0);
}
191
192 // helper for GetString(): the parameters are the same as for DoTestGetPosition
193 // but it checks GetString() return value instead of GetPosition()
194 static void
195 DoTestGetString(const wxChar *s, const wxChar *delims, int pos, ...)
196 {
197 wxStringTokenizer tkz(s, delims);
198
199 CPPUNIT_ASSERT_EQUAL( wxString(s), tkz.GetString() );
200
201 va_list ap;
202 va_start(ap, pos);
203
204 for ( ;; )
205 {
206 if ( !pos )
207 {
208 CPPUNIT_ASSERT( tkz.GetString().empty() ) ;
209 break;
210 }
211
212 tkz.GetNextToken();
213
214 CPPUNIT_ASSERT_EQUAL( wxString(s + pos), tkz.GetString() );
215
216 pos = va_arg(ap, int);
217 }
218
219 va_end(ap);
220 }
221
void TokenizerTestCase::GetString()
{
    // same inputs as GetPosition(): the remainder should start at these
    // offsets after each extracted token
    DoTestGetString(_T("foo"), _T("_"), 3, 0);
    DoTestGetString(_T("foo_bar"), _T("_"), 4, 7, 0);
    DoTestGetString(_T("foo_bar_"), _T("_"), 4, 8, 0);
}
228
229 void TokenizerTestCase::LastDelimiter()
230 {
231 wxStringTokenizer tkz(_T("a+-b=c"), _T("+-="));
232
233 tkz.GetNextToken();
234 CPPUNIT_ASSERT_EQUAL( _T('+'), tkz.GetLastDelimiter() );
235
236 tkz.GetNextToken();
237 CPPUNIT_ASSERT_EQUAL( _T('-'), tkz.GetLastDelimiter() );
238
239 tkz.GetNextToken();
240 CPPUNIT_ASSERT_EQUAL( _T('='), tkz.GetLastDelimiter() );
241
242 tkz.GetNextToken();
243 CPPUNIT_ASSERT_EQUAL( _T('\0'), tkz.GetLastDelimiter() );
244 }
245
// Checks that in wxTOKEN_STRTOK mode the tokenizer returns exactly the same
// sequence of tokens as the standard strtok() function (via wxStrtok()).
void TokenizerTestCase::StrtokCompat()
{
    for ( size_t n = 0; n < WXSIZEOF(gs_testData); n++ )
    {
        const TokenizerTestData& ttd = gs_testData[n];
        // only the strtok()-compatible entries are meaningful here
        if ( ttd.mode != wxTOKEN_STRTOK )
            continue;

        // wxStrtok() modifies the string it parses, so tokenize a private
        // copy; the buffer type must match the build's wxChar width
#if wxUSE_UNICODE
        wxWCharBuffer
#else
        wxCharBuffer
#endif
            buf(ttd.str);
        wxChar *last;
        wxChar *s = wxStrtok(buf.data(), ttd.delims, &last);

        wxStringTokenizer tkz(ttd.str, ttd.delims, ttd.mode);
        while ( tkz.HasMoreTokens() )
        {
            // each tokenizer token must match the corresponding strtok() one
            CPPUNIT_ASSERT_EQUAL( wxString(s), tkz.GetNextToken() );
            s = wxStrtok(NULL, ttd.delims, &last);
        }
    }
}
271
272