// Boost tokenizer examples -------------------------------------------------//

// © Copyright John R. Bandela 2001.

// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)

// See http://www.boost.org for updates, documentation, and revision history.

#include <iostream>
#include <iterator>
#include <string>
#include <boost/tokenizer.hpp>
#include <boost/array.hpp>

#include <boost/test/minimal.hpp>

int test_main( int argc, char* argv[] )
{
  using namespace std;
  using namespace boost;

  // Use tokenizer
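  // char_separator("-;|") treats '-', ';', and '|' as dropped delimiters;
  // with the default empty-token policy, runs of delimiters yield no empty tokens.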
  {
    const string test_string = ";;Hello|world||-foo--bar;yow;baz|";
    string answer[] = { "Hello", "world", "foo", "bar", "yow", "baz" };
    typedef tokenizer<char_separator<char> > Tok;
    char_separator<char> sep("-;|");
    Tok t(test_string, sep);
    BOOST_REQUIRE(equal(t.begin(), t.end(), answer));
  }
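  // Same input, but now '-' and ';' are dropped delimiters, '|' is a kept
  // delimiter returned as its own token, and keep_empty_tokens makes
  // adjacent delimiters produce empty tokens.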
  {
    const string test_string = ";;Hello|world||-foo--bar;yow;baz|";
    string answer[] = { "", "", "Hello", "|", "world", "|", "", "|", "",
                        "foo", "", "bar", "yow", "baz", "|", "" };
    typedef tokenizer<char_separator<char> > Tok;
    char_separator<char> sep("-;", "|", boost::keep_empty_tokens);
    Tok t(test_string, sep);
    BOOST_REQUIRE(equal(t.begin(), t.end(), answer));
  }
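  // tokenizer<> uses the default char_delimiters_separator, which splits on
  // whitespace and punctuation and drops the resulting empty tokens.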
  {
    const string test_string = "This,,is, a.test..";
    string answer[] = { "This", "is", "a", "test" };
    typedef tokenizer<> Tok;
    Tok t(test_string);
    BOOST_REQUIRE(equal(t.begin(), t.end(), answer));
  }

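  // escaped_list_separator with its defaults parses CSV-style fields:
  // ',' separates, '"' quotes (preserving the embedded comma), and '\'
  // escapes the following character.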
  {
    const string test_string = "Field 1,\"embedded,comma\",quote \\\", escape \\\\";
    string answer[] = { "Field 1", "embedded,comma", "quote \"", " escape \\" };
    typedef tokenizer<escaped_list_separator<char> > Tok;
    Tok t(test_string);
    BOOST_REQUIRE(equal(t.begin(), t.end(), answer));
  }

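  // escaped_list_separator can be given alternative character sets: here
  // both '\' and '^' escape, both ',' and ';' separate, and both '"' and
  // '\'' quote.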
  {
    const string test_string = ",1,;2\\\";3\\;,4,5^\\,\'6,7\';";
    string answer[] = { "", "1", "", "2\"", "3;", "4", "5\\", "6,7", "" };
    typedef tokenizer<escaped_list_separator<char> > Tok;
    escaped_list_separator<char> sep("\\^", ",;", "\"\'");
    Tok t(test_string, sep);
    BOOST_REQUIRE(equal(t.begin(), t.end(), answer));
  }

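  // offset_separator cuts the input into fixed-width fields of 2, 2, and 4
  // characters, e.g. an MMDDYYYY date.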
  {
    const string test_string = "12252001";
    string answer[] = { "12", "25", "2001" };
    typedef tokenizer<offset_separator> Tok;
    boost::array<int,3> offsets = {{2,2,4}};
    offset_separator func(offsets.begin(), offsets.end());
    Tok t(test_string, func);
    BOOST_REQUIRE(equal(t.begin(), t.end(), answer));
  }

  // Use token_iterator_generator
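  // token_iterator_generator names the iterator type for a given
  // TokenizerFunc; a default-constructed iterator acts as the end of the
  // token sequence.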
  {
    const string test_string = "This,,is, a.test..";
    string answer[] = { "This", "is", "a", "test" };
    typedef token_iterator_generator<char_delimiters_separator<char> >::type Iter;
    Iter begin = make_token_iterator<string>(test_string.begin(),
      test_string.end(), char_delimiters_separator<char>());
    Iter end;
    BOOST_REQUIRE(equal(begin, end, answer));
  }

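  // The same CSV-style parse as above, driven through token iterators.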
  {
    const string test_string = "Field 1,\"embedded,comma\",quote \\\", escape \\\\";
    string answer[] = { "Field 1", "embedded,comma", "quote \"", " escape \\" };
    typedef token_iterator_generator<escaped_list_separator<char> >::type Iter;
    Iter begin = make_token_iterator<string>(test_string.begin(),
      test_string.end(), escaped_list_separator<char>());
    Iter end;
    BOOST_REQUIRE(equal(begin, end, answer));
  }

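  // With offset_separator the end iterator is built explicitly by running
  // make_token_iterator over the empty range [end, end).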
  {
    const string test_string = "12252001";
    string answer[] = { "12", "25", "2001" };
    typedef token_iterator_generator<offset_separator>::type Iter;
    boost::array<int,3> offsets = {{2,2,4}};
    offset_separator func(offsets.begin(), offsets.end());
    Iter begin = make_token_iterator<string>(test_string.begin(),
      test_string.end(), func);
    Iter end = make_token_iterator<string>(test_string.end(),
      test_string.end(), func);
    BOOST_REQUIRE(equal(begin, end, answer));
  }

  // Test copying
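  // Copies and assignments of token iterators keep independent positions:
  // advancing 'other' does not move 'beg'.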
  {
    const string test_string = "abcdef";
    token_iterator_generator<offset_separator>::type beg, end, other;
    boost::array<int,3> ar = {{1,2,3}};
    offset_separator f(ar.begin(), ar.end());
    beg = make_token_iterator<string>(test_string.begin(), test_string.end(), f);

    ++beg;
    other = beg;
    ++other;

    BOOST_REQUIRE(*beg == "bc");
    BOOST_REQUIRE(*other == "def");

    other = make_token_iterator<string>(test_string.begin(),
      test_string.end(), f);

    BOOST_REQUIRE(*other == "a");
  }

  // Test non-default constructed char_delimiters_separator
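  // Constructed with return_delims = true and "," as the returnable set,
  // the separator hands back each ',' as a token of its own while spaces
  // stay inside the surrounding tokens.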
  {
    const string test_string = "how,are you, doing";
    string answer[] = { "how", ",", "are you", ",", " doing" };
    tokenizer<> t(test_string, char_delimiters_separator<char>(true, ",", ""));
    BOOST_REQUIRE(equal(t.begin(), t.end(), answer));
  }

  return 0;
}