// libs/spirit/test/lex/regression_wide.cpp (imported upstream Boost 1.72.0)
//  Copyright (c) 2001-2010 Hartmut Kaiser
//  Copyright (c) 2010 Sergey "GooRoo" Olendarenko
//
//  Distributed under the Boost Software License, Version 1.0. (See accompanying
//  file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)

#include <boost/detail/lightweight_test.hpp>
#include <boost/config/warning_disable.hpp>

#include <cstdlib>
#include <iostream>
#include <locale>
#include <string>

#include <boost/spirit/include/lex_lexertl.hpp>
#include <boost/spirit/include/phoenix_function.hpp>
#include <boost/spirit/include/phoenix_operator.hpp>

namespace lex = boost::spirit::lex;
namespace phoenix = boost::phoenix;

typedef std::basic_string<wchar_t> wstring_type;

///////////////////////////////////////////////////////////////////////////////
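// Token ids assigned explicitly to the token definitions of the lexer below.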
enum tokenids
{
    ID_IDENT = 1,
    ID_CONSTANT,
    ID_OPERATION,
    ID_BRACKET
};

///////////////////////////////////////////////////////////////////////////////
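// Expected tokenization of the input expression: each entry pairs the
// expected token id with the text the token should match.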
struct test_data
{
    tokenids     tokenid;
    wstring_type value;
};

// alpha+x1*(2.836-x2[i])
test_data data[] =
{
    { ID_IDENT, L"alpha" },
    { ID_OPERATION, L"+" },
    { ID_IDENT, L"x1" },
    { ID_OPERATION, L"*" },
    { ID_BRACKET, L"(" },
    { ID_CONSTANT, L"2.836" },
    { ID_OPERATION, L"-" },
    { ID_IDENT, L"x2" },
    { ID_BRACKET, L"[" },
    { ID_IDENT, L"i" },
    { ID_BRACKET, L"]" },
    { ID_BRACKET, L")" }
};

///////////////////////////////////////////////////////////////////////////////
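// Function object invoked as the semantic action for every matched token. It
// checks the token id and the matched text against the next entry in data[],
// and verifies that the token value still holds the raw iterator range
// (which() == 0), i.e. it has not been converted to a typed attribute yet.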
struct test_impl
{
    typedef void result_type;
    template <typename TokenId, typename Value>
    struct result { typedef void type; };

    template <typename TokenId, typename Value>
    void operator()(TokenId const& tokenid, Value const& val) const
    {
        BOOST_TEST(sequence_counter < sizeof(data)/sizeof(data[0]));
        BOOST_TEST(data[sequence_counter].tokenid == tokenids(tokenid));
        BOOST_TEST(0 == val.which());

        typedef boost::iterator_range<wstring_type::iterator> iterator_range;
        iterator_range r = boost::get<iterator_range>(val);
        BOOST_TEST(data[sequence_counter].value ==
            wstring_type(r.begin(), r.end()));

        ++sequence_counter;
    }

    static std::size_t sequence_counter;
};
std::size_t test_impl::sequence_counter = 0;

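// Wrap the function object as a Phoenix function so that test(_tokenid, _val)
// can be used as a lazy semantic action below.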
phoenix::function<test_impl> const test = test_impl();

///////////////////////////////////////////////////////////////////////////////
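// Lexer definition: four wide character token definitions with explicit token
// ids and different attribute types.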
template <typename Lexer>
struct mega_tokens : lex::lexer<Lexer>
{
    mega_tokens()
        : identifier(L"[a-zA-Z_][a-zA-Z0-9_]*", ID_IDENT)
        , constant  (L"[0-9]+(\\.[0-9]+)?", ID_CONSTANT)
        , operation (L"[\\+\\-\\*/]", ID_OPERATION)
        , bracket   (L"[\\(\\)\\[\\]]", ID_BRACKET)
    {
        using lex::_tokenid;
        using lex::_val;

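        // Associate the token definitions with the lexer and attach the test
        // function object as the semantic action of each of them.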
        this->self
            = operation  [ test(_tokenid, _val) ]
            | identifier [ test(_tokenid, _val) ]
            | constant   [ test(_tokenid, _val) ]
            | bracket    [ test(_tokenid, _val) ]
        ;
    }

    lex::token_def<wstring_type, wchar_t, tokenids> identifier;
    lex::token_def<double, wchar_t, tokenids> constant;
    lex::token_def<wchar_t, wchar_t, tokenids> operation;
    lex::token_def<wchar_t, wchar_t, tokenids> bracket;
};

///////////////////////////////////////////////////////////////////////////////
int main()
{
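    // Wide character token type: its value may hold the raw iterator range or
    // one of the attribute types listed in the mpl::vector; mpl::true_ enables
    // lexer state support and tokenids is used as the token id type. The
    // actor_lexer is used so that semantic actions can be attached to the
    // token definitions.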
    typedef wstring_type::iterator base_iterator;
    typedef lex::lexertl::token<
        base_iterator, boost::mpl::vector<wchar_t, wstring_type, double>
      , boost::mpl::true_, tokenids
    > token_type;
    typedef lex::lexertl::actor_lexer<token_type> lexer_type;

    mega_tokens<lexer_type> mega_lexer;

    wstring_type exampleStr = L"alpha+x1*(2.836-x2[i])";
    base_iterator first = exampleStr.begin();

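    // Tokenize the whole input; the semantic action fires once per token and
    // the final check verifies that all expected tokens have been seen.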
    BOOST_TEST(lex::tokenize(first, exampleStr.end(), mega_lexer));
    BOOST_TEST(test_impl::sequence_counter == sizeof(data)/sizeof(data[0]));

    return boost::report_errors();
}