blob: 611df2ffed72b2dd6f854f6aadf3db1bb998513e (
plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
|
/*
* MRustC - Rust Compiler
* - By John Hodge (Mutabah/thePowersGang)
*
* parse/tokenstream.cpp
* - TokenStream - Parser token source interface
*/
#include "tokenstream.hpp"
#include <common.hpp>
#include "parseerror.hpp"
const bool DEBUG_PRINT_TOKENS = false;
//const bool DEBUG_PRINT_TOKENS = true;
//#define DEBUG_PRINT_TOKENS debug_enabled("Lexer Tokens")
//#define FULL_TRACE
// Construct a token stream with the single-token putback cache empty.
TokenStream::TokenStream():
m_cache_valid(false)
{
}
// Out-of-line destructor (anchors the vtable for this polymorphic base).
TokenStream::~TokenStream()
{
}
// Pull the next token from the concrete source (realGetToken), stamping the
// stream's current position onto any non-EOF token that lacks one.
Token TokenStream::innerGetToken()
{
    auto tok = this->realGetToken();
    // A token with an empty filename never had a position assigned by the
    // underlying source; tag it with where this stream currently is.
    if( tok != TOK_EOF && tok.get_pos().filename == "" )
    {
        tok.set_pos( this->getPosition() );
    }
    //DEBUG("tok.get_pos() = " << tok.get_pos());
    return tok;
}
// Consume and return the next token. Sources, in priority order:
// 1. the putback cache, 2. the lookahead buffer, 3. the underlying lexer.
// Updates m_hygiene to match the returned token for cases 2 and 3.
Token TokenStream::getToken()
{
    // 1. A token previously pushed back with putback()
    if( m_cache_valid )
    {
#ifdef FULL_TRACE
        DEBUG("<<< " << m_cache << " (cache)");
#endif
        m_cache_valid = false;
        return mv$(m_cache);
    }

    // 2. A token buffered by a prior lookahead() call
    if( !m_lookahead.empty() )
    {
        Token rv = mv$( m_lookahead.front().first );
        m_hygiene = m_lookahead.front().second;
        m_lookahead.erase(m_lookahead.begin());
#ifdef FULL_TRACE
        DEBUG("<<< " << rv << " (lookahead)");
#endif
        if( DEBUG_PRINT_TOKENS ) {
            ::std::cout << "getToken[" << typeid(*this).name() << "] - " << rv.get_pos() << "-" << rv << ::std::endl;
        }
        return rv;
    }

    // 3. A fresh token from the underlying source
    Token rv = this->innerGetToken();
    m_hygiene = this->realGetHygiene();
#ifdef FULL_TRACE
    DEBUG("<<< " << rv << " (new)");
#endif
    if( DEBUG_PRINT_TOKENS ) {
        ::std::cout << "getToken[" << typeid(*this).name() << "] - " << rv.get_pos() << "-" << rv << ::std::endl;
    }
    return rv;
}
// Push a single token back onto the stream so the next getToken() returns it.
// Only one level of putback is supported; a second putback before the cached
// token is consumed is a parser bug.
void TokenStream::putback(Token tok)
{
    if( m_cache_valid )
    {
        DEBUG("" << getPosition() << " - Double putback: " << tok << " but " << m_cache);
        throw ParseError::BugCheck("Double putback");
    }
#ifdef FULL_TRACE
    DEBUG(">>> " << tok);
#endif
    m_cache_valid = true;
    m_cache = mv$(tok);
}
// Peek at the type of the i-th upcoming token without consuming anything.
// Index 0 is the very next token (which may be the putback cache); depth is
// capped at MAX_LOOKAHEAD beyond the cache.
eTokenType TokenStream::lookahead(unsigned int i)
{
    const unsigned int MAX_LOOKAHEAD = 3;

    // The putback slot, when occupied, occupies index zero.
    if( m_cache_valid )
    {
        if( i == 0 )
            return m_cache.type();
        i -= 1;
    }

    if( i >= MAX_LOOKAHEAD )
        throw ParseError::BugCheck("Excessive lookahead");

    // Fill the buffer from the underlying source until it is deep enough,
    // capturing the hygiene alongside each token.
    while( m_lookahead.size() <= i )
    {
        DEBUG("lookahead - read #" << m_lookahead.size());
        auto t = this->innerGetToken();
        auto h = this->realGetHygiene();
        m_lookahead.push_back( ::std::make_pair(mv$(t), mv$(h)) );
    }

    DEBUG("lookahead(" << i << ") = " << m_lookahead[i]);
    return m_lookahead[i].first.type();
}
// Return the hygiene context associated with the most recently consumed
// token (updated by getToken()).
Ident::Hygiene TokenStream::getHygiene() const
{
    return m_hygiene;
}
// Begin a span at the stream's current position; pair with end_span() once
// the spanned construct has been fully parsed.
ProtoSpan TokenStream::start_span() const
{
    const auto pos = this->getPosition();
    ProtoSpan rv { pos.filename, pos.line, pos.ofs };
    return rv;
}
// Close a span opened by start_span(): combine its recorded start point with
// the stream's current position, and attach the enclosing (outer) span.
Span TokenStream::end_span(ProtoSpan ps) const
{
    const auto end_pos = this->getPosition();
    Span rv( ps.filename, ps.start_line, ps.start_ofs, end_pos.line, end_pos.ofs );
    rv.outer_span = this->outerSpan();
    return rv;
}
// Build a zero-width span at the stream's current position, with the
// enclosing (outer) span attached.
Span TokenStream::point_span() const
{
    Span rv = this->getPosition();
    rv.outer_span = this->outerSpan();
    return rv;
}
// Convert an identifier-like token into an Ident carrying the stream's
// current hygiene. Accepts TOK_IDENT and TOK_LIFETIME; anything else is a
// parse error at this point.
Ident TokenStream::get_ident(Token tok) const
{
    switch( tok.type() )
    {
    case TOK_IDENT:
        return Ident(getHygiene(), tok.str());
    case TOK_LIFETIME:
        // TODO: Maybe only when it's explicitly asked for?
        return Ident(getHygiene(), tok.str());
    case TOK_INTERPOLATED_IDENT:
        TODO(getPosition(), "get_ident from TOK_INTERPOLATED_IDENT");
    default:
        throw ParseError::Unexpected(*this, tok);
    }
}
|