Index: src/compiler/preprocessor/Tokenizer.l
diff --git a/src/compiler/preprocessor/Tokenizer.l b/src/compiler/preprocessor/Tokenizer.l
index 42547993a54127d7b17f8979e144a77572efb21e..85e4e6d4ad9ce8f1a02ba1d4c8c2c2ab70a2c83d 100644
--- a/src/compiler/preprocessor/Tokenizer.l
+++ b/src/compiler/preprocessor/Tokenizer.l
@@ -267,9 +267,11 @@ FRACTIONAL_CONSTANT ({DIGIT}*"."{DIGIT}+)|({DIGIT}+".")
 namespace pp {
-Tokenizer::Tokenizer(Diagnostics* diagnostics)
-    : mHandle(0),
-      mMaxTokenLength(256)
+// TODO(alokp): Maximum token length should ideally be specified by
+// the preprocessor client, i.e., the compiler.
+const size_t Tokenizer::kMaxTokenLength = 256;
+
+Tokenizer::Tokenizer(Diagnostics* diagnostics) : mHandle(0)
 {
     mContext.diagnostics = diagnostics;
 }
@@ -302,11 +304,11 @@ void Tokenizer::setLineNumber(int line)
 void Tokenizer::lex(Token* token)
 {
     token->type = yylex(&token->text, &token->location, mHandle);
-    if (token->text.size() > mMaxTokenLength)
+    if (token->text.size() > kMaxTokenLength)
     {
         mContext.diagnostics->report(Diagnostics::TOKEN_TOO_LONG,
                                      token->location, token->text);
-        token->text.erase(mMaxTokenLength);
+        token->text.erase(kMaxTokenLength);
     }
     token->flags = 0;