/// <summary>
/// Loads the imported token vocabulary (.tokens) file — one "name=type"
/// definition per line — and returns the definitions in file order.
/// Syntax errors and a missing or unreadable file are reported through the
/// tool's error manager rather than thrown; a best-effort (possibly empty)
/// map is always returned.
/// </summary>
/// <returns>Insertion-ordered map from token name to numeric token type.</returns>
public virtual IDictionary<string, int> Load() {
    IDictionary<string, int> tokens = new LinkedHashMap<string, int>();
    string fullFile = GetImportedVocabFile();
    AntlrTool tool = g.tool;
    string vocabName = g.GetOptionString("tokenVocab");
    try {
        // Matches "name = type"; the lazy quantifier on the name group keeps
        // trailing spaces/tabs out of it, and the type must be all digits.
        Regex tokenDefPattern = new Regex("([^\n]+?)[ \\t]*?=[ \\t]*?([0-9]+)");
        string[] lines;
        if (tool.grammarEncoding != null) {
            lines = File.ReadAllLines(fullFile, Encoding.GetEncoding(tool.grammarEncoding));
        }
        else {
            lines = File.ReadAllLines(fullFile);
        }

        for (int i = 0; i < lines.Length; i++) {
            string tokenDef = lines[i];
            int lineNum = i + 1; // 1-based line number for error reporting
            Match matcher = tokenDefPattern.Match(tokenDef);
            if (matcher.Success) {
                string tokenID = matcher.Groups[1].Value;
                string tokenTypeS = matcher.Groups[2].Value;
                int tokenType;
                if (!int.TryParse(tokenTypeS, out tokenType)) {
                    // The regex guarantees digits, so this only fails on
                    // overflow; map the bad definition to the invalid type.
                    tool.errMgr.ToolError(ErrorType.TOKENS_FILE_SYNTAX_ERROR,
                                          vocabName + CodeGenerator.VOCAB_FILE_EXTENSION,
                                          " bad token type: " + tokenTypeS,
                                          lineNum);
                    tokenType = TokenTypes.Invalid;
                }

                tool.Log("grammar", "import " + tokenID + "=" + tokenType);
                tokens[tokenID] = tokenType;
            }
            else if (tokenDef.Length > 0) { // ignore blank lines
                tool.errMgr.ToolError(ErrorType.TOKENS_FILE_SYNTAX_ERROR,
                                      vocabName + CodeGenerator.VOCAB_FILE_EXTENSION,
                                      " bad token def: " + tokenDef,
                                      lineNum);
            }
        }
    }
    catch (FileNotFoundException) {
        // Distinguish a vocab named by the grammar's tokenVocab option from
        // one supplied on the command line, so the error points at the source.
        GrammarAST inTree = g.ast.GetOptionAST("tokenVocab");
        string inTreeValue = inTree.Token.Text;
        if (vocabName.Equals(inTreeValue)) {
            tool.errMgr.GrammarError(ErrorType.CANNOT_FIND_TOKENS_FILE_REFD_IN_GRAMMAR,
                                     g.fileName,
                                     inTree.Token,
                                     fullFile);
        }
        else { // must be from -D option on cmd-line not token in tree
            tool.errMgr.ToolError(ErrorType.CANNOT_FIND_TOKENS_FILE_GIVEN_ON_CMDLINE,
                                  fullFile,
                                  g.name);
        }
    }
    catch (Exception e) {
        tool.errMgr.ToolError(ErrorType.ERROR_READING_TOKENS_FILE,
                              e,
                              fullFile,
                              e.Message);
    }

    return tokens;
}