public void Tokenize_WithMemberString_TokensAreCorrect()
{
    // Arrange: a fully-qualified member reference, including a key access (".&").
    const string source = "[Aaa].[Bbb].[Ccc].&[1]";
    var expected = new[]
    {
        new Token(TokenType.IdentifierExpression, "[Aaa]"),
        new Token(TokenType.IdentifierSeparator, "."),
        new Token(TokenType.IdentifierExpression, "[Bbb]"),
        new Token(TokenType.IdentifierSeparator, "."),
        new Token(TokenType.IdentifierExpression, "[Ccc]"),
        new Token(TokenType.ValueSeparator, ".&"),
        new Token(TokenType.IdentifierExpression, "[1]"),
        new Token(TokenType.LastToken, ""),
    };

    // Act
    var actual = _lexer.Tokenize(source).ToList();

    // Assert: token count matches, and every token renders identically
    // (tokens are compared via ToString, mirroring the original test's contract).
    Assert.That(actual.Count, Is.EqualTo(expected.Length));
    Assert.That(
        actual.Select(t => t.ToString()),
        Is.EqualTo(expected.Select(t => t.ToString())));
}