public virtual void TestMatching()
{
    // Exercises synonym matching with orig=false (matched input tokens are
    // replaced, not kept) and merge=true (overlapping rules are merged).
    SlowSynonymMap map = new SlowSynonymMap();
    bool keepOrig = false;
    bool doMerge = true;

    map.Add(Strings("a b"), Tokens("ab"), keepOrig, doMerge);
    map.Add(Strings("a c"), Tokens("ac"), keepOrig, doMerge);
    map.Add(Strings("a"), Tokens("aa"), keepOrig, doMerge);
    map.Add(Strings("b"), Tokens("bb"), keepOrig, doMerge);
    map.Add(Strings("z x c v"), Tokens("zxcv"), keepOrig, doMerge);
    map.Add(Strings("x c"), Tokens("xc"), keepOrig, doMerge);

    // Single tokens, non-matching tokens, and boundary positions.
    AssertTokenizesTo(map, "$", new[] { "$" });
    AssertTokenizesTo(map, "a", new[] { "aa" });
    AssertTokenizesTo(map, "a $", new[] { "aa", "$" });
    AssertTokenizesTo(map, "$ a", new[] { "$", "aa" });
    AssertTokenizesTo(map, "a a", new[] { "aa", "aa" });
    AssertTokenizesTo(map, "b", new[] { "bb" });

    // Longest-match wins; a partial multi-token match falls back to shorter rules.
    AssertTokenizesTo(map, "z x c v", new[] { "zxcv" });
    AssertTokenizesTo(map, "z x c $", new[] { "z", "xc", "$" });

    // Repeated identical rules: each registration currently emits its own copy.
    map.Add(Strings("a b"), Tokens("ab"), keepOrig, doMerge);
    map.Add(Strings("a b"), Tokens("ab"), keepOrig, doMerge);
    // FIXME: the below test intended to be { "ab" }
    AssertTokenizesTo(map, "a b", new[] { "ab", "ab", "ab" });

    // A rule mapping a token to itself must not recurse.
    map.Add(Strings("zoo"), Tokens("zoo"), keepOrig, doMerge);
    AssertTokenizesTo(map, "zoo zoo $ zoo", new[] { "zoo", "zoo", "$", "zoo" });

    // A self-expanding rule ("zoo" -> "zoo zoo") must expand only once per match.
    map.Add(Strings("zoo"), Tokens("zoo zoo"), keepOrig, doMerge);
    // FIXME: the below test intended to be { "zoo", "zoo", "zoo", "zoo", "$", "zoo", "zoo" }
    // maybe this was just a typo in the old test????
    AssertTokenizesTo(map, "zoo zoo $ zoo", new[] { "zoo", "zoo", "zoo", "zoo", "zoo", "zoo", "$", "zoo", "zoo", "zoo" });
}