public virtual void TestBigramTokenizer()
{
SlowSynonymMap synMap;
// prepare bi-gram tokenizer factory
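// the factory expects a luceneMatchVersion argument; min/max gram size of 2 makes it emit bi-grams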
IDictionary<string, string> args = new Dictionary<string, string>();
args[AbstractAnalysisFactory.LUCENE_MATCH_VERSION_PARAM] = "4.4";
args["minGramSize"] = "2";
args["maxGramSize"] = "2";
TokenizerFactory tf = new NGramTokenizerFactory(args);
// (ab)->(bc)->(cd)->[ef][fg][gh]
IList<string> rules = new List<string>();
rules.Add("abcd=>efgh");
synMap = new SlowSynonymMap(true);
SlowSynonymFilterFactory.ParseRules(rules, synMap, "=>", ",", true, tf);
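// the match side "abcd" is tokenized into the bi-grams ab/bc/cd, producing a chain of nested submaps;
// the replacement "efgh" becomes the tokens ef/fg/gh attached to the final entry "cd"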
assertEquals(1, synMap.Submap.Count);
assertEquals(1, GetSubSynonymMap(synMap, "ab").Submap.Count);
assertEquals(1, GetSubSynonymMap(GetSubSynonymMap(synMap, "ab"), "bc").Submap.Count);
AssertTokIncludes(GetSubSynonymMap(GetSubSynonymMap(synMap, "ab"), "bc"), "cd", "ef");
AssertTokIncludes(GetSubSynonymMap(GetSubSynonymMap(synMap, "ab"), "bc"), "cd", "fg");
AssertTokIncludes(GetSubSynonymMap(GetSubSynonymMap(synMap, "ab"), "bc"), "cd", "gh");
}