Lucene.Net.Analysis.Synonym.TestSlowSynonymFilter.TestIncludeOrig C# (CSharp) Method

TestIncludeOrig() public method

public virtual TestIncludeOrig ( ) : void
Return void
        public virtual void TestIncludeOrig()
        {
            SlowSynonymMap map = new SlowSynonymMap();

            bool orig = true;
            bool merge = true;
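            // includeOrig = true: the original tokens stay in the stream and the
            // mapped synonym tokens are injected alongside them.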
            map.Add(Strings("a b"), Tokens("ab"), orig, merge);
            map.Add(Strings("a c"), Tokens("ac"), orig, merge);
            map.Add(Strings("a"), Tokens("aa"), orig, merge);
            map.Add(Strings("b"), Tokens("bb"), orig, merge);
            map.Add(Strings("z x c v"), Tokens("zxcv"), orig, merge);
            map.Add(Strings("x c"), Tokens("xc"), orig, merge);

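            // The int[] arguments are the expected position increments: injected
            // synonyms share the position of the original token (increment 0).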
            AssertTokenizesTo(map, "$", new string[] { "$" }, new int[] { 1 });
            AssertTokenizesTo(map, "a", new string[] { "a", "aa" }, new int[] { 1, 0 });
            AssertTokenizesTo(map, "a", new string[] { "a", "aa" }, new int[] { 1, 0 });
            AssertTokenizesTo(map, "$ a", new string[] { "$", "a", "aa" }, new int[] { 1, 1, 0 });
            AssertTokenizesTo(map, "a $", new string[] { "a", "aa", "$" }, new int[] { 1, 0, 1 });
            AssertTokenizesTo(map, "$ a !", new string[] { "$", "a", "aa", "!" }, new int[] { 1, 1, 0, 1 });
            AssertTokenizesTo(map, "a a", new string[] { "a", "aa", "a", "aa" }, new int[] { 1, 0, 1, 0 });
            AssertTokenizesTo(map, "b", new string[] { "b", "bb" }, new int[] { 1, 0 });
            AssertTokenizesTo(map, "z x c v", new string[] { "z", "zxcv", "x", "c", "v" }, new int[] { 1, 0, 1, 1, 1 });
            AssertTokenizesTo(map, "z x c $", new string[] { "z", "x", "xc", "c", "$" }, new int[] { 1, 1, 0, 1, 1 });

            // check for lack of recursion
            map.Add(Strings("zoo zoo"), Tokens("zoo"), orig, merge);
            // CHECKME: I think the previous test (with 4 zoo's), was just a typo.
            AssertTokenizesTo(map, "zoo zoo $ zoo", new string[] { "zoo", "zoo", "zoo", "$", "zoo" }, new int[] { 1, 0, 1, 1, 1 });

            map.Add(Strings("zoo"), Tokens("zoo zoo"), orig, merge);
            AssertTokenizesTo(map, "zoo zoo $ zoo", new string[] { "zoo", "zoo", "zoo", "$", "zoo", "zoo", "zoo" }, new int[] { 1, 0, 1, 1, 1, 0, 1 });
        }
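For context, here is a minimal sketch of how a SlowSynonymMap built this way might be exercised outside the test harness: a whitespace-tokenized stream is passed through SlowSynonymFilter and the resulting terms and position increments are printed. The Token construction, the LuceneVersion constant, and the attribute access below follow the Lucene.NET 4.x API as I understand it; they are assumptions rather than part of the original test, so verify them against your version.

        using System;
        using System.Collections.Generic;
        using System.IO;
        using Lucene.Net.Analysis;
        using Lucene.Net.Analysis.Core;
        using Lucene.Net.Analysis.Synonym;
        using Lucene.Net.Analysis.TokenAttributes;
        using Lucene.Net.Util;

        public static class SlowSynonymDemo
        {
            public static void Main()
            {
                // Same shape of call as in the test: match list, replacement tokens,
                // includeOrig = true, merge = true.
                var map = new SlowSynonymMap();
                map.Add(new List<string> { "a" },
                        new List<Token> { new Token("aa", 0, 2) },
                        true, true);

                // Tokenize "a $" on whitespace, then inject synonyms.
                TokenStream stream = new WhitespaceTokenizer(LuceneVersion.LUCENE_48, new StringReader("a $"));
                stream = new SlowSynonymFilter(stream, map);

                var term = stream.AddAttribute<ICharTermAttribute>();
                var posInc = stream.AddAttribute<IPositionIncrementAttribute>();

                stream.Reset();
                while (stream.IncrementToken())
                {
                    // Expected, per the assertions above: a/1, aa/0, $/1
                    Console.WriteLine($"{term} / {posInc.PositionIncrement}");
                }
                stream.End();
                stream.Dispose();
            }
        }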