Lucene.Net.Analysis.Synonym.TestSlowSynonymFilter.TestPositionIncrementsWithOrig C# (CSharp) Method

TestPositionIncrementsWithOrig() public method

public virtual TestPositionIncrementsWithOrig ( ) : void
return void
        public virtual void TestPositionIncrementsWithOrig()
        {
            SlowSynonymMap map = new SlowSynonymMap();

            bool orig = true;  // include the original token alongside the generated synonyms
            bool merge = true; // merge these replacements with any existing mapping for the same key

            // test that generated tokens start at the same position as the original
            map.Add(Strings("a"), Tokens("aa"), orig, merge);
            AssertTokenizesTo(map, Tokens("a,5"), new string[] { "a", "aa" }, new int[] { 5, 0 });
            AssertTokenizesTo(map, Tokens("b,1 a,0"), new string[] { "b", "a", "aa" }, new int[] { 1, 0, 0 });

            // test that the position increment of the first replacement token is ignored
            // (it always stacks on the original token's position)
            map.Add(Strings("b"), Tokens("bb,100"), orig, merge);
            AssertTokenizesTo(map, Tokens("b,5"), new string[] { "b", "bb" }, new int[] { 5, 0 });
            AssertTokenizesTo(map, Tokens("c,1 b,0"), new string[] { "c", "b", "bb" }, new int[] { 1, 0, 0 });

            // test that subsequent replacement tokens keep their relative position increments
            map.Add(Strings("c"), Tokens("cc,100 c2,2"), orig, merge);
            AssertTokenizesTo(map, Tokens("c,5"), new string[] { "c", "cc", "c2" }, new int[] { 5, 0, 2 });
            AssertTokenizesTo(map, Tokens("d,1 c,0"), new string[] { "d", "c", "cc", "c2" }, new int[] { 1, 0, 0, 2 });
        }
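
The int arrays passed to AssertTokenizesTo hold the expected position increments of the emitted tokens, and the numbers in strings such as Tokens("a,5") appear to encode the position increments of the input tokens. The following standalone C# sketch (PrintPositions is a hypothetical helper, not part of the test class or of Lucene.Net) shows how those increments accumulate into absolute positions, which is the easiest way to read the expected arrays:

        using System;

        internal static class PositionIncrementDemo
        {
            // Accumulates position increments into absolute positions,
            // mirroring how a TokenStream consumer interprets them.
            private static void PrintPositions(string[] terms, int[] increments)
            {
                int position = 0;
                for (int i = 0; i < terms.Length; i++)
                {
                    position += increments[i];
                    Console.WriteLine($"{terms[i],-4} posInc={increments[i]}  position={position}");
                }
            }

            public static void Main()
            {
                // First assertion: "a,5" expands to { "a", "aa" } with increments { 5, 0 },
                // so the generated "aa" lands on the same position as the original "a".
                PrintPositions(new[] { "a", "aa" }, new[] { 5, 0 });

                // Last assertion: "d,1 c,0" expands to { "d", "c", "cc", "c2" } with
                // increments { 1, 0, 0, 2 }: "cc" stacks on "c" (its own increment of 100
                // from the map is ignored), while "c2" keeps its relative increment of 2.
                PrintPositions(new[] { "d", "c", "cc", "c2" }, new[] { 1, 0, 0, 2 });
            }
        }

Run as a console program, this prints position 5 for both "a" and "aa", and positions 1, 1, 1, 3 for "d", "c", "cc", "c2", matching what the assertions above expect from the filter when the original token is kept.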