Lucene.Net.Analysis.Sinks.TeeSinkTokenFilter.AddSinkTokenStream C# (CSharp) Method

AddSinkTokenStream() public method

Adds a SinkTokenStream created by another TeeSinkTokenFilter to this one. The supplied stream will also receive all consumed tokens. This method can be used to pass tokens from two different tees to one sink.
public AddSinkTokenStream ( SinkTokenStream sink ) : void
sink SinkTokenStream
Return void
        public void AddSinkTokenStream(SinkTokenStream sink)
        {
            // check that sink has correct factory
            if (!attributeFactory.Equals(sink.attributeFactory))
            {
                throw new System.ArgumentException("The supplied sink is not compatible to this tee");
            }
            // add any attribute impls that might still be missing from the existing sink
            for (var it = CloneAttributes().AttributeImplsIterator; it.MoveNext();)
            {
                sink.AddAttributeImpl(it.Current);
            }
            this.sinks.Add(new WeakReference<SinkTokenStream>(sink));
        }
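
Before the full test below, here is a minimal, self-contained sketch of the "two tees, one sink" pattern the summary describes. It is not taken from the Lucene.NET sources; it assumes Lucene.NET 4.8 namespaces and uses WhitespaceTokenizer and the no-argument NewSinkTokenStream() overload purely for illustration.

using System;
using System.IO;
using Lucene.Net.Analysis.Core;
using Lucene.Net.Analysis.Sinks;
using Lucene.Net.Analysis.TokenAttributes;
using Lucene.Net.Util;

public static class TwoTeesOneSinkDemo
{
    public static void Main()
    {
        LuceneVersion ver = LuceneVersion.LUCENE_48;

        // The first tee creates the sink...
        var tee1 = new TeeSinkTokenFilter(
            new WhitespaceTokenizer(ver, new StringReader("tokens from stream one")));
        TeeSinkTokenFilter.SinkTokenStream sink = tee1.NewSinkTokenStream();

        // ...and the second tee attaches the same sink via AddSinkTokenStream,
        // so the sink receives the tokens consumed by both tees.
        var tee2 = new TeeSinkTokenFilter(
            new WhitespaceTokenizer(ver, new StringReader("tokens from stream two")));
        tee2.AddSinkTokenStream(sink);

        // Consume both source streams first; the sink caches every token it sees.
        tee1.Reset();
        tee1.ConsumeAllTokens();
        tee2.Reset();
        tee2.ConsumeAllTokens();

        // Replay the combined, cached tokens from the sink.
        var term = sink.AddAttribute<ICharTermAttribute>();
        sink.Reset();
        while (sink.IncrementToken())
        {
            Console.WriteLine(term.ToString());
        }
        sink.End();
    }
}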

Usage Example

Example #1
        public virtual void TestMultipleSources()
        {
            TeeSinkTokenFilter tee1 = new TeeSinkTokenFilter(new MockTokenizer(new StringReader(buffer1.ToString()), MockTokenizer.WHITESPACE, false));

            TeeSinkTokenFilter.SinkTokenStream dogDetector = tee1.NewSinkTokenStream(dogFilter);
            TeeSinkTokenFilter.SinkTokenStream theDetector = tee1.NewSinkTokenStream(theFilter);
            tee1.Reset();
            TokenStream source1 = new CachingTokenFilter(tee1);

            tee1.AddAttribute<ICheckClearAttributesAttribute>();
            dogDetector.AddAttribute<ICheckClearAttributesAttribute>();
            theDetector.AddAttribute<ICheckClearAttributesAttribute>();

            TeeSinkTokenFilter tee2 = new TeeSinkTokenFilter(new MockTokenizer(new StringReader(buffer2.ToString()), MockTokenizer.WHITESPACE, false));

            tee2.AddSinkTokenStream(dogDetector);
            tee2.AddSinkTokenStream(theDetector);
            TokenStream source2 = tee2;

            AssertTokenStreamContents(source1, tokens1);
            AssertTokenStreamContents(source2, tokens2);

            AssertTokenStreamContents(theDetector, new string[] { "The", "the", "The", "the" });
            AssertTokenStreamContents(dogDetector, new string[] { "Dogs", "Dogs" });

            source1.Reset();
            TokenStream lowerCasing = new LowerCaseFilter(TEST_VERSION_CURRENT, source1);

            string[] lowerCaseTokens = new string[tokens1.Length];
            for (int i = 0; i < tokens1.Length; i++)
            {
                lowerCaseTokens[i] = CultureInfo.InvariantCulture.TextInfo.ToLower(tokens1[i]);
            }
            AssertTokenStreamContents(lowerCasing, lowerCaseTokens);
        }