IronRuby.Tests.Tests.TokenCategorizer2 — C# method.

Signature: public void TokenCategorizer2()

A parameterless test method (returns void) exercising the Ruby token categorizer.
        /// <summary>
        /// Exercises the token categorizer: verifies the exact category, trigger flags,
        /// and source span (index, line, column) produced for each token of several Ruby
        /// snippets, including tokenization resumed across calls via transferred state.
        /// </summary>
        /// <remarks>
        /// NOTE(review): SourceLocation appears to be (0-based byte index, 1-based line,
        /// 1-based column) — consistent with every span below; confirm against the
        /// Microsoft.Scripting SourceLocation definition.
        /// TestCategorizer tokenizes the given text (optionally resuming from a prior
        /// state) and asserts the produced tokens match the expected TokenInfo list,
        /// returning the tokenizer state for chaining — presumably; verify against its
        /// definition elsewhere in this file.
        /// </remarks>
        public void TokenCategorizer2() {
            // Non-default initial position (index 10, line 2, column 5): spans of
            // subsequent tokens must be offset accordingly.
            TestCategorizer(Engine, null, "1\n2", new SourceLocation(10, 2, 5),
                // 1
                new TokenInfo(new SourceSpan(new SourceLocation(10, 2, 5), new SourceLocation(11, 2, 6)), TokenCategory.NumericLiteral, TokenTriggers.None),
                // \n -- note the newline advances the line number and resets the column to 1
                new TokenInfo(new SourceSpan(new SourceLocation(11, 2, 6), new SourceLocation(12, 3, 1)), TokenCategory.WhiteSpace, TokenTriggers.None),
                // 2
                new TokenInfo(new SourceSpan(new SourceLocation(12, 3, 1), new SourceLocation(13, 3, 2)), TokenCategory.NumericLiteral, TokenTriggers.None)
            );

            // regexes: each part (delimiter, body, delimiter) is categorized as StringLiteral
            TestCategorizer(Engine, null, "/x/",
                // /
                new TokenInfo(new SourceSpan(new SourceLocation(0, 1, 1), new SourceLocation(1, 1, 2)), TokenCategory.StringLiteral, TokenTriggers.None),
                // x (regex body)
                new TokenInfo(new SourceSpan(new SourceLocation(1, 1, 2), new SourceLocation(2, 1, 3)), TokenCategory.StringLiteral, TokenTriggers.None),
                // /
                new TokenInfo(new SourceSpan(new SourceLocation(2, 1, 3), new SourceLocation(3, 1, 4)), TokenCategory.StringLiteral, TokenTriggers.None)
            );

            // whitespace and line comments:
            TestCategorizer(Engine, null, "print 'foo' #bar", 
                // print
                new TokenInfo(new SourceSpan(new SourceLocation(0, 1, 1), new SourceLocation(5, 1, 6)), TokenCategory.Identifier, TokenTriggers.None),
                // ' ' (single space)
                new TokenInfo(new SourceSpan(new SourceLocation(5, 1, 6), new SourceLocation(6, 1, 7)), TokenCategory.WhiteSpace, TokenTriggers.None),
                // ' (opening quote)
                new TokenInfo(new SourceSpan(new SourceLocation(6, 1, 7), new SourceLocation(7, 1, 8)), TokenCategory.StringLiteral, TokenTriggers.None),
                // foo (string content)
                new TokenInfo(new SourceSpan(new SourceLocation(7, 1, 8), new SourceLocation(10, 1, 11)), TokenCategory.StringLiteral, TokenTriggers.None),
                // ' (closing quote)
                new TokenInfo(new SourceSpan(new SourceLocation(10, 1, 11), new SourceLocation(11, 1, 12)), TokenCategory.StringLiteral, TokenTriggers.None),
                // ' ' (single space)
                new TokenInfo(new SourceSpan(new SourceLocation(11, 1, 12), new SourceLocation(12, 1, 13)), TokenCategory.WhiteSpace, TokenTriggers.None),
                // #bar (comment runs to end of input)
                new TokenInfo(new SourceSpan(new SourceLocation(12, 1, 13), new SourceLocation(16, 1, 17)), TokenCategory.LineComment, TokenTriggers.None)
            );

            // eolns: \r\n is one two-byte whitespace token spanning to the next line
            TestCategorizer(Engine, null, "a\r\nb",
                // a
                new TokenInfo(new SourceSpan(new SourceLocation(0, 1, 1), new SourceLocation(1, 1, 2)), TokenCategory.Identifier, TokenTriggers.None),   
                // \r\n
                new TokenInfo(new SourceSpan(new SourceLocation(1, 1, 2), new SourceLocation(3, 2, 1)), TokenCategory.WhiteSpace, TokenTriggers.None),   
                // b
                new TokenInfo(new SourceSpan(new SourceLocation(3, 2, 1), new SourceLocation(4, 2, 2)), TokenCategory.Identifier, TokenTriggers.None)  
            );

            // Multi-line block expression. The two comment rulers give, per character,
            // the 0-based index (above) and the 1-based per-line column (below):
            //                                       11111111 11222222222233 333
            //                             012345678901234567 89012345678901 234
            TestCategorizer(Engine, null, "canvas.Event { |x|\nputs 'string'\n}", 
            //                             1234567890123456789 12345678901234 12
            //                                      1111111111          11111   
                // line 1                    
                new TokenInfo(new SourceSpan(new SourceLocation(0, 1, 1), new SourceLocation(6, 1, 7)), TokenCategory.Identifier, TokenTriggers.None),          // canvas
                new TokenInfo(new SourceSpan(new SourceLocation(6, 1, 7), new SourceLocation(7, 1, 8)), TokenCategory.Delimiter, TokenTriggers.MemberSelect),   // . (triggers member completion)
                new TokenInfo(new SourceSpan(new SourceLocation(7, 1, 8), new SourceLocation(12, 1, 13)), TokenCategory.Identifier, TokenTriggers.None),        // Event
                new TokenInfo(new SourceSpan(new SourceLocation(12, 1, 13), new SourceLocation(13, 1, 14)), TokenCategory.WhiteSpace, TokenTriggers.None),      // space
                new TokenInfo(new SourceSpan(new SourceLocation(13, 1, 14), new SourceLocation(14, 1, 15)), TokenCategory.Grouping, TokenTriggers.MatchBraces), // {
                new TokenInfo(new SourceSpan(new SourceLocation(14, 1, 15), new SourceLocation(15, 1, 16)), TokenCategory.WhiteSpace, TokenTriggers.None),      // space
                new TokenInfo(new SourceSpan(new SourceLocation(15, 1, 16), new SourceLocation(16, 1, 17)), TokenCategory.Grouping, TokenTriggers.MatchBraces), // | (block-parameter delimiter)
                new TokenInfo(new SourceSpan(new SourceLocation(16, 1, 17), new SourceLocation(17, 1, 18)), TokenCategory.Identifier, TokenTriggers.None),      // x
                new TokenInfo(new SourceSpan(new SourceLocation(17, 1, 18), new SourceLocation(18, 1, 19)), TokenCategory.Grouping, TokenTriggers.MatchBraces), // |
                new TokenInfo(new SourceSpan(new SourceLocation(18, 1, 19), new SourceLocation(19, 2, 1)), TokenCategory.WhiteSpace, TokenTriggers.None),       // \n
                // line 2
                new TokenInfo(new SourceSpan(new SourceLocation(19, 2, 1), new SourceLocation(23, 2, 5)), TokenCategory.Identifier, TokenTriggers.None),        // puts
                new TokenInfo(new SourceSpan(new SourceLocation(23, 2, 5), new SourceLocation(24, 2, 6)), TokenCategory.WhiteSpace, TokenTriggers.None),        // space
                new TokenInfo(new SourceSpan(new SourceLocation(24, 2, 6), new SourceLocation(25, 2, 7)), TokenCategory.StringLiteral, TokenTriggers.None),     // '
                new TokenInfo(new SourceSpan(new SourceLocation(25, 2, 7), new SourceLocation(31, 2, 13)), TokenCategory.StringLiteral, TokenTriggers.None),    // string
                new TokenInfo(new SourceSpan(new SourceLocation(31, 2, 13), new SourceLocation(32, 2, 14)), TokenCategory.StringLiteral, TokenTriggers.None),   // '
                new TokenInfo(new SourceSpan(new SourceLocation(32, 2, 14), new SourceLocation(33, 3, 1)), TokenCategory.WhiteSpace, TokenTriggers.None),       // \n (significant)
                // line 3
                new TokenInfo(new SourceSpan(new SourceLocation(33, 3, 1), new SourceLocation(34, 3, 2)), TokenCategory.Grouping, TokenTriggers.MatchBraces)    // }
            );

            // state transfer: strings //
            // An unterminated string leaves the tokenizer in a "inside string" state;
            // that state is threaded into the next call so the continuation is still
            // categorized as StringLiteral.

            object state = null;
            state = TestCategorizer(Engine, state, "\"a\n", 
                new TokenInfo(new SourceSpan(new SourceLocation(0, 1, 1), new SourceLocation(1, 1, 2)), TokenCategory.StringLiteral, TokenTriggers.None), // " (string left open)
                new TokenInfo(new SourceSpan(new SourceLocation(1, 1, 2), new SourceLocation(3, 2, 1)), TokenCategory.StringLiteral, TokenTriggers.None)  // a\n
            );

            state = TestCategorizer(Engine, state, "b\n",
                new TokenInfo(new SourceSpan(new SourceLocation(0, 1, 1), new SourceLocation(2, 2, 1)), TokenCategory.StringLiteral, TokenTriggers.None)  // b\n (still inside the string)
            );

            state = TestCategorizer(Engine, state, "\"",
                new TokenInfo(new SourceSpan(new SourceLocation(0, 1, 1), new SourceLocation(1, 1, 2)), TokenCategory.StringLiteral, TokenTriggers.None)  // " (closes the string)
            );

            // state transfer: multi-line comments //
            // =begin/=end comments: everything between (and including) the markers is
            // one Comment token per chunk, with the "inside comment" state carried over.

            state = null;
            state = TestCategorizer(Engine, state, "=begin\n",
                new TokenInfo(new SourceSpan(new SourceLocation(0, 1, 1), new SourceLocation(7, 2, 1)), TokenCategory.Comment, TokenTriggers.None)
            );

            // Repeat to confirm the comment state survives any number of intermediate chunks.
            for (int i = 0; i < 3; i++) {
                state = TestCategorizer(Engine, state, "foo\n",
                    new TokenInfo(new SourceSpan(new SourceLocation(0, 1, 1), new SourceLocation(4, 2, 1)), TokenCategory.Comment, TokenTriggers.None)
                );
            }

            // Two lines in one chunk still produce a single Comment token spanning both.
            state = TestCategorizer(Engine, state, "a\nb\n",
                new TokenInfo(new SourceSpan(new SourceLocation(0, 1, 1), new SourceLocation(4, 3, 1)), TokenCategory.Comment, TokenTriggers.None)
            );

            state = TestCategorizer(Engine, state, "=end",
                new TokenInfo(new SourceSpan(new SourceLocation(0, 1, 1), new SourceLocation(4, 1, 5)), TokenCategory.Comment, TokenTriggers.None)
            );

            // =end must return the tokenizer to the default (no pending sequence) state.
            Assert(((Tokenizer.State)state).CurrentSequence == TokenSequenceState.None);

            // state transfer: nested strings //
            // A #{ } interpolation inside an open string switches categorization to
            // regular code tokens, then back to the string when the brace closes.

            state = null;
            state = TestCategorizer(Engine, state, "\"a",
                new TokenInfo(new SourceSpan(new SourceLocation(0, 1, 1), new SourceLocation(1, 1, 2)), TokenCategory.StringLiteral, TokenTriggers.None), // "
                new TokenInfo(new SourceSpan(new SourceLocation(1, 1, 2), new SourceLocation(2, 1, 3)), TokenCategory.StringLiteral, TokenTriggers.None)  // a
            );

            state = TestCategorizer(Engine, state, "#{",
                new TokenInfo(new SourceSpan(new SourceLocation(0, 1, 1), new SourceLocation(2, 1, 3)), TokenCategory.Grouping, TokenTriggers.MatchBraces) // #{ opens interpolation
            );

            state = TestCategorizer(Engine, state, "1",
                new TokenInfo(new SourceSpan(new SourceLocation(0, 1, 1), new SourceLocation(1, 1, 2)), TokenCategory.NumericLiteral, TokenTriggers.None) // code inside interpolation
            );

            state = TestCategorizer(Engine, state, "}",
                new TokenInfo(new SourceSpan(new SourceLocation(0, 1, 1), new SourceLocation(1, 1, 2)), TokenCategory.Grouping, TokenTriggers.MatchBraces) // } closes interpolation
            );

            state = TestCategorizer(Engine, state, "\"",
                new TokenInfo(new SourceSpan(new SourceLocation(0, 1, 1), new SourceLocation(1, 1, 2)), TokenCategory.StringLiteral, TokenTriggers.None) // " closes the outer string
            );

            // All nesting unwound: the tokenizer is back in the default state.
            Assert(((Tokenizer.State)state).CurrentSequence == TokenSequenceState.None);
        }
This method is declared in the class IronRuby.Tests.Tests.