IronRuby.Tests.Tests.Heredoc1 — C# (CSharp) method

Heredoc1 is a private method of the IronRuby.Tests.Tests class.

Signature: private void Heredoc1()
Returns: void
        // Tokenizer unit test for Ruby heredoc literals (<<LABEL and variants).
        // Phase 1 uses Verbatim = false, where a heredoc is reported as a single
        // logical string (StringBegin / content / StringEnd) inline at the opener.
        // Phase 2 uses Verbatim = true, where heredoc begin/end markers, whitespace
        // and content are reported as separate tokens in source order.
        // Phase 3 checks IDE token categorization (TestCategorizer): token spans,
        // line/column accounting across \n and \r\n, an unterminated heredoc, and
        // the <<- form whose terminator may be indented.
        private void Heredoc1() {
            AssertTokenizer t = new AssertTokenizer(this) { Verbatim = false };

            // Plain heredoc: <<LABEL ... LABEL.
            t.Load("<<LABEL\nhello\nLABEL")
                [Tokens.StringBegin]["hello\n"][Tokens.StringEnd][Tokens.NewLine].EOF();

            // Double-quoted delimiter: same token stream as the plain form.
            t.Load("<<\"LABEL\"\nhello\nLABEL")
                [Tokens.StringBegin]["hello\n"][Tokens.StringEnd][Tokens.NewLine].EOF();

            // Single-quoted delimiter: still StringBegin/StringEnd at the token level.
            t.Load("<<'LABEL'\nhello\nLABEL")
                [Tokens.StringBegin]["hello\n"][Tokens.StringEnd][Tokens.NewLine].EOF();

            // Backquoted delimiter yields a shell (command) string begin token.
            t.Load("<<`LABEL`\nhello\nLABEL")
                [Tokens.ShellStringBegin]["hello\n"][Tokens.StringEnd][Tokens.NewLine].EOF();

            // "LABEL123" does not terminate the heredoc — the terminator line must
            // match the label exactly, so it is taken as content.
            t.Load("<<LABEL\nLABEL123\nLABEL")
                [Tokens.StringBegin]["LABEL123\n"][Tokens.StringEnd][Tokens.NewLine].EOF();

            // Two heredocs opened on one line: their bodies are consumed in order
            // from the following lines, while the rest of the opening line (commas,
            // numbers) tokenizes as part of the original logical line.
            t.Load("puts <<L1, 1, <<L2, 2\naaa\nL1\nbbb\nL2\n3")
                [Tokens.Identifier, "puts"]
                [Tokens.StringBegin]["aaa\n"][Tokens.StringEnd]
                [Tokens.Comma][1][Tokens.Comma]
                [Tokens.StringBegin]["bbb\n"][Tokens.StringEnd]
                [Tokens.Comma][2]
                [Tokens.NewLine]
                [3].EOF();

            // Backslash-newline after the heredoc opener continues the logical
            // line: tokens after the heredoc body (",2") belong to the opener line.
            // Expected lexical states are asserted alongside each token.
            t.Load("puts <<A,1\\\n...\nA\n,2")
                [Tokens.Identifier, "puts"]
                [Tokens.StringBegin]
                ["...\n"]
                [Tokens.StringEnd].State(LexicalState.EXPR_END)
                [Tokens.Comma].State(LexicalState.EXPR_BEG)
                [1].State(LexicalState.EXPR_END)    // \\n is a whitespace
                [Tokens.Comma].State(LexicalState.EXPR_BEG)
                [2].State(LexicalState.EXPR_END).
            EOF();

            // Same continuation scenario with a call "f()": since the escaped
            // newline counts as whitespace before "f", the following "(" is an
            // argument parenthesis (LeftArgParenthesis), not an expression paren.
            t.Load("puts <<A,(f\\\n...\nA\n())")
                [Tokens.Identifier, "puts"]
                [Tokens.StringBegin]
                ["...\n"]
                [Tokens.StringEnd].State(LexicalState.EXPR_END)
                [Tokens.Comma].State(LexicalState.EXPR_BEG)
                [Tokens.LeftExprParenthesis].State(LexicalState.EXPR_BEG)  
                [Tokens.Identifier, "f"].State(LexicalState.EXPR_ARG)      // \\n is a whitespace, WhitespaceSeen == true
                [Tokens.LeftArgParenthesis].State(LexicalState.EXPR_BEG)
                [Tokens.RightParenthesis]
                [Tokens.RightParenthesis].
            EOF();
            t.Expect();

            // ---- Verbatim mode: every whitespace run and heredoc marker is its
            // own token, emitted strictly in source order. ----
            AssertTokenizer vt = new AssertTokenizer(this) { Verbatim = true };

            // Verbatim counterpart of the "puts <<A,1\...\nA\n,2" case above:
            // note the opener line's tokens (comma, 1) come BEFORE the heredoc
            // content, because tokens are reported in source order.
            vt.Load("puts <<A,1\\\n...\nA\n,2")
                [Tokens.Identifier, "puts"]
                [Tokens.Whitespace]
                [Tokens.VerbatimHeredocBegin]
                [Tokens.Comma].State(LexicalState.EXPR_BEG)
                [1].State(LexicalState.EXPR_END)    
                [Tokens.Whitespace]                             // \\n 
                [Tokens.StringContent, "...\n"]
                [Tokens.VerbatimHeredocEnd].State(LexicalState.EXPR_END) // A label
                [Tokens.Comma].State(LexicalState.EXPR_BEG)
                [2].State(LexicalState.EXPR_END).
            EOF();

            // Verbatim counterpart of the "(f\...\nA\n())" case.
            vt.Load("puts <<A,(f\\\n...\nA\n())")
                [Tokens.Identifier, "puts"]
                [Tokens.Whitespace]
                [Tokens.VerbatimHeredocBegin]
                [Tokens.Comma].State(LexicalState.EXPR_BEG)
                [Tokens.LeftExprParenthesis].State(LexicalState.EXPR_BEG)
                [Tokens.Identifier, "f"].State(LexicalState.EXPR_ARG)   
                [Tokens.Whitespace]
                ["...\n"]
                [Tokens.VerbatimHeredocEnd].State(LexicalState.EXPR_ARG)       
                [Tokens.LeftArgParenthesis].State(LexicalState.EXPR_BEG)
                [Tokens.RightParenthesis]
                [Tokens.RightParenthesis].
            EOF();

            // Deeply nested case: heredocs A and B opened on line 1; A's body
            // contains #{...} interpolations that themselves open heredocs
            // (C, D, E), and D's body opens yet another heredoc (F). The expected
            // stream below shows bodies resolving innermost-first (C, F, D, E)
            // before A and finally B terminate.
            vt.Load(@"puts <<A,<<B
1
2#{f <<C,<<D}3#{g <<E}4
c
C
d#{f <<F}d
f
F
D
e
E
5
A
b
b
B")
                [Tokens.Identifier, "puts"]
                [Tokens.Whitespace]
                [Tokens.VerbatimHeredocBegin] // <<A
                [Tokens.Comma]
                [Tokens.VerbatimHeredocBegin] // <<B
                [Tokens.EndOfLine]
                ["1\n2"]
                [Tokens.StringEmbeddedCodeBegin]
                [Tokens.Identifier, "f"]
                [Tokens.Whitespace]
                [Tokens.VerbatimHeredocBegin] // <<C
                [Tokens.Comma]
                [Tokens.VerbatimHeredocBegin] // <<D
                [Tokens.StringEmbeddedCodeEnd]
                ["3"]
                [Tokens.StringEmbeddedCodeBegin]
                [Tokens.Identifier, "g"]
                [Tokens.Whitespace]
                [Tokens.VerbatimHeredocBegin] // <<E
                [Tokens.StringEmbeddedCodeEnd]
                ["4\n"]
                ["c\n"]
                [Tokens.VerbatimHeredocEnd]   // C
                ["d"]
                [Tokens.StringEmbeddedCodeBegin]
                [Tokens.Identifier, "f"]
                [Tokens.Whitespace]
                [Tokens.VerbatimHeredocBegin] // <<F
                [Tokens.StringEmbeddedCodeEnd]
                ["d\n"]
                ["f\n"]
                [Tokens.VerbatimHeredocEnd]   // F
                [Tokens.VerbatimHeredocEnd]   // D
                ["e\n"]
                [Tokens.VerbatimHeredocEnd]   // E
                ["5\n"]
                [Tokens.VerbatimHeredocEnd]   // A
                ["b\nb\n"]
                [Tokens.VerbatimHeredocEnd]   // B
            .EOF();

            // NOTE(review): only t.Expect() is called here, although every Load
            // since the previous t.Expect() went through vt; vt.Expect() is never
            // invoked anywhere in this method — presumably vt's expected-error
            // check is skipped. Confirm whether this is intentional.
            t.Expect();

            // ---- Colorizer/categorizer checks: exact spans (index, line, column)
            // for each token, per the index/column/line ruler comments below. ----

            // index:                                111111111122 2222 222 2333 333 3 3
            //                             0123456789012345678901 2345 678 9012 345 6 7
            TestCategorizer(Engine, null, "puts <<L1, 1, <<L2, 2\naaa\nL1\nbbb\nL2\r\n3", 
            // column:                     1234567890123456789012 1234 123 1234 123 4 1 
            // line:                       1111111111111111111111 2222 333 4444 555 5 6
            // 
                // puts
                new TokenInfo(new SourceSpan(new SourceLocation(0, 1, 1), new SourceLocation(4, 1, 5)), TokenCategory.Identifier, TokenTriggers.None),
                // ' '
                new TokenInfo(new SourceSpan(new SourceLocation(4, 1, 5), new SourceLocation(5, 1, 6)), TokenCategory.WhiteSpace, TokenTriggers.None),
                // <<L1
                new TokenInfo(new SourceSpan(new SourceLocation(5, 1, 6), new SourceLocation(9, 1, 10)), TokenCategory.StringLiteral, TokenTriggers.None),
                // ,
                new TokenInfo(new SourceSpan(new SourceLocation(9, 1, 10), new SourceLocation(10, 1, 11)), TokenCategory.Delimiter, TokenTriggers.ParameterNext),
                // ' '
                new TokenInfo(new SourceSpan(new SourceLocation(10, 1, 11), new SourceLocation(11, 1, 12)), TokenCategory.WhiteSpace, TokenTriggers.None),
                // 1
                new TokenInfo(new SourceSpan(new SourceLocation(11, 1, 12), new SourceLocation(12, 1, 13)), TokenCategory.NumericLiteral, TokenTriggers.None),
                // ,
                new TokenInfo(new SourceSpan(new SourceLocation(12, 1, 13), new SourceLocation(13, 1, 14)), TokenCategory.Delimiter, TokenTriggers.ParameterNext),
                // ' '
                new TokenInfo(new SourceSpan(new SourceLocation(13, 1, 14), new SourceLocation(14, 1, 15)), TokenCategory.WhiteSpace, TokenTriggers.None),
                // <<L2
                new TokenInfo(new SourceSpan(new SourceLocation(14, 1, 15), new SourceLocation(18, 1, 19)), TokenCategory.StringLiteral, TokenTriggers.None),
                // ,
                new TokenInfo(new SourceSpan(new SourceLocation(18, 1, 19), new SourceLocation(19, 1, 20)), TokenCategory.Delimiter, TokenTriggers.ParameterNext),
                // ' '
                new TokenInfo(new SourceSpan(new SourceLocation(19, 1, 20), new SourceLocation(20, 1, 21)), TokenCategory.WhiteSpace, TokenTriggers.None),
                // 2
                new TokenInfo(new SourceSpan(new SourceLocation(20, 1, 21), new SourceLocation(21, 1, 22)), TokenCategory.NumericLiteral, TokenTriggers.None),
                // \n
                new TokenInfo(new SourceSpan(new SourceLocation(21, 1, 22), new SourceLocation(22, 2, 1)), TokenCategory.WhiteSpace, TokenTriggers.None),
                // aaa\n
                new TokenInfo(new SourceSpan(new SourceLocation(22, 2, 1), new SourceLocation(26, 3, 1)), TokenCategory.StringLiteral, TokenTriggers.None),
                // L1\n  (the terminator line itself is categorized as part of the string literal)
                new TokenInfo(new SourceSpan(new SourceLocation(26, 3, 1), new SourceLocation(29, 4, 1)), TokenCategory.StringLiteral, TokenTriggers.None),
                // bbb\n
                new TokenInfo(new SourceSpan(new SourceLocation(29, 4, 1), new SourceLocation(33, 5, 1)), TokenCategory.StringLiteral, TokenTriggers.None),
                // L2\r\n  (\r\n advances one line; span is 4 characters wide)
                new TokenInfo(new SourceSpan(new SourceLocation(33, 5, 1), new SourceLocation(37, 6, 1)), TokenCategory.StringLiteral, TokenTriggers.None),
                // 3
                new TokenInfo(new SourceSpan(new SourceLocation(37, 6, 1), new SourceLocation(38, 6, 2)), TokenCategory.NumericLiteral, TokenTriggers.None)
            );

            // Unterminated heredoc: the trailing zero-width StringLiteral token
            // at (13, 2, 4) stands in for the missing heredoc terminator.
            // index:                                 1111
            //                             0123456789 0123
            TestCategorizer(Engine, null, "puts <<L1\naaa", 
            // column:                     1234567890 1234
            // line:                       1111111111 2222
            // 
                // puts
                new TokenInfo(new SourceSpan(new SourceLocation(0, 1, 1), new SourceLocation(4, 1, 5)), TokenCategory.Identifier, TokenTriggers.None),
                // ' '
                new TokenInfo(new SourceSpan(new SourceLocation(4, 1, 5), new SourceLocation(5, 1, 6)), TokenCategory.WhiteSpace, TokenTriggers.None),
                // <<L1
                new TokenInfo(new SourceSpan(new SourceLocation(5, 1, 6), new SourceLocation(9, 1, 10)), TokenCategory.StringLiteral, TokenTriggers.None),
                // \n
                new TokenInfo(new SourceSpan(new SourceLocation(9, 1, 10), new SourceLocation(10, 2, 1)), TokenCategory.WhiteSpace, TokenTriggers.None),
                // aaa\n
                new TokenInfo(new SourceSpan(new SourceLocation(10, 2, 1), new SourceLocation(13, 2, 4)), TokenCategory.StringLiteral, TokenTriggers.None),
                // <missing heredoc end>
                new TokenInfo(new SourceSpan(new SourceLocation(13, 2, 4), new SourceLocation(13, 2, 4)), TokenCategory.StringLiteral, TokenTriggers.None)
            );

            // <<- form: the terminator may be indented; the final span starts at
            // column 3 (index 17), skipping the two leading spaces before "L1".
            // index:                                1 1111 11111
            //                             01234567890 1234 56789
            TestCategorizer(Engine, null, "puts <<-L1\naaa\n  L1",
            // column:                     12345678901 1234 12345
            // line:                       11111111111 2222 33333
            // 
                // puts
                new TokenInfo(new SourceSpan(new SourceLocation(0, 1, 1), new SourceLocation(4, 1, 5)), TokenCategory.Identifier, TokenTriggers.None),
                // ' '
                new TokenInfo(new SourceSpan(new SourceLocation(4, 1, 5), new SourceLocation(5, 1, 6)), TokenCategory.WhiteSpace, TokenTriggers.None),
                // <<-L1
                new TokenInfo(new SourceSpan(new SourceLocation(5, 1, 6), new SourceLocation(10, 1, 11)), TokenCategory.StringLiteral, TokenTriggers.None),
                // \n
                new TokenInfo(new SourceSpan(new SourceLocation(10, 1, 11), new SourceLocation(11, 2, 1)), TokenCategory.WhiteSpace, TokenTriggers.None),
                // aaa\n
                new TokenInfo(new SourceSpan(new SourceLocation(11, 2, 1), new SourceLocation(15, 3, 1)), TokenCategory.StringLiteral, TokenTriggers.None),
                // L1
                new TokenInfo(new SourceSpan(new SourceLocation(17, 3, 3), new SourceLocation(19, 3, 5)), TokenCategory.StringLiteral, TokenTriggers.None)
            );
        }
Declaring class: IronRuby.Tests.Tests