Python pygments.token.Token.Text() Examples

The following are 30 code examples of pygments.token.Token.Text(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module pygments.token.Token, or try the search function.
Example #1
Source File: test_mata_lexer.py    From stata_kernel with GNU General Public License v3.0 6 votes vote down vote up
def test_multiple_mata_blocks(self):
    """A `mata`/`mata:` typed inside an open mata block is plain text; a
    `mata:` after `end` opens again, tagged with the OpenError token."""
    tokens = CodeManager('mata\nmata\nmata:\nend\nmata:').tokens_final
    expected = [(Token.Mata.Open, 'mata'), (Token.Text, '\n')]
    expected += [(Token.Text, ch) for ch in 'mata\nmata:']
    expected += [
        (Token.Mata.Close, '\nend'),
        (Token.Text, '\n'),
        (Token.Mata.OpenError, 'mata:'),
        (Token.Text, '\n'),
    ]
    assert expected == tokens
Example #2
Source File: test_stata_lexer.py    From stata_kernel with GNU General Public License v3.0 6 votes vote down vote up
def test_line_continuation_ignored_after_inline_comment(self):
    """`///` has no line-continuation effect when it follows a `//` comment;
    the next line is tokenized as ordinary text."""
    tokens = CodeManager('// /// a\na').tokens_fp_all
    comment_parts = ('//', ' ', '/', '/', '/', ' ', 'a')
    expected = [(Token.Comment.Single, part) for part in comment_parts]
    expected += [(Token.Text, part) for part in ('\n', 'a', '\n')]
    assert expected == tokens
Example #3
Source File: test_stata_lexer.py    From stata_kernel with GNU General Public License v3.0 6 votes vote down vote up
def test_line_continuation_comment_after_star_comment(self):
    """A `///` after a `*` comment still continues the comment onto the
    next line (Comment.Special covers the continued text)."""
    tokens = CodeManager('* ///\na\na').tokens_fp_all
    expected = [(Token.Comment.Single, part) for part in ('*', ' ')]
    expected += [(Token.Comment.Special, part) for part in ('///\n', 'a', '\n')]
    expected += [(Token.Text, part) for part in ('a', '\n')]
    assert expected == tokens
Example #4
Source File: test_stata_lexer.py    From stata_kernel with GNU General Public License v3.0 6 votes vote down vote up
def test_ignored_multiline_after_inline_comment(self):
    """A `/*` appearing inside a `//` comment does not open a multiline
    comment; the following line is ordinary text."""
    tokens = CodeManager('// /* a\na').tokens_fp_all
    comment_parts = ('//', ' ', '/', '*', ' ', 'a')
    expected = [(Token.Comment.Single, part) for part in comment_parts]
    expected += [(Token.Text, part) for part in ('\n', 'a', '\n')]
    assert expected == tokens
Example #5
Source File: test_stata_lexer.py    From stata_kernel with GNU General Public License v3.0 6 votes vote down vote up
def test_ignored_multiline_after_inline_comment_after_star_comment(self):
    """A `/*` after `* //` stays part of the single-line comment and does
    not open a multiline comment."""
    tokens = CodeManager('* // /* a\na').tokens_fp_all
    comment_parts = ('*', ' ', '//', ' ', '/', '*', ' ', 'a')
    expected = [(Token.Comment.Single, part) for part in comment_parts]
    expected += [(Token.Text, part) for part in ('\n', 'a', '\n')]
    assert expected == tokens
Example #6
Source File: syntax.py    From rich with MIT License 6 votes vote down vote up
def _get_number_styles(self, console: Console) -> Tuple[Style, Style, Style]:
    """Return (background, number, highlighted-number) styles for the line-number gutter."""
    bg_style = Style(bgcolor=self._pygments_style_class.background_color)
    if console.color_system not in ("256", "truecolor"):
        # Limited color systems get one plain style for both number variants.
        plain = Style()
        return bg_style, plain, plain
    num_style = Style.chain(
        bg_style,
        self._get_theme_style(Token.Text),
        Style(color=self._get_line_numbers_color()),
    )
    highlighted = Style.chain(
        bg_style,
        self._get_theme_style(Token.Text),
        Style(bold=True, color=self._get_line_numbers_color(0.9)),
    )
    return bg_style, num_style, highlighted
Example #7
Source File: disassembly.py    From gxf with MIT License 6 votes vote down vote up
def filter(self, lexer, stream):
    """Retag tokens so a known instruction prefix becomes Keyword.Type and
    the mnemonic following it becomes Name.Function."""
    saw_prefix = False
    for tok_type, text in stream:
        if saw_prefix and tok_type is Token.Name.Variable:
            # Right after a prefix: either another prefix or the real mnemonic.
            if text in self.prefixes:
                tok_type = Token.Keyword.Type
            else:
                tok_type = Token.Name.Function
        elif tok_type is Token.Name.Function and text in self.prefixes:
            saw_prefix = True
            tok_type = Token.Keyword.Type
        elif tok_type is not Token.Text:
            # Any other non-whitespace token ends the prefix run.
            saw_prefix = False
        yield tok_type, text
Example #8
Source File: styles.py    From Computable with MIT License 6 votes vote down vote up
def get_colors(stylename):
    """Construct the color keys used to build the base stylesheet
    from a template.

    Parameters
    ----------
    stylename : str
        Name of a Pygments style, as accepted by ``get_style_by_name``.

    Returns
    -------
    dict
        Keys ``bgcolor``, ``select`` and ``fgcolor`` for the template.
    """
    style = get_style_by_name(stylename)
    fgcolor = style.style_for_token(Token.Text)['color'] or ''
    if len(fgcolor) in (3, 6):
        # could be 'abcdef' or 'ace' hex, which needs '#' prefix
        try:
            int(fgcolor, 16)
        except ValueError:
            # BUG FIX: int() raises ValueError (not TypeError) for a
            # non-hex string, so the original `except TypeError` never
            # fired and a 3/6-char non-hex color string crashed here.
            pass
        else:
            fgcolor = "#" + fgcolor

    return dict(
        bgcolor=style.background_color,
        select=style.highlight_color,
        fgcolor=fgcolor,
    )
Example #9
Source File: test_shell.py    From pygments with BSD 2-Clause "Simplified" License 6 votes vote down vote up
def test_virtualenv(lexer_session):
    """The '(env)' marker is tokenized as Generic.Prompt.VirtualEnv."""
    code = u'(env) [~/project]$ foo -h\n'
    expected = [
        (Token.Text, u''),
        (Token.Generic.Prompt.VirtualEnv, u'(env)'),
        (Token.Text, u''),
        (Token.Text, u' '),
        (Token.Text, u''),
        (Token.Generic.Prompt, u'[~/project]$'),
    ]
    expected += [(Token.Text, part) for part in (u' ', u'foo', u' ', u'-h', u'\n')]
    assert expected == list(lexer_session.get_tokens(code))
Example #10
Source File: test_stata_lexer.py    From stata_kernel with GNU General Public License v3.0 6 votes vote down vote up
def test_multiline_comment_across_empty_whitespace_lines(self):
    """A /* ... */ comment may span completely blank lines."""
    tokens = CodeManager('a /*\n\n*/ a').tokens_fp_all
    expected = [(Token.Text, part) for part in ('a', ' ')]
    expected += [(Token.Comment.Multiline, part)
                 for part in ('/*', '\n', '\n', '*/')]
    expected += [(Token.Text, part) for part in (' ', 'a', '\n')]
    assert expected == tokens
Example #11
Source File: test_stata_lexer.py    From stata_kernel with GNU General Public License v3.0 6 votes vote down vote up
def test4(self):
    """A trailing /// keeps the continuation alive across a line that is
    itself a /// comment; all of it becomes Comment.Special."""
    tokens = CodeManager('a ///\n/// a ///').tokens_fp_all
    expected = [(Token.Text, part) for part in ('a', ' ')]
    special = ('///', '\n', '///', ' ', 'a', ' ', '/', '/', '/', '\n')
    expected += [(Token.Comment.Special, part) for part in special]
    assert expected == tokens
Example #12
Source File: test_stata_lexer.py    From stata_kernel with GNU General Public License v3.0 6 votes vote down vote up
def test_cap_chunk(self):
    """A braced block after `cap` is emitted as one TextBlock chunk."""
    tokens = CodeManager('cap {\n a\n}').tokens_final
    expected = [(Token.Text, ch) for ch in 'cap ']
    expected += [(Token.TextBlock, ch) for ch in '{\n a\n}']
    expected.append((Token.Text, '\n'))
    assert expected == tokens
Example #13
Source File: test_stata_lexer.py    From stata_kernel with GNU General Public License v3.0 6 votes vote down vote up
def test_cap_chunk_recursive(self):
    """Nested braces stay inside the same TextBlock chunk."""
    tokens = CodeManager('cap {\n{\n a\n}\n}').tokens_final
    expected = [(Token.Text, ch) for ch in 'cap ']
    expected += [(Token.TextBlock, ch) for ch in '{\n{\n a\n}\n}']
    expected.append((Token.Text, '\n'))
    assert expected == tokens
Example #14
Source File: test_stata_lexer.py    From stata_kernel with GNU General Public License v3.0 6 votes vote down vote up
def test_cap_chunk_with_inner_multiline_comment(self):
    """A /*...*/ comment inside a braced block (even one containing `{`)
    is stripped from the final tokens."""
    tokens = CodeManager('cap {\n/*{*/\n a\n}').tokens_final
    expected = [(Token.Text, ch) for ch in 'cap ']
    expected += [(Token.TextBlock, ch) for ch in '{\n\n a\n}']
    expected.append((Token.Text, '\n'))
    assert expected == tokens
Example #15
Source File: test_stata_lexer.py    From stata_kernel with GNU General Public License v3.0 6 votes vote down vote up
def test5(self):
    """A /// continuation onto a line that is itself a // comment yields
    Comment.Special for the continuation and Comment.Single for the rest."""
    tokens = CodeManager('a ///\n// a ///').tokens_fp_all
    expected = [(Token.Text, part) for part in ('a', ' ')]
    expected += [(Token.Comment.Special, part) for part in ('///', '\n')]
    expected += [(Token.Comment.Single, part)
                 for part in ('//', ' ', 'a', ' ', '/', '/', '/')]
    expected.append((Token.Text, '\n'))
    assert expected == tokens
Example #16
Source File: test_mata_lexer.py    From stata_kernel with GNU General Public License v3.0 6 votes vote down vote up
def test_paren_chunk_recursive(self):
    """Nested parentheses inside mata form one TextBlockParen chunk."""
    tokens = CodeManager('mata\n(\n(\n a\n)\n)\nend').tokens_final
    expected = [(Token.Mata.Open, 'mata'), (Token.Text, '\n')]
    expected += [(Token.TextBlockParen, ch) for ch in '(\n(\n a\n)\n)']
    expected += [(Token.Mata.Close, '\nend'), (Token.Text, '\n')]
    assert expected == tokens
Example #17
Source File: test_mata_lexer.py    From stata_kernel with GNU General Public License v3.0 6 votes vote down vote up
def test5(self):
    """Inside a mata block, /// then // comments tokenize as in stata and
    the block still closes on `end`."""
    tokens = CodeManager('mata:\na ///\n// a ///\n\nend').tokens_fp_all
    expected = [(Token.Mata.OpenError, 'mata:')]
    expected += [(Token.Text, part) for part in ('\n', 'a', ' ')]
    expected += [(Token.Comment.Special, part) for part in ('///', '\n')]
    expected += [(Token.Comment.Single, part)
                 for part in ('//', ' ', 'a', ' ', '/', '/', '/')]
    expected += [
        (Token.Text, '\n'),
        (Token.Mata.Close, '\nend'),
        (Token.Text, '\n'),
    ]
    assert expected == tokens
Example #18
Source File: test_mata_lexer.py    From stata_kernel with GNU General Public License v3.0 6 votes vote down vote up
def test4(self):
    """A /// continuation swallows the first `end` (it sits on the
    continued comment line), so only the second `end` closes mata."""
    tokens = CodeManager('mata:\na ///\n/// a ///\nend\nend').tokens_fp_all
    expected = [(Token.Mata.OpenError, 'mata:')]
    expected += [(Token.Text, part) for part in ('\n', 'a', ' ')]
    special = ('///', '\n', '///', ' ', 'a', ' ', '/', '/', '/', '\n')
    expected += [(Token.Comment.Special, part) for part in special]
    expected += [(Token.Text, ch) for ch in 'end']
    expected += [(Token.Mata.Close, '\nend'), (Token.Text, '\n')]
    assert expected == tokens
Example #19
Source File: test_mata_lexer.py    From stata_kernel with GNU General Public License v3.0 6 votes vote down vote up
def test_cap_chunk_with_inner_multiline_comment(self):
    """A /*...*/ comment inside a paren chunk (even one containing `(`)
    is stripped from the final tokens."""
    tokens = CodeManager('mata\n(\n/*(*/\n a\n)\nend').tokens_final
    expected = [(Token.Mata.Open, 'mata'), (Token.Text, '\n')]
    expected += [(Token.TextBlockParen, ch) for ch in '(\n\n a\n)']
    expected += [(Token.Mata.Close, '\nend'), (Token.Text, '\n')]
    assert expected == tokens
Example #20
Source File: styles.py    From pySINDy with MIT License 6 votes vote down vote up
def get_colors(stylename):
    """Construct the color keys used to build the base stylesheet
    from a template.

    Parameters
    ----------
    stylename : str
        Name of a Pygments style, as accepted by ``get_style_by_name``.

    Returns
    -------
    dict
        Keys ``bgcolor``, ``select`` and ``fgcolor`` for the template.
    """
    style = get_style_by_name(stylename)
    fgcolor = style.style_for_token(Token.Text)['color'] or ''
    if len(fgcolor) in (3, 6):
        # could be 'abcdef' or 'ace' hex, which needs '#' prefix
        try:
            int(fgcolor, 16)
        except ValueError:
            # BUG FIX: int() raises ValueError (not TypeError) for a
            # non-hex string, so the original `except TypeError` never
            # fired and a 3/6-char non-hex color string crashed here.
            pass
        else:
            fgcolor = "#" + fgcolor

    return dict(
        bgcolor=style.background_color,
        select=style.highlight_color,
        fgcolor=fgcolor,
    )
Example #21
Source File: color_styles.py    From azure-cli-shell with MIT License 6 votes vote down vote up
def color_mapping(curr_completion, completion, prompt, command, subcommand,
                  param, text, line, example, toolbar):
    """Build the prompt-toolkit style mapping UI token classes to the
    caller-supplied color strings."""
    mapping = {
        # Completion menu
        Token.Menu.Completions.Completion.Current: curr_completion,
        Token.Menu.Completions.Completion: completion,
        Token.Menu.Completions.ProgressButton: 'bg:#b78991',
        Token.Menu.Completions.ProgressBar: 'bg:#ffc0cb',
        # Prompt
        Token.Az: prompt,
        Token.Prompt.Arg: prompt,
        # Syntax highlighting
        Token.Keyword: command,
        Token.Keyword.Declaration: subcommand,
        Token.Name.Class: param,
        Token.Text: text,
        Token.Line: line,
        Token.Number: example,
        # Toolbar
        Token.Operator: toolbar,
        Token.Toolbar: toolbar,
    }
    return style_from_dict(mapping)
Example #22
Source File: test_mata_lexer.py    From stata_kernel with GNU General Public License v3.0 6 votes vote down vote up
def test_ignored_multiline_after_inline_comment(self):
    """Inside mata, a `/*` following `//` does not start a multiline
    comment; the next line is ordinary text and `end` still closes."""
    tokens = CodeManager('mata\n// /*a\na\nend').tokens_fp_all
    expected = [(Token.Mata.Open, 'mata'), (Token.Text, '\n')]
    expected += [(Token.Comment.Single, part)
                 for part in ('//', ' ', '/', '*', 'a')]
    expected += [(Token.Text, part) for part in ('\n', 'a')]
    expected += [(Token.Mata.Close, '\nend'), (Token.Text, '\n')]
    assert expected == tokens
Example #23
Source File: test_mata_lexer.py    From stata_kernel with GNU General Public License v3.0 6 votes vote down vote up
def test_multiline_comment(self):
    """A /* ... */ comment spans multiple lines inside a mata block and
    does not prevent `end` from closing the block."""
    tokens = CodeManager('mata\n/*a\na\n*/\nend').tokens_fp_all
    expected = [(Token.Mata.Open, 'mata'), (Token.Text, '\n')]
    expected += [(Token.Comment.Multiline, part)
                 for part in ('/*', 'a', '\n', 'a', '\n', '*/')]
    expected += [(Token.Mata.Close, '\nend'), (Token.Text, '\n')]
    assert expected == tokens
Example #24
Source File: test_shell.py    From pygments with BSD 2-Clause "Simplified" License 6 votes vote down vote up
def test_curly_no_escape_and_quotes(lexer_bash):
    """Pattern substitution inside ${...} with quoted brackets tokenizes
    without treating the quotes as escapes."""
    code = u'echo "${a//["b"]/}"\n'
    expected = [
        (Token.Name.Builtin, u'echo'),
        (Token.Text, u' '),
        (Token.Literal.String.Double, u'"'),
        (Token.String.Interpol, u'${'),
        (Token.Name.Variable, u'a'),
        (Token.Punctuation, u'//['),
        (Token.Literal.String.Double, u'"b"'),
        (Token.Punctuation, u']/'),
        (Token.String.Interpol, u'}'),
        (Token.Literal.String.Double, u'"'),
        (Token.Text, u'\n'),
    ]
    assert expected == list(lexer_bash.get_tokens(code))
Example #25
Source File: test_stata_lexer.py    From stata_kernel with GNU General Public License v3.0 6 votes vote down vote up
def test_if_block_with_preceding_string(self):
    """GH issue 139: quoted strings before `{` must not break block detection."""
    tokens = CodeManager('if "0" == "1" {').tokens_final
    expected = [(Token.Text, ch) for ch in 'if "0" == "1" ']
    expected += [(Token.TextBlock, '{'), (Token.TextBlock, '\n')]
    assert expected == tokens
Example #26
Source File: test_stata_lexer.py    From stata_kernel with GNU General Public License v3.0 6 votes vote down vote up
def test_cap_chunk_with_inner_line_comment(self):
    """A `*` line comment inside a braced block (even one containing `{`)
    is stripped from the final tokens."""
    tokens = CodeManager('cap {\n*{\n a\n}').tokens_final
    expected = [(Token.Text, ch) for ch in 'cap ']
    expected += [(Token.TextBlock, ch) for ch in '{\n\n a\n}']
    expected.append((Token.Text, '\n'))
    assert expected == tokens
Example #27
Source File: test_mata_lexer.py    From stata_kernel with GNU General Public License v3.0 5 votes vote down vote up
def test_newlines_in_semicolon_block_become_spaces(self):
    """With `#delimit ;` active, newlines inside a mata block collapse to
    spaces in the final tokens.

    NOTE: per the original test, the equivalent code actually fails when
    run in mata itself:

    ```stata
    #delimit ;
    mata;
    "
    a
    2
    b
    "
    ;
    end;
    ```
    """
    tokens = CodeManager('#delimit ;\nmata;\n"\na\n2\nb\n"\n;\nend;').tokens_final
    expected = [(Token.Mata.Open, ' mata'), (Token.Text, '\n')]
    expected += [(Token.Text, ch) for ch in ' " a 2 b " ']
    expected += [(Token.Mata.Close, '\n end'), (Token.Text, '\n')]
    assert expected == tokens
Example #28
Source File: test_mata_lexer.py    From stata_kernel with GNU General Public License v3.0 5 votes vote down vote up
def test_if_block_not_matching_preceding_newline(self):
    """The TextBlock for `... if {` starts at the line itself, not at the
    newline that precedes it."""
    tokens = CodeManager('mata\n1 if {\na\n}\nend').tokens_final
    expected = [(Token.Mata.Open, 'mata'), (Token.Text, '\n')]
    expected += [(Token.TextBlock, part)
                 for part in ('1 if {', '\n', 'a', '\n', '}')]
    expected += [(Token.Mata.Close, '\nend'), (Token.Text, '\n')]
    assert expected == tokens
Example #29
Source File: test_mata_lexer.py    From stata_kernel with GNU General Public License v3.0 5 votes vote down vote up
def test_if_block_with_preceding_string(self):
    """GH issue 139: quoted strings in the `if` condition must not break
    block detection inside mata."""
    tokens = CodeManager('mata\nif ("0" == "1") {').tokens_final
    expected = [
        (Token.Mata.Open, 'mata'),
        (Token.Text, '\n'),
        (Token.TextBlock, 'if ("0" == "1") {'),
        (Token.TextBlock, '\n'),
    ]
    assert expected == tokens
Example #30
Source File: test_shell.py    From pygments with BSD 2-Clause "Simplified" License 5 votes vote down vote up
def test_powershell_session(lexer_powershell_session):
    """A PS session line splits into prompt and command tokens."""
    code = u'PS C:\\> Get-ChildItem\n'
    expected = [
        (Token.Name.Builtin, u''),
        (Token.Generic.Prompt, u'PS C:\\> '),
        (Token.Name.Builtin, u'Get-ChildItem'),
        (Token.Text, u'\n'),
    ]
    assert expected == list(lexer_powershell_session.get_tokens(code))