Python pygments.token.Token.Punctuation Examples

The following are 30 code examples of pygments.token.Token.Punctuation, drawn from the test suites of open-source projects. The source file and license are noted above each example. You may also want to check out all available functions and classes of the module pygments.token.Token.
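For context, Token.Punctuation is a token type, not a function: a lexer emits (token_type, value) pairs, and characters such as brackets, commas, and semicolons are tagged with this type. A minimal sketch of the pattern all the tests below follow, assuming only that pygments is installed:

from pygments.lexers import PythonLexer
from pygments.token import Token

# Lex a small fragment and keep only the punctuation tokens.
tokens = list(PythonLexer().get_tokens('foo(1, 2)\n'))
punctuation = [value for ttype, value in tokens if ttype in Token.Punctuation]
print(punctuation)  # ['(', ',', ')']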
Example #1
Source File: test_qbasiclexer.py    From pygments with BSD 2-Clause "Simplified" License
def test_keywords_with_dollar(lexer):
    fragment = u'DIM x\nx = RIGHT$("abc", 1)\n'
    expected = [
        (Token.Keyword.Declaration, u'DIM'),
        (Token.Text.Whitespace, u' '),
        (Token.Name.Variable.Global, u'x'),
        (Token.Text, u'\n'),
        (Token.Name.Variable.Global, u'x'),
        (Token.Text.Whitespace, u' '),
        (Token.Operator, u'='),
        (Token.Text.Whitespace, u' '),
        (Token.Keyword.Reserved, u'RIGHT$'),
        (Token.Punctuation, u'('),
        (Token.Literal.String.Double, u'"abc"'),
        (Token.Punctuation, u','),
        (Token.Text.Whitespace, u' '),
        (Token.Literal.Number.Integer.Long, u'1'),
        (Token.Punctuation, u')'),
        (Token.Text, u'\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == expected 
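Note that the lexer argument in these pygments tests is a module-scoped pytest fixture, not a parameter you pass yourself. A sketch of how test_qbasiclexer.py would set it up (the fixture body here is an assumption based on the usual pygments test layout):

import pytest
from pygments.lexers import QBasicLexer

@pytest.fixture(scope='module')
def lexer():
    # Hypothetical reconstruction of the fixture the test above relies on.
    yield QBasicLexer()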
Example #2
Source File: test_lexer.py    From jsx-lexer with MIT License
def test_lexing_short_syntax_fragments(self):
    """
    Testing the <></> React short-syntax JSXFragment
    see: `https://facebook.github.io/jsx/`
    JSXFragment :
        <> JSXChildren_opt </>
    """
    lexer = lexers.get_lexer_by_name("jsx")
    tokens = lexer.get_tokens("""<></>""")
    self.assertEqual(
        self.__filter_tokens(tokens),
        [
            (Token.Punctuation, "<"),
            (Token.Punctuation, ">"),
            (Token.Punctuation, "<"),
            (Token.Punctuation, "/"),
            (Token.Punctuation, ">"),
        ],
    )
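The self.__filter_tokens helper is not shown in this excerpt. Judging by the assertions, it strips whitespace-only tokens so the expected lists stay compact; a hypothetical stand-in:

def __filter_tokens(self, tokens):
    # Hypothetical helper: drop tokens whose value is pure whitespace,
    # leaving only the structurally interesting tokens to compare.
    return [(ttype, value) for ttype, value in tokens if value.strip()]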
Example #3
Source File: test_gdscript.py    From pygments with BSD 2-Clause "Simplified" License
def test_inner_class(lexer):
    fragment = "class InnerClass:\n\tvar a = 5"
    tokens = [
        (Token.Keyword, "class"),
        (Token.Text, " "),
        (Token.Name, "InnerClass"),
        (Token.Punctuation, ":"),
        (Token.Text, "\n"),
        (Token.Text, "\t"),
        (Token.Keyword, "var"),
        (Token.Text, " "),
        (Token.Name, "a"),
        (Token.Text, " "),
        (Token.Operator, "="),
        (Token.Text, " "),
        (Token.Literal.Number.Integer, "5"),
        (Token.Text, "\n"),
    ]
    assert list(lexer.get_tokens(fragment)) == tokens 
Example #4
Source File: test_gdscript.py    From pygments with BSD 2-Clause "Simplified" License
def test_export_array(lexer):
    fragment = "export (Array, AudioStream) var streams"
    tokens = [
        (Token.Keyword, "export"),
        (Token.Text, " "),
        (Token.Punctuation, "("),
        (Token.Name.Builtin.Type, "Array"),
        (Token.Punctuation, ","),
        (Token.Text, " "),
        (Token.Name, "AudioStream"),
        (Token.Punctuation, ")"),
        (Token.Text, " "),
        (Token.Keyword, "var"),
        (Token.Text, " "),
        (Token.Name, "streams"),
        (Token.Text, "\n"),
    ]
    assert list(lexer.get_tokens(fragment)) == tokens 
Example #5
Source File: test_gdscript.py    From pygments with BSD 2-Clause "Simplified" License
def test_signal(lexer):
    fragment = "signal sig (arg1, arg2)"
    tokens = [
        (Token.Keyword, "signal"),
        (Token.Text, " "),
        (Token.Name, "sig"),
        (Token.Text, " "),
        (Token.Punctuation, "("),
        (Token.Name, "arg1"),
        (Token.Punctuation, ","),
        (Token.Text, " "),
        (Token.Name, "arg2"),
        (Token.Punctuation, ")"),
        (Token.Text, "\n"),
    ]
    assert list(lexer.get_tokens(fragment)) == tokens 
Example #6
Source File: test_gdscript.py    From pygments with BSD 2-Clause "Simplified" License
def test_simple_function(lexer):
    fragment = "func abc(arg):\n\tprint(\"Hello, World!\")"
    tokens = [
        (Token.Keyword, "func"),
        (Token.Text, " "),
        (Token.Name, "abc"),
        (Token.Punctuation, "("),
        (Token.Name, "arg"),
        (Token.Punctuation, ")"),
        (Token.Punctuation, ":"),
        (Token.Text, "\n"),
        (Token.Text, "\t"),
        (Token.Name.Builtin, "print"),
        (Token.Punctuation, "("),
        (Token.Literal.String.Double, "\""),
        (Token.Literal.String.Double, "Hello, World!"),
        (Token.Literal.String.Double, "\""),
        (Token.Punctuation, ")"),
        (Token.Text, "\n"),
    ]
    assert list(lexer.get_tokens(fragment)) == tokens 
Example #7
Source File: test_praat.py    From pygments with BSD 2-Clause "Simplified" License
def test_function_call(lexer):
    fragment = u'selected("Sound", i+(a*b))\n'
    tokens = [
        (Token.Name.Function, u'selected'),
        (Token.Punctuation, u'('),
        (Token.Literal.String, u'"'),
        (Token.Literal.String, u'Sound'),
        (Token.Literal.String, u'"'),
        (Token.Punctuation, u','),
        (Token.Text, u' '),
        (Token.Text, u'i'),
        (Token.Operator, u'+'),
        (Token.Text, u'('),
        (Token.Text, u'a'),
        (Token.Operator, u'*'),
        (Token.Text, u'b'),
        (Token.Text, u')'),
        (Token.Punctuation, u')'),
        (Token.Text, u'\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == tokens 
Example #8
Source File: test_objectiveclexer.py    From pygments with BSD 2-Clause "Simplified" License
def test_literal_number_nested_expression(lexer):
    fragment = u'@(1+(2+3));\n'
    expected = [
        (Token.Literal, u'@('),
        (Token.Literal.Number.Integer, u'1'),
        (Token.Operator, u'+'),
        (Token.Punctuation, u'('),
        (Token.Literal.Number.Integer, u'2'),
        (Token.Operator, u'+'),
        (Token.Literal.Number.Integer, u'3'),
        (Token.Punctuation, u')'),
        (Token.Literal, u')'),
        (Token.Punctuation, u';'),
        (Token.Text, u'\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == expected 
Example #9
Source File: test_grammar_notation.py    From pygments with BSD 2-Clause "Simplified" License
def test_peg_modified_strings(lexer_peg):
    # see for example:
    # - http://textx.github.io/Arpeggio/
    # - https://nim-lang.org/docs/pegs.html
    # - https://github.com/erikrose/parsimonious
    fragment = u'~"regex" i"insensitive" "multimod"ilx ("not modified")\n'
    tokens = [
        # can't handle parsimonious-style regex while ~ is a cut operator
        (Token.Operator, u'~'),
        (Token.String.Double, u'"regex"'),
        (Token.Text, u' '),
        (Token.String.Double, u'i"insensitive"'),
        (Token.Text, u' '),
        (Token.String.Double, u'"multimod"ilx'),
        (Token.Text, u' '),
        (Token.Punctuation, u'('),
        (Token.String.Double, u'"not modified"'),
        (Token.Punctuation, u')'),
        (Token.Text, u'\n'),
    ]
    assert list(lexer_peg.get_tokens(fragment)) == tokens 
Example #10
Source File: test_php.py    From pygments with BSD 2-Clause "Simplified" License
def test_string_escaping_run(lexer):
    fragment = '<?php $x="{\\""; ?>\n'
    tokens = [
        (Token.Comment.Preproc, '<?php'),
        (Token.Text, ' '),
        (Token.Name.Variable, '$x'),
        (Token.Operator, '='),
        (Token.Literal.String.Double, '"'),
        (Token.Literal.String.Double, '{'),
        (Token.Literal.String.Escape, '\\"'),
        (Token.Literal.String.Double, '"'),
        (Token.Punctuation, ';'),
        (Token.Text, ' '),
        (Token.Comment.Preproc, '?>'),
        (Token.Other, '\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == tokens 
Example #11
Source File: test_matlab.py    From pygments with BSD 2-Clause "Simplified" License
def test_operator_multiple_space(lexer):
    """
    Test that multiple spaces around an operator don't get lexed as a string.
    """
    fragment = 'x  > 100;\n'
    tokens = [
        (Token.Name, 'x'),
        (Token.Text, ' '),
        (Token.Text, ' '),
        (Token.Operator, '>'),
        (Token.Text, ' '),
        (Token.Literal.Number.Integer, '100'),
        (Token.Punctuation, ';'),
        (Token.Text, '\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == tokens 
Example #12
Source File: test_matlab.py    From pygments with BSD 2-Clause "Simplified" License
def test_multiple_spaces_variable_assignment(lexer):
    """
    Test that multiple spaces around an equals sign don't get lexed as a string.
    """
    fragment = 'x  = 100;\n'
    tokens = [
        (Token.Name, 'x'),
        (Token.Text, ' '),
        (Token.Text, ' '),
        (Token.Punctuation, '='),
        (Token.Text, ' '),
        (Token.Literal.Number.Integer, '100'),
        (Token.Punctuation, ';'),
        (Token.Text, '\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == tokens 
Example #13
Source File: test_matlab.py    From pygments with BSD 2-Clause "Simplified" License
def test_line_continuation(lexer):
    """
    Test that line continuation by ellipses does not produce generic
    output on the second line.
    """
    fragment = "set('T',300,...\n'P',101325);\n"
    tokens = [
        (Token.Name, 'set'),
        (Token.Punctuation, '('),
        (Token.Literal.String, "'"),
        (Token.Literal.String, "T'"),
        (Token.Punctuation, ','),
        (Token.Literal.Number.Integer, '300'),
        (Token.Punctuation, ','),
        (Token.Keyword, '...'),
        (Token.Text, '\n'),
        (Token.Literal.String, "'"),
        (Token.Literal.String, "P'"),
        (Token.Punctuation, ','),
        (Token.Literal.Number.Integer, '101325'),
        (Token.Punctuation, ')'),
        (Token.Punctuation, ';'),
        (Token.Text, '\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == tokens 
Example #14
Source File: test_matlab.py    From pygments with BSD 2-Clause "Simplified" License
def test_single_line(lexer):
    """
    Test that a single line with strings, a method, and numbers is parsed correctly.
    """
    fragment = "set('T',300,'P',101325);\n"
    tokens = [
        (Token.Name, 'set'),
        (Token.Punctuation, '('),
        (Token.Literal.String, "'"),
        (Token.Literal.String, "T'"),
        (Token.Punctuation, ','),
        (Token.Literal.Number.Integer, '300'),
        (Token.Punctuation, ','),
        (Token.Literal.String, "'"),
        (Token.Literal.String, "P'"),
        (Token.Punctuation, ','),
        (Token.Literal.Number.Integer, '101325'),
        (Token.Punctuation, ')'),
        (Token.Punctuation, ';'),
        (Token.Text, '\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == tokens 
Example #15
Source File: test_ruby.py    From pygments with BSD 2-Clause "Simplified" License
def test_escaped_bracestring(lexer):
    fragment = u'str.gsub(%r{\\\\\\\\}, "/")\n'
    tokens = [
        (Token.Name, u'str'),
        (Token.Operator, u'.'),
        (Token.Name, u'gsub'),
        (Token.Punctuation, u'('),
        (Token.Literal.String.Regex, u'%r{'),
        (Token.Literal.String.Regex, u'\\\\'),
        (Token.Literal.String.Regex, u'\\\\'),
        (Token.Literal.String.Regex, u'}'),
        (Token.Punctuation, u','),
        (Token.Text, u' '),
        (Token.Literal.String.Double, u'"'),
        (Token.Literal.String.Double, u'/'),
        (Token.Literal.String.Double, u'"'),
        (Token.Punctuation, u')'),
        (Token.Text, u'\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == tokens 
Example #16
Source File: test_python.py    From pygments with BSD 2-Clause "Simplified" License
def test_walrus_operator(lexer3):
    """
    Tests that ':=' is recognized as an Operator
    """
    fragment = u'if (a := 2) > 4:'
    tokens = [
        (Token.Keyword, 'if'),
        (Token.Text, ' '),
        (Token.Punctuation, '('),
        (Token.Name, 'a'),
        (Token.Text, ' '),
        (Token.Operator, ':='),
        (Token.Text, ' '),
        (Token.Literal.Number.Integer, '2'),
        (Token.Punctuation, ')'),
        (Token.Text, ' '),
        (Token.Operator, '>'),
        (Token.Text, ' '),
        (Token.Literal.Number.Integer, '4'),
        (Token.Punctuation, ':'),
        (Token.Text, '\n'),
    ]
    assert list(lexer3.get_tokens(fragment)) == tokens 
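As with lexer elsewhere, lexer3 is a pytest fixture; presumably it yields a Python 3 lexer, since the walrus operator only exists in Python 3.8+. A sketch under that assumption:

import pytest
from pygments.lexers import Python3Lexer

@pytest.fixture(scope='module')
def lexer3():
    # Assumed fixture: a Python 3 lexer, needed for ':=' support.
    yield Python3Lexer()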
Example #17
Source File: test_shell.py    From pygments with BSD 2-Clause "Simplified" License
def test_curly_no_escape_and_quotes(lexer_bash):
    fragment = u'echo "${a//["b"]/}"\n'
    tokens = [
        (Token.Name.Builtin, u'echo'),
        (Token.Text, u' '),
        (Token.Literal.String.Double, u'"'),
        (Token.String.Interpol, u'${'),
        (Token.Name.Variable, u'a'),
        (Token.Punctuation, u'//['),
        (Token.Literal.String.Double, u'"b"'),
        (Token.Punctuation, u']/'),
        (Token.String.Interpol, u'}'),
        (Token.Literal.String.Double, u'"'),
        (Token.Text, u'\n'),
    ]
    assert list(lexer_bash.get_tokens(fragment)) == tokens 
Example #18
Source File: test_ezhil.py    From pygments with BSD 2-Clause "Simplified" License
def test_gcd_expr(lexer):
    fragment = u'1^3+(5-5)*gcd(a,b)\n'
    tokens = [
        (Token.Number.Integer, u'1'),
        (Token.Operator, u'^'),
        (Token.Literal.Number.Integer, u'3'),
        (Token.Operator, u'+'),
        (Token.Punctuation, u'('),
        (Token.Literal.Number.Integer, u'5'),
        (Token.Operator, u'-'),
        (Token.Literal.Number.Integer, u'5'),
        (Token.Punctuation, u')'),
        (Token.Operator, u'*'),
        (Token.Name, u'gcd'),
        (Token.Punctuation, u'('),
        (Token.Name, u'a'),
        (Token.Operator, u','),
        (Token.Name, u'b'),
        (Token.Punctuation, u')'),
        (Token.Text, u'\n')
    ]
    assert list(lexer.get_tokens(fragment)) == tokens 
Example #19
Source File: test_clexer.py    From pygments with BSD 2-Clause "Simplified" License
def test_label_followed_by_statement(lexer):
    fragment = u'''\
    int main()
    {
    foo:return 0;
      goto foo;
    }
    '''
    tokens = [
        (Token.Keyword.Type, u'int'),
        (Token.Text, u' '),
        (Token.Name.Function, u'main'),
        (Token.Punctuation, u'('),
        (Token.Punctuation, u')'),
        (Token.Text, u'\n'),
        (Token.Punctuation, u'{'),
        (Token.Text, u'\n'),
        (Token.Name.Label, u'foo'),
        (Token.Punctuation, u':'),
        (Token.Keyword, u'return'),
        (Token.Text, u' '),
        (Token.Literal.Number.Integer, u'0'),
        (Token.Punctuation, u';'),
        (Token.Text, u'\n'),
        (Token.Text, u'  '),
        (Token.Keyword, u'goto'),
        (Token.Text, u' '),
        (Token.Name, u'foo'),
        (Token.Punctuation, u';'),
        (Token.Text, u'\n'),
        (Token.Punctuation, u'}'),
        (Token.Text, u'\n'),
    ]
    assert list(lexer.get_tokens(textwrap.dedent(fragment))) == tokens 
Example #20
Source File: test_lexer.py    From jsx-lexer with MIT License
def test_lexing_object_attribute(self):
    lexer = lexers.get_lexer_by_name("jsx")
    tokens = lexer.get_tokens(
        """
        <div style={{ color: 'red' }} />
    """
    )

    self.assertEqual(
        self.__filter_tokens(tokens),
        [
            (Token.Punctuation, "<"),
            (Token.Name.Tag, "div"),
            (Token.Name.Attribute, "style"),
            (Token.Operator, "="),
            (Token.Punctuation, "{"),
            (Token.Punctuation, "{"),
            (Token.Name.Other, "color"),
            (Token.Operator, ":"),
            (Token.Literal.String.Single, "'red'"),
            (Token.Punctuation, "}"),
            (Token.Punctuation, "}"),
            (Token.Punctuation, "/"),
            (Token.Punctuation, ">"),
        ],
    )
Example #21
Source File: test_bibtex.py    From pygments with BSD 2-Clause "Simplified" License
def test_preamble(lexer):
    data = u'@PREAMBLE{"% some LaTeX code here"}'
    tokens = [
        (Token.Name.Class, u'@PREAMBLE'),
        (Token.Punctuation, u'{'),
        (Token.String, u'"'),
        (Token.String, u'% some LaTeX code here'),
        (Token.String, u'"'),
        (Token.Punctuation, u'}'),
        (Token.Text, u'\n'),
    ]
    assert list(lexer.get_tokens(data)) == tokens 
Example #22
Source File: test_bibtex.py    From pygments with BSD 2-Clause "Simplified" License
def test_mismatched_brace(lexer):
    data = '@PREAMBLE(""}'
    tokens = [
        (Token.Name.Class, u'@PREAMBLE'),
        (Token.Punctuation, u'('),
        (Token.String, u'"'),
        (Token.String, u'"'),
        (Token.Error, u'}'),
        (Token.Text, u'\n'),
    ]
    assert list(lexer.get_tokens(data)) == tokens 
Example #23
Source File: test_julia.py    From pygments with BSD 2-Clause "Simplified" License
def test_unicode(lexer):
    """
    Test that the unicode character √ in an expression is recognized
    """
    fragment = u's = \u221a((1/n) * sum(count .^ 2) - mu .^2)\n'
    tokens = [
        (Token.Name, u's'),
        (Token.Text, u' '),
        (Token.Operator, u'='),
        (Token.Text, u' '),
        (Token.Operator, u'\u221a'),
        (Token.Punctuation, u'('),
        (Token.Punctuation, u'('),
        (Token.Literal.Number.Integer, u'1'),
        (Token.Operator, u'/'),
        (Token.Name, u'n'),
        (Token.Punctuation, u')'),
        (Token.Text, u' '),
        (Token.Operator, u'*'),
        (Token.Text, u' '),
        (Token.Name, u'sum'),
        (Token.Punctuation, u'('),
        (Token.Name, u'count'),
        (Token.Text, u' '),
        (Token.Operator, u'.^'),
        (Token.Text, u' '),
        (Token.Literal.Number.Integer, u'2'),
        (Token.Punctuation, u')'),
        (Token.Text, u' '),
        (Token.Operator, u'-'),
        (Token.Text, u' '),
        (Token.Name, u'mu'),
        (Token.Text, u' '),
        (Token.Operator, u'.^'),
        (Token.Literal.Number.Integer, u'2'),
        (Token.Punctuation, u')'),
        (Token.Text, u'\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == tokens 
Example #24
Source File: test_objectiveclexer.py    From pygments with BSD 2-Clause "Simplified" License
def test_literal_number_int(lexer):
    fragment = u'@(1);\n'
    expected = [
        (Token.Literal, u'@('),
        (Token.Literal.Number.Integer, u'1'),
        (Token.Literal, u')'),
        (Token.Punctuation, u';'),
        (Token.Text, u'\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == expected 
Example #25
Source File: test_objectiveclexer.py    From pygments with BSD 2-Clause "Simplified" License
def test_literal_number_expression(lexer):
    fragment = u'@(1+2);\n'
    expected = [
        (Token.Literal, u'@('),
        (Token.Literal.Number.Integer, u'1'),
        (Token.Operator, u'+'),
        (Token.Literal.Number.Integer, u'2'),
        (Token.Literal, u')'),
        (Token.Punctuation, u';'),
        (Token.Text, u'\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == expected 
Example #26
Source File: test_yang.py    From pygments with BSD 2-Clause "Simplified" License
def test_namespace_2(lexer):
    """
    The namespace prefix `yang` should be explicitly highlighted
    """
    fragment = u'type yang:counter64;\n'
    tokens = [
        (Token.Keyword, u'type'),
        (Token.Text.Whitespace, u' '),
        (Token.Name.Namespace, u'yang'),
        (Token.Punctuation, u':'),
        (Token.Name.Variable, u'counter64'),
        (Token.Punctuation, u';'),
        (Token.Text.Whitespace, u'\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == tokens 
Example #27
Source File: test_objectiveclexer.py    From pygments with BSD 2-Clause "Simplified" License
def test_literal_number_bool_expression(lexer):
    fragment = u'@(YES);\n'
    expected = [
        (Token.Literal, u'@('),
        (Token.Name.Builtin, u'YES'),
        (Token.Literal, u')'),
        (Token.Punctuation, u';'),
        (Token.Text, u'\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == expected 
Example #28
Source File: test_objectiveclexer.py    From pygments with BSD 2-Clause "Simplified" License
def test_module_import(lexer):
    fragment = u'@import ModuleA;\n'
    expected = [
        (Token.Keyword, u'@import'),
        (Token.Text, u' '),
        (Token.Name, u'ModuleA'),
        (Token.Punctuation, u';'),
        (Token.Text, u'\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == expected 
Example #29
Source File: test_yang.py    From pygments with BSD 2-Clause "Simplified" License
def test_namespace_1(lexer):
    """
    The namespace `urn:test:std:yang` should not be explicitly highlighted
    """
    fragment = u'namespace urn:test:std:yang;\n'
    tokens = [
        (Token.Keyword, u'namespace'),
        (Token.Text.Whitespace, u' '),
        (Token.Name.Variable, u'urn:test:std:yang'),
        (Token.Punctuation, u';'),
        (Token.Text.Whitespace, u'\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == tokens 
Example #30
Source File: test_praat.py    From pygments with BSD 2-Clause "Simplified" License
def test_broken_unquoted_string(lexer):
    fragment = u'printline string\n... \'interpolated\' string\n'
    tokens = [
        (Token.Keyword, u'printline'),
        (Token.Text, u' '),
        (Token.Literal.String, u'string'),
        (Token.Text, u'\n'),
        (Token.Punctuation, u'...'),
        (Token.Text, u' '),
        (Token.Literal.String.Interpol, u"'interpolated'"),
        (Token.Text, u' '),
        (Token.Literal.String, u'string'),
        (Token.Text, u'\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == tokens
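One property worth remembering when writing expected-token lists like those above: pygments guarantees that the token values concatenate back to the input text, with get_tokens() normalizing the input to end in a newline. A quick self-contained check (MatlabLexer chosen arbitrarily):

from pygments.lexers import MatlabLexer

fragment = 'x  = 100;\n'
values = [value for _, value in MatlabLexer().get_tokens(fragment)]
assert ''.join(values) == fragment  # lexing is lossless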