Python google.protobuf.text_format.Tokenizer() Examples

The following are 30 code examples showing how to use google.protobuf.text_format.Tokenizer(). They are extracted from open source projects; the originating project, file, and license are noted above most examples.

You may also want to check out all available functions/classes of the module google.protobuf.text_format.

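Each snippet below is a test method lifted from a vendored copy of protobuf's text_format_test.py, so it assumes from google.protobuf import text_format and a surrounding unittest.TestCase. As a quick orientation before the examples, here is a minimal stand-alone sketch (not taken from any of the listed projects; the sample text and variable names are illustrative) that mirrors the calls used throughout the tests:

from google.protobuf import text_format

# Build a tokenizer from an iterable of lines, as the tests below do.
text = 'some_number: 42'
tokenizer = text_format.Tokenizer(text.splitlines())

print(tokenizer.ConsumeIdentifier())  # 'some_number'
print(tokenizer.token)                # ':' (current token, not yet consumed)
tokenizer.NextToken()                 # step past the ':'
print(tokenizer.ConsumeInteger())     # 42
print(tokenizer.AtEnd())              # True
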
Example 1
Project: lambda-packs   Author: ryfeus   File: text_format_test.py    License: MIT License
def testConsumeAbstractIntegers(self):
    # This test only tests the failures in the integer parsing methods as well
    # as the '0' special cases.
    int64_max = (1 << 63) - 1
    uint32_max = (1 << 32) - 1
    text = '-1 %d %d' % (uint32_max + 1, int64_max + 1)
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertEqual(-1, tokenizer.ConsumeInteger())

    self.assertEqual(uint32_max + 1, tokenizer.ConsumeInteger())

    self.assertEqual(int64_max + 1, tokenizer.ConsumeInteger())
    self.assertTrue(tokenizer.AtEnd())

    text = '-0 0 0 1.2'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertEqual(0, tokenizer.ConsumeInteger())
    self.assertEqual(0, tokenizer.ConsumeInteger())
    self.assertEqual(True, tokenizer.TryConsumeInteger())
    self.assertEqual(False, tokenizer.TryConsumeInteger())
    with self.assertRaises(text_format.ParseError):
      tokenizer.ConsumeInteger()
    self.assertEqual(1.2, tokenizer.ConsumeFloat())
    self.assertTrue(tokenizer.AtEnd()) 
Example 2
Project: lambda-packs   Author: ryfeus   File: text_format_test.py    License: MIT License
def testConsumeByteString(self):
    text = '"string1\''
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

    text = 'string1"'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

    text = '\n"\\xt"'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

    text = '\n"\\"'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

    text = '\n"\\x"'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString) 
Example 3
Project: lambda-packs   Author: ryfeus   File: text_format_test.py    License: MIT License
def testConsumeAndCheckTrailingComment(self):
    text = 'some_number: 4  # some comment'  # trailing comment on the same line
    tokenizer = text_format.Tokenizer(text.splitlines(), skip_comments=False)
    self.assertRaises(text_format.ParseError,
                      tokenizer.ConsumeCommentOrTrailingComment)

    self.assertEqual('some_number', tokenizer.ConsumeIdentifier())
    self.assertEqual(tokenizer.token, ':')
    tokenizer.NextToken()
    self.assertRaises(text_format.ParseError,
                      tokenizer.ConsumeCommentOrTrailingComment)
    self.assertEqual(4, tokenizer.ConsumeInteger())
    self.assertFalse(tokenizer.AtEnd())

    self.assertEqual((True, '# some comment'),
                     tokenizer.ConsumeCommentOrTrailingComment())
    self.assertTrue(tokenizer.AtEnd()) 
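
Examples 3 and 18-22 together show how comment handling works: by default the tokenizer silently skips comments (a comment-only input is immediately AtEnd()), while skip_comments=False keeps them, and ConsumeCommentOrTrailingComment() returns a (trailing, text) pair whose flag is False for a stand-alone line comment and True for a comment that follows a value on the same line. The sketch below is a rough illustration inferred from those assertions, not code from the listed projects:

from google.protobuf import text_format

# Rough sketch of comment handling, inferred from the assertions in
# Examples 3, 18, 21 and 22 (illustrative input, not project code).
text = '# leading comment\nsome_number: 4  # trailing comment'
tokenizer = text_format.Tokenizer(text.splitlines(), skip_comments=False)

print(tokenizer.ConsumeCommentOrTrailingComment())  # (False, '# leading comment')

tokenizer.ConsumeIdentifier()                       # 'some_number'
tokenizer.NextToken()                               # step past ':'
tokenizer.ConsumeInteger()                          # 4

print(tokenizer.ConsumeCommentOrTrailingComment())  # (True, '# trailing comment')
print(tokenizer.AtEnd())                            # True
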
Example 4
Project: auto-alt-text-lambda-api   Author: abhisuri97   File: text_format_test.py    License: MIT License
def testConsumeAbstractIntegers(self):
    # This test only tests the failures in the integer parsing methods as well
    # as the '0' special cases.
    int64_max = (1 << 63) - 1
    uint32_max = (1 << 32) - 1
    text = '-1 %d %d' % (uint32_max + 1, int64_max + 1)
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertEqual(-1, tokenizer.ConsumeInteger())

    self.assertEqual(uint32_max + 1, tokenizer.ConsumeInteger())

    self.assertEqual(int64_max + 1, tokenizer.ConsumeInteger())
    self.assertTrue(tokenizer.AtEnd())

    text = '-0 0'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertEqual(0, tokenizer.ConsumeInteger())
    self.assertEqual(0, tokenizer.ConsumeInteger())
    self.assertTrue(tokenizer.AtEnd()) 
Example 5
Project: auto-alt-text-lambda-api   Author: abhisuri97   File: text_format_test.py    License: MIT License
def testConsumeByteString(self):
    text = '"string1\''
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

    text = 'string1"'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

    text = '\n"\\xt"'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

    text = '\n"\\"'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

    text = '\n"\\x"'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString) 
Example 6
Project: coremltools   Author: apple   File: text_format_test.py    License: BSD 3-Clause "New" or "Revised" License
def testConsumeAbstractIntegers(self):
    # This test only tests the failures in the integer parsing methods as well
    # as the '0' special cases.
    int64_max = (1 << 63) - 1
    uint32_max = (1 << 32) - 1
    text = '-1 %d %d' % (uint32_max + 1, int64_max + 1)
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertEqual(-1, tokenizer.ConsumeInteger())

    self.assertEqual(uint32_max + 1, tokenizer.ConsumeInteger())

    self.assertEqual(int64_max + 1, tokenizer.ConsumeInteger())
    self.assertTrue(tokenizer.AtEnd())

    text = '-0 0'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertEqual(0, tokenizer.ConsumeInteger())
    self.assertEqual(0, tokenizer.ConsumeInteger())
    self.assertTrue(tokenizer.AtEnd()) 
Example 7
Project: coremltools   Author: apple   File: text_format_test.py    License: BSD 3-Clause "New" or "Revised" License
def testConsumeByteString(self):
    text = '"string1\''
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

    text = 'string1"'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

    text = '\n"\\xt"'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

    text = '\n"\\"'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

    text = '\n"\\x"'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString) 
Example 8
Project: coremltools   Author: apple   File: text_format_test.py    License: BSD 3-Clause "New" or "Revised" License
def testConsumeAndCheckTrailingComment(self):
    text = 'some_number: 4  # some comment'  # trailing comment on the same line
    tokenizer = text_format.Tokenizer(text.splitlines(), skip_comments=False)
    self.assertRaises(text_format.ParseError,
                      tokenizer.ConsumeCommentOrTrailingComment)

    self.assertEqual('some_number', tokenizer.ConsumeIdentifier())
    self.assertEqual(tokenizer.token, ':')
    tokenizer.NextToken()
    self.assertRaises(text_format.ParseError,
                      tokenizer.ConsumeCommentOrTrailingComment)
    self.assertEqual(4, tokenizer.ConsumeInteger())
    self.assertFalse(tokenizer.AtEnd())

    self.assertEqual((True, '# some comment'),
                     tokenizer.ConsumeCommentOrTrailingComment())
    self.assertTrue(tokenizer.AtEnd()) 
Example 9
Project: go2mapillary   Author: enricofer   File: text_format_test.py    License: GNU General Public License v3.0
def testConsumeAbstractIntegers(self):
    # This test only tests the failures in the integer parsing methods as well
    # as the '0' special cases.
    int64_max = (1 << 63) - 1
    uint32_max = (1 << 32) - 1
    text = '-1 %d %d' % (uint32_max + 1, int64_max + 1)
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertEqual(-1, tokenizer.ConsumeInteger())

    self.assertEqual(uint32_max + 1, tokenizer.ConsumeInteger())

    self.assertEqual(int64_max + 1, tokenizer.ConsumeInteger())
    self.assertTrue(tokenizer.AtEnd())

    text = '-0 0'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertEqual(0, tokenizer.ConsumeInteger())
    self.assertEqual(0, tokenizer.ConsumeInteger())
    self.assertTrue(tokenizer.AtEnd()) 
Example 10
Project: go2mapillary   Author: enricofer   File: text_format_test.py    License: GNU General Public License v3.0
def testConsumeByteString(self):
    text = '"string1\''
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

    text = 'string1"'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

    text = '\n"\\xt"'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

    text = '\n"\\"'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

    text = '\n"\\x"'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString) 
Example 11
def testConsumeAbstractIntegers(self):
    # This test only tests the failures in the integer parsing methods as well
    # as the '0' special cases.
    int64_max = (1 << 63) - 1
    uint32_max = (1 << 32) - 1
    text = '-1 %d %d' % (uint32_max + 1, int64_max + 1)
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertEqual(-1, tokenizer.ConsumeInteger())

    self.assertEqual(uint32_max + 1, tokenizer.ConsumeInteger())

    self.assertEqual(int64_max + 1, tokenizer.ConsumeInteger())
    self.assertTrue(tokenizer.AtEnd())

    text = '-0 0 0 1.2'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertEqual(0, tokenizer.ConsumeInteger())
    self.assertEqual(0, tokenizer.ConsumeInteger())
    self.assertEqual(True, tokenizer.TryConsumeInteger())
    self.assertEqual(False, tokenizer.TryConsumeInteger())
    with self.assertRaises(text_format.ParseError):
      tokenizer.ConsumeInteger()
    self.assertEqual(1.2, tokenizer.ConsumeFloat())
    self.assertTrue(tokenizer.AtEnd()) 
Example 12
def testConsumeByteString(self):
    text = '"string1\''
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

    text = 'string1"'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

    text = '\n"\\xt"'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

    text = '\n"\\"'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

    text = '\n"\\x"'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString) 
Example 13
def testConsumeAndCheckTrailingComment(self):
    text = 'some_number: 4  # some comment'  # trailing comment on the same line
    tokenizer = text_format.Tokenizer(text.splitlines(), skip_comments=False)
    self.assertRaises(text_format.ParseError,
                      tokenizer.ConsumeCommentOrTrailingComment)

    self.assertEqual('some_number', tokenizer.ConsumeIdentifier())
    self.assertEqual(tokenizer.token, ':')
    tokenizer.NextToken()
    self.assertRaises(text_format.ParseError,
                      tokenizer.ConsumeCommentOrTrailingComment)
    self.assertEqual(4, tokenizer.ConsumeInteger())
    self.assertFalse(tokenizer.AtEnd())

    self.assertEqual((True, '# some comment'),
                     tokenizer.ConsumeCommentOrTrailingComment())
    self.assertTrue(tokenizer.AtEnd()) 
Example 14
Project: keras-lambda   Author: sunilmallya   File: text_format_test.py    License: MIT License
def testConsumeAbstractIntegers(self):
    # This test only tests the failures in the integer parsing methods as well
    # as the '0' special cases.
    int64_max = (1 << 63) - 1
    uint32_max = (1 << 32) - 1
    text = '-1 %d %d' % (uint32_max + 1, int64_max + 1)
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertEqual(-1, tokenizer.ConsumeInteger())

    self.assertEqual(uint32_max + 1, tokenizer.ConsumeInteger())

    self.assertEqual(int64_max + 1, tokenizer.ConsumeInteger())
    self.assertTrue(tokenizer.AtEnd())

    text = '-0 0'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertEqual(0, tokenizer.ConsumeInteger())
    self.assertEqual(0, tokenizer.ConsumeInteger())
    self.assertTrue(tokenizer.AtEnd()) 
Example 15
Project: keras-lambda   Author: sunilmallya   File: text_format_test.py    License: MIT License
def testConsumeByteString(self):
    text = '"string1\''
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

    text = 'string1"'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

    text = '\n"\\xt"'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

    text = '\n"\\"'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

    text = '\n"\\x"'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString) 
Example 16
Project: lambda-packs   Author: ryfeus   File: text_format_test.py    License: MIT License
def testConsumeIntegers(self):
    # This test only tests the failures in the integer parsing methods as well
    # as the '0' special cases.
    int64_max = (1 << 63) - 1
    uint32_max = (1 << 32) - 1
    text = '-1 %d %d' % (uint32_max + 1, int64_max + 1)
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError,
                      text_format._ConsumeUint32, tokenizer)
    self.assertRaises(text_format.ParseError,
                      text_format._ConsumeUint64, tokenizer)
    self.assertEqual(-1, text_format._ConsumeInt32(tokenizer))

    self.assertRaises(text_format.ParseError,
                      text_format._ConsumeUint32, tokenizer)
    self.assertRaises(text_format.ParseError,
                      text_format._ConsumeInt32, tokenizer)
    self.assertEqual(uint32_max + 1, text_format._ConsumeInt64(tokenizer))

    self.assertRaises(text_format.ParseError,
                      text_format._ConsumeInt64, tokenizer)
    self.assertEqual(int64_max + 1, text_format._ConsumeUint64(tokenizer))
    self.assertTrue(tokenizer.AtEnd())

    text = '-0 -0 0 0'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertEqual(0, text_format._ConsumeUint32(tokenizer))
    self.assertEqual(0, text_format._ConsumeUint64(tokenizer))
    self.assertEqual(0, text_format._ConsumeUint32(tokenizer))
    self.assertEqual(0, text_format._ConsumeUint64(tokenizer))
    self.assertTrue(tokenizer.AtEnd()) 
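
Example 16 exercises the module-level helpers text_format._ConsumeInt32, _ConsumeUint32, _ConsumeInt64, and _ConsumeUint64, which add range checks on top of Tokenizer.ConsumeInteger(): a value outside the requested type raises text_format.ParseError, and a failed attempt does not advance the tokenizer, so a wider type can still consume the same token. These helpers are private (leading underscore) and may differ across protobuf releases; the sketch below only illustrates the pattern in the test above:

from google.protobuf import text_format

# Illustrative use of the private width-checked helpers from Example 16.
tokenizer = text_format.Tokenizer('4294967296'.splitlines())  # (1 << 32), too big for uint32

try:
    text_format._ConsumeUint32(tokenizer)        # out of uint32 range
except text_format.ParseError as err:
    print('rejected by _ConsumeUint32:', err)

# The failure leaves the tokenizer on the same token, so a 64-bit read succeeds.
print(text_format._ConsumeInt64(tokenizer))      # 4294967296
print(tokenizer.AtEnd())                         # True
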
Example 17
Project: lambda-packs   Author: ryfeus   File: text_format_test.py    License: MIT License
def testConsumeBool(self):
    text = 'not-a-bool'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeBool) 
Example 18
Project: lambda-packs   Author: ryfeus   File: text_format_test.py    License: MIT License
def testSkipComment(self):
    tokenizer = text_format.Tokenizer('# some comment'.splitlines())
    self.assertTrue(tokenizer.AtEnd())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeComment) 
Example 19
Project: lambda-packs   Author: ryfeus   File: text_format_test.py    License: MIT License
def testConsumeTwoComments(self):
    text = '# some comment\n# another comment'
    tokenizer = text_format.Tokenizer(text.splitlines(), skip_comments=False)
    self.assertEqual('# some comment', tokenizer.ConsumeComment())
    self.assertFalse(tokenizer.AtEnd())
    self.assertEqual('# another comment', tokenizer.ConsumeComment())
    self.assertTrue(tokenizer.AtEnd()) 
Example 20
Project: lambda-packs   Author: ryfeus   File: text_format_test.py    License: MIT License
def testConsumeTrailingComment(self):
    text = 'some_number: 4\n# some comment'
    tokenizer = text_format.Tokenizer(text.splitlines(), skip_comments=False)
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeComment)

    self.assertEqual('some_number', tokenizer.ConsumeIdentifier())
    self.assertEqual(tokenizer.token, ':')
    tokenizer.NextToken()
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeComment)
    self.assertEqual(4, tokenizer.ConsumeInteger())
    self.assertFalse(tokenizer.AtEnd())

    self.assertEqual('# some comment', tokenizer.ConsumeComment())
    self.assertTrue(tokenizer.AtEnd()) 
Example 21
Project: lambda-packs   Author: ryfeus   File: text_format_test.py    License: MIT License
def testConsumeLineComment(self):
    tokenizer = text_format.Tokenizer('# some comment'.splitlines(),
                                      skip_comments=False)
    self.assertFalse(tokenizer.AtEnd())
    self.assertEqual((False, '# some comment'),
                     tokenizer.ConsumeCommentOrTrailingComment())
    self.assertTrue(tokenizer.AtEnd()) 
Example 22
Project: lambda-packs   Author: ryfeus   File: text_format_test.py    License: MIT License
def testConsumeTwoLineComments(self):
    text = '# some comment\n# another comment'
    tokenizer = text_format.Tokenizer(text.splitlines(), skip_comments=False)
    self.assertEqual((False, '# some comment'),
                     tokenizer.ConsumeCommentOrTrailingComment())
    self.assertFalse(tokenizer.AtEnd())
    self.assertEqual((False, '# another comment'),
                     tokenizer.ConsumeCommentOrTrailingComment())
    self.assertTrue(tokenizer.AtEnd()) 
Example 23
Project: auto-alt-text-lambda-api   Author: abhisuri97   File: text_format_test.py    License: MIT License
def testConsumeIntegers(self):
    # This test only tests the failures in the integer parsing methods as well
    # as the '0' special cases.
    int64_max = (1 << 63) - 1
    uint32_max = (1 << 32) - 1
    text = '-1 %d %d' % (uint32_max + 1, int64_max + 1)
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError,
                      text_format._ConsumeUint32, tokenizer)
    self.assertRaises(text_format.ParseError,
                      text_format._ConsumeUint64, tokenizer)
    self.assertEqual(-1, text_format._ConsumeInt32(tokenizer))

    self.assertRaises(text_format.ParseError,
                      text_format._ConsumeUint32, tokenizer)
    self.assertRaises(text_format.ParseError,
                      text_format._ConsumeInt32, tokenizer)
    self.assertEqual(uint32_max + 1, text_format._ConsumeInt64(tokenizer))

    self.assertRaises(text_format.ParseError,
                      text_format._ConsumeInt64, tokenizer)
    self.assertEqual(int64_max + 1, text_format._ConsumeUint64(tokenizer))
    self.assertTrue(tokenizer.AtEnd())

    text = '-0 -0 0 0'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertEqual(0, text_format._ConsumeUint32(tokenizer))
    self.assertEqual(0, text_format._ConsumeUint64(tokenizer))
    self.assertEqual(0, text_format._ConsumeUint32(tokenizer))
    self.assertEqual(0, text_format._ConsumeUint64(tokenizer))
    self.assertTrue(tokenizer.AtEnd()) 
Example 24
Project: auto-alt-text-lambda-api   Author: abhisuri97   File: text_format_test.py    License: MIT License
def testConsumeBool(self):
    text = 'not-a-bool'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeBool) 
Example 25
Project: auto-alt-text-lambda-api   Author: abhisuri97   File: text_format_test.py    License: MIT License
def testSkipComment(self):
    tokenizer = text_format.Tokenizer('# some comment'.splitlines())
    self.assertTrue(tokenizer.AtEnd())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeComment) 
Example 26
Project: auto-alt-text-lambda-api   Author: abhisuri97   File: text_format_test.py    License: MIT License
def testConsumeTwoComments(self):
    text = '# some comment\n# another comment'
    tokenizer = text_format.Tokenizer(text.splitlines(), skip_comments=False)
    self.assertEqual('# some comment', tokenizer.ConsumeComment())
    self.assertFalse(tokenizer.AtEnd())
    self.assertEqual('# another comment', tokenizer.ConsumeComment())
    self.assertTrue(tokenizer.AtEnd()) 
Example 27
Project: auto-alt-text-lambda-api   Author: abhisuri97   File: text_format_test.py    License: MIT License
def testConsumeTrailingComment(self):
    text = 'some_number: 4\n# some comment'
    tokenizer = text_format.Tokenizer(text.splitlines(), skip_comments=False)
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeComment)

    self.assertEqual('some_number', tokenizer.ConsumeIdentifier())
    self.assertEqual(tokenizer.token, ':')
    tokenizer.NextToken()
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeComment)
    self.assertEqual(4, tokenizer.ConsumeInteger())
    self.assertFalse(tokenizer.AtEnd())

    self.assertEqual('# some comment', tokenizer.ConsumeComment())
    self.assertTrue(tokenizer.AtEnd()) 
Example 28
Project: coremltools   Author: apple   File: text_format_test.py    License: BSD 3-Clause "New" or "Revised" License
def testConsumeIntegers(self):
    # This test only tests the failures in the integer parsing methods as well
    # as the '0' special cases.
    int64_max = (1 << 63) - 1
    uint32_max = (1 << 32) - 1
    text = '-1 %d %d' % (uint32_max + 1, int64_max + 1)
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError,
                      text_format._ConsumeUint32, tokenizer)
    self.assertRaises(text_format.ParseError,
                      text_format._ConsumeUint64, tokenizer)
    self.assertEqual(-1, text_format._ConsumeInt32(tokenizer))

    self.assertRaises(text_format.ParseError,
                      text_format._ConsumeUint32, tokenizer)
    self.assertRaises(text_format.ParseError,
                      text_format._ConsumeInt32, tokenizer)
    self.assertEqual(uint32_max + 1, text_format._ConsumeInt64(tokenizer))

    self.assertRaises(text_format.ParseError,
                      text_format._ConsumeInt64, tokenizer)
    self.assertEqual(int64_max + 1, text_format._ConsumeUint64(tokenizer))
    self.assertTrue(tokenizer.AtEnd())

    text = '-0 -0 0 0'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertEqual(0, text_format._ConsumeUint32(tokenizer))
    self.assertEqual(0, text_format._ConsumeUint64(tokenizer))
    self.assertEqual(0, text_format._ConsumeUint32(tokenizer))
    self.assertEqual(0, text_format._ConsumeUint64(tokenizer))
    self.assertTrue(tokenizer.AtEnd()) 
Example 29
Project: coremltools   Author: apple   File: text_format_test.py    License: BSD 3-Clause "New" or "Revised" License
def testConsumeBool(self):
    text = 'not-a-bool'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeBool) 
Example 30
Project: coremltools   Author: apple   File: text_format_test.py    License: BSD 3-Clause "New" or "Revised" License
def testSkipComment(self):
    tokenizer = text_format.Tokenizer('# some comment'.splitlines())
    self.assertTrue(tokenizer.AtEnd())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeComment)