Python ply.yacc.yacc() Examples

The following are 30 code examples of ply.yacc.yacc(), drawn from the open-source projects named above each example.
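Before the project examples, here is a minimal, self-contained sketch of the usual ply.yacc.yacc() workflow. The grammar, token names, and input string are illustrative only and are not taken from any of the projects below.

import ply.lex as lex
import ply.yacc as yacc

# --- Lexer: token names, token regexes, and an error handler ---
tokens = ('NUMBER', 'PLUS', 'TIMES')

t_PLUS = r'\+'
t_TIMES = r'\*'
t_ignore = ' \t'

def t_NUMBER(t):
    r'\d+'
    t.value = int(t.value)
    return t

def t_error(t):
    print("Illegal character %r" % t.value[0])
    t.lexer.skip(1)

# --- Parser: precedence (later entries bind tighter) and grammar rules ---
precedence = (
    ('left', 'PLUS'),
    ('left', 'TIMES'),
)

def p_expression_binop(p):
    '''expression : expression PLUS expression
                  | expression TIMES expression'''
    p[0] = p[1] + p[3] if p[2] == '+' else p[1] * p[3]

def p_expression_number(p):
    'expression : NUMBER'
    p[0] = p[1]

def p_error(p):
    print("Syntax error at %r" % (p.value if p else 'EOF'))

lexer = lex.lex()
parser = yacc.yacc(write_tables=False)      # write_tables=False skips generating parsetab.py
print(parser.parse('2+3*4', lexer=lexer))   # prints 14

Many of the examples below pass module=self to yacc.yacc() instead, in which case the p_* rules are methods of a class and yacc collects them from that object rather than from the calling module.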
Example #1
Source File: tagvalue.py    From tools-python with Apache License 2.0
def parse(self, text):
        self.document = document.Document()
        self.error = False
        self.yacc.parse(text, lexer=self.lex)
        # FIXME: this state does not make sense
        self.builder.reset()
        validation_messages = []
        # Report extra errors only if self.error is False; otherwise there
        # would be redundant messages
        validation_messages = self.document.validate(validation_messages)
        if not self.error:
            if validation_messages:
                for msg in validation_messages:
                    self.logger.log(msg)
                self.error = True
        return self.document, self.error 
Example #2
Source File: parser.py    From stone with MIT License
def parse(self, data, path=None):
        """
        Args:
            data (str): Raw specification text.
            path (Optional[str]): Path to specification on filesystem. Only
                used to tag tokens with the file they originated from.
        """
        assert not self.exhausted, 'Must call get_parser() to reset state.'
        self.path = path
        parsed_data = self.yacc.parse(data, lexer=self.lexer, debug=self.debug)
        # It generally makes sense for lexer errors to come first, because
        # those can be the root of parser errors. Also, since we only show one
        # error max right now, it's best to show the lexing one.
        for err_msg, lineno in self.lexer.errors[::-1]:
            self.errors.insert(0, (err_msg, lineno, self.path))
        parsed_data.extend(self.anony_defs)
        self.exhausted = True
        return parsed_data 
Example #3
Source File: syntactic_step_parser.py    From PyRATA with Apache License 2.0
def __init__(self, **kwargs):
    if 'tokens' in kwargs.keys(): # MANDATORY
      self.tokens = kwargs['tokens']
    kwargs.pop('tokens', None)

    # debugging and logging http://www.dabeaz.com/ply/ply.html#ply_nn44 
    #self.parser = yacc.yacc(module=self, start='step', errorlog=yacc.NullLogger(), debug = True, debugfile='debug_file', **kwargs) 
    self.parser = yacc.yacc(module=self, start='step', errorlog=yacc.NullLogger(), debug = False, **kwargs) 

    # See https://github.com/dabeaz/ply/blob/master/ply/yacc.py, where
    # yaccdebug = True enables debugging mode: if set, yacc generates a
    # 'parser.out' file in the current directory.

# """"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
#  MAIN
# """"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""


# example use: 
Example #4
Source File: qasmparser.py    From qiskit-terra with Apache License 2.0
def __init__(self, filename):
        """Create the parser."""
        if filename is None:
            filename = ""
        self.lexer = QasmLexer(filename)
        self.tokens = self.lexer.tokens
        self.parse_dir = tempfile.mkdtemp(prefix='qiskit')
        self.precedence = (
            ('left', '+', '-'),
            ('left', '*', '/'),
            ('left', 'negative', 'positive'),
            ('right', '^'))
        # yacc() also accepts write_tables=<bool> and optimize=<bool>
        self.parser = yacc.yacc(module=self, debug=False,
                                outputdir=self.parse_dir)
        self.qasm = None
        self.parse_deb = False
        self.global_symtab = {}                          # global symtab
        self.current_symtab = self.global_symtab         # top of symbol stack
        self.symbols = []                                # symbol stack
        self.external_functions = ['sin', 'cos', 'tan', 'exp', 'ln', 'sqrt',
                                   'acos', 'atan', 'asin'] 
Example #5
Source File: parser.py    From Ocean-Data-Map-Project with GNU General Public License v3.0
def __init__(self, **kwargs):
        self.lexer = data.calculated_parser.lexer.Lexer()
        self.tokens = self.lexer.tokens

        # Sets the operator precedence for the parser. Unary minus is the
        # highest, followed by exponentiation, then multiplication/division,
        # with addition/subtraction lowest.
        self.precedence = (
            ('left', 'PLUS', 'MINUS'),
            ('left', 'TIMES', 'DIVIDE'),
            ('left', 'POWER'),
            ('right', 'UMINUS'),
        )
        self.parser = yacc.yacc(module=self)
        self.data = None
        self.expression = None
        self.result = np.nan 
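In the precedence table above, 'UMINUS' is not a real token; it exists only to give unary minus its own (highest) precedence level. A grammar rule claims that level with %prec, roughly like the following sketch (illustrative, not the project's actual rule):

    def p_expression_uminus(self, p):
        'expression : MINUS expression %prec UMINUS'
        p[0] = -p[2]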
Example #6
Source File: _parser.py    From altair-transform with MIT License
def __init__(self, **kw):
        self.debug = kw.get("debug", 0)
        try:
            modname = (
                os.path.split(os.path.splitext(__file__)[0])[1]
                + "_"
                + self.__class__.__name__
            )
        except ValueError:
            modname = "parser" + "_" + self.__class__.__name__
        self.debugfile = modname + ".dbg"
        self.tabmodule = modname + "_" + "parsetab"

        # Build the lexer and parser
        lex.lex(module=self, debug=self.debug)
        yacc.yacc(
            module=self,
            debug=self.debug,
            debugfile=self.debugfile,
            tabmodule=self.tabmodule,
        ) 
Example #7
Source File: _mof_compiler.py    From pywbem with GNU Lesser General Public License v2.1
def __init__(self, msg, parser_token=None, cim_error=None):
        """
        Parameters:

          msg (:term:`string`):
            Message text describing the error.

          parser_token (lex.LexToken or yacc.YaccProduction):
            PLY lex or yacc parser token (that is, the ``p`` argument of a yacc
            parser function or the ``t`` argument of a lex parser function).
            This token is used to obtain the MOF source text and location
            information.

            If `None`, no MOF source text or location information will be
            obtained.

          cim_error (:class:`~pywbem.CIMError`):
            CIM error returned by the CIM repository.
        """
        super(MOFRepositoryError, self).__init__(msg, parser_token)
        self._cim_error = cim_error 
Example #8
Source File: prettypfa.py    From hadrian with Apache License 2.0
def ppfas(text, subs={}, **subs2):
    """Parse a string of several PrettyPFA expressions (delimited by semicolons) as a list of PFA abstract syntax trees.

    :type text: string
    :param text: PrettyPFA expressions (delimited by semicolons)
    :type subs: dict from substitution names to substitutions
    :param subs: replacement values as PFA titus.pfaast.Ast, PrettyPFA strings, or PFA Pythonized JSON
    :type subs2: dict from substitution names to substitutions
    :param subs2: added to ``subs`` (a more convenient way to pass them)
    :rtype: list of titus.pfaast.Expression
    :return: parsed expressions as PFA
    """

    subs2.update(subs)

    if not exprParser.initialized:
        try:
            import ply.lex as lex
            import ply.yacc as yacc
        except ImportError:
            raise ImportError("ply (used to parse the PrettyPFA) is not available on your system")
        else:
            exprParser.initialize(lex, yacc)

    return exprParser.parse(text, subs2) 
Example #9
Source File: WebIDL.py    From JavascriptSubtitlesOctopus with MIT License
def __init__(self, outputdir='', lexer=None):
        Tokenizer.__init__(self, outputdir, lexer)
        self.parser = yacc.yacc(module=self,
                                outputdir=outputdir,
                                tabmodule='webidlyacc',
                                errorlog=yacc.NullLogger(),
                                picklefile='WebIDLGrammar.pkl')
        self._globalScope = IDLScope(BuiltinLocation("<Global Scope>"), None, None)
        self._installBuiltins(self._globalScope)
        self._productions = []

        self._filename = "<builtin>"
        self.lexer.input(Parser._builtins)
        self._filename = None

        self.parser.parse(lexer=self.lexer,tracking=True) 
Example #10
Source File: ksp_parser.py    From SublimeKSP with GNU General Public License v3.0
def init(outputdir=None):
    outputdir = outputdir or os.path.dirname(__file__)  # os.getcwd()
    current_module = sys.modules[__name__]
    #print (outputdir, current_module)
    debug = 0
    optimize = 0
    lexer = lex.lex(optimize=0, debug=debug)

    # lexer.input('on init\n   declare shared parameter cutoff')
    # while True:
    #     tok = lexer.token()
    #     if tok is None:
    #         break
    #     print (tok)

    return yacc.yacc(method="LALR", optimize=optimize, debug=debug,
                     write_tables=0, module=current_module, start='script',
                     outputdir=outputdir, tabmodule='ksp_parser_tab') 
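A hedged usage sketch (the KSP source string is a guess at a minimal script, not taken from the project; parse() falls back to the lexer most recently built by lex.lex(), i.e. the one created inside init()):

parser = init()
tree = parser.parse("on init\nend on")   # hypothetical KSP source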
Example #11
Source File: _mof_compiler.py    From pywbem with GNU Lesser General Public License v2.1
def _yacc(verbose=False, out_dir=None):
    """
    Return YACC parser object for the MOF compiler.

    Parameters:

      verbose (bool): Print messages while creating the parser object.

      out_dir (string): Path name of the directory in which the YACC table
        module source file (_mofparsetab.py) for the MOF compiler will be
        generated. If None, that file will not be generated.

    Returns:

      yacc.Parser: YACC parser object for the MOF compiler.
    """

    # The write_tables argument controls whether the YACC parser writes
    # the YACC table module file.
    write_tables = (out_dir is not None)

    # In yacc(), the 'debug' parameter controls the main error
    # messages to the 'errorlog' in addition to the debug messages
    # to the 'debuglog'. Because we want to see the error messages,
    # we enable debug but set the debuglog to the NullLogger.
    # To enable debug logging, set debuglog to some other logger
    # (e.g. PlyLogger(sys.stdout)) to generate log output.
    return yacc.yacc(optimize=_optimize,
                     tabmodule=_tabmodule,
                     outputdir=out_dir,
                     write_tables=write_tables,
                     debug=verbose,
                     debuglog=yacc.NullLogger(),
                     errorlog=yacc.PlyLogger(sys.stdout) if verbose
                     else yacc.NullLogger()) 
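A hedged sketch of how the two parameters map to behavior, assuming the module-level _optimize and _tabmodule names referenced above are defined elsewhere in _mof_compiler.py:

parser = _yacc()                                   # silent; no table module written
parser = _yacc(verbose=True, out_dir='/tmp/mof')   # prints messages and writes _mofparsetab.py into out_dir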
Example #12
Source File: dot.py    From CodeAtlasSublime with Eclipse Public License 1.0
def build(self,**kargs):
            opt=dict(debug=0,write_tables=0)
            opt.update(**kargs)
            if _has_ply:
                self._parser = yacc.yacc(module=self,**opt) 
Example #13
Source File: _mof_compiler.py    From pywbem with GNU Lesser General Public License v2.1
def __init__(self, msg, parser_token=None):
        """
        Parameters:

          msg (:term:`string`):
            Message text describing the error.

          parser_token (lex.LexToken or yacc.YaccProduction):
            PLY lex or yacc parser token (that is, the ``p`` argument of a yacc
            parser function or the ``t`` argument of a lex parser function).
            This token is used to obtain the MOF source text and location
            information.

            If `None`, no MOF source text or location information will be
            obtained.
        """
        assert msg is not None
        self._msg = msg
        if parser_token is None:
            self.args = (None, None, None, None)
        else:
            assert isinstance(
                parser_token,
                (lex.LexToken, yacc.YaccProduction))
            mof_ = parser_token.lexer.parser.mof
            self.args = (parser_token.lexer.lineno,
                         _find_column(mof_, parser_token),
                         parser_token.lexer.parser.file,
                         _get_error_context(mof_, parser_token)) 
Example #14
Source File: _mof_compiler.py    From pywbem with GNU Lesser General Public License v2.1
def _value(token):
    """
    Return the value of the (Lex or Yacc) token.
    """
    if isinstance(token, lex.LexToken):
        value = token.value
    else:
        assert isinstance(token, yacc.YaccProduction)
        value = token[1]  # always first item in grammar
    return value 
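A hedged illustration of why the isinstance check matters: the same helper can be called with a LexToken (for example from a lexer error handler) or with a YaccProduction (from a yacc grammar function). The rule and handler below are made up for demonstration only:

def p_identifier(p):                         # hypothetical grammar rule
    'identifier : IDENT'
    p[0] = _value(p)                         # YaccProduction branch: returns p[1]

def t_error(t):                              # hypothetical lexer error handler
    print("Bad input near %r" % _value(t))   # LexToken branch: returns t.value
    t.lexer.skip(1)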
Example #15
Source File: parser.py    From p4-hlir with Apache License 2.0
def __init__(self):
        self.lexer = P4Lexer()
        self.lexer.build()
        self.tokens = self.lexer.tokens

        self.parser = yacc.yacc(module = self,
                                write_tables=0,
                                debug=False,
                                start = 'p4_objects')

        self.errors_cnt = 0
        self.current_pragmas = set() 
Example #16
Source File: parser.py    From calvin-base with Apache License 2.0
def __init__(self, lexer=None):
        super(CalvinParser, self).__init__()
        if lexer:
            self.lexer = lexer
        else:
            self.lexer = lex.lex(module=calvin_rules, debug=False, optimize=False)
        # Since the parser may be called from other scripts, we want to have control
        # over where parse tables (and parser.out log) will be put if the tables
        # have to be recreated
        this_file = os.path.realpath(__file__)
        containing_dir = os.path.dirname(this_file)
        self.parser = yacc.yacc(module=self, debug=True, optimize=False, outputdir=containing_dir) 
Example #17
Source File: parser.py    From nml with GNU General Public License v2.0
def __init__(self, rebuild = False, debug = False):
        if debug:
            try:
                import os
                os.remove(os.path.normpath(os.path.join(os.path.dirname(__file__), "generated", "parsetab.py")))
            except FileNotFoundError:
                pass
        self.lexer = tokens.NMLLexer()
        self.lexer.build(rebuild or debug)
        self.tokens = self.lexer.tokens
        self.parser = yacc.yacc(module=self,
                                debug=debug, optimize=not (rebuild or debug),
                                write_tables=not debug,
                                tabmodule='nml.generated.parsetab') 
Example #18
Source File: parser.py    From lift with MIT License
def __init__(self, filename):
        self.filename = filename
        self.tokens = [ r.upper() for r in self.reserved ] + [ a[2:] for a in dir(self) if a[:2] == 't_' and a[2:].isupper() ]
        self.lexer = lex.lex(module=self, debug=False)
        self.parser = yacc.yacc(
            module=self,
            debug=False,
            write_tables=False,
            picklefile=os.path.splitext(
                sys.modules[self.__class__.__module__].__file__
                )[0]+'.parsetab') 
Example #19
Source File: _mof_compiler.py    From pywbem with GNU Lesser General Public License v2.1
def _lexpos(token):
    """
    Return the position in the (Lex or Yacc) token.
    """
    if isinstance(token, lex.LexToken):
        lexpos = token.lexpos
    else:
        assert isinstance(token, yacc.YaccProduction)
        lexpos = token.lexpos(1)  # always first item in grammar
    assert isinstance(lexpos, int)
    return lexpos 
Example #20
Source File: pyflwor_monkey_patch.py    From haros with MIT License
def __new__(cls, pyflwor_dir, **kwargs):
        self = super(Parser, cls).__new__(cls, **kwargs)
        self.names = dict()
        self.yacc = yacc.yacc(module=self, debug=False,
                              optimize=True, write_tables=False,
                              outputdir=pyflwor_dir, **kwargs)
        return self.yacc


###############################################################################
# Entry Point
############################################################################### 
Example #21
Source File: parser.py    From iguana with Creative Commons Attribution Share Alike 4.0 International
def compile(expression):
    global obj_to_query
    global sort_by
    global limit
    obj_to_query = ''
    sort_by = []
    limit = -1
    lexer = lex.lex(module=search.lexer)
    parser = yacc.yacc()
    return parser.parse(expression, lexer=lexer) 
Example #22
Source File: grammar.py    From pytezos with MIT License
def __init__(self, debug=False, write_tables=False):
        self.lexer = SimpleMichelsonLexer()
        self.parser = yacc(
            module=self,
            debug=debug,
            write_tables=write_tables,
        ) 
Example #23
Source File: interpreter.py    From pikalang with MIT License
def parse(self, source):
        lexer = lex.lex()
        parser = yacc.yacc(debug=False, write_tables=False)
        return parser.parse(source) 
Example #24
Source File: parser.py    From MARA_Framework with GNU Lesser General Public License v3.0
def __init__(self):
        self.lexer = lex.lex(module=MyLexer(), optimize=1)
        self.parser = yacc.yacc(module=MyParser(), start='goal', optimize=1) 
Example #25
Source File: _parser.py    From altair-transform with MIT License
def parse(self, expression):
        return yacc.parse(expression) 
Example #26
Source File: _parser.py    From Cirq with Apache License 2.0
def __init__(self):
        self.parser = yacc.yacc(module=self, debug=False, write_tables=False)
        self.circuit = Circuit()
        self.qregs: Dict[str, int] = {}
        self.cregs: Dict[str, int] = {}
        self.qelibinc = False
        self.lexer = QasmLexer()
        self.supported_format = False
        self.parsedQasm: Optional[Qasm] = None
        self.qubits: Dict[str, ops.Qid] = {}
        self.functions = {
            'sin': np.sin,
            'cos': np.cos,
            'tan': np.tan,
            'exp': np.exp,
            'ln': np.log,
            'sqrt': np.sqrt,
            'acos': np.arccos,
            'atan': np.arctan,
            'asin': np.arcsin
        }

        self.binary_operators = {
            '+': operator.add,
            '-': operator.sub,
            '*': operator.mul,
            '/': operator.truediv,
            '^': operator.pow
        } 
Example #27
Source File: parser.py    From Requester with MIT License
def parse(self, input=None, lexer=None, **kwargs):
        lexer = lexer or self.default_lexer
        return self.yacc.parse(input=input, lexer=lexer, **kwargs) 
Example #28
Source File: parser.py    From Requester with MIT License
def __init__(self, debug=False, **kwargs):
        self.default_lexer = GraphQLLexer()
        self.tokens = self.default_lexer.tokens
        kwargs['debug'] = debug
        self.yacc = yacc.yacc(module=self, **kwargs) 
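Combined with the parse() wrapper in Example #27 (from the same file), a hedged usage sketch; the class name GraphQLParser and the query string are assumptions:

parser = GraphQLParser()
document = parser.parse('{ user { id name } }')   # parsed with the default GraphQLLexer built above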
Example #29
Source File: refine_parser.py    From nnabla with Apache License 2.0
def __init__(self, graph):
        self.graph = graph
        self.tokens = Token.tokens
        self.parser = yacc.yacc(module=self,
                                start='rf_net',
                                debug=False) 
Example #30
Source File: prettypfa.py    From hadrian with Apache License 2.0
def parse(self, text, subs):
        """Parse the given text, returning a PFA abstract syntax tree.

        :type text: string
        :param text: command line to parse
        :type subs: dict of substitutions
        :param subs: substitutions to apply to any strings in ``<<French quotes>>``
        :rtype: titus.pfaast.EngineConfig or titus.pfaast.Expression
        :return: parsed text as an abstract syntax tree
        """

        self.lexer.lineno = 1
        self.text = text
        self.subs = subs
        out = self.yacc.parse(text, lexer=self.lexer)
        if self.wholeDocument:
            return out
        else:
            state = InterpretationState()
            if isinstance(out, (list, tuple)):
                out2 = [x.asExpr(state) for x in out]
            else:
                out2 = out.asExpr(state)
            state.avroTypeBuilder.resolveTypes()
            return out2

###