Python tokenize.open() Examples

The following are 30 code examples of tokenize.open(), drawn from open-source projects. The source file, project, and license for each example are noted above it. You may also want to check out the other available functions and classes of the tokenize module.
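For orientation, tokenize.open() opens a Python source file in text mode using the encoding declared in the file itself (a PEP 263 coding cookie or BOM), falling back to UTF-8 when none is declared. Below is a minimal sketch of typical usage; the path "example_module.py" is hypothetical.

import tokenize

# Open the file with the encoding detected from its coding cookie or BOM,
# rather than the locale's default encoding.
with tokenize.open("example_module.py") as f:
    source = f.read()
    encoding = f.encoding   # the detected encoding, e.g. 'utf-8'

# The decoded source can then be compiled, parsed, or tokenized.
code = compile(source, "example_module.py", "exec")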
Example #1
Source File: PyShell.py    From Fluid-Designer with GNU General Public License v3.0
def __init__(self, *args):
        self.breakpoints = []
        EditorWindow.__init__(self, *args)
        self.text.bind("<<set-breakpoint-here>>", self.set_breakpoint_here)
        self.text.bind("<<clear-breakpoint-here>>", self.clear_breakpoint_here)
        self.text.bind("<<open-python-shell>>", self.flist.open_shell)

        self.breakpointPath = os.path.join(idleConf.GetUserCfgDir(),
                                           'breakpoints.lst')
        # whenever a file is changed, restore breakpoints
        def filename_changed_hook(old_hook=self.io.filename_change_hook,
                                  self=self):
            self.restore_file_breaks()
            old_hook()
        self.io.set_filename_change_hook(filename_changed_hook)
        if self.io.filename:
            self.restore_file_breaks()
        self.color_breakpoint_text() 
Example #2
Source File: test_warnings.py    From vnpy_crypto with MIT License
def test_warning_calls():
        # combined "ignore" and stacklevel error
        base = Path(numpy.__file__).parent

        for path in base.rglob("*.py"):
            if base / "testing" in path.parents:
                continue
            if path == base / "__init__.py":
                continue
            if path == base / "random" / "__init__.py":
                continue
            # use tokenize to auto-detect encoding on systems where no
            # default encoding is defined (e.g. LANG='C')
            with tokenize.open(str(path)) as file:
                tree = ast.parse(file.read())
                FindFuncs(path).visit(tree) 
Example #3
Source File: quitter.py    From qutebrowser with GNU General Public License v3.0
def quit_(save: bool = False,
          session: sessions.ArgType = None) -> None:
    """Quit qutebrowser.

    Args:
        save: When given, save the open windows even if auto_save.session
                is turned off.
        session: The name of the session to save.
    """
    if session is not None and not save:
        raise cmdutils.CommandError("Session name given without --save!")
    if save:
        if session is None:
            session = sessions.default
        instance.shutdown(session=session)
    else:
        instance.shutdown() 
Example #4
Source File: _pydev_execfile.py    From PyDev.Debugger with Eclipse Public License 1.0
def execfile(file, glob=None, loc=None):
    if glob is None:
        import sys
        glob = sys._getframe().f_back.f_globals
    if loc is None:
        loc = glob

    # It seems that the best way is using tokenize.open(): http://code.activestate.com/lists/python-dev/131251/
    # (but tokenize.open() is only available since Python 3.2)
    import tokenize
    if hasattr(tokenize, 'open'):
        # version 3.2
        stream = tokenize.open(file)  # @UndefinedVariable
    else:
        # version 3.0 or 3.1
        detect_encoding = tokenize.detect_encoding(open(file, mode="rb").readline)
        stream = open(file, encoding=detect_encoding[0])
    try:
        contents = stream.read()
    finally:
        stream.close()

    # execute the script (note: it's important to compile first to have the filename set in debug mode)
    exec(compile(contents+"\n", file, 'exec'), glob, loc) 
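Since tokenize.open() has been available since Python 3.2, the hasattr() fallback above is only needed for very old interpreters. On any supported Python 3 the same read reduces to a minimal sketch like the following (read_source is a hypothetical helper name):

import tokenize

def read_source(path):
    # tokenize.open() performs the PEP 263 coding-cookie / BOM detection itself,
    # so no manual tokenize.detect_encoding() call is required.
    with tokenize.open(path) as stream:
        return stream.read()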
Example #5
Source File: quitter.py    From qutebrowser with GNU General Public License v3.0
def _compile_modules(self) -> None:
        """Compile all modules to catch SyntaxErrors."""
        if os.path.basename(sys.argv[0]) == 'qutebrowser':
            # Launched via launcher script
            return
        elif hasattr(sys, 'frozen'):
            return
        else:
            path = os.path.abspath(os.path.dirname(qutebrowser.__file__))
            if not os.path.isdir(path):
                # Probably running from a python egg.
                return

        for dirpath, _dirnames, filenames in os.walk(path):
            for fn in filenames:
                if os.path.splitext(fn)[1] == '.py' and os.path.isfile(fn):
                    with tokenize.open(os.path.join(dirpath, fn)) as f:
                        compile(f.read(), fn, 'exec') 
Example #6
Source File: __init__.py    From pyta with GNU General Public License v3.0
def _verify_pre_check(filepath):
    """Check student code for certain issues."""
    # Make sure the program doesn't crash for students.
    # Could use some improvement for better logging and error reporting.
    try:
        # Check for inline "pylint:" comment, which may indicate a student
        # trying to disable a check.
        with tokenize.open(os.path.expanduser(filepath)) as f:
            for tok_type, content, _, _, _ in tokenize.generate_tokens(f.readline):
                if tok_type != tokenize.COMMENT:
                    continue
                match = pylint.constants.OPTION_RGX.search(content)
                if match is not None:
                    print('[ERROR] String "pylint:" found in comment. ' +
                          'No check run on file `{}`.\n'.format(filepath))
                    return False
    except IndentationError as e:
        print('[ERROR] python_ta could not check your code due to an ' +
              'indentation error at line {}.'.format(e.lineno))
        return False
    except tokenize.TokenError as e:
        print('[ERROR] python_ta could not check your code due to a ' +
              'syntax error in your file.')
        return False
    return True 
Example #7
Source File: test_warnings.py    From recruit with Apache License 2.0
def test_warning_calls():
        # combined "ignore" and stacklevel error
        base = Path(numpy.__file__).parent

        for path in base.rglob("*.py"):
            if base / "testing" in path.parents:
                continue
            if path == base / "__init__.py":
                continue
            if path == base / "random" / "__init__.py":
                continue
            # use tokenize to auto-detect encoding on systems where no
            # default encoding is defined (e.g. LANG='C')
            with tokenize.open(str(path)) as file:
                tree = ast.parse(file.read())
                FindFuncs(path).visit(tree) 
Example #8
Source File: openpy.py    From Computable with MIT License
def read_py_file(filename, skip_encoding_cookie=True):
    """Read a Python file, using the encoding declared inside the file.
    
    Parameters
    ----------
    filename : str
      The path to the file to read.
    skip_encoding_cookie : bool
      If True (the default), and the encoding declaration is found in the first
      two lines, that line will be excluded from the output - compiling a
      unicode string with an encoding declaration is a SyntaxError in Python 2.
    
    Returns
    -------
    A unicode string containing the contents of the file.
    """
    with open(filename) as f:   # the open function defined in this module.
        if skip_encoding_cookie:
            return "".join(strip_encoding_cookie(f))
        else:
            return f.read() 
Example #9
Source File: test_warnings.py    From GraphicDesignPatternByPython with MIT License
def test_warning_calls():
        # combined "ignore" and stacklevel error
        base = Path(numpy.__file__).parent

        for path in base.rglob("*.py"):
            if base / "testing" in path.parents:
                continue
            if path == base / "__init__.py":
                continue
            if path == base / "random" / "__init__.py":
                continue
            # use tokenize to auto-detect encoding on systems where no
            # default encoding is defined (e.g. LANG='C')
            with tokenize.open(str(path)) as file:
                tree = ast.parse(file.read())
                FindFuncs(path).visit(tree) 
Example #10
Source File: test_warnings.py    From GraphicDesignPatternByPython with MIT License
def warning_calls():
    # combined "ignore" and stacklevel error
    base = Path(scipy.__file__).parent

    bad_filters = []
    bad_stacklevels = []
    
    for path in base.rglob("*.py"):
        # use tokenize to auto-detect encoding on systems where no
        # default encoding is defined (e.g. LANG='C')
        with tokenize.open(str(path)) as file:
            tree = ast.parse(file.read(), filename=str(path))
            finder = FindFuncs(path.relative_to(base))
            finder.visit(tree)
            bad_filters.extend(finder.bad_filters)
            bad_stacklevels.extend(finder.bad_stacklevels)

    return bad_filters, bad_stacklevels 
Example #11
Source File: untabify.py    From python3_ios with BSD 3-Clause "New" or "Revised" License
def process(filename, tabsize, verbose=True):
    try:
        with tokenize.open(filename) as f:
            text = f.read()
            encoding = f.encoding
    except IOError as msg:
        print("%r: I/O error: %s" % (filename, msg))
        return
    newtext = text.expandtabs(tabsize)
    if newtext == text:
        return
    backup = filename + "~"
    try:
        os.unlink(backup)
    except OSError:
        pass
    try:
        os.rename(filename, backup)
    except OSError:
        pass
    with open(filename, "w", encoding=encoding) as f:
        f.write(newtext)
    if verbose:
        print(filename) 
Example #12
Source File: test_warnings.py    From predictive-maintenance-using-machine-learning with Apache License 2.0
def test_warning_calls():
        # combined "ignore" and stacklevel error
        base = Path(numpy.__file__).parent

        for path in base.rglob("*.py"):
            if base / "testing" in path.parents:
                continue
            if path == base / "__init__.py":
                continue
            if path == base / "random" / "__init__.py":
                continue
            # use tokenize to auto-detect encoding on systems where no
            # default encoding is defined (e.g. LANG='C')
            with tokenize.open(str(path)) as file:
                tree = ast.parse(file.read())
                FindFuncs(path).visit(tree) 
Example #13
Source File: trace.py    From Fluid-Designer with GNU General Public License v3.0
def __init__(self, counts=None, calledfuncs=None, infile=None,
                 callers=None, outfile=None):
        self.counts = counts
        if self.counts is None:
            self.counts = {}
        self.counter = self.counts.copy() # map (filename, lineno) to count
        self.calledfuncs = calledfuncs
        if self.calledfuncs is None:
            self.calledfuncs = {}
        self.calledfuncs = self.calledfuncs.copy()
        self.callers = callers
        if self.callers is None:
            self.callers = {}
        self.callers = self.callers.copy()
        self.infile = infile
        self.outfile = outfile
        if self.infile:
            # Try to merge existing counts file.
            try:
                with open(self.infile, 'rb') as f:
                    counts, calledfuncs, callers = pickle.load(f)
                self.update(self.__class__(counts, calledfuncs, callers))
            except (OSError, EOFError, ValueError) as err:
                print(("Skipping counts file %r: %s"
                                      % (self.infile, err)), file=sys.stderr) 
Example #14
Source File: trace.py    From Fluid-Designer with GNU General Public License v3.0
def _find_strings(filename, encoding=None):
    """Return a dict of possible docstring positions.

    The dict has an entry (mapping the line number to 1) for each line
    that contains only a string or a part of a triple-quoted string.
    """
    d = {}
    # If the first token is a string, then it's the module docstring.
    # Add this special case so that the test in the loop passes.
    prev_ttype = token.INDENT
    with open(filename, encoding=encoding) as f:
        tok = tokenize.generate_tokens(f.readline)
        for ttype, tstr, start, end, line in tok:
            if ttype == token.STRING:
                if prev_ttype == token.INDENT:
                    sline, scol = start
                    eline, ecol = end
                    for i in range(sline, eline + 1):
                        d[i] = 1
            prev_ttype = ttype
    return d 
Example #15
Source File: ScriptBinding.py    From Fluid-Designer with GNU General Public License v3.0
def tabnanny(self, filename):
        # XXX: tabnanny should work on binary files as well
        with tokenize.open(filename) as f:
            try:
                tabnanny.process_tokens(tokenize.generate_tokens(f.readline))
            except tokenize.TokenError as msg:
                msgtxt, (lineno, start) = msg.args
                self.editwin.gotoline(lineno)
                self.errorbox("Tabnanny Tokenizing Error",
                              "Token Error: %s" % msgtxt)
                return False
            except tabnanny.NannyNag as nag:
                # The error messages from tabnanny are too confusing...
                self.editwin.gotoline(nag.get_lineno())
                self.errorbox("Tab/space error", indent_message)
                return False
        return True 
Example #16
Source File: pydoc.py    From jawfish with MIT License
def importfile(path):
    """Import a Python source file or compiled file given its path."""
    magic = imp.get_magic()
    with open(path, 'rb') as file:
        if file.read(len(magic)) == magic:
            kind = imp.PY_COMPILED
        else:
            kind = imp.PY_SOURCE
        file.seek(0)
        filename = os.path.basename(path)
        name, ext = os.path.splitext(filename)
        try:
            module = imp.load_module(name, file, path, (ext, 'r', kind))
        except:
            raise ErrorDuringImport(path, sys.exc_info())
    return module 
Example #17
Source File: PyShell.py    From Fluid-Designer with GNU General Public License v3.0
def restore_file_breaks(self):
        self.text.update()   # this enables setting "BREAK" tags to be visible
        if self.io is None:
            # can happen if IDLE closes due to the .update() call
            return
        filename = self.io.filename
        if filename is None:
            return
        if os.path.isfile(self.breakpointPath):
            with open(self.breakpointPath, "r") as fp:
                lines = fp.readlines()
            for line in lines:
                if line.startswith(filename + '='):
                    breakpoint_linenumbers = eval(line[len(filename)+1:])
                    for breakpoint_linenumber in breakpoint_linenumbers:
                        self.set_breakpoint(breakpoint_linenumber) 
Example #18
Source File: PyShell.py    From Fluid-Designer with GNU General Public License v3.0
def execfile(self, filename, source=None):
        "Execute an existing file"
        if source is None:
            with tokenize.open(filename) as fp:
                source = fp.read()
        try:
            code = compile(source, filename, "exec")
        except (OverflowError, SyntaxError):
            self.tkconsole.resetoutput()
            print('*** Error in script or command!\n'
                 'Traceback (most recent call last):',
                  file=self.tkconsole.stderr)
            InteractiveInterpreter.showsyntaxerror(self, filename)
            self.tkconsole.showprompt()
        else:
            self.runcode(code) 
Example #19
Source File: test_warnings.py    From pySINDy with MIT License
def test_warning_calls():
        # combined "ignore" and stacklevel error
        base = Path(numpy.__file__).parent

        for path in base.rglob("*.py"):
            if base / "testing" in path.parents:
                continue
            if path == base / "__init__.py":
                continue
            if path == base / "random" / "__init__.py":
                continue
            # use tokenize to auto-detect encoding on systems where no
            # default encoding is defined (e.g. LANG='C')
            with tokenize.open(str(path)) as file:
                tree = ast.parse(file.read())
                FindFuncs(path).visit(tree) 
Example #20
Source File: trace.py    From Imogen with MIT License
def __init__(self, counts=None, calledfuncs=None, infile=None,
                 callers=None, outfile=None):
        self.counts = counts
        if self.counts is None:
            self.counts = {}
        self.counter = self.counts.copy() # map (filename, lineno) to count
        self.calledfuncs = calledfuncs
        if self.calledfuncs is None:
            self.calledfuncs = {}
        self.calledfuncs = self.calledfuncs.copy()
        self.callers = callers
        if self.callers is None:
            self.callers = {}
        self.callers = self.callers.copy()
        self.infile = infile
        self.outfile = outfile
        if self.infile:
            # Try to merge existing counts file.
            try:
                with open(self.infile, 'rb') as f:
                    counts, calledfuncs, callers = pickle.load(f)
                self.update(self.__class__(counts, calledfuncs, callers))
            except (OSError, EOFError, ValueError) as err:
                print(("Skipping counts file %r: %s"
                                      % (self.infile, err)), file=sys.stderr) 
Example #21
Source File: trace.py    From Imogen with MIT License
def _find_strings(filename, encoding=None):
    """Return a dict of possible docstring positions.

    The dict has an entry (mapping the line number to 1) for each line
    that contains only a string or a part of a triple-quoted string.
    """
    d = {}
    # If the first token is a string, then it's the module docstring.
    # Add this special case so that the test in the loop passes.
    prev_ttype = token.INDENT
    with open(filename, encoding=encoding) as f:
        tok = tokenize.generate_tokens(f.readline)
        for ttype, tstr, start, end, line in tok:
            if ttype == token.STRING:
                if prev_ttype == token.INDENT:
                    sline, scol = start
                    eline, ecol = end
                    for i in range(sline, eline + 1):
                        d[i] = 1
            prev_ttype = ttype
    return d 
Example #22
Source File: trace.py    From ironpython3 with Apache License 2.0
def __init__(self, counts=None, calledfuncs=None, infile=None,
                 callers=None, outfile=None):
        self.counts = counts
        if self.counts is None:
            self.counts = {}
        self.counter = self.counts.copy() # map (filename, lineno) to count
        self.calledfuncs = calledfuncs
        if self.calledfuncs is None:
            self.calledfuncs = {}
        self.calledfuncs = self.calledfuncs.copy()
        self.callers = callers
        if self.callers is None:
            self.callers = {}
        self.callers = self.callers.copy()
        self.infile = infile
        self.outfile = outfile
        if self.infile:
            # Try to merge existing counts file.
            try:
                counts, calledfuncs, callers = \
                        pickle.load(open(self.infile, 'rb'))
                self.update(self.__class__(counts, calledfuncs, callers))
            except (OSError, EOFError, ValueError) as err:
                print(("Skipping counts file %r: %s"
                                      % (self.infile, err)), file=sys.stderr) 
Example #23
Source File: trace.py    From ironpython3 with Apache License 2.0
def _find_strings(filename, encoding=None):
    """Return a dict of possible docstring positions.

    The dict has an entry (mapping the line number to 1) for each line
    that contains only a string or a part of a triple-quoted string.
    """
    d = {}
    # If the first token is a string, then it's the module docstring.
    # Add this special case so that the test in the loop passes.
    prev_ttype = token.INDENT
    with open(filename, encoding=encoding) as f:
        tok = tokenize.generate_tokens(f.readline)
        for ttype, tstr, start, end, line in tok:
            if ttype == token.STRING:
                if prev_ttype == token.INDENT:
                    sline, scol = start
                    eline, ecol = end
                    for i in range(sline, eline + 1):
                        d[i] = 1
            prev_ttype = ttype
    return d 
Example #24
Source File: ScriptBinding.py    From ironpython3 with Apache License 2.0
def tabnanny(self, filename):
        # XXX: tabnanny should work on binary files as well
        with tokenize.open(filename) as f:
            try:
                tabnanny.process_tokens(tokenize.generate_tokens(f.readline))
            except tokenize.TokenError as msg:
                msgtxt, (lineno, start) = msg.args
                self.editwin.gotoline(lineno)
                self.errorbox("Tabnanny Tokenizing Error",
                              "Token Error: %s" % msgtxt)
                return False
            except tabnanny.NannyNag as nag:
                # The error messages from tabnanny are too confusing...
                self.editwin.gotoline(nag.get_lineno())
                self.errorbox("Tab/space error", indent_message)
                return False
        return True 
Example #25
Source File: PyShell.py    From ironpython3 with Apache License 2.0
def __init__(self, *args):
        self.breakpoints = []
        EditorWindow.__init__(self, *args)
        self.text.bind("<<set-breakpoint-here>>", self.set_breakpoint_here)
        self.text.bind("<<clear-breakpoint-here>>", self.clear_breakpoint_here)
        self.text.bind("<<open-python-shell>>", self.flist.open_shell)

        self.breakpointPath = os.path.join(idleConf.GetUserCfgDir(),
                                           'breakpoints.lst')
        # whenever a file is changed, restore breakpoints
        def filename_changed_hook(old_hook=self.io.filename_change_hook,
                                  self=self):
            self.restore_file_breaks()
            old_hook()
        self.io.set_filename_change_hook(filename_changed_hook)
        if self.io.filename:
            self.restore_file_breaks()
        self.color_breakpoint_text() 
Example #26
Source File: PyShell.py    From ironpython3 with Apache License 2.0
def restore_file_breaks(self):
        self.text.update()   # this enables setting "BREAK" tags to be visible
        if self.io is None:
            # can happen if IDLE closes due to the .update() call
            return
        filename = self.io.filename
        if filename is None:
            return
        if os.path.isfile(self.breakpointPath):
            with open(self.breakpointPath, "r") as fp:
                lines = fp.readlines()
            for line in lines:
                if line.startswith(filename + '='):
                    breakpoint_linenumbers = eval(line[len(filename)+1:])
                    for breakpoint_linenumber in breakpoint_linenumbers:
                        self.set_breakpoint(breakpoint_linenumber) 
Example #27
Source File: PyShell.py    From ironpython3 with Apache License 2.0
def execfile(self, filename, source=None):
        "Execute an existing file"
        if source is None:
            with tokenize.open(filename) as fp:
                source = fp.read()
        try:
            code = compile(source, filename, "exec")
        except (OverflowError, SyntaxError):
            self.tkconsole.resetoutput()
            print('*** Error in script or command!\n'
                 'Traceback (most recent call last):',
                  file=self.tkconsole.stderr)
            InteractiveInterpreter.showsyntaxerror(self, filename)
            self.tkconsole.showprompt()
        else:
            self.runcode(code) 
Example #28
Source File: test_warnings.py    From mxnet-lambda with Apache License 2.0
def test_warning_calls():
        # combined "ignore" and stacklevel error
        base = Path(numpy.__file__).parent

        for path in base.rglob("*.py"):
            if base / "testing" in path.parents:
                continue
            if path == base / "__init__.py":
                continue
            if path == base / "random" / "__init__.py":
                continue
            # use tokenize to auto-detect encoding on systems where no
            # default encoding is defined (e.g. LANG='C')
            with tokenize.open(str(path)) as file:
                tree = ast.parse(file.read())
                FindFuncs(path).visit(tree) 
Example #29
Source File: trace.py    From Project-New-Reign---Nemesis-Main with GNU General Public License v3.0
def __init__(self, counts=None, calledfuncs=None, infile=None,
                 callers=None, outfile=None):
        self.counts = counts
        if self.counts is None:
            self.counts = {}
        self.counter = self.counts.copy() # map (filename, lineno) to count
        self.calledfuncs = calledfuncs
        if self.calledfuncs is None:
            self.calledfuncs = {}
        self.calledfuncs = self.calledfuncs.copy()
        self.callers = callers
        if self.callers is None:
            self.callers = {}
        self.callers = self.callers.copy()
        self.infile = infile
        self.outfile = outfile
        if self.infile:
            # Try to merge existing counts file.
            try:
                with open(self.infile, 'rb') as f:
                    counts, calledfuncs, callers = pickle.load(f)
                self.update(self.__class__(counts, calledfuncs, callers))
            except (OSError, EOFError, ValueError) as err:
                print(("Skipping counts file %r: %s"
                                      % (self.infile, err)), file=sys.stderr) 
Example #30
Source File: trace.py    From Project-New-Reign---Nemesis-Main with GNU General Public License v3.0
def _find_strings(filename, encoding=None):
    """Return a dict of possible docstring positions.

    The dict has an entry (mapping the line number to 1) for each line
    that contains only a string or a part of a triple-quoted string.
    """
    d = {}
    # If the first token is a string, then it's the module docstring.
    # Add this special case so that the test in the loop passes.
    prev_ttype = token.INDENT
    with open(filename, encoding=encoding) as f:
        tok = tokenize.generate_tokens(f.readline)
        for ttype, tstr, start, end, line in tok:
            if ttype == token.STRING:
                if prev_ttype == token.INDENT:
                    sline, scol = start
                    eline, ecol = end
                    for i in range(sline, eline + 1):
                        d[i] = 1
            prev_ttype = ttype
    return d