Python os.path Examples

The following are code examples showing how to use the os.path module. They are taken from open source Python projects. You can vote up the examples you like or vote down the ones you don't like.

Example 1
Project: Coulomb   Author: DynamoDS   File: stacktrace_extractor.py    MIT License 6 votes vote down vote up
def writeDataToFile():
    """Serialize the collected session data as one JSON object to `fo`.

    Relies on module globals: stackTraces, tags, userId, version,
    sessionStartMicroTime / sessionEndMicroTime, sessionDate, out_path
    and the output file object `fo`.  When no stack traces were
    collected, any previously written output file is deleted instead.
    """
    if not stackTraces:  # Nothing to report: drop stale output, if any
        if os.path.exists(out_path):
            os.remove(out_path)
        return

    payload = {
        "StackTraces": stackTraces,
        "Tags": list(tags),
        "UserID": userId,
        "WorkspaceVersion": version,
        "SessionDuration": sessionEndMicroTime - sessionStartMicroTime,
        "Date": sessionDate,
    }
    print(json.dumps(payload), file=fo)


        # Process each line of the session file 
Example 2
Project: godot-mono-builds   Author: godotengine   File: cmd_utils.py    MIT License 6 votes vote down vote up
def add_base_arguments(parser, default_help):
    """Register the command-line options shared by all build scripts.

    Adds --verbose-make, --configure-dir, --install-dir, --mono-sources
    and --mxe-prefix to *parser*.  --mono-sources only becomes required
    when the MONO_SOURCE_ROOT environment variable provides no default.
    """
    import os
    from os.path import join as path_join

    home_dir = os.environ.get('HOME')
    mono_sources = os.environ.get('MONO_SOURCE_ROOT', '')

    parser.add_argument('--verbose-make', action='store_true', default=False, help=default_help)
    parser.add_argument('--configure-dir', default=path_join(home_dir, 'mono-configs'), help=default_help)
    parser.add_argument('--install-dir', default=path_join(home_dir, 'mono-installs'), help=default_help)

    # Only make --mono-sources optional when the environment supplies a value.
    if mono_sources:
        parser.add_argument('--mono-sources', default=mono_sources, help=default_help)
    else:
        parser.add_argument('--mono-sources', required=True)

    parser.add_argument('--mxe-prefix', default='/usr', help=default_help)
Example 3
Project: godot-mono-builds   Author: godotengine   File: os_utils.py    MIT License 6 votes vote down vote up
def find_executable(name) -> str:
    """Return the full path of executable *name*, or '' when not found.

    Searches every directory on PATH plus the current working directory
    (last).  On Windows, each PATHEXT extension is tried for every
    candidate path.
    """
    is_windows = os.name == 'nt'
    # BUG FIX: the original called `ENV_PATH_SEP.split(value)`, which
    # splits the one-character separator BY the environment value and
    # returns the separator itself — effectively disabling the search.
    # Split the value by the separator; os.pathsep is ';' on Windows
    # and ':' elsewhere.
    windows_exts = os.environ['PATHEXT'].split(os.pathsep) if is_windows else None
    path_dirs = os.environ['PATH'].split(os.pathsep)

    search_dirs = path_dirs + [os.getcwd()]  # cwd is last in the list

    for search_dir in search_dirs:  # renamed: `dir` shadowed the builtin
        path = os.path.join(search_dir, name)

        if is_windows:
            for extension in windows_exts:
                path_with_ext = path + extension

                if os.path.isfile(path_with_ext) and os.access(path_with_ext, os.X_OK):
                    return path_with_ext
        else:
            if os.path.isfile(path) and os.access(path, os.X_OK):
                return path

    return ''
Example 4
Project: godot-mono-builds   Author: godotengine   File: os_utils.py    MIT License 6 votes vote down vote up
def get_emsdk_root():
    """Locate the Emscripten root directory from the user's EM config.

    Reads the config file named by EM_CONFIG (default ~/.emscripten),
    executes it (it is a Python file of simple assignments) and returns
    the emscripten directory derived from BINARYEN_ROOT (new style) or
    EMSCRIPTEN_ROOT (old style).

    :raises BuildError: when the file is missing, invalid, or defines
        neither variable.
    """
    # Shamelessly copied from Godot's detect.py
    em_config_file = os.getenv('EM_CONFIG') or os.path.expanduser('~/.emscripten')
    if not os.path.exists(em_config_file):
        raise BuildError("Emscripten configuration file '%s' does not exist" % em_config_file)
    with open(em_config_file) as f:
        em_config = {}
        try:
            # Emscripten configuration file is a Python file with simple assignments.
            exec(f.read(), em_config)
        except Exception as e:
            # BUG FIX: `StandardError` is Python 2 only; on Python 3 it is
            # an undefined name, so any config error raised a NameError
            # instead of the intended BuildError.
            raise BuildError("Emscripten configuration file '%s' is invalid:\n%s" % (em_config_file, e))
    if 'BINARYEN_ROOT' in em_config and os.path.isdir(os.path.join(em_config.get('BINARYEN_ROOT'), 'emscripten')):
        # New style, emscripten path as a subfolder of BINARYEN_ROOT
        return os.path.join(em_config.get('BINARYEN_ROOT'), 'emscripten')
    elif 'EMSCRIPTEN_ROOT' in em_config:
        # Old style (but can be there as a result from previous activation, so do last)
        return em_config.get('EMSCRIPTEN_ROOT')
    else:
        raise BuildError("'BINARYEN_ROOT' or 'EMSCRIPTEN_ROOT' missing in Emscripten configuration file '%s'" % em_config_file)
Example 5
Project: godot-mono-builds   Author: godotengine   File: android.py    MIT License 6 votes vote down vote up
def android_autodetect_cmake(opts: AndroidOpts) -> str:
    """Pick the newest CMake version installed under the Android SDK.

    Scans <android_sdk_root>/cmake for version-named directories and
    returns the highest version as a string.

    :raises BuildError: when no version directory is found.
    """
    from distutils.version import LooseVersion
    from os import listdir

    cmake_root = path_join(opts.android_sdk_root, 'cmake')
    found = []

    for entry in listdir(cmake_root):
        if not os.path.isdir(path_join(cmake_root, entry)):
            continue
        try:
            found.append(LooseVersion(entry))
        except ValueError:
            pass  # Not a version folder

    if not found:
        raise BuildError('Cannot auto-detect Android CMake version')

    newest = str(max(found))
    print('Auto-detected Android CMake version: ' + newest)

    return newest
Example 6
Project: autofff   Author: ChiefGokhlayeh   File: scanner.py    MIT License 6 votes vote down vote up
def _mine_function_definitions(self, ast: pycparser.c_ast.FileAST) -> list:
        """Collect all function definitions declared in self.inputFile.

        Walks the top-level nodes of *ast*, keeps FuncDef nodes whose
        declaration coordinate points at the scanner's input file, and
        logs each kept function's name, return type and parameters.

        :returns: list of pycparser FuncDef nodes
        """
        foundFunctions = []

        for elem in ast.ext:
            if isinstance(elem, pycparser.c_ast.FuncDef):
                decl = elem.decl
                funcName = decl.name
                header = decl.coord.file
                # Only keep definitions that live in the scanned file itself.
                if os.path.normpath(header) == os.path.normpath(self.inputFile):
                    funcDecl = decl.type
                    foundFunctions.append(elem)
                    LOGGER.debug(
                        f"[{len(foundFunctions)}] Function Definition: {funcName}")
                    LOGGER.debug(f"\tReturn type: {utils.get_type_name(decl)}")
                    # FIX: identity check (`is None`) instead of `== None`.
                    if funcDecl.args is None:
                        LOGGER.debug("\tEmpty parameter list")
                    else:
                        paramList = funcDecl.args.params
                        for param in paramList:
                            paramName = param.name
                            paramType = utils.get_type_name(param)
                            LOGGER.debug(
                                f"\tParameter: {paramName} of Type: {paramType}")
        return foundFunctions
Example 7
Project: autofff   Author: ChiefGokhlayeh   File: scanner.py    MIT License 6 votes vote down vote up
def _preprocess_file(self, filename: str, cpp_path: str = 'cpp', cpp_args: str = '') -> str:
        path_list = [cpp_path]
        if isinstance(cpp_args, list):
            path_list += cpp_args
        elif cpp_args != '':
            path_list += [cpp_args]
        path_list += [filename]

        try:
            # Note the use of universal_newlines to treat all newlines
            # as \n for Python's purpose
            #
            pipe = subprocess.Popen(path_list,
                                    stdout=subprocess.PIPE,
                                    universal_newlines=True)
            text = pipe.communicate()[0]
        except OSError as e:
            raise RuntimeError("Unable to invoke 'cpp'.  " +
                               'Make sure its path was passed correctly\n' +
                               ('Original error: %s' % e))

        filteredText = self.ignorePattern.sub('', text)

        return filteredText 
Example 8
Project: alfred-yubikey-otp   Author: robertoriv   File: workflow.py    MIT License 6 votes vote down vote up
def cachedir(self):
        """Path to workflow's cache directory.

        The cache directory is a subdirectory of Alfred's own cache directory
        in ``~/Library/Caches``. The full path is:

        ``~/Library/Caches/com.runningwithcrayons.Alfred-X/Workflow Data/<bundle id>``

        ``Alfred-X`` may be ``Alfred-2`` or ``Alfred-3``.

        :returns: full path to workflow's cache directory
        :rtype: ``unicode``

        """
        # Prefer the directory Alfred announces via its environment,
        # falling back to the computed default.
        dirpath = self.alfred_env.get('workflow_cache') or self._default_cachedir
        return self._create(dirpath)
Example 9
Project: alfred-yubikey-otp   Author: robertoriv   File: workflow.py    MIT License 6 votes vote down vote up
def cache_data(self, name, data):
        """Save ``data`` to cache under ``name``.

        If ``data`` is ``None``, the corresponding cache file will be
        deleted.

        :param name: name of datastore
        :param data: data to store. This may be any object supported by
                the cache serializer

        """
        dumper = manager.serializer(self.cache_serializer)
        path = self.cachefile('%s.%s' % (name, self.cache_serializer))

        # ``None`` means "invalidate": remove any existing cache file.
        if data is None:
            if os.path.exists(path):
                os.unlink(path)
                self.logger.debug('deleted cache file: %s', path)
            return

        # atomic_writer avoids leaving a partial file behind on failure.
        with atomic_writer(path, 'wb') as fh:
            dumper.dump(data, fh)

        self.logger.debug('cached data: %s', path)
Example 10
Project: alfred-yubikey-otp   Author: robertoriv   File: workflow.py    MIT License 6 votes vote down vote up
def _delete_directory_contents(self, dirpath, filter_func):
        """Delete all files in a directory.

        :param dirpath: path to directory to clear
        :type dirpath: ``unicode`` or ``str``
        :param filter_func function to determine whether a file shall be
            deleted or not.
        :type filter_func ``callable``

        """
        if os.path.exists(dirpath):
            for filename in os.listdir(dirpath):
                if not filter_func(filename):
                    continue
                path = os.path.join(dirpath, filename)
                if os.path.isdir(path):
                    shutil.rmtree(path)
                else:
                    os.unlink(path)
                self.logger.debug('deleted : %r', path) 
Example 11
Project: pyblish-win   Author: pyblish   File: patchcheck.py    GNU Lesser General Public License v3.0 6 votes vote down vote up
def changed_files():
    """Get the list of changed or added files from the VCS."""
    # Pick the VCS by probing the checkout layout: a Mercurial clone has
    # SRCDIR/.hg, a Subversion checkout has ./.svn.
    if os.path.isdir(os.path.join(SRCDIR, '.hg')):
        vcs = 'hg'
        cmd = 'hg status --added --modified --no-status'
        # With mq patches applied, diff against the patch queue parent so
        # the patches' own changes are included.
        if mq_patches_applied():
            cmd += ' --rev qparent'
    elif os.path.isdir('.svn'):
        vcs = 'svn'
        cmd = 'svn status --quiet --non-interactive --ignore-externals'
    else:
        sys.exit('need a checkout to get modified files')

    st = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE)
    try:
        st.wait()
        if vcs == 'hg':
            # hg output is one bare path per line.
            return [x.decode().rstrip() for x in st.stdout]
        else:
            # svn output: status columns, then the path; keep only
            # added ('A') / modified ('M') entries.
            # NOTE(review): `x[0] in 'AM'` assumes str lines (Python 2);
            # on Python 3 st.stdout yields bytes and x[0] is an int.
            output = (x.decode().rstrip().rsplit(None, 1)[-1]
                      for x in st.stdout if x[0] in 'AM')
        return set(path for path in output if os.path.isfile(path))
    finally:
        st.stdout.close()
Example 12
Project: pyblish-win   Author: pyblish   File: patchcheck.py    GNU Lesser General Public License v3.0 6 votes vote down vote up
def normalize_docs_whitespace(file_paths):
    # Rewrite each file under SRCDIR with whitespace normalized by the
    # module-level `ws_re` regex; return the list of paths actually fixed.
    # NOTE: Python 2 module — it uses the print statement below.
    fixed = []
    for path in file_paths:
        abspath = os.path.join(SRCDIR, path)
        try:
            with open(abspath, 'rb') as f:
                lines = f.readlines()
            # br'\1' keeps the captured group and drops the offending
            # whitespace around it.
            new_lines = [ws_re.sub(br'\1', line) for line in lines]
            if new_lines != lines:
                # Keep a .bak copy before rewriting the file in place.
                shutil.copyfile(abspath, abspath + '.bak')
                with open(abspath, 'wb') as f:
                    f.writelines(new_lines)
                fixed.append(path)
        except Exception as err:
            # Best-effort: report and continue with the remaining files.
            print 'Cannot fix %s: %s' % (path, err)
    return fixed
Example 13
Project: pyblish-win   Author: pyblish   File: pindent.py    GNU Lesser General Public License v3.0 6 votes vote down vote up
def make_backup(filename):
    """Rename *filename* to "<filename>~", replacing any previous backup.

    Failures to remove an old backup or to rename are reported on
    stderr rather than raised.
    """
    import os, os.path
    backup = filename + '~'
    # Clear out a previous backup first: renaming onto an existing
    # name is not portable across platforms.
    if os.path.lexists(backup):
        try:
            os.remove(backup)
        except os.error:
            print("Can't remove backup %r" % (backup,), file=sys.stderr)
    try:
        os.rename(filename, backup)
    except os.error:
        print("Can't rename %r to %r" % (filename, backup), file=sys.stderr)
Example 14
Project: fs_image   Author: facebookincubator   File: test_compiler.py    MIT License 5 votes vote down vote up
def _subvol_mock_lexists_is_btrfs_and_run_as_root(fn):
    '''
    The purpose of these mocks is to run the compiler while recording
    what commands we WOULD HAVE run on the subvolume.  This is possible
    because all subvolume mutations are supposed to go through
    `Subvol.run_as_root`.  This lets our tests assert that the
    expected operations would have been executed.
    '''
    # Stack the patch decorators in the same order as the original
    # hand-written chain, so the injected mock arguments line up.
    for target, attr in (
        (os.path, 'lexists'),
        (subvol_utils, '_path_is_btrfs_subvol'),
        (subvol_utils.Subvol, 'run_as_root'),
        (rpm_action, 'nspawn_in_subvol'),
    ):
        fn = unittest.mock.patch.object(target, attr)(fn)
    return fn
Example 15
Project: fs_image   Author: facebookincubator   File: test_compiler.py    MIT License 5 votes vote down vote up
def _os_path_lexists(path):
    '''
    This ugly mock exists because I don't want to set up a fake subvolume,
    from which the `sample_items` `RemovePathItem`s can remove their files.
    '''
    if path.endswith(b'/to/remove'):
        return True
    assert 'AFAIK, os.path.lexists is only used by the `RemovePathItem` tests' 
Example 16
Project: fs_image   Author: facebookincubator   File: test_compiler.py    MIT License 5 votes vote down vote up
def _btrfs_get_volume_props(subvol_path):
    """Return fabricated volume props for the fake subvolume; delegate otherwise."""
    if subvol_path != os.path.join(_SUBVOLS_DIR, _FAKE_SUBVOL):
        return _orig_btrfs_get_volume_props(subvol_path)
    # We don't have an actual btrfs subvolume, so make up a UUID.
    return {'UUID': 'fake uuid', 'Parent UUID': None}
Example 17
Project: fs_image   Author: facebookincubator   File: test_compiler.py    MIT License 5 votes vote down vote up
def mock_layer_dir_access(test_case, subvolume_path):
    '''
    `SubvolumeOnDisk` does a ton of validation, which makes it hard to
    use it to read or write subvols that are not actual target outputs.

    Instead, this yields a fake layer directory path, and mocks
    `SubvolumeOnDisk.from_json_file` **only** for calls querying the fake
    path.  For those calls, it returns a fake `SubvolumeOnDisk` pointing at
    the supplied `subvolume_path`.

    NOTE(review): this is a single-yield generator — presumably wrapped
    with `@contextlib.contextmanager` at the (unseen) definition site;
    confirm there.
    '''
    sigil_dirname = b'fake-parent-layer'
    orig_from_json_file = svod.SubvolumeOnDisk.from_json_file
    with unittest.mock.patch.object(
        svod.SubvolumeOnDisk, 'from_json_file'
    ) as from_json_file, temp_dir() as td:
        # Create a real file so opening layer.json succeeds; its content
        # is never parsed because the mock intercepts the read.
        parent_layer_file = td / sigil_dirname / 'layer.json'
        os.mkdir(parent_layer_file.dirname())
        with open(parent_layer_file, 'w') as f:
            f.write('this will never be read')

        def check_call(infile, subvolumes_dir):
            # Only intercept reads under our sigil directory; all other
            # layer.json reads keep the real parsing behavior.
            if Path(infile.name).dirname().basename() != sigil_dirname:
                return orig_from_json_file(infile, subvolumes_dir)

            test_case.assertEqual(parent_layer_file, infile.name)
            test_case.assertEqual(_SUBVOLS_DIR, subvolumes_dir)

            class FakeSubvolumeOnDisk:
                def subvolume_path(self):
                    return subvolume_path.decode()

            return FakeSubvolumeOnDisk()

        from_json_file.side_effect = check_call
        yield parent_layer_file.dirname()
Example 18
Project: fs_image   Author: facebookincubator   File: test_compiler.py    MIT License 5 votes vote down vote up
def setUp(self):
        """Prepare a test: raise diff limits and locate the build appliance.

        Stores the path of the host-test-build-appliance layer.json that
        lives next to this test module in ``self.ba_path``.
        """
        # More output for easier debugging
        unittest.util._MAX_LENGTH = 12345
        self.maxDiff = 12345

        here = os.path.dirname(__file__)
        self.ba_path = os.path.join(here, 'host-test-build-appliance', 'layer.json')
Example 19
Project: leapp-repository   Author: oamg   File: test_forcedefaultboot.py    Apache License 2.0 5 votes vote down vote up
def mocked_exists(case, orig_path_exists):
    """Build an os.path.exists replacement honouring the test case's flags.

    The returned callable reports case.kernel_exists / case.initrd_exists
    for the kernel and initrd target paths and defers to
    *orig_path_exists* for every other path.
    """
    def fake_exists(path):
        if path == TARGET_KERNEL_PATH:
            return case.kernel_exists
        if path == TARGET_INITRD_PATH:
            return case.initrd_exists
        return orig_path_exists(path)
    return fake_exists
Example 20
Project: leapp-repository   Author: oamg   File: test_forcedefaultboot.py    Apache License 2.0 5 votes vote down vote up
def test_force_default_boot_target_scenario(case_result, monkeypatch):
    """End-to-end check of forcedefaultboot.process() for one scenario."""
    case, expected = case_result
    runner = MockedRun(case)
    # Replace every external touch point with case-driven mocks.
    monkeypatch.setattr(api, 'consume', mocked_consume(case))
    monkeypatch.setattr(stdlib, 'run', runner)
    monkeypatch.setattr(os.path, 'exists', mocked_exists(case, os.path.exists))
    monkeypatch.setattr(api, 'current_actor', CurrentActorMocked(case))
    monkeypatch.setattr(api, 'current_logger', mocked_logger())
    forcedefaultboot.process()
    assert expected.grubby_setdefault == runner.called_setdefault
    assert expected.zipl_called == runner.called_zipl
Example 21
Project: leapp-repository   Author: oamg   File: test_modscan.py    Apache License 2.0 5 votes vote down vote up
def _files_get_folder_path(name):
    """Return the path of the tests' ``files/<name>`` data folder."""
    base = os.path.dirname(os.path.dirname(__file__))
    path = os.path.join(base, 'files', name)
    # Guard: the data folder must exist next to the test package.
    assert os.path.isdir(path)
    return path
Example 22
Project: leapp-repository   Author: oamg   File: test_modscan.py    Apache License 2.0 5 votes vote down vote up
def test_created_modules(monkeypatch):
    """Check that modscan creates exactly the expected dracut modules,
    each with a valid, absolute, correctly named module path."""
    monkeypatch.setattr(api, 'get_actor_folder_path', _files_get_folder_path)
    path = os.path.abspath(api.get_actor_folder_path('dracut'))
    required_modules = ['sys-upgrade', 'sys-upgrade-redhat']
    for mod in modscan._create_dracut_modules():
        index = required_modules.index(mod.name)

        # Ensures that this actor only includes known modules
        # NOTE(review): list.index raises ValueError for unknown names,
        # so this assert can never fire; the exception is what would
        # actually fail the test.
        assert index != -1

        required_modules.pop(index)

        # Ensures that the path is valid
        assert mod.module_path
        assert mod.module_path == os.path.abspath(mod.module_path)

        # Ensure it's a directory
        assert os.path.isdir(mod.module_path)

        # Ensure it's located within the actors files path
        assert mod.module_path.startswith(path)

        # Ensure the directory name ends with the module name
        # (basename must be exactly '85<name>' or '90<name>': the sub()
        # leaves nothing behind only for those forms).
        assert os.path.basename(mod.module_path).endswith(mod.name)
        assert not re.sub(r'^(85|90){}$'.format(mod.name), '', os.path.basename(mod.module_path))
    # Every required module must have been produced exactly once.
    assert not required_modules
Example 23
Project: leapp-repository   Author: oamg   File: scanner.py    Apache License 2.0 5 votes vote down vote up
def load_tasks_file(path, logger):
    """Read *path* and return its lines as a deduplicated, stripped list.

    Blank lines and '#' comment lines are dropped.  Returns [] when the
    file is missing or unreadable (the failure is logged, not raised).
    """
    if not os.path.isfile(path):
        return []
    try:
        with open(path, 'r') as f:
            stripped = (line.strip() for line in f.read().split('\n'))
            return list({entry for entry in stripped
                         if entry and not entry.startswith('#')})
    except IOError as e:
        logger.warn('Failed to open %s to load additional transaction data. Error: %s', path, str(e))
    return []
Example 24
Project: Coulomb   Author: DynamoDS   File: extract_sessions_buffered.py    MIT License 5 votes vote down vote up
def flush(outpath, session_maps):
    # Append each buffered session's lines to its per-session gzip file
    # and record which input files have been fully processed.
    #
    # Relies on module globals: outPath, log, existing_sessions_path,
    # sessionIDSet, newSessionIDSet, completedInputListPath,
    # completedInputFiles and completedInputFiles_buffer.
    #
    # NOTE(review): the body reads the global `outPath`, never the
    # `outpath` parameter — confirm whether the parameter is meant to
    # be used (or removed).

    lns_count = 0
    for sessions_lst in session_maps.values():
        lns_count += len(sessions_lst)

    log ("Flushing lines/sessions: " + str(lns_count) + " / " + str(len(session_maps.keys())))

    # Verify that the folders exist
    for session in session_maps.keys():
        # Sessions are sharded into folders named by the id's first 3 chars.
        sessions_folder_path = join(outPath, session[0:3])
        if sessions_folder_path not in existing_sessions_path:
            if not os.path.exists(sessions_folder_path):
                os.makedirs(sessions_folder_path)
            existing_sessions_path.add(sessions_folder_path)

        sessionPath = join(sessions_folder_path, session + ".gz")
        if not os.path.exists(sessionPath):
            newSessionIDSet.add(session)
            

        o = gzip.open(sessionPath, 'a')
        # o = open(join(outPath, session + ".json"), 'a')
        for ln in session_maps[session]:
            o.write(ln)
        o.flush()

        # NOTE(review): this completed-files flush sits inside the
        # per-session loop; the buffer is cleared on the first pass, so
        # later iterations re-open the file and write nothing — it looks
        # like it belongs after the loop.
        f = open(completedInputListPath, 'a')
        for filePath in completedInputFiles_buffer:
            completedInputFiles.add(filePath)
            f.write(filePath + "\n")
        f.flush()
        completedInputFiles_buffer.clear()

    log ("Flushing complete. Total sessions:\t" + str(len(sessionIDSet)) + "\tTotal new sessions:\t" + str(len(newSessionIDSet)))
Example 25
Project: Coulomb   Author: DynamoDS   File: feature_usage_jsons_shuffler.py    MIT License 5 votes vote down vote up
def flush():
    """Append buffered per-feature-version session data to .jsons files.

    Uses module globals: feature_versions_map, out_path, known_files and
    pretty_print_json_output.  The first time an output file is touched
    in this run, any pre-existing file is deleted so output starts fresh.
    The in-memory buffer is cleared once everything is written.
    """
    # Create one file per feature version
    for feature_version, sessions in feature_versions_map.items():
        out_full_path = out_path + "." + feature_version + '.jsons'

        # First touch this run: start from an empty file.
        if out_full_path not in known_files:
            known_files.add(out_full_path)
            if os.path.exists(out_full_path):
                print ("Removing existing file: " + out_full_path)
                os.remove(out_full_path)

        with open(out_full_path, 'a') as f:
            for session_id, features in sessions.items():
                record = {
                    'feature_version' : feature_version,
                    'session_id' : session_id,
                    'features' : features
                }
                if pretty_print_json_output:
                    f.write(json.dumps(record, sort_keys=True, indent=2) + "\n")
                else:
                    f.write(json.dumps(record) + "\n")
            f.flush()
    feature_versions_map.clear()


# Main function 
Example 26
Project: godot-mono-builds   Author: godotengine   File: bcl.py    MIT License 5 votes vote down vote up
def configure_bcl(opts: BclOpts):
    """Run Mono's configure for the BCL profile (once; stamp-guarded)."""
    stamp_file = path_join(opts.configure_dir, '.stamp-bcl-configure')

    # Already configured on a previous run.
    if os.path.isfile(stamp_file):
        return

    # Generate the configure script first if it doesn't exist yet.
    if not os.path.isfile(path_join(opts.mono_source_root, 'configure')):
        runtime.run_autogen(opts)

    build_dir = path_join(opts.configure_dir, 'bcl')
    mkdir_p(build_dir)

    configure_script = path_join(opts.mono_source_root, 'configure')
    run_command(configure_script, args=[
        '--disable-boehm',
        '--disable-btls-lib',
        '--disable-nls',
        '--disable-support-build',
        '--with-mcs-docs=no'
    ], cwd=build_dir, name='configure bcl')

    touch(stamp_file)
Example 27
Project: godot-mono-builds   Author: godotengine   File: bcl.py    MIT License 5 votes vote down vote up
def make_bcl(opts: BclOpts):
    """Build the BCL with make (once; stamp-guarded)."""
    stamp_file = path_join(opts.configure_dir, '.stamp-bcl-make')

    if os.path.isfile(stamp_file):
        return  # already built

    build_dir = path_join(opts.configure_dir, 'bcl')

    make_args = ['-C', build_dir, '-C', 'mono']
    if opts.verbose_make:
        make_args.append('V=1')

    run_command('make', args=make_args, name='make bcl')

    touch(stamp_file)
Example 28
Project: godot-mono-builds   Author: godotengine   File: patch_mono.py    MIT License 5 votes vote down vote up
def main(raw_args):
    """Apply the bundled patches to the Mono source tree.

    Parses --mono-sources (defaulting to $MONO_SOURCE_ROOT when set) and
    applies each patch in files/patches with `patch -N -p1`.
    """
    import cmd_utils
    import os
    import os.path
    from os_utils import get_emsdk_root

    parser = cmd_utils.build_arg_parser(description='Apply patches to the Mono source tree')

    default_help = 'default: %(default)s'

    mono_sources_default = os.environ.get('MONO_SOURCE_ROOT', '')

    if mono_sources_default:
        parser.add_argument('--mono-sources', default=mono_sources_default, help=default_help)
    else:
        parser.add_argument('--mono-sources', required=True)

    args = parser.parse_args(raw_args)

    this_script_dir = os.path.dirname(os.path.realpath(__file__))
    patches_dir = os.path.join(this_script_dir, 'files', 'patches')

    mono_source_root = args.mono_sources

    patches = [
        'fix-mono-android-tkill.diff'
    ]

    from subprocess import Popen
    for patch in patches:
        proc = Popen('bash -c \'patch -N -p1 < %s; exit 0\'' % os.path.join(patches_dir, patch), cwd=mono_source_root, shell=True)
        # BUG FIX: wait for each patch process to finish; the original
        # dropped the Popen handle, so patching raced with whatever ran
        # after main() returned.
        proc.wait()
Example 29
Project: godot-mono-builds   Author: godotengine   File: os_utils.py    MIT License 5 votes vote down vote up
def source(script: str, cwd=None) -> dict:
    """Return the environment produced by bash-sourcing *script*.

    Runs ``bash -c 'source <script>; env -0'`` and parses the
    NUL-separated environment dump into a dict.
    """
    popen_args = {}
    if cwd is not None:
        popen_args['cwd'] = cwd

    import subprocess
    proc = subprocess.Popen('bash -c \'source %s; env -0\'' % script, stdout=subprocess.PIPE, shell=True, **popen_args)
    raw = proc.communicate()[0]

    env = {}
    # env -0 separates entries with NUL; each entry is NAME=VALUE.
    for entry in raw.decode().split('\x00'):
        if entry:
            key, value = entry.split('=', 1)
            env[key] = value
    return env


# Creates the directory if no other file or directory with the same path exists 
Example 30
Project: godot-mono-builds   Author: godotengine   File: os_utils.py    MIT License 5 votes vote down vote up
def mkdir_p(path):
    """Create directory *path* (and parents) if nothing exists there yet."""
    if not os.path.exists(path):
        print('creating directory: ' + path)
        # exist_ok guards against the race where the directory appears
        # between the check above and this call.
        os.makedirs(path, exist_ok=True)


# Remove files and/or directories recursively 
Example 31
Project: godot-mono-builds   Author: godotengine   File: os_utils.py    MIT License 5 votes vote down vote up
def rm_rf(*paths):
    """Recursively remove each given file or directory; missing paths are ignored."""
    from shutil import rmtree
    for target in paths:
        if os.path.isfile(target):
            print('removing file: ' + target)
            os.remove(target)
        elif os.path.isdir(target):
            print('removing directory and its contents: ' + target)
            rmtree(target)
Example 32
Project: godot-mono-builds   Author: godotengine   File: desktop.py    MIT License 5 votes vote down vote up
def get_osxcross_sdk(target, osxcross_bin):
    """Detect which osxcross darwin SDK is installed for *target*.

    Honours the OSXCROSS_SDK environment variable; otherwise probes for
    the 'ar' binary of darwin14, then darwin15.

    :raises BuildError: when no usable SDK is found.
    """
    osxcross_sdk = os.environ.get('OSXCROSS_SDK', 14)

    name_fmt = path_join(osxcross_bin, target + '-apple-darwin%s-%s')

    # 'X not in Y' instead of 'not X in Y' (PEP 8); behavior unchanged.
    if 'OSXCROSS_SDK' not in os.environ and not os.path.isfile(name_fmt % (osxcross_sdk, 'ar')):
        # Default 14 wasn't it, try 15
        osxcross_sdk = 15

    if not os.path.isfile(name_fmt % (osxcross_sdk, 'ar')):
        raise BuildError('Specify a valid osxcross SDK with the environment variable \'OSXCROSS_SDK\'')

    return osxcross_sdk
Example 33
Project: godot-mono-builds   Author: godotengine   File: desktop.py    MIT License 5 votes vote down vote up
def configure(opts: DesktopOpts, product: str, target_platform: str, target: str):
    """Configure a desktop Mono runtime build for the given target."""
    build_env = {}

    setup_desktop_template(build_env, opts, product, target_platform, target)

    # Generate the configure script first if it doesn't exist yet.
    configure_script = path_join(opts.mono_source_root, 'configure')
    if not os.path.isfile(configure_script):
        runtime.run_autogen(opts)

    runtime.run_configure(build_env, opts, product, target)
Example 34
Project: godot-mono-builds   Author: godotengine   File: android.py    MIT License 5 votes vote down vote up
def make_standalone_toolchain(opts: AndroidOpts, target: str, api: str):
    """Create an NDK standalone toolchain for target/api (skip if present)."""
    install_dir = path_join(opts.android_toolchains_prefix, opts.toolchain_name_fmt % (target, api))
    # A 'bin' dir means the toolchain was already generated.
    if os.path.isdir(path_join(install_dir, 'bin')):
        return
    script = path_join(opts.android_ndk_root, 'build', 'tools', 'make_standalone_toolchain.py')
    run_command(script, args=[
        '--verbose',
        '--force',
        '--api=' + api,
        '--arch=' + AndroidTargetTable.archs[target],
        '--install-dir=' + install_dir,
    ], name='make_standalone_toolchain')
Example 35
Project: godot-mono-builds   Author: godotengine   File: wasm.py    MIT License 5 votes vote down vote up
def configure(opts: RuntimeOpts, product: str, target: str):
    """Configure a WebAssembly Mono runtime build for the given target."""
    build_env = {}

    if is_cross(target):
        # Neither cross nor cross-MXE wasm builds are implemented yet.
        if is_cross_mxe(target):
            raise RuntimeError('TODO')
        else:
            raise RuntimeError('TODO')
    else:
        setup_wasm_target_template(build_env, opts, target)

    # Generate the configure script first if it doesn't exist yet.
    if not os.path.isfile(path_join(opts.mono_source_root, 'configure')):
        runtime.run_autogen(opts)

    wasm_run_configure(build_env, opts, product, target, get_emsdk_root())
Example 36
Project: godot-mono-builds   Author: godotengine   File: runtime.py    MIT License 5 votes vote down vote up
def run_autogen(opts: RuntimeOpts):
    """Run Mono's autogen.sh (configure step suppressed) in the source root."""
    env = os.environ.copy()
    env['NOCONFIGURE'] = '1'

    # When glibtoolize isn't on PATH, prepend the user-supplied location.
    if not find_executable('glibtoolize') and 'CUSTOM_GLIBTOOLIZE_PATH' in os.environ:
        env['PATH'] = os.environ['CUSTOM_GLIBTOOLIZE_PATH'] + ':' + env['PATH']

    run_command(os.path.join(opts.mono_source_root, 'autogen.sh'), cwd=opts.mono_source_root, env=env, name='autogen')
Example 37
Project: autofff   Author: ChiefGokhlayeh   File: scanner.py    MIT License 5 votes vote down vote up
def _mine_function_declarations(self, ast: pycparser.c_ast.FileAST) -> list:
        """Collect all function declarations (prototypes) from self.inputFile.

        Walks the top-level nodes of *ast*, keeps Decl nodes whose type is
        FuncDecl and whose coordinate points at the scanner's input file,
        logging each kept function's name, return type and parameters.

        :returns: list of pycparser Decl nodes
        """
        foundFunctions = []

        for elem in ast.ext:
            if isinstance(elem, pycparser.c_ast.Decl) and isinstance(elem.type, pycparser.c_ast.FuncDecl):
                funcName = elem.name
                header = elem.coord.file
                # Only keep declarations that live in the scanned file itself.
                if os.path.normpath(header) == os.path.normpath(self.inputFile):
                    funcDecl = elem.type
                    foundFunctions.append(elem)
                    LOGGER.debug(
                        f"[{len(foundFunctions)}] Function Declaration: {funcName}")
                    LOGGER.debug(f"\tReturn type: {utils.get_type_name(elem)}")
                    # FIX: identity check (`is None`) instead of `== None`.
                    if funcDecl.args is None:
                        LOGGER.debug("\tEmpty parameter list")
                    else:
                        paramList = funcDecl.args.params
                        for param in paramList:
                            if isinstance(param, pycparser.c_ast.EllipsisParam):
                                paramName = '...'
                                # BUG FIX: the original left paramType unset
                                # here, so the debug line below logged the
                                # previous parameter's type (or raised
                                # NameError for a leading ellipsis).
                                paramType = '...'
                            else:
                                paramName = param.name
                                paramType = utils.get_type_name(param)
                            LOGGER.debug(
                                f"\tParameter: {paramName} of Type: {paramType}")
        return foundFunctions
Example 38
Project: autofff   Author: ChiefGokhlayeh   File: scanner.py    MIT License 5 votes vote down vote up
def _read_symbols(self, pathToObj: str) -> SymbolTable:
        """Dump the symbol table of *pathToObj* via objdump and parse it.

        NOTE(review): looks like work-in-progress — `tables` is built but
        never returned, and `fuNMatches` uses an empty pattern; confirm
        the intended behavior before relying on this method.
        """
        path_list = ['objdump', '-t']
        path_list += [pathToObj]

        try:
            pipe = subprocess.Popen(path_list,
                                    stdout=subprocess.PIPE,
                                    universal_newlines=True)
            text = pipe.communicate()[0]
            # One match per object file in the objdump output.
            matches = re.finditer(
                r"(?P<object>.*):\s+file format.*\s+SYMBOL TABLE:\n(?P<symbols>(?:.+(\n|$))*)", text, re.MULTILINE)
            tables = list()
            for match in matches:
                print(match)
                objectFile = match.group('object') or pathToObj
                symbols = match.group('symbols')
                # The *ABS* entry names the source file the object came from.
                symMatch = re.search(
                    r"\*ABS\*\s+[0-9a-fA-F]*\s+(?P<source>.*)", symbols, re.MULTILINE)
                sourceFile = symMatch.group('source')
                fuNMatches = re.finditer(r"", symbols, re.MULTILINE)

                tables.append(SymbolTable(objectFile, None))
                print(objectFile)
                print(sourceFile)
                print(symbols)
        except OSError as e:
            # BUG FIX: the message claimed 'readelf', but this method
            # invokes 'objdump'.
            raise RuntimeError("Unable to invoke 'objdump'.  " +
                               'Make sure its path was passed correctly\n' +
                               ('Original error: %s' % e))
Example 39
Project: invenio-openaire   Author: inveniosoftware   File: conftest.py    MIT License 5 votes vote down vote up
def app(request):
    """Flask application fixture."""
    # Throwaway instance path keeps the sqlite test DB isolated per run.
    tmp_instance = tempfile.mkdtemp()
    application = Flask('testapp', instance_path=tmp_instance)
    application.config.update(
        SQLALCHEMY_DATABASE_URI=os.environ.get(
            'SQLALCHEMY_DATABASE_URI', 'sqlite:///test.db'),
        INDEXER_REPLACE_REFS=True,
        CELERY_ALWAYS_EAGER=True,
        CELERY_RESULT_BACKEND="cache",
        CELERY_CACHE_BACKEND="memory",
        CELERY_EAGER_PROPAGATES_EXCEPTIONS=True,
        JSONSCHEMAS_HOST='inveniosoftware.org',
        OPENAIRE_OAI_LOCAL_SOURCE='invenio_openaire/data/oaire_local.sqlite',
        TESTING=True,
    )

    application.url_map.converters['pid'] = PIDConverter
    application.url_map.converters['pidpath'] = PIDPathConverter

    # Register each required extension on the app, preserving order.
    for extension in (LoginManager, InvenioDB, InvenioIndexer,
                      InvenioRecords, InvenioCelery, InvenioPIDStore,
                      InvenioOpenAIRE, InvenioSearch, InvenioJSONSchemas):
        extension(application)

    with application.app_context():
        yield application

    # Tear down: remove the temporary instance directory.
    shutil.rmtree(tmp_instance)
Example 40
Project: invenio-openaire   Author: inveniosoftware   File: conftest.py    MIT License 5 votes vote down vote up
def sqlite_tmpdb():
    """Yield the path of a temporary sqlite database file.

    The file is created up front, its path handed to the consumer, and
    the file removed again after the consumer is done.
    """
    fd, path = tempfile.mkstemp("_db.sqlite")
    # mkstemp returns an open OS-level descriptor; close it immediately so
    # the fixture does not leak a file descriptor — only the path is needed.
    os.close(fd)

    yield path

    os.remove(path)
Example 41
Project: alfred-yubikey-otp   Author: robertoriv   File: workflow.py    MIT License 5 votes vote down vote up
def __init__(self, filepath, defaults=None):
    """Create new :class:`Settings` object."""
    super(Settings, self).__init__()
    self._filepath = filepath
    self._nosave = False
    self._original = {}
    if not os.path.exists(self._filepath):
        # No settings file on disk yet: seed from the defaults, if given,
        # and persist them as the initial settings.
        if defaults:
            for key, val in defaults.items():
                self[key] = val
            self.save()  # save default settings
    else:
        self._load()
Example 42
Project: alfred-yubikey-otp   Author: robertoriv   File: workflow.py    MIT License 5 votes vote down vote up
def _default_cachedir(self):
    """Alfred 2's default cache directory."""
    caches_root = os.path.expanduser(
        '~/Library/Caches/com.runningwithcrayons.Alfred-2/'
        'Workflow Data/')
    # Each workflow gets its own subdirectory, keyed by bundle ID.
    return os.path.join(caches_root, self.bundleid)
Example 43
Project: alfred-yubikey-otp   Author: robertoriv   File: workflow.py    MIT License 5 votes vote down vote up
def _default_datadir(self):
    """Alfred 2's default data directory."""
    support_root = os.path.expanduser(
        '~/Library/Application Support/Alfred 2/Workflow Data/')
    # Each workflow gets its own subdirectory, keyed by bundle ID.
    return os.path.join(support_root, self.bundleid)
Example 44
Project: alfred-yubikey-otp   Author: robertoriv   File: workflow.py    MIT License 5 votes vote down vote up
def workflowdir(self):
        """Path to workflow's root directory (where ``info.plist`` is).

        The result is computed once and memoized in ``self._workflowdir``.

        :returns: full path to workflow root directory
        :rtype: ``unicode``
        :raises IOError: if ``info.plist`` is not found in any ancestor of
            the candidate directories

        """
        if not self._workflowdir:
            # Try the working directory first, then the directory
            # the library is in. CWD will be the workflow root if
            # a workflow is being run in Alfred
            # NOTE(review): os.getcwdu() is Python 2 only — this module
            # targets Python 2.
            candidates = [
                os.path.abspath(os.getcwdu()),
                os.path.dirname(os.path.abspath(os.path.dirname(__file__)))]

            # climb the directory tree until we find `info.plist`
            for dirpath in candidates:

                # Ensure directory path is Unicode
                dirpath = self.decode(dirpath)

                while True:
                    if os.path.exists(os.path.join(dirpath, 'info.plist')):
                        # Found the workflow root; memoize and stop climbing.
                        self._workflowdir = dirpath
                        break

                    elif dirpath == '/':
                        # no `info.plist` found
                        break

                    # Check the parent directory
                    dirpath = os.path.dirname(dirpath)

                # No need to check other candidates
                if self._workflowdir:
                    break

            if not self._workflowdir:
                raise IOError("'info.plist' not found in directory tree")

        return self._workflowdir
Example 45
Project: alfred-yubikey-otp   Author: robertoriv   File: workflow.py    MIT License 5 votes vote down vote up
def cachefile(self, filename):
    """Return the full path of ``filename`` inside the cache directory.

    The path is the join of the workflow's
    :attr:`cache directory <Workflow.cachedir>` and the given basename.

    :param filename: basename of file
    :type filename: ``unicode``
    :returns: full path to file within cache directory
    :rtype: ``unicode``

    """
    cache_root = self.cachedir
    return os.path.join(cache_root, filename)
Example 46
Project: alfred-yubikey-otp   Author: robertoriv   File: workflow.py    MIT License 5 votes vote down vote up
def datafile(self, filename):
    """Return the full path of ``filename`` inside the data directory.

    The path is the join of the workflow's
    :attr:`data directory <Workflow.datadir>` and the given basename.

    :param filename: basename of file
    :type filename: ``unicode``
    :returns: full path to file within data directory
    :rtype: ``unicode``

    """
    data_root = self.datadir
    return os.path.join(data_root, filename)
Example 47
Project: alfred-yubikey-otp   Author: robertoriv   File: workflow.py    MIT License 5 votes vote down vote up
def workflowfile(self, filename):
    """Return full path to ``filename`` in workflow's root directory.

    :param filename: basename of file
    :type filename: ``unicode``
    :returns: full path to file within the workflow root directory
    :rtype: ``unicode``

    """
    root = self.workflowdir
    return os.path.join(root, filename)
Example 48
Project: alfred-yubikey-otp   Author: robertoriv   File: workflow.py    MIT License 5 votes vote down vote up
def settings_path(self):
    """Path to settings file within workflow's data directory.

    Computed lazily on first access and memoized in
    ``self._settings_path``.

    :returns: path to ``settings.json`` file
    :rtype: ``unicode``

    """
    cached = self._settings_path
    if not cached:
        cached = self.datafile('settings.json')
        self._settings_path = cached
    return cached
Example 49
Project: alfred-yubikey-otp   Author: robertoriv   File: workflow.py    MIT License 5 votes vote down vote up
def cached_data(self, name, data_func=None, max_age=60):
    """Return cached data if younger than ``max_age`` seconds.

    Retrieve data from cache, or re-generate and re-cache it when it is
    stale or missing. A ``max_age`` of 0 accepts cached data of any age.

    :param name: name of datastore
    :param data_func: function to (re-)generate data.
    :type data_func: ``callable``
    :param max_age: maximum age of cached data in seconds
    :type max_age: ``int``
    :returns: cached data, return value of ``data_func`` or ``None``
        if ``data_func`` is not set

    """
    serializer = manager.serializer(self.cache_serializer)
    cache_path = self.cachefile('%s.%s' % (name, self.cache_serializer))
    age = self.cached_data_age(name)

    # max_age == 0 means "any age is acceptable".
    fresh_enough = max_age == 0 or age < max_age
    if fresh_enough and os.path.exists(cache_path):
        with open(cache_path, 'rb') as file_obj:
            self.logger.debug('loading cached data: %s', cache_path)
            return serializer.load(file_obj)

    # Cache miss or stale: regenerate if we have a generator, else bail.
    if not data_func:
        return None

    data = data_func()
    self.cache_data(name, data)
    return data
Example 50
Project: alfred-yubikey-otp   Author: robertoriv   File: workflow.py    MIT License 5 votes vote down vote up
def cached_data_age(self, name):
    """Return age in seconds of cache `name` or 0 if cache doesn't exist.

    :param name: name of datastore
    :type name: ``unicode``
    :returns: age of datastore in seconds
    :rtype: ``int``

    """
    filename = '%s.%s' % (name, self.cache_serializer)
    cache_path = self.cachefile(filename)
    if os.path.exists(cache_path):
        # Age is the elapsed time since the file's last modification.
        return time.time() - os.stat(cache_path).st_mtime
    return 0