Python sys.argv Examples

The following are code examples showing how to use sys.argv. They are taken from open source Python projects.
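
Before diving into the project examples, here is a minimal, hypothetical script (not taken from any of the projects below) illustrating the two patterns that recur throughout: sys.argv[0] is the path used to invoke the script, and sys.argv[1:] holds the command-line arguments.

import sys

def main():
    script_name = sys.argv[0]   # path used to invoke the script
    args = sys.argv[1:]         # everything after the script name
    if not args:
        print('usage: {} WORD...'.format(script_name))
        return 2
    for arg in args:
        print(arg)
    return 0

if __name__ == '__main__':
    sys.exit(main())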

Example 1
Project: fs_image   Author: facebookincubator   File: test_rpm_build.py    MIT License
def test_rpm_build_item(self):
        parent_subvol = find_built_subvol(load_location(
            'fs_image.compiler.items', 'toy-rpmbuild-setup',
        ))
        with TempSubvolumes(sys.argv[0]) as temp_subvolumes:
            assert os.path.isfile(
                parent_subvol.path('/rpmbuild/SOURCES/toy_src_file')
            )
            assert os.path.isfile(
                parent_subvol.path('/rpmbuild/SPECS/specfile.spec')
            )

            subvol = temp_subvolumes.snapshot(parent_subvol, 'rpm_build')
            item = RpmBuildItem(from_target='t', rpmbuild_dir='/rpmbuild')
            RpmBuildItem.get_phase_builder([item], DUMMY_LAYER_OPTS)(subvol)

            self.assertEqual(item.phase_order(), PhaseOrder.RPM_BUILD)
            assert os.path.isfile(
                subvol.path('/rpmbuild/RPMS/toy.rpm')
            ) 
Example 2
Project: unicorn-hat-hd   Author: pimoroni   File: weather-icons.py    MIT License
def weather_icons():
    try:

        if argv[1] == 'loop':

            loop()

        elif argv[1] in os.listdir(folder_path):

            print('Drawing Image: {}'.format(argv[1]))

            img = Image.open(folder_path + argv[1])

            draw_animation(img)
            unicorn.off()

        else:
            help()

    except IndexError:
        help() 
Example 3
Project: fs_image   Author: facebookincubator   File: artifacts_dir.py    MIT License
def find_repo_root(path_in_repo: str) -> str:
    '''
    The caller is responsible for providing a path known to be in the repo.
    We cannot just use __file__, because that will never be in the repo in
    tests running under @mode/opt. For that reason, tests should just pass
    sys.argv[0].

    This is intended to work:
     - under Buck's internal macro interpreter, and
     - using the system python from `facebookexperimental/buckit`.
    '''
    repo_path = os.path.abspath(path_in_repo)
    while True:
        repo_path = os.path.dirname(repo_path)
        if repo_path == '/':
            raise RuntimeError(
                'Could not find .buckconfig in any ancestor of '
                f'{os.path.dirname(os.path.realpath(__file__))}'
            )
        if os.path.exists(os.path.join(repo_path, '.buckconfig')):
            return repo_path
    # Not reached 
Example 4
Project: fs_image   Author: facebookincubator   File: test_dep_graph.py    MIT License
def test_gen_dependency_graph(self):
        dg = DependencyGraph(PATH_TO_ITEM.values(), layer_target='t-72')
        self.assertEqual(
            _fs_root_phases(FilesystemRootItem(from_target='t-72')),
            list(dg.ordered_phases()),
        )
        with TempSubvolumes(sys.argv[0]) as temp_subvolumes:
            subvol = temp_subvolumes.create('subvol')
            self.assertIn(
                tuple(dg.gen_dependency_order_items(
                    PhasesProvideItem(from_target='t', subvol=subvol),
                )),
                {
                    tuple(PATH_TO_ITEM[p] for p in paths) for paths in [
                        # A few orders are valid, don't make the test fragile.
                        ['/a/b/c', '/a/b/c/F', '/a/d/e', '/a/d/e/G'],
                        ['/a/b/c', '/a/d/e', '/a/b/c/F', '/a/d/e/G'],
                        ['/a/b/c', '/a/d/e', '/a/d/e/G', '/a/b/c/F'],
                    ]
                },
            ) 
Example 5
Project: fs_image   Author: facebookincubator   File: test_phases_provide.py    MIT License
def test_phases_provide(self):
        with TempSubvolumes(sys.argv[0]) as temp_subvolumes:
            parent = temp_subvolumes.create('parent')
            # Permit _populate_temp_filesystem to make writes.
            parent.run_as_root([
                'chown', '--no-dereference', f'{os.geteuid()}:{os.getegid()}',
                parent.path(),
            ])
            populate_temp_filesystem(parent.path().decode())
            for create_meta in [False, True]:
                # Check that we properly handle ignoring a /meta if it's present
                if create_meta:
                    parent.run_as_root(['mkdir', parent.path('meta')])
                self._check_item(
                    PhasesProvideItem(from_target='t', subvol=parent),
                    temp_filesystem_provides() | {
                        ProvidesDirectory(path='/'),
                        ProvidesDoNotAccess(path='/meta'),
                    },
                    set(),
                ) 
Example 6
Project: autolims   Author: scottbecker   File: utils.py    MIT License
def initialize_config():
    global TSC_HEADERS, CONFIG_INITIALIZED, ORG_NAME
    
    if CONFIG_INITIALIZED: return
    
    if "--test" in sys.argv:
        auth_file = '../test_mode_auth.json'
    else:
        auth_file = '../auth.json'
    
    auth_file_path = os.path.join(os.path.dirname(__file__), auth_file)
    
    auth_config = json.load(open(auth_file_path))
    TSC_HEADERS = {k:v for k,v in auth_config.items() if k in ["X_User_Email","X_User_Token"]}
    
    ORG_NAME = auth_config['org_name']
    
    CONFIG_INITIALIZED = True

# Correction to Transcriptic-specific dead volumes 
Example 7
Project: mlbv   Author: kmac   File: config.py    GNU General Public License v3.0
def __init__(self, defaults, args):
        script_name = os.path.splitext(os.path.basename(sys.argv[0]))[0]
        self.defaults = defaults
        self.dir = self.__find_config_dir(script_name)
        self.parser = self.__init_configparser(script_name)
        global DEBUG
        DEBUG = self.parser.getboolean('debug', DEBUG) or args.debug
        global VERBOSE
        VERBOSE = self.parser.getboolean('verbose', VERBOSE) or args.verbose
        global VERIFY_SSL
        VERIFY_SSL = self.parser.getboolean('verify_ssl', VERIFY_SSL)
        global UNICODE
        UNICODE = self.parser.getboolean('unicode', UNICODE)
        if DEBUG:
            # Turn on some extras
            global SAVE_PLAYLIST_FILE
            SAVE_PLAYLIST_FILE = True 
Example 8
Project: pyblish-win   Author: pyblish   File: webchecker.py    GNU Lesser General Public License v3.0
def main():
    checkext = CHECKEXT
    verbose = VERBOSE
    maxpage = MAXPAGE
    roundsize = ROUNDSIZE
    dumpfile = DUMPFILE
    restart = 0
    norun = 0

    try:
        opts, args = getopt.getopt(sys.argv[1:], 'Rd:m:nqr:t:vxa')
    except getopt.error, msg:
        sys.stdout = sys.stderr
        print msg
        print __doc__%globals()
        sys.exit(2)

    # The extra_roots variable collects extra roots. 
Example 9
Project: pyblish-win   Author: pyblish   File: google.py    GNU Lesser General Public License v3.0
def main():
    args = sys.argv[1:]
    if not args:
        print "Usage: %s querystring" % sys.argv[0]
        return
    list = []
    for arg in args:
        if '+' in arg:
            arg = arg.replace('+', '%2B')
        if ' ' in arg:
            arg = '"%s"' % arg
        arg = arg.replace(' ', '+')
        list.append(arg)
    s = '+'.join(list)
    url = "http://www.google.com/search?q=%s" % s
    webbrowser.open(url) 
Example 10
Project: pyblish-win   Author: pyblish   File: mkreal.py    GNU Lesser General Public License v3.0
def main():
    sys.stdout = sys.stderr
    progname = os.path.basename(sys.argv[0])
    if progname == '-c': progname = 'mkreal'
    args = sys.argv[1:]
    if not args:
        print 'usage:', progname, 'path ...'
        sys.exit(2)
    status = 0
    for name in args:
        if not os.path.islink(name):
            print progname+':', name+':', 'not a symlink'
            status = 1
        else:
            if os.path.isdir(name):
                mkrealdir(name)
            else:
                mkrealfile(name)
    sys.exit(status) 
Example 11
Project: multi-embedding-cws   Author: wangjksjtu   File: replace_unk.py    MIT License
def main(argc, argv):
  if argc < 4:
    print("Usage:%s <vob> <input> <output>" % (argv[0]))
    sys.exit(1)
  vp = open(argv[1], "r")
  inp = open(argv[2], "r")
  oup = open(argv[3], "w")
  vobsMap = {}
  for line in vp:
    line = line.strip()
    ss = line.split(" ")
    vobsMap[ss[0]] = 1
  while True:
    line = inp.readline()
    if not line:
      break
    line = line.strip()
    if not line:
      continue
    ss = line.split(" ")
    tokens = []
    for s in ss:
      if s in vobsMap:
        tokens.append(s)
      else:
        tokens.append("<UNK>")
    oup.write("%s\n" % (" ".join(tokens)))
  oup.close()
  inp.close()
  vp.close() 
Example 12
Project: fs_image   Author: facebookincubator   File: dump_sendstream.py    MIT License
def main(argv):
    if len(argv) != 1:
        print(__doc__, file=sys.stderr)
        return 1

    for item in parse_send_stream(sys.stdin.buffer):
        print(item) 
Example 13
Project: fs_image   Author: facebookincubator   File: sendstream_has_loop_device.py    MIT License
def main(argv):
    if len(argv) != 1:
        print(__doc__, file=sys.stderr)
        return 1
    for item in parse_send_stream(sys.stdin.buffer):
        if isinstance(item, SendStreamItems.mknod) and (
            os.major(item.dev) == 7 or item.dev == os.makedev(10, 237)
        ):
            # Not printing the path here since it'd be `o123-78-456` or some
            # similarly meaningless temporary emitted by `btrfs send`.
            #
            # To get the path, we would instead apply the send-stream to a
            # `Subvolume`, and use `.inodes()` to look for loops.  The
            # downside of that style of check is that it requires us to
            # process a sequence of send-streams in dependency order (or
            # we'd hit dependency errors), whereas just scanning the
            # send-stream is cheap.
            #
            # There are a couple of other approaches to getting the path:
            #  -  Roll some special logic for resolving what names
            #     send-stream temporaries ultimately map to.  Probably not
            #     worth it.
            #  -  Add the capability to `Subvolume` to apply items even when
            #     the dependency is not there, and instead to record some
            #     kind of placeholder / dependency object in the tree.  The
            #     semantics would take some thought to get right, but the
            #     upside is significant, since we would then be able to
            #     handle filesystem diffs almost as easily as full
            #     filesystems.
            print(os.major(item.dev), os.minor(item.dev))
            return 0
    return 2  # Python would return 1 on raised parse exceptions :) 
Example 14
Project: fs_image   Author: facebookincubator   File: print_gold_demo_sendstreams.py    MIT License
def main(argv):
    if len(argv) != 2:
        print(__doc__, file=sys.stderr)
        return 1

    with open(os.path.join(
        os.path.dirname(__file__), 'gold_demo_sendstreams.pickle',
    ), "rb") as infile:
        sys.stdout.buffer.write(pickle.load(infile)[argv[1]]["sendstream"])
    return 0 
Example 15
Project: fs_image   Author: facebookincubator   File: send_fds_and_run.py    MIT License
def parse_opts(argv):
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    parser.add_argument(
        '--fd', type=int, action='append', default=[],
        help='FDs will be provided to the wrapped process with sequential '
            'numbers starting from 3, in the order they were listed on '
            'the command-line. Repeat to pass multiple FDs.',
    )
    parser.add_argument(
        '--sudo', action='store_true',
        help='Wrap `recv-fds-and-run` with `sudo`, effectively emulating the '
            'behavior of `sudo -C`. See `--sudo-arg` if you need to '
            'pass arguments to `sudo`. Without this option, this CLI is a '
            'very elaborate way of remapping the listed FDs and closing all '
            'others.',
    )
    parser.add_argument(
        '--sudo-arg', action='append', default=[],
        help='Pass this argument to `sudo` on the command-line.'
    )
    parser.add_argument(
        '--no-set-listen-fds', action='store_false', dest='set_listen_fds',
        help='Do not set LISTEN_FDS and LISTEN_PID on the wrapped process. By '
            'default we set these just in case this wraps `systemd-nspawn` -- '
            'that tells it to forward our FDS to the container. If the extra '
            'environment variables are a problem for you, pass this option.',
    )
    parser.add_argument(
        'cmd', nargs='+', help='The command to wrap and supply with FDs.',
    )
    opts = parser.parse_args(argv)
    assert not opts.sudo_arg or opts.sudo, '--sudo-arg requires --sudo'
    return opts 
Example 16
Project: fs_image   Author: facebookincubator   File: recv_fds_and_run.py    MIT License
def parse_opts(argv):
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    parser.add_argument(
        '--unix-sock', required=True,
        help='Connect to the unix socket at this path to receive FDs.',
    )
    parser.add_argument(
        '--num-fds', type=int, required=True,
        help='The number of FDs to inject, from 3 through (3 + NUM_FDS - 1).',
    )
    parser.add_argument(
        '--no-set-listen-fds', action='store_false', dest='set_listen_fds',
        help='Do not set LISTEN_FDS and LISTEN_PID on the wrapped process. By '
            'default we set these just in case this wraps `systemd-nspawn` -- '
            'that tells it to forward our FDS to the container. If the extra '
            'environment variables are a problem for you, pass this option.',
    )
    parser.add_argument(
        'cmd', nargs='+', help='The command to wrap and supply with FDs.',
    )
    return parser.parse_args(argv)


# This cannot be tested as a library since it `exec`s and rewrites the file
# descriptor table.  However, `test_send_fds_and_run.py` covers this fully. 
Example 17
Project: fs_image   Author: facebookincubator   File: snapshot_repos.py    MIT License
def snapshot_repos_from_args(argv: List[str]):
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    add_standard_args(parser)
    parser.add_argument(
        '--dnf-conf', type=Path.from_argparse,
        help='Snapshot this `dnf.conf`, and all the repos that it lists. '
            'Can be set together with `--yum-conf`, in which case repos from '
            'both configs must be identical. At least one of these `--*-conf` '
            'options is required.',
    )
    parser.add_argument(
        '--yum-conf', type=Path.from_argparse,
        help='Snapshot this `yum.conf`; see help for `--dnf-conf`',
    )
    args = parser.parse_args(argv)

    init_logging(debug=args.debug)

    with populate_temp_dir_and_rename(args.snapshot_dir, overwrite=True) as td:
        snapshot_repos(
            dest=td,
            yum_conf_content=args.yum_conf.read_text()
                if args.yum_conf else None,
            dnf_conf_content=args.dnf_conf.read_text()
                if args.dnf_conf else None,
            repo_db_ctx=RepoDBContext(args.db, args.db.SQL_DIALECT),
            storage=args.storage,
            rpm_shard=args.rpm_shard,
            gpg_key_whitelist_dir=args.gpg_key_whitelist_dir,
            retries=args.retries,
        ) 
Example 18
Project: fs_image   Author: facebookincubator   File: cli.py    MIT License
def main(argv, from_file: BytesIO, to_file: BytesIO):
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    Storage.add_argparse_arg(
        parser, '--storage', required=True,
        help='JSON blob for creating a Storage instance.',
    )
    parser.add_argument('--debug', action='store_true', help='Log more?')
    subparsers = parser.add_subparsers(help='Sub-commands have help.')

    parser_get = subparsers.add_parser('get', help='Download blob to stdout')
    parser_get.add_argument('storage_id', help='String of the form KEY:ID')
    parser_get.set_defaults(to_file=to_file)
    parser_get.set_defaults(func=get)

    parser_put = subparsers.add_parser(
        'put', help='Write a blob from stdin, print its ID to stdout',
    )
    parser_put.set_defaults(from_file=from_file)
    parser_put.set_defaults(to_file=to_file)  # For the storage ID
    parser_put.set_defaults(func=put)

    args = parser.parse_args(argv)
    init_logging(debug=args.debug)

    args.func(args) 
Example 19
Project: fs_image   Author: facebookincubator   File: test_nspawn_in_subvol.py    MIT License
def test_bind_repo(self):
        self._nspawn_in('host', [
            '--bind-repo-ro', '--',
            'grep', 'supercalifragilisticexpialidocious',
            os.path.join(
                os.path.realpath(find_repo_root(sys.argv[0])),
                'fs_image/tests',
                os.path.basename(__file__),
            ),
        ]) 
Example 20
Project: fs_image   Author: facebookincubator   File: test_fs_utils.py    MIT License
def test_path_from_argparse(self):
        res = subprocess.run([
            sys.executable, '-c', 'import sys;print(repr(sys.argv[1]))',
            _BAD_UTF,
        ], stdout=subprocess.PIPE)
        # Demangle non-UTF bytes in the same way that `sys.argv` mangles them.
        self.assertEqual(_BAD_UTF, Path.from_argparse(
            ast.literal_eval(res.stdout.rstrip(b'\n').decode())
        )) 
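
Note: the test above works because, on POSIX, CPython decodes sys.argv using the filesystem encoding with the surrogateescape error handler, so undecodable bytes become lone surrogate code points that can be encoded back to the original bytes. A minimal sketch of that round trip (not part of the project above; `bad` is a made-up stand-in for _BAD_UTF, and the exact surrogate values assume a UTF-8 locale):

import os

bad = b'\xff\xfe-data'              # not valid UTF-8
mangled = os.fsdecode(bad)          # what sys.argv would hold, e.g. '\udcff\udcfe-data' on a UTF-8 locale
assert os.fsencode(mangled) == bad  # encoding back recovers the original bytes
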
Example 21
Project: fs_image   Author: facebookincubator   File: test_package_image.py    MIT License
def setUp(self):
        # More output for easier debugging
        unittest.util._MAX_LENGTH = 12345
        self.maxDiff = 12345

        self.subvolumes_dir = subvolumes_dir(sys.argv[0])
        # Works in @mode/opt since the files of interest are baked into the XAR
        self.my_dir = os.path.dirname(__file__) 
Example 22
Project: fs_image   Author: facebookincubator   File: subvolume_garbage_collector.py    MIT License
def parse_args(argv):
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    parser.add_argument(
        '--refcounts-dir', required=True,
        help='We will create a hardlink to `--new-subvolume-json` in this '
            'directory. For that reason, this needs to be on same device, '
            'and thus cannot be under `--subvolumes-dir`',
    )
    parser.add_argument(
        '--subvolumes-dir', required=True,
        help='A directory on a btrfs volume, where all the subvolume wrapper '
            'directories reside.',
    )
    parser.add_argument(
        '--new-subvolume-wrapper-dir',
        help='Subvolumes live inside wrapper directories, following the '
            'convention <name>:<version>/<name>. This parameter should '
            'consist just of the <name>:<version> part.',
    )
    parser.add_argument(
        '--new-subvolume-json',
        help='We will delete any file at this path, then create an empty one, '
            'and hard-link into `--refcounts-dir` for refcounting purposes. '
            'The image compiler will then write data into this file.',
    )
    return parser.parse_args(argv) 
Example 23
Project: fs_image   Author: facebookincubator   File: find_built_subvol.py    MIT License
def volume_dir(path_in_repo=None):
    if path_in_repo is None:
        # This is the right default for unit tests and other things that get
        # run directly from the repo's `buck-out`.
        path_in_repo = sys.argv[0]
    lots_of_bytes = 1e8  # Our loopback is sparse, so just make it huge.
    return get_volume_for_current_repo(
        lots_of_bytes, ensure_per_repo_artifacts_dir_exists(path_in_repo),
    ) 
Example 24
Project: fs_image   Author: facebookincubator   File: test_dep_graph.py    MIT License
def test_item_predecessors(self):
        dg = DependencyGraph(PATH_TO_ITEM.values(), layer_target='t-34')
        self.assertEqual(
            _fs_root_phases(FilesystemRootItem(from_target='t-34')),
            list(dg.ordered_phases()),
        )
        with TempSubvolumes(sys.argv[0]) as temp_subvolumes:
            subvol = temp_subvolumes.create('subvol')
            phases_provide = PhasesProvideItem(from_target='t', subvol=subvol)
            ns = dg._prep_item_predecessors(phases_provide)
        path_to_item = {'/': phases_provide, **PATH_TO_ITEM}
        self.assertEqual(ns.item_to_predecessors, {
            path_to_item[k]: {path_to_item[v] for v in vs} for k, vs in {
                '/a/b/c': {'/'},
                '/a/d/e': {'/a/b/c'},
                '/a/b/c/F': {'/a/b/c'},
                '/a/d/e/G': {'/a/d/e'},
            }.items()
        })
        self.assertEqual(ns.predecessor_to_items, {
            path_to_item[k]: {path_to_item[v] for v in vs} for k, vs in {
                '/': {'/a/b/c'},
                '/a/b/c': {'/a/d/e', '/a/b/c/F'},
                '/a/b/c/F': set(),
                '/a/d/e': {'/a/d/e/G'},
                '/a/d/e/G': set(),
            }.items()
        })
        self.assertEqual(ns.items_without_predecessors, {path_to_item['/']}) 
Example 25
Project: fs_image   Author: facebookincubator   File: test_dep_graph.py    MIT License
def test_cycle_detection(self):

        def requires_provides_directory_class(requires_dir, provides_dir):

            class RequiresProvidesDirectory(metaclass=ImageItem):
                def requires(self):
                    yield require_directory(requires_dir)

                def provides(self):
                    yield ProvidesDirectory(path=provides_dir)

            return RequiresProvidesDirectory

        # `dg_ok`: dependency-sorting will work without a cycle
        first = FilesystemRootItem(from_target='')
        second = requires_provides_directory_class('/', 'a')(from_target='')
        third = MakeDirsItem(from_target='', into_dir='a', path_to_make='b/c')
        dg_ok = DependencyGraph([second, first, third], layer_target='t')
        self.assertEqual(_fs_root_phases(first), list(dg_ok.ordered_phases()))

        # `dg_bad`: changes `second` to get a cycle
        dg_bad = DependencyGraph([
            requires_provides_directory_class('a/b', 'a')(from_target=''),
            first, third,
        ], layer_target='t')
        self.assertEqual(_fs_root_phases(first), list(dg_bad.ordered_phases()))

        with TempSubvolumes(sys.argv[0]) as temp_subvolumes:
            subvol = temp_subvolumes.create('subvol')
            provides_root = PhasesProvideItem(from_target='t', subvol=subvol)
            self.assertEqual(
                [second, third],
                list(dg_ok.gen_dependency_order_items(provides_root)),
            )
            with self.assertRaisesRegex(AssertionError, '^Cycle in '):
                list(dg_bad.gen_dependency_order_items(provides_root)) 
Example 26
Project: fs_image   Author: facebookincubator   File: test_make_dirs.py    MIT License
def test_make_dirs_command(self):
        with TempSubvolumes(sys.argv[0]) as temp_subvolumes:
            subvol = temp_subvolumes.create('tar-sv')
            subvol.run_as_root(['mkdir', subvol.path('d')])

            MakeDirsItem(
                from_target='t', path_to_make='/a/b/', into_dir='/d',
                user_group='77:88', mode='u+rx',
            ).build(subvol, DUMMY_LAYER_OPTS)
            self.assertEqual(['(Dir)', {
                'd': ['(Dir)', {
                    'a': ['(Dir m500 o77:88)', {
                        'b': ['(Dir m500 o77:88)', {}],
                    }],
                }],
            }], render_subvol(subvol))

            # The "should never happen" cases -- since we have build-time
            # checks, for simplicity/speed, our runtime clobbers permissions
            # of preexisting directories, and quietly creates non-existent
            # ones with default permissions.
            MakeDirsItem(
                from_target='t', path_to_make='a', into_dir='/no_dir',
                user_group='4:0'
            ).build(subvol, DUMMY_LAYER_OPTS)
            MakeDirsItem(
                from_target='t', path_to_make='a/new', into_dir='/d',
                user_group='5:0'
            ).build(subvol, DUMMY_LAYER_OPTS)
            self.assertEqual(['(Dir)', {
                'd': ['(Dir)', {
                    # permissions overwritten for this whole tree
                    'a': ['(Dir o5:0)', {
                        'b': ['(Dir o5:0)', {}], 'new': ['(Dir o5:0)', {}],
                    }],
                }],
                'no_dir': ['(Dir)', {  # default permissions!
                    'a': ['(Dir o4:0)', {}],
                }],
            }], render_subvol(subvol)) 
Example 27
Project: fs_image   Author: facebookincubator   File: test_rpm_action.py    MIT License
def _check_cheese_removal(self, local_rpm_path: Path):
        parent_subvol = self._subvol_from_resource(
            'fs_image.compiler.items', 'test-with-one-local-rpm',
        )
        with TempSubvolumes(sys.argv[0]) as temp_subvolumes:
            # ensure cheese2 is installed in the parent from rpm-test-cheese-2-1
            assert os.path.isfile(
                parent_subvol.path('/usr/share/rpm_test/cheese2.txt')
            )
            subvol = temp_subvolumes.snapshot(parent_subvol, 'remove_cheese')
            RpmActionItem.get_phase_builder(
                [RpmActionItem(
                    from_target='t',
                    source=local_rpm_path,
                    action=RpmAction.remove_if_exists,
                )],
                self._opts()._replace(
                    build_appliance=self._subvol_from_resource(
                        'fs_image.compiler.items', 'host-test-build-appliance',
                    ).path(),
                ),
            )(subvol)
            subvol.run_as_root([
                'rm', '-rf',
                subvol.path('dev'),
                subvol.path('etc'),
                subvol.path('meta'),
                subvol.path('var'),
            ])
            self.assertEqual(['(Dir)', {
                # No more `usr/share/rpm_test/cheese2.txt` here.
            }], render_subvol(subvol)) 
Example 28
Project: fs_image   Author: facebookincubator   File: test_make_subvol.py    MIT License
def test_filesystem_root(self):
        item = FilesystemRootItem(from_target='t')
        self.assertEqual(PhaseOrder.MAKE_SUBVOL, item.phase_order())
        with TempSubvolumes(sys.argv[0]) as temp_subvolumes:
            subvol = temp_subvolumes.caller_will_create('fs-root')
            item.get_phase_builder([item], DUMMY_LAYER_OPTS)(subvol)
            self.assertEqual(
                ['(Dir)', {'meta': ['(Dir)', {'private': ['(Dir)', {
                    'opts': ['(Dir)', {
                        'artifacts_may_require_repo': ['(File d2)'],
                    }],
                }]}]}], render_subvol(subvol),
            ) 
Example 29
Project: fs_image   Author: facebookincubator   File: test_make_subvol.py    MIT License
def test_receive_sendstream(self):
        item = ReceiveSendstreamItem(
            from_target='t',
            source=Path(__file__).dirname() / 'create_ops.sendstream',
        )
        self.assertEqual(PhaseOrder.MAKE_SUBVOL, item.phase_order())
        with TempSubvolumes(sys.argv[0]) as temp_subvolumes:
            new_subvol_name = 'differs_from_create_ops'
            subvol = temp_subvolumes.caller_will_create(new_subvol_name)
            item.get_phase_builder([item], DUMMY_LAYER_OPTS)(subvol)
            self.assertEqual(
                render_demo_subvols(create_ops=new_subvol_name),
                render_sendstream(subvol.mark_readonly_and_get_sendstream()),
            ) 
Example 30
Project: fs_image   Author: facebookincubator   File: test_install_file.py    MIT License
def test_install_file_command(self):
        with TempSubvolumes(sys.argv[0]) as temp_subvolumes, \
                tempfile.NamedTemporaryFile() as empty_tf:
            subvol = temp_subvolumes.create('tar-sv')
            subvol.run_as_root(['mkdir', subvol.path('d')])

            _install_file_item(
                from_target='t', source={'source': empty_tf.name},
                dest='/d/empty',
            ).build(subvol, DUMMY_LAYER_OPTS)
            self.assertEqual(
                ['(Dir)', {'d': ['(Dir)', {'empty': ['(File m444)']}]}],
                render_subvol(subvol),
            )

            # Fail to write to a nonexistent dir
            with self.assertRaises(subprocess.CalledProcessError):
                _install_file_item(
                    from_target='t', source={'source': empty_tf.name},
                    dest='/no_dir/empty',
                ).build(subvol, DUMMY_LAYER_OPTS)

            # Running a second copy to the same destination. This just
            # overwrites the previous file, because we have a build-time
            # check for this, and a run-time check would add overhead.
            _install_file_item(
                from_target='t', source={'source': empty_tf.name},
                dest='/d/empty',
                # A non-default mode & owner shows that the file was
                # overwritten, and also exercises HasStatOptions.
                mode='u+rw', user_group='12:34',
            ).build(subvol, DUMMY_LAYER_OPTS)
            self.assertEqual(
                ['(Dir)', {'d': ['(Dir)', {'empty': ['(File m600 o12:34)']}]}],
                render_subvol(subvol),
            ) 
Example 31
Project: odorik   Author: nijel   File: test_main.py    GNU General Public License v3.0
def test_argv(self):
        """Test sys.argv processing."""
        backup = sys.argv
        try:
            sys.argv = ['odorik', 'version']
            output = execute(None)
            self.assertIn('version: {0}'.format(odorik.__version__), output)
        finally:
            sys.argv = backup 
Example 32
Project: mutatest   Author: EvanKepner   File: cli.py    MIT License
def cli_main() -> None:
    """Entry point to run CLI args and execute main function."""
    # Run a quick check at the beginning in case of later OS errors.
    cache.check_cache_invalidation_mode()
    args = cli_args(sys.argv[1:])
    main(args) 
Example 33
Project: incubator-spot   Author: apache   File: collector.py    Apache License 2.0
def _parse_args():
    '''
        Parse command-line options found in 'args' (default: sys.argv[1:]).

    :returns: On success, a namedtuple of Values instances.
    '''
    parser   = ArgumentParser('Distributed Collector Daemon of Apache Spot', epilog='END')
    required = parser.add_argument_group('mandatory arguments')

    # .................................state optional arguments
    parser.add_argument('-c', '--config-file',
        default='ingest_conf.json',
        type=file,
        help='path of configuration file',
        metavar='')

    parser.add_argument('-l', '--log-level',
        default='INFO',
        help='determine the level of the logger',
        metavar='')

    parser.add_argument('--skip-conversion',
        action='store_true',
        default=False,
        help='no transformation will be applied to the data; useful for importing CSV files')

    # .................................state mandatory arguments
    required.add_argument('--topic',
        required=True,
        help='name of topic where the messages will be published')

    required.add_argument('-t', '--type',
        choices=pipelines.__all__,
        required=True,
        help='type of data that will be collected')

    return parser.parse_args() 
Example 34
Project: incubator-spot   Author: apache   File: spot_conf_migration.py    Apache License 2.0
def main():

    if len(sys.argv[1:]) < 2:
        print "Please provide paths to: old_spot.conf , new_spot.conf"
        sys.exit(1)
        
    old_conf_file = sys.argv[1]
    new_conf_file = sys.argv[2]

    log = util.get_logger('SPOT.MIGRATE.CONF')

    old_path = os.path.dirname(os.path.realpath(old_conf_file))

    # create backup for the current configuration file.
    log.info("Create a backup of /etc/spot.conf before changing it") 
    util.execute_cmd('sudo cp {0} {1}/spot.conf.bkp_0_9'.format(old_conf_file, old_path),log)

    # create configuration objects.
    old_config = ConfigParser.ConfigParser()
    current_config = ConfigParser.ConfigParser()
    new_config = ConfigParser.ConfigParser()
    
    old_config.readfp(SecHead(open(old_conf_file)))
    current_config.readfp(SecHead(open(new_conf_file)))

    # create the new conf file.
    new_config.add_section('conf')
    for (k,v) in current_config.items("conf"):      
        if old_config.has_option('conf',k):
            new_config.set('conf',k, old_config.get('conf',k))
        else:
            new_config.set('conf',k,v)    
   
    new_path = os.path.dirname(os.path.realpath(new_conf_file)) 
    updated_conf_file = '{0}/spot.conf.new'.format(new_path)
    log.info("Generating merged configuration file in {0}".format(updated_conf_file)) 
    formatter(updated_conf_file,new_config)

    log.info("Updating original spot.conf with new and migrated variables and values") 
    util.execute_cmd('sudo cp {0} {1}/spot.conf'.format(updated_conf_file, old_path),log)
    util.execute_cmd('sudo chmod 0755 {0}/spot.conf'.format(old_path),log) 
Example 35
Project: clikit   Author: sdispater   File: argv_args.py    MIT License
def __init__(self, argv=None):  # type: (Optional[List[str]]) -> None
        if argv is None:
            argv = list(sys.argv)

        argv = argv[:]
        self._script_name = argv.pop(0)
        self._tokens = argv
        self._option_tokens = list(
            itertools.takewhile(lambda arg: arg != "--", self.tokens)
        ) 
Example 36
Project: clikit   Author: sdispater   File: test_argv_args.py    MIT License
def argv():
    original_argv = sys.argv

    yield

    sys.argv = original_argv 
Example 37
Project: clikit   Author: sdispater   File: test_argv_args.py    MIT License
def test_create(argv):
    sys.argv = ("console", "server", "add", "--port", "80", "localhost")
    args = ArgvArgs()

    assert args.script_name == "console"
    assert ["server", "add", "--port", "80", "localhost"] == args.tokens 
Example 38
Project: clikit   Author: sdispater   File: test_argv_args.py    MIT License
def test_create_with_custom_tokens(argv):
    sys.argv = ("console", "server", "add", "localhost")
    args = ArgvArgs(["console", "server", "add", "--port", "80", "localhost"])

    assert args.script_name == "console"
    assert ["server", "add", "--port", "80", "localhost"] == args.tokens 
Example 39
Project: payroll   Author: andela-sjames   File: manage.py    MIT License
def main():
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'payroll.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv) 
Example 40
Project: mlbv   Author: kmac   File: util.py    GNU General Public License v3.0
def __init__(self, msg='', include_doc=False):
        if msg is None:
            msg = ''
        self.msg = msg
        if include_doc:
            self.msg += '\n' + __doc__ % (sys.argv[0], ) 
Example 41
Project: mlbv   Author: kmac   File: util.py    GNU General Public License v3.0
def get_tempdir():
    """Create a directory for ourselves in the system tempdir."""
    tempdir = config.CONFIG.parser.get('tempdir', None)
    if tempdir:
        if '<timestamp>' in tempdir:
            tempdir = tempdir.replace('<timestamp>', time.strftime('%Y-%m-%d-%H%M'))
    else:
        script_name = os.path.splitext(os.path.basename(sys.argv[0]))[0]
        tempdir = os.path.join(tempfile.gettempdir(), script_name)
    if not os.path.exists(tempdir):
        os.makedirs(tempdir)
    return tempdir 
Example 42
Project: mlbv   Author: kmac   File: config.py    GNU General Public License v3.0
def generate_config(username=None, password=None, servicename="MLB.tv"):
        """Creates config file from template + user prompts."""
        script_name = os.path.splitext(os.path.basename(sys.argv[0]))[0]
        # use the script name minus any extension for the config directory
        config_dir = None
        config_dir = os.path.join(Config.config_dir_roots[1], script_name)
        if not os.path.exists(config_dir):
            print("Creating config directory: {}".format(config_dir))
            os.makedirs(config_dir)
        config_file = os.path.join(config_dir, 'config')
        if os.path.exists(config_file):
            print("Aborting: The config file already exists at '{}'".format(config_file))
            return False

        # copy the template config file
        print("Generating basic config file at: {}".format(config_dir))
        current_dir = os.path.dirname(inspect.getfile(inspect.currentframe()))
        template_config_path = os.path.abspath(os.path.join(current_dir, '../../..', 'config.template'))
        if not os.path.exists(template_config_path):
            print("Could not find template config file [expected at: {}]".format(template_config_path))
            return False

        if username is None:
            username = input('Enter {} username: '.format(servicename))
        if password is None:
            password = input('Enter {} password: '.format(servicename))

        with open(template_config_path, 'r') as infile, open(config_file, 'w') as outfile:
            for line in infile:
                if line.startswith('# username='):
                    outfile.write("username={}\n".format(username))
                elif line.startswith('# password='):
                    outfile.write("password={}\n".format(password))
                else:
                    outfile.write(line)
        print("Finished creating config file: {}".format(config_file))
        print("You may want to edit it now to set up favourites, etc.")
        return True 
Example 43
Project: alfred-yubikey-otp   Author: robertoriv   File: workflow.py    MIT License
def args(self):
        """Return command line args as normalised unicode.

        Args are decoded and normalised via :meth:`~Workflow.decode`.

        The encoding and normalisation are the ``input_encoding`` and
        ``normalization`` arguments passed to :class:`Workflow` (``UTF-8``
        and ``NFC`` are the defaults).

        If :class:`Workflow` is called with ``capture_args=True``
        (the default), :class:`Workflow` will look for certain
        ``workflow:*`` args and, if found, perform the corresponding
        actions and exit the workflow.

        See :ref:`Magic arguments <magic-arguments>` for details.

        """
        msg = None
        args = [self.decode(arg) for arg in sys.argv[1:]]

        # Handle magic args
        if len(args) and self._capture_args:
            for name in self.magic_arguments:
                key = '{0}{1}'.format(self.magic_prefix, name)
                if key in args:
                    msg = self.magic_arguments[name]()

            if msg:
                self.logger.debug(msg)
                if not sys.stdout.isatty():  # Show message in Alfred
                    self.add_item(msg, valid=False, icon=ICON_INFO)
                    self.send_feedback()
                sys.exit(0)
        return args 
Example 44
Project: apm-python-agent-principle   Author: mozillazg   File: agent.py    MIT License
def main():
    args = sys.argv[1:]
    os.environ['PYTHONPATH'] = boot_dir
    # Execute the python program command that follows (the remaining args)
    # sys.executable is the absolute path of the python interpreter, i.e. ``which python``
    # >>> sys.executable
    # '/usr/local/var/pyenv/versions/3.5.1/bin/python3.5'
    os.execl(sys.executable, sys.executable, *args) 
Example 45
Project: phrydy   Author: Josef-Friedrich   File: versioneer.py    MIT License
def get_root():
    """Get the project root directory.

    We require that all commands are run from the project root, i.e. the
    directory that contains setup.py, setup.cfg, and versioneer.py .
    """
    root = os.path.realpath(os.path.abspath(os.getcwd()))
    setup_py = os.path.join(root, "setup.py")
    versioneer_py = os.path.join(root, "versioneer.py")
    if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
        # allow 'python path/to/setup.py COMMAND'
        root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0])))
        setup_py = os.path.join(root, "setup.py")
        versioneer_py = os.path.join(root, "versioneer.py")
    if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
        err = ("Versioneer was unable to run the project root directory. "
               "Versioneer requires setup.py to be executed from "
               "its immediate directory (like 'python setup.py COMMAND'), "
               "or in a way that lets it use sys.argv[0] to find the root "
               "(like 'python path/to/setup.py COMMAND').")
        raise VersioneerBadRootError(err)
    try:
        # Certain runtime workflows (setup.py install/develop in a setuptools
        # tree) execute all dependencies in a single python process, so
        # "versioneer" may be imported multiple times, and python's shared
        # module-import table will cache the first one. So we can't use
        # os.path.dirname(__file__), as that will find whichever
        # versioneer.py was first imported, even in later projects.
        me = os.path.realpath(os.path.abspath(__file__))
        me_dir = os.path.normcase(os.path.splitext(me)[0])
        vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0])
        if me_dir != vsr_dir:
            print("Warning: build in %s is using versioneer.py from %s"
                  % (os.path.dirname(me), versioneer_py))
    except NameError:
        pass
    return root 
Example 46
Project: IS2Proyecto19Grupo7   Author: Gilberto-Martinez   File: manage.py    GNU General Public License v3.0
def main():
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'inge2.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv) 
Example 47
Project: pyblish-win   Author: pyblish   File: wcgui.py    GNU Lesser General Public License v3.0
def main():
    try:
        opts, args = getopt.getopt(sys.argv[1:], 't:m:qva')
    except getopt.error, msg:
        sys.stdout = sys.stderr
        print msg
        print __doc__%vars(webchecker)
        sys.exit(2) 
Example 48
Project: pyblish-win   Author: pyblish   File: tktools.py    GNU Lesser General Public License v3.0
def test():
    """Test make_text_box(), make_form_entry(), flatten(), boolean()."""
    import sys
    root = Tk()
    entry, eframe = make_form_entry(root, 'Boolean:')
    text, tframe = make_text_box(root)
    def enter(event, entry=entry, text=text):
        s = boolean(entry.get()) and '\nyes' or '\nno'
        text.insert('end', s)
    entry.bind('<Return>', enter)
    entry.insert(END, flatten(sys.argv))
    root.mainloop() 
Example 49
Project: pyblish-win   Author: pyblish   File: websucker.py    GNU Lesser General Public License v3.0
def main():
    verbose = webchecker.VERBOSE
    try:
        opts, args = getopt.getopt(sys.argv[1:], "qv")
    except getopt.error, msg:
        print msg
        print "usage:", sys.argv[0], "[-qv] ... [rooturl] ..."
        return 2 
Example 50
Project: pyblish-win   Author: pyblish   File: Main.py    GNU Lesser General Public License v3.0
def main():
    try:
        opts, args = getopt.getopt(
            sys.argv[1:],
            'hd:i:Xv',
            ['database=', 'initfile=', 'ignore', 'help', 'version'])
    except getopt.error, msg:
        usage(1, msg) 
Example 51
Project: pyblish-win   Author: pyblish   File: pindent.py    GNU Lesser General Public License v3.0
def test():
    import getopt
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'cdrs:t:e')
    except getopt.error, msg:
        sys.stderr.write('Error: %s\n' % msg)
        sys.stderr.write(usage)
        sys.exit(2)
    # end try 
Example 52
Project: pyblish-win   Author: pyblish   File: ftpmirror.py    GNU Lesser General Public License v3.0
def main():
    global verbose, interactive, mac, rmok, nologin
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'a:bil:mnp:qrs:v')
    except getopt.error, msg:
        usage(msg) 
Example 53
Project: pyblish-win   Author: pyblish   File: logmerge.py    GNU Lesser General Public License v3.0
def main():
    """Main program"""
    truncate_last = 0
    reverse = 0
    branch = None
    opts, args = getopt.getopt(sys.argv[1:], "trb:h")
    for o, a in opts:
        if o == '-t':
            truncate_last = 1
        elif o == '-r':
            reverse = 1
        elif o == '-b':
            branch = a
        elif o == '-h':
            print __doc__
            sys.exit(0)
    database = []
    while 1:
        chunk = read_chunk(sys.stdin)
        if not chunk:
            break
        records = digest_chunk(chunk, branch)
        if truncate_last:
            del records[-1]
        database[len(database):] = records
    database.sort()
    if not reverse:
        database.reverse()
    format_output(database) 
Example 54
Project: pyblish-win   Author: pyblish   File: treesync.py    GNU Lesser General Public License v3.0
def main():
    global always_no, always_yes
    global create_directories, write_master, write_slave
    opts, args = getopt.getopt(sys.argv[1:], "nym:s:d:f:a:")
    for o, a in opts:
        if o == '-y':
            default_answer = "yes"
        if o == '-n':
            default_answer = "no"
        if o == '-s':
            write_slave = a
        if o == '-m':
            write_master = a
        if o == '-d':
            create_directories = a
        if o == '-f':
            create_files = a
        if o == '-a':
            create_files = create_directories = write_slave = write_master = a
    try:
        [slave, master] = args
    except ValueError:
        print "usage: python", sys.argv[0] or "treesync.py",
        print "[-n] [-y] [-m y|n|a] [-s y|n|a] [-d y|n|a] [-f n|y|a]",
        print "slavedir masterdir"
        return
    process(slave, master) 
Example 55
Project: pyblish-win   Author: pyblish   File: suff.py    GNU Lesser General Public License v3.0
def main():
    files = sys.argv[1:]
    suffixes = {}
    for filename in files:
        suff = getsuffix(filename)
        if not suffixes.has_key(suff):
            suffixes[suff] = []
        suffixes[suff].append(filename)
    keys = suffixes.keys()
    keys.sort()
    for suff in keys:
        print repr(suff), len(suffixes[suff]) 
Example 56
Project: pyblish-win   Author: pyblish   File: fixdiv.py    GNU Lesser General Public License v3.0
def main():
    try:
        opts, args = getopt.getopt(sys.argv[1:], "hm")
    except getopt.error, msg:
        usage(msg)
        return 2 
Example 57
Project: pyblish-win   Author: pyblish   File: fixdiv.py    GNU Lesser General Public License v3.0
def usage(msg):
    sys.stderr.write("%s: %s\n" % (sys.argv[0], msg))
    sys.stderr.write("Usage: %s [-m] warnings\n" % sys.argv[0])
    sys.stderr.write("Try `%s -h' for more information.\n" % sys.argv[0]) 
Example 58
Project: pyblish-win   Author: pyblish   File: lfcr.py    GNU Lesser General Public License v3.0
def main():
    for filename in sys.argv[1:]:
        if os.path.isdir(filename):
            print filename, "Directory!"
            continue
        data = open(filename, "rb").read()
        if '\0' in data:
            print filename, "Binary!"
            continue
        newdata = re.sub("\r?\n", "\r\n", data)
        if newdata != data:
            print filename
            f = open(filename, "wb")
            f.write(newdata)
            f.close() 
Example 59
Project: pyblish-win   Author: pyblish   File: ptags.py    GNU Lesser General Public License v3.0
def main():
    args = sys.argv[1:]
    for filename in args:
        treat_file(filename)
    if tags:
        fp = open('tags', 'w')
        tags.sort()
        for s in tags: fp.write(s) 
Example 60
Project: pyblish-win   Author: pyblish   File: fixheader.py    GNU Lesser General Public License v3.0
def main():
    args = sys.argv[1:]
    for filename in args:
        process(filename) 
Example 61
Project: pyblish-win   Author: pyblish   File: classfix.py    GNU Lesser General Public License v3.0
def main():
    bad = 0
    if not sys.argv[1:]: # No arguments
        err('usage: ' + sys.argv[0] + ' file-or-directory ...\n')
        sys.exit(2)
    for arg in sys.argv[1:]:
        if os.path.isdir(arg):
            if recursedown(arg): bad = 1
        elif os.path.islink(arg):
            err(arg + ': will not process symbolic links\n')
            bad = 1
        else:
            if fix(arg): bad = 1
    sys.exit(bad) 
Example 62
Project: pyblish-win   Author: pyblish   File: fixnotice.py    GNU Lesser General Public License v3.0
def main():
    global DRYRUN, OLD_NOTICE, NEW_NOTICE, VERBOSE
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'hv',
                                   ['help', 'oldnotice=', 'newnotice=',
                                    'dry-run', 'verbose'])
    except getopt.error, msg:
        usage(1, msg) 
Example 63
Project: pyblish-win   Author: pyblish   File: ndiff.py    GNU Lesser General Public License v3.0
def fcompare(f1name, f2name):
    f1 = fopen(f1name)
    f2 = fopen(f2name)
    if not f1 or not f2:
        return 0

    a = f1.readlines(); f1.close()
    b = f2.readlines(); f2.close()
    for line in difflib.ndiff(a, b):
        print line,

    return 1

# crack args (sys.argv[1:] is normal) & compare;
# return false iff a problem 
Example 64
Project: fs_image   Author: facebookincubator   File: demo_sendstreams.py    MIT License
def _make_create_ops_subvolume(subvols: TempSubvolumes, path: bytes) -> Subvol:
    'Exercise all the send-stream ops that can occur on a new subvolume.'
    subvol = subvols.create(path)
    run = subvol.run_as_root

    # `cwd` is intentionally prohibited with `run_as_root`
    def p(sv_path):
        return subvol.path(sv_path).decode()

    # Due to an odd `btrfs send` implementation detail, creating a file or
    # directory emits a rename from a temporary name to the final one.
    run(['mkdir', p('hello')])                      # mkdir,rename
    run(['mkdir', p('dir_to_remove')])
    run(['touch', p('hello/world')])                # mkfile,utimes,chmod,chown
    run([                                           # set_xattr
        'setfattr', '-n', 'user.test_attr', '-v', 'chickens', p('hello/'),
    ])
    run(['mknod', p('buffered'), 'b', '1337', '31415'])  # mknod
    run(['chmod', 'og-r', p('buffered')])           # chmod a device
    run(['mknod', p('unbuffered'), 'c', '1337', '31415'])
    run(['mkfifo', p('fifo')])                      # mkfifo
    run(['python3', '-c', (
        'import os, sys, socket as s\n'
        'dir, base = os.path.split(sys.argv[1])\n'
        # Otherwise, we can easily get "AF_UNIX path too long"
        'os.chdir(os.path.join(".", dir))\n'
        'with s.socket(s.AF_UNIX, s.SOCK_STREAM) as sock:\n'
        '    sock.bind(base)\n'                     # mksock
    ), p('unix_sock')])
    run(['ln', p('hello/world'), p('goodbye')])     # link
    run(['ln', '-s', 'hello/world', p('bye_symlink')])  # symlink
    run([                                           # update_extent
        # 56KB was chosen so that `btrfs send` emits more than 1 write,
        # specifically 48KB + 8KB.
        'dd', 'if=/dev/zero', 'of=' + p('56KB_nuls'), 'bs=1024', 'count=56',
    ])
    run([                                           # clone
        'cp', '--reflink=always', p('56KB_nuls'), p('56KB_nuls_clone'),
    ])

    # Make a file with a 16KB hole in the middle.
    run([
        'dd', 'if=/dev/zero', 'of=' + p('zeros_hole_zeros'),
        'bs=1024', 'count=16',
    ])
    run(['truncate', '-s', str(32 * 1024), p('zeros_hole_zeros')])
    run([
        'dd', 'if=/dev/zero', 'of=' + p('zeros_hole_zeros'),
        'oflag=append', 'conv=notrunc', 'bs=1024', 'count=16',
    ])
    # A trailing hole exercises the `truncate` sendstream command.
    run(['bash', '-c', 'echo hello > ' + shlex.quote(p('hello_big_hole'))])
    run(['truncate', '-s', '1G', p('hello_big_hole')])

    # This just serves to show that `btrfs send` ignores nested subvolumes.
    # There is no mention of `nested_subvol` in the send-stream.
    nested_subvol = subvols.create(p('nested_subvol'))
    nested_subvol.run_as_root(['touch', nested_subvol.path('borf')])
    nested_subvol.run_as_root(['mkdir', nested_subvol.path('beep')])

    return subvol 
Example 65
Project: fs_image   Author: facebookincubator   File: test_parse_dump.py    MIT License
def test_ensure_demo_sendstreams_cover_all_operations(self):
        # Ensure we have implemented all the operations from here:
        # https://github.com/kdave/btrfs-progs/blob/master/send-dump.c#L319
        expected_ops = {
            'chmod',
            'chown',
            'clone',
            'link',
            'mkdir',
            'mkfifo',
            'mkfile',
            'mknod',
            'mksock',
            'remove_xattr',
            'rename',
            'rmdir',
            'set_xattr',
            'snapshot',
            'subvol',
            'symlink',
            'truncate',
            'unlink',
            'update_extent',
            'utimes',
            # Omitted since `--dump` never prints data: 'write',
        }
        self.assertEqual(
            {n.decode() for n in NAME_TO_PARSER_TYPE.keys()},
            expected_ops,
        )

        # Now check that `demo_sendstream.py` also exercises those operations.
        stream_dict = make_demo_sendstreams(sys.argv[0])
        out_lines = [
            *stream_dict['create_ops']['dump'],
            *stream_dict['mutate_ops']['dump'],
        ]
        self.assertEqual(expected_ops, {
            l.split(b' ', 1)[0].decode().replace('write', 'update_extent')
                for l in out_lines if l
        })
        items = [
            *_parse_lines_to_list(stream_dict['create_ops']['dump']),
            *_parse_lines_to_list(stream_dict['mutate_ops']['dump']),
        ]
        # We get one item per line, and the items cover the expected operations.
        self.assertEqual(len(items), len(out_lines))
        self.assertEqual(
            {getattr(SendStreamItems, op_name) for op_name in expected_ops},
            {i.__class__ for i in items},
        )

    # The reason we want to parse a gold file instead of, as above, running
    # `demo_sendstreams.py` is explained in its top docblock. 
Example 66
Project: fs_image   Author: facebookincubator   File: snapshot_repo.py    MIT License 4 votes vote down vote up
def snapshot_repo(argv):
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    add_standard_args(parser)
    parser.add_argument(
        '--repo-name', required=True,
        help="Used to distinguish this repo's metadata from others' in the DB.",
    )
    parser.add_argument(
        '--repo-url', required=True,
        help='The base URL of the repo -- the part before repodata/repomd.xml. '
            'Supported protocols include file://, https://, and http://.',
    )
    parser.add_argument(
        '--gpg-url', required=True, action='append',
        help='(May be repeated) Yum will need to import this key to gpgcheck '
            'the repo. To avoid placing blind trust in these keys (e.g. in '
            'case this is an HTTP URL), they are verified against '
            '`--gpg-key-whitelist-dir`',
    )
    args = parser.parse_args(argv)

    init_logging(debug=args.debug)

    with populate_temp_dir_and_rename(
        args.snapshot_dir, overwrite=True,
    ) as td, RepoSnapshot.add_sqlite_to_storage(args.storage, td) as db:
        sizer = RepoSizer()
        # This is outside the retry_fn so that retries do not mask
        # verification failures.  I don't expect many infra failures.
        snapshot_gpg_keys(
            key_urls=args.gpg_url,
            whitelist_dir=args.gpg_key_whitelist_dir,
            snapshot_dir=td,
        )
        retry_fn(
            lambda: RepoDownloader(
                args.repo_name,
                args.repo_url,
                RepoDBContext(args.db, args.db.SQL_DIALECT),
                args.storage,
            ).download(rpm_shard=args.rpm_shard),
            delays=[0] * args.retries,
            what=f'Downloading {args.repo_name} from {args.repo_url} failed',
        ).visit(sizer).to_sqlite(args.repo_name, db)
        log.info(sizer.get_report(f'This {args.rpm_shard} snapshot weighs')) 
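
Because `snapshot_repo()` takes `argv` explicitly, wiring it to the real command line is a one-liner. The project's own entry point is not shown in this snippet; a hypothetical version might look like this:

# Hypothetical entry point -- not part of the snippet above.  Passing
# sys.argv[1:] here keeps snapshot_repo() easy to call from tests with a
# hand-built argument list.
if __name__ == '__main__':
    import sys
    snapshot_repo(sys.argv[1:])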
Example 67
Project: fs_image   Author: facebookincubator   File: test_package_image.py    MIT License 4 votes vote down vote up
def test_package_image_as_squashfs(self):
        with self._package_image(
            self._sibling_path('create_ops.layer'), 'squashfs',
        ) as out_path, TempSubvolumes(sys.argv[0]) as temp_subvolumes, \
                tempfile.NamedTemporaryFile() as temp_sendstream:
            subvol = temp_subvolumes.create('subvol')
            with Unshare([Namespace.MOUNT, Namespace.PID]) as unshare, \
                    tempfile.TemporaryDirectory() as mount_dir:
                subprocess.check_call(nsenter_as_root(
                    unshare, 'mount', '-t', 'squashfs', '-o', 'loop',
                    out_path, mount_dir,
                ))
                # `unsquashfs` would have been cleaner than `mount` +
                # `rsync`, and faster too, but unfortunately it corrupts
                # device nodes as of v4.3.
                subprocess.check_call(nsenter_as_root(
                    unshare, 'rsync', '--archive', '--hard-links',
                    '--sparse', '--xattrs', mount_dir + '/', subvol.path(),
                ))
            with subvol.mark_readonly_and_write_sendstream_to_file(
                temp_sendstream
            ):
                pass
            original_render = _render_sendstream_path(
                self._sibling_path('create_ops-original.sendstream'),
            )
            # SquashFS does not preserve the original's cloned extents of
            # zeros, nor the zero-hole-zero pattern.  In all cases, it
            # (efficiently) transmutes the whole file into 1 sparse hole.
            self.assertEqual(original_render[1].pop('56KB_nuls'), [
                '(File d57344(create_ops@56KB_nuls_clone:0+49152@0/' +
                'create_ops@56KB_nuls_clone:49152+8192@49152))'
            ])
            original_render[1]['56KB_nuls'] = ['(File h57344)']
            self.assertEqual(original_render[1].pop('56KB_nuls_clone'), [
                '(File d57344(create_ops@56KB_nuls:0+49152@0/' +
                'create_ops@56KB_nuls:49152+8192@49152))'
            ])
            original_render[1]['56KB_nuls_clone'] = ['(File h57344)']
            self.assertEqual(original_render[1].pop('zeros_hole_zeros'), [
                '(File d16384h16384d16384)'
            ])
            original_render[1]['zeros_hole_zeros'] = ['(File h49152)']
            self.assertEqual(
                original_render, _render_sendstream_path(temp_sendstream.name),
            ) 
Example 68
Project: fs_image   Author: facebookincubator   File: test_mount.py    MIT License 4 votes vote down vote up
def test_mount_item_file_from_host(self):
        mount_config = {
            'is_directory': False,
            'build_source': {'type': 'host', 'source': '/dev/null'},
        }

        def _mount_item(from_target):
            return MountItem(
                from_target=from_target,
                mountpoint='/lala',
                target=None,
                mount_config=mount_config,
            )

        with self.assertRaisesRegex(AssertionError, 'must be located under'):
            _mount_item('t')

        mount_item = _mount_item('//fs_image/features/host_mounts:t')

        bad_mount_config = mount_config.copy()
        bad_mount_config['runtime_source'] = bad_mount_config['build_source']
        with self.assertRaisesRegex(AssertionError, 'Only `build_source` may '):
            MountItem(
                from_target='//fs_image/features/host_mounts:t',
                mountpoint='/lala',
                target=None,
                mount_config=bad_mount_config,
            )

        with TempSubvolumes(sys.argv[0]) as temp_subvolumes:
            subvol = temp_subvolumes.create('mounter')
            mount_item.build(subvol, DUMMY_LAYER_OPTS._replace(
                target_to_path={},
                subvolumes_dir='unused',
            ))

            self.assertEqual(['(Dir)', {
                'lala': ['(File)'],  # An empty mountpoint for /dev/null
                'meta': ['(Dir)', {'private': ['(Dir)', {
                    # No `opts/artifacts_may_require_repo` here because we
                    # directly created the subvol instead of using an Item.
                    'mount': ['(Dir)', {'lala': ['(Dir)', {'MOUNT': ['(Dir)', {
                        'is_directory': ['(File d2)'],
                        'build_source': ['(Dir)', {
                            'type': ['(File d5)'],
                            'source': [f'(File d{len("/dev/null") + 1})'],
                        }],
                    }]}]}],
                }]}],
            }], render_subvol(subvol))
            for filename, contents in (
                ('is_directory', '0\n'),
                ('build_source/type', 'host\n'),
                ('build_source/source', '/dev/null\n'),
            ):
                with open(subvol.path(os.path.join(
                    'meta/private/mount/lala/MOUNT', filename,
                ))) as f:
                    self.assertEqual(contents, f.read()) 
Example 69
Project: fs_image   Author: facebookincubator   File: test_rpm_action.py    MIT License 4 votes vote down vote up
def test_rpm_action_item_auto_downgrade(self):
        parent_subvol = self._subvol_from_resource(
            'fs_image.compiler.items', 'test-with-one-local-rpm',
        )
        src_rpm = Path(__file__).dirname() / "rpm-test-cheese-1-1.rpm"

        with TempSubvolumes(sys.argv[0]) as temp_subvolumes:
            # ensure cheese2 is installed in the parent from rpm-test-cheese-2-1
            assert os.path.isfile(
                parent_subvol.path('/usr/share/rpm_test/cheese2.txt')
            )
            # make sure the RPM we are installing is older in order to
            # trigger the downgrade
            src_data = RpmMetadata.from_file(src_rpm)
            subvol_data = RpmMetadata.from_subvol(parent_subvol, src_data.name)
            assert compare_rpm_versions(src_data, subvol_data) < 0

            subvol = temp_subvolumes.snapshot(parent_subvol, 'rpm_action')
            RpmActionItem.get_phase_builder(
                [RpmActionItem(
                    from_target='t',
                    source=src_rpm,
                    action=RpmAction.install,
                )],
                self._opts()._replace(
                    build_appliance=self._subvol_from_resource(
                        'fs_image.compiler.items', 'host-test-build-appliance',
                    ).path(),
                ),
            )(subvol)
            subvol.run_as_root([
                'rm', '-rf',
                subvol.path('dev'),
                subvol.path('etc'),
                subvol.path('meta'),
                subvol.path('var'),
            ])
            self.assertEqual(['(Dir)', {
                'usr': ['(Dir)', {
                    'share': ['(Dir)', {
                        'rpm_test': ['(Dir)', {
                            'cheese1.txt': ['(File d36)'],
                        }],
                    }],
                }],
            }], render_subvol(subvol)) 
Example 70
Project: fs_image   Author: facebookincubator   File: test_install_file.py    MIT License 4 votes vote down vote up
def test_install_file_command_recursive(self):
        with TempSubvolumes(sys.argv[0]) as temp_subvolumes:
            subvol = temp_subvolumes.create('tar-sv')
            subvol.run_as_root(['mkdir', subvol.path('d')])

            with temp_dir() as td:
                with open(td / 'data.txt', 'w') as df:
                    print('Hello', file=df)
                os.mkdir(td / 'subdir')
                with open(td / 'subdir/exe.sh', 'w') as ef:
                    print('#!/bin/sh\necho "Hello"', file=ef)
                os.chmod(td / 'subdir/exe.sh', 0o100)

                dir_item = _install_file_item(
                    from_target='t', source={'source': td}, dest='/d/a',
                )

                ps = [
                    _InstallablePath(
                        td,
                        ProvidesDirectory(path='d/a'),
                        'u+rwx,og+rx',
                    ),
                    _InstallablePath(
                        td / 'data.txt',
                        ProvidesFile(path='d/a/data.txt'),
                        'a+r',
                    ),
                    _InstallablePath(
                        td / 'subdir',
                        ProvidesDirectory(path='d/a/subdir'),
                        'u+rwx,og+rx',
                    ),
                    _InstallablePath(
                        td / 'subdir/exe.sh',
                        ProvidesFile(path='d/a/subdir/exe.sh'),
                        'a+rx',
                    ),
                ]
                self.assertEqual(sorted(ps), sorted(dir_item.paths))
                self.assertEqual(td, dir_item.source)
                self._check_item(
                    dir_item, {p.provides for p in ps}, {require_directory('d')}
                )

                # This implicitly checks that `a` precedes its contents.
                dir_item.build(subvol, DUMMY_LAYER_OPTS)

            self.assertEqual(
                ['(Dir)', {'d': ['(Dir)', {'a': ['(Dir)', {
                    'data.txt': ['(File m444 d6)'],
                    'subdir': ['(Dir)', {
                        'exe.sh': ['(File m555 d23)'],
                    }],
                }]}]}],
                render_subvol(subvol),
            ) 
Example 71
Project: odorik   Author: nijel   File: main.py    GNU General Public License v3.0 4 votes vote down vote up
def main(settings=None, stdout=None, args=None):
    """Execution entry point."""
    parser = get_parser()
    if args is None:
        args = sys.argv[1:]
    args = parser.parse_args(args)

    config = OdorikConfig(args.config_section)
    if settings is None:
        config.load(args.config)
    else:
        for section, key, value in settings:
            config.set(section, key, value)

    for override in ('user', 'password', 'url'):
        value = getattr(args, override)
        if value is not None:
            config.set(args.config_section, override, value)

    command = COMMANDS[args.cmd](args, config, stdout)
    try:
        command.run()
    except (CommandError, odorik.OdorikException) as error:
        print('Error: {0}'.format(error), file=sys.stderr)
        sys.exit(1) 
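
The `args=None` default means `main()` only falls back to `sys.argv[1:]` when no argument list is supplied, which keeps the function easy to drive from tests. A minimal, self-contained stand-in for that pattern (`run` is a made-up name, not part of odorik):

import sys

def run(args=None):
    """Minimal stand-in for the args-fallback pattern used by main() above."""
    if args is None:
        args = sys.argv[1:]          # real command line only as a fallback
    return args

assert run(['--verbose']) == ['--verbose']   # tests pass args explicitly
print(run())                                  # production call reads sys.argv[1:]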
Example 72
Project: incubator-spot   Author: apache   File: start_listener.py    Apache License 2.0 4 votes vote down vote up
def parse_args():
    '''
        Parse command-line options found in 'args' (default: sys.argv[1:]).

    :returns: On success, a namedtuple of Values instances.
    '''
    parser   = ArgumentParser('Start Spark Job for Streaming Listener Daemon', epilog='END')
    required = parser.add_argument_group('mandatory arguments')

    # .................................state optional arguments
    parser.add_argument('-c', '--config-file',
        default='ingest_conf.json',
        type=file,
        help='path of configuration file',
        metavar='')

    parser.add_argument('-d', '--deploy-mode',
        default='client',
        help='Whether to launch the driver program locally ("client") or on one of the '
            'worker machines inside the cluster ("cluster")',
        metavar='')

    parser.add_argument('-g', '--group-id',
        help='name of the consumer group to join for dynamic partition assignment',
        metavar='')

    parser.add_argument('-l', '--log-level',
        default='INFO',
        help='determine the level of the logger',
        metavar='')

    parser.add_argument('-m', '--master',
        default='yarn',
        help='spark://host:port, mesos://host:port, yarn, or local',
        metavar='')

    parser.add_argument('-n', '--app-name',
        help='name of the Spark Job to display on the cluster web UI',
        metavar='')

    parser.add_argument('-r', '--redirect-spark-logs',
        help='redirect output of spark to specific file',
        metavar='')

    # .................................state mandatory arguments
    required.add_argument('-p', '--partitions',
        required=True,
        help='number of partitions to consume; each partition is consumed in its own thread')

    required.add_argument('-t', '--type',
        choices=pipelines.__all__,
        required=True,
        help='type of the data that will be ingested')

    required.add_argument('--topic',
        required=True,
        help='topic to listen for new messages')

    return parser.parse_args() 
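
The docstring mentions an `args` default of `sys.argv[1:]`, and that is exactly what `parser.parse_args()` reads when called with no argument. A standalone sketch of the equivalence (the parser and option below are illustrative, not the Spot ingest parser):

import sys
from argparse import ArgumentParser

parser = ArgumentParser('demo')
parser.add_argument('-p', '--partitions', required=True)

# Explicit list: nothing is read from the real command line.
print(parser.parse_args(['-p', '4']))   # Namespace(partitions='4')

# With no argument, parse_args() falls back to sys.argv[1:], i.e. it is
# equivalent to parser.parse_args(sys.argv[1:]).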
Example 73
Project: incubator-spot   Author: apache   File: listener.py    Apache License 2.0 4 votes vote down vote up
def parse_args():
    '''
        Parse command-line options found in 'args' (default: sys.argv[1:]).

    :returns: On success, a namedtuple of Values instances.
    '''
    parser   = ArgumentParser('Streaming Listener Daemon of Spot Ingest Framework', epilog='END')
    required = parser.add_argument_group('mandatory arguments')

    # .................................state optional arguments
    parser.add_argument('-b', '--batch-duration',
        default=30,
        type=int,
        help='time interval (in seconds) at which streaming data will be divided into batches',
        metavar='')

    parser.add_argument('-g', '--group-id',
        help='name of the consumer group to join for dynamic partition assignment',
        metavar='')

    parser.add_argument('-l', '--log-level',
        default='INFO',
        help='determine the level of the logger',
        metavar='')

    parser.add_argument('-n', '--app-name',
        help='name of the Spark Job to display on the cluster web UI',
        metavar='')

    # .................................state mandatory arguments
    required.add_argument('-d', '--database',
        required=True,
        help='name of the database in Hive, where the data will be stored')

    required.add_argument('-p', '--partitions',
        required=True,
        help='number of partitions to consume; each partition is consumed in its own thread')

    required.add_argument('-t', '--type',
        required=True,
        help='type of the data that will be ingested')

    required.add_argument('--topic',
        required=True,
        help='topic to listen for new messages')

    required.add_argument('-z', '--zkquorum',
        required=True,
        help='the connection string for the zookeeper in the form \'host[:port]\'',
        metavar='')

    return parser.parse_args() 
Example 74
Project: model-api-sequence   Author: evandowning   File: attack-config.py    GNU General Public License v3.0 4 votes vote down vote up
def _main():
    if len(sys.argv) != 4:
        usage()

    original = sys.argv[1]
    attack = sys.argv[2]
    output = sys.argv[3]

    # Read contents of files
    seq_original = read_seq(original)
    seq_attack = read_seq(attack)

    pc,a = seq_original.next().split(' ')
    b = next(seq_attack)

    # Dictionary to hold calls to insert
    shells = dict()

    # Don't insert multiple API calls after a unique PC
    pc_set = set()

    # Determine what needs to be inserted where
    while True:
        try:
            # Find the next mismatch
            while (a == b):
                pc,a = seq_original.next().split(' ')
                b = next(seq_attack)

            # Find the next match
            while (a != b):
                b = b.lower()
                if b not in shells:
                    shells[b] = dict()

                # Only add the API call if nothing has been inserted after this PC before
                if pc not in pc_set:
                    shells[b][pc] = a
                    pc_set.add(pc)

                b = next(seq_attack)

        except StopIteration as e:
            break

    # Create shellcode config file
    with open(output,'w') as fw:
        for k,v in shells.items():
            fw.write('[shellcode_{0}]\n'.format(k))
            fw.write('target_addr = (\n')

            for k2,v2 in v.items():
                fw.write('    # {0}\n'.format(v2))
                fw.write('    {0},\n'.format(hex(int(k2))))

            fw.write('              )\n\n') 
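
`_main()` bails out through `usage()` unless exactly three arguments follow the script name; the project's `usage()` is not included in this snippet. A minimal hypothetical version might look like this:

import sys

def usage():
    """Hypothetical helper -- the project's actual usage() is not shown above."""
    sys.stderr.write(
        'usage: {0} <original-seq> <attack-seq> <output-config>\n'.format(
            sys.argv[0]
        )
    )
    sys.exit(2)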
Example 75
Project: dcos-proxy   Author: dparrish   File: build-config.py    Apache License 2.0 4 votes vote down vote up
def main(argv):
    try:
        old_config = None
        while True:
            params = {
                'vhosts': {},
            }

            s = requests.Session()
            apps = json.loads(s.get('http://master.mesos:8080/v2/apps').text)
            for app in apps['apps']:
                try:
                    vhost = app['labels']['VIRTUAL_HOST']
                except KeyError:
                    continue
                tasks = json.loads(s.get('http://master.mesos:8080/v2/apps%s/tasks' % app['id'],
                                         headers={'Accept': 'application/json'}).text)
                backends = []
                for task in tasks['tasks']:
                    try:
                        ip = socket.gethostbyname(task['host'])
                    except socket.gaierror:
                        print "Can't look up host %s" % task['host']
                        continue
                    backends.append('%s:%s' % (ip, task['ports'][0]))
                if backends:
                    params['vhosts'][vhost] = {
                        'backends': backends,
                    }

            template = Template(TEMPLATE)
            new_config = template.render(params)
            if new_config != old_config:
                with file('/etc/nginx/sites-available/default', 'w') as fh:
                    fh.write(new_config)
                test = subprocess.Popen(['/usr/sbin/nginx', '-t'], stderr=subprocess.PIPE)
                output = test.communicate()
                if test.returncode != 0:
                    if old_config:
                        print 'Error generating new NGINX configuration, not reloading'
                        return
                    else:
                        raise RuntimeError('Error generating NGINX configuration')
                subprocess.call(['/usr/sbin/nginx', '-s', 'reload'])
                old_config = new_config
            time.sleep(10)
    except KeyboardInterrupt:
        return 1 
Example 76
Project: client   Author: Scorched-Moon   File: scorched_moon_client.py    GNU General Public License v3.0 4 votes vote down vote up
def main():

    logready = False
    loglevel = 0
    debug = False
    skip = False
    check = 0

    for argument in sys.argv:
        if argument == "--no-intro":
            skip = True
        elif argument == "--debug" or argument == "-d":
            debug = True
        elif argument == "--help" or argument == "-h":
            usage()
        elif argument == "--skip" or argument == "-s":
            skip = True
        elif argument == "--log" or argument == "-l":
            logready = True
        elif argument == "1":
            if logready == True: #don't combine if statements to make it easier to add further 
                logready = False
                loglevel = 1
            else:
                print("Unknown argument: {}" .format(argument))
                usage()
        elif argument == "2":
            if logready == True: #don't combine if statements to make it easier to add further 
                logready = False
                loglevel = 2
            else:
                print("Unknown argument: {}" .format(argument))
                usage()
        elif argument == "3":
            if logready == True: #don't combine if statements to make it easier to add further 
                logready = False
                loglevel = 3
            else:
                print("Unknown argument: {}" .format(argument))
                usage()
        elif argument == "4":
            if logready == True: #don't combine if statements to make it easier to add further 
                logready = False
                loglevel = 4
            else:
                print("Unknown argument: {}" .format(argument))
                usage()
        elif argument == "--create" or argument == "-c":
            makesettings = True
        elif check > 0:
            print("Unknown argument: {}" .format(argument))
            usage()
        check += 1

    import client.main
    client = client.main.Main(debug, loglevel, skip) 
Example 77
Project: pyblish-win   Author: pyblish   File: byteyears.py    GNU Lesser General Public License v3.0 4 votes vote down vote up
def main():

    # Use lstat() to stat files if it exists, else stat()
    try:
        statfunc = os.lstat
    except AttributeError:
        statfunc = os.stat

    # Parse options
    if sys.argv[1] == '-m':
        itime = ST_MTIME
        del sys.argv[1]
    elif sys.argv[1] == '-c':
        itime = ST_CTIME
        del sys.argv[1]
    elif sys.argv[1] == '-a':
        itime = ST_CTIME
        del sys.argv[1]
    else:
        itime = ST_MTIME

    secs_per_year = 365.0 * 24.0 * 3600.0   # Scale factor
    now = time.time()                       # Current time, for age computations
    status = 0                              # Exit status, set to 1 on errors

    # Compute max file name length
    maxlen = 1
    for filename in sys.argv[1:]:
        maxlen = max(maxlen, len(filename))

    # Process each argument in turn
    for filename in sys.argv[1:]:
        try:
            st = statfunc(filename)
        except os.error, msg:
            sys.stderr.write("can't stat %r: %r\n" % (filename, msg))
            status = 1
            st = ()
        if st:
            anytime = st[itime]
            size = st[ST_SIZE]
            age = now - anytime
            byteyears = float(size) * float(age) / secs_per_year
            print filename.ljust(maxlen),
            print repr(int(byteyears)).rjust(8) 
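
The `del sys.argv[1]` idiom above consumes the leading flag so that the later loops over `sys.argv[1:]` see only filenames. A standalone sketch of that idiom (the simulated command line is illustrative):

import sys

# Simulated command line -- illustrative only.
sys.argv = ['byteyears.py', '-m', 'a.txt', 'b.txt']

if sys.argv[1] in ('-m', '-c', '-a'):
    flag = sys.argv[1]
    del sys.argv[1]            # consume the flag in place

print(sys.argv[1:])            # ['a.txt', 'b.txt'] -- only the files remain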
Example 78
Project: pyblish-win   Author: pyblish   File: objgraph.py    GNU Lesser General Public License v3.0 4 votes vote down vote up
def main():
    try:
        optlist, args = getopt.getopt(sys.argv[1:], 'cdu')
    except getopt.error:
        sys.stdout = sys.stderr
        print 'Usage:', os.path.basename(sys.argv[0]),
        print           '[-cdu] [file] ...'
        print '-c: print callers per objectfile'
        print '-d: print callees per objectfile'
        print '-u: print usage of undefined symbols'
        print 'If none of -cdu is specified, all are assumed.'
        print 'Use "nm -o" to generate the input (on IRIX: "nm -Bo"),'
        print 'e.g.: nm -o /lib/libc.a | objgraph'
        return 1
    optu = optc = optd = 0
    for opt, void in optlist:
        if opt == '-u':
            optu = 1
        elif opt == '-c':
            optc = 1
        elif opt == '-d':
            optd = 1
    if optu == optc == optd == 0:
        optu = optc = optd = 1
    if not args:
        args = ['-']
    for filename in args:
        if filename == '-':
            readinput(sys.stdin)
        else:
            readinput(open(filename, 'r'))
    #
    warndups()
    #
    more = (optu + optc + optd > 1)
    if optd:
        if more:
            print '---------------All callees------------------'
        printcallee()
    if optu:
        if more:
            print '---------------Undefined callees------------'
        printundef()
    if optc:
        if more:
            print '---------------All Callers------------------'
        printcaller()
    return 0

# Call the main program.
# Use its return value as exit status.
# Catch interrupts to avoid stack trace.
# 
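
The closing comment describes the usual wrapper, whose lines are not included in this snippet; a hypothetical reconstruction of the idiom it describes:

import sys

if __name__ == '__main__':
    try:
        sys.exit(main())           # main()'s return value becomes the exit status
    except KeyboardInterrupt:      # no traceback on Ctrl-C
        sys.exit(1)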
Example 79
Project: pyblish-win   Author: pyblish   File: which.py    GNU Lesser General Public License v3.0 4 votes vote down vote up
def main():
    pathlist = os.environ['PATH'].split(os.pathsep)

    sts = 0
    longlist = ''

    if sys.argv[1:] and sys.argv[1][:2] == '-l':
        longlist = sys.argv[1]
        del sys.argv[1]

    for prog in sys.argv[1:]:
        ident = ()
        for dir in pathlist:
            filename = os.path.join(dir, prog)
            try:
                st = os.stat(filename)
            except os.error:
                continue
            if not S_ISREG(st[ST_MODE]):
                msg(filename + ': not a disk file')
            else:
                mode = S_IMODE(st[ST_MODE])
                if mode & 0111:
                    if not ident:
                        print filename
                        ident = st[:3]
                    else:
                        if st[:3] == ident:
                            s = 'same as: '
                        else:
                            s = 'also: '
                        msg(s + filename)
                else:
                    msg(filename + ': not executable')
            if longlist:
                sts = os.system('ls ' + longlist + ' ' + filename)
                if sts: msg('"ls -l" exit status: ' + repr(sts))
        if not ident:
            msg(prog + ': not found')
            sts = 1

    sys.exit(sts) 
Example 80
Project: pyblish-win   Author: pyblish   File: checkpyc.py    GNU Lesser General Public License v3.0 4 votes vote down vote up
def main():
    silent = 0
    verbose = 0
    if sys.argv[1:]:
        if sys.argv[1] == '-v':
            verbose = 1
        elif sys.argv[1] == '-s':
            silent = 1
    MAGIC = imp.get_magic()
    if not silent:
        print 'Using MAGIC word', repr(MAGIC)
    for dirname in sys.path:
        try:
            names = os.listdir(dirname)
        except os.error:
            print 'Cannot list directory', repr(dirname)
            continue
        if not silent:
            print 'Checking ', repr(dirname), '...'
        names.sort()
        for name in names:
            if name[-3:] == '.py':
                name = os.path.join(dirname, name)
                try:
                    st = os.stat(name)
                except os.error:
                    print 'Cannot stat', repr(name)
                    continue
                if verbose:
                    print 'Check', repr(name), '...'
                name_c = name + 'c'
                try:
                    f = open(name_c, 'r')
                except IOError:
                    print 'Cannot open', repr(name_c)
                    continue
                magic_str = f.read(4)
                mtime_str = f.read(4)
                f.close()
                if magic_str <> MAGIC:
                    print 'Bad MAGIC word in ".pyc" file',
                    print repr(name_c)
                    continue
                mtime = get_long(mtime_str)
                if mtime == 0 or mtime == -1:
                    print 'Bad ".pyc" file', repr(name_c)
                elif mtime <> st[ST_MTIME]:
                    print 'Out-of-date ".pyc" file',
                    print repr(name_c)