Python daemon.DaemonContext() Examples

The following are 23 code examples of daemon.DaemonContext(), collected from open-source projects. Each example notes its source file, the project it comes from, and that project's license. You may also want to check out the other available functions and classes of the daemon module.
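Before diving into the project examples, here is a minimal sketch of the most common pattern, assuming a long-running my_main_loop() placeholder and illustrative file paths (neither is taken from the projects below): open a DaemonContext as a context manager, optionally with a PID lock file and redirected stdout/stderr, and run the work inside the with block.

import daemon
from daemon.pidfile import TimeoutPIDLockFile

def my_main_loop():
    # Placeholder for the daemon's real work.
    ...

if __name__ == '__main__':
    # File objects passed as stdout/stderr are kept open across the fork,
    # so the daemonized process can keep logging to them.
    log = open('/tmp/mydaemon.log', 'w+')
    with daemon.DaemonContext(
            working_directory='/',
            umask=0o002,
            stdout=log,
            stderr=log,
            pidfile=TimeoutPIDLockFile('/tmp/mydaemon.pid')):
        my_main_loop()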
Example #1
Source File: cli.py    From ATM with MIT License
def _start(args):
    """Create a new process of ATM pointing the process to a certain ``pid`` file."""
    pid_path = _get_pid_path(args.pid)
    process = _get_atm_process(pid_path)

    if process:
        print('ATM is already running!')

    else:
        print('Starting ATM')

        if args.foreground:
            _start_background(args)

        else:
            pidfile = PIDLockFile(pid_path, timeout=1.0)

            with DaemonContext(pidfile=pidfile, working_directory=os.getcwd()):
                # Set up default log file if not already set
                if not args.logfile:
                    _logging_setup(args.verbose, 'atm.log')

                _start_background(args) 
Example #2
Source File: queue_monitor.py    From EnergyPATHWAYS with MIT License
def start_queue_monitor(poll_frequency, max_workers, user, group, directory):
    pw = pwd.getpwnam(user)
    gid = pw.pw_gid if group is None else grp.getgrnam(group).gr_gid
    # This will capture stderr from this process as well as all child
    # energyPATHWAYS processes. Normally it will be empty, but it can
    # help capture model startup problems that would otherwise be hard to see.
    err = open('/var/log/queue_monitor/qm_stderr_%s.log' % start_time, 'w+')

    with daemon.DaemonContext(
        files_preserve=[logging.root.handlers[0].stream.fileno()],
        pidfile=daemon.pidfile.PIDLockFile('/var/run/queue_monitor/queue_monitor.pid'),
        uid=pw.pw_uid,
        gid=gid,
        working_directory=directory,
        stderr=err
    ):
        logger.info('My process id is %i' % os.getpid())
        qm = QueueMonitor(poll_frequency, max_workers)
        qm.start() 
Example #3
Source File: executor.py    From funcX with Apache License 2.0
def executor_starter(htex, logdir, endpoint_id, logging_level=logging.DEBUG):

    from funcx import set_file_logger

    stdout = open(os.path.join(logdir, "executor.{}.stdout".format(endpoint_id)), 'w')
    stderr = open(os.path.join(logdir, "executor.{}.stderr".format(endpoint_id)), 'w')

    logdir = os.path.abspath(logdir)
    with daemon.DaemonContext(stdout=stdout, stderr=stderr):
        global logger
        print("cwd: ", os.getcwd())
        logger = set_file_logger(os.path.join(logdir, "executor.{}.log".format(endpoint_id)),
                                 level=logging_level)
        htex.start()

    stdout.close()
    stderr.close() 
Example #4
Source File: daemon.py    From vaping with Apache License 2.0
def _exec(self, detach=True):
        """
        daemonize and exec main()
        """
        kwargs = {
            'pidfile': self.pidfile,
            'working_directory': self.home_dir,
            }

        # FIXME - doesn't work
        if not detach:
            kwargs.update({
                'detach_process': False,
                'files_preserve': [0,1,2],
                'stdout': sys.stdout,
                'stderr': sys.stderr,
                })

        ctx = daemon.DaemonContext(**kwargs)

        with ctx:
            self._main() 
Example #5
Source File: kerberos_command.py    From airflow with Apache License 2.0
def kerberos(args):
    """Start a kerberos ticket renewer"""
    print(settings.HEADER)

    if args.daemon:
        pid, stdout, stderr, _ = setup_locations(
            "kerberos", args.pid, args.stdout, args.stderr, args.log_file
        )
        stdout = open(stdout, 'w+')
        stderr = open(stderr, 'w+')

        ctx = daemon.DaemonContext(
            pidfile=TimeoutPIDLockFile(pid, -1),
            stdout=stdout,
            stderr=stderr,
        )

        with ctx:
            krb.run(principal=args.principal, keytab=args.keytab)

        stdout.close()
        stderr.close()
    else:
        krb.run(principal=args.principal, keytab=args.keytab) 
Example #6
Source File: rundaemon.py    From fiscalberry with Apache License 2.0
def start_daemon(pidf, logf):
    ### This launches the daemon in its context
    logger = logging.getLogger(__name__)
    logger.setLevel(logging.INFO)
    handler = logging.FileHandler('/var/log/fiscalberry_daemon.log')
    logger.addHandler(handler)



    rootpath = os.path.dirname(os.path.abspath(__file__))
    ### XXX pidfile is a context
    with daemon.DaemonContext(
        stdout=handler.stream,
        stderr=handler.stream,
        working_directory=rootpath,
        umask=0o002,
        pidfile=pidfile.TimeoutPIDLockFile(pidf),
        files_preserve=[handler.stream]
        ) as context:
        do_something() 
Example #7
Source File: daemon.py    From ceph-lcm with Apache License 2.0
def main_daemon(options):
    pidfile = lockfile.pidlockfile.PIDLockFile(options.pid_file)

    context = daemon.DaemonContext(pidfile=pidfile)
    context.signal_map = {
        signal.SIGTERM: mainloop.shutdown_callback,
        signal.SIGINT: mainloop.shutdown_callback
    }

    with context:
        return main_script(options, False) 
Example #8
Source File: core.py    From python-rtmbot with MIT License
def start(self):
        if 'DAEMON' in self.config:
            if self.config.get('DAEMON'):
                import daemon
                with daemon.DaemonContext():
                    self._start()
        self._start() 
Example #9
Source File: kytosd.py    From kytos with MIT License
def main():
    """Read config and start Kytos in foreground or daemon mode."""
    # data_files is not enough when installing from PyPI

    _create_pid_dir()

    config = KytosConfig().options['daemon']

    if config.foreground:
        async_main(config)
    else:
        with daemon.DaemonContext():
            async_main(config) 
Example #10
Source File: helpers.py    From touchandgo with GNU General Public License v3.0
def daemonize(args, callback):
    with DaemonContext():
        from touchandgo.logger import log_set_up
        log_set_up(True)
        log = logging.getLogger('touchandgo.daemon')
        try:
            log.info("running daemon")
            create_process = False
            pid = os.getpid()
            log.debug("%s, %s, %s", LOCK_FILE, pid, args)
            lock = Lock(LOCK_FILE, pid, args.name, args.season_number,
                        args.episode_number, args.port)
            if lock.is_locked():
                log.debug("lock active")
                lock_pid = lock.get_pid()
                is_same = lock.is_same_file(args.name, args.season_number,
                                            args.episode_number)
                if (not is_same or not is_process_running(lock_pid)):
                    try:
                        log.debug("killing process %s" % lock_pid)
                        os.kill(lock_pid, signal.SIGQUIT)
                    except OSError:
                        pass
                    except TypeError:
                        pass
                    lock.break_lock()
                    create_process = True
            else:
                log.debug("Will create process")
                create_process = True

            if create_process:
                log.debug("creating proccess")
                lock.acquire()
                callback()
                lock.release()
            else:
                log.debug("same daemon process")
        except Exception as e:
            log.error(e) 
Example #11
Source File: server.py    From compoundpi with GNU General Public License v2.0
def main(self, args):
        warnings.showwarning = self.showwarning
        warnings.filterwarnings('ignore', category=CompoundPiStaleSequence)
        warnings.filterwarnings('ignore', category=CompoundPiStaleClientTime)
        if args.debug:
            # Don't bother with daemon context in debug mode; we generally
            # want to debug protocol stuff anyway...
            signal.signal(signal.SIGINT, self.interrupt)
            signal.signal(signal.SIGTERM, self.terminate)
            self.privileged_setup(args)
            self.serve_forever()
        else:
            pidfile = daemon.runner.make_pidlockfile(args.pidfile, 5)
            if daemon.runner.is_pidfile_stale(pidfile):
                pidfile.break_lock()
            self.privileged_setup(args)
            # Ensure the server's socket, any log file, and stderr are preserved
            # (if not forking)
            files_preserve = [self.server.socket]
            for handler in logging.getLogger().handlers:
                if isinstance(handler, logging.FileHandler):
                    files_preserve.append(handler.stream)
            logging.info('Entering daemon context')
            with daemon.DaemonContext(
                    # The following odd construct is to ensure detachment only
                    # where sensible (see default setting of detach_process)
                    detach_process=None if args.daemon else False,
                    stderr=None if args.daemon else sys.stderr,
                    uid=args.user, gid=args.group,
                    files_preserve=files_preserve,
                    pidfile=pidfile,
                    signal_map={
                        signal.SIGTERM: self.terminate,
                        signal.SIGINT:  self.interrupt,
                        }
                    ):
                self.serve_forever()
            logging.info('Exiting daemon context') 
Example #12
Source File: runner.py    From luscan-devel with GNU General Public License v2.0
def __init__(self, app):
        """ Set up the parameters of a new runner.

            The `app` argument must have the following attributes:

            * `stdin_path`, `stdout_path`, `stderr_path`: Filesystem
              paths to open and replace the existing `sys.stdin`,
              `sys.stdout`, `sys.stderr`.

            * `pidfile_path`: Absolute filesystem path to a file that
              will be used as the PID file for the daemon. If
              ``None``, no PID file will be used.

            * `pidfile_timeout`: Used as the default acquisition
              timeout value supplied to the runner's PID lock file.

            * `run`: Callable that will be invoked when the daemon is
              started.
            
            """
        self.parse_args()
        self.app = app
        self.daemon_context = DaemonContext()
        self.daemon_context.stdin = open(app.stdin_path, 'r')
        self.daemon_context.stdout = open(app.stdout_path, 'w+')
        self.daemon_context.stderr = open(
            app.stderr_path, 'w+', buffering=0)

        self.pidfile = None
        if app.pidfile_path is not None:
            self.pidfile = make_pidlockfile(
                app.pidfile_path, app.pidfile_timeout)
        self.daemon_context.pidfile = self.pidfile 
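For reference, a hypothetical app object satisfying the interface described in the docstring above might look like the following sketch; the class name, paths, and sleep interval are illustrative and not taken from luscan-devel.

import time

class ExampleApp:
    # Paths used to replace sys.stdin, sys.stdout and sys.stderr after daemonizing.
    stdin_path = '/dev/null'
    stdout_path = '/var/log/example/out.log'
    stderr_path = '/var/log/example/err.log'

    # PID file location and lock-acquisition timeout handed to the runner.
    pidfile_path = '/var/run/example/example.pid'
    pidfile_timeout = 5

    def run(self):
        # Invoked by the runner once the daemon has started.
        while True:
            time.sleep(60)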
Example #13
Source File: recipe-577442.py    From code with MIT License
def open(self): 
        self._addLoggerFiles() 
        daemon.DaemonContext.open(self)
        if self.stdout_logger:
            fileLikeObj = FileLikeLogger(self.stdout_logger)
            sys.stdout = fileLikeObj
        if self.stderr_logger:
            fileLikeObj = FileLikeLogger(self.stderr_logger)
            sys.stderr = fileLikeObj


#--------------------------------------------------------------- 
Example #14
Source File: needl.py    From Needl with MIT License
def daemonize(logfile, pidfile):
    needl.log.info('Daemonizing and logging to %s', logfile)

    with daemon.DaemonContext(working_directory=os.getcwd(),
                              stderr=logfile,
                              umask=0o002,
                              pidfile=daemon.pidfile.PIDLockFile(pidfile)) as dc:

        start() 
Example #15
Source File: main.py    From janus-cloud with GNU Affero General Public License v3.0
def main():
    if len(sys.argv) == 2:
        config = load_conf(sys.argv[1])
    else:
        config = load_conf('/opt/janus-cloud/conf/janus-proxy.yml')

    if config['general']['daemonize']:
        with DaemonContext(stdin=sys.stdin,
                           stdout=sys.stdout,
                           # working_directory=os.getcwd(),
                           files_preserve=list(range(3, 100))):
            do_main(config)
    else:
        do_main(config) 
Example #16
Source File: main.py    From janus-cloud with GNU Affero General Public License v3.0
def main():
    if len(sys.argv) == 2:
        config = load_conf(sys.argv[1])
    else:
        config = load_conf('/opt/janus-cloud/conf/janus-sentinel.yml')

    if config['general']['daemonize']:
        with DaemonContext(stdin=sys.stdin,
                           stdout=sys.stdout,
                           # working_directory=os.getcwd(),
                           files_preserve=list(range(3, 100))):
            do_main(config)
    else:
        do_main(config) 
Example #17
Source File: celery_command.py    From airflow with Apache License 2.0
def flower(args):
    """Starts Flower, Celery monitoring tool"""
    options = [
        conf.get('celery', 'BROKER_URL'),
        f"--address={args.hostname}",
        f"--port={args.port}",
    ]

    if args.broker_api:
        options.append(f"--broker-api={args.broker_api}")

    if args.url_prefix:
        options.append(f"--url-prefix={args.url_prefix}")

    if args.basic_auth:
        options.append(f"--basic-auth={args.basic_auth}")

    if args.flower_conf:
        options.append(f"--conf={args.flower_conf}")

    flower_cmd = FlowerCommand()

    if args.daemon:
        pidfile, stdout, stderr, _ = setup_locations(
            process="flower",
            pid=args.pid,
            stdout=args.stdout,
            stderr=args.stderr,
            log=args.log_file,
        )
        with open(stdout, "w+") as stdout, open(stderr, "w+") as stderr:
            ctx = daemon.DaemonContext(
                pidfile=TimeoutPIDLockFile(pidfile, -1),
                stdout=stdout,
                stderr=stderr,
            )
            with ctx:
                flower_cmd.execute_from_commandline(argv=options)
    else:
        flower_cmd.execute_from_commandline(argv=options) 
Example #18
Source File: scheduler_command.py    From airflow with Apache License 2.0
def scheduler(args):
    """Starts Airflow Scheduler"""
    print(settings.HEADER)
    job = SchedulerJob(
        dag_id=args.dag_id,
        subdir=process_subdir(args.subdir),
        num_runs=args.num_runs,
        do_pickle=args.do_pickle)

    if args.daemon:
        pid, stdout, stderr, log_file = setup_locations("scheduler",
                                                        args.pid,
                                                        args.stdout,
                                                        args.stderr,
                                                        args.log_file)
        handle = setup_logging(log_file)
        stdout = open(stdout, 'w+')
        stderr = open(stderr, 'w+')

        ctx = daemon.DaemonContext(
            pidfile=TimeoutPIDLockFile(pid, -1),
            files_preserve=[handle],
            stdout=stdout,
            stderr=stderr,
        )
        with ctx:
            job.run()

        stdout.close()
        stderr.close()
    else:
        signal.signal(signal.SIGINT, sigint_handler)
        signal.signal(signal.SIGTERM, sigint_handler)
        signal.signal(signal.SIGQUIT, sigquit_handler)
        job.run() 
Example #19
Source File: kubeops.py    From KubeOperator with Apache License 2.0
def get_daemon_context():
    daemon_pid_file = get_pid_file_path('jms')
    context = daemon.DaemonContext(
        pidfile=pidfile.TimeoutPIDLockFile(daemon_pid_file),
        signal_map={
            signal.SIGTERM: lambda x, y: clean_up(),
            signal.SIGHUP: 'terminate',
        },
        files_preserve=files_preserve,
        detach_process=True,
    )
    return context 
Example #20
Source File: __init__.py    From trezor-agent with GNU Lesser General Public License v3.0
def main(device_type):
    """Run ssh-agent using given hardware client factory."""
    args = create_agent_parser(device_type=device_type).parse_args()
    util.setup_logging(verbosity=args.verbose, filename=args.log_file)

    public_keys = None
    filename = None
    if args.identity.startswith('/'):
        filename = args.identity
        contents = open(filename, 'rb').read().decode('utf-8')
        # Allow loading previously exported SSH public keys
        if filename.endswith('.pub'):
            public_keys = list(import_public_keys(contents))
        identities = list(parse_config(contents))
    else:
        identities = [device.interface.Identity(
            identity_str=args.identity, curve_name=args.ecdsa_curve_name)]
    for index, identity in enumerate(identities):
        identity.identity_dict['proto'] = u'ssh'
        log.info('identity #%d: %s', index, identity.to_string())

    # override default PIN/passphrase entry tools (relevant for TREZOR/Keepkey):
    device_type.ui = device.ui.UI(device_type=device_type, config=vars(args))

    conn = JustInTimeConnection(
        conn_factory=lambda: client.Client(device_type()),
        identities=identities, public_keys=public_keys)

    sock_path = _get_sock_path(args)
    command = args.command
    context = _dummy_context()
    if args.connect:
        command = ['ssh'] + ssh_args(conn) + args.command
    elif args.mosh:
        command = ['mosh'] + mosh_args(conn) + args.command
    elif args.daemonize:
        out = 'SSH_AUTH_SOCK={0}; export SSH_AUTH_SOCK;\n'.format(sock_path)
        sys.stdout.write(out)
        sys.stdout.flush()
        context = daemon.DaemonContext()
        log.info('running the agent as a daemon on %s', sock_path)
    elif args.foreground:
        log.info('running the agent on %s', sock_path)

    use_shell = bool(args.shell)
    if use_shell:
        command = os.environ['SHELL']
        sys.stdin.close()

    if command or args.daemonize or args.foreground:
        with context:
            return run_server(conn=conn, command=command, sock_path=sock_path,
                              debug=args.debug, timeout=args.timeout)
    else:
        for pk in conn.public_keys():
            sys.stdout.write(pk)
        return 0  # success exit code 
Example #21
Source File: recipe-577442.py    From code with MIT License
def __init__(
        self,
        chroot_directory=None,
        working_directory='/',
        umask=0,
        uid=None,
        gid=None,
        prevent_core=True,
        detach_process=None,
        files_preserve=[],   # changed default
        loggers_preserve=[], # new
        pidfile=None,
        stdout_logger = None,  # new
        stderr_logger = None,  # new
        #stdin,   omitted!
        #stdout,  omitted!
        #stderr,  omitted!
        signal_map=None,
        ):

        self.stdout_logger = stdout_logger
        self.stderr_logger = stderr_logger
        self.loggers_preserve = loggers_preserve

        devnull_in = open(os.devnull, 'r+')
        devnull_out = open(os.devnull, 'w+')
        files_preserve.extend([devnull_in, devnull_out])

        daemon.DaemonContext.__init__(self,
            chroot_directory = chroot_directory,
            working_directory = working_directory,
            umask = umask,
            uid = uid,
            gid = gid,
            prevent_core = prevent_core,
            detach_process = detach_process,
            files_preserve = files_preserve, 
            pidfile = pidfile,
            stdin = devnull_in,
            stdout = devnull_out,
            stderr = devnull_out,
            signal_map = signal_map) 
Example #22
Source File: pshitt.py    From pshitt with GNU General Public License v3.0
def main():
    parser = argparse.ArgumentParser(
        description='Passwords of SSH Intruders Transferred to Text')
    parser.add_argument(
        '-o',
        '--output',
        default='passwords.log',
        help='File to export collected data')
    parser.add_argument(
        '-k', '--key', default='test_rsa.key', help='Host RSA key')
    parser.add_argument(
        '-l', '--log', default='pshitt.log', help='File to log info and debug')
    parser.add_argument(
        '-p', '--port', type=int, default=2200, help='TCP port to listen to')
    parser.add_argument(
        '-t',
        '--threads',
        type=int,
        default=50,
        help='Maximum number of client threads')
    parser.add_argument(
        '-V',
        '--version',
        default='SSH-2.0-OpenSSH_6.6.1p1 Debian-5',
        help='SSH local version to advertise')
    parser.add_argument(
        '-v',
        '--verbose',
        default=False,
        action="count",
        help="Show verbose output, use multiple times increase verbosity")
    parser.add_argument(
        '-D',
        '--daemon',
        default=False,
        action="store_true",
        help="Run as unix daemon")

    args = parser.parse_args()
    if not os.path.isabs(args.output):
        args.output = os.path.join(os.getcwd(), args.output)

    if not os.path.isabs(args.key):
        args.key = os.path.join(os.getcwd(), args.key)

    if not os.path.isabs(args.log):
        args.log = os.path.join(os.getcwd(), args.log)

    server = Pshitt(args)
    if args.daemon:
        with daemon.DaemonContext():
            server.run(args)
    else:
        server.run(args) 
Example #23
Source File: interchange.py    From funcX with Apache License 2.0
def cli_run():

    parser = argparse.ArgumentParser()
    parser.add_argument("-c", "--client_address", required=True,
                        help="Client address")
    parser.add_argument("--client_ports", required=True,
                        help="client ports as a triple of outgoing,incoming,command")
    parser.add_argument("--worker_port_range",
                        help="Worker port range as a tuple")
    parser.add_argument("-l", "--logdir", default="./parsl_worker_logs",
                        help="Parsl worker log directory")
    parser.add_argument("-p", "--poll_period",
                        help="REQUIRED: poll period used for main thread")
    parser.add_argument("--worker_ports", default=None,
                        help="OPTIONAL, pair of workers ports to listen on, eg --worker_ports=50001,50005")
    parser.add_argument("--suppress_failure", action='store_true',
                        help="Enables suppression of failures")
    parser.add_argument("--endpoint_id", default=None,
                        help="Endpoint ID, used to identify the endpoint to the remote broker")
    parser.add_argument("--hb_threshold",
                        help="Heartbeat threshold in seconds")
    parser.add_argument("--config", default=None,
                        help="Configuration object that describes provisioning")
    parser.add_argument("-d", "--debug", action='store_true',
                        help="Enables debug logging")

    print("Starting HTEX Intechange")
    args = parser.parse_args()

    optionals = {}
    optionals['suppress_failure'] = args.suppress_failure
    optionals['logdir'] = os.path.abspath(args.logdir)
    optionals['client_address'] = args.client_address
    optionals['client_ports'] = [int(i) for i in args.client_ports.split(',')]
    optionals['endpoint_id'] = args.endpoint_id
    optionals['config'] = args.config

    if args.debug:
        optionals['logging_level'] = logging.DEBUG
    if args.worker_ports:
        optionals['worker_ports'] = [int(i) for i in args.worker_ports.split(',')]
    if args.worker_port_range:
        optionals['worker_port_range'] = [int(i) for i in args.worker_port_range.split(',')]

    with daemon.DaemonContext():
        ic = Interchange(**optionals)
        ic.start()