Python logging.info() Examples

The following code examples show how to use logging.info(). They are extracted from open source Python projects.

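Before the project samples, here is a minimal, standalone sketch (not taken from any of the projects below; the host, port, and message values are placeholders) of the pattern most of them rely on: configure the root logger once at start-up, then call logging.info() either with printf-style arguments or with an already formatted string.

import logging

# Configure the root logger once, near program start-up.
logging.basicConfig(
    format='%(asctime)s %(levelname)s %(message)s',
    level=logging.INFO)

# Preferred: pass printf-style arguments and let logging format the message lazily.
logging.info('Connecting to %s:%d', 'example.org', 8080)

# Also common in the examples below: pre-format the message with str.format().
logging.info('Resolved {0} addresses'.format(3))

Messages below the configured level are dropped, which is why several examples switch the root logger to DEBUG when a debug flag is set.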
Example 1
Project: kas   Author: siemens   File: kas.py    (license)
def kas(argv):
    """
        The main entry point of kas.
    """
    create_logger()

    parser = kas_get_argparser()
    args = parser.parse_args(argv)

    if args.debug:
        logging.getLogger().setLevel(logging.DEBUG)

    logging.info('%s %s started', os.path.basename(sys.argv[0]), __version__)

    loop = asyncio.get_event_loop()

    for sig in (signal.SIGINT, signal.SIGTERM):
        loop.add_signal_handler(sig, interruption)
    atexit.register(_atexit_handler)

    for plugin in getattr(kasplugin, 'plugins', []):
        if plugin().run(args):
            return

    parser.print_help() 
Example 2
Project: PyPlanet   Author: PyPlanet   File: releases.py    (license)
async def check(self, first_check=False):
		from pyplanet import __version__ as current_version

		logging.debug('Checking for new versions...')

		async with aiohttp.ClientSession() as session:
			async with session.get(self.url) as resp:
				for release in await resp.json():
					if not release['draft'] and not release['prerelease']:
						self.latest = release['tag_name']
						break

				self.current = current_version
				logging.debug('Version check, your version: {}, online version: {}'.format(self.current, self.latest))

				if first_check and self.update_available:
					logging.info('New version of PyPlanet available, consider updating: {}'.format(self.latest))
					await self.instance.chat(
						'\uf1e6 $FD4$oPy$369Planet$z$s$fff \uf0e7 new version available: v{}. Consider updating!'.format(self.latest)
					) 
Example 3
Project: decouvrez_django   Author: oc-courses   File: add_albums.py    (license)
def handle(self, *args, **options):
        reference = 0
        # open file with data
        directory = os.path.dirname(os.path.dirname(__file__))
        path = os.path.join(directory, 'data', 'albums.yml')
        with open(path, 'r') as file:
            data = yaml.load(file)
            albums = data['albums']
            for album in albums:
                # Create artists
                artists = []
                for artist in album['artists']:
                    try:
                        stored_artist = Artist.objects.get(name=artist)
                        lg.info('Artist found: %s'%stored_artist)
                    except ObjectDoesNotExist:
                        stored_artist = Artist.objects.create(name=artist)
                        lg.info('Artist created: %s'%stored_artist)
                    artists.append(stored_artist)
                # Find or create album
                try:
                    stored_album = Album.objects.get(title=album['title'])
                    lg.info('Album found: %s'%stored_album.title)
                except ObjectDoesNotExist:
                    reference += 1
                    album = Album.objects.create(
                        title=album['title'],
                        reference=reference,
                        picture=album['picture']
                    )
                    album.artists = artists
                    lg.info('New album: %s' % album.title) 
Example 4
Project: nanoQC   Author: wdecoster   File: nanoQC.py    (GNU General Public License v3.0)
def main():
    args = get_args()
    logging.basicConfig(
        format='%(asctime)s %(message)s',
        filename=os.path.join(args.outdir, "NanoQC.log"),
        level=logging.INFO)
    logging.info("NanoQC started.")
    sizeRange = length_histogram(
        fqin=gzip.open(args.fastq, 'rt'),
        name=os.path.join(args.outdir, "SequenceLengthDistribution.png"))
    fq = get_bin(gzip.open(args.fastq, 'rt'), sizeRange)
    logging.info("Using {} reads for plotting".format(len(fq)))
    fqbin = [dat[0] for dat in fq]
    qualbin = [dat[1] for dat in fq]
    logging.info("Creating plots...")
    per_base_sequence_content_and_quality(fqbin, qualbin, args.outdir, args.format)
    logging.info("per base sequence content and quality completed.")
    logging.info("Finished!") 
Example 5
Project: tsproxy   Author: WPO-Foundation   File: tsproxy.py    (Apache License 2.0)
def run(self):
    global lock, background_activity_count
    try:
      logging.debug('[{0:d}] AsyncDNS - calling getaddrinfo for {1}:{2:d}'.format(self.client_id, self.hostname, self.port))
      addresses = socket.getaddrinfo(self.hostname, self.port)
      logging.info('[{0:d}] Resolving {1}:{2:d} Completed'.format(self.client_id, self.hostname, self.port))
    except:
      addresses = ()
      logging.info('[{0:d}] Resolving {1}:{2:d} Failed'.format(self.client_id, self.hostname, self.port))
    message = {'message': 'resolved', 'connection': self.client_id, 'addresses': addresses, 'localhost': self.is_localhost}
    self.result_pipe.SendMessage(message, False)
    lock.acquire()
    if background_activity_count > 0:
      background_activity_count -= 1
    lock.release()
    # open and close a local socket which will interrupt the long polling loop to process the message
    s = socket.socket()
    s.connect((server.ipaddr, server.port))
    s.close()


########################################################################################################################
#   TCP Client
######################################################################################################################## 
Example 6
Project: tsproxy   Author: WPO-Foundation   File: tsproxy.py    (Apache License 2.0)
def HandleResolve(self, message):
    global in_pipe,  map_localhost, lock, background_activity_count
    self.did_resolve = True
    is_localhost = False
    if 'hostname' in message:
      self.hostname = message['hostname']
    self.port = 0
    if 'port' in message:
      self.port = message['port']
    logging.info('[{0:d}] Resolving {1}:{2:d}'.format(self.client_id, self.hostname, self.port))
    if self.hostname == 'localhost':
      self.hostname = '127.0.0.1'
    if self.hostname == '127.0.0.1':
      logging.info('[{0:d}] Connection to localhost detected'.format(self.client_id))
      is_localhost = True
    if (dest_addresses is not None) and (not is_localhost or map_localhost):
      logging.info('[{0:d}] Resolving {1}:{2:d} to mapped address {3}'.format(self.client_id, self.hostname, self.port, dest_addresses))
      self.SendMessage('resolved', {'addresses': dest_addresses, 'localhost': False})
    else:
      lock.acquire()
      background_activity_count += 1
      lock.release()
      self.state = self.STATE_RESOLVING
      self.dns_thread = AsyncDNS(self.client_id, self.hostname, self.port, is_localhost, in_pipe)
      self.dns_thread.start() 
Example 7
Project: tsproxy   Author: WPO-Foundation   File: tsproxy.py    (Apache License 2.0)
def handle_message(self, message):
    if message['message'] == 'data' and 'data' in message and len(message['data']) > 0:
      self.buffer += message['data']
      if self.state == self.STATE_CONNECTED:
        self.handle_write()
    elif message['message'] == 'resolved':
      self.HandleResolved(message)
    elif message['message'] == 'connected':
      self.HandleConnected(message)
      self.handle_write()
    elif message['message'] == 'closed':
      if len(self.buffer) == 0:
        logging.info('[{0:d}] Server connection close being processed, closing Browser connection'.format(self.client_id))
        self.handle_close()
      else:
        logging.info('[{0:d}] Server connection close being processed, queuing browser connection close'.format(self.client_id))
        self.needs_close = True 
Example 8
Project: emscripten-docker   Author: apiaryio   File: emccbuild.py    (license)
def get_tags(versions):
    logging.info("Reading tags from repo %s" % (TAGS_URL))
    r = requests.get(TAGS_URL)
    if r.status_code != 200:
        raise RuntimeError(r.text)
    else:
        tags = [i['name'] for i in json.loads(
            r.text) if any(x in i['name'] for x in versions)]
        while 'next' in r.links:
            r = requests.get(r.links['next']['url'])
            if r.status_code != 200:
                raise RuntimeError(r.text)
            else:
                tags.extend([i['name'] for i in json.loads(r.text)
                             if any(x in i['name'] for x in versions)])
    return tags 
Example 9
Project: OldMunkiPackages   Author: aysiu   File: OldMunkiPackages.py    (Apache License 2.0)
def trash_old_stuff(trashlist, trashpath, newpath):
	if isinstance(trashlist, list):
		for old_location in trashlist:
			# Get the subfolders needed to be created
			path_within_destination=os.path.relpath(old_location, trashpath)
			# Create what will be the destination path
			new_location=os.path.join(newpath, path_within_destination)
			# Make sure all the relevant subfolders exist in the destination
			if not os.path.exists(os.path.dirname(new_location)):
				os.makedirs(os.path.dirname(new_location))
			# Even though we've been double-checking paths all along, let's just make one last check
			if os.path.exists(old_location) and os.path.isdir(newpath):
				os.rename(old_location, new_location)
				logging.info("Moving %s to %s\n" % (old_location, new_location))
			else:
				logging.error("One of %s or %s does not exist\n" % (old_location, new_location))
	else:
		logging.error("%s is not a valid list\n" % trashlist)

# Function that checks paths are writable 
Example 10
Project: PyPlanet   Author: PyPlanet   File: database.py    (license)
def create_from_settings(cls, instance, conf):
		try:
			engine_path, _, cls_name = conf['ENGINE'].rpartition('.')
			db_name = conf['NAME']
			db_options = conf['OPTIONS'] if 'OPTIONS' in conf and conf['OPTIONS'] else dict()

			# FIX for #331. Replace utf8 by utf8mb4 in the mysql driver encoding.
			if conf['ENGINE'] == 'peewee_async.MySQLDatabase' and 'charset' in db_options and db_options['charset'] == 'utf8':
				logging.info('Forcing to use \'utf8mb4\' instead of \'utf8\' for the MySQL charset option! (Fix #331).')
				db_options['charset'] = 'utf8mb4'

			# We will try to load it so we have the validation inside this class.
			engine = getattr(importlib.import_module(engine_path), cls_name)
		except ImportError:
			raise ImproperlyConfigured('Database engine doesn\'t exist!')
		except Exception as e:
			raise ImproperlyConfigured('Database configuration isn\'t complete or engine could\'t be found!')

		return cls(engine, instance, db_name, **db_options) 
Example 11
Project: aws-greengrass-mini-fulfillment   Author: awslabs   File: button.py    (Apache License 2.0)
def use_box(cli):
    log.info("[use_box] configuring magic buttons.")
    red_button.when_pressed = red_push
    red_button.when_released = red_release
    green_button.when_pressed = green_push
    green_button.when_released = green_release
    white_button.when_pressed = white_push
    white_button.when_released = white_release
    white_led.on()
    log.info("[use_box] configured buttons. White LED should now be on.")
    try:
        while 1:
            time.sleep(0.2)
    except KeyboardInterrupt:
        log.info(
            "[use_box] KeyboardInterrupt ... exiting box monitoring loop")
    red_led.off()
    green_led.off()
    white_led.off() 
Example 12
Project: aws-greengrass-mini-fulfillment   Author: awslabs   File: utils.py    (Apache License 2.0)
def mqtt_connect(mqtt_client, core_info):
    connected = False

    # try connecting to all connectivity info objects in the list
    for connectivity_info in core_info.connectivityInfoList:
        core_host = connectivity_info.host
        core_port = connectivity_info.port
        logging.info("Connecting to Core at {0}:{1}".format(
            core_host, core_port))
        mqtt_client.configureEndpoint(core_host, core_port)
        try:
            mqtt_client.connect()
            connected = True
            break
        except socket.error as se:
            print("SE:{0}".format(se))
        except operationTimeoutException as te:
            print("operationTimeoutException:{0}".format(te.message))
            traceback.print_tb(te, limit=25)
        except Exception as e:
            print("Exception caught:{0}".format(e.message))

    return connected 
Example 13
Project: aws-greengrass-mini-fulfillment   Author: awslabs   File: utils.py    (Apache License 2.0)
def get_conn_info(core_connectivity_info_list, match):
    """
    Get core connectivity info objects from the list. Matching any the `match`
    argument.

    :param core_connectivity_info_list: the connectivity info object list
    :param match: the value to match against either the Core Connectivity Info
        `id`, `host`, `port`, or `metadata` values
    :return: the list of zero or more matching connectivity info objects
    """
    conn_info = list()

    if not match:
        return conn_info

    for cil in core_connectivity_info_list:
        for ci in cil.connectivityInfoList:
            if match == ci.id or match == ci.host or match == ci.port or \
                            match == ci.metadata:
                conn_info.append(ci)

    return conn_info 
Example 14
Project: aws-greengrass-mini-fulfillment   Author: awslabs   File: utils.py    (Apache License 2.0)
def mqtt_connect(mqtt_client, core_info):
    connected = False

    # try connecting to all connectivity info objects in the list
    for connectivity_info in core_info.connectivityInfoList:
        core_host = connectivity_info.host
        core_port = connectivity_info.port
        logging.info("Connecting to Core at {0}:{1}".format(
            core_host, core_port))
        mqtt_client.configureEndpoint(core_host, core_port)
        try:
            mqtt_client.connect()
            connected = True
            break
        except socket.error as se:
            print("SE:{0}".format(se))
        except operationTimeoutException as te:
            print("operationTimeoutException:{0}".format(te.message))
            traceback.print_tb(te, limit=25)
        except Exception as e:
            print("Exception caught:{0}".format(e.message))

    return connected 
Example 15
Project: aws-greengrass-mini-fulfillment   Author: awslabs   File: utils.py    (Apache License 2.0)
def discover_configured_core(device_name, dip, config_file):
    cfg = GroupConfigFile(config_file)
    gg_core = None
    # Discover Greengrass Core

    discovered, discovery_info = ggc_discovery(
        device_name, dip, retry_count=10
    )
    logging.info("[discover_cores] Device: {0} discovery success".format(
        device_name)
    )

    # find the configured Group's core
    for group in discovery_info.getAllGroups():
        dump_core_info_list(group.coreConnectivityInfoList)
        gg_core = group.getCoreConnectivityInfo(cfg['core']['thing_arn'])

        if gg_core:
            logging.info('Found the configured core and Group CA.')
            break

    return gg_core, discovery_info 
Example 16
Project: aws-greengrass-mini-fulfillment   Author: awslabs   File: utils.py    (Apache License 2.0)
def get_conn_info(core_connectivity_info_list, match):
    """
    Get core connectivity info objects from the list. Matching any the `match`
    argument.

    :param core_connectivity_info_list: the connectivity info object list
    :param match: the value to match against either the Core Connectivity Info
        `id`, `host`, `port`, or `metadata` values
    :return: the list of zero or more matching connectivity info objects
    """
    conn_info = list()

    if not match:
        return conn_info

    for cil in core_connectivity_info_list:
        for ci in cil.connectivityInfoList:
            if match == ci.id or match == ci.host or match == ci.port or \
                            match == ci.metadata:
                conn_info.append(ci)

    return conn_info 
Example 17
Project: aws-greengrass-mini-fulfillment   Author: awslabs   File: arm.py    (Apache License 2.0)
def _activate_command(self, cmd):
        """Use the shared `threading.Event` instance to signal a mini
        fulfillment shadow command to the running Control thread.
        """
        self.last_state = self.active_state
        self.active_state = cmd
        log.info("[arm._activate_command] last_state='{0}' state='{1}'".format(
            self.last_state, cmd))

        if self.active_state == 'run':
            log.info("[arm._activate_command] START RUN")
            self.cmd_event.set()
        elif self.active_state == 'stop':
            log.info("[arm._activate_command] STOP")
            self.cmd_event.clear()
        return 
Example 18
Project: aws-greengrass-mini-fulfillment   Author: awslabs   File: arm.py    (Apache License 2.0)
def pick(self):
        log.debug("[act.pick] [begin]")
        arm = ArmStages(self.sg)
        self.mqtt_client.publish(
            self.stage_topic, _stage_message("pick", "begin"), 0
        )
        pick_box = self.found_box
        self.found_box = NO_BOX_FOUND
        log.info("[act.pick] pick_box:{0}".format(pick_box))
        log.info("[act.pick] self.found_box:{0}".format(self.found_box))
        stage_result = arm.stage_pick(previous_results=pick_box,
                                      cartesian=False)
        self.mqtt_client.publish(
            self.stage_topic, _stage_message("pick", "end", stage_result), 0
        )
        log.debug("[act.pick] [end]")
        return stage_result 
Example 19
Project: aws-greengrass-mini-fulfillment   Author: awslabs   File: arm.py    (Apache License 2.0)
def emergency_stop_arm(self):
        if self.active_state == 'stopped' or \
                self.active_state == 'initialized':
            return

        if 'present_position' in base_servo_cache:
            stop_positions = [
                base_servo_cache['present_position'],
                femur01_servo_cache['present_position'],
                femur02_servo_cache['present_position'],
                tibia_servo_cache['present_position'],
                eff_servo_cache['present_position']
            ]
            log.info("[emergency_stop_arm] stop_positions:{0}".format(
                stop_positions))
            self.sg.write_values(
                register='goal_position', values=stop_positions)
            self.active_state = 'stopped'
            log.info("[emergency_stop_arm] active_state:{0}".format(
                self.active_state))
        else:
            log.error("[emergency_stop_arm] no 'present_position' cache value") 
Example 20
Project: aws-greengrass-mini-fulfillment   Author: awslabs   File: stages.py    (Apache License 2.0)
def stage_stop(self):
        log.info("[stage_stop] _begin_")

        self.sg.goal_position([
            512,  # first servo value
            500,  # second servo value
            500,  # third servo value
            135,  # fourth servo value
            OPEN_EFFECTOR  # fifth servo value
        ], block=True, margin=POSITION_MARGIN)

        # add little sleepy motion in end effector for fun
        self.sg['effector']['goal_position'] = GRAB_EFFECTOR
        time.sleep(0.4)
        self.sg['effector']['goal_position'] = GRAB_EFFECTOR + 100
        time.sleep(0.4)
        self.sg['effector']['goal_position'] = GRAB_EFFECTOR
        time.sleep(0.4)
        self.sg['effector']['goal_position'] = GRAB_EFFECTOR + 30
        time.sleep(0.4)
        self.sg['effector']['goal_position'] = GRAB_EFFECTOR

        log.info("[stage_stop] _end_") 
Example 21
Project: conv2mp4-py   Author: Kameecoding   File: conv2mp4-server.py    (GNU General Public License v3.0)
def good_output(oldFile,new_file):
	oldSize = get_length(oldFile)
	newSize = get_length(new_file)

	for i in range(0,2):
		if i == 2:
			if abs(oldSize[i]- newSize[i]) > 5:
				Logger.info("ERROR: File Duration difference bigger than 5 seconds, convert failed")
				return False
		else:
			if oldSize[i] != newSize [i]:
				Logger.info("ERROR: File Duration difference bigger than 5 seconds, convert failed")
				return False
	Logger.info("SUCCESS: File Duration difference less than 5 seconds, convert successful")
	return True

#Recursively build a map of folder paths to GDRIVE ids 
Example 22
Project: conv2mp4-py   Author: Kameecoding   File: conv2mp4-server.py    (GNU General Public License v3.0)
def execute(self):
		check_path(self.target_dir)
		
		if EXTRACT_SRT:
			self.extract_srt()
			self.move_external_subs()

		if self.input_video != self.output_video:
			try:
				self.handbrake_convert();
				if UPLOAD_TO_DRIVE:
					self.upload_to_drive()
			except KeyboardInterrupt:
				Logger.info("KeyBoardInterrupt Detected, Cleaning up and Exiting")
				self.remove_media_file(self.output_video)
				sys.exit(0)
			if REMOVE_CONVERTED:
				Logger.info("Deleting old files")
				self.remove_media_file(self.input_video)
				self.remove_folder(os.path.dirname(self.input_video))
		else:
			Logger.info("{file} already exists, skipping.".format(file=self.input_video)) 
Example 23
Project: bnn-analysis   Author: myshkov   File: experiment.py    (MIT License)
def configure_env_dropout(self, env, sampler_params=None, dropout=0.01, tau=0.15, length_scale=1e-2):
        def sampler_factory():
            params = env.get_default_sampler_params()
            params['n_epochs'] = 50

            wreg = length_scale ** 2 * (1 - dropout) / (2. * env.get_train_x().shape[0] * tau)
            model = DropoutSampler.model_from_description(env.layers_description, wreg, dropout)
            logging.info(f'Reg: {wreg}')

            if sampler_params is not None:
                params.update(sampler_params)

            sampler = DropoutSampler(model=model, **params)
            sampler.construct()
            return sampler

        env.sampler_factory = sampler_factory 
Example 24
Project: wpw-sdk-python   Author: WPTechInnovation   File: WPWithinWrapperImpl.py    (MIT License)
def openRpcListener(self):
        try:
            # Make socket
            transport = TSocket.TSocket(self.ipAddress, self.portNumber)
            # Buffering is critical. Raw sockets are very slow
            transport = TTransport.TBufferedTransport(transport)
            # Wrap in a protocol
            protocol = TBinaryProtocol.TBinaryProtocol(transport)
            # Create a client to use the protocol encoder
            client = Client(protocol)
            # Connect!
            transport.open()
            logging.info("STARTED connection to SDK via RPC thrift")
            return client
        except Exception as e:
            logging.info("Error: Couldn't open the RpcListener: " + str(e))
            self.killRpcAgent()
            raise WWTypes.WPWithinGeneralException("Error: Couldn't open the RpcListener", e) 
Example 25
Project: wpw-sdk-python   Author: WPTechInnovation   File: WPWithinWrapperImpl.py    (MIT License)
def stopRPCAgent(self):
        logging.info('SHOULD STOP RPC AGENT')
        try:
            self.getClient().CloseRPCAgent()
        except socket.error as er:
            time.sleep(2)
            if self.rpcProcess is None or self.rpcProcess.poll() is not None:
                logging.info("RPC agent closed.")
            else:
                self.killRpcAgent()
                raise WWTypes.WPWithinGeneralException("RPC process killed.", er)
        except Exception as e:
            if self.rpcProcess is None or self.rpcProcess.poll() is not None:
                logging.info("RPC agent is closed.")
            else:
                self.killRpcAgent()
                raise WWTypes.WPWithinGeneralException("RPC process killed.", e) 
Example 26
Project: striptls   Author: tintinweb   File: striptls.py    (Creative Commons Zero v1.0 Universal)
def on_recv_peek(session, s_in):
                if s_in.socket_ssl:
                    return

                ssl_version = session.protocol.detect_peek_tls(s_in)
                if ssl_version:
                    logger.info("SSL Handshake detected - performing ssl/tls conversion")
                    try:
                        context = Vectors.GENERIC.Intercept.create_ssl_context()
                        context.load_cert_chain(certfile=Vectors._TLS_CERTFILE,
                                                keyfile=Vectors._TLS_KEYFILE)
                        session.inbound.ssl_wrap_socket_with_context(context, server_side=True)
                        logger.debug("%s [client] <> [      ]          SSL handshake done: %s"%(session, session.inbound.socket_ssl.cipher()))
                        session.outbound.ssl_wrap_socket_with_context(context, server_side=False)
                        logger.debug("%s [      ] <> [server]          SSL handshake done: %s"%(session, session.outbound.socket_ssl.cipher()))
                    except Exception as e:
                        logger.warning("Exception - not ssl intercepting outbound: %s"%repr(e)) 
Example 27
Project: benchmarks   Author: tensorflow   File: kubectl_util.py    (Apache License 2.0)
def _GetPodNames(pod_name_prefix, job_name=None):
  """Get pod names based on the pod_name_prefix and job_name.

  Args:
    pod_name_prefix: value of 'name-prefix' selector.
    job_name: value of 'job' selector. If None, pod names will be
      selected only based on 'name-prefix' selector.

  Returns:
    List of pod names.
  """
  pod_list_command = [
      _KUBECTL, 'get', 'pods', '-o', 'name', '-a',
      '-l', _GetJobSelector(pod_name_prefix, job_name)]
  logging.info('Command to get pod names: %s', ' '.join(pod_list_command))
  output = subprocess.check_output(pod_list_command, universal_newlines=True)
  pod_names = [name for name in output.strip().split('\n') if name]
  logging.info('Pod names: "%s"', ','.join(pod_names))
  return pod_names 
Example 28
Project: benchmarks   Author: tensorflow   File: kubectl_util.py    (Apache License 2.0)
def CreatePods(pod_name, yaml_file):
  """Creates pods based on the given kubernetes config.

  Args:
    pod_name: 'name-prefix' selector for the pods.
    yaml_file: kubernetes yaml config.

  Raises:
    TimeoutError: if jobs didn't come up for a long time.
  """
  command = [_KUBECTL, 'create', '--filename=%s' % yaml_file]
  logging.info('Creating pods: %s', subprocess.list2cmdline(command))
  subprocess.check_call(command)

  if not _WaitUntil(100, _GetPodNames, pod_name):
    raise TimeoutError(
        'Timed out waiting for %s pod to come up.' % pod_name) 
Example 29
Project: benchmarks   Author: tensorflow   File: kubectl_util.py    (Apache License 2.0)
def DeletePods(pod_name, yaml_file):
  """Deletes pods based on the given kubernetes config.

  Args:
    pod_name: 'name-prefix' selector for the pods.
    yaml_file: kubernetes yaml config.

  Raises:
    TimeoutError: if jobs didn't terminate for a long time.
  """
  command = [_KUBECTL, 'delete', '--filename=%s' % yaml_file]
  logging.info('Deleting pods: %s', ' '.join(command))
  subprocess.call(command)

  def CheckPodsAreTerminated():
    return not _GetPodNames(pod_name)
  if not _WaitUntil(100, CheckPodsAreTerminated):
    raise TimeoutError(
        'Timed out waiting for %s pod to terminate.' % pod_name) 
Example 30
Project: benchmarks   Author: tensorflow   File: kubectl_util.py    (Apache License 2.0)
def _PrintLogs(pod_name_prefix, job_name):
  """Prints pod logs.

  If a pod has been restarted, prints logs from previous run. Otherwise,
  prints the logs from current run. We print logs for pods selected
  based on pod_name_prefix and job_name.

  Args:
    pod_name_prefix: value of 'name-prefix' selector.
    job_name: value of 'job' selector.
  """
  for pod_name in _GetPodNames(pod_name_prefix, job_name):
    try:
      # Get previous logs.
      logs_command = [_KUBECTL, 'logs', '-p', pod_name]
      logging.info('Command to get logs: %s', ' '.join(logs_command))
      output = subprocess.check_output(logs_command, universal_newlines=True)
    except subprocess.CalledProcessError:
      # We couldn't get previous logs, so we will try to get current logs.
      logs_command = [_KUBECTL, 'logs', pod_name]
      logging.info('Command to get logs: %s', ' '.join(logs_command))
      output = subprocess.check_output(logs_command, universal_newlines=True)
    print('%s logs:' % pod_name)
    print(output) 
Example 31
Project: mblog   Author: moling3650   File: orm.py    (MIT License)
async def select(sql, args, size=None):
    log(sql, args)
    # Acquire a connection from the pool; the async context manager returns it automatically.
    async with __pool.get() as conn:
        # Use a DictCursor so each row comes back as a dict keyed by column name.
        async with conn.cursor(aiomysql.DictCursor) as cur:
            await cur.execute(sql.replace('?', '%s'), args)  # turn '?' placeholders into MySQL-style '%s'
            # Honour the optional row limit.
            if size:
                resultset = await cur.fetchmany(size)  # fetch at most `size` rows
            else:
                resultset = await cur.fetchall()      # fetch every remaining row
        logging.info('rows returned: %s' % len(resultset))
        return resultset

# INSERT, UPDATE and DELETE statements go through a generic execute() helper instead of select(). 
Example 32
Project: mblog   Author: moling3650   File: __init__.py    (MIT License)
def add_routes(app, module_name):
    try:
        mod = __import__(module_name, fromlist=['get_submodule'])
    except ImportError as e:
        raise e
    # Scan the module for request handlers: only functions decorated with @get or @post
    # carry the '__method__' and '__route__' attributes checked below.
    for attr in dir(mod):
        # Skip private names that start with '_'.
        if attr.startswith('_'):
            continue
        # Fetch the public attribute.
        func = getattr(mod, attr)
        # Register anything callable that carries both __method__ and __route__.
        if callable(func) and hasattr(func, '__method__') and hasattr(func, '__route__'):
            args = ', '.join(inspect.signature(func).parameters.keys())
            logging.info('add route %s %s => %s(%s)' % (func.__method__, func.__route__, func.__name__, args))
            app.router.add_route(func.__method__, func.__route__, RequestHandler(func))


Example 33
Project: mblog   Author: moling3650   File: api.py    (MIT License)
async def oauth2(code):
    url = 'https://api.weibo.com/oauth2/access_token'
    payload = {
        'client_id': '366603916',
        'client_secret': 'b418efbd77094585d0a7f9ccac98a706',
        'grant_type': 'authorization_code',
        'code': code,
        'redirect_uri': 'http://www.qiangtaoli.com'
    }
    with ClientSession() as session:
        async with session.post(url, data=payload) as resp:
            params = await resp.json()
        async with session.get('https://api.weibo.com/2/users/show.json', params=params) as resp:
            info = await resp.json()
        o = await Oauth.find('weibo-' + info['idstr'])
        if not o:
            return 'redirect:/bootstrap/register?oid=weibo-%s&name=%s&image=%s' % (info['idstr'], info['name'], info['avatar_large'])
        user = await User.find(o.user_id)
        if not user:
            return 'oauth user was deleted.'
        return user.signin(web.HTTPFound('/'))


Example 34
Project: mblog   Author: moling3650   File: models.py    (MIT License)
async def find_by_cookie(cls, cookie_str):
        if not cookie_str:
            return None
        try:
            L = cookie_str.split('-')
            if len(L) != 3:
                return None
            uid, expires, sha1 = L
            if int(expires) < time.time():
                return None
            user = await cls.find(uid)
            if not user:
                return None
            s = '%s-%s-%s-%s' % (uid, user.get('password'), expires, COOKIE_KEY)
            if sha1 != hashlib.sha1(s.encode('utf-8')).hexdigest():
                logging.info('invalid sha1')
                return None
            user.password = '******'
            return user
        except Exception as e:
            logging.exception(e)
            return None


Example 35
Project: mblog   Author: moling3650   File: __init__.py    (MIT License)
def init_jinja2(app, **kw):
    logging.info('init jinja2...')
    options = {
        'autoescape': kw.get('autoescape', True),
        'block_start_string': kw.get('block_start_string', '{%'),
        'block_end_string': kw.get('block_end_string', '%}'),
        'variable_start_string': kw.get('variable_start_string', '{{'),
        'variable_end_string': kw.get('variable_end_string', '}}'),
        'auto_reload': kw.get('auto_reload', True)
    }
    path = kw.get('path', os.path.join(__path__[0], 'templates'))
    logging.info('set jinja2 template path: %s' % path)
    env = Environment(loader=FileSystemLoader(path), **options)
    filters = kw.get('filters')
    if filters is not None:
        for name, ftr in filters.items():
            env.filters[name] = ftr
    app['__templating__'] = env 
Example 36
Project: mblog   Author: moling3650   File: __init__.py    (MIT License)
async def create_server(loop, config_mod_name):
    try:
        config = __import__(config_mod_name, fromlist=['get config'])
    except ImportError as e:
        raise e

    await create_pool(loop, **config.db_config)
    app = web.Application(loop=loop, middlewares=[
        logger_factory, auth_factory, data_factory, response_factory])
    add_routes(app, 'app.route')
    add_routes(app, 'app.api')
    add_routes(app, 'app.api_v2')
    add_static(app)
    init_jinja2(app, filters=dict(datetime=datetime_filter, marked=marked_filter), **config.jinja2_config)
    server = await loop.create_server(app.make_handler(), '127.0.0.1', 9900)
    logging.info('server started at http://127.0.0.1:9900...')
    return server 
Example 37
Project: python-freezerclient   Author: openstack   File: sessions.py    (Apache License 2.0)
def take_action(self, parsed_args):
        try:
            self.app.client.sessions.remove_job(parsed_args.session_id,
                                                parsed_args.job_id)
        except Exception as error:
            # there is an error coming from the api when a job is removed
            # with the following text:
            # Additional properties are not allowed
            # ('job_event' was unexpected)
            # but in reality the job gets removed correctly.
            if 'Additional properties are not allowed' in error.message:
                pass
            else:
                raise exceptions.ApiClientException(error.message)
        else:
            logging.info('Job {0} removed correctly from session {1}'.format(
                parsed_args.job_id, parsed_args.session_id)) 
Example 38
Project: pyku   Author: dubvulture   File: sudoku_steps.py    (GNU General Public License v3.0)
def extract_digits(self, image):
        """
        Extract digits from a binary image representing a sudoku
        :param image: binary image/sudoku
        :return: array of digits and their probabilities
        """
        prob = np.zeros(4, dtype=np.float32)
        digits = np.zeros((4, 9, 9), dtype=object)
        for i in range(4):
            labeled, features = label(image, structure=CROSS)
            objs = find_objects(labeled)
            for obj in objs:
                roi = image[obj]
                # center of bounding box
                cy = (obj[0].stop + obj[0].start) / 2
                cx = (obj[1].stop + obj[1].start) / 2
                dists = cdist([[cy, cx]], CENTROIDS, 'euclidean')
                pos = np.argmin(dists)
                cy, cx = pos % 9, pos / 9
                # 28x28 image, center relative to sudoku
                prediction = self.classifier.classify(morph(roi))
                if digits[i, cy, cx] is 0:
                    # Newly found digit
                    digits[i, cy, cx] = prediction
                    prob[i] += prediction[0, 0]
                elif prediction[0, 0] > digits[i, cy, cx][0, 0]:
                    # Overlapping! (noise), choose the most probable prediction
                    prob[i] -= digits[i, cy, cx][0, 0]
                    digits[i, cy, cx] = prediction
                    prob[i] += prediction[0, 0]
            image = np.rot90(image)
        logging.info(prob)
        return digits[np.argmax(prob)] 
Example 39
Project: charm-plumgrid-gateway   Author: openstack   File: charm_helpers_sync.py    (Apache License 2.0)
def clone_helpers(work_dir, branch):
    dest = os.path.join(work_dir, 'charm-helpers')
    logging.info('Checking out %s to %s.' % (branch, dest))
    cmd = ['bzr', 'checkout', '--lightweight', branch, dest]
    subprocess.check_call(cmd)
    return dest 
Example 40
Project: charm-plumgrid-gateway   Author: openstack   File: charm_helpers_sync.py    (Apache License 2.0)
def ensure_init(path):
    '''
    ensure directories leading up to path are importable, omitting
    parent directory, eg path='/hooks/helpers/foo'/:
        hooks/
        hooks/helpers/__init__.py
        hooks/helpers/foo/__init__.py
    '''
    for d, dirs, files in os.walk(os.path.join(*path.split('/')[:2])):
        _i = os.path.join(d, '__init__.py')
        if not os.path.exists(_i):
            logging.info('Adding missing __init__.py: %s' % _i)
            open(_i, 'wb').close() 
Example 41
Project: charm-plumgrid-gateway   Author: openstack   File: charm_helpers_sync.py    (Apache License 2.0)
def sync_pyfile(src, dest):
    src = src + '.py'
    src_dir = os.path.dirname(src)
    logging.info('Syncing pyfile: %s -> %s.' % (src, dest))
    if not os.path.exists(dest):
        os.makedirs(dest)
    shutil.copy(src, dest)
    if os.path.isfile(os.path.join(src_dir, '__init__.py')):
        shutil.copy(os.path.join(src_dir, '__init__.py'),
                    dest)
    ensure_init(dest) 
Example 42
Project: charm-plumgrid-gateway   Author: openstack   File: charm_helpers_sync.py    (Apache License 2.0)
def sync_directory(src, dest, opts=None):
    if os.path.exists(dest):
        logging.debug('Removing existing directory: %s' % dest)
        shutil.rmtree(dest)
    logging.info('Syncing directory: %s -> %s.' % (src, dest))

    shutil.copytree(src, dest, ignore=get_filter(opts))
    ensure_init(dest) 
Example 43
Project: python-driver   Author: bblfsh   File: sendmsg.py    (GNU General Public License v3.0)
def main():
    filesidx = 1
    outbuffer = sys.stdout

    files = sys.argv[filesidx:]

    d = {
        'action': 'ParseAST',
        'filepath': '',
        'content': '',
        'language': 'python',
    }

    for f in files:
        content = ''
        logging.info(f)
        for encoding in ('utf_8', 'iso8859_15', 'iso8859_15', 'gb2313',
                         'cp1251', 'cp1252', 'cp1250', 'shift-jis', 'gbk', 'cp1256',
                         'iso8859-2', 'euc_jp', 'big5', 'cp874', 'euc_kr', 'iso8859_7',
                         'cp1255'):
            with open(f, encoding=encoding) as infile:
                try:
                    content = infile.read()
                    break
                except UnicodeDecodeError:
                    continue

        d.update({
            'filepath': f,
            'content': content,
        })

        json.dump(d, sys.stdout, ensure_ascii=False)
        outbuffer.write('\n')
    outbuffer.close() 
Example 44
Project: AFSCbot   Author: HadManySons   File: helper_functions.py    (MIT License)
def print_and_log(text, error=False):
    print(text)
    if error:
        logging.error(time.strftime(LOG_TIME_FORMAT) + text)
    else:
        logging.info(time.strftime(LOG_TIME_FORMAT) + text) 
Example 45
Project: AFSCbot   Author: HadManySons   File: DownvoteRemover.py    (MIT License)
def proccessComments():
    for comment in reddit.redditor(str(reddit.user.me())).comments.new(limit=None):
        #if comment score is below the threshold, delete it
        if comment.score < deleteThreshold:
            comment.delete()

            permalink = "http://www.reddit.com" + \
                                   comment.permalink() + "/"

            print("Deleting comment: " + permalink)
            logging.info(time.strftime("%Y/%m/%d %H:%M:%S ") +
                         "Deleting comment: " + permalink) 
Example 46
Project: defuse_division   Author: lelandbatey   File: server.py    (GNU General Public License v3.0)
def local_address(fallback):
    """Returns the local address of this computer."""
    try:
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        s.connect(("8.8.8.8", 53))
        interface = s.getsockname()[0]
        s.close()
    except OSError:
        interface = fallback
        logging.info(
            'Cannot connect to network determine interface, using fallback "{}"'.
            format(fallback))

    return interface 
Example 47
Project: defuse_division   Author: lelandbatey   File: server.py    (GNU General Public License v3.0)
def localnet_register(host, port):
    '''
    Runs a never-exiting thread which only registers a local network service
    via Zeroconf and then responds to info requests.
    '''
    try:
        from zeroconf import ServiceInfo, Zeroconf
        from time import sleep
    except ImportError as e:
        logging.error(
            'Zeroconf not installed, cannot register this server on the local '
            'network. Other players may still connect, but they must be told '
            'what your hostname and port are (hostname: {}, port: {})'.format(
                host, port))
        return

    advertised_interface = local_address('127.0.0.1')

    info = ServiceInfo(
        "_defusedivision._tcp.local.",
        "{}{}._defusedivision._tcp.local.".format(
            host.replace('.', '-'), advertised_interface.replace('.', '-')),
        address=socket.inet_aton(advertised_interface),
        port=int(port),
        weight=0,
        priority=0,
        properties=b"")

    zc = Zeroconf()
    zc.register_service(info)
    atexit.register(lambda: zc.close())
    while True:
        sleep(0.1) 
Example 48
Project: defuse_division   Author: lelandbatey   File: server.py    (GNU General Public License v3.0)
def send_input(self, inpt):
        # Just pass the input to the parent bout, but with info saying that
        # this input comes from this player
        logging.debug(inpt)
        self.bout.send_input({'player': self.name, 'input': inpt}) 
Example 49
Project: defuse_division   Author: lelandbatey   File: game.py    (GNU General Public License v3.0)
def send_input(self, inpt):
        # Just pass the input to the parent bout, but with info saying that
        # this input comes from this player
        self.bout.send_input({'player': self.name, 'input': inpt}) 
Example 50
Project: defuse_division   Author: lelandbatey   File: game.py    (GNU General Public License v3.0)
def remove_player(self, playername):
        '''
        Method remove_player removes a player with the given name from this
        Bout's collection of players. If no player exists with the given name,
        does nothing.
        '''
        logging.info('Removing player: "{}"'.format(playername))
        if playername in self.players:
            del self.players[playername]
        if len(self.players) < self.max_players:
            self.ready = False
        self._push_state()