Python time.strftime() Examples

The following are 30 code examples of time.strftime(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module time, or try the search function.
Example #1
Source File: mlbgamedata.py    From mlbv with GNU General Public License v3.0 7 votes vote down vote up
def _get_header(border, game_date, show_scores, show_linescore):
        """Build the one- or two-line column header shown above the game listing."""
        weekday = datetime.strftime(datetime.strptime(game_date, "%Y-%m-%d"), "%a")
        date_hdr = '{:7}{} {}'.format('', game_date, weekday)
        lines = []
        if not show_scores:
            # no scores: omit the Score column entirely
            lines.append("{:48} {:^7} {pipe} {:^9} {pipe} {}".format(date_hdr, 'Series', 'State', 'Feeds', pipe=border.pipe))
            lines.append("{c_on}{}{pipe}{}{pipe}{}{c_off}"
                         .format(border.thickdash * 57, border.thickdash * 11, border.thickdash * 16,
                                 pipe=border.junction, c_on=border.border_color, c_off=border.color_off))
        elif show_linescore:
            # linescore view: just the date plus a full-width rule
            lines.append("{:56}".format(date_hdr))
            lines.append('{c_on}{dash}{c_off}'
                         .format(c_on=border.border_color, dash=border.thickdash*92, c_off=border.color_off))
        else:
            # scores without linescore: full column set including Score
            lines.append("{:48} {:^7} {pipe} {:^5} {pipe} {:^9} {pipe} {}"
                         .format(date_hdr, 'Series', 'Score', 'State', 'Feeds', pipe=border.pipe))
            lines.append("{c_on}{}{pipe}{}{pipe}{}{c_off}"
                         .format(border.thickdash * 57, border.thickdash * 7, border.thickdash * 11, border.thickdash * 16,
                                 pipe=border.junction, c_on=border.border_color, c_off=border.color_off))
        return lines
Example #2
Source File: std.py    From sqliv with GNU General Public License v3.0 7 votes vote down vote up
def stdin(message, params, upper=False, lower=False):
    """ask for option/input from user"""

    tag = colored("[OPT]", "magenta")
    stamp = colored("[{}]".format(time.strftime("%H:%M:%S")), "green")
    prompt = "{} {} {}: ".format(tag, stamp, message)

    def normalize(text):
        # apply the requested case folding, if any
        if upper:
            return text.upper()
        if lower:
            return text.lower()
        return text

    # keep prompting until the answer is one of the accepted params
    choice = normalize(raw_input(prompt))
    while choice not in params:
        choice = normalize(raw_input(prompt))

    return choice
Example #3
Source File: getmetrics_nfdump.py    From InsightAgent with Apache License 2.0 6 votes vote down vote up
def getFileNameList():
    """Build the set of nfcapd capture file names expected for the current
    reporting interval, minus files that were already sent.

    Walks backwards in 5-minute steps (nfcapd rotates a file every 5
    minutes) starting from the most recent 5-minute boundary.  Python 2
    only (uses long()); relies on module globals reportingConfigVars,
    closestNumber and getLastSentFiles.
    """
    # directory component: today's date as YYYY/MM/DD
    currentDate = time.strftime("%Y/%m/%d", time.localtime())
    fileNameList = []
    start_time_epoch = long(time.time())
    # number of 5-minute capture files covered by the reporting interval
    chunks = int(reportingConfigVars['reporting_interval'] / 5)
    startMin = time.strftime("%Y%m%d%H%M", time.localtime(start_time_epoch))
    # snap the minute-of-hour onto a 5-minute boundary
    closestMinute = closestNumber(int(startMin[-2:]), 5)
    if closestMinute < 10:
        # zero-pad single-digit minutes so the stamp stays 12 chars wide
        closestMinStr = '0' + str(closestMinute)
        newDate = startMin[:-2] + str(closestMinStr)
    else:
        newDate = startMin[:-2] + str(closestMinute)
    chunks -= 1
    # step back one 5-minute slot for the first (most recent) file
    currentTime = datetime.datetime.strptime(newDate, "%Y%m%d%H%M") - datetime.timedelta(minutes=5)
    closestMinute = time.strftime("%Y%m%d%H%M", currentTime.timetuple())
    filename = os.path.join(currentDate, "nfcapd." + closestMinute)
    fileNameList.append(filename)
    while chunks >= 0:
        chunks -= 1
        # keep stepping back 5 minutes per remaining chunk
        currentTime = datetime.datetime.strptime(closestMinute, "%Y%m%d%H%M") - datetime.timedelta(minutes=5)
        closestMinute = time.strftime("%Y%m%d%H%M", currentTime.timetuple())
        filename = os.path.join(currentDate, "nfcapd." + closestMinute)
        fileNameList.append(filename)

    # drop anything we already shipped
    return set(fileNameList) - getLastSentFiles()
Example #4
Source File: getbms.py    From BatteryMonitor with GNU General Public License v2.0 6 votes vote down vote up
def getbmsdat(self,port,command):
    """Issue a BMS command and return the response payload as bytes.

    Assumes *port* is an open, configured serial port.  The write/read is
    retried up to 5 times on serial errors; each failure is appended to the
    configured error file.

    Raises:
      IOError: if all 5 attempts fail.  (Previously 'reply' was left
      unbound in that case, producing a confusing NameError below.)
    """
    reply = None
    for attempt in range(5):
      try:
        port.write(command)
        reply = port.read(4)
        break
      except serial.serialutil.SerialException as err:
        # best-effort logging of the serial failure, then retry
        errfile = open(config['files']['errfile'], 'at')
        errfile.write(time.strftime("%Y%m%d%H%M%S ", time.localtime()) + str(err.args) + '\n')
        errfile.close()
    if reply is None:
      raise IOError('BMS command failed after 5 attempts')
    # NOTE(review): reply holds 4 bytes, so [3:5] yields a single byte —
    # confirm against the BMS protocol whether a 2-byte length was intended.
    x = int.from_bytes(reply[3:5], byteorder='big')  # payload length
    data = port.read(x)
    end = port.read(3)  # trailing checksum/terminator bytes, discarded
    return data
Example #5
Source File: summary.py    From BatteryMonitor with GNU General Public License v2.0 6 votes vote down vote up
def __init__(self):
    """Open the battery log and load the summary, restarting any period
    (hour/day/month/year) whose stored timestamp is no longer current."""
    self.currenttime = time.localtime()
    # timestamp string YYYYMMDDHHMMSS + trailing space; prefixes compared below
    printtime = time.strftime("%Y%m%d%H%M%S ", self.currenttime)
    # line-buffered append so each log entry reaches disk promptly
    self.logfile = open(config['files']['logfile'],'at',buffering=1)
    self.sampletime = time.time()
    self.prevtime = time.localtime()
    self.summary=loadsummary()

#      summary = open('/media/75cc9171-4331-4f88-ac3f-0278d132fae9/summary','w')
#      pickle.dump(hivolts, summary)
#      pickle.dump(lowvolts, summary)
#      summary.close()
    # Compare timestamp prefixes at decreasing granularity: 10 chars =
    # YYYYMMDDHH (hour), 8 = day, 6 = month, 4 = year.  On mismatch the
    # period restarts from a copy of the running totals.
    if self.summary['hour']['timestamp'][0:10] != printtime[0:10]:
      self.summary['hour'] = deepcopy(self.summary['current'])
    if self.summary['currentday']['timestamp'][0:8] != printtime[0:8]:
      self.summary['currentday'] = deepcopy(self.summary['current'])
    if self.summary['monthtodate']['timestamp'][0:6] != printtime[0:6]:
      self.summary['monthtodate'] = deepcopy(self.summary['current'])
    if self.summary['yeartodate']['timestamp'][0:4] != printtime[0:4]:
      self.summary['yeartodate'] = deepcopy(self.summary['current'])
Example #6
Source File: sifter.py    From sandsifter with BSD 3-Clause "New" or "Revised" License 6 votes vote down vote up
def dump_artifacts(r, injector, command_line):
    """Write the run summary, CPU info, and all collected artifacts to the log."""
    global arch
    sink = Tee(LOG, "w")
    preamble = [
        "#\n",
        "# %s\n" % command_line,
        "# %s\n" % injector.command,
        "#\n",
        "# insn tested: %d\n" % r.ic,
        "# artf found:  %d\n" % r.ac,
        "# runtime:     %s\n" % r.elapsed(),
        "# seed:        %d\n" % injector.settings.seed,
        "# arch:        %s\n" % arch,
        "# date:        %s\n" % time.strftime("%Y-%m-%d %H:%M:%S"),
        "#\n",
        "# cpu:\n",
    ]
    for text in preamble:
        sink.write(text)

    for info_line in get_cpu_info():
        sink.write("# %s\n" % info_line)

    # column legend, then artifacts sorted by key
    sink.write("# %s  v  l  s  c\n" % (" " * 28))
    for key in sorted(list(r.ad)):
        sink.write(result_string(key, r.ad[key]))
Example #7
Source File: getmetrics_kvm.py    From InsightAgent with Apache License 2.0 6 votes vote down vote up
def checkNewVMs(vmDomains):
    """Track the set of VM names; when it changes, persist the new list and
    archive today's csv so stale per-VM columns don't mix with new ones.

    Args:
        vmDomains: iterable of libvirt domain objects (uses .name()).
    """
    newVMNames = [vmDomain.name() for vmDomain in vmDomains]
    vmMetaDataFilePath = os.path.join(homePath, dataDirectory + "totalVMs.json")
    if not os.path.isfile(vmMetaDataFilePath):
        # first run: just record the current VM list
        towritePreviousVM = {"allVM": newVMNames}
        with open(vmMetaDataFilePath, 'w') as vmMetaDataFile:
            json.dump(towritePreviousVM, vmMetaDataFile)
    else:
        with open(vmMetaDataFilePath, 'r') as vmMetaDataFile:
            oldVMDomains = json.load(vmMetaDataFile)["allVM"]
        # list inequality replaces Python-2-only cmp(); order-sensitive, as before
        if newVMNames != oldVMDomains:
            towritePreviousVM = {"allVM": newVMNames}
            with open(vmMetaDataFilePath, 'w') as vmMetaDataFile:
                json.dump(towritePreviousVM, vmMetaDataFile)
            dataFilePath = os.path.join(homePath, dataDirectory + date + ".csv")
            if os.path.isfile(dataFilePath):
                # move today's csv aside under a timestamped name
                newFile = os.path.join(homePath, dataDirectory + date + "." + time.strftime("%Y%m%d%H%M%S") + ".csv")
                os.rename(dataFilePath, newFile)
Example #8
Source File: download.py    From glazier with Apache License 2.0 6 votes vote down vote up
def _StoreDebugInfo(self, file_stream, socket_error=None):
    """Gathers debug information for use when file downloads fail.

    Collected values are stored in self._debug_info (the socket error,
    every response header, and the current time) rather than returned.

    Args:
      file_stream: the file stream object of the file being downloaded,
        or None.
      socket_error: optional error raised from the socket class, stored
        alongside the other debug info.
    """
    if socket_error:
      self._debug_info['socket_error'] = socket_error
    if file_stream:
      # copy every response header into the debug map
      for name, value in file_stream.info().header_items():
        self._debug_info[name] = value
    self._debug_info['current_time'] = time.strftime('%A, %d %B %Y %H:%M:%S UTC')
Example #9
Source File: webcam.py    From SecPi with GNU General Public License v3.0 6 votes vote down vote up
def take_adv_picture(self, num_of_pic, seconds_between):
    """Take a series of webcam pictures, pausing between shots."""
    logging.debug("Webcam: Trying to take pictures")
    try:
        self.cam.start()
    except SystemError as se:  # device path wrong
        logging.error("Webcam: Wasn't able to find video device at device path: %s" % self.path)
        return
    except AttributeError as ae:  # init failed -> shouldn't happen but anyway
        logging.error("Webcam: Couldn't take pictures because video device wasn't initialized properly")
        return

    try:
        # grab one frame per shot and save it with a timestamped name
        for shot in range(num_of_pic):
            frame = self.cam.get_image()
            pygame.image.save(frame, "%s/%s_%d.jpg" % (self.data_path, time.strftime("%Y%m%d_%H%M%S"), shot))
            time.sleep(seconds_between)
    except Exception as e:
        logging.error("Webcam: Wasn't able to take pictures: %s" % e)

    self.cam.stop()
    logging.debug("Webcam: Finished taking pictures")
Example #10
Source File: getmetrics_jolokia.py    From InsightAgent with Apache License 2.0 6 votes vote down vote up
def checkNewInstances(instances):
    """Persist the known instance list; if it changed, save the new list
    and archive today's csv file under a timestamped name.

    Args:
        instances: iterable of sequences; element [1] is the instance name
            that gets tracked.
    """
    newInstances = [instance[1] for instance in instances]
    currentDate = time.strftime("%Y%m%d")
    instancesMetaDataFilePath = os.path.join(homePath, dataDirectory + "totalVMs.json")
    if not os.path.isfile(instancesMetaDataFilePath):
        # first run: just record the current instance list
        with open(instancesMetaDataFilePath, 'w') as instancesMetaDataFile:
            json.dump({"allInstances": newInstances}, instancesMetaDataFile)
        return
    with open(instancesMetaDataFilePath, 'r') as instancesMetaDataFile:
        oldInstances = json.load(instancesMetaDataFile)["allInstances"]
    # Python-3-safe replacement for cmp(); only (in)equality was used
    if newInstances == oldInstances:
        return
    with open(instancesMetaDataFilePath, 'w') as instancesMetaDataFile:
        json.dump({"allInstances": newInstances}, instancesMetaDataFile)
    currentCsv = os.path.join(homePath, dataDirectory + currentDate + ".csv")
    if os.path.isfile(currentCsv):
        # move today's csv aside under a timestamped name
        archived = os.path.join(homePath,
                                dataDirectory + currentDate + "." + time.strftime("%Y%m%d%H%M%S") + ".csv")
        os.rename(currentCsv, archived)
Example #11
Source File: stopcron.py    From InsightAgent with Apache License 2.0 6 votes vote down vote up
def sshStopCron(retry,hostname):
    """Disable the InsightAgent cron job on *hostname* over SSH by moving
    /etc/cron.d/ifagent aside with a timestamp suffix.

    Python 2 only (print statements, 'except X, e' syntax).  Uses module
    globals user/password and the work queue q; retries up to *retry*
    times on SSH auth failure.
    """
    global user
    global password
    if retry == 0:
        print "Stop Cron Failed in", hostname
        q.task_done()
        return
    try:
        s = paramiko.SSHClient()
        s.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        # 'password' doubles as a key-file path when it names an existing file
        if os.path.isfile(password) == True:
            s.connect(hostname, username=user, key_filename = password, timeout=60)
        else:
            s.connect(hostname, username=user, password = password, timeout=60)
        transport = s.get_transport()
        session = transport.open_session()
        session.set_combine_stderr(True)
        # pty needed so sudo can prompt for a password
        session.get_pty()
        command = "sudo mv /etc/cron.d/ifagent InsightAgent-master/ifagent."+time.strftime("%Y%m%d%H%M%S")+"\n"
        session.exec_command(command)
        stdin = session.makefile('wb', -1)
        stdout = session.makefile('rb', -1)
        # answer the sudo password prompt
        stdin.write(password+'\n')
        stdin.flush()
        session.recv_exit_status() #wait for exec_command to finish
        s.close()
        print "Stopped Cron in ", hostname
        q.task_done()
        return
    except paramiko.SSHException, e:
        print "Invalid Username/Password for %s:"%hostname , e
        return sshStopCron(retry-1,hostname)
Example #12
Source File: session.py    From mlbv with GNU General Public License v3.0 6 votes vote down vote up
def save_playlist_to_file(self, stream_url):
    """Fetch the m3u8 playlist at *stream_url* and write it to a dated temp file."""
    headers = {
        "Accept": "*/*",
        "Accept-Encoding": "identity",
        "Accept-Language": "en-US,en;q=0.8",
        "Connection": "keep-alive",
        "User-Agent": self.user_agent,
        "Cookie": self.access_token
    }
    # util.log_http(stream_url, 'get', headers, sys._getframe().f_code.co_name)
    playlist = self.session.get(stream_url, headers=headers).text
    playlist_file = os.path.join(util.get_tempdir(), 'playlist-{}.m3u8'.format(time.strftime("%Y-%m-%d")))
    LOG.info('Writing playlist to: %s', playlist_file)
    with open(playlist_file, 'w') as outf:
        outf.write(playlist)
    LOG.debug('save_playlist_to_file: %s', playlist)
Example #13
Source File: datetime.py    From Writer with MIT License 6 votes vote down vote up
def initUI(self):
    """Build the date/time chooser dialog: a combo of formatted stamps
    plus Insert/Cancel buttons."""
    self.box = QtGui.QComboBox(self)
    # one entry per configured format, rendered with the current time
    for fmt in self.formats:
        self.box.addItem(strftime(fmt))

    insert_btn = QtGui.QPushButton("Insert", self)
    insert_btn.clicked.connect(self.insert)

    cancel_btn = QtGui.QPushButton("Cancel", self)
    cancel_btn.clicked.connect(self.close)

    grid = QtGui.QGridLayout()
    grid.addWidget(self.box, 0, 0, 1, 2)
    grid.addWidget(insert_btn, 1, 0)
    grid.addWidget(cancel_btn, 1, 1)

    self.setGeometry(300, 300, 400, 80)
    self.setWindowTitle("Date and Time")
    self.setLayout(grid)
Example #14
Source File: cpp.py    From SublimeKSP with GNU General Public License v3.0 6 votes vote down vote up
def __init__(self,lexer=None):
    """Set up the preprocessor: probe the lexer for the tokens we need and
    predefine the __DATE__/__TIME__ macros."""
    self.lexer = lex.lexer if lexer is None else lexer
    self.macros = {}
    self.path = []
    self.temp_path = []

    # Probe the lexer for selected tokens
    self.lexprobe()

    now = time.localtime()
    self.define("__DATE__ \"%s\"" % time.strftime("%b %d %Y", now))
    self.define("__TIME__ \"%s\"" % time.strftime("%H:%M:%S", now))
    self.parser = None

    # -----------------------------------------------------------------------------
    # tokenize()
    #
    # Utility function. Given a string of text, tokenize into a list of tokens
    # ----------------------------------------------------------------------------- 
Example #15
Source File: study_robot.py    From 21tb_robot with MIT License 6 votes vote down vote up
def log(info):
    """Print *info* prefixed with a local timestamp and flush stdout (Python 2)."""
    print time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()), info
    # flush so log lines appear immediately even when stdout is piped
    sys.stdout.flush()
Example #16
Source File: standings.py    From mlbv with GNU General Public License v3.0 6 votes vote down vote up
def get_standings(standings_option='all', date_str=None, args_filter=None):
    """Displays standings."""
    LOG.debug('Getting standings for %s, option=%s', date_str, standings_option)
    if date_str == time.strftime("%Y-%m-%d"):
        # strip out date string from url (issue #5)
        date_str = None

    def wants(name):
        # thin alias; one call per original substring_match invocation
        return util.substring_match(standings_option, name)

    if wants('all') or wants('division'):
        display_division_standings(date_str, args_filter, rank_tag='divisionRank', header_tags=('league', 'division'))
        if wants('all'):
            print('')
    if wants('all') or wants('wildcard'):
        _display_standings('wildCard', 'Wildcard', date_str, args_filter, rank_tag='wildCardRank', header_tags=('league', ))
        if wants('all'):
            print('')
    if wants('all') or wants('overall') or wants('league') or wants('conference'):
        _display_standings('byLeague', 'League', date_str, args_filter, rank_tag='leagueRank', header_tags=('league', ))
        if wants('all'):
            print('')

    if wants('playoff') or wants('postseason'):
        _display_standings('postseason', 'Playoffs', date_str, args_filter)
    if wants('preseason'):
        _display_standings('preseason', 'Preseason', date_str, args_filter)
Example #17
Source File: preprocessor_plugins.py    From SublimeKSP with GNU General Public License v3.0 6 votes vote down vote up
def createBuiltinDefines(lines):
    """Prepend 'define' statements for the date/time builtin variables
    (__SEC__, __DAY__, __LOCALE_TIME__, ...) to the script."""
    timecodes = ['%S', '%M', '%H', '%I', '%p', '%d', '%m', '%Y', '%y', '%B', '%b', '%x', '%X']
    timenames = ['__SEC__','__MIN__','__HOUR__','__HOUR12__','__AMPM__','__DAY__','__MONTH__','__YEAR__','__YEAR2__','__LOCALE_MONTH__','__LOCALE_MONTH_ABBR__','__LOCALE_DATE__','__LOCALE_TIME__']
    defines = ['define {0} := \"{1}\"'.format(name, strftime(code, localtime()))
               for name, code in zip(timenames, timecodes)]

    newLines = collections.deque()

    # our defines go on top of the script, cloned from the first line
    for definition in defines:
        newLines.append(lines[0].copy(definition))

    # followed by the original, unmodified script
    newLines.extend(lines)

    # swap the original deque contents for the augmented version
    replaceLines(lines, newLines)

#================================================================================================= 
Example #18
Source File: __init__.py    From script.module.inputstreamhelper with MIT License 5 votes vote down vote up
def _update_widevine(self):
        """Prompts user to upgrade Widevine CDM when a newer version is available."""
        from time import localtime, strftime, time

        # Throttle: skip the check if the last one is within update_frequency days.
        last_check = get_setting_float('last_check', 0.0)
        if last_check and not self._first_run():
            if last_check + 3600 * 24 * get_setting_int('update_frequency', 14) >= time():
                log(2, 'Widevine update check was made on {date}', date=strftime('%Y-%m-%d %H:%M', localtime(last_check)))
                return

        # Determine the currently installed component and version.
        # Default 'component' up front: it was previously left unbound when the
        # config was missing, causing a NameError in the log calls below.
        component = 'Widevine CDM'
        wv_config = load_widevine_config()
        if not wv_config:
            log(3, 'Widevine config missing. Could not determine current version, forcing update.')
            current_version = '0'
        elif 'x86' in arch():
            component = 'Widevine CDM'
            current_version = wv_config['version']
        else:
            # non-x86 (ARM) devices get Widevine via a Chrome OS image
            component = 'Chrome OS'
            current_version = wv_config['version']

        latest_version = latest_widevine_version()
        if not latest_version:
            log(3, 'Updating widevine failed. Could not determine latest version.')
            return

        log(0, 'Latest {component} version is {version}', component=component, version=latest_version)
        log(0, 'Current {component} version installed is {version}', component=component, version=current_version)

        from distutils.version import LooseVersion  # pylint: disable=import-error,no-name-in-module,useless-suppression
        if LooseVersion(latest_version) > LooseVersion(current_version):
            log(2, 'There is an update available for {component}', component=component)
            # ask before installing; remember the decision either way
            if yesno_dialog(localize(30040), localize(30033), nolabel=localize(30028), yeslabel=localize(30034)):
                self.install_widevine()
            else:
                log(3, 'User declined to update {component}.', component=component)
        else:
            # up to date: record the successful check time
            set_setting('last_check', time())
            log(0, 'User is on the latest available {component} version.', component=component)
Example #19
Source File: kaggle_mnist_alexnet.py    From tensorflow-alexnet with MIT License 5 votes vote down vote up
def test():
    """Run MNIST inference over all test batches and write ImageId/Label
    rows to the Kaggle submission csv (TensorFlow-0.x-era API)."""
    # build graph
    inputs, labels, dropout_keep_prob, learning_rate = model.input_placeholder(FLAGS.image_size, FLAGS.image_channel,
                                                                               FLAGS.label_cnt)
    logits = model.inference(inputs, dropout_keep_prob)
    predict = tf.argmax(logits, 1)

    # session
    init = tf.initialize_all_variables()
    sess = tf.Session()
    sess.run(init)

    # tf saver: restore a checkpoint if one exists
    saver = tf.train.Saver()
    if os.path.isfile(FLAGS.save_name):
        saver.restore(sess, FLAGS.save_name)

    # ImageId counter for the submission rows (1-based)
    i = 1

    # load test data
    test_images, test_ranges = loader.load_mnist_test(FLAGS.batch_size)

    # ready for result file
    # NOTE(review): 'wb' + csv.writer is Python-2 style; Python 3 needs
    # open(..., 'w', newline='').  The file is also never closed explicitly.
    test_result_file = open(FLAGS.test_result, 'wb')
    csv_writer = csv.writer(test_result_file)
    csv_writer.writerow(['ImageId', 'Label'])

    total_start_time = time.time()

    # run prediction batch by batch; dropout disabled via keep_prob=1.0
    for file_start, file_end in test_ranges:
        test_x = test_images[file_start:file_end]
        predict_label = sess.run(predict, feed_dict={inputs: test_x, dropout_keep_prob: 1.0})

        for cur_predict in predict_label:
            csv_writer.writerow([i, cur_predict])
            print('[Result %s: %s]' % (i, cur_predict))
            i += 1
    print("[%s][total exec %s seconds" % (time.strftime("%Y-%m-%d %H:%M:%S"), (time.time() - total_start_time)))
Example #20
Source File: get_logs_mssql.py    From InsightAgent with Apache License 2.0 5 votes vote down vote up
def get_sql_query_time(timestamp, timestamp_format):
    """Render an epoch-millisecond timestamp for use in a SQL query:
    quoted and formatted per *timestamp_format*, or the raw number when
    no format is configured."""
    if timestamp_format != Constant.NONE:
        # epoch ms -> UTC struct_time -> quoted formatted string
        return "\'" + time.strftime(timestamp_format, time.gmtime(timestamp / 1000.0)) + "\'"
    return str(timestamp)
Example #21
Source File: __init__.py    From script.module.inputstreamhelper with MIT License 5 votes vote down vote up
def info_dialog(self):
        """ Show an Info box with useful info e.g. for bug reports"""
        text = localize(30800, version=kodi_version(), system=system_os(), arch=arch()) + '\n'  # Kodi information
        text += '\n'

        # add-on / InputStream versions, flagged when either is disabled
        disabled_str = ' ({disabled})'.format(disabled=localize(30054))
        ishelper_state = disabled_str if get_setting_bool('disabled', False) else ''
        istream_state = disabled_str if not self._inputstream_enabled() else ''
        text += localize(30810, version=addon_version(), state=ishelper_state) + '\n'
        text += localize(30811, version=self._inputstream_version(), state=istream_state) + '\n'
        text += '\n'

        if system_os() == 'Android':
            text += localize(30820) + '\n'
        else:
            from time import localtime, strftime
            # Widevine CDM details: last modified time, lib version, path
            if get_setting_float('last_modified', 0.0):
                wv_updated = strftime('%Y-%m-%d %H:%M', localtime(get_setting_float('last_modified', 0.0)))
            else:
                wv_updated = 'Never'
            text += localize(30821, version=self._get_lib_version(widevinecdm_path()), date=wv_updated) + '\n'
            if arch() in ('arm', 'arm64'):  # Chrome OS version
                wv_cfg = load_widevine_config()
                if wv_cfg:
                    text += localize(30822, name=wv_cfg['hwidmatch'].split()[0].lstrip('^'), version=wv_cfg['version']) + '\n'
            # last update-check time
            if get_setting_float('last_check', 0.0):
                wv_check = strftime('%Y-%m-%d %H:%M', localtime(get_setting_float('last_check', 0.0)))
            else:
                wv_check = 'Never'
            text += localize(30823, date=wv_check) + '\n'
            text += localize(30824, path=ia_cdm_path()) + '\n'

        text += '\n'

        text += localize(30830, url=config.SHORT_ISSUE_URL)  # Report issues

        # log an ASCII-safe copy, then display in Kodi's text viewer
        log(2, '\n{info}'.format(info=kodi_to_ascii(text)))
        textviewer(localize(30901), text)
Example #22
Source File: MonoTime.py    From rtp_cluster with BSD 2-Clause "Simplified" License 5 votes vote down vote up
def ftime(self, base = None):
        """Format this object's real time as a UTC string; when *base* is
        given, rebase using the monotonic-clock gap between the two."""
        if base is None:
            realt = self.realt
        else:
            # shift base's real time back by how far it is ahead of us monotonically
            realt = base.realt - (base.monot - self.monot)
        return strftime('%Y-%m-%d %H:%M:%S+00', gmtime(round(realt)))
Example #23
Source File: installer.py    From glazier with Apache License 2.0 5 votes vote down vote up
def Run(self):
    """Sleep for the duration given in args[0] (seconds), logging first;
    an optional args[1] is included in the log message as the reason."""
    duration = int(self._args[0])
    readable = time.strftime('%H:%M:%S', time.gmtime(duration))

    if len(self._args) > 1:
        logging.info('Sleeping for %s (%s).', readable, str(self._args[1]))
    else:
        logging.info('Sleeping for %s before continuing...', readable)
    time.sleep(duration)
Example #24
Source File: getmetrics_jolokia.py    From InsightAgent with Apache License 2.0 5 votes vote down vote up
def writeDataToFile(headerList, currentDataList):
    """Append one csv row to today's data file, writing the header row
    first when the file is new or empty."""
    with open(os.path.join(homePath, dataDirectory, time.strftime("%Y%m%d") + ".csv"), 'a+') as csvDataFile:
        # 'a+' opens positioned at EOF, so readlines() always returned []
        # and the header was rewritten on every call; rewind first.
        csvDataFile.seek(0)
        dataFileLines = len(csvDataFile.readlines())
        # after readlines() the position is back at EOF, so writes append
        if dataFileLines < 1:
            csvDataFile.write(listToCSVRow(headerList))
            csvDataFile.write("\n")
        csvDataFile.write(listToCSVRow(currentDataList))
        csvDataFile.write("\n")
Example #25
Source File: AboutTime.py    From tools_python with Apache License 2.0 5 votes vote down vote up
def get_time():
    """Return greeting lines with today's date, the weekday name, and the
    percentage of the current year already elapsed."""
    # time.strftime("%F-%u-%j"): F = YYYY-MM-DD, u = weekday number,
    # j = day of year; e.g. parsed -> [2019, 4, 18, 4, 108]
    year, month, day, weekday_num, day_of_year = [
        int(part) for part in time.strftime("%F-%u-%j").split('-')
    ]

    # weekday name looked up in the module-level 'week' table
    week_d = week[weekday_num]

    # leap years have 366 days, common years 365
    days_in_year = 366 if calendar.isleap(year) else 365
    percent = round(day_of_year * 100 / days_in_year, 2)

    time_content = ('各位帅哥美女们,大家早上好!\n\n今天是:%d 年 %d 月 %d 日,星期%s~' % (year, month, day, week_d),
                    '\n\n报告主人!报告主人!报告主人!\n\n%d 年,岁月已陪伴主人走过:%.2f%%。' % (year, percent) + "余下时光,请主人们温柔以待哦!")
    return time_content

# print(get_time()) 
Example #26
Source File: getlogs_mysql.py    From InsightAgent with Apache License 2.0 5 votes vote down vote up
def get_sql_query_time(timestamp, timestamp_format):
    """Convert an epoch-millisecond *timestamp* into a SQL-ready literal:
    raw digits when no format is configured, otherwise a quoted UTC string."""
    if timestamp_format == Constant.NONE:
        return str(timestamp)
    formatted = time.strftime(timestamp_format, time.gmtime(timestamp / 1000.0))
    return "\'" + formatted + "\'"
Example #27
Source File: collectdReportMetrics.py    From InsightAgent with Apache License 2.0 5 votes vote down vote up
def update_endtime_in_config(metric_data_l, reporting_interval_l, new_prev_endtime_epoch_l, hostname_l):
    """Persist the new reporting end time and ship the collected metrics.

    new_prev_endtime_epoch_l is in epoch milliseconds; 0 means nothing was
    collected, so nothing is sent.  Python 2 only (print statement, long()).
    """
    if new_prev_endtime_epoch_l == 0:
        print "No data is reported"
    else:
        # epoch ms -> whole seconds (rounded up) -> local timestamp string
        new_prev_endtimeinsec = math.ceil(long(new_prev_endtime_epoch_l) / 1000.0)
        new_prev_endtime = time.strftime(
            "%Y%m%d%H%M%S", time.localtime(long(new_prev_endtimeinsec)))
        update_timestamp(new_prev_endtime)
        send_data(metric_data_l, reporting_interval_l, hostname_l)
    return
Example #28
Source File: collectdReportMetrics.py    From InsightAgent with Apache License 2.0 5 votes vote down vote up
def set_from_reporting_config_json():
    """Read reporting_config.json and derive the reporting interval (in
    minutes), host names, previous end time, collectd csv path, and
    today's date string."""
    report_file_name = "reporting_config.json"

    # load the reporting configuration
    with open(os.path.join(home_path, report_file_name), 'r') as f:
        config = json.load(f)

    # a trailing 's' means the interval is given in seconds; convert to minutes
    interval_spec = config['reporting_interval']
    if interval_spec[-1:] == 's':
        reporting_interval_l = float(interval_spec[:-1]) / 60
    else:
        reporting_interval_l = int(interval_spec)

    prev_endtime_l = config['prev_endtime']

    hostname_l = socket.getfqdn()
    hostname_short_l = socket.gethostname().partition(".")[0]

    # prefer the short-hostname csv dir, then the FQDN dir, then any existing dir
    csvpath_l = "/var/lib/collectd/csv/" + hostname_short_l
    if not os.path.exists(csvpath_l):
        csvpath_l = "/var/lib/collectd/csv/" + hostname_l
    if not os.path.exists(csvpath_l):
        directory_list = os.listdir("/var/lib/collectd/csv")
        if len(directory_list) > 0:
            csvpath_l = "/var/lib/collectd/csv/" + directory_list[0]

    date_l = time.strftime("%Y-%m-%d")
    return reporting_interval_l, hostname_l, hostname_short_l, prev_endtime_l, csvpath_l, date_l


# deletes old csv files from a directory 
Example #29
Source File: model.py    From cs294-112_hws with MIT License 5 votes vote down vote up
def __init__(self, FLAGS, algorithm, expert_returns=None, expert_policy_fn=None):
        """Build the imitation-learning graph (behavioral cloning or DAgger).

        Args:
            FLAGS: dict-like config; reads 'max_gradient_norm' and 'learning_rate'.
            algorithm: 'behavioral_cloning' or 'dagger' (case/whitespace-insensitive).
            expert_returns: optional expert episode returns, stored for reference.
            expert_policy_fn: expert policy callable; required for DAgger.

        Raises:
            NotImplementedError: for an unrecognized algorithm name.
            ValueError: if DAgger is requested without an expert policy.
        """
        print('Initializing the model...')
        if not algorithm.strip().lower() in ['behavioral_cloning', 'dagger']:
            raise NotImplementedError('Algorithm {} not implemented.'.format(algorithm))
        self.FLAGS = FLAGS
        self.algorithm = algorithm.strip().lower()
        self.expert_returns = expert_returns
        self.expert_policy_fn = expert_policy_fn
        if self.algorithm == 'dagger' and self.expert_policy_fn is None:
            raise ValueError('No expert policy found.')
        
        # unique per-run scope so checkpoints/summaries from runs don't collide
        self.scope = self.algorithm + '_' + time.strftime('%Y-%m-%d-%H-%M-%S')
        
        with tf.variable_scope(
            self.scope, 
            initializer=tf.keras.initializers.he_normal(), 
            regularizer=tf.contrib.layers.l2_regularizer(scale=3e-7), 
            reuse=tf.AUTO_REUSE
        ):
            self.add_placeholders()
            self.build_graph()
            self.add_loss()
            
        # gradients clipped by global norm before being applied
        params = tf.trainable_variables()
        gradients = tf.gradients(self.loss, params)
        self.gradient_norm = tf.global_norm(gradients)
        clipped_gradients, _ = tf.clip_by_global_norm(gradients, self.FLAGS['max_gradient_norm'])
        self.param_norm = tf.global_norm(params)
        
        self.global_step = tf.Variable(0, name="global_step", trainable=False)
        lr = self.FLAGS['learning_rate']
        opt = tf.train.AdamOptimizer(learning_rate=lr, beta1=0.8, beta2=0.999, epsilon=1e-7)
        self.updates = opt.apply_gradients(zip(clipped_gradients, params), global_step=self.global_step)
        
        # two savers: rolling latest checkpoint and best-so-far model
        self.saver = tf.train.Saver(tf.global_variables(), max_to_keep=1)
        self.bestmodel_saver = tf.train.Saver(tf.global_variables(), max_to_keep=1)
        self.summaries = tf.summary.merge_all()
Example #30
Source File: logger.py    From TradzQAI with Apache License 2.0 5 votes vote down vote up
def check_log_file(self, log_name):
        """Return the name of an existing log file for today whose prefix is
        *log_name*; fall back to a fresh '<log_name>_<date>.txt' name."""
        today = time.strftime("%Y_%m_%d")
        for entry in os.listdir(self.log_path):
            # match on today's date stamp plus the leading name component
            if today in entry and entry.split('_')[0] == log_name:
                return entry
        return "{}_{}.txt".format(log_name, today)