Python datetime.time.min Examples

The following are 19 code examples of datetime.time.min (an attribute, not a function). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module datetime.time, or try the search function.
Example #1
Source File: api.py    From selene-backend with GNU Affero General Public License v3.0 6 votes vote down vote up
def create_partition(self, partition_date: date):
        """Create a daily partition for the metric.api_history table."""
        # The partition spans the whole day: midnight up to the last
        # representable moment before the next midnight.
        day_start = datetime.combine(partition_date, time.min)
        day_end = datetime.combine(partition_date, time.max)
        partition_suffix = partition_date.strftime('%Y%m%d')

        partition_request = self._build_db_request(
            sql_file_name='create_api_metric_partition.sql',
            args=dict(start_ts=str(day_start), end_ts=str(day_end)),
            sql_vars=dict(partition=partition_suffix)
        )
        self.cursor.execute(partition_request)

        index_request = self._build_db_request(
            sql_file_name='create_api_metric_partition_index.sql',
            sql_vars=dict(partition=partition_suffix)
        )
        self.cursor.execute(index_request)
Example #2
Source File: fact_notification_status_dao.py    From notifications-api with MIT License 6 votes vote down vote up
def fetch_notification_status_for_day(process_day, notification_type):
    """Collect fact-status rows for every service for a single BST day."""
    # Day boundaries are expressed in BST and converted to UTC for querying;
    # the range is [midnight, next midnight).
    start_date = convert_bst_to_utc(datetime.combine(process_day, time.min))
    end_date = convert_bst_to_utc(datetime.combine(process_day + timedelta(days=1), time.min))

    current_app.logger.info("Fetch ft_notification_status for {} to {}".format(start_date, end_date))

    all_data_for_process_day = []
    # For each service query notifications or notification_history for the
    # day, depending on their data retention.
    for service in Service.query.all():
        table = get_notification_table_to_use(service, notification_type, process_day, has_delete_task_run=False)
        all_data_for_process_day += query_for_fact_status_data(
            table=table,
            start_date=start_date,
            end_date=end_date,
            notification_type=notification_type,
            service_id=service.id
        )

    return all_data_for_process_day
Example #3
Source File: fact_billing_dao.py    From notifications-api with MIT License 6 votes vote down vote up
def fetch_billing_data_for_day(process_day, service_id=None, check_permissions=False):
    """Gather billing rows for one BST day, optionally for a single service."""
    start_date = convert_bst_to_utc(datetime.combine(process_day, time.min))
    end_date = convert_bst_to_utc(datetime.combine(process_day + timedelta(days=1), time.min))
    current_app.logger.info("Populate ft_billing for {} to {}".format(start_date, end_date))

    if service_id:
        services = [Service.query.get(service_id)]
    else:
        services = Service.query.all()

    transit_data = []
    for service in services:
        for notification_type in (SMS_TYPE, EMAIL_TYPE, LETTER_TYPE):
            # When asked to check permissions, skip types the service may not send.
            if check_permissions and not service.has_permission(notification_type):
                continue
            table = get_notification_table_to_use(service, notification_type, process_day,
                                                  has_delete_task_run=False)
            transit_data += _query_for_billing_data(
                table=table,
                notification_type=notification_type,
                start_date=start_date,
                end_date=end_date,
                service=service
            )

    return transit_data
Example #4
Source File: munkiinstalls.py    From sal with Apache License 2.0 6 votes vote down vote up
def get_context(self, queryset, **kwargs):
        """Build context containing 15 days of per-status counts (today plus
        the previous 14), one dict per day."""
        context = self.super_get_context(queryset, **kwargs)

        now = timezone.now()
        context['data'] = []
        # Walk backwards one day at a time, querying each full-day range.
        for offset in range(0, 15):
            day = now - timedelta(days=offset)
            time_range = (
                timezone.make_aware(datetime.combine(day, time.min)),
                timezone.make_aware(datetime.combine(day, time.max)))
            day_status = {}
            for key in STATUSES:
                day_status[key] = self._filter(queryset, key, time_range)
            day_status['date'] = time_range[0].strftime("%Y-%m-%d")
            context['data'].append(day_status)
        return context
Example #5
Source File: columns.py    From openpyxl-templates with MIT License 5 votes vote down vote up
def to_excel(self, value, row_type=None):
        """Convert a date or datetime into an Excel serial-day number.

        Raises UnableToParseDatetime for non-datetime values or values
        before the Excel epoch.
        """
        # Exact-type check on purpose: datetime subclasses date, and
        # datetimes must not be re-combined with midnight here.
        if type(value) == date:
            value = datetime.combine(value, time.min)
        if not isinstance(value, datetime):
            raise UnableToParseDatetime(value=value)

        # +2 maps 1900-01-01 to serial 2; the pre-61 adjustment below turns
        # that into serial 1, matching Excel's day numbering.
        elapsed = value - datetime(year=1900, month=1, day=1, tzinfo=value.tzinfo)
        serial = elapsed.days + elapsed.seconds / self.SECONDS_PER_DAY + 2

        # Excel incorrectly assumes 1900 to be a leap year, so serials before
        # the phantom 1900-02-29 (serial 61) are shifted down by one.
        if serial < 61:
            if serial < 1:
                raise UnableToParseDatetime(value=value)
            serial -= 1
        return serial
Example #6
Source File: toplevel.py    From arctic with GNU Lesser General Public License v2.1 5 votes vote down vote up
def add(self, date_range, library_name):
        """
        Adds the library with the given date range to the underlying collection of libraries used by this store.
        The underlying libraries should not overlap as the date ranges are assumed to be CLOSED_CLOSED by this function
        and the rest of the class.

        Arguments:

        date_range: A date range provided on the assumption that it is CLOSED_CLOSED. If for example the underlying
        libraries were split by year, the start of the date range would be datetime.datetime(year, 1, 1) and the end
        would be datetime.datetime(year, 12, 31, 23, 59, 59, 999000). The date range must fall on UTC day boundaries,
        that is the start must be at midnight and the end must be 1 millisecond before midnight.

        library_name: The name of the underlying library. This must be the name of a valid Arctic library
        """
        # check that the library is valid
        try:
            self._arctic_lib.arctic[library_name]
        except Exception as e:
            logger.error("Could not load library")
            raise e
        assert date_range.start and date_range.end, "Date range should have start and end properties {}".format(date_range)
        # Normalise both endpoints to UTC: aware datetimes are converted,
        # naive ones are assumed to already be in UTC.
        start = date_range.start.astimezone(mktz('UTC')) if date_range.start.tzinfo is not None else date_range.start.replace(tzinfo=mktz('UTC'))
        end = date_range.end.astimezone(mktz('UTC')) if date_range.end.tzinfo is not None else date_range.end.replace(tzinfo=mktz('UTC'))
        # NOTE(review): end_time_min appears to be a module-level constant for
        # 1 ms before midnight (23:59:59.999000) -- confirm its definition above.
        assert start.time() == time.min and end.time() == end_time_min, "Date range should fall on UTC day boundaries {}".format(date_range)
        # check that the date range does not overlap
        library_metadata = self._get_library_metadata(date_range)
        # Overlap exists if any *other* library already covers part of this range.
        if len(library_metadata) > 1 or (len(library_metadata) == 1 and library_metadata[0] != library_name):
            raise OverlappingDataException("""There are libraries that overlap with the date range:
library: {}
overlapping libraries: {}""".format(library_name, [l.library for l in library_metadata]))
        # Upsert so re-adding the same library simply refreshes its range.
        self._collection.update_one({'library_name': library_name},
                                    {'$set': {'start': start, 'end': end}}, upsert=True)
Example #7
Source File: models.py    From rankedftw with GNU Affero General Public License v3.0 5 votes vote down vote up
def start_time(self, time_part=time.min):
        """Return start_date combined with ``time_part`` as a UTC-aware datetime."""
        combined = datetime.combine(self.start_date, time_part)
        return combined.replace(tzinfo=timezone.utc)
Example #8
Source File: models.py    From rankedftw with GNU Affero General Public License v3.0 5 votes vote down vote up
def set_data_time(self, season, cpp):
        """ Calculate and set data_times based off actual data and season. """

        min_unix, max_unix = cpp.min_max_data_time()
        self.min_data_time = from_unix(min_unix)
        self.max_data_time = from_unix(max_unix)

        # No data at all -> anchor every timestamp at the season start.
        if not (self.min_data_time or self.max_data_time):
            self.min_data_time = self.max_data_time = self.data_time = season.start_time()

        if season.is_open():
            self.data_time = self.max_data_time
        else:
            # Closed season: never report a data time past the season end.
            self.data_time = min(self.max_data_time, season.end_time())
Example #9
Source File: date_util.py    From notifications-api with MIT License 5 votes vote down vote up
def get_financial_year_for_datetime(start_date):
    """Return the financial year (starting 1 April) containing start_date."""
    # Promote a bare date to midnight so the comparison below is
    # datetime-to-datetime.
    if type(start_date) == date:
        start_date = datetime.combine(start_date, time.min)

    year = start_date.year
    # Anything before 1 April belongs to the previous financial year.
    return year - 1 if start_date < get_april_fools(year) else year
Example #10
Source File: main.py    From allura with Apache License 2.0 5 votes vote down vote up
def index(self, **kw):
        """Render this controller's day of chat messages with prev/next day links."""
        day_start = datetime.combine(self.day, time.min)
        day_end = datetime.combine(self.day, time.max)
        query = dict(timestamp={'$gte': day_start, '$lte': day_end})
        messages = CM.ChatMessage.query.find(query).sort('timestamp').all()
        one_day = timedelta(days=1)
        return dict(
            day=self.day,
            messages=messages,
            prev=c.app.url + (self.day - one_day).strftime('%Y/%m/%d/'),
            next=c.app.url + (self.day + one_day).strftime('%Y/%m/%d/'))
Example #11
Source File: contests.py    From online-judge with GNU Affero General Public License v3.0 5 votes vote down vote up
def get_table(self):
        """Build the month's calendar as rows of ContestDay cells."""
        weeks = Calendar(self.firstweekday).monthdatescalendar(self.year, self.month)
        # Contest data covers the full displayed range, including padding days
        # that belong to the previous/next month.
        first_day = make_aware(datetime.combine(weeks[0][0], time.min))
        last_day = make_aware(datetime.combine(weeks[-1][-1], time.min))
        starts, ends, oneday = self.get_contest_data(first_day, last_day)

        table = []
        for week in weeks:
            row = []
            for weekday, day in enumerate(week):
                row.append(ContestDay(
                    date=day, weekday=self.weekday_classes[weekday],
                    is_pad=day.month != self.month,
                    is_today=day == self.today, starts=starts[day],
                    ends=ends[day], oneday=oneday[day],
                ))
            table.append(row)
        return table
Example #12
Source File: contests.py    From online-judge with GNU Affero General Public License v3.0 5 votes vote down vote up
def get_context_data(self, **kwargs):
        """Add calendar navigation context: title, current month, and
        prev/next month links, 404-ing for months with no contests in range."""
        context = super(ContestCalendar, self).get_context_data(**kwargs)

        try:
            month = date(self.year, self.month, 1)
        except ValueError:
            # Bogus year/month from the URL.
            raise Http404()
        else:
            context['title'] = _('Contests in %(month)s') % {'month': date_filter(month, _("F Y"))}

        # Viewable range: lower bound is the earliest contest's month (or the
        # current month if there is none); upper bound is the latest contest
        # end, but never earlier than the current month.
        dates = Contest.objects.aggregate(min=Min('start_time'), max=Max('end_time'))
        min_month = (self.today.year, self.today.month)
        if dates['min'] is not None:
            min_month = dates['min'].year, dates['min'].month
        max_month = (self.today.year, self.today.month)
        if dates['max'] is not None:
            max_month = max((dates['max'].year, dates['max'].month), (self.today.year, self.today.month))

        month = (self.year, self.month)
        if month < min_month or month > max_month:
            # 404 is valid because it merely declares the lack of existence, without any reason
            raise Http404()

        context['now'] = timezone.now()
        context['calendar'] = self.get_table()
        context['curr_month'] = date(self.year, self.month, 1)

        # Month arithmetic with year rollover; None disables the nav link.
        if month > min_month:
            context['prev_month'] = date(self.year - (self.month == 1), 12 if self.month == 1 else self.month - 1, 1)
        else:
            context['prev_month'] = None

        if month < max_month:
            context['next_month'] = date(self.year + (self.month == 12), 1 if self.month == 12 else self.month + 1, 1)
        else:
            context['next_month'] = None
        return context
Example #13
Source File: fields.py    From Dailyfresh-B2C with Apache License 2.0 5 votes vote down vote up
def compress(self, data_list):
        """Collapse a (start, stop) date pair into a slice of aware datetimes,
        or None when no data was supplied."""
        if not data_list:
            return None
        start_date, stop_date = data_list
        if start_date:
            # Lower bound: midnight at the start of the day.
            start_date = handle_timezone(
                datetime.combine(start_date, time.min),
                False
            )
        if stop_date:
            # Upper bound: the last representable moment of the day.
            stop_date = handle_timezone(
                datetime.combine(stop_date, time.max),
                False
            )
        return slice(start_date, stop_date)
Example #14
Source File: generators.py    From appkernel with Apache License 2.0 5 votes vote down vote up
def to_wireformat(self, instance_value: date):
        """Promote a date (or datetime) to a midnight datetime; pass anything
        else through unchanged."""
        if not isinstance(instance_value, date):
            return instance_value
        # datetime is a subclass of date, so datetimes are also normalised
        # to midnight of their calendar day.
        return datetime.combine(instance_value, dtime.min)
Example #15
Source File: TestSTARRDemographicsConversion.py    From CDSS with GNU General Public License v3.0 5 votes vote down vote up
def death_date_tuple_from(birth_list, row):
        """Derive a death-event tuple by rewriting a birth row template."""
        fields = list(birth_list)
        # Overwrite the event-type/name columns and the timestamp column.
        fields[5] = "Death"
        fields[6] = "Death Date"
        death_ts = datetime.combine(row[2], time.min)
        fields[7] = death_ts.replace(tzinfo=pytz.UTC)
        return tuple(fields)
Example #16
Source File: root.py    From allura with Apache License 2.0 4 votes vote down vote up
def stats_data(self, begin=None, end=None, forum=None, **kw):
        """Return per-day post counts for the date range as a list of
        [epoch_millis, count] pairs, zero-filled for days with no posts.

        begin/end default to the last 60 days; forum optionally restricts
        the counts to one forum shortname.
        """
        end = end or date.today()
        begin = begin or end - timedelta(days=60)

        # Match the named forum, or all forums when forum is falsy.
        discussion_id_q = {
            '$in': [d._id for d in c.app.forums
                    if d.shortname == forum or not forum]
        }
        # must be ordered dict, so that sorting by this works properly
        grouping = OrderedDict()
        grouping['year'] = {'$year': '$timestamp'}
        grouping['month'] = {'$month': '$timestamp'}
        grouping['day'] = {'$dayOfMonth': '$timestamp'}
        mongo_data = model.ForumPost.query.aggregate([
            {'$match': {
                'discussion_id': discussion_id_q,
                'status': 'ok',
                'timestamp': {
                    # convert date to datetime to make pymongo happy
                    '$gte': datetime.combine(begin, time.min),
                    '$lte': datetime.combine(end, time.max),
                },
                'deleted': False,
            }},
            {'$group': {
                '_id': grouping,
                'posts': {'$sum': 1},
            }},
            {'$sort': {
                '_id': pymongo.ASCENDING,
            }},
        ], cursor={})

        def reformat_data(mongo_data):
            # Yield [epoch_millis, count] items, inserting explicit zeros for
            # every day between aggregation results so the series is continuous.
            def item(day, val):
                return [
                    calendar.timegm(day.timetuple()) * 1000,
                    val
                ]

            next_expected_date = begin
            for d in mongo_data:
                this_date = datetime(
                    d['_id']['year'], d['_id']['month'], d['_id']['day'])
                for day in h.daterange(next_expected_date, this_date):
                    yield item(day, 0)
                yield item(this_date, d['posts'])
                next_expected_date = this_date + timedelta(days=1)
            # Zero-fill the tail through the end of the requested range.
            for day in h.daterange(next_expected_date, end + timedelta(days=1)):
                yield item(day, 0)

        return dict(
            begin=begin,
            end=end,
            data=list(reformat_data(mongo_data)),
        )
Example #17
Source File: checkup.py    From GarminDB with GNU General Public License v2.0 4 votes vote down vote up
def goals(self):
        """Do a checkup of the user's goals.

        Scans the last ``look_back_days`` of daily summaries, tallying how
        often the daily step and floor goals were met, and — for complete
        Monday-to-Sunday weeks — whether the weekly intensity-minutes goal
        was met.  Results are written to the log.
        """
        look_back_days = GarminDBConfigManager.checkup('look_back_days')
        end_ts = datetime.now()
        start_ts = end_ts - timedelta(days=look_back_days)
        results = GarminDB.DailySummary.get_for_period(self.garmin_db, start_ts, end_ts)
        step_goal_days = 0
        step_goal_days_in_week = 0
        floors_goal_days = 0
        floor_goal_days_in_week = 0
        days_in_week = 0
        # time.min (00:00) acts as a zero accumulator for durations,
        # advanced via Fit.conversions.add_time below.
        intensity_time = time.min
        intensity_time_goal = time.min
        intensity_weeks = 0
        intensity_goal_weeks = 0
        for result in results:
            # Monday (weekday 0) starts a fresh week of counters.
            if result.day.weekday() == 0:
                days_in_week = 0
                step_goal_days_in_week = 0
                floor_goal_days_in_week = 0
                intensity_time = time.min
                intensity_time_goal = time.min
            days_in_week += 1
            if result.steps_goal_percent >= 100:
                step_goal_days += 1
                step_goal_days_in_week += 1
            else:
                logger.debug('Steps: goal not met on %s', result.day)
            if result.floors_goal_percent >= 100:
                floors_goal_days += 1
                floor_goal_days_in_week += 1
            else:
                logger.debug('Floors: goal not met on %s', result.day)
            intensity_time = Fit.conversions.add_time(intensity_time, result.intensity_time)
            intensity_time_goal = Fit.conversions.add_time(intensity_time_goal, result.intensity_time_goal)
            # Sunday (weekday 6) closes the week; only full 7-day weeks count.
            if result.day.weekday() == 6:
                if days_in_week == 7:
                    intensity_weeks += 1
                    if step_goal_days_in_week < days_in_week:
                        logger.info('Steps: goal not met %d days for week ending in %s', days_in_week - step_goal_days_in_week, result.day)
                    if floor_goal_days_in_week < days_in_week:
                        logger.info('Floors: goal not met %d days for week ending in %s', days_in_week - floor_goal_days_in_week, result.day)
                    if intensity_time >= intensity_time_goal:
                        intensity_goal_weeks += 1
                    else:
                        logger.info('Intensity mins: goal not met for week ending in %s', result.day)
        logger.info('Summary:')
        logger.info('Steps: met goal %d of last %d days', step_goal_days, look_back_days)
        logger.info('Floors: met goal %d of last %d days', floors_goal_days, look_back_days)
        logger.info('Intensity mins: met goal %d of last %d weeks', intensity_goal_weeks, intensity_weeks)
Example #18
Source File: test_fact_notification_status_dao.py    From notifications-api with MIT License 4 votes vote down vote up
def test_update_fact_notification_status(notify_db_session):
    """update_fact_notification_status writes one fact row per service/type
    for the processed day, ignoring notifications created on other days."""
    first_service = create_service(service_name='First Service')
    first_template = create_template(service=first_service)
    second_service = create_service(service_name='second Service')
    second_template = create_template(service=second_service, template_type='email')
    third_service = create_service(service_name='third Service')
    third_template = create_template(service=third_service, template_type='letter')

    create_service_data_retention(second_service, 'email', days_of_retention=3)

    # Freeze time so the fixture notifications land exactly on process_day.
    process_day = date.today() - timedelta(days=5)
    with freeze_time(datetime.combine(process_day, time.min)):
        create_notification(template=first_template, status='delivered')

        # 2nd service email has 3 day data retention - data has been moved to history and doesn't exist in notifications
        create_notification_history(template=second_template, status='temporary-failure')

        create_notification(template=third_template, status='sending')

    # these created notifications from a different day get ignored
    with freeze_time(datetime.combine(date.today() - timedelta(days=4), time.min)):
        create_notification(template=first_template)
        create_notification_history(template=second_template)
        create_notification(template=third_template)

    for notification_type in ('letter', 'sms', 'email'):
        data = fetch_notification_status_for_day(process_day=process_day, notification_type=notification_type)
        update_fact_notification_status(data=data, process_day=process_day, notification_type=notification_type)

    # Rows come back ordered by (bst_date, notification_type): email, letter, sms.
    new_fact_data = FactNotificationStatus.query.order_by(FactNotificationStatus.bst_date,
                                                          FactNotificationStatus.notification_type
                                                          ).all()

    assert len(new_fact_data) == 3
    assert new_fact_data[0].bst_date == process_day
    assert new_fact_data[0].template_id == second_template.id
    assert new_fact_data[0].service_id == second_service.id
    assert new_fact_data[0].job_id == UUID('00000000-0000-0000-0000-000000000000')
    assert new_fact_data[0].notification_type == 'email'
    assert new_fact_data[0].notification_status == 'temporary-failure'
    assert new_fact_data[0].notification_count == 1

    assert new_fact_data[1].bst_date == process_day
    assert new_fact_data[1].template_id == third_template.id
    assert new_fact_data[1].service_id == third_service.id
    assert new_fact_data[1].job_id == UUID('00000000-0000-0000-0000-000000000000')
    assert new_fact_data[1].notification_type == 'letter'
    assert new_fact_data[1].notification_status == 'sending'
    assert new_fact_data[1].notification_count == 1

    assert new_fact_data[2].bst_date == process_day
    assert new_fact_data[2].template_id == first_template.id
    assert new_fact_data[2].service_id == first_service.id
    assert new_fact_data[2].job_id == UUID('00000000-0000-0000-0000-000000000000')
    assert new_fact_data[2].notification_type == 'sms'
    assert new_fact_data[2].notification_status == 'delivered'
    assert new_fact_data[2].notification_count == 1
Example #19
Source File: cluster.py    From heliopy with GNU General Public License v3.0 4 votes vote down vote up
def _load(probe, starttime, endtime, instrument, product_id,
          try_download):
    """Load Cluster CDF data for one probe/product over a time interval.

    Splits the interval into days, builds per-day local filenames and
    download windows, then delegates download/parse/concatenation to
    util.process with the two callbacks defined below.
    """
    dirs = []
    fnames = []
    download_info = []
    for day in util._daysplitinterval(starttime, endtime):
        date = day[0]
        year = str(date.year)
        month = str(date.month).zfill(2)
        day = str(date.day).zfill(2)

        dirs.append(year)
        # Local filename pattern: C<probe>_<product_id>__YYYYMMDD
        local_fname = 'C' + probe + '_' + product_id + '__' +\
            year + month + day
        fnames.append(local_fname)
        # Each file covers one full day, midnight to just before midnight.
        thisstart = datetime.combine(date, time.min)
        thisend = datetime.combine(date, time.max)
        download_info.append((thisstart, thisend))

    extension = '.cdf'
    local_base_dir = cluster_dir / ('c' + probe) / instrument
    remote_base_url = csa_url

    def download_func(remote_base_url, local_base_dir,
                      directory, fname, remote_fname, extension,
                      download_info):
        # util.process callback: fetch a single day's file using the
        # per-day (start, end) window packed into download_info.
        starttime, endtime = download_info
        _download(probe, starttime, endtime, instrument, product_id)

    def processing_func(file):
        # Find the first CDF-info key that names a non-empty variable list...
        for non_empty_var in list(file.cdf_info().keys()):
            if 'variable' in non_empty_var.lower():
                if len(file.cdf_info()[non_empty_var]) > 0:
                    var_list = non_empty_var
                    break

        # ...then locate the CDF_EPOCH variable to use as the time index.
        for key in file.cdf_info()[var_list]:
            if 'CDF_EPOCH' in file.varget(key, expand=True).values():
                index_key = key
                break
        return util.cdf2df(file, index_key)

    return util.process(dirs, fnames, extension, local_base_dir,
                        remote_base_url, download_func, processing_func,
                        starttime, endtime, try_download=try_download,
                        units=None,
                        download_info=download_info)