Python datetime.datetime.min Examples

The following are 30 code examples of datetime.datetime.min, the earliest representable datetime (0001-01-01 00:00:00). Each example is taken from the open-source project and source file named above it. You may also want to check out the other attributes and methods of the datetime.datetime class.
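Before the examples, here is a minimal, self-contained sketch of the pattern most of them share: datetime.min is used as a sentinel meaning "never" or "no lower bound", so comparisons and subtractions work without special-casing a missing timestamp. The function name and parameters below are illustrative and not taken from any of the projects listed.

from datetime import datetime, timedelta

def is_cache_stale(last_refreshed=None, max_age=timedelta(hours=1)):
    # Treat a missing timestamp as datetime.min so the very first check
    # always reports the cache as stale (datetime.min is far in the past).
    last_refreshed = last_refreshed or datetime.min
    return datetime.now() - last_refreshed > max_age

print(is_cache_stale())                # True: no timestamp recorded yet
print(is_cache_stale(datetime.now()))  # False: just refreshed

Note that datetime.min is naive; comparing it against a timezone-aware datetime raises TypeError, which is why the Airflow example below wraps it with timezone.make_aware.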
Example #1
Source File: fileset.py    From typhon with MIT License
def time_coverage(self, value):
        if self.single_file:
            if value is None:
                # The default for single file filesets:
                self._time_coverage = [
                    datetime.min,
                    datetime.max
                ]
            else:
                self._time_coverage = [
                    to_datetime(value[0]),
                    to_datetime(value[1]),
                ]
        elif value is not None:
            self._time_coverage = to_timedelta(value)
        else:
            self._time_coverage = None

        # Reset the info cache because some file information may have changed
        # now
        self.info_cache = {} 
Example #2
Source File: timer.py    From backtrader with GNU General Public License v3.0
def start(self, data):
        # write down the 'reset when' value
        if not isinstance(self.p.when, integer_types):  # expect time/datetime
            self._rstwhen = self.p.when
            self._tzdata = self.p.tzdata
        else:
            self._tzdata = data if self.p.tzdata is None else self.p.tzdata

            if self.p.when == SESSION_START:
                self._rstwhen = self._tzdata.p.sessionstart
            elif self.p.when == SESSION_END:
                self._rstwhen = self._tzdata.p.sessionend

        self._isdata = isinstance(self._tzdata, AbstractDataBase)
        self._reset_when()

        self._nexteos = datetime.min
        self._curdate = date.min

        self._curmonth = -1  # non-existent month
        self._monthmask = collections.deque()

        self._curweek = -1  # non-existent week
        self._weekmask = collections.deque() 
Example #3
Source File: test_nfvo_plugin.py    From tacker with Apache License 2.0
def _insert_dummy_vim_barbican(self):
        session = self.context.session
        vim_db = nfvo_db.Vim(
            id='6261579e-d6f3-49ad-8bc3-a9cb974778ff',
            tenant_id='ad7ebc56538745a08ef7c5e97f8bd437',
            name='fake_vim',
            description='fake_vim_description',
            type='openstack',
            status='Active',
            deleted_at=datetime.min,
            placement_attr={'regions': ['RegionOne']})
        vim_auth_db = nfvo_db.VimAuth(
            vim_id='6261579e-d6f3-49ad-8bc3-a9cb974778ff',
            password='encrypted_pw',
            auth_url='http://localhost/identity',
            vim_project={'name': 'test_project'},
            auth_cred={'username': 'test_user', 'user_domain_id': 'default',
                       'project_domain_id': 'default',
                       'key_type': 'barbican_key',
                       'secret_uuid': 'fake-secret-uuid'})
        session.add(vim_db)
        session.add(vim_auth_db)
        session.flush() 
Example #4
Source File: test_nfvo_plugin.py    From tacker with Apache License 2.0
def _insert_dummy_vim(self):
        session = self.context.session
        vim_db = nfvo_db.Vim(
            id='6261579e-d6f3-49ad-8bc3-a9cb974778ff',
            tenant_id='ad7ebc56538745a08ef7c5e97f8bd437',
            name='fake_vim',
            description='fake_vim_description',
            type='openstack',
            status='Active',
            deleted_at=datetime.min,
            placement_attr={'regions': ['RegionOne']})
        vim_auth_db = nfvo_db.VimAuth(
            vim_id='6261579e-d6f3-49ad-8bc3-a9cb974778ff',
            password='encrypted_pw',
            auth_url='http://localhost/identity',
            vim_project={'name': 'test_project'},
            auth_cred={'username': 'test_user', 'user_domain_id': 'default',
                       'project_domain_id': 'default',
                       'key_type': 'fernet_key'})
        session.add(vim_db)
        session.add(vim_auth_db)
        session.flush() 
Example #5
Source File: test_plugin.py    From tacker with Apache License 2.0
def _insert_dummy_vnf(self, status="ACTIVE"):
        session = self.context.session
        vnf_db = vnfm_db.VNF(
            id='6261579e-d6f3-49ad-8bc3-a9cb974778fe',
            tenant_id='ad7ebc56538745a08ef7c5e97f8bd437',
            name='fake_vnf',
            description='fake_vnf_description',
            instance_id='da85ea1a-4ec4-4201-bbb2-8d9249eca7ec',
            vnfd_id='eb094833-995e-49f0-a047-dfb56aaf7c4e',
            vim_id='6261579e-d6f3-49ad-8bc3-a9cb974778ff',
            placement_attr={'region': 'RegionOne'},
            status=status,
            deleted_at=datetime.min)
        session.add(vnf_db)
        session.flush()
        return vnf_db 
Example #6
Source File: strong_motion_selector.py    From gmpe-smtk with GNU Affero General Public License v3.0
def select_within_time(self, start_time=None, end_time=None, as_db=False):
        """
        Selects records within a specific time window
        :param start_time:
            Earliest time as instance of :class: datetime.datetime
        :param end_time:
            Latest time as instance of :class: datetime.datetime
        """
        if start_time:
            assert isinstance(start_time, datetime)
        else:
            start_time = datetime.min

        if end_time:
            assert isinstance(end_time, datetime)
        else:
            end_time = datetime.now()
        idx = []
        for iloc, record in enumerate(self.database.records):
            if (record.event.datetime >= start_time) and\
                (record.event.datetime <= end_time):
                idx.append(iloc)
        return self.select_records(idx, as_db) 
Example #7
Source File: cookies.py    From BlackSheep with MIT License
def __init__(self, cookie: Cookie):
        # https://tools.ietf.org/html/rfc6265#section-5.3
        self.cookie = cookie
        self.creation_time = datetime.utcnow()

        expiry = None
        if cookie.max_age:
            # https://tools.ietf.org/html/rfc6265#section-5.2.2
            try:
                max_age = int(cookie.max_age)
            except ValueError:
                pass
            else:
                if max_age <= 0:
                    expiry = datetime.min
                else:
                    expiry = self.creation_time + timedelta(seconds=max_age)
        elif cookie.expires:
            expiry = cookie.expiration

        self.expiry_time = expiry
        self.persistent = True if expiry else False 
Example #8
Source File: test_plugin.py    From tacker with Apache License 2.0
def _insert_dummy_vim(self):
        session = self.context.session
        vim_db = nfvo_db.Vim(
            id='6261579e-d6f3-49ad-8bc3-a9cb974778ff',
            tenant_id='ad7ebc56538745a08ef7c5e97f8bd437',
            name='fake_vim',
            description='fake_vim_description',
            type='test_vim',
            status='Active',
            deleted_at=datetime.min,
            placement_attr={'regions': ['RegionOne']})
        vim_auth_db = nfvo_db.VimAuth(
            vim_id='6261579e-d6f3-49ad-8bc3-a9cb974778ff',
            password='encrypted_pw',
            auth_url='http://localhost/identity',
            vim_project={'name': 'test_project'},
            auth_cred={'username': 'test_user', 'user_domain_id': 'default',
                       'project_domain_id': 'default'})
        session.add(vim_db)
        session.add(vim_auth_db)
        session.flush() 
Example #9
Source File: cache.py    From hypy with GNU General Public License v3.0
def need_update() -> bool:
    """
    Checks if the cache needs an update based on the cache file modification date.

    Returns:
        True if the cache file is older than sync interval in hours.
    """
    vms_cache_filename = get_cache_path()
    modified = datetime.min
    if isfile(vms_cache_filename):
        modified = datetime.fromtimestamp(getmtime(vms_cache_filename))

    if modified < datetime.now() - timedelta(hours=int(sync_interval)):
        return True

    return False 
Example #10
Source File: uq_course_utils.py    From uqcsbot with MIT License
def get_course_assessment(course_names, cutoff=None, assessment_url=None):
    """
    Returns all the course assessment for the given
    courses that occur after the given cutoff.
    """
    if assessment_url is None:
        joined_assessment_url = get_course_assessment_page(course_names)
    else:
        joined_assessment_url = assessment_url
    http_response = get_uq_request(joined_assessment_url)
    if http_response.status_code != requests.codes.ok:
        raise HttpException(joined_assessment_url, http_response.status_code)
    html = BeautifulSoup(http_response.content, 'html.parser')
    assessment_table = html.find('table', class_='tblborder')
    # Start from 1st index to skip over the row containing column names.
    assessment = assessment_table.findAll('tr')[1:]
    parsed_assessment = map(get_parsed_assessment_item, assessment)
    # If no cutoff is specified, set cutoff to UNIX epoch (i.e. filter nothing).
    cutoff = cutoff or datetime.min
    assessment_filter = partial(is_assessment_after_cutoff, cutoff=cutoff)
    filtered_assessment = filter(assessment_filter, parsed_assessment)
    return list(filtered_assessment) 
Example #11
Source File: base.py    From linode_api4-python with BSD 3-Clause "New" or "Revised" License
def __init__(self, client, id, json={}):
        self._set('_populated', False)
        self._set('_last_updated', datetime.min)
        self._set('_client', client)

        #: self._raw_json is a copy of the json received from the API on population,
        #: and cannot be relied upon to be current.  Local changes to mutable fields
        #: that have not been saved will not be present, and volatile fields will not
        #: be updated on access.
        self._set('_raw_json', None)

        for prop in type(self).properties:
            self._set(prop, None)

        self._set('id', id)
        if hasattr(type(self), 'id_attribute'):
            self._set(getattr(type(self), 'id_attribute'), id)

        self._populate(json) 
Example #12
Source File: tweepy_pool_tests.py    From smappPy with GNU General Public License v2.0
def test_with_1_api_in_pool_sets_throttle_time_if_rate_limit_error():
    """
    If the tweepy.API throws a rate-limit error, it should note the time of that error
    in apis[x][1].
    """
    api_mock = MagicMock(spec=tweepy.API)
    api_mock.return_value = api_mock
    ut_mock = Mock(return_value=0)
    api_mock.user_timeline = ut_mock

    api_mock.user_timeline.side_effect = raise_error_once()

    with patch('tweepy.API', api_mock):
        with patch('time.sleep'):
            api_pool = tweepy_pool.APIPool([OAUTH_DICT])
            api_pool.user_timeline(user_id=234)
    api_mock.user_timeline.assert_called_with(user_id=234)
    ok_(api_pool._apis[0][1]['user_timeline'] > datetime.min) 
Example #13
Source File: alert.py    From streamalert with Apache License 2.0
def can_merge(self, other):
        """Check if two alerts can be merged together.

        Args:
            other (Alert): Check if the instance can merge with this other alert.

        Returns:
            True if these alerts fit in the same merge window and have the same merge key values.
        """
        if not self.merge_enabled or not other.merge_enabled:
            # Merge information is not defined for both of these alerts.
            return False

        older, newer = min(self, other), max(self, other)
        if newer.created > older.created + older.merge_window:
            # These alerts won't fit in a single merge window.
            return False

        if set(self.merge_by_keys) != set(other.merge_by_keys):
            # These alerts have different definitions of merge keys.
            return False

        return all(utils.get_first_key(self.record, key) == utils.get_first_key(other.record, key)
                   for key in self.merge_by_keys) 
Example #14
Source File: tweepy_pool.py    From smappPy with GNU General Public License v2.0
def _call_with_throttling_per_method(self, method_name, *args, **kwargs):
        api_struct = self._pick_api_with_shortest_waiting_time_for_method(method_name)
        now = datetime.now()
        throttle_time = api_struct[1].get(method_name, datetime.min)
        time_since_throttle = (now - throttle_time).seconds
        to_wait = self.time_to_wait - time_since_throttle + 1

        if to_wait > 0:
            logger.debug("<{1}>: Rate limits exhausted, waiting {0} seconds".format(
                to_wait, now.strftime('%H:%M:%S')))
            time.sleep(to_wait)

        try:
            return api_struct[0].__getattribute__(method_name)(*args, **kwargs)
        except TweepError as e:
            error_dict = parse_tweepy_error(e)
            if error_dict["code"] in [RATE_LIMIT_ERROR, TOO_MANY_REQUESTS, OVER_CAP_ERROR]:
                api_struct[1][method_name] = now
                logger.debug("Received limit message: {0}".format(error_dict["message"]))
                return self._call_with_throttling_per_method(method_name, *args, **kwargs)
            else:
                raise(e) 
Example #15
Source File: test_dag_processing.py    From airflow with Apache License 2.0
def test_kill_timed_out_processors_kill(self, mock_kill, mock_pid):
        mock_pid.return_value = 1234
        manager = DagFileProcessorManager(
            dag_directory='directory',
            max_runs=1,
            processor_factory=MagicMock().return_value,
            processor_timeout=timedelta(seconds=5),
            signal_conn=MagicMock(),
            dag_ids=[],
            pickle_dags=False,
            async_mode=True)

        processor = DagFileProcessorProcess('abc.txt', False, [], [])
        processor._start_time = timezone.make_aware(datetime.min)
        manager._processors = {'abc.txt': processor}
        manager._kill_timed_out_processors()
        mock_kill.assert_called_once_with() 
Example #16
Source File: test_sharing.py    From CumulusCI with BSD 3-Clause "New" or "Revised" License
def test_raises_exception_timeout(self):
        task = create_task(
            SetOrgWideDefaults,
            {
                "managed": True,
                "api_version": "47.0",
                "api_names": "bar,foo",
                "org_wide_defaults": [
                    {
                        "api_name": "Account",
                        "internal_sharing_model": "Private",
                        "external_sharing_model": "Private",
                    }
                ],
            },
        )

        task.time_start = datetime.min
        with pytest.raises(CumulusCIException):
            task._poll_action() 
Example #17
Source File: provider_details_dao.py    From notifications-api with MIT License
def _get_sms_providers_for_update(time_threshold):
    """
    Returns a list of providers, while holding a for_update lock on the provider details table, guaranteeing that those
    providers won't change (but can still be read) until you've committed/rolled back your current transaction.

    If any of the providers have been changed recently, it returns an empty list - it's still your responsibility to
    release the transaction in that case
    """
    # get current priority of both providers
    q = ProviderDetails.query.filter(
        ProviderDetails.notification_type == 'sms',
        ProviderDetails.active
    ).with_for_update().all()

    # if something updated recently, don't update again. If the updated_at is null, treat it as min time
    if any((provider.updated_at or datetime.min) > datetime.utcnow() - time_threshold for provider in q):
        current_app.logger.info(f"Not adjusting providers, providers updated less than {time_threshold} ago.")
        return []

    return q 
Example #18
Source File: provider_details_dao.py    From notifications-api with MIT License
def dao_reduce_sms_provider_priority(identifier, *, time_threshold):
    """
    Will reduce a chosen sms provider's priority, and increase the other provider's priority by 10 points each.
    If either provider has been updated in the last `time_threshold`, then it won't take any action.
    """
    amount_to_reduce_by = 10
    providers_list = _get_sms_providers_for_update(time_threshold)

    if not providers_list:
        return

    providers = {provider.identifier: provider for provider in providers_list}
    other_identifier = get_alternative_sms_provider(identifier)

    reduced_provider = providers[identifier]
    increased_provider = providers[other_identifier]

    # always keep values between 0 and 100
    reduced_provider_priority = max(0, reduced_provider.priority - amount_to_reduce_by)
    increased_provider_priority = min(100, increased_provider.priority + amount_to_reduce_by)

    _adjust_provider_priority(reduced_provider, reduced_provider_priority)
    _adjust_provider_priority(increased_provider, increased_provider_priority) 
Example #19
Source File: provider_details_dao.py    From notifications-api with MIT License
def dao_adjust_provider_priority_back_to_resting_points():
    """
    Provided that neither SMS provider has been modified in the last hour, move both providers by 10 percentage points
    each towards their defined resting points (set in SMS_PROVIDER_RESTING_POINTS in config.py).
    """
    amount_to_reduce_by = 10
    time_threshold = timedelta(hours=1)

    providers = _get_sms_providers_for_update(time_threshold)

    for provider in providers:
        target = current_app.config['SMS_PROVIDER_RESTING_POINTS'][provider.identifier]
        current = provider.priority

        if current != target:
            if current > target:
                new_priority = max(target, provider.priority - amount_to_reduce_by)
            else:
                new_priority = min(target, provider.priority + amount_to_reduce_by)

            _adjust_provider_priority(provider, new_priority) 
Example #20
Source File: test_provider_details_dao.py    From notifications-api with MIT License
def test_adjust_provider_priority_adds_history(
    restore_provider_details,
    notify_user,
    mmg_provider,
):
    # need to update these manually to avoid triggering the `onupdate` clause of the updated_at column
    ProviderDetails.query.filter(ProviderDetails.identifier == 'mmg').update({'updated_at': datetime.min})

    old_provider_history_rows = ProviderDetailsHistory.query.filter(
        ProviderDetailsHistory.id == mmg_provider.id
    ).order_by(
        desc(ProviderDetailsHistory.version)
    ).all()

    _adjust_provider_priority(mmg_provider, 50)

    updated_provider_history_rows = ProviderDetailsHistory.query.filter(
        ProviderDetailsHistory.id == mmg_provider.id
    ).order_by(
        desc(ProviderDetailsHistory.version)
    ).all()

    assert len(updated_provider_history_rows) - len(old_provider_history_rows) == 1
    assert updated_provider_history_rows[0].version - old_provider_history_rows[0].version == 1
    assert updated_provider_history_rows[0].priority == 50 
Example #21
Source File: rollover.py    From backtrader with GNU General Public License v3.0
def start(self):
        super(RollOver, self).start()
        for d in self._rolls:
            d.setenvironment(self._env)
            d._start()

        # put the references in a separate list so they can be popped
        self._ds = list(self._rolls)
        self._d = self._ds.pop(0) if self._ds else None
        self._dexp = None
        self._dts = [datetime.min for xx in self._ds] 
Example #22
Source File: timer.py    From backtrader with GNU General Public License v3.0
def _reset_when(self, ddate=datetime.min):
        self._when = self._rstwhen
        self._dtwhen = self._dwhen = None

        self._lastcall = ddate 
Example #23
Source File: test_comparisons.py    From predictive-maintenance-using-machine-learning with Apache License 2.0
def test_timestamp_compare_with_early_datetime(self):
        # e.g. datetime.min
        stamp = Timestamp('2012-01-01')

        assert not stamp == datetime.min
        assert not stamp == datetime(1600, 1, 1)
        assert not stamp == datetime(2700, 1, 1)
        assert stamp != datetime.min
        assert stamp != datetime(1600, 1, 1)
        assert stamp != datetime(2700, 1, 1)
        assert stamp > datetime(1600, 1, 1)
        assert stamp >= datetime(1600, 1, 1)
        assert stamp < datetime(2700, 1, 1)
        assert stamp <= datetime(2700, 1, 1) 
Example #24
Source File: accessible_time.py    From INGInious with GNU Affero General Public License v3.0
def __init__(self, val=None):
        """
            Parse a string/a boolean to get the correct time period.
            Correct values for val:
            True (task always open)
            False (task always closed)
            2014-07-16 11:24:00 (task is open from 2014-07-16 at 11:24:00)
            2014-07-16 (task is open from 2014-07-16)
            / 2014-07-16 11:24:00 (task is only open before the 2014-07-16 at 11:24:00)
            / 2014-07-16 (task is only open before the 2014-07-16)
            2014-07-16 11:24:00 / 2014-07-20 11:24:00 (task is open from 2014-07-16 11:24:00 and will be closed the 2014-07-20 at 11:24:00)
            2014-07-16 / 2014-07-20 11:24:00 (...)
            2014-07-16 11:24:00 / 2014-07-20 (...)
            2014-07-16 / 2014-07-20 (...)
            2014-07-16 11:24:00 / 2014-07-20 11:24:00 / 2014-07-20 12:24:00 (task is open from 2014-07-16 11:24:00, has a soft deadline set at 2014-07-20 11:24:00 and will be closed the 2014-07-20 at 11:24:00)
            2014-07-16 / 2014-07-20 11:24:00 / 2014-07-21 (...)
            2014-07-16 / 2014-07-20 / 2014-07-21 (...)
        """
        if val is None or val == "" or val is True:
            self._val = [datetime.min, datetime.max]
            self._soft_end = datetime.max
        elif val == False:
            self._val = [datetime.max, datetime.max]
            self._soft_end = datetime.max
        else:  # str
            values = val.split("/")
            if len(values) == 1:
                self._val = [parse_date(values[0].strip(), datetime.min), datetime.max]
                self._soft_end = datetime.max
            elif len(values) == 2:
                # Has start time and hard deadline
                self._val = [parse_date(values[0].strip(), datetime.min), parse_date(values[1].strip(), datetime.max)]
                self._soft_end = self._val[1]
            else:
                # Has start time, soft deadline and hard deadline
                self._val = [parse_date(values[0].strip(), datetime.min), parse_date(values[2].strip(), datetime.max)]
                self._soft_end = parse_date(values[1].strip(), datetime.max)

        # Having a soft deadline after the hard one does not make sense
        if self._soft_end > self._val[1]:
            raise ValueError(_("Cannot have a soft deadline after the hard one")) 
Example #25
Source File: accessible_time.py    From INGInious with GNU Affero General Public License v3.0
def get_std_start_date(self):
        """ If the date is custom, return the start datetime with the format %Y-%m-%d %H:%M:%S. Else, returns "". """
        first, _ = self._val
        if first != datetime.min and first != datetime.max:
            return first.strftime("%Y-%m-%d %H:%M:%S")
        else:
            return "" 
Example #26
Source File: spotify.py    From CloudBot with GNU General Public License v3.0
def __init__(self, client_id=None, client_secret=None):
        self._client_id = client_id
        self._client_secret = client_secret
        self._access_token = None
        self._token_expires = datetime.min
        self._lock = RLock()  # Make sure only one request is parsed at a time 
Example #27
Source File: test_plugin.py    From tacker with Apache License 2.0
def _insert_dummy_vnf_template_inline(self):
        session = self.context.session
        vnf_template = vnfm_db.VNFD(
            id='d58bcc4e-d0cf-11e6-bf26-cec0c932ce01',
            tenant_id='ad7ebc56538745a08ef7c5e97f8bd437',
            name='tmpl-koeak4tqgoqo8cr4-dummy_inline_vnf',
            description='inline_fake_template_description',
            deleted_at=datetime.min,
            template_source='inline')
        session.add(vnf_template)
        session.flush()
        return vnf_template 
Example #28
Source File: test_plugin.py    From tacker with Apache License 2.0
def _insert_dummy_vnf_template(self):
        session = self.context.session
        vnf_template = vnfm_db.VNFD(
            id='eb094833-995e-49f0-a047-dfb56aaf7c4e',
            tenant_id='ad7ebc56538745a08ef7c5e97f8bd437',
            name='fake_template',
            description='fake_template_description',
            template_source='onboarded',
            deleted_at=datetime.min)
        session.add(vnf_template)
        session.flush()
        return vnf_template 
Example #29
Source File: test_sub.py    From pendulum with MIT License
def test_subtract_days_max():
    delta = pendulum.now() - pendulum.instance(datetime.min)
    assert pendulum.now().subtract(days=delta.days - 1).year == 1 
Example #30
Source File: timer.py    From backtrader with GNU General Public License v3.0
def _check_week(self, ddate=date.min):
        if not self.p.weekdays:
            return True

        _, dweek, dwkday = ddate.isocalendar()

        mask = self._weekmask
        daycarry = False
        if dweek != self._curweek:
            self._curweek = dweek  # write down new week
            daycarry = self.p.weekcarry and bool(mask)
            self._weekmask = mask = collections.deque(self.p.weekdays)

        dc = bisect.bisect_left(mask, dwkday)  # "left" for days before dday
        daycarry = daycarry or (self.p.weekcarry and dc > 0)
        if dc < len(mask):
            curday = bisect.bisect_right(mask, dwkday, lo=dc) > 0  # check dday
            dc += curday
        else:
            curday = False

        while dc:
            mask.popleft()
            dc -= 1

        return daycarry or curday