Python datetime.datetime.min Examples

The following code examples show how to use datetime.datetime.min (a class attribute holding the earliest representable datetime — not a callable). They are taken from open-source Python projects. You can vote up the examples you like or vote down the ones you don't like.

Example 1
Project: where   Author: kartverket   File: trf_snx_soln.py    MIT License 6 votes vote down vote up
def parse_solution_discontinuity(self, data):
        """Parser for SOLUTION/DISCONTINUITY data

        Converts the input data to a dictionary with items for each site, each containing start and end epochs for each
        solution. Only uses the position data, not the velocity data.

        Args:
            data (numpy.array):  Input data, raw data for SOLUTION/DISCONTINUITY block.
        """
        # Keep only position ("P") rows.  Boolean fancy-indexing returns a copy,
        # so the None-replacement below does not modify the caller's array.
        pos_data = data[data["pos_or_vel"] == "P"]
        # Open-ended intervals are stored as None; substitute datetime extremes
        # so epoch comparisons work without special-casing.
        pos_data["start_epoch"][np.equal(pos_data["start_epoch"], None)] = datetime.min
        pos_data["end_epoch"][np.equal(pos_data["end_epoch"], None)] = datetime.max

        # setdefault keeps the first interval seen for a given solution number.
        for d, soln in zip(pos_data, pos_data["soln"].astype("i8")):
            site_key = d["site_code"]
            self.data.setdefault(site_key, dict())
            self.data[site_key].setdefault(soln, dict(start=d["start_epoch"], end=d["end_epoch"]))
Example 2
Project: NiujiaoDebugger   Author: MrSrc   File: datetimetester.py    GNU General Public License v3.0 6 votes vote down vote up
def test_overflow(self):
        """Verify timedelta arithmetic raises OverflowError beyond min/max."""
        tiny = timedelta.resolution

        # Step just inside the minimum, then one more tick must overflow.
        td = timedelta.min + tiny
        td -= tiny  # no problem
        self.assertRaises(OverflowError, td.__sub__, tiny)
        self.assertRaises(OverflowError, td.__add__, -tiny)

        # Symmetric check at the maximum.
        td = timedelta.max - tiny
        td += tiny  # no problem
        self.assertRaises(OverflowError, td.__add__, tiny)
        self.assertRaises(OverflowError, td.__sub__, -tiny)

        # -timedelta.max falls below timedelta.min, so negation overflows.
        self.assertRaises(OverflowError, lambda: -timedelta.max)

        # Multiplication/division that would exceed the representable range.
        day = timedelta(1)
        self.assertRaises(OverflowError, day.__mul__, 10**9)
        self.assertRaises(OverflowError, day.__mul__, 1e9)
        self.assertRaises(OverflowError, day.__truediv__, 1e-20)
        self.assertRaises(OverflowError, day.__truediv__, 1e-10)
        self.assertRaises(OverflowError, day.__truediv__, 9e-10)
Example 3
Project: NiujiaoDebugger   Author: MrSrc   File: datetimetester.py    GNU General Public License v3.0 6 votes vote down vote up
def test_extreme_ordinals(self):
        """Round-trip toordinal()/fromordinal() at the class's min and max dates."""
        a = self.theclass.min
        a = self.theclass(a.year, a.month, a.day)  # get rid of time parts
        aord = a.toordinal()
        b = a.fromordinal(aord)
        self.assertEqual(a, b)

        # One ordinal below the minimum must be rejected.
        self.assertRaises(ValueError, lambda: a.fromordinal(aord - 1))

        b = a + timedelta(days=1)
        self.assertEqual(b.toordinal(), aord + 1)
        self.assertEqual(b, self.theclass.fromordinal(aord + 1))

        a = self.theclass.max
        a = self.theclass(a.year, a.month, a.day)  # get rid of time parts
        aord = a.toordinal()
        b = a.fromordinal(aord)
        self.assertEqual(a, b)

        # One ordinal above the maximum must be rejected.
        self.assertRaises(ValueError, lambda: a.fromordinal(aord + 1))

        b = a - timedelta(days=1)
        self.assertEqual(b.toordinal(), aord - 1)
        self.assertEqual(b, self.theclass.fromordinal(aord - 1))
Example 4
Project: typhon   Author: atmtools   File: fileset.py    MIT License 6 votes vote down vote up
def time_coverage(self, value):
        """Setter for the fileset's time coverage.

        For single-file filesets the coverage is a [start, end] pair of
        datetimes, defaulting to the full representable range; for multi-file
        filesets it is converted to a timedelta, or left as None.
        """
        if self.single_file:
            if value is None:
                # The default for single file filesets:
                self._time_coverage = [
                    datetime.min,
                    datetime.max
                ]
            else:
                self._time_coverage = [
                    to_datetime(value[0]),
                    to_datetime(value[1]),
                ]
        elif value is not None:
            # Multi-file filesets: value is a duration, not an interval.
            self._time_coverage = to_timedelta(value)
        else:
            self._time_coverage = None

        # Reset the info cache because some file information may have changed
        # now
        self.info_cache = {}
Example 5
Project: streamalert   Author: airbnb   File: alert.py    Apache License 2.0 6 votes vote down vote up
def can_merge(self, other):
        """Check if two alerts can be merged together.

        Args:
            other (Alert): Check if the instance can merge with this other alert.

        Returns:
            True if these alerts fit in the same merge window and have the same merge key values.
        """
        if not self.merge_enabled or not other.merge_enabled:
            # Merge information is not defined for both of these alerts.
            return False

        # NOTE(review): min()/max() rely on Alert defining an ordering
        # (presumably by creation time) — confirm against the Alert class.
        older, newer = min(self, other), max(self, other)
        if newer.created > older.created + older.merge_window:
            # These alerts won't fit in a single merge window.
            return False

        if set(self.merge_by_keys) != set(other.merge_by_keys):
            # These alerts have different definitions of merge keys.
            return False

        # Finally, the value behind every merge key must match in both records.
        return all(utils.get_first_key(self.record, key) == utils.get_first_key(other.record, key)
                   for key in self.merge_by_keys)
Example 6
Project: airflow   Author: apache   File: test_dag_processing.py    Apache License 2.0 6 votes vote down vote up
def test_kill_timed_out_processors_kill(self, mock_kill, mock_pid):
        """A processor whose start time is far in the past must be killed."""
        mock_pid.return_value = 1234
        manager = DagFileProcessorManager(
            dag_directory='directory',
            file_paths=['abc.txt'],
            max_runs=1,
            processor_factory=MagicMock().return_value,
            processor_timeout=timedelta(seconds=5),
            signal_conn=MagicMock(),
            async_mode=True)

        processor = DagFileProcessorProcess('abc.txt', False, [], [])
        # datetime.min guarantees the 5-second processor_timeout is exceeded.
        processor._start_time = timezone.make_aware(datetime.min)
        manager._processors = {'abc.txt': processor}
        manager._kill_timed_out_processors()
        mock_kill.assert_called_once_with()
Example 7
Project: backtrader   Author: backtrader   File: timer.py    GNU General Public License v3.0 6 votes vote down vote up
def start(self, data):
        """Initialize timer state when the data feed starts.

        Resolves the "reset when" trigger — either an explicit time/datetime,
        or a session boundary taken from the timezone data feed — and clears
        all date/month/week bookkeeping.
        """
        # write down the 'reset when' value
        if not isinstance(self.p.when, integer_types):  # expect time/datetime
            self._rstwhen = self.p.when
            self._tzdata = self.p.tzdata
        else:
            # Integer sentinels (SESSION_START/END) take the time of day from
            # the data feed's session parameters.
            self._tzdata = data if self.p.tzdata is None else self.p.tzdata

            if self.p.when == SESSION_START:
                self._rstwhen = self._tzdata.p.sessionstart
            elif self.p.when == SESSION_END:
                self._rstwhen = self._tzdata.p.sessionend

        self._isdata = isinstance(self._tzdata, AbstractDataBase)
        self._reset_when()

        # datetime.min / date.min sentinels — presumably so the first bar
        # always refreshes these values; confirm against the timer's next().
        self._nexteos = datetime.min
        self._curdate = date.min

        self._curmonth = -1  # non-existent month
        self._monthmask = collections.deque()

        self._curweek = -1  # non-existent week
        self._weekmask = collections.deque()
Example 8
Project: GreenGuard   Author: D3-AI   File: data.py    MIT License 5 votes vote down vote up
def _get_times(target_times, window_size):
    """Build a (turbine_id, start, end) frame of windows ending at each cutoff time.

    A falsy window_size yields open-ended windows starting at datetime.min.
    """
    cutoffs = target_times.cutoff_time
    if not window_size:
        # No window given: every interval is open-ended on the left.
        starts = [datetime.min] * len(cutoffs)
    else:
        starts = cutoffs - pd.to_timedelta(window_size)

    columns = {
        'turbine_id': target_times.turbine_id,
        'start': starts,
        'end': cutoffs,
    }
    return pd.DataFrame(columns)
Example 9
Project: where   Author: kartverket   File: slr_eccentricity.py    MIT License 5 votes vote down vote up
def parse_site_eccentricity(self, data):
        """Store eccentricity vectors per site, keyed by validity interval.

        Open-ended epochs (None) are normalized to datetime.min/max so the
        (start, end) tuple is always comparable.  UNE vectors are reordered
        to ENU; all other vector types are stored as-is.
        """
        for d in data:
            start_time = datetime.min if d["start_time"] is None else d["start_time"]
            end_time = datetime.max if d["end_time"] is None else d["end_time"]
            key = (start_time, end_time)
            if d["vector_type"] == "UNE":
                # Convert UNE to ENU
                self.data[d["site_code"]].setdefault(key, {}).update(
                    dict(vector=(d["vector_3"], d["vector_2"], d["vector_1"]), coord_type="ENU")
                )
            else:
                self.data[d["site_code"]].setdefault(key, {}).update(
                    dict(vector=(d["vector_1"], d["vector_2"], d["vector_3"]), coord_type=d["vector_type"])
                )
Example 10
Project: where   Author: kartverket   File: slr_handling_file.py    MIT License 5 votes vote down vote up
def parse_data_handling(self, data):
        """Parse ILRS data-handling records into self.data.

        Each record is appended as ((start, end), info) under
        self.data[site_code][handling_code]; open-ended epochs become
        datetime.min/max.
        """
        for d in data:
            start_time = datetime.min if d["start_time"] is None else d["start_time"]
            end_time = datetime.max if d["end_time"] is None else d["end_time"]
            interval = (start_time, end_time)
            info = {"unit": d["unit"]}

            if d["e_value"]:
                try:
                    e_value = float(d["e_value"])
                except ValueError:
                    # NOTE(review): assumes log.fatal raises or exits; if it
                    # returns, e_value is unbound on the next line — confirm.
                    log.fatal("ILRS Data handling: Not able to convert value to float")
                info.update({"e_value": e_value})
            if d["std_dev"]:
                try:
                    std_dev = float(d["std_dev"])
                except ValueError:
                    # NOTE(review): same unbound-name concern as above.
                    log.fatal("ILRS Data handling: Not able to convert value to float")
                info.update({"std_dev": std_dev})

            # Unfortunately we have to deal with two different line formats.
            # Split the comments field in the second line format:
            # *CODE PT_ UNIT T _DATA_START_ __DATA_END__ M __E-VALUE___ STD_DEV ___COMMENTS______
            # *CODE PT_ UNIT T _DATA_START_ __DATA_END__ M __E-VALUE___ STD_DEV _E-RATE__ _CMNTS_
            try:
                info.update({"comments": d["comments2"], "e_rate": float(d["comments1"])})
            except ValueError:
                info.update({"comments": d["comments1"] + d["comments2"]})

            self.data.setdefault(d["site_code"], {}).setdefault(d["handling_code"], []).append((interval, info))
Example 11
Project: where   Author: kartverket   File: trf_snx.py    MIT License 5 votes vote down vote up
def parse_solution_epochs(self, data):
        """Register an open-ended (min..max) epoch interval for every solution."""
        solutions = data["soln"].astype("i8")
        for row, solution in zip(data, solutions):
            # The site entry must already exist in self.data (KeyError otherwise,
            # exactly as before).
            site = self.data[row["site_code"]]
            pos_vel = site.setdefault("pos_vel", dict())
            pos_vel[solution] = dict(start=datetime.min, end=datetime.max)
Example 12
Project: where   Author: kartverket   File: trf_ssc.py    MIT License 5 votes vote down vote up
def parse_position(self, line, cache):
        """Parse the position line of ITRF data

        This gives the position (x,y,z) of the station. Converting position float.

        Args:
            line (Dict):  The fields of a line.
            cache (Dict): Dict that persists information.
        """
        data_fields = ("STAX", "STAY", "STAZ", "sigma_x", "sigma_y", "sigma_z", "soln", "start", "end")
        data_values = line.pop("data")
        # zip_longest pads missing trailing fields with None.
        line.update({k: v for k, v in itertools.zip_longest(data_fields, data_values.split())})
        line["ref_epoch"] = self.meta["ref_epoch"]
        cache["antenna_id"] = line["antenna_id"]
        # Missing solution number defaults to 1.
        line["soln"] = int(line["soln"]) if line["soln"] else 1
        cache["soln"] = line["soln"]
        pos_vel = dict()
        # Pull the STA* coordinates out of `line` and convert them to float.
        pos_vel.update({k: float(line.pop(k)) for k in list(line.keys()) if k.startswith("STA")})

        # Epoch fields look like YY:DDD:SSSSS; a day-of-year of 000 marks an
        # open-ended interval, mapped to datetime.min/max.
        start = line.pop("start")
        if start and start[3:6] != "000":
            pos_vel["start"] = datetime.strptime(start[0:6], "%y:%j") + timedelta(seconds=int(start[7:]))
        else:
            pos_vel["start"] = datetime.min

        end = line.pop("end")
        if end and end[3:6] != "000":
            pos_vel["end"] = datetime.strptime(end[0:6], "%y:%j") + timedelta(seconds=int(end[7:]))
        else:
            pos_vel["end"] = datetime.max

        # Remaining fields are stored per antenna; pos_vel is keyed by solution.
        self.data.setdefault(cache["antenna_id"], dict())
        self.data[cache["antenna_id"]].update(line)
        self.data[cache["antenna_id"]].setdefault("pos_vel", dict())
        self.data[cache["antenna_id"]]["pos_vel"][line["soln"]] = pos_vel
Example 13
Project: botbuilder-python   Author: microsoft   File: microsoft_app_credentials.py    MIT License 5 votes vote down vote up
def _is_trusted_url(host: str) -> bool:
        """Return True while the cached trust entry for *host* has not expired.

        Unknown hosts default to datetime.min (never trusted); entries get a
        5-minute grace period past their recorded expiration.
        """
        cutoff = datetime.now() - timedelta(minutes=5)
        expiration = MicrosoftAppCredentials.trustedHostNames.get(host, datetime.min)
        return expiration > cutoff
Example 14
Project: WhooshSearch   Author: rokartnaz   File: times.py    BSD 2-Clause "Simplified" License 5 votes vote down vote up
def datetime_to_long(dt):
    """Converts a datetime object to a long integer representing the number
    of microseconds since ``datetime.min``.
    """
    # Strip any tzinfo so the subtraction against the naive dt.min is legal.
    naive = dt.replace(tzinfo=None)
    return timedelta_to_usecs(naive - dt.min)
Example 15
Project: WhooshSearch   Author: rokartnaz   File: times.py    BSD 2-Clause "Simplified" License 5 votes vote down vote up
def long_to_datetime(x):
    """Converts a long integer representing the number of microseconds since
    ``datetime.min`` to a datetime object.
    """
    usecs_per_second = 1000000
    usecs_per_day = 86400000000  # 24 * 3600 * usecs_per_second

    days, remainder = divmod(x, usecs_per_day)
    seconds, microseconds = divmod(remainder, usecs_per_second)

    return datetime.min + timedelta(days=days, seconds=seconds,
                                    microseconds=microseconds)


# Ambiguous datetime object 
Example 16
Project: NiujiaoDebugger   Author: MrSrc   File: datetimetester.py    GNU General Public License v3.0 5 votes vote down vote up
def test_str(self):
        for tz in [self.ACDT, self.EST, timezone.utc,
                   timezone.min, timezone.max]:
            self.assertEqual(str(tz), tz.tzname(None)) 
Example 17
Project: NiujiaoDebugger   Author: MrSrc   File: datetimetester.py    GNU General Public License v3.0 5 votes vote down vote up
def test_repr(self):
        """repr() of a timezone must round-trip through eval()."""
        # The reprs reference the datetime module by name, so bind the module
        # under the local name `datetime` for the eval() below.
        datetime = datetime_module
        for tz in [self.ACDT, self.EST, timezone.utc,
                   timezone.min, timezone.max]:
            # test round-trip
            tzrep = repr(tz)
            self.assertEqual(tz, eval(tzrep))
Example 18
Project: NiujiaoDebugger   Author: MrSrc   File: datetimetester.py    GNU General Public License v3.0 5 votes vote down vote up
def test_class_members(self):
        """timezone.utc/min/max expose the documented fixed UTC offsets."""
        # min/max offsets are +/- 23:59 by definition.
        limit = timedelta(hours=23, minutes=59)
        self.assertEqual(timezone.utc.utcoffset(None), ZERO)
        self.assertEqual(timezone.min.utcoffset(None), -limit)
        self.assertEqual(timezone.max.utcoffset(None), limit)
Example 19
Project: NiujiaoDebugger   Author: MrSrc   File: datetimetester.py    GNU General Public License v3.0 5 votes vote down vote up
def test_aware_datetime(self):
        # test that timezone instances can be used by datetime
        t = datetime(1, 1, 1)
        for tz in [timezone.min, timezone.max, timezone.utc]:
            # tzname/utcoffset/dst must agree whether queried on the timezone
            # directly or via an aware datetime carrying it.
            self.assertEqual(tz.tzname(t),
                             t.replace(tzinfo=tz).tzname())
            self.assertEqual(tz.utcoffset(t),
                             t.replace(tzinfo=tz).utcoffset())
            self.assertEqual(tz.dst(t),
                             t.replace(tzinfo=tz).dst())
Example 20
Project: NiujiaoDebugger   Author: MrSrc   File: datetimetester.py    GNU General Public License v3.0 5 votes vote down vote up
def test_pickle(self):
        """Pickling round-trips timezones; timezone.utc stays a singleton."""
        for tz in self.ACDT, self.EST, timezone.min, timezone.max:
            for pickler, unpickler, proto in pickle_choices:
                tz_copy = unpickler.loads(pickler.dumps(tz, proto))
                self.assertEqual(tz_copy, tz)
        tz = timezone.utc
        for pickler, unpickler, proto in pickle_choices:
            tz_copy = unpickler.loads(pickler.dumps(tz, proto))
            # utc must unpickle to the identical singleton, not an equal copy.
            self.assertIs(tz_copy, tz)
Example 21
Project: NiujiaoDebugger   Author: MrSrc   File: datetimetester.py    GNU General Public License v3.0 5 votes vote down vote up
def test_deepcopy(self):
        """deepcopy yields an equal timezone; timezone.utc stays the singleton."""
        for tz in self.ACDT, self.EST, timezone.min, timezone.max:
            tz_copy = copy.deepcopy(tz)
            self.assertEqual(tz_copy, tz)
        tz = timezone.utc
        tz_copy = copy.deepcopy(tz)
        # deepcopy must preserve identity for the utc singleton.
        self.assertIs(tz_copy, tz)


#############################################################################
# Base class for testing a particular aspect of timedelta, time, date and
# datetime comparisons. 
Example 22
Project: NiujiaoDebugger   Author: MrSrc   File: datetimetester.py    GNU General Public License v3.0 5 votes vote down vote up
def test_resolution_info(self):
        """timedelta.min/max/resolution have the documented types and values."""
        self.assertIsInstance(timedelta.min, timedelta)
        self.assertIsInstance(timedelta.max, timedelta)
        self.assertIsInstance(timedelta.resolution, timedelta)
        self.assertTrue(timedelta.max > timedelta.min)
        self.assertEqual(timedelta.min, timedelta(-999999999))
        # max is one microsecond short of a full day past 999999999 days.
        self.assertEqual(timedelta.max, timedelta(999999999, 24*3600-1, 1e6-1))
        self.assertEqual(timedelta.resolution, timedelta(0, 0, 1))
Example 23
Project: NiujiaoDebugger   Author: MrSrc   File: datetimetester.py    GNU General Public License v3.0 5 votes vote down vote up
def test_overflow(self):
        """Arithmetic past theclass.min/max overflows, for several step sizes."""
        tiny = self.theclass.resolution

        for delta in [tiny, timedelta(1), timedelta(2)]:
            # Stay inside the range, then step one `delta` beyond it.
            dt = self.theclass.min + delta
            dt -= delta  # no problem
            self.assertRaises(OverflowError, dt.__sub__, delta)
            self.assertRaises(OverflowError, dt.__add__, -delta)

            # Symmetric check at the maximum.
            dt = self.theclass.max - delta
            dt += delta  # no problem
            self.assertRaises(OverflowError, dt.__add__, delta)
            self.assertRaises(OverflowError, dt.__sub__, -delta)
Example 24
Project: NiujiaoDebugger   Author: MrSrc   File: datetimetester.py    GNU General Public License v3.0 5 votes vote down vote up
def test_extreme_timedelta(self):
        """max - min spans the whole range and survives a microseconds round-trip."""
        big = self.theclass.max - self.theclass.min
        # 3652058 days, 23 hours, 59 minutes, 59 seconds, 999999 microseconds
        n = (big.days*24*3600 + big.seconds)*1000000 + big.microseconds
        # n == 315537897599999999 ~= 2**58.13
        justasbig = timedelta(0, 0, n)
        self.assertEqual(big, justasbig)
        self.assertEqual(self.theclass.min + big, self.theclass.max)
        self.assertEqual(self.theclass.max - big, self.theclass.min)
Example 25
Project: NiujiaoDebugger   Author: MrSrc   File: datetimetester.py    GNU General Public License v3.0 5 votes vote down vote up
def test_bool(self):
        # All dates are considered true.
        self.assertTrue(self.theclass.min)
        self.assertTrue(self.theclass.max) 
Example 26
Project: NiujiaoDebugger   Author: MrSrc   File: datetimetester.py    GNU General Public License v3.0 5 votes vote down vote up
def test_timestamp_limits(self):
        """timestamp()/fromtimestamp() agree at min/max and fail just outside."""
        # minimum timestamp
        min_dt = self.theclass.min.replace(tzinfo=timezone.utc)
        min_ts = min_dt.timestamp()
        try:
            # date 0001-01-01 00:00:00+00:00: timestamp=-62135596800
            self.assertEqual(self.theclass.fromtimestamp(min_ts, tz=timezone.utc),
                             min_dt)
        except (OverflowError, OSError) as exc:
            # the date 0001-01-01 doesn't fit into 32-bit time_t,
            # or platform doesn't support such very old date
            self.skipTest(str(exc))

        # maximum timestamp: set seconds to zero to avoid rounding issues
        max_dt = self.theclass.max.replace(tzinfo=timezone.utc,
                                           second=0, microsecond=0)
        max_ts = max_dt.timestamp()
        # date 9999-12-31 23:59:00+00:00: timestamp 253402300740
        self.assertEqual(self.theclass.fromtimestamp(max_ts, tz=timezone.utc),
                         max_dt)

        # number of seconds greater than 1 year: make sure that the new date
        # is not valid in datetime.datetime limits
        delta = 3600 * 24 * 400

        # too small
        ts = min_ts - delta
        # converting a Python int to C time_t can raise a OverflowError,
        # especially on 32-bit platforms.
        with self.assertRaises((ValueError, OverflowError)):
            self.theclass.fromtimestamp(ts)
        with self.assertRaises((ValueError, OverflowError)):
            self.theclass.utcfromtimestamp(ts)

        # too big
        ts = max_dt.timestamp() + delta
        with self.assertRaises((ValueError, OverflowError)):
            self.theclass.fromtimestamp(ts)
        with self.assertRaises((ValueError, OverflowError)):
            self.theclass.utcfromtimestamp(ts)
Example 27
Project: NiujiaoDebugger   Author: MrSrc   File: datetimetester.py    GNU General Public License v3.0 5 votes vote down vote up
def test_resolution_info(self):
        """min/max/resolution class attributes have the right types and ordering."""
        self.assertIsInstance(self.theclass.min, self.theclass)
        self.assertIsInstance(self.theclass.max, self.theclass)
        # resolution is always a timedelta, regardless of the class under test.
        self.assertIsInstance(self.theclass.resolution, timedelta)
        self.assertTrue(self.theclass.max > self.theclass.min)
Example 28
Project: arctic   Author: man-group   File: metadata_store.py    GNU Lesser General Public License v2.1 5 votes vote down vote up
def prepend(self, symbol, metadata, start_time=None):
        """
        Prepend a metadata entry for `symbol`

        Parameters
        ----------
        symbol : `str`
            symbol name for the item
        metadata : `dict`
            to be persisted
        start_time : `datetime.datetime`
            when metadata becomes effective
            Default: datetime.datetime.min
        """
        if metadata is None:
            return
        if start_time is None:
            start_time = dt.min
        # The earliest existing entry (ascending sort on start_time).
        old_metadata = self.find_one({'symbol': symbol}, sort=[('start_time', pymongo.ASCENDING)])
        if old_metadata is not None:
            # A prepended entry must strictly precede everything already stored.
            if old_metadata['start_time'] <= start_time:
                raise ValueError('start_time={} is later than the first metadata @{}'.format(start_time,
                                                                                             old_metadata['start_time']))
            if old_metadata['metadata'] == metadata:
                # Identical payload: just extend the existing entry backwards.
                self.find_one_and_update({'symbol': symbol}, {'$set': {'start_time': start_time}},
                                         sort=[('start_time', pymongo.ASCENDING)])
                old_metadata['start_time'] = start_time
                return old_metadata
            # The new entry ends where the old first entry begins.
            end_time = old_metadata.get('start_time')
        else:
            end_time = None

        document = {'_id': bson.ObjectId(), 'symbol': symbol, 'metadata': metadata, 'start_time': start_time}
        if end_time is not None:
            document['end_time'] = end_time
        mongo_retry(self.insert_one)(document)

        logger.debug('Finished writing metadata for %s', symbol)
        return document
Example 29
Project: octopuscloud   Author: christianbaun   File: result.py    Apache License 2.0 5 votes vote down vote up
def __init__(self, batch_name, sd, mimetype_files=None):
        """Initialize batch statistics and fetch the output queue/domain.

        min_time/earliest_time start at the maximum sentinel (and vice versa)
        — presumably so the first processed file always replaces them; confirm
        against the accumulation code.
        """
        self.sd = sd
        self.batch = batch_name
        self.log_fp = None
        self.num_files = 0
        self.total_time = 0
        self.min_time = timedelta.max
        self.max_time = timedelta.min
        self.earliest_time = datetime.max
        self.latest_time = datetime.min
        self.queue = self.sd.get_obj('output_queue')
        self.domain = self.sd.get_obj('output_domain')
Example 30
Project: FX-RER-Value-Extraction   Author: tsKenneth   File: test_comparisons.py    MIT License 5 votes vote down vote up
def test_timestamp_compare_with_early_datetime(self):
        """Comparisons against datetimes far outside the Timestamp range must not raise."""
        # e.g. datetime.min
        stamp = Timestamp("2012-01-01")

        # Equality is simply False for out-of-range datetimes.
        assert not stamp == datetime.min
        assert not stamp == datetime(1600, 1, 1)
        assert not stamp == datetime(2700, 1, 1)
        assert stamp != datetime.min
        assert stamp != datetime(1600, 1, 1)
        assert stamp != datetime(2700, 1, 1)
        # Ordering still works around the supported range.
        assert stamp > datetime(1600, 1, 1)
        assert stamp >= datetime(1600, 1, 1)
        assert stamp < datetime(2700, 1, 1)
        assert stamp <= datetime(2700, 1, 1)
Example 31
Project: flowlogs-reader   Author: obsrvbl   File: aggregation.py    Apache License 2.0 5 votes vote down vote up
def __init__(self):
        """Fresh traffic aggregate: sentinel time range and zeroed counters."""
        # Inverted sentinels (start=max, end=min) — presumably narrowed by
        # min()/max() when records are folded in; not visible in this block.
        self.start, self.end = datetime.max, datetime.min
        self.packets = self.bytes = 0
Example 32
Project: recruit   Author: Frank-qlu   File: test_comparisons.py    Apache License 2.0 5 votes vote down vote up
def test_timestamp_compare_with_early_datetime(self):
        """Comparisons against datetimes far outside the Timestamp range must not raise."""
        # e.g. datetime.min
        stamp = Timestamp('2012-01-01')

        # Equality is simply False for out-of-range datetimes.
        assert not stamp == datetime.min
        assert not stamp == datetime(1600, 1, 1)
        assert not stamp == datetime(2700, 1, 1)
        assert stamp != datetime.min
        assert stamp != datetime(1600, 1, 1)
        assert stamp != datetime(2700, 1, 1)
        # Ordering still works around the supported range.
        assert stamp > datetime(1600, 1, 1)
        assert stamp >= datetime(1600, 1, 1)
        assert stamp < datetime(2700, 1, 1)
        assert stamp <= datetime(2700, 1, 1)
Example 33
Project: Lyff   Author: akashlevy   File: result.py    MIT License 5 votes vote down vote up
def __init__(self, batch_name, sd, mimetype_files=None):
        """Initialize batch statistics and fetch the output queue/domain.

        min_time/earliest_time start at the maximum sentinel (and vice versa)
        — presumably so the first processed file always replaces them; confirm
        against the accumulation code.
        """
        self.sd = sd
        self.batch = batch_name
        self.log_fp = None
        self.num_files = 0
        self.total_time = 0
        self.min_time = timedelta.max
        self.max_time = timedelta.min
        self.earliest_time = datetime.max
        self.latest_time = datetime.min
        self.queue = self.sd.get_obj('output_queue')
        self.domain = self.sd.get_obj('output_domain')
Example 34
Project: typhon   Author: atmtools   File: collocator.py    MIT License 5 votes vote down vote up
def _get_common_time_period(
            primary, secondary, max_interval, start, end):
        """Restrict both datasets' time coordinates to their common window.

        The window is the overlap of [start, end] with both datasets' time
        coverage, padded by max_interval on each side.
        """
        max_interval = pd.Timedelta(max_interval)

        # We want to select a common time window from both datasets,
        # aligned to the primary's time coverage. Because xarray has a
        # very annoying bug in time retrieving
        # (https://github.com/pydata/xarray/issues/1240), this is a
        # little bit cumbersome:
        common_start = max(
            start,
            pd.Timestamp(primary.time.min().item(0)) - max_interval,
            pd.Timestamp(secondary.time.min().item(0)) - max_interval
        )
        common_end = min(
            end,
            pd.Timestamp(primary.time.max().item(0)) + max_interval,
            pd.Timestamp(secondary.time.max().item(0)) + max_interval
        )

        # where() masks values outside the window with NaT; dropna then
        # removes them along the first time dimension.
        primary_period = primary.time.where(
            (primary.time.values >= np.datetime64(common_start))
            & (primary.time.values <= np.datetime64(common_end))
        ).dropna(primary.time.dims[0])

        secondary_period = secondary.time.where(
            (secondary.time.values >= np.datetime64(common_start))
            & (secondary.time.values <= np.datetime64(common_end))
        ).dropna(secondary.time.dims[0])

        return primary_period, secondary_period
Example 35
Project: typhon   Author: atmtools   File: fileset.py    MIT License 5 votes vote down vote up
def __init__(self, fileset, start, end, *args):
        """Build a "no files found" error message for *fileset* over [start, end]."""
        # The whole representable time range means the caller gave no explicit
        # period, so leave the dates out of the message.
        covers_everything = (
            start == datetime.min and end >= datetime.max - timedelta(seconds=1)
        )
        if covers_everything:
            message = f"Found no files for {fileset.name}!"
        else:
            message = f"Found no files for {fileset.name} between {start} and {end}!"

        message += (
            f"\nPath: {fileset.path}\nCheck the path for misspellings"
            f" and whether there are files in this time period."
        )
        Exception.__init__(self, message, *args)
Example 36
Project: worktimer   Author: moehrenzahn   File: formatter.py    MIT License 5 votes vote down vote up
def format_time(t):
    """Format a timedelta, time, or datetime as "HH:MM".

    A timedelta is interpreted as a duration past midnight; a time is placed
    on an arbitrary date so strftime can be applied; a datetime is formatted
    directly.  Seconds and smaller units are dropped.
    """
    # isinstance (instead of the original `type(t) is ...`) also accepts
    # subclasses; datetime is not a subclass of time, so a datetime still
    # skips both conversions and is formatted directly.
    if isinstance(t, timedelta):
        # Anchoring at datetime.min turns the duration into a wall-clock time.
        t = (datetime.min + t).time()
    if isinstance(t, time):
        t = datetime(2000, 1, 1, t.hour, t.minute)
    return datetime.strftime(t, "%H:%M")
Example 37
Project: cookiecutter-pelican-grupys   Author: grupydf   File: sitemap.py    GNU General Public License v3.0 5 votes vote down vote up
def set_url_wrappers_modification_date(self, wrappers):
        """Set each wrapper's 'modified' attribute to its newest related date.

        Takes the maximum over all article dates and, where parseable, their
        modification dates, localized to the sitemap's timezone.
        """
        for (wrapper, articles) in wrappers:
            # datetime.min is the identity element for max() over datetimes.
            lastmod = datetime.min.replace(tzinfo=self.timezone)
            for article in articles:
                lastmod = max(lastmod, article.date.replace(tzinfo=self.timezone))
                try:
                    modified = self.get_date_modified(article, datetime.min).replace(tzinfo=self.timezone)
                    lastmod = max(lastmod, modified)
                except ValueError:
                    # Supressed: user will be notified.
                    pass
            setattr(wrapper, 'modified', str(lastmod))
Example 38
Project: mars   Author: mars-project   File: worker_pages.py    Apache License 2.0 5 votes vote down vote up
def __init__(self):
        """Initialize empty timeline bookkeeping.

        min_left/max_right hold inverted sentinels (max/min) — presumably so
        the first recorded event narrows them; confirm in the update code.
        """
        self.owner_to_ticker = OrderedDict()
        self.unfinished_to_indexes = dict()
        self.base_indexes = defaultdict(lambda: 0)

        self.min_left = datetime.max
        self.max_right = datetime.min
        # Vertical bounds, also inverted (0xffffffff is the 32-bit maximum).
        self.min_bottom = 0xffffffff
        self.max_top = 0
Example 39
Project: qis   Author: quru   File: permissions_manager.py    GNU Affero General Public License v3.0 5 votes vote down vote up
def reset_portfolio_permissions(self):
        """
        Marks as expired all cached portfolio permissions data, for all image
        server processes, by incrementing the database data version number.
        """
        with self._data_refresh_lock:
            new_ver = self._db.increment_property(Property.FOLIO_PERMISSION_VERSION)
        # datetime.min forces the next permissions check to refresh immediately.
        self._foliop_last_check = datetime.min
        self._logger.info(
            'Portfolio permissions setting new version ' + new_ver
        )
Example 40
Project: qis   Author: quru   File: permissions_manager.py    GNU Affero General Public License v3.0 5 votes vote down vote up
def reset_folder_permissions(self):
        """
        Marks as expired all cached folder permissions data, for all image
        server processes, by incrementing the database data version number.
        """
        with self._data_refresh_lock:
            new_ver = self._db.increment_property(Property.FOLDER_PERMISSION_VERSION)
        # datetime.min forces the next permissions check to refresh immediately.
        self._fp_last_check = datetime.min
        self._logger.info(
            'Folder permissions setting new version ' + new_ver
        )
Example 41
Project: qis   Author: quru   File: template_manager.py    GNU Affero General Public License v3.0 5 votes vote down vote up
def __init__(self, data_manager, logger):
        """Set up the template cache in an "everything stale" state."""
        self._db = data_manager
        self._logger = logger
        self._default_template_name = ''
        self._data_version = 0
        self._template_cache = KeyValueCache()
        self._update_lock = threading.Lock()
        # datetime.min means "never checked", forcing a refresh on first use.
        self._last_check = datetime.min
        # Event is set while the cache is safe to read.
        self._useable = threading.Event()
        self._useable.set()
Example 42
Project: qis   Author: quru   File: template_manager.py    GNU Affero General Public License v3.0 5 votes vote down vote up
def reset(self):
        """
        Invalidates the cached template data by incrementing the database data
        version number. This change will be detected on the next call to this
        object, and within the SYNC_INTERVAL by all other processes.
        """
        with self._update_lock:
            new_ver = self._db.increment_property(Property.IMAGE_TEMPLATES_VERSION)
        # Resetting the check time to datetime.min makes this process notice
        # the new version on its very next check.
        self._last_check = datetime.min
        self._logger.info('Image templates setting new version ' + new_ver)
Example 43
Project: movie2parallelDB   Author: alpoktem   File: subsegment_movie.py    GNU General Public License v3.0 5 votes vote down vote up
def subriptime_to_seconds(srTime):
    """Convert a SubRipTime-like object to seconds past midnight (float)."""
    # Anchoring the time-of-day on date.min and subtracting datetime.min
    # leaves exactly the elapsed time since midnight as a timedelta.
    as_datetime = datetime.combine(date.min, srTime.to_time())
    elapsed = as_datetime - datetime.min
    return elapsed.total_seconds()
Example 44
Project: FUTU_Stop_Loss   Author: BigtoC   File: test_comparisons.py    MIT License 5 votes vote down vote up
def test_timestamp_compare_with_early_datetime(self):
        """Comparing a Timestamp with datetimes far outside its representable
        range behaves sanely: no spurious equality, correct ordering."""
        # e.g. datetime.min
        stamp = Timestamp('2012-01-01')

        # Equality must be False (not an error) for out-of-range datetimes.
        assert not stamp == datetime.min
        assert not stamp == datetime(1600, 1, 1)
        assert not stamp == datetime(2700, 1, 1)
        assert stamp != datetime.min
        assert stamp != datetime(1600, 1, 1)
        assert stamp != datetime(2700, 1, 1)
        # Ordering still works across those extremes.
        assert stamp > datetime(1600, 1, 1)
        assert stamp >= datetime(1600, 1, 1)
        assert stamp < datetime(2700, 1, 1)
        assert stamp <= datetime(2700, 1, 1)
Example 45
Project: vnpy_crypto   Author: birforce   File: test_comparisons.py    MIT License 5 votes vote down vote up
def test_timestamp_compare_with_early_datetime(self):
        """Comparing a Timestamp with datetimes far outside its representable
        range behaves sanely: no spurious equality, correct ordering."""
        # e.g. datetime.min
        stamp = Timestamp('2012-01-01')

        # Equality must be False (not an error) for out-of-range datetimes.
        assert not stamp == datetime.min
        assert not stamp == datetime(1600, 1, 1)
        assert not stamp == datetime(2700, 1, 1)
        assert stamp != datetime.min
        assert stamp != datetime(1600, 1, 1)
        assert stamp != datetime(2700, 1, 1)
        # Ordering still works across those extremes.
        assert stamp > datetime(1600, 1, 1)
        assert stamp >= datetime(1600, 1, 1)
        assert stamp < datetime(2700, 1, 1)
        assert stamp <= datetime(2700, 1, 1)
Example 46
Project: appcompatprocessor   Author: mbevilacqua   File: AmCacheParser.py    Apache License 2.0 5 votes vote down vote up
def make_windows_timestamp_value_getter(value_name):
    """
    Return a function that fetches the value from the registry key
    as a Windows timestamp.

    The returned getter yields ``datetime.min`` when the value is absent
    or its data cannot be parsed as a timestamp.
    """
    # NOTE(review): dropped the original `f = make_value_getter(value_name)`
    # line — its result was never used.
    def _value_getter(key):
        try:
            value = key[0].get_value_by_name(value_name)  # single lookup (was two)
            if value is not None:
                # Missing/empty payload is treated as timestamp 0.
                return parse_windows_timestamp(value.get_data_as_integer() or 0)
            return datetime.min
        except ValueError:
            # Unparseable timestamp data.
            return datetime.min
    return _value_getter
Example 47
Project: appcompatprocessor   Author: mbevilacqua   File: AmCacheParser.py    Apache License 2.0 5 votes vote down vote up
def make_unix_timestamp_value_getter(value_name):
    """
    Return a function that fetches the value from the registry key
    as a UNIX timestamp.

    The returned getter yields ``datetime.min`` when the value is absent
    or its data cannot be parsed as a timestamp.
    """
    # NOTE(review): dropped the original `f = make_value_getter(value_name)`
    # line — its result was never used.
    def _value_getter(key):
        try:
            value = key[0].get_value_by_name(value_name)  # single lookup (was two)
            if value is not None:
                # Missing/empty payload is treated as timestamp 0.
                return parse_unix_timestamp(value.get_data_as_integer() or 0)
            return datetime.min
        except ValueError:
            # Unparseable timestamp data.
            return datetime.min
    return _value_getter
Example 48
Project: appcompatprocessor   Author: mbevilacqua   File: appcompat_redline.py    Apache License 2.0 5 votes vote down vote up
def calculateID(self, file_name_fullpath):
        """Derives an instance ID for the audit file.

        Uses the most recent RegistryItem 'Modified' timestamp; when no
        timestamp could be extracted at all, falls back to an MD5 digest of
        the raw file contents.

        Args:
            file_name_fullpath: path of the audit file to fingerprint.

        Returns:
            datetime of the newest Modified entry, or an MD5 hex digest
            string when no Modified dates were found.
        """
        # BUG FIX: instanceID was initialised to datetime.min, which made the
        # 'if instanceID is None' plan-B fallback below unreachable.
        instanceID = None
        tmp_instanceID = None

        try:
            file_object = loadFile(file_name_fullpath)
            root = ET.parse(file_object).getroot()
            file_object.close()
            for reg_key in root.findall('RegistryItem'):
                tmp_reg_key = reg_key.find('Modified')
                if tmp_reg_key is not None:
                    reg_modified = tmp_reg_key.text
                    try:
                        tmp_instanceID = datetime.strptime(reg_modified, "%Y-%m-%dT%H:%M:%SZ")
                    except ValueError as e:
                        # Unparseable dates rank as "newest possible" so they win.
                        tmp_instanceID = datetime.max
                        logger.warning("Invalid reg_modified date found!: %s (%s)" % (reg_modified, file_name_fullpath))
                    # Keep the most recent timestamp seen so far.
                    if instanceID is None or instanceID < tmp_instanceID:
                        instanceID = tmp_instanceID
                else:
                    logger.warning("Found RegistryItem with no Modified date (Mir bug?): %s" % file_name_fullpath)
        except Exception:
            logger.exception("Error on calculateID for: %s" % file_name_fullpath)

        # If we found no Modified date in any of the RegistryItems we go with plan B (but most probably ShimCacheParser will fail to parse anyway)
        if instanceID is None:
            file_object = loadFile(file_name_fullpath)
            content = file_object.read()
            instanceID = hashlib.md5(content).hexdigest()
            file_object.close()

        return instanceID
Example 49
Project: appcompatprocessor   Author: mbevilacqua   File: appcompat_hxregistryaudit.py    Apache License 2.0 5 votes vote down vote up
def calculateID(self, file_name_fullpath):
        """Derives an instance ID for the audit file.

        Uses the most recent RegistryItem 'Modified' timestamp; when no
        timestamp could be extracted at all, falls back to an MD5 digest of
        the raw file contents.

        Args:
            file_name_fullpath: path of the audit file to fingerprint.

        Returns:
            datetime of the newest Modified entry, or an MD5 hex digest
            string when no Modified dates were found.
        """
        # BUG FIX: instanceID was initialised to datetime.min, which made the
        # 'if instanceID is None' plan-B fallback below unreachable.
        instanceID = None
        tmp_instanceID = None

        try:
            file_object = loadFile(file_name_fullpath)
            root = ET.parse(file_object).getroot()
            file_object.close()
            for reg_key in root.findall('RegistryItem'):
                tmp_reg_key = reg_key.find('Modified')
                if tmp_reg_key is not None:
                    reg_modified = tmp_reg_key.text
                    try:
                        tmp_instanceID = datetime.strptime(reg_modified, "%Y-%m-%dT%H:%M:%SZ")
                    except ValueError as e:
                        # Unparseable dates rank as "newest possible" so they win.
                        tmp_instanceID = datetime.max
                        logger.warning("Invalid reg_modified date found!: %s (%s)" % (reg_modified, file_name_fullpath))
                    # Keep the most recent timestamp seen so far.
                    if instanceID is None or instanceID < tmp_instanceID:
                        instanceID = tmp_instanceID
                else:
                    logger.warning("Found RegistryItem with no Modified date (Mir bug?): %s" % file_name_fullpath)
        except Exception:
            logger.exception("Error on calculateID for: %s" % file_name_fullpath)

        # If we found no Modified date in any of the RegistryItems we go with plan B (but most probably ShimCacheParser will fail to parse anyway)
        if instanceID is None:
            file_object = loadFile(file_name_fullpath)
            content = file_object.read()
            instanceID = hashlib.md5(content).hexdigest()
            file_object.close()

        return instanceID
Example 50
Project: appcompatprocessor   Author: mbevilacqua   File: appcompat_mirlua_v1.py    Apache License 2.0 5 votes vote down vote up
def calculateID(self, file_name_fullpath):
        """Derives an instance ID from the first PersistenceItem's creation
        date (all items in an audit share the same date).

        Falls back to an MD5 digest of the raw file contents when no date
        could be extracted (e.g. the XML failed to parse).

        Args:
            file_name_fullpath: path of the audit file to fingerprint.

        Returns:
            datetime of the PersistenceItem 'created' attribute, or an MD5
            hex digest string when none was found.
        """
        # BUG FIX: instanceID was initialised to datetime.min, which made the
        # 'if instanceID is None' plan-B fallback below unreachable.
        instanceID = None
        tmp_instanceID = None

        try:
            file_object = loadFile(file_name_fullpath)
            root = ET.parse(file_object).getroot()
            file_object.close()
            reg_key = root.find('PersistenceItem')
            reg_modified = reg_key.get('created')
            try:
                tmp_instanceID = datetime.strptime(reg_modified, "%Y-%m-%dT%H:%M:%SZ")
            except ValueError as e:
                # Unparseable dates rank as "newest possible".
                tmp_instanceID = datetime.max
                logger.warning("Invalid reg_modified date found!: %s (%s)" % (reg_modified, file_name_fullpath))
            instanceID = tmp_instanceID
        except Exception:
            traceback.print_exc(file=sys.stdout)

        # If we found no PersistenceItem date we go with plan B (but most probably this is corrupt and will fail later)
        if instanceID is None:
            file_object = loadFile(file_name_fullpath)
            content = file_object.read()
            instanceID = hashlib.md5(content).hexdigest()
            file_object.close()

        return instanceID
Example 51
Project: appcompatprocessor   Author: mbevilacqua   File: appcompat_mirregistryaudit.py    Apache License 2.0 5 votes vote down vote up
def calculateID(self, file_name_fullpath):
        """Derives an instance ID for the audit file.

        Uses the most recent RegistryItem 'Modified' timestamp; when no
        timestamp could be extracted at all, falls back to an MD5 digest of
        the raw file contents.

        Args:
            file_name_fullpath: path of the audit file to fingerprint.

        Returns:
            datetime of the newest Modified entry, or an MD5 hex digest
            string when no Modified dates were found.
        """
        # BUG FIX: instanceID was initialised to datetime.min, which made the
        # 'if instanceID is None' plan-B fallback below unreachable.
        instanceID = None
        tmp_instanceID = None

        try:
            file_object = loadFile(file_name_fullpath)
            root = ET.parse(file_object).getroot()
            file_object.close()
            for reg_key in root.findall('RegistryItem'):
                tmp_reg_key = reg_key.find('Modified')
                if tmp_reg_key is not None:
                    reg_modified = tmp_reg_key.text
                    try:
                        tmp_instanceID = datetime.strptime(reg_modified, "%Y-%m-%dT%H:%M:%SZ")
                    except ValueError as e:
                        # Unparseable dates rank as "newest possible" so they win.
                        tmp_instanceID = datetime.max
                        logger.warning("Invalid reg_modified date found!: %s (%s)" % (reg_modified, file_name_fullpath))
                    # Keep the most recent timestamp seen so far.
                    if instanceID is None or instanceID < tmp_instanceID:
                        instanceID = tmp_instanceID
                else:
                    logger.warning("Found RegistryItem with no Modified date (Mir bug?): %s" % file_name_fullpath)
        except Exception:
            logger.exception("Error on calculateID for: %s" % file_name_fullpath)

        # If we found no Modified date in any of the RegistryItems we go with plan B (but most probably ShimCacheParser will fail to parse anyway)
        if instanceID is None:
            file_object = loadFile(file_name_fullpath)
            content = file_object.read()
            instanceID = hashlib.md5(content).hexdigest()
            file_object.close()

        return instanceID
Example 52
Project: appcompatprocessor   Author: mbevilacqua   File: appcompat_mirlua_v2.py    Apache License 2.0 5 votes vote down vote up
def calculateID(self, file_name_fullpath):
        """Derives an instance ID from the first AppCompatItemExtended's
        creation date (all items in an audit share the same date).

        Falls back to an MD5 digest of the raw file contents when no date
        could be extracted (e.g. the XML failed to parse).

        Args:
            file_name_fullpath: path of the audit file to fingerprint.

        Returns:
            datetime of the 'created' attribute, or an MD5 hex digest string
            when none was found.
        """
        # BUG FIX: instanceID was initialised to datetime.min, which made the
        # 'if instanceID is None' plan-B fallback below unreachable.
        instanceID = None
        tmp_instanceID = None

        try:
            file_object = loadFile(file_name_fullpath)
            root = ET.parse(file_object).getroot()
            file_object.close()
            reg_key = root.find('AppCompatItemExtended')
            reg_modified = reg_key.get('created')
            try:
                tmp_instanceID = datetime.strptime(reg_modified, "%Y-%m-%dT%H:%M:%SZ")
            except ValueError as e:
                # Unparseable dates rank as "newest possible".
                tmp_instanceID = datetime.max
                logger.warning("Invalid reg_modified date found!: %s (%s)" % (reg_modified, file_name_fullpath))
            instanceID = tmp_instanceID
        except Exception:
            traceback.print_exc(file=sys.stdout)

        # If we found no PersistenceItem date we go with plan B (but most probably this is corrupt and will fail later)
        if instanceID is None:
            file_object = loadFile(file_name_fullpath)
            content = file_object.read()
            instanceID = hashlib.md5(content).hexdigest()
            file_object.close()

        return instanceID
Example 53
Project: EC-Council-OS   Author: cybershieldconsulting   File: usbdeviceforensics.py    GNU General Public License v3.0 5 votes vote down vote up
def __init__(self):
        """Initialises an empty timestamp/file record."""
        self.timestamp = datetime.min  # datetime.min acts as the 'not set' sentinel
        self.file = ''  # associated file path (empty until populated)
Example 54
Project: EC-Council-OS   Author: cybershieldconsulting   File: usbdeviceforensics.py    GNU General Public License v3.0 5 votes vote down vote up
def __init__(self):
        """Initialises an empty USB device record.

        String fields default to '', list fields to [], and every timestamp
        to the datetime.min sentinel meaning 'not yet recovered'.
        """
        self.vendor = ''
        self.product = ''
        self.version = ''
        self.serial_number = ''
        self.vid = ''
        self.pid = ''
        self.parent_prefix_id = ''
        self.drive_letter = ''
        self.volume_name = ''
        self.guid = ''
        self.disk_signature = ''
        self.mountpoint = ''
        # Registry-derived timestamps; stay at datetime.min until populated.
        self.device_classes_datetime_53f56307b6bf11d094f200a0c91efb8b = datetime.min
        self.device_classes_datetime_10497b1bba5144e58318a65c837b6661 = datetime.min
        self.vid_pid_datetime = datetime.min
        self.usb_stor_datetime = datetime.min
        self.install_datetime = datetime.min
        self.usbstor_datetime64 = datetime.min
        self.usbstor_datetime65 = datetime.min
        self.usbstor_datetime66 = datetime.min
        self.usbstor_datetime67 = datetime.min
        self.mountpoint2 = []
        self.emdmgmt = []


# System Hive Methods ######################################################### 
Example 55
Project: cuny-bdif   Author: aristotle-tek   File: result.py    MIT License 5 votes vote down vote up
def __init__(self, batch_name, sd, mimetype_files=None):
        """Collects per-batch processing statistics.

        The extremes are seeded with opposite sentinels (timedelta.max/min,
        datetime.max/min) so the first recorded sample always replaces them.
        """
        self.sd = sd
        self.batch = batch_name
        self.log_fp = None
        self.num_files = 0
        self.total_time = 0
        self.min_time = timedelta.max
        self.max_time = timedelta.min
        self.earliest_time = datetime.max
        self.latest_time = datetime.min
        # NOTE(review): mimetype_files is accepted but never stored — confirm
        # whether callers rely on it being ignored.
        self.queue = self.sd.get_obj('output_queue')
        self.domain = self.sd.get_obj('output_domain')
Example 56
Project: pyxlsb   Author: wwwiiilll   File: workbook.py    MIT License 5 votes vote down vote up
def convert_time(self, value):
        """Convert the fractional part of a numeric cell value to a time of day.

        Returns None for any non-numeric input.
        """
        if not isinstance(value, (int, float)):
            return None
        # Fraction of the day, scaled to whole seconds (24 * 60 * 60 per day).
        day_seconds = int((value % 1) * 24 * 60 * 60)
        return (datetime.min + timedelta(seconds=day_seconds)).time()
Example 57
Project: mne-bids   Author: mne-tools   File: utils.py    BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def _get_mrk_meas_date(mrk):
    """Find the measurement date from a KIT marker file."""
    info = get_kit_info(mrk, False)[0]
    meas_date = info.get('meas_date', None)
    # Some readers wrap the date in a sequence; unwrap the first element.
    if isinstance(meas_date, (tuple, list, np.ndarray)):
        meas_date = meas_date[0]
    if isinstance(meas_date, datetime):
        return meas_date
    if meas_date is not None:
        # assumes a bare number here is a POSIX timestamp — TODO confirm
        return datetime.fromtimestamp(meas_date)
    # No date recorded at all: fall back to the datetime.min sentinel.
    return datetime.min
Example 58
Project: thymekeeper   Author: wjt   File: ical.py    GNU General Public License v3.0 5 votes vote down vote up
def slice(self, start, end):
        """Return the calendar's timed vevents between *start* and *end*
        (dates), sorted by (dtstart, dtend). All-day events are skipped.

        A None bound falls back to BIG_BANG / HEAT_DEATH_OF_UNIVERSE.
        """
        # TODO: wrong, use dateutil's support for this
        vtimezone = self.calendar.contents['vtimezone'][0]
        tzid = vtimezone.contents['tzid'][0]
        tz = pytz.timezone(tzid.value)

        # Expand the date bounds to the full days they cover, in calendar tz.
        start_midnight = tz.localize(datetime.combine(start, time.min)) if start is not None else BIG_BANG
        end_midnight   = tz.localize(datetime.combine(end,   time.max)) if end   is not None else HEAT_DEATH_OF_UNIVERSE

        # TODO: ignore all-day events (?)
        # TODO: only count OPAQUE (ie busy) events, not TRANSPARENT (ie ignore for free/busy
        # purposes)?
        # TODO: recurring events
        vevents = []
        for vevent in self.calendar.contents['vevent']:
            # NOTE(review): this rebinds the 'start' parameter; harmless since
            # the bounds were computed above, but worth renaming.
            start = vevent.dtstart.value

            # Skip all-day events
            if not isinstance(start, datetime):
                if not isinstance(start, date):
                    log.warning("DTSTART neither datetime nor date: %s", vevent)

                continue

            try:
                if start_midnight <= start <= end_midnight:
                    vevents.append(vevent)
            except TypeError:
                # Naive vs aware datetime comparison raises TypeError; log and skip.
                log.error("%s <= %s <= %s from %s",
                          start_midnight, start, end_midnight, vevent,
                          exc_info=True)

        vevents.sort(key=lambda vevent: (vevent.dtstart.value, vevent.dtend.value))
        return vevents
Example 59
Project: xuemc   Author: skycucumber   File: times.py    GNU General Public License v2.0 5 votes vote down vote up
def datetime_to_long(dt):
    """Converts a datetime object to a long integer representing the number
    of microseconds since ``datetime.min``.
    """
    # Strip any tzinfo so subtraction against the naive dt.min is legal.
    naive = dt.replace(tzinfo=None)
    return timedelta_to_usecs(naive - dt.min)
Example 60
Project: xuemc   Author: skycucumber   File: times.py    GNU General Public License v2.0 5 votes vote down vote up
def long_to_datetime(x):
    """Converts a long integer representing the number of microseconds since
    ``datetime.min`` to a datetime object.
    """
    MICROS_PER_DAY = 86400000000
    MICROS_PER_SECOND = 1000000

    # Split the total microseconds into day / second / microsecond parts.
    days, remainder = divmod(x, MICROS_PER_DAY)
    seconds, micros = divmod(remainder, MICROS_PER_SECOND)

    return datetime.min + timedelta(days=days, seconds=seconds,
                                    microseconds=micros)


# Ambiguous datetime object 
Example 61
Project: Computable   Author: ktraunmueller   File: test_timeseries.py    MIT License 5 votes vote down vote up
def test_timestamp_compare_with_early_datetime(self):
        """Comparing a Timestamp with datetimes far outside its representable
        range behaves sanely: no spurious equality, correct ordering."""
        # e.g. datetime.min
        stamp = Timestamp('2012-01-01')

        # Equality must be False (not an error) for out-of-range datetimes.
        self.assertFalse(stamp == datetime.min)
        self.assertFalse(stamp == datetime(1600, 1, 1))
        self.assertFalse(stamp == datetime(2700, 1, 1))
        self.assert_(stamp != datetime.min)
        self.assert_(stamp != datetime(1600, 1, 1))
        self.assert_(stamp != datetime(2700, 1, 1))
        # Ordering still works across those extremes.
        self.assert_(stamp > datetime(1600, 1, 1))
        self.assert_(stamp >= datetime(1600, 1, 1))
        self.assert_(stamp < datetime(2700, 1, 1))
        self.assert_(stamp <= datetime(2700, 1, 1))
Example 62
Project: Computable   Author: ktraunmueller   File: test_timeseries.py    MIT License 5 votes vote down vote up
def test_min_max(self):
        """min()/max() of a shuffled DatetimeIndex return Timestamps equal to
        the sorted range's endpoints."""
        rng = date_range('1/1/2000', '12/31/2000')
        # Shuffle so min/max cannot rely on positional shortcuts.
        rng2 = rng.take(np.random.permutation(len(rng)))

        the_min = rng2.min()
        the_max = rng2.max()
        tm.assert_isinstance(the_min, Timestamp)
        tm.assert_isinstance(the_max, Timestamp)
        self.assertEqual(the_min, rng[0])
        self.assertEqual(the_max, rng[-1])

        # Sorted index: endpoints directly.
        self.assertEqual(rng.min(), rng[0])
        self.assertEqual(rng.max(), rng[-1])
Example 63
Project: Computable   Author: ktraunmueller   File: test_timeseries.py    MIT License 5 votes vote down vote up
def test_min_max_series(self):
        """Series.min()/max() on a datetime column return Timestamps matching
        the first/last values of an increasing range."""
        rng = date_range('1/1/2000', periods=10, freq='4h')
        lvls = ['A', 'A', 'A', 'B', 'B', 'B', 'C', 'C', 'C', 'C']
        df = DataFrame({'TS': rng, 'V': np.random.randn(len(rng)),
                        'L': lvls})

        # Increasing range: max is the last element.
        result = df.TS.max()
        exp = Timestamp(df.TS.iget(-1))
        self.assertTrue(isinstance(result, Timestamp))
        self.assertEqual(result, exp)

        # ... and min is the first.
        result = df.TS.min()
        exp = Timestamp(df.TS.iget(0))
        self.assertTrue(isinstance(result, Timestamp))
        self.assertEqual(result, exp)
Example 64
Project: pitchfork-scraper   Author: dav1do   File: reviews.py    MIT License 5 votes vote down vote up
def make_datetime_from_date_string(date_string, format_string):
        """
        Parse *date_string* according to *format_string* into a datetime.

        @param date_string: the date string to format
        @param format_string: format for time.strptime
        @return: a datetime object or datetime.min if it's an invalid format
        """
        try:
            parsed = strptime(date_string, format_string)
            # First six struct_time fields are year, month, day, hour, min, sec.
            result = datetime(*parsed[:6])
        except ValueError:
            result = datetime.min
        return result
Example 65
Project: IBATS_HuobiTrader_old   Author: mmmaaaggg   File: db_utils.py    GNU General Public License v3.0 5 votes vote down vote up
def default(self, obj):
        """JSON-encode SQLAlchemy declarative instances and date-like values.

        Declarative instances become a dict of their JSON-encodable public
        attributes; datetime/date/timedelta values are rendered as ISO
        strings; anything else falls back to the base encoder.
        """
        # print("obj.__class__", obj.__class__, "isinstance(obj.__class__, DeclarativeMeta)", isinstance(obj.__class__, DeclarativeMeta))
        if isinstance(obj.__class__, DeclarativeMeta):
            # an SQLAlchemy class
            fields = {}
            for field in [x for x in dir(obj) if not x.startswith('_') and x != 'metadata']:
                data = obj.__getattribute__(field)
                try:
                    json.dumps(data)     # this will fail on non-encodable values, like other classes
                    fields[field] = data
                except TypeError:    # added handling for datetime-family values
                    # NOTE(review): debug print left in the encode path — confirm intentional.
                    print(data)
                    if isinstance(data, datetime):
                        fields[field] = data.isoformat()
                    elif isinstance(data, date):
                        fields[field] = data.isoformat()
                    elif isinstance(data, timedelta):
                        # Render a (sub-day) timedelta as a wall-clock time past midnight.
                        fields[field] = (datetime.min + data).time().isoformat()
                    else:
                        fields[field] = None
            # a json-encodable dict
            return fields
        elif isinstance(obj, date):
            # NOTE(review): json.dumps here produces a quoted string inside the
            # outer document — confirm the double encoding is intended.
            return json.dumps(date_2_str(obj))

        return json.JSONEncoder.default(self, obj)
Example 66
Project: PythonTwitchBotFramework   Author: sharkbound   File: streaminfoapi.py    MIT License 5 votes vote down vote up
def __init__(self, client_id: str, user: str):
        """Initialises stream info fields to 'no data yet' defaults.

        datetime.min marks started_at as unknown until real data arrives.
        """
        super().__init__(client_id, user)

        self.viewer_count: int = 0
        self.title: str = ''
        self.game_id: int = 0
        self.community_ids: frozenset = frozenset()
        self.started_at: datetime = datetime.min
        self.user_id: int = 0
        self.tag_ids: frozenset = frozenset()
Example 67
Project: PythonTwitchBotFramework   Author: sharkbound   File: command.py    MIT License 5 votes vote down vote up
def get_time_since_execute(channel: str, cmd: str) -> int:
    """Return whole seconds elapsed since *cmd* last executed in *channel*.

    Commands that never ran fall back to datetime.min, yielding a huge value.
    """
    key = _create_cooldown_key(channel, cmd)
    last_run = command_last_execute.get(key, datetime.min)
    elapsed = datetime.now() - last_run
    return int(abs(elapsed.total_seconds()))
Example 68
Project: PythonTwitchBotFramework   Author: sharkbound   File: twitch_api_util.py    MIT License 5 votes vote down vote up
async def get_user_creation_date(user: str) -> datetime:
    """Fetch the account-creation datetime for Twitch *user*.

    Returns datetime.min when the API response lacks a 'created_at' field.
    """
    # `await` requires an async def — the function body already awaited,
    # so the coroutine declaration is restored here.
    _, data = await get_url(USER_ACCOUNT_AGE_API.format(user), get_headers())

    if 'created_at' not in data:
        # BUG FIX: was `return datetime.min()` — datetime.min is an attribute,
        # not a callable, so that raised TypeError.
        return datetime.min
    #                                            2012-09-03T01:30:56Z
    return datetime.strptime(data['created_at'], '%Y-%m-%dT%H:%M:%SZ')
Example 69
Project: backtrader   Author: backtrader   File: rollover.py    GNU General Public License v3.0 5 votes vote down vote up
def start(self):
        """Starts the roll-over feed: starts every underlying data feed and
        primes the internal rotation state (current feed, expiry, last-seen
        datetimes)."""
        super(RollOver, self).start()
        for d in self._rolls:
            d.setenvironment(self._env)
            d._start()

        # put the references in a separate list to have pops
        self._ds = list(self._rolls)
        self._d = self._ds.pop(0) if self._ds else None
        self._dexp = None
        # One 'last delivered datetime' sentinel per remaining feed.
        self._dts = [datetime.min for xx in self._ds]
Example 70
Project: backtrader   Author: backtrader   File: timer.py    GNU General Public License v3.0 5 votes vote down vote up
def _reset_when(self, ddate=datetime.min):
        self._when = self._rstwhen
        self._dtwhen = self._dwhen = None

        self._lastcall = ddate 
Example 71
Project: backtrader   Author: backtrader   File: timer.py    GNU General Public License v3.0 5 votes vote down vote up
def _check_week(self, ddate=date.min):
        """Return True if the timer may fire on *ddate* according to the
        configured weekday mask (self.p.weekdays), honouring week carry-over
        (self.p.weekcarry) for weekdays missed in a previous week."""
        if not self.p.weekdays:
            return True

        _, dweek, dwkday = ddate.isocalendar()

        mask = self._weekmask
        daycarry = False
        if dweek != self._curweek:
            self._curweek = dweek  # remember the new week
            # Carry an unconsumed mask into the new week if weekcarry is set.
            daycarry = self.p.weekcarry and bool(mask)
            self._weekmask = mask = collections.deque(self.p.weekdays)

        dc = bisect.bisect_left(mask, dwkday)  # "left" for days before dday
        daycarry = daycarry or (self.p.weekcarry and dc > 0)
        if dc < len(mask):
            curday = bisect.bisect_right(mask, dwkday, lo=dc) > 0  # check dday
            dc += curday
        else:
            curday = False

        # Consume the weekdays that have been passed or hit.
        while dc:
            mask.popleft()
            dc -= 1

        return daycarry or curday
Example 72
Project: whylog   Author: whylog   File: test_get_search_range.py    BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def setUpClass(cls):
        """Builds the shared fixtures for the search-range tests.

        Creates two cause parsers ('database', 'apache'), one effect parser,
        a synthetic effect clue, and reference datetimes around the effect
        time (2016-04-12 23:54:43). datetime.min is kept as the
        'search from the beginning of time' bound.
        """
        SettingsFactorySelector.WHYLOG_DIR = TestPaths.WHYLOG_DIR
        cls.config = SettingsFactorySelector.get_settings()['config']
        cls.whylog_dir = SettingsFactorySelector._attach_whylog_dir(os.getcwd())

        cause1_regex = '^(\d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d) cause1 transaction number: (\d+) Host: (\w)$'
        cause1_line = '2016-04-12 23:39:43 cause1 transaction number: 10101 Host: db_host'
        convertions = {1: 'date'}
        cls.cause1 = RegexParser("cause1", cause1_line, cause1_regex, [1], 'database', convertions)

        cause2_regex = '^(\d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d) cause2 moved resource id: (\d+) Host: (\w)$'
        cause2_line = '2016-04-12 23:40:43 cause2 moved resource id: 1234 Host: apache_host'
        convertions = {1: 'date'}
        cls.cause2 = RegexParser("cause2", cause2_line, cause2_regex, [1], 'apache', convertions)

        effect_regex = '^(\d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d) effect internal server error Host: (\w)$'
        effect_line = '2016-04-12 23:54:43 effect internal server error Host: apache_host'
        convertions = {1: 'date'}
        cls.effect = RegexParser("effect", effect_line, effect_regex, [1], 'apache', convertions)

        line_source = LineSource('localhost', 'node_1.log')
        cls.effect_time = datetime(2016, 4, 12, 23, 54, 43)
        effect_line = '2016-04-12 23:54:43 effect internal server error Host: apache_host'
        cls.effect_clues = {
            'effect': Clue((cls.effect_time, 'apache_host'), effect_line, 40, line_source)
        }

        # Reference datetimes relative to the effect time.
        cls.earliest_date = datetime.min
        cls.ten_second_earlier = datetime(2016, 4, 12, 23, 54, 33)
        cls.one_hundred_second_earlier = datetime(2016, 4, 12, 23, 53, 3)
        cls.ten_second_later = datetime(2016, 4, 12, 23, 54, 53)
Example 73
Project: whylog   Author: whylog   File: test_read.py    BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def test_bisect_first_line_of_file(self):
        """Searching with a left bound of datetime.min bisects down to file
        offset 0 in a bounded number of seeks."""
        investigation_step = InvestigationStep(
            None, {
                'date': {
                    InvestigationStep.LEFT_BOUND: datetime.min,
                    InvestigationStep.RIGHT_BOUND: self.dummy_date
                }
            }
        )

        backtracker = BacktrackSearcher("", investigation_step, self.super_parser)
        offset = backtracker._find_left(self.opened_file)

        assert offset == 0
        # Bisection should need far fewer seeks than a linear scan would.
        assert self.opened_file._seek_count < 35
Example 74
Project: NiujiaoDebugger   Author: MrSrc   File: datetimetester.py    GNU General Public License v3.0 4 votes vote down vote up
def stats(cls, start_year=1):
        """Prints summary statistics of UTC-offset transitions for all zones.

        Scans every zone's transitions from *start_year* onward and reports
        the counts of gaps/folds/zeros plus the min/max gap and fold together
        with the datetime and zone where each occurred.
        """
        count = gap_count = fold_count = zeros_count = 0
        min_gap = min_fold = timedelta.max
        max_gap = max_fold = ZERO
        # datetime.min sentinels let the first real transition win the
        # (shift, datetime) tuple comparisons below.
        min_gap_datetime = max_gap_datetime = datetime.min
        min_gap_zone = max_gap_zone = None
        min_fold_datetime = max_fold_datetime = datetime.min
        min_fold_zone = max_fold_zone = None
        stats_since = datetime(start_year, 1, 1) # Starting from 1970 eliminates a lot of noise
        for zonename in cls.zonenames():
            count += 1
            tz = cls.fromname(zonename)
            for dt, shift in tz.transitions():
                if dt < stats_since:
                    continue
                if shift > ZERO:
                    # Positive shift: a gap (clocks jump forward).
                    gap_count += 1
                    if (shift, dt) > (max_gap, max_gap_datetime):
                        max_gap = shift
                        max_gap_zone = zonename
                        max_gap_datetime = dt
                    if (shift, datetime.max - dt) < (min_gap, datetime.max - min_gap_datetime):
                        min_gap = shift
                        min_gap_zone = zonename
                        min_gap_datetime = dt
                elif shift < ZERO:
                    # Negative shift: a fold (clocks jump back).
                    fold_count += 1
                    shift = -shift
                    if (shift, dt) > (max_fold, max_fold_datetime):
                        max_fold = shift
                        max_fold_zone = zonename
                        max_fold_datetime = dt
                    if (shift, datetime.max - dt) < (min_fold, datetime.max - min_fold_datetime):
                        min_fold = shift
                        min_fold_zone = zonename
                        min_fold_datetime = dt
                else:
                    zeros_count += 1
        trans_counts = (gap_count, fold_count, zeros_count)
        print("Number of zones:       %5d" % count)
        print("Number of transitions: %5d = %d (gaps) + %d (folds) + %d (zeros)" %
              ((sum(trans_counts),) + trans_counts))
        print("Min gap:         %16s at %s in %s" % (min_gap, min_gap_datetime, min_gap_zone))
        print("Max gap:         %16s at %s in %s" % (max_gap, max_gap_datetime, max_gap_zone))
        print("Min fold:        %16s at %s in %s" % (min_fold, min_fold_datetime, min_fold_zone))
        print("Max fold:        %16s at %s in %s" % (max_fold, max_fold_datetime, max_fold_zone))
Example 75
Project: typhon   Author: atmtools   File: collocator.py    MIT License 4 votes vote down vote up
def concat_collocations(collocations):
    """Concat compact collocations

    Compact collocations cannot be concatenated directly because indices in
    *Collocations/pairs* won't be correct any longer afterwards. This
    concatenate function fixes this problem.

    Args:
        collocations: A list of xarray.Dataset objects
            with compact collocations.

    Returns:
        One xarray.Dataset object
    """

    # We need to increment the pair indices when concatening the datasets
    primary = collocations[0]["Collocations/group"].item(0)
    secondary = collocations[0]["Collocations/group"].item(1)
    primary_size = 0
    secondary_size = 0
    collocation_coord = {
        "Collocations": "Collocations/collocation",
        primary: f"{primary}/collocation",
        secondary: f"{secondary}/collocation",
    }

    # Collect all collocations for each single group:
    groups = defaultdict(list)
    for obj in collocations:
        for group, data in get_xarray_groups(obj).items():
            if group == "Collocations":
                # Correct the indices:
                data["Collocations/pairs"][0, :] += primary_size
                data["Collocations/pairs"][1, :] += secondary_size
                data = data.drop("Collocations/group")
            groups[group].append(data)

        # Track the running offsets for the next dataset's pair indices.
        primary_size += obj.dims[f"{primary}/collocation"]
        secondary_size += obj.dims[f"{secondary}/collocation"]

    # NOTE(review): starts/ends are never populated or read below — dead code?
    starts = []
    ends = []
    for group, data_list in groups.items():
        groups[group] = xr.concat(
                data_list,
                dim=collocation_coord[group]
            )

    # Overall time span is taken from the primary group's time variable.
    start = pd.Timestamp(groups[primary][primary+"/time"].min().item(0))
    end = pd.Timestamp(groups[primary][primary+"/time"].max().item(0))
    merged = xr.merge(groups.values())
    merged.attrs = {
        "start_time": str(start),
        "end_time": str(end),
    }
    merged["Collocations/group"] = collocations[0]["Collocations/group"]
    return merged
Example 76
Project: typhon   Author: atmtools   File: fileset.py    MIT License 4 votes vote down vote up
def _get_time_resolution(path_or_dict, highest=True):
        """Get the lowest/highest time resolution of all placeholders

        Seconds have a higher time resolution than minutes, etc. If our path
        contains seconds, minutes and hours, this will return a timedelta
        object representing 1 second if *highest* is True otherwise 1 hour.

        Args:
            path_or_dict: A path or dictionary with placeholders.
            highest: If true, search for the highest time resolution instead of
                the lowest.

        Returns:
            The placeholder name with the lowest / highest resolution and
            the representing timedelta object.
        """
        if isinstance(path_or_dict, str):
            placeholders = set(re.findall(r"{(\w+)}", path_or_dict))
            # Normalise aliases to their canonical placeholder names.
            if "doy" in placeholders:
                placeholders.remove("doy")
                placeholders.add("day")
            if "year2" in placeholders:
                placeholders.remove("year2")
                placeholders.add("year")
        else:
            placeholders = set(path_or_dict.keys())

        # All placeholders from which we know the resolution:
        placeholders = set(placeholders).intersection(
            FileSet._temporal_resolution
        )

        if not placeholders:
            # There are no placeholders in the path, therefore we return the
            # highest time resolution automatically
            return "year", FileSet._temporal_resolution["year"]

        # E.g. if we want to find the temporal placeholder with the lowest
        # resolution, we have to search for the maximum of their values because
        # they are represented as timedelta objects, i.e. month > day > hour,
        # etc.
        if highest:
            placeholder = min(
                placeholders, key=lambda k: FileSet._temporal_resolution[k]
            )
        else:
            placeholder = max(
                placeholders, key=lambda k: FileSet._temporal_resolution[k]
            )

        return placeholder, FileSet._temporal_resolution[placeholder]
Example 77
Project: EC-Council-OS   Author: cybershieldconsulting   File: usbdeviceforensics.py    GNU General Public License v3.0 4 votes vote down vote up
def output_data_to_console(devices=None):
    """Outputs the data to StdOut.

    Args:
        devices: Iterable of device records to print. Defaults to the
            module-level ``usb_devices`` list so existing callers that
            pass no arguments are unaffected.
    """
    ts_format = '%Y-%m-%dT%H:%M:%S'

    def print_timestamp(label, value):
        # datetime.min is used throughout this tool as the "never set"
        # sentinel, so such timestamps are suppressed from the report.
        if value != datetime.min:
            print(label + value.strftime(ts_format))

    if devices is None:
        devices = usb_devices  # backward-compatible default (module global)

    for device in devices:
        # Plain string fields, printed unconditionally.
        print("Vendor: " + device.vendor)
        print("Product: " + device.product)
        print("Version: " + device.version)
        print("Serial Number: " + device.serial_number)
        print("VID: " + device.vid)
        print("PID: " + device.pid)
        print("Parent Prefix ID: " + device.parent_prefix_id)
        print("Drive Letter: " + device.drive_letter)
        print("Volume Name: " + device.volume_name)
        print("GUID : " + device.guid)
        print("Mountpoint: " + device.mountpoint)
        print("Disk Signature: " + device.disk_signature)

        # Registry timestamps, printed only when populated.
        print_timestamp("Device Classes Timestamp (53f56): ",
                        device.device_classes_datetime_53f56307b6bf11d094f200a0c91efb8b)
        print_timestamp("Device Classes Timestamp (10497): ",
                        device.device_classes_datetime_10497b1bba5144e58318a65c837b6661)
        print_timestamp("VID/PID Timestamp: ", device.vid_pid_datetime)
        print_timestamp("USBSTOR Timestamp: ", device.usb_stor_datetime)
        print_timestamp("Install Timestamp: ", device.install_datetime)
        print_timestamp("USBSTOR Timestamp (64): ", device.usbstor_datetime64)
        print_timestamp("USBSTOR Timestamp (65): ", device.usbstor_datetime65)
        print_timestamp("USBSTOR Timestamp (66): ", device.usbstor_datetime66)
        print_timestamp("USBSTOR Timestamp (67): ", device.usbstor_datetime67)

        for mp in device.mountpoint2:
            print('\tMP2 File: ' + mp.file)
            print_timestamp('\tMP2 Timestamp: ', mp.timestamp)

        for emd in device.emdmgmt:
            print('\tEMD Volume Serial No.: ' + emd.volume_serial_num)
            print('\tEMD Volume Serial No. (hex): ' + emd.volume_serial_num_hex)
            print('\tEMD Volume Name: ' + emd.volume_name)
            print_timestamp('\tEMD Timestamp: ', emd.timestamp)

        print('------------------------------------------------------------------------------')
Example 78
Project: EC-Council-OS   Author: cybershieldconsulting   File: usbdeviceforensics.py    GNU General Public License v3.0 4 votes vote down vote up
def output_data_to_file_text(output, devices=None):
    """Outputs the data to a file in text format.

    Args:
        output: Path of the text report file to (over)write.
        devices: Iterable of device records to report. Defaults to the
            module-level ``usb_devices`` list so existing callers that
            pass only ``output`` are unaffected.
    """
    write_debug(data='Method: output_data_to_file_text')

    if devices is None:
        devices = usb_devices  # backward-compatible default (module global)

    ts_format = '%Y-%m-%dT%H:%M:%S'

    # Bug fix: this file was opened in binary mode ("wb") while str objects
    # are written below, which raises TypeError on Python 3. Text mode is
    # correct for this line-oriented report (and also works on Python 2).
    with open(output, "w") as f:

        def write_timestamp(label, value):
            # datetime.min is used throughout this tool as the "never set"
            # sentinel, so such timestamps are suppressed from the report.
            if value != datetime.min:
                f.write(label + value.strftime(ts_format) + '\n')

        for device in devices:
            # Plain string fields, written unconditionally.
            f.write("Vendor: " + device.vendor + '\n')
            f.write("Product: " + device.product + '\n')
            f.write("Version: " + device.version + '\n')
            f.write("Serial Number: " + device.serial_number + '\n')
            f.write("VID: " + device.vid + '\n')
            f.write("PID: " + device.pid + '\n')
            f.write("Parent Prefix ID: " + device.parent_prefix_id + '\n')
            f.write("Drive Letter: " + device.drive_letter + '\n')
            f.write("Volume Name: " + device.volume_name + '\n')
            f.write("GUID : " + device.guid + '\n')
            f.write("Mountpoint: " + device.mountpoint + '\n')
            f.write("Disk Signature: " + device.disk_signature + '\n')

            # Registry timestamps, written only when populated.
            write_timestamp("Device Classes Timestamp (53f56): ",
                            device.device_classes_datetime_53f56307b6bf11d094f200a0c91efb8b)
            write_timestamp("Device Classes Timestamp (10497): ",
                            device.device_classes_datetime_10497b1bba5144e58318a65c837b6661)
            write_timestamp("VID/PID Timestamp: ", device.vid_pid_datetime)
            write_timestamp("USBSTOR Timestamp: ", device.usb_stor_datetime)
            write_timestamp("Install Timestamp: ", device.install_datetime)
            write_timestamp("USBSTOR Timestamp (64): ", device.usbstor_datetime64)
            write_timestamp("USBSTOR Timestamp (65): ", device.usbstor_datetime65)
            write_timestamp("USBSTOR Timestamp (66): ", device.usbstor_datetime66)
            write_timestamp("USBSTOR Timestamp (67): ", device.usbstor_datetime67)

            # Direct iteration replaces the original range(len(...)) index loop.
            for mp in device.mountpoint2:
                f.write('MP2 File: ' + mp.file + '\n')
                write_timestamp('MP2 Timestamp: ', mp.timestamp)

            for emd in device.emdmgmt:
                f.write('EMD Volume Serial No.: ' + emd.volume_serial_num + '\n')
                f.write('EMD Volume Serial No. (hex): ' + emd.volume_serial_num_hex + '\n')
                f.write('EMD Volume Name: ' + emd.volume_name + '\n')
                write_timestamp('EMD Timestamp: ', emd.timestamp)

            f.write('------------------------------------------------------------------------------\n')
Example 79
Project: streamalert   Author: airbnb   File: alert.py    Apache License 2.0 4 votes vote down vote up
def merge(cls, alerts):
    """Combine a list of alerts into a new merged alert.

    The caller decides *which* alerts belong together; this method only
    performs the merge itself. All input alerts are expected to share the
    same values for their merge keys.

    Args:
        alerts (list): Alerts to merge.

    Returns:
        Alert: New alert whose record merges the input records. Outputs are
            taken from the most recent alert; rule name, description and
            other metadata are copied from the earliest one.
    """
    ordered = sorted(alerts)  # chronological order
    first = ordered[0]
    merge_keys = set(first.merge_by_keys)

    # Strip the merge keys from every record so they don't appear in the
    # common/diff computations below.
    cleaned = [cls._clean_record(alert.record, merge_keys) for alert in ordered]
    common = cls._compute_common(cleaned)

    created_times = [alert.created for alert in ordered]

    # Key names are chosen so the most important information sorts first
    # alphabetically.
    merged_record = {
        'AlertCount': len(ordered),
        'AlertTimeFirst': min(created_times).strftime(cls.DATETIME_FORMAT),
        'AlertTimeLast': max(created_times).strftime(cls.DATETIME_FORMAT),
        'MergedBy': {
            key: utils.get_first_key(first.record, key, '(n/a)')
            for key in merge_keys
        },
        'OtherCommonKeys': common,
        'ValueDiffs': {
            alert.created.strftime(cls.DATETIME_FORMAT): cls._compute_diff(common, record)
            for alert, record in zip(ordered, cleaned)
        },
    }

    # TODO: the cluster, log_source, source_entity, etc, could be different
    # between alerts
    return cls(
        first.rule_name,
        merged_record,
        ordered[-1].outputs,  # most recent set of outputs wins
        cluster=first.cluster,
        context=first.context,
        log_source=first.log_source,
        log_type=first.log_type,
        publishers=first.publishers,
        rule_description=first.rule_description,
        source_entity=first.source_entity,
        source_service=first.source_service,
        staged=any(alert.staged for alert in ordered)
    )
Example 80
Project: signet-python   Author: signet-org   File: signet.py    MIT License 4 votes vote down vote up
def validate(self, identifier):
    """Validate the attestations recorded for *identifier*.

    Each attestation is verified and the best (most recent) match per
    signing key is categorised as 'ok', 'not-ok', 'untrusted', 'unknown'
    (key not in keyring) or 'invalid' (bad signature). The configured
    policy then decides overall validity.

    Args:
        identifier: Package/artifact identifier to look up in the repos.

    Returns:
        tuple: ``(valid, categories)`` where ``valid`` is True/False per
            policy, or None when no attestation produced any match, and
            ``categories`` maps each category name to a list of key ids.
    """
    matches = {}

    def record_match(keyid, ts, kind):
        # Keep only the most recent match per key id.
        if keyid not in matches or ts > matches[keyid]['ts']:
            matches[keyid] = {'ts': ts, 'kind': kind}

    for attestation in self.repos.lookup(identifier):
        keyid = attestation['key']
        try:
            ts = verify_attestation(attestation, self.config.keyring_path)
        except GPGKeyNotFoundError:
            # datetime.min ensures a real verification always outranks this.
            record_match(keyid, datetime.min, 'unknown')
            continue
        except GPGInvalidSignatureError:
            record_match(keyid, datetime.min, 'invalid')
            continue

        if attestation['data']['reviewed'] is not True:
            continue

        if self.config['trust'].get(keyid) is not True:
            record_match(keyid, ts, 'untrusted')
            continue

        if attestation['data']['ok'] is not True:
            record_match(keyid, ts, 'not-ok')
            continue

        record_match(keyid, ts, 'ok')

    categories = defaultdict(list)
    # Py3 fix: dict.iteritems()/itervalues() were removed in Python 3;
    # .items()/.values() behave identically here (and also work on Py2).
    for keyid, match in matches.items():
        categories[match['kind']].append(keyid)

    policy = self.config['policy']
    # Non-empty lists are truthy, so this is the original
    # any(len(keys) > 0 for keys in ...) check.
    if not any(categories.values()):
        valid = None
    else:
        valid = (len(categories['ok']) >= policy['ok']
                 and len(categories['not-ok']) <= policy['not-ok'])
    return valid, categories