Python datetime.datetime.max Examples
The following are 30 code examples of datetime.datetime.max (a class attribute holding the latest representable datetime — note it is an attribute, not a callable).
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions/classes of the module
datetime.datetime, or try the search function.
Example #1
Source File: fileset.py From typhon with MIT License | 8 votes |
def time_coverage(self, value):
    """Set the fileset's time coverage (property setter).

    For single-file filesets the coverage is stored as a
    ``[start, end]`` pair of datetimes, defaulting to the maximal
    ``[datetime.min, datetime.max]`` range when *value* is None.
    For multi-file filesets a non-None *value* is converted with
    ``to_timedelta`` — NOTE(review): presumably a per-file coverage
    period; confirm against ``to_timedelta``'s contract.
    """
    if self.single_file:
        if value is None:
            # The default for single file filesets:
            self._time_coverage = [
                datetime.min, datetime.max
            ]
        else:
            self._time_coverage = [
                to_datetime(value[0]),
                to_datetime(value[1]),
            ]
    elif value is not None:
        self._time_coverage = to_timedelta(value)
    else:
        self._time_coverage = None
    # Reset the info cache because some file information may have changed
    # now
    self.info_cache = {}
Example #2
Source File: test_datetime.py From oss-ftp with MIT License | 7 votes |
def test_extreme_ordinals(self): a = self.theclass.min a = self.theclass(a.year, a.month, a.day) # get rid of time parts aord = a.toordinal() b = a.fromordinal(aord) self.assertEqual(a, b) self.assertRaises(ValueError, lambda: a.fromordinal(aord - 1)) b = a + timedelta(days=1) self.assertEqual(b.toordinal(), aord + 1) self.assertEqual(b, self.theclass.fromordinal(aord + 1)) a = self.theclass.max a = self.theclass(a.year, a.month, a.day) # get rid of time parts aord = a.toordinal() b = a.fromordinal(aord) self.assertEqual(a, b) self.assertRaises(ValueError, lambda: a.fromordinal(aord + 1)) b = a - timedelta(days=1) self.assertEqual(b.toordinal(), aord - 1) self.assertEqual(b, self.theclass.fromordinal(aord - 1))
Example #3
Source File: test_dag_processing.py From airflow with Apache License 2.0 | 6 votes |
def test_parse_once(self):
    """The scheduler DAG file should be harvested exactly once per manager run."""
    test_dag_path = os.path.join(TEST_DAG_FOLDER, 'test_scheduler_dags.py')
    # async mode is disabled when the metadata backend is sqlite
    async_mode = 'sqlite' not in conf.get('core', 'sql_alchemy_conn')
    # timedelta.max effectively disables the processor timeout.
    processor_agent = DagFileProcessorAgent(test_dag_path,
                                            1,
                                            type(self)._processor_factory,
                                            timedelta.max,
                                            [],
                                            False,
                                            async_mode)
    processor_agent.start()
    parsing_result = []
    if not async_mode:
        processor_agent.run_single_parsing_loop()
    while not processor_agent.done:
        if not async_mode:
            processor_agent.wait_until_finished()
        parsing_result.extend(processor_agent.harvest_simple_dags())

    dag_ids = [result.dag_id for result in parsing_result]
    # The DAG must appear exactly once despite repeated harvest calls.
    self.assertEqual(dag_ids.count('test_start_date_scheduling'), 1)
Example #4
Source File: test_dag_processing.py From airflow with Apache License 2.0 | 6 votes |
def test_launch_process(self):
    """Starting the agent must spawn a process that writes the manager log file."""
    test_dag_path = os.path.join(TEST_DAG_FOLDER, 'test_scheduler_dags.py')
    # async mode is disabled when the metadata backend is sqlite
    async_mode = 'sqlite' not in conf.get('core', 'sql_alchemy_conn')

    log_file_loc = conf.get('logging', 'DAG_PROCESSOR_MANAGER_LOG_LOCATION')
    try:
        os.remove(log_file_loc)
    except OSError:
        # The log file may not exist yet; that is fine.
        pass

    # Starting dag processing with 0 max_runs to avoid redundant operations.
    processor_agent = DagFileProcessorAgent(test_dag_path,
                                            0,
                                            type(self)._processor_factory,
                                            timedelta.max,  # no processor timeout
                                            [],
                                            False,
                                            async_mode)
    processor_agent.start()
    if not async_mode:
        processor_agent.run_single_parsing_loop()

    processor_agent._process.join()
    self.assertTrue(os.path.isfile(log_file_loc))
Example #5
Source File: test_datetime.py From ironpython2 with Apache License 2.0 | 6 votes |
def test_extreme_ordinals(self):
    """Ordinals round-trip at the class extremes; one step past them raises."""
    a = self.theclass.min
    a = self.theclass(a.year, a.month, a.day)  # get rid of time parts
    aord = a.toordinal()
    b = a.fromordinal(aord)
    self.assertEqual(a, b)
    # One ordinal before the minimum must be rejected.
    self.assertRaises(ValueError, lambda: a.fromordinal(aord - 1))
    b = a + timedelta(days=1)
    self.assertEqual(b.toordinal(), aord + 1)
    self.assertEqual(b, self.theclass.fromordinal(aord + 1))

    a = self.theclass.max
    a = self.theclass(a.year, a.month, a.day)  # get rid of time parts
    aord = a.toordinal()
    b = a.fromordinal(aord)
    self.assertEqual(a, b)
    # One ordinal past the maximum must be rejected.
    self.assertRaises(ValueError, lambda: a.fromordinal(aord + 1))
    b = a - timedelta(days=1)
    self.assertEqual(b.toordinal(), aord - 1)
    self.assertEqual(b, self.theclass.fromordinal(aord - 1))
Example #6
Source File: test_dag_processing.py From airflow with Apache License 2.0 | 6 votes |
def test_set_file_paths_when_processor_file_path_is_in_new_file_paths(self):
    """A processor whose file is still in the new path list must be kept."""
    manager = DagFileProcessorManager(
        dag_directory='directory',
        max_runs=1,
        processor_factory=MagicMock().return_value,
        processor_timeout=timedelta.max,  # effectively no timeout
        signal_conn=MagicMock(),
        dag_ids=[],
        pickle_dags=False,
        async_mode=True)

    mock_processor = MagicMock()
    # stop() raising simulates a processor object without a stop method.
    mock_processor.stop.side_effect = AttributeError(
        'DagFileProcessor object has no attribute stop')
    mock_processor.terminate.side_effect = None

    manager._processors['abc.txt'] = mock_processor

    manager.set_file_paths(['abc.txt'])
    # 'abc.txt' is still present, so its processor must not be dropped.
    self.assertDictEqual(manager._processors, {'abc.txt': mock_processor})
Example #7
Source File: test_datetime.py From BinderFilter with MIT License | 6 votes |
def test_extreme_ordinals(self):
    """Ordinals round-trip at the class extremes; one step past them raises."""
    a = self.theclass.min
    a = self.theclass(a.year, a.month, a.day)  # get rid of time parts
    aord = a.toordinal()
    b = a.fromordinal(aord)
    self.assertEqual(a, b)
    # One ordinal before the minimum must be rejected.
    self.assertRaises(ValueError, lambda: a.fromordinal(aord - 1))
    b = a + timedelta(days=1)
    self.assertEqual(b.toordinal(), aord + 1)
    self.assertEqual(b, self.theclass.fromordinal(aord + 1))

    a = self.theclass.max
    a = self.theclass(a.year, a.month, a.day)  # get rid of time parts
    aord = a.toordinal()
    b = a.fromordinal(aord)
    self.assertEqual(a, b)
    # One ordinal past the maximum must be rejected.
    self.assertRaises(ValueError, lambda: a.fromordinal(aord + 1))
    b = a - timedelta(days=1)
    self.assertEqual(b.toordinal(), aord - 1)
    self.assertEqual(b, self.theclass.fromordinal(aord - 1))
Example #8
Source File: datetimetester.py From Fluid-Designer with GNU General Public License v3.0 | 6 votes |
def test_overflow(self): tiny = timedelta.resolution td = timedelta.min + tiny td -= tiny # no problem self.assertRaises(OverflowError, td.__sub__, tiny) self.assertRaises(OverflowError, td.__add__, -tiny) td = timedelta.max - tiny td += tiny # no problem self.assertRaises(OverflowError, td.__add__, tiny) self.assertRaises(OverflowError, td.__sub__, -tiny) self.assertRaises(OverflowError, lambda: -timedelta.max) day = timedelta(1) self.assertRaises(OverflowError, day.__mul__, 10**9) self.assertRaises(OverflowError, day.__mul__, 1e9) self.assertRaises(OverflowError, day.__truediv__, 1e-20) self.assertRaises(OverflowError, day.__truediv__, 1e-10) self.assertRaises(OverflowError, day.__truediv__, 9e-10)
Example #9
Source File: datetimetester.py From Fluid-Designer with GNU General Public License v3.0 | 6 votes |
def test_extreme_ordinals(self):
    """Ordinals round-trip at the class extremes; one step past them raises."""
    a = self.theclass.min
    a = self.theclass(a.year, a.month, a.day)  # get rid of time parts
    aord = a.toordinal()
    b = a.fromordinal(aord)
    self.assertEqual(a, b)
    # One ordinal before the minimum must be rejected.
    self.assertRaises(ValueError, lambda: a.fromordinal(aord - 1))
    b = a + timedelta(days=1)
    self.assertEqual(b.toordinal(), aord + 1)
    self.assertEqual(b, self.theclass.fromordinal(aord + 1))

    a = self.theclass.max
    a = self.theclass(a.year, a.month, a.day)  # get rid of time parts
    aord = a.toordinal()
    b = a.fromordinal(aord)
    self.assertEqual(a, b)
    # One ordinal past the maximum must be rejected.
    self.assertRaises(ValueError, lambda: a.fromordinal(aord + 1))
    b = a - timedelta(days=1)
    self.assertEqual(b.toordinal(), aord - 1)
    self.assertEqual(b, self.theclass.fromordinal(aord - 1))
Example #10
Source File: test_dag_processing.py From airflow with Apache License 2.0 | 6 votes |
def test_max_runs_when_no_files(self):
    """With an empty DAG folder the manager should still complete one run loop."""
    child_pipe, parent_pipe = multiprocessing.Pipe()

    with TemporaryDirectory(prefix="empty-airflow-dags-") as dags_folder:
        # async mode is disabled when the metadata backend is sqlite
        async_mode = 'sqlite' not in conf.get('core', 'sql_alchemy_conn')
        manager = DagFileProcessorManager(
            dag_directory=dags_folder,
            max_runs=1,
            processor_factory=FakeDagFileProcessorRunner._fake_dag_processor_factory,
            processor_timeout=timedelta.max,  # effectively no timeout
            signal_conn=child_pipe,
            dag_ids=[],
            pickle_dags=False,
            async_mode=async_mode)

        self.run_processor_manager_one_loop(manager, parent_pipe)
    child_pipe.close()
    parent_pipe.close()
Example #11
Source File: test_dag_processing.py From airflow with Apache License 2.0 | 6 votes |
def test_set_file_paths_when_processor_file_path_not_in_new_file_paths(self):
    """A processor whose file disappeared from the path list must be dropped."""
    manager = DagFileProcessorManager(
        dag_directory='directory',
        max_runs=1,
        processor_factory=MagicMock().return_value,
        processor_timeout=timedelta.max,  # effectively no timeout
        signal_conn=MagicMock(),
        dag_ids=[],
        pickle_dags=False,
        async_mode=True)

    mock_processor = MagicMock()
    # stop() raising simulates a processor object without a stop method.
    mock_processor.stop.side_effect = AttributeError(
        'DagFileProcessor object has no attribute stop')
    mock_processor.terminate.side_effect = None

    manager._processors['missing_file.txt'] = mock_processor
    manager._file_stats['missing_file.txt'] = DagFileStat(0, 0, None, None, 0)

    manager.set_file_paths(['abc.txt'])
    # 'missing_file.txt' is gone from the new paths, so its processor is removed.
    self.assertDictEqual(manager._processors, {})
Example #12
Source File: datetimetester.py From Fluid-Designer with GNU General Public License v3.0 | 5 votes |
def test_resolution_info(self): self.assertIsInstance(timedelta.min, timedelta) self.assertIsInstance(timedelta.max, timedelta) self.assertIsInstance(timedelta.resolution, timedelta) self.assertTrue(timedelta.max > timedelta.min) self.assertEqual(timedelta.min, timedelta(-999999999)) self.assertEqual(timedelta.max, timedelta(999999999, 24*3600-1, 1e6-1)) self.assertEqual(timedelta.resolution, timedelta(0, 0, 1))
Example #13
Source File: test_utils.py From python-zhmcclient with Apache License 2.0 | 5 votes |
def find_max_value(test_func, initial_value):
    """
    Starting from an initial number (integer or float), find the maximum
    value for which the test function does not yet fail, and return that
    maximum value.
    """
    assert isinstance(initial_value, int) and initial_value > 0
    fails = FailsArray(test_func)

    # Grow the probe exponentially until the test function starts failing.
    probe = initial_value
    while fails[probe] == 0:
        probe *= 2

    # Binary-search the 0 -> 1 transition of the fails array within the
    # last doubling interval; the max value sits just below it.
    boundary = 0.5
    probe = binary_search(fails, boundary, probe // 2, probe)
    max_value = probe - 1

    # Verify that we found exactly the maximum:
    assert fails[max_value] == 0 and fails[max_value + 1] == 1, \
        "max_value={}, fails[+-2]: {}, {}, {}, {}, {}".\
        format(max_value, fails[max_value - 2], fails[max_value - 1],
               fails[max_value], fails[max_value + 1], fails[max_value + 2])
    return max_value
Example #14
Source File: datetimetester.py From ironpython3 with Apache License 2.0 | 5 votes |
def test_str(self): for tz in [self.ACDT, self.EST, timezone.utc, timezone.min, timezone.max]: self.assertEqual(str(tz), tz.tzname(None))
Example #15
Source File: datetimetester.py From Fluid-Designer with GNU General Public License v3.0 | 5 votes |
def test_resolution_info(self): self.assertIsInstance(self.theclass.min, self.theclass) self.assertIsInstance(self.theclass.max, self.theclass) self.assertIsInstance(self.theclass.resolution, timedelta) self.assertTrue(self.theclass.max > self.theclass.min)
Example #16
Source File: datetimetester.py From Fluid-Designer with GNU General Public License v3.0 | 5 votes |
def test_bool(self): # All dates are considered true. self.assertTrue(self.theclass.min) self.assertTrue(self.theclass.max)
Example #17
Source File: datetimetester.py From Fluid-Designer with GNU General Public License v3.0 | 5 votes |
def test_extreme_timedelta(self): big = self.theclass.max - self.theclass.min # 3652058 days, 23 hours, 59 minutes, 59 seconds, 999999 microseconds n = (big.days*24*3600 + big.seconds)*1000000 + big.microseconds # n == 315537897599999999 ~= 2**58.13 justasbig = timedelta(0, 0, n) self.assertEqual(big, justasbig) self.assertEqual(self.theclass.min + big, self.theclass.max) self.assertEqual(self.theclass.max - big, self.theclass.min)
Example #18
Source File: datetimetester.py From Fluid-Designer with GNU General Public License v3.0 | 5 votes |
def test_resolution_info(self): # XXX: Should min and max respect subclassing? if issubclass(self.theclass, datetime): expected_class = datetime else: expected_class = date self.assertIsInstance(self.theclass.min, expected_class) self.assertIsInstance(self.theclass.max, expected_class) self.assertIsInstance(self.theclass.resolution, timedelta) self.assertTrue(self.theclass.max > self.theclass.min)
Example #19
Source File: oandastore.py From backtrader with GNU General Public License v3.0 | 5 votes |
def order_create(self, order, stopside=None, takeside=None, **kwargs):
    """Translate a backtrader order into OANDA create-order kwargs and queue it.

    Returns the order unchanged; the actual creation happens
    asynchronously via ``self.q_ordercreate``.
    """
    okwargs = dict()
    okwargs['instrument'] = order.data._dataname
    # Size is sent as an absolute amount; direction goes in 'side'.
    okwargs['units'] = abs(order.created.size)
    okwargs['side'] = 'buy' if order.isbuy() else 'sell'
    okwargs['type'] = self._ORDEREXECS[order.exectype]
    if order.exectype != bt.Order.Market:
        okwargs['price'] = order.created.price
        if order.valid is None:
            # 1 year and datetime.max fail ... 1 month works
            valid = datetime.utcnow() + timedelta(days=30)
        else:
            valid = order.data.num2date(order.valid)
        # To timestamp with seconds precision
        okwargs['expiry'] = int((valid - self._DTEPOCH).total_seconds())

    if order.exectype == bt.Order.StopLimit:
        okwargs['lowerBound'] = order.created.pricelimit
        okwargs['upperBound'] = order.created.pricelimit

    if order.exectype == bt.Order.StopTrail:
        okwargs['trailingStop'] = order.trailamount

    if stopside is not None:
        okwargs['stopLoss'] = stopside.price

    if takeside is not None:
        okwargs['takeProfit'] = takeside.price

    okwargs.update(**kwargs)  # anything from the user

    self.q_ordercreate.put((order.ref, okwargs,))
    return order
Example #20
Source File: accessible_time.py From INGInious with GNU Affero General Public License v3.0 | 5 votes |
def get_std_start_date(self):
    """
    If the date is custom, return the start datetime with the format
    %Y-%m-%d %H:%M:%S. Else, returns "".
    """
    start = self._val[0]
    if start in (datetime.min, datetime.max):
        # The sentinel extremes mean "always"/"never", not a real date.
        return ""
    return start.strftime("%Y-%m-%d %H:%M:%S")
Example #21
Source File: accessible_time.py From INGInious with GNU Affero General Public License v3.0 | 5 votes |
def is_never_accessible(self):
    """ Returns true if the course/task is never accessible """
    start, end = self._val[0], self._val[1]
    # Both bounds at the datetime.max sentinel encode "never open".
    return start == datetime.max and end == datetime.max
Example #22
Source File: accessible_time.py From INGInious with GNU Affero General Public License v3.0 | 5 votes |
def __init__(self, val=None):
    """ Parse a string/a boolean to get the correct time period.
        Correct values for val:
        True (task always open)
        False (task always closed)
        2014-07-16 11:24:00 (task is open from 2014-07-16 at 11:24:00)
        2014-07-16 (task is open from 2014-07-16)
        / 2014-07-16 11:24:00 (task is only open before the 2014-07-16 at 11:24:00)
        / 2014-07-16 (task is only open before the 2014-07-16)
        2014-07-16 11:24:00 / 2014-07-20 11:24:00 (task is open from 2014-07-16 11:24:00 and will be closed the 2014-07-20 at 11:24:00)
        2014-07-16 / 2014-07-20 11:24:00 (...)
        2014-07-16 11:24:00 / 2014-07-20 (...)
        2014-07-16 / 2014-07-20 (...)
        2014-07-16 11:24:00 / 2014-07-20 11:24:00 / 2014-07-20 12:24:00 (task is open from 2014-07-16 11:24:00, has a soft deadline set at 2014-07-20 11:24:00 and will be closed the 2014-07-20 at 11:24:00)
        2014-07-16 / 2014-07-20 11:24:00 / 2014-07-21 (...)
        2014-07-16 / 2014-07-20 / 2014-07-21 (...)
    """
    if val is None or val == "" or val is True:
        # Always accessible: open from the dawn of time, never closes.
        self._val = [datetime.min, datetime.max]
        self._soft_end = datetime.max
    elif val == False:
        # Never accessible: start pinned at the datetime.max sentinel.
        self._val = [datetime.max, datetime.max]
        self._soft_end = datetime.max
    else:  # str
        values = val.split("/")
        if len(values) == 1:
            # Only a start date given; never closes.
            self._val = [parse_date(values[0].strip(), datetime.min), datetime.max]
            self._soft_end = datetime.max
        elif len(values) == 2:
            # Has start time and hard deadline
            self._val = [parse_date(values[0].strip(), datetime.min),
                         parse_date(values[1].strip(), datetime.max)]
            self._soft_end = self._val[1]
        else:
            # Has start time, soft deadline and hard deadline
            self._val = [parse_date(values[0].strip(), datetime.min),
                         parse_date(values[2].strip(), datetime.max)]
            self._soft_end = parse_date(values[1].strip(), datetime.max)

    # Having a soft deadline after the hard one does not make sense
    if self._soft_end > self._val[1]:
        raise ValueError(_("Cannot have a soft deadline after the hard one"))
Example #23
Source File: test_dag_command.py From airflow with Apache License 2.0 | 5 votes |
def test_cli_list_dag_runs(self):
    """`dags list_runs` should succeed for a freshly triggered DAG."""
    dag_command.dag_trigger(self.parser.parse_args([
        'dags', 'trigger', 'example_bash_operator', ]))
    # datetime.max (made timezone-aware) serves as an open-ended end date.
    args = self.parser.parse_args(['dags',
                                   'list_runs',
                                   '--dag-id',
                                   'example_bash_operator',
                                   '--no-backfill',
                                   '--start-date',
                                   DEFAULT_DATE.isoformat(),
                                   '--end-date',
                                   timezone.make_aware(datetime.max).isoformat()])
    dag_command.dag_list_dag_runs(args)
Example #24
Source File: test_datetime.py From oss-ftp with MIT License | 5 votes |
def test_overflow(self): tiny = self.theclass.resolution for delta in [tiny, timedelta(1), timedelta(2)]: dt = self.theclass.min + delta dt -= delta # no problem self.assertRaises(OverflowError, dt.__sub__, delta) self.assertRaises(OverflowError, dt.__add__, -delta) dt = self.theclass.max - delta dt += delta # no problem self.assertRaises(OverflowError, dt.__add__, delta) self.assertRaises(OverflowError, dt.__sub__, -delta)
Example #25
Source File: test_datetime.py From oss-ftp with MIT License | 5 votes |
def test_extreme_timedelta(self): big = self.theclass.max - self.theclass.min # 3652058 days, 23 hours, 59 minutes, 59 seconds, 999999 microseconds n = (big.days*24*3600 + big.seconds)*1000000 + big.microseconds # n == 315537897599999999 ~= 2**58.13 justasbig = timedelta(0, 0, n) self.assertEqual(big, justasbig) self.assertEqual(self.theclass.min + big, self.theclass.max) self.assertEqual(self.theclass.max - big, self.theclass.min)
Example #26
Source File: datetimetester.py From Fluid-Designer with GNU General Public License v3.0 | 5 votes |
def test_non_abstractness(self): # In order to allow subclasses to get pickled, the C implementation # wasn't able to get away with having __init__ raise # NotImplementedError. useless = tzinfo() dt = datetime.max self.assertRaises(NotImplementedError, useless.tzname, dt) self.assertRaises(NotImplementedError, useless.utcoffset, dt) self.assertRaises(NotImplementedError, useless.dst, dt)
Example #27
Source File: test_dag_processing.py From airflow with Apache License 2.0 | 5 votes |
def test_reload_module(self):
    """
    Configure the context to have logging.logging_config_class set to a fake
    logging class path, thus when reloading logging module the
    airflow.processor_manager logger should not be configured.
    """
    with settings_context(SETTINGS_FILE_VALID):
        # Launch a process through DagFileProcessorAgent, which will try
        # reload the logging module.
        test_dag_path = os.path.join(TEST_DAG_FOLDER, 'test_scheduler_dags.py')
        async_mode = 'sqlite' not in conf.get('core', 'sql_alchemy_conn')
        log_file_loc = conf.get('logging', 'DAG_PROCESSOR_MANAGER_LOG_LOCATION')

        try:
            os.remove(log_file_loc)
        except OSError:
            # The log file may not exist yet; that is fine.
            pass

        # Starting dag processing with 0 max_runs to avoid redundant operations.
        processor_agent = DagFileProcessorAgent(test_dag_path,
                                                0,
                                                type(self)._processor_factory,
                                                timedelta.max,  # no processor timeout
                                                [],
                                                False,
                                                async_mode)
        processor_agent.start()
        if not async_mode:
            processor_agent.run_single_parsing_loop()

        processor_agent._process.join()
        # Since we are reloading logging config not creating this file,
        # we should expect it to be nonexistent.
        self.assertFalse(os.path.isfile(log_file_loc))
Example #28
Source File: test_datetime.py From oss-ftp with MIT License | 5 votes |
def test_bool(self):
    """Instances are always truthy, even at the extremes."""
    # All dates are considered true.
    self.assertTrue(self.theclass.min)
    self.assertTrue(self.theclass.max)
Example #29
Source File: test_datetime.py From oss-ftp with MIT License | 5 votes |
def test_resolution_info(self):
    """min/max are instances of the class; resolution is a timedelta."""
    self.assertIsInstance(self.theclass.min, self.theclass)
    self.assertIsInstance(self.theclass.max, self.theclass)
    self.assertIsInstance(self.theclass.resolution, timedelta)
    self.assertTrue(self.theclass.max > self.theclass.min)
Example #30
Source File: formatters.py From teleport with Apache License 2.0 | 5 votes |
def format_ad_timestamp(raw_value):
    """
    Active Directory stores date/time values as the number of 100-nanosecond
    intervals that have elapsed since the 0 hour on January 1, 1601 till the
    date/time that is being stored. The time is always stored in Greenwich
    Mean Time (GMT) in the Active Directory.

    Returns a timezone-aware datetime on success, or the raw value
    unchanged when it cannot be interpreted as an AD timestamp.
    """
    if raw_value == b'9223372036854775807':  # max value to be stored in a 64 bit signed int
        return datetime.max  # returns datetime.datetime(9999, 12, 31, 23, 59, 59, 999999)
    try:
        timestamp = int(raw_value)
        if timestamp < 0:  # ad timestamp cannot be negative
            return raw_value
    except Exception:
        # Not an integer at all - hand the raw value back unchanged.
        return raw_value

    try:
        # 10**7 ticks per second; 11644473600 s lie between 1601-01-01
        # and the Unix epoch.
        return datetime.fromtimestamp(timestamp / 10000000.0 - 11644473600,
                                      tz=OffsetTzInfo(0, 'UTC'))  # forces true division in python 2
    except (OSError, OverflowError, ValueError):  # on Windows backwards timestamps are not allowed
        try:
            # Fall back to timedelta arithmetic from the Unix epoch.
            unix_epoch = datetime.fromtimestamp(0, tz=OffsetTzInfo(0, 'UTC'))
            diff_seconds = timedelta(seconds=timestamp/10000000.0 - 11644473600)
            return unix_epoch + diff_seconds
        except Exception:
            pass
    except Exception:
        pass
    return raw_value