Python pendulum.parse() Examples

The following are 30 code examples of pendulum.parse(), drawn from open-source projects. The original project and source file are noted above each example. You may also want to check out all available functions/classes of the module pendulum, or try the search function.
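Before the project examples, a minimal sketch of the call they all build on: pendulum.parse() takes a date/time string and returns a timezone-aware pendulum.DateTime (UTC when the string carries no offset).

import pendulum

dt = pendulum.parse("2016-01-02T13:00:01Z")
print(dt)                # 2016-01-02T13:00:01+00:00
print(dt.timezone_name)  # UTC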
Example #1
Source File: forms.py    From airflow with Apache License 2.0
def process_formdata(self, valuelist):
        if not valuelist:
            return
        date_str = ' '.join(valuelist)
        try:
            # Check if the datetime string is in the format without timezone, if so convert it to the
            # default timezone
            if len(date_str) == 19:
                parsed_datetime = dt.strptime(date_str, '%Y-%m-%d %H:%M:%S')
                default_timezone = self._get_default_timezone()
                self.data = default_timezone.convert(parsed_datetime)
            else:
                self.data = pendulum.parse(date_str)
        except ValueError:
            self.data = None
            raise ValueError(self.gettext('Not a valid datetime value')) 
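The length check above exists because a bare 'YYYY-MM-DD HH:MM:SS' string carries no offset. A hedged sketch of the behaviour being worked around (pendulum 2.x assumes UTC for such strings and also accepts a tz keyword):

import pendulum

# No offset in the string: pendulum assumes UTC.
pendulum.parse("2020-06-01 10:30:00")                     # 2020-06-01T10:30:00+00:00
# Same string pinned to a specific default timezone.
pendulum.parse("2020-06-01 10:30:00", tz="Europe/Paris")  # 2020-06-01T10:30:00+02:00
# An explicit offset in the string is kept.
pendulum.parse("2020-06-01T10:30:00-05:00")               # 2020-06-01T10:30:00-05:00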
Example #2
Source File: digitalocean.py    From k8s-snapshots with BSD 2-Clause "Simplified" License
def load_snapshots(
    ctx: Context, label_filters: Dict[str, str]
) -> List[Snapshot]:
    snapshots = digitalocean.Manager().get_volume_snapshots()

    tag_filters = set(k+':'+v for k, v in label_filters.items())
    filtered = [snapshot
                for snapshot in snapshots
                if tag_filters.intersection(snapshot.tags)]

    _logger.debug('digitalocean.load_snaphots', label_filters=label_filters,
                  tag_filters=tag_filters, snapshots_count=len(snapshots),
                  filtered=filtered)

    return list(map(lambda snapshot: Snapshot(
        name=snapshot.id,
        created_at=pendulum.parse(snapshot.created_at),
        disk=DODiskIdentifier(volume_id=snapshot.resource_id),
    ), filtered)) 
Example #3
Source File: core.py    From maya with MIT License
def parse_iso8601_duration(cls, duration, start=None, end=None):
        match = re.match(
            r"(?:P(?P<weeks>\d+)W)|(?:P(?:(?:(?P<years>\d+)Y)?(?:(?P<months>\d+)M)?(?:(?P<days>\d+)D))?(?:T(?:(?P<hours>\d+)H)?(?:(?P<minutes>\d+)M)?(?:(?P<seconds>\d+)S)?)?)",  # noqa
            duration,
        )

        time_components = {}
        if match:
            time_components = match.groupdict(0)
            for key, value in time_components.items():
                time_components[key] = int(value)

            duration = relativedelta(**time_components)

            if start:
                return parse(start.datetime() + duration)

            if end:
                return parse(end.datetime() - duration)

        return None 
Example #4
Source File: core.py    From maya with MIT License
def from_iso8601(cls, s):
        # # Start and end, such as "2007-03-01T13:00:00Z/2008-05-11T15:30:00Z"
        start, end = s.split("/")
        try:
            start = parse(start)
        except pendulum.parsing.exceptions.ParserError:
            # start = self._parse_iso8601_duration(start, end=end)
            raise NotImplementedError()

        try:
            end = parse(end)
        except (pendulum.parsing.exceptions.ParserError, TypeError):
            end = cls.parse_iso8601_duration(end, start=start)

        return cls(start=start, end=end)

        # # Start and duration, such as "2007-03-01T13:00:00Z/P1Y2M10DT2H30M"
        # # Duration and end, such as "P1Y2M10DT2H30M/2008-05-11T15:30:00Z" 
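maya falls back to its own duration handling here, but pendulum's parser can reportedly handle ISO 8601 durations and interval strings itself; a hedged sketch, assuming pendulum 2.x behaviour:

import pendulum

# ISO 8601 duration -> pendulum.Duration (assumed behaviour)
d = pendulum.parse("P1Y2M10DT2H30M")
print(d.years, d.months)  # 1 2

# ISO 8601 interval (start/duration) -> pendulum.Period (assumed behaviour)
p = pendulum.parse("2007-03-01T13:00:00Z/P1Y2M10DT2H30M")
print(p.start, p.end)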
Example #5
Source File: plugin.py    From limnoria-plugins with Do What The F*ck You Want To Public License
def _sortData(self, data):
        # print(data)
        for item in data:
            # print(item)
            if item:
                if "Games with" not in item:
                    # tmp = None
                    # for game in item:
                    #    if 'Games with' in game:
                    #        tmp = item.pop()
                    item.sort(
                        key=lambda x: pendulum.parse(
                            x.split("(")[1].replace(")", ""), strict=False
                        ).int_timestamp
                    )
                # if tmp:
                #    item.append(tmp)

        return data 
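strict=False is what lets pendulum fall back to a more permissive parser for the free-form dates pulled out of the scraped text; a small hedged sketch of that flag and of int_timestamp:

import pendulum

# strict=False falls back to a lenient parser for non-ISO strings.
dt = pendulum.parse("Jan 6 2020 7:00 PM", strict=False)
print(dt)                # 2020-01-06T19:00:00+00:00
print(dt.int_timestamp)  # 1578337200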
Example #6
Source File: test_model.py    From FlowKit with Mozilla Public License 2.0
def test_set_state(session):
    """
    Make sure we can add a row to the DB and the content
    we expect is in the DB afterwards.
    """
    workflow_run_data = dict(
        workflow_name="DUMMY_WORKFLOW_NAME",
        parameters={"DUMMY_PARAM_NAME": "DUMMY_PARAM_VALUE"},
        state=RunState.running,
    )

    now = pendulum.parse("2016-01-02T13:00:01Z")
    with patch("pendulum.now", lambda x: now):
        WorkflowRuns.set_state(**workflow_run_data, session=session)

    rows = session.query(WorkflowRuns).all()
    assert len(rows) == 1

    row = rows[0]
    assert row.workflow_name == workflow_run_data["workflow_name"]
    assert row.parameters_hash == get_params_hash(workflow_run_data["parameters"])
    assert row.state == workflow_run_data["state"]
    assert pendulum.instance(row.timestamp) == now 
Example #7
Source File: test_model.py    From FlowKit with Mozilla Public License 2.0
def test_set_state_with_sqlite(sqlite_session):
    """
    Make sure we can add a row to a sqlite DB.
    """
    workflow_run_data = dict(
        workflow_name="DUMMY_WORKFLOW_NAME",
        parameters={"DUMMY_PARAM_NAME": "DUMMY_PARAM_VALUE"},
        state=RunState.running,
    )

    now = pendulum.parse("2016-01-02T13:00:01Z")
    with patch("pendulum.now", lambda x: now):
        WorkflowRuns.set_state(**workflow_run_data, session=sqlite_session)

    rows = sqlite_session.query(WorkflowRuns).all()
    assert len(rows) == 1

    row = rows[0]
    assert row.workflow_name == workflow_run_data["workflow_name"]
    assert row.parameters_hash == get_params_hash(workflow_run_data["parameters"])
    assert row.state == workflow_run_data["state"]
    assert pendulum.instance(row.timestamp) == now 
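Both tests compare the stored column via pendulum.instance(), which wraps a stdlib datetime in a pendulum.DateTime so it compares cleanly against the parsed value; a minimal sketch (naive datetimes are treated as UTC):

import datetime
import pendulum

naive = datetime.datetime(2016, 1, 2, 13, 0, 1)
assert pendulum.instance(naive) == pendulum.parse("2016-01-02T13:00:01Z")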
Example #8
Source File: plugin.py    From limnoria-plugins with Do What The F*ck You Want To Public License
def _getGames(self, team, date, tz="US/Eastern"):
        """Given a date, populate the url with it and try to download its
        content. If successful, parse the JSON data and extract the relevant
        fields for each game. Returns a list of games."""
        url = self._getEndpointURL(date)

        # (If asking for today's results, enable the 'If-Modified-Since' flag)
        use_cache = date == self._getTodayDate()
        # use_cache = False
        response = self._getURL(url, use_cache)
        if isinstance(response, str):
            return "ERROR: Something went wrong, check input"

        json = self._extractJSON(response)
        games = self._parseGames(json, team, tz)
        return games 
Example #9
Source File: plugin.py    From limnoria-plugins with Do What The F*ck You Want To Public License
def getValidDateFmt(irc, msg, args, state):
    date = args[0]
    valid = ['yesterday', 'tomorrow']
    check = None
    try:
        if date.lower() in valid:
            if date.lower() == 'yesterday':
                check = pendulum.yesterday().format('MM/DD/YYYY')
            else:
                check = pendulum.tomorrow().format('MM/DD/YYYY')
        else:
            check = pendulum.parse(date, strict=False).format('MM/DD/YYYY')
    except:
        pass
    if not check:
        state.errorInvalid(_('date format'), str(date))
    else:
        state.args.append(check)
        del args[0] 
Example #10
Source File: postgres_to_gcs.py    From airflow with Apache License 2.0
def convert_type(self, value, schema_type):
        """
        Takes a value from Postgres, and converts it to a value that's safe for
        JSON/Google Cloud Storage/BigQuery. Dates are converted to UTC seconds.
        Decimals are converted to floats. Times are converted to seconds.
        """
        if isinstance(value, (datetime.datetime, datetime.date)):
            return pendulum.parse(value.isoformat()).float_timestamp
        if isinstance(value, datetime.time):
            formatted_time = time.strptime(str(value), "%H:%M:%S")
            return int(datetime.timedelta(
                hours=formatted_time.tm_hour,
                minutes=formatted_time.tm_min,
                seconds=formatted_time.tm_sec).total_seconds())
        if isinstance(value, dict):
            return json.dumps(value)
        if isinstance(value, Decimal):
            return float(value)
        return value 
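The date branch round-trips through isoformat() so both datetime.date and datetime.datetime values end up as UTC epoch seconds; a hedged sketch of what that produces:

import datetime
import pendulum

# A date becomes midnight UTC of that day.
pendulum.parse(datetime.date(2018, 1, 1).isoformat()).float_timestamp
# 1514764800.0

# A naive datetime keeps its time of day, assumed to be UTC.
pendulum.parse(datetime.datetime(2018, 1, 1, 12, 30).isoformat()).float_timestamp
# 1514809800.0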
Example #11
Source File: get-next-checkins.py    From serverless-southwest-check-in with MIT License
async def get_executions(args):
    # TODO(dw): pagination for > 100 executions
    executions = SFN.list_executions(
        stateMachineArn=args.state_machine_arn,
        statusFilter='RUNNING'
    )

    loop = asyncio.get_event_loop()
    futures = [
        loop.run_in_executor(None, get_execution_history, e['executionArn'])
        for e in executions['executions']
    ]

    done, _ = await asyncio.wait(futures)
    results = [r.result() for r in done]
    sorted_results = sorted(results, key=lambda x: pendulum.parse(x['check_in_times']['next']))

    if args.reverse:
        sorted_results = list(reversed(sorted_results))

    print(json.dumps(sorted_results[:args.count])) 
Example #12
Source File: test_main.py    From bitfinex-ohlc-import with MIT License
def test_get_candles():
    """
    Get all candles for symbol between two dates.
    """
    symbol = 'BTCUSD'
    start_date = pendulum.parse('2018-01-01 00:00:00')
    end_date = pendulum.parse('2018-01-01 00:02:00')
    with open('fixture_get_candles.json') as f:
        data = json.load(f)
    url = API_URL + '/candles/trade:1m:tBTCUSD/hist' \
                    '?start=1514764800000&end=1514764920000&limit=1000'

    with patch('bitfinex.main.get_data') as mock_get_data:
        mock_get_data.return_value = data
        result = get_candles(symbol, start_date, end_date)

    mock_get_data.assert_called_once_with(url)
    assert result == data 
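The millisecond values hard-coded in the fixture URL line up with the two parsed dates; presumably get_candles derives them with something like int_timestamp * 1000 (a hedged reconstruction, not the project's actual code):

import pendulum

print(pendulum.parse("2018-01-01 00:00:00").int_timestamp * 1000)  # 1514764800000
print(pendulum.parse("2018-01-01 00:02:00").int_timestamp * 1000)  # 1514764920000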
Example #13
Source File: helpers.py    From pycounter with MIT License
def convert_date_run(datestring):
    """
    Convert a date of the format 'YYYY-MM-DD' to a datetime.date object.

    (Will also accept MM/DD/YYYY format, ISO 8601 timestamps, or existing
    datetime objects; these shouldn't be in COUNTER reports, but they
    do show up in real world data...)

    :param datestring: the string to convert to a date.

    :return: datetime.date object

    """
    if isinstance(datestring, datetime.date):
        return datestring

    return pendulum.parse(datestring, strict=False).date() 
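A short sketch of the conversions that docstring describes (the MM/DD/YYYY form needs strict=False; the ISO form parses either way):

import pendulum

pendulum.parse("2019-03-01").date()                # -> 2019-03-01 (a date object)
pendulum.parse("03/01/2019", strict=False).date()  # -> 2019-03-01 (a date object)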
Example #14
Source File: parser.py    From shadowreader with Apache License 2.0
def main(files: str, app: str, bucket: str, timeformat: str, regex: str):
    # def main(file: str, app: str, bucket: str, regex: str, timeformat: str = ""):
    """
    Accepts input from CLI to parse locally stored logs
    Example:
    python3 parser.py --file logs --app app1 --bucket serverless-sr-deploys \
    --timeformat 'DD/MMM/YYYY:HH:mm:ss ZZ' \
    --regex '(?P<remote_addr>[\S]+) - (?P<remote_user>[\S]+) \[(?P<timestamp>.+)\] "(?P<req_method>.+) (?P<uri>.+) (?P<httpver>.+)" (?P<status>[\S]+) (?P<body_bytes_sent>[\S]+) "(?P<referer>[\S]+)" "(?P<user_agent>[\S]+)" "(?P<x_forwarded_for>[\S]+)"'

    :param file: Name of log file to parse
    :param app: Name of the application for the logs
    :param bucket: S3 bucket to store the parsed logs to
    :param timeformat: The format of the timestamp in the logs
                       Accepts the following tokens: https://pendulum.eustace.io/docs/#tokens
    :param regex: Regex to use to parse the logs
    """  # noqa: W605

    for f in files:
        click.echo(f"Processing file: {f}")
        parse_file(f, app, bucket, timeformat, regex)
        click.echo() 
Example #15
Source File: check.py    From heartbeats with MIT License
def process_at_service(self, service):
        """
        When the current time has passed `at`, check whether any ping was reported within the [at, at + grace] window.
        """
        latest_ping = self.get_last_ping(service)
        if not latest_ping:
            return

        at = pendulum.parse(service.value, tz=settings.TIME_ZONE).in_timezone('UTC')
        last_created = pendulum.instance(latest_ping.created)
        now = pendulum.now(tz='UTC')

        if now < at.add(minutes=int(service.grace)):
            return
        if last_created < at:
            self.notify(service, now) 
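The service's configured time is plain text, so the code pins it to the Django TIME_ZONE at parse time and then normalises to UTC; a minimal sketch of that pattern ('Asia/Shanghai' is just an illustrative stand-in for settings.TIME_ZONE):

import pendulum

at = pendulum.parse("2020-01-01 09:30:00", tz="Asia/Shanghai").in_timezone("UTC")
print(at)  # 2020-01-01T01:30:00+00:00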
Example #16
Source File: __init__.py    From tap-facebook with GNU Affero General Public License v3.0
def _iterate(self, generator, record_preparation):
        max_bookmark = None
        for recordset in generator:
            for record in recordset:
                updated_at = pendulum.parse(record[UPDATED_TIME_KEY])

                if self.current_bookmark and self.current_bookmark >= updated_at:
                    continue
                if not max_bookmark or updated_at > max_bookmark:
                    max_bookmark = updated_at

                record = record_preparation(record)
                yield {'record': record}

            if max_bookmark:
                yield {'state': advance_bookmark(self, UPDATED_TIME_KEY, str(max_bookmark))} 
Example #17
Source File: request.py    From transformer with MIT License
def from_har_entry(cls, entry: dict) -> "Request":
        """
        Creates a request from a HAR entry__.

        __ http://www.softwareishard.com/blog/har-12-spec/#entries

        :raise KeyError: if *entry* is not a valid HAR "entry" object.
        :raise ValueError: if the ``request.startedDateTime`` value cannot be
            interpreted as a timestamp.
        """

        request = entry["request"]
        return Request(
            timestamp=pendulum.parse(entry["startedDateTime"]),
            method=HttpMethod[request["method"]],
            url=urlparse(request["url"]),
            har_entry=entry,
            name=None,
            headers=CaseInsensitiveDict(
                {d["name"]: d["value"] for d in request.get("headers", [])}
            ),
            post_data=request.get("postData"),
            query=[
                QueryPair(name=d["name"], value=d["value"])
                for d in request.get("queryString", [])
            ],
        ) 
Example #18
Source File: datasets.py    From pyattck with MIT License
def generated_attck_data(self, force=False):
        """Downloads, saves, or retrieves the Mitre ATT&CK Enterprise Generated Dataset JSON
        
        Args:
            force (bool, optional): Will force the download of a new Generated Dataset JSON file. Defaults to False.
        
        Returns:
            [dict]: Mitre ATT&CK Enterprise Generated Dataset JSON
        """
        if force:
            datasets = self.__get_datasets()
            self.__save_locally(self.dataset_json_path, datasets)
            return datasets
        else:
            cached_data = self.__get_cached_data(self.dataset_json_path)
            if cached_data:
                if pendulum.now().add(days=30).to_iso8601_string() >= pendulum.parse(cached_data['last_updated']).to_iso8601_string():
                    return cached_data
                else:
                    datasets = requests.get(self.__DATASETS_URL).json()
                    self.__save_locally(self.dataset_json_path, datasets)
                    return datasets
            else:
                datasets = requests.get(self.__DATASETS_URL)
                if datasets:
                    try:
                        datasets = datasets.json()
                    except:
                        print('Unable to download and load external datasets')
                        pass
                self.__save_locally(self.dataset_json_path, datasets)
                return datasets 
Example #19
Source File: core.py    From maya with MIT License
def from_rfc3339(rfc3339_string):
        """Returns MayaDT instance from rfc3339 string."""
        return parse(rfc3339_string)

    # Exporters
    # --------- 
Example #20
Source File: parser.py    From shadowreader with Apache License 2.0
def parse_file(file: str, app: str, bucket: str, timeformat: str, regex: str):
    with open(file) as f:
        lines = f.readlines()

    regex = re.compile(regex)

    lines = [x.strip() for x in lines if x.strip()]
    lines = [regex.match(x) for x in lines if x]
    lines = [x.groupdict() for x in lines if x]

    def parse_time(t):
        if timeformat:
            return pendulum.from_format(t, timeformat)
        else:
            return pendulum.parse(t)

    if not lines:
        click.echo(
            f"No logs were parsed for {file}. Could the RegEx be wrong or the file empty?"
        )
        return

    tzinfo = pendulum.tz.local_timezone()
    if lines:
        first = lines[0]
        inst = parse_time(first[timestamp_field])
        tzinfo = inst.tzinfo

    for l in lines:
        l[timestamp_field] = parse_time(l[timestamp_field])

    payload = {app: defaultdict(list)}
    payload = _batch_lines_by_timestamp(lines, payload, app)

    click.echo(f"Processing done. Pushing results to S3...")

    parse_results = _put_payload_on_s3(
        payload=payload[app], bucket=bucket, elb_name=app, testing={}
    )

    print_stats(parse_results, tzinfo, app) 
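parse_time() switches between the two entry points: pendulum.from_format() when the CLI supplied a token string (the docstring of main() above shows 'DD/MMM/YYYY:HH:mm:ss ZZ' for nginx-style logs), and pendulum.parse() otherwise. A hedged sketch of both:

import pendulum

# Token-based parsing of an nginx/ELB-style timestamp.
pendulum.from_format("22/Dec/2019:13:45:00 +0000", "DD/MMM/YYYY:HH:mm:ss ZZ")
# 2019-12-22T13:45:00+00:00

# ISO-style timestamps need no format string.
pendulum.parse("2019-12-22T13:45:00Z")
# 2019-12-22T13:45:00+00:00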
Example #21
Source File: mail.py    From serverless-southwest-check-in with MIT License
def send_confirmation(to, reservation):
    """
    Sends an email confirming that the user's checkin has been scheduled
    """

    subject = "Your checkin has been scheduled!"
    body = (
        "Thanks for scheduling a checkin for your flight. I will set "
        "my alarm and wake up to check you in 24 hours before your "
        "departure.\n\n"
        "The boarding position which you receive is based on the number of Early Bird "
        "and A-List passengers on your flight. 80%% of checkins are in position B15 or "
        "better, which almost guarantees you won't be stuck with a middle seat. Enjoy your flight!\n\n"
        "Confirmation Number: %s\n"
        "Check-in times:\n"
    ) % (reservation.confirmation_number)

    for c in reversed(reservation.check_in_times):
        pt = pendulum.parse(c)
        body += " - {}\n".format(pt.to_day_datetime_string())

    feedback_email = os.environ.get('EMAIL_FEEDBACK')
    if feedback_email:
        body += f"\nQuestions? Comments? Reply to this message or email {feedback_email}."

    return send_ses_email(to, subject, body) 
Example #22
Source File: swa.py    From serverless-southwest-check-in with MIT License
def _get_check_in_time(self, departure_time):
        """
        Receives a departure time in RFC3339 format:

            2017-02-09T07:50:00.000-06:00

        And returns the check in time (24 hours prior) as a pendulum time
        object. `self.check_in_seconds` seconds (Default 5) are added to
        the checkin time to allow for some clock skew buffer.
        """
        return pendulum.parse(departure_time)\
                .subtract(days=1)\
                .add(seconds=self.check_in_seconds) 
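Using the departure time from the docstring, the chain works out to a concrete check-in time (assuming the default five-second buffer):

import pendulum

check_in = (
    pendulum.parse("2017-02-09T07:50:00.000-06:00")
    .subtract(days=1)
    .add(seconds=5)
)
print(check_in)  # 2017-02-08T07:50:05-06:00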
Example #23
Source File: core.py    From maya with MIT License
def from_rfc2822(rfc2822_string):
        """Returns MayaDT instance from rfc2822 string."""
        return parse(rfc2822_string) 
Example #24
Source File: test_utils.py    From FlowKit with Mozilla Public License 2.0
def test_get_output_filename():
    """
    Test that get_output_filename returns the expected filename when a tag is provided.
    """
    now = pendulum.parse("2016-01-01")
    with patch("pendulum.now", lambda x: now):
        output_filename = get_output_filename("dummy_filename.suffix", "DUMMY_TAG")
    assert output_filename == "dummy_filename__DUMMY_TAG__20160101T000000Z.suffix" 
Example #25
Source File: google.py    From k8s-snapshots with BSD 2-Clause "Simplified" License
def load_snapshots(ctx, label_filters: Dict[str, str]) -> List[Snapshot]:
    """
    Return the existing snapshots.
    """
    snapshots = get_gcloud(ctx).snapshots()
    request = snapshots.list(
        project=get_project_id(ctx),
        filter=snapshot_list_filter_expr(label_filters),
        maxResults=500,
    )

    loaded_snapshots = []

    while request is not None:
        resp = request.execute()
        for item in resp.get('items', []):
            # We have to parse the disk zone and name out of the source disk.
            # It's a URL that ends with '/zones/{zone}/disks/{name}'.
            sourceDiskList = item['sourceDisk'].split('/')

            disk = sourceDiskList[-1]

            if "regions" in sourceDiskList:
                region = sourceDiskList[8]
                loaded_snapshots.append(Snapshot(
                    name=item['name'],
                    created_at=parse_timestamp(item['creationTimestamp']),
                    disk=GoogleDiskIdentifier(name=disk, region=region, regional=True)
                ))
            else:
                zone = sourceDiskList[8]
                loaded_snapshots.append(Snapshot(
                    name=item['name'],
                    created_at=parse_timestamp(item['creationTimestamp']),
                    disk=GoogleDiskIdentifier(name=disk, zone=zone, regional=False)
                ))

        request = snapshots.list_next(request, resp)

    return loaded_snapshots 
Example #26
Source File: google.py    From k8s-snapshots with BSD 2-Clause "Simplified" License
def parse_timestamp(date_str: str) -> pendulum.Pendulum:
    return pendulum.parse(date_str).in_timezone('utc') 
Example #27
Source File: schema.py    From pytezos with MIT License
def decode_literal(node, prim):
    core_type, value = next(iter(node.items()))
    if prim in ['int', 'nat']:
        return int(value)
    if prim == 'timestamp':
        if core_type == 'int':
            return pendulum.from_timestamp(int(value))
        else:
            return pendulum.parse(value)
    if prim == 'mutez':
        return Decimal(value) / 10 ** 6
    if prim == 'bool':
        return value == 'True'
    if prim == 'address' and core_type == 'bytes':
        prefix = {'0000': b'tz1', '0001': b'tz2', '0002': b'tz3'}  # TODO: check it's ttr
        return base58_encode(bytes.fromhex(value[4:]), prefix[value[:4]]).decode()
    return value 
Example #28
Source File: block.py    From pytezos with MIT License
def to_timestamp(v):
    try:
        v = pendulum.parse(v)
    except ParserError:
        pass
    if isinstance(v, datetime):
        v = int(v.timestamp())
    return v 
Example #29
Source File: test_from_format.py    From pendulum with MIT License
def test_from_format(text, fmt, expected, now):
    if now is None:
        now = pendulum.datetime(2015, 11, 12)
    else:
        now = pendulum.parse(now)

    # Python 2.7 loses precision for x timestamps
    # so we don't test
    if fmt == "x" and PY2:
        return

    with pendulum.test(now):
        assert pendulum.from_format(text, fmt).isoformat() == expected 
Example #30
Source File: test_add.py    From pendulum with MIT License
def test_add_to_fixed_timezones():
    dt = pendulum.parse("2015-03-08T01:00:00-06:00")
    dt = dt.add(weeks=1)
    dt = dt.add(hours=1)

    assert_datetime(dt, 2015, 3, 15, 2, 0, 0)
    assert dt.timezone_name == "-06:00"
    assert dt.offset == -6 * 3600