Python boto3.session() Examples

The following are 30 code examples drawn from the boto3.session module, most of them constructing its Session class. Each example comes from an open-source project; the source file, project, and license are noted above each example. You may also want to check out all other available functions and classes of the boto3 module.
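Before diving in, here is a minimal sketch of the two patterns that recur below: a default Session that resolves credentials from the environment, and an explicit Session built from a named profile. The profile name 'dev' is just a placeholder.

import boto3

# Default session: credentials and region come from the environment,
# shared config files, or an attached IAM role.
session = boto3.session.Session()
s3 = session.client('s3')

# Explicit session from a named profile, as several examples below do.
dev_session = boto3.session.Session(profile_name='dev')
logs = dev_session.client('logs', region_name='us-east-1')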
Example #1
Source File: aws-git-backed-static-website-lambda.py    From aws-git-backed-static-website with Apache License 2.0
from boto3.session import Session
import botocore.client

def setup(event):
    # Extract attributes passed in by CodePipeline
    job_id = event['CodePipeline.job']['id']
    job_data = event['CodePipeline.job']['data']
    artifact = job_data['inputArtifacts'][0]
    config = job_data['actionConfiguration']['configuration']
    credentials = job_data['artifactCredentials']
    from_bucket = artifact['location']['s3Location']['bucketName']
    from_key = artifact['location']['s3Location']['objectKey']
    from_revision = artifact['revision']
    #output_artifact = job_data['outputArtifacts'][0]
    #to_bucket = output_artifact['location']['s3Location']['bucketName']
    #to_key = output_artifact['location']['s3Location']['objectKey']

    # Temporary credentials to access CodePipeline artifact in S3
    key_id = credentials['accessKeyId']
    key_secret = credentials['secretAccessKey']
    session_token = credentials['sessionToken']
    session = Session(aws_access_key_id=key_id,
                      aws_secret_access_key=key_secret,
                      aws_session_token=session_token)
    s3 = session.client('s3',
                        config=botocore.client.Config(signature_version='s3v4'))

    return (job_id, s3, from_bucket, from_key, from_revision) 
Example #2
Source File: MLOps-BYO-EvaluateModel.py    From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0
import json

from boto3.session import Session

def read_job_info(event):
    objectKey = event['CodePipeline.job']['data']['inputArtifacts'][0]['location']['s3Location']['objectKey']
    bucketname = event['CodePipeline.job']['data']['inputArtifacts'][0]['location']['s3Location']['bucketName']

    artifactCredentials = event['CodePipeline.job']['data']['artifactCredentials']

    session = Session(aws_access_key_id=artifactCredentials['accessKeyId'],
                      aws_secret_access_key=artifactCredentials['secretAccessKey'],
                      aws_session_token=artifactCredentials['sessionToken'])

    s3 = session.resource('s3')

    obj = s3.Object(bucketname, objectKey)

    item = json.loads(obj.get()['Body'].read().decode('utf-8'))

    print("[INFO]Previous CodePipeline Job Info Successfully Read:", item)
    return item 
Example #3
Source File: cfn_validate_lambda.py    From automating-governance-sample with Apache License 2.0
from boto3.session import Session
import botocore.client

def setup_s3_client(job_data):
    """Creates an S3 client

    Uses the credentials passed in the event by CodePipeline. These
    credentials can be used to access the artifact bucket.

    Args:
        job_data: The job data structure

    Returns:
        An S3 client with the appropriate credentials

    """
    key_id = job_data['artifactCredentials']['accessKeyId']
    key_secret = job_data['artifactCredentials']['secretAccessKey']
    session_token = job_data['artifactCredentials']['sessionToken']

    session = Session(
        aws_access_key_id=key_id,
        aws_secret_access_key=key_secret,
        aws_session_token=session_token)
    return session.client('s3', config=botocore.client.Config(signature_version='s3v4')) 
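For context, this helper would typically be called from a Lambda handler with the job data pulled out of the CodePipeline event, as Example #1 does. A minimal usage sketch follows; the handler name and event shape are assumptions consistent with the other examples on this page.

# Hypothetical handler showing how setup_s3_client might be invoked.
def lambda_handler(event, context):
    job_data = event['CodePipeline.job']['data']
    s3 = setup_s3_client(job_data)
    # s3 can now read the pipeline's input artifact from the artifact bucket.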
Example #4
Source File: metric.py    From cloudwatch-fluent-metrics with Apache License 2.0
import uuid

import boto3

def __init__(self, client=None, **kwargs):
        self.dimensions = []
        self.timers = {}
        self.dimension_stack = []
        self.storage_resolution = 60
        self.use_stream_id = kwargs.get('UseStreamId', True)
        if self.use_stream_id:
            self.stream_id = str(uuid.uuid4())
            self.with_dimension('MetricStreamId', self.stream_id)
        else:
            self.stream_id = None

        if client:
            self.client = client
        else:
            profile = kwargs.get('Profile')
            if profile:
                session = boto3.session.Session(profile_name=profile)
                self.client = session.client('cloudwatch')
            else:
                self.client = boto3.client('cloudwatch') 
Example #5
Source File: MLOps-BIA-EvaluateModel.py    From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0
import json

from boto3.session import Session

def read_job_info(event):
    print("[DEBUG]EVENT IN:", event)
    bucketname = event['CodePipeline.job']['data']['inputArtifacts'][0]['location']['s3Location']['bucketName']
    print("[INFO]Previous Job Info Bucket:", bucketname)

    objectKey = event['CodePipeline.job']['data']['inputArtifacts'][0]['location']['s3Location']['objectKey']
    print("[INFO]Previous Job Info Object:", objectKey)

    artifactCredentials = event['CodePipeline.job']['data']['artifactCredentials']

    session = Session(aws_access_key_id=artifactCredentials['accessKeyId'],
                      aws_secret_access_key=artifactCredentials['secretAccessKey'],
                      aws_session_token=artifactCredentials['sessionToken'])

    s3 = session.resource('s3')

    obj = s3.Object(bucketname, objectKey)

    item = json.loads(obj.get()['Body'].read().decode('utf-8'))

    print("[INFO]Previous CodePipeline Job Info Sucessfully Read:", item)
    return item 
Example #6
Source File: MLOps-BIA-EvaluateModel.py    From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0
import json
import os

from boto3.session import Session

def write_job_info_s3(event):
    KMSKeyIdSSEIn = os.environ['SSEKMSKeyIdIn']

    objectKey = event['CodePipeline.job']['data']['outputArtifacts'][0]['location']['s3Location']['objectKey']
    bucketname = event['CodePipeline.job']['data']['outputArtifacts'][0]['location']['s3Location']['bucketName']

    artifactCredentials = event['CodePipeline.job']['data']['artifactCredentials']
    artifactName = event['CodePipeline.job']['data']['outputArtifacts'][0]['name']

    json_data = json.dumps(event)

    print(json_data)

    session = Session(aws_access_key_id=artifactCredentials['accessKeyId'],
                      aws_secret_access_key=artifactCredentials['secretAccessKey'],
                      aws_session_token=artifactCredentials['sessionToken'])

    s3 = session.resource("s3")
    # The next line is dead code: the handle is immediately replaced below.
    # object = s3.Object(bucketname, objectKey + '/event.json')
    object = s3.Object(bucketname, objectKey)
    print(object)
    object.put(Body=json_data, ServerSideEncryption='aws:kms', SSEKMSKeyId=KMSKeyIdSSEIn) 
Example #7
Source File: MLOps-BIA-DeployModel.py    From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0
import json
import tempfile

from boto3.session import Session

def read_job_info(event):
    tmp_file = tempfile.NamedTemporaryFile()

    objectKey = event['CodePipeline.job']['data']['inputArtifacts'][0]['location']['s3Location']['objectKey']

    print("[INFO]Object:", objectKey)

    bucketname = event['CodePipeline.job']['data']['inputArtifacts'][0]['location']['s3Location']['bucketName']
    print("[INFO]Bucket:", bucketname)

    artifactCredentials = event['CodePipeline.job']['data']['artifactCredentials']

    session = Session(aws_access_key_id=artifactCredentials['accessKeyId'],
                      aws_secret_access_key=artifactCredentials['secretAccessKey'],
                      aws_session_token=artifactCredentials['sessionToken'])

    s3 = session.resource('s3')

    obj = s3.Object(bucketname, objectKey)

    item = json.loads(obj.get()['Body'].read().decode('utf-8'))

    print("Item:", item)

    return item 
Example #8
Source File: MLOps-BIA-GetStatus.py    From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0
import json

from boto3.session import Session

def write_job_info_s3(event, writeData):
    objectKey = event['CodePipeline.job']['data']['outputArtifacts'][0]['location']['s3Location']['objectKey']

    bucketname = event['CodePipeline.job']['data']['outputArtifacts'][0]['location']['s3Location']['bucketName']

    artifactCredentials = event['CodePipeline.job']['data']['artifactCredentials']

    artifactName = event['CodePipeline.job']['data']['outputArtifacts'][0]['name']
    json_data = json.dumps(writeData, indent=4, sort_keys=True, default=str)

    print(json_data)

    session = Session(aws_access_key_id=artifactCredentials['accessKeyId'],
                      aws_secret_access_key=artifactCredentials['secretAccessKey'],
                      aws_session_token=artifactCredentials['sessionToken'])

    s3 = session.resource("s3")
    # object = s3.Object(bucketname, objectKey + '/event.json')
    object = s3.Object(bucketname, objectKey)
    print(object)
    # Note: SSEKMSKeyId is not defined in this excerpt; in the source it is
    # presumably resolved at module level (e.g. from an environment variable).
    object.put(Body=json_data, ServerSideEncryption='aws:kms', SSEKMSKeyId=SSEKMSKeyId)
    print('[INFO]event written to s3') 
Example #9
Source File: MLOps-BIA-GetStatus.py    From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0
import json
import tempfile

from boto3.session import Session

def read_job_info(event):
    tmp_file = tempfile.NamedTemporaryFile()

    objectKey = event['CodePipeline.job']['data']['inputArtifacts'][0]['location']['s3Location']['objectKey']
    print("[INFO]Object Key:", objectKey)

    bucketname = event['CodePipeline.job']['data']['inputArtifacts'][0]['location']['s3Location']['bucketName']
    print("[INFO]Bucket Name:", bucketname)

    artifactCredentials = event['CodePipeline.job']['data']['artifactCredentials']

    session = Session(aws_access_key_id=artifactCredentials['accessKeyId'],
                      aws_secret_access_key=artifactCredentials['secretAccessKey'],
                      aws_session_token=artifactCredentials['sessionToken'])

    s3 = session.resource('s3')

    obj = s3.Object(bucketname, objectKey)

    item = json.loads(obj.get()['Body'].read().decode('utf-8'))

    return item 
Example #10
Source File: MLOps-BYO-GetStatus.py    From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0
import json

from boto3.session import Session

def write_job_info_s3(event, writeData):

    objectKey = event['CodePipeline.job']['data']['outputArtifacts'][0]['location']['s3Location']['objectKey']

    bucketname = event['CodePipeline.job']['data']['outputArtifacts'][0]['location']['s3Location']['bucketName']

    artifactCredentials = event['CodePipeline.job']['data']['artifactCredentials']

    artifactName = event['CodePipeline.job']['data']['outputArtifacts'][0]['name']
    json_data = json.dumps(writeData, indent=4, sort_keys=True, default=str)

    print(json_data)

    session = Session(aws_access_key_id=artifactCredentials['accessKeyId'],
                      aws_secret_access_key=artifactCredentials['secretAccessKey'],
                      aws_session_token=artifactCredentials['sessionToken'])

    s3 = session.resource("s3")
    #object = s3.Object(bucketname, objectKey + '/event.json')
    object = s3.Object(bucketname, objectKey)
    print(object)
    object.put(Body=json_data, ServerSideEncryption='aws:kms', SSEKMSKeyId=SSEKMSKeyId)
    print('[INFO]event written to s3') 
Example #11
Source File: MLOps-BYO-GetStatus.py    From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0
import json
import tempfile

from boto3.session import Session

def read_job_info(event):

    tmp_file = tempfile.NamedTemporaryFile()

    objectKey = event['CodePipeline.job']['data']['inputArtifacts'][0]['location']['s3Location']['objectKey']
    print("[INFO]Object Key:", objectKey)

    bucketname = event['CodePipeline.job']['data']['inputArtifacts'][0]['location']['s3Location']['bucketName']
    print("[INFO]Bucket Name:", bucketname)

    artifactCredentials = event['CodePipeline.job']['data']['artifactCredentials']

    session = Session(aws_access_key_id=artifactCredentials['accessKeyId'],
                      aws_secret_access_key=artifactCredentials['secretAccessKey'],
                      aws_session_token=artifactCredentials['sessionToken'])

    s3 = session.resource('s3')

    obj = s3.Object(bucketname, objectKey)

    item = json.loads(obj.get()['Body'].read().decode('utf-8'))

    return item
Example #12
Source File: MLOps-BYO-TrainModel.py    From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0
import json
import os

from boto3.session import Session

def write_job_info_s3(event):
    print(event)

    objectKey = event['CodePipeline.job']['data']['outputArtifacts'][0]['location']['s3Location']['objectKey']
    bucketname = event['CodePipeline.job']['data']['outputArtifacts'][0]['location']['s3Location']['bucketName']
    artifactCredentials = event['CodePipeline.job']['data']['artifactCredentials']
    artifactName = event['CodePipeline.job']['data']['outputArtifacts'][0]['name']
    
    # S3 Managed Key for Encryption
    S3SSEKey = os.environ['SSEKMSKeyIdIn']

    json_data = json.dumps(event)
    print(json_data)

    session = Session(aws_access_key_id=artifactCredentials['accessKeyId'],
                      aws_secret_access_key=artifactCredentials['secretAccessKey'],
                      aws_session_token=artifactCredentials['sessionToken'])

    s3 = session.resource("s3")
    object = s3.Object(bucketname, objectKey)
    print(object)
    object.put(Body=json_data, ServerSideEncryption='aws:kms', SSEKMSKeyId=S3SSEKey)
    
    print('[SUCCESS]Job Information Written to S3') 
Example #13
Source File: MLOps-BIA-EvaluateModel.py    From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0
import json

from boto3.session import Session

def read_job_info(event):

    print("[DEBUG]EVENT IN:", event)
    bucketname = event['CodePipeline.job']['data']['inputArtifacts'][0]['location']['s3Location']['bucketName']
    print("[INFO]Previous Job Info Bucket:", bucketname)
    
    objectKey = event['CodePipeline.job']['data']['inputArtifacts'][0]['location']['s3Location']['objectKey']
    print("[INFO]Previous Job Info Object:", objectKey)

    artifactCredentials = event['CodePipeline.job']['data']['artifactCredentials']

    session = Session(aws_access_key_id=artifactCredentials['accessKeyId'],
                      aws_secret_access_key=artifactCredentials['secretAccessKey'],
                      aws_session_token=artifactCredentials['sessionToken'])

    s3 = session.resource('s3')

    obj = s3.Object(bucketname, objectKey)

    item = json.loads(obj.get()['Body'].read().decode('utf-8'))

    print("[INFO]Previous CodePipeline Job Info Successfully Read:", item)
    return item 
Example #14
Source File: clear_lambda_storage.py    From clear-lambda-storage with MIT License
import boto3

def init_boto_client(client_name, region, args):
    """
    Initializes a boto3 client object
    :param client_name: client name
    :param region: region name
    :param args: arguments
    :return: Client
    """
    if args.token_key_id and args.token_secret:
        boto_client = boto3.client(
            client_name,
            aws_access_key_id=args.token_key_id,
            aws_secret_access_key=args.token_secret,
            region_name=region
        )
    elif args.profile:
        session = boto3.session.Session(profile_name=args.profile)
        boto_client = session.client(client_name, region_name=region)
    else:
        boto_client = boto3.client(client_name, region_name=region)

    return boto_client 
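A hypothetical invocation, to show the three credential paths this helper falls back through; the Namespace attributes mirror the fields the snippet reads (token_key_id, token_secret, profile) and the values here are placeholders:

from argparse import Namespace

# No explicit keys are given, so the named-profile branch is taken.
args = Namespace(token_key_id=None, token_secret=None, profile='dev')
lambda_client = init_boto_client('lambda', 'us-east-1', args)
print(lambda_client.meta.region_name)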
Example #15
Source File: MLOps-BIA-TrainModel.py    From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0
import json
import os

from boto3.session import Session

def write_job_info_s3(event):
    print(event)

    objectKey = event['CodePipeline.job']['data']['outputArtifacts'][0]['location']['s3Location']['objectKey']
    bucketname = event['CodePipeline.job']['data']['outputArtifacts'][0]['location']['s3Location']['bucketName']
    artifactCredentials = event['CodePipeline.job']['data']['artifactCredentials']
    artifactName = event['CodePipeline.job']['data']['outputArtifacts'][0]['name']
    
    # S3 Managed Key for Encryption
    S3SSEKey = os.environ['SSEKMSKeyIdIn']

    json_data = json.dumps(event)
    print(json_data)

    session = Session(aws_access_key_id=artifactCredentials['accessKeyId'],
                      aws_secret_access_key=artifactCredentials['secretAccessKey'],
                      aws_session_token=artifactCredentials['sessionToken'])

    s3 = session.resource("s3")
    object = s3.Object(bucketname, objectKey)
    print(object)
    object.put(Body=json_data, ServerSideEncryption='aws:kms', SSEKMSKeyId=S3SSEKey)
    
    print('[SUCCESS]Job Information Written to S3') 
Example #16
Source File: MLOps-BIA-GetStatus.py    From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0
import json

from boto3.session import Session

def write_job_info_s3(event, writeData):

    objectKey = event['CodePipeline.job']['data']['outputArtifacts'][0]['location']['s3Location']['objectKey']

    bucketname = event['CodePipeline.job']['data']['outputArtifacts'][0]['location']['s3Location']['bucketName']

    artifactCredentials = event['CodePipeline.job']['data']['artifactCredentials']

    artifactName = event['CodePipeline.job']['data']['outputArtifacts'][0]['name']
    json_data = json.dumps(writeData, indent=4, sort_keys=True, default=str)

    print(json_data)

    session = Session(aws_access_key_id=artifactCredentials['accessKeyId'],
                      aws_secret_access_key=artifactCredentials['secretAccessKey'],
                      aws_session_token=artifactCredentials['sessionToken'])

    s3 = session.resource("s3")
    #object = s3.Object(bucketname, objectKey + '/event.json')
    object = s3.Object(bucketname, objectKey)
    print(object)
    object.put(Body=json_data, ServerSideEncryption='aws:kms', SSEKMSKeyId=SSEKMSKeyId)
    print('[INFO]event written to s3') 
Example #17
Source File: MLOps-BIA-GetStatus.py    From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0
import json
import tempfile

from boto3.session import Session

def read_job_info(event):

    tmp_file = tempfile.NamedTemporaryFile()

    objectKey = event['CodePipeline.job']['data']['inputArtifacts'][0]['location']['s3Location']['objectKey']
    print("[INFO]Object Key:", objectKey)

    bucketname = event['CodePipeline.job']['data']['inputArtifacts'][0]['location']['s3Location']['bucketName']
    print("[INFO]Bucket Name:", bucketname)

    artifactCredentials = event['CodePipeline.job']['data']['artifactCredentials']

    session = Session(aws_access_key_id=artifactCredentials['accessKeyId'],
                      aws_secret_access_key=artifactCredentials['secretAccessKey'],
                      aws_session_token=artifactCredentials['sessionToken'])

    s3 = session.resource('s3')

    obj = s3.Object(bucketname, objectKey)

    item = json.loads(obj.get()['Body'].read().decode('utf-8'))

    return item
Example #18
Source File: upload.py    From zulip with Apache License 2.0
import os
import sys

import botocore.session
from botocore.client import Config

def upload_export_tarball(self, realm: Optional[Realm], tarball_path: str) -> str:
        def percent_callback(bytes_transferred: Any) -> None:
            sys.stdout.write('.')
            sys.stdout.flush()

        # We use the avatar bucket, because it's world-readable.
        key = self.avatar_bucket.Object(os.path.join("exports", generate_random_token(32),
                                                     os.path.basename(tarball_path)))

        key.upload_file(tarball_path, Callback=percent_callback)

        session = botocore.session.get_session()
        config = Config(signature_version=botocore.UNSIGNED)

        public_url = session.create_client('s3', config=config).generate_presigned_url(
            'get_object',
            Params={
                'Bucket': self.avatar_bucket.name,
                'Key': key.key,
            },
            ExpiresIn=0,
        )
        return public_url 
Example #19
Source File: s3.py    From Hands-On-Reactive-Programming-with-Python with MIT License
from io import BytesIO

import rx
from boto3.session import Session

def make_driver():
    def driver(sink):

        def on_subscribe(observer, scheduler):
            client = None
            bucket = None

            def on_next(item):
                nonlocal client
                nonlocal bucket

                if type(item) is Configure:
                    session = Session(aws_access_key_id=item.access_key,
                                      aws_secret_access_key=item.secret_key)
                    client = session.client(
                        's3',
                        endpoint_url=item.endpoint_url,
                        region_name=item.region_name)
                    bucket = item.bucket

                elif type(item) is UploadObject:
                    data = BytesIO(item.data)
                    client.upload_fileobj(data, bucket, item.key)
                    observer.on_next(UploadReponse(
                        key=item.key,
                        id=item.id))

                else:
                    observer.on_error("unknown item: {}".format(type(item)))

            sink.request.subscribe(
                on_next=on_next,
                on_error=lambda e: observer.on_error(e),
                on_completed=lambda: observer.on_completed())

        return Source(
            response=rx.create(on_subscribe)
        )

    return Component(call=driver, input=Sink) 
Example #20
Source File: action.py    From stackstorm-aws with Apache License 2.0
from botocore.exceptions import ClientError

def assume_role(self, account_id):
        ''' Assumes a role and sets up the boto3 session for the cross-account capability '''
        if account_id == self.account_id:
            return

        try:
            assumed_role = self.session.client('sts').assume_role(
                RoleArn=self.cross_roles[account_id],
                RoleSessionName='StackStormEvents'
            )
        except ClientError:
            self.logger.error("Failed to assume role as account %s when using the AWS session "
                              "client. Check the roles configured for the AWS pack and ensure "
                              "that the '%s' is still valid.",
                              account_id, self.cross_roles[account_id])
            raise
        except KeyError:
            self.logger.error("Could not find the role referring %s account in the config file. "
                              "Please, introduce it in 'aws.yaml' file.", account_id)
            raise

        self.credentials.update({
            'aws_access_key_id': assumed_role["Credentials"]["AccessKeyId"],
            'aws_secret_access_key': assumed_role["Credentials"]["SecretAccessKey"],
            'security_token': assumed_role["Credentials"]["SessionToken"]
        }) 
Example #21
Source File: __init__.py    From watchtower with Apache License 2.0
import boto3

def _get_session(boto3_session, boto3_profile_name):
        if boto3_session:
            return boto3_session

        if boto3_profile_name:
            return boto3.session.Session(profile_name=boto3_profile_name)

        return boto3 
Example #22
Source File: __init__.py    From watchtower with Apache License 2.0
def __init__(self, log_group=__name__, stream_name=None, use_queues=True, send_interval=60,
                 max_batch_size=1024 * 1024, max_batch_count=10000, boto3_session=None,
                 boto3_profile_name=None, create_log_group=True, log_group_retention_days=None,
                 create_log_stream=True, json_serialize_default=None, *args, **kwargs):
        handler_base_class.__init__(self, *args, **kwargs)
        self.log_group = log_group
        self.stream_name = stream_name
        self.use_queues = use_queues
        self.send_interval = send_interval
        self.json_serialize_default = json_serialize_default or _json_serialize_default
        self.max_batch_size = max_batch_size
        self.max_batch_count = max_batch_count
        self.queues, self.sequence_tokens = {}, {}
        self.threads = []
        self.creating_log_stream, self.shutting_down = False, False
        self.create_log_stream = create_log_stream
        self.log_group_retention_days = log_group_retention_days

        # Creating the session should be the final call in __init__, after all instance attributes are set.
        # This ensures that failing to create the session will not result in any missing attributes.
        self.cwl_client = self._get_session(boto3_session, boto3_profile_name).client("logs")
        if create_log_group:
            _idempotent_create(self.cwl_client.create_log_group, logGroupName=self.log_group)

        if log_group_retention_days:
            _idempotent_create(
                self.cwl_client.put_retention_policy,
                logGroupName=self.log_group,
                retentionInDays=self.log_group_retention_days
            ) 
Example #23
Source File: parsers.py    From boto3_type_annotations with MIT License
def parse_clients(session: Session, config: Config) -> Generator[Client, None, None]:
    for name in [service for service in session.get_available_services() if service in config.services]:
        print(f'Parsing: {name}')
        client = session.client(name)
        yield Client(
            name,
            list(parse_methods(get_instance_public_methods(client)))
        ) 
Example #24
Source File: aws_common.py    From resilient-community-apps with MIT License
import boto3

def __init__(self, service_name, aws_access_key_id, aws_secret_access_key, region_name):
        session = boto3.session.Session(aws_access_key_id=aws_access_key_id,
                                        aws_secret_access_key=aws_secret_access_key,
                                        region_name=region_name)
        self.aws_client = session.client(service_name) 
Example #25
Source File: parsers.py    From boto3_type_annotations with MIT License
def retrieve_sub_resources(session, resource) -> Generator[Boto3ServiceResource, None, None]:
    loader = session._session.get_component('data_loader')
    json_resource_model = loader.load_service_model(
        resource.meta.service_name,
        'resources-1'
    )
    service_model = resource.meta.client.meta.service_model
    try:
        service_waiter_model = session._session.get_waiter_model(service_model.service_name)
    except UnknownServiceError:
        service_waiter_model = None
    for name in json_resource_model['resources']:
        resource_model = json_resource_model['resources'][name]
        cls = session.resource_factory.load_from_definition(
            resource_name=name,
            single_resource_json_definition=resource_model,
            service_context=ServiceContext(
                service_name=resource.meta.service_name,
                resource_json_definitions=json_resource_model['resources'],
                service_model=service_model,
                service_waiter_model=service_waiter_model
            )
        )
        identifiers = cls.meta.resource_model.identifiers
        args = []
        for _ in identifiers:
            args.append('foo')
        yield cls(*args, client=boto3.client(resource.meta.service_name)) 
Example #26
Source File: parsers.py    From boto3_type_annotations with MIT License
def parse_service_waiters(session: Session, config: Config) -> Generator[ServiceWaiter, None, None]:
    for name in [service for service in session.get_available_services() if service in config.services]:
        client = session.client(name)
        if client.waiter_names:
            print(f'Parsing: {name}')
            yield ServiceWaiter(
                name,
                list(parse_waiters(client))
            ) 
Example #27
Source File: parsers.py    From boto3_type_annotations with MIT License
def parse_client_types(session: Session) -> Set[str]:
    types = set()
    for name in session.get_available_services():
        print(f'Parsing: {name}')
        client = session.client(name)
        types = types.union(parse_method_types(get_instance_public_methods(client)))
    return types 
Example #28
Source File: clear_lambda_storage.py    From clear-lambda-storage with MIT License
from boto3.session import Session

def list_available_lambda_regions():
    """
    Enumerates all regions where Lambda is available
    :return: list of regions
    """
    session = Session()
    return session.get_available_regions('lambda') 
Example #29
Source File: parsers.py    From boto3_type_annotations with MIT License
def parse_service_resources(session: Session, config: Config) -> Generator[ServiceResource, None, None]:
    for resource_name in [service for service in session.get_available_resources() if service in config.services]:
        service_resource = session.resource(resource_name)
        print(f'Parsing: {resource_name}')
        yield ServiceResource(
            resource_name,
            list(parse_methods(get_instance_public_methods(service_resource))),
            list(parse_attributes(service_resource)) + list(parse_identifiers(service_resource)),
            list(parse_collections(service_resource)),
            [parse_resource(resource) for resource in retrieve_sub_resources(session, service_resource)]
        ) 
Example #30
Source File: parsers.py    From boto3_type_annotations with MIT License
def parse_service_resource_types(session: Session) -> Set[str]:
    types = set()
    for resource_name in session.get_available_resources():
        service_resource = session.resource(resource_name)
        types = types.union(parse_method_types(get_resource_public_actions(service_resource)))
        types = types.union(parse_attribute_types(service_resource))
        for collection in service_resource.meta.resource_model.collections:
            types = types.union(parse_method_types(get_instance_public_methods(getattr(service_resource, collection.name))))
        for resource in retrieve_sub_resources(session, service_resource):
            types = types.union(parse_method_types(get_resource_public_actions(resource.__class__)))
            types = types.union(parse_attribute_types(resource))
            for collection in resource.meta.resource_model.collections:
                types = types.union(parse_method_types(get_instance_public_methods(getattr(resource, collection.name))))
    return types