Python boto3.session() Examples
The following are 30 code examples of boto3.session.Session(). You can go to the original project or source file by following the links above each example. You may also want to check out all available functions and classes of the boto3 module.
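
Most of the examples below follow the same pattern: construct a Session (from explicit credentials, a named profile, or the default credential chain), then derive clients or resources from it. Here is a minimal sketch of that pattern, assuming a configured AWS profile named 'my-profile' (a placeholder, not taken from any example below):

import boto3

# Build a Session, then derive service interfaces from it.
session = boto3.session.Session(profile_name='my-profile',
                                region_name='us-east-1')

s3_client = session.client('s3')      # low-level client
s3_resource = session.resource('s3')  # higher-level resource interface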

Example #1
Source File: aws-git-backed-static-website-lambda.py From aws-git-backed-static-website with Apache License 2.0 | 7 votes |
def setup(event):
    # Extract attributes passed in by CodePipeline
    job_id = event['CodePipeline.job']['id']
    job_data = event['CodePipeline.job']['data']
    artifact = job_data['inputArtifacts'][0]
    config = job_data['actionConfiguration']['configuration']
    credentials = job_data['artifactCredentials']
    from_bucket = artifact['location']['s3Location']['bucketName']
    from_key = artifact['location']['s3Location']['objectKey']
    from_revision = artifact['revision']
    #output_artifact = job_data['outputArtifacts'][0]
    #to_bucket = output_artifact['location']['s3Location']['bucketName']
    #to_key = output_artifact['location']['s3Location']['objectKey']

    # Temporary credentials to access CodePipeline artifact in S3
    key_id = credentials['accessKeyId']
    key_secret = credentials['secretAccessKey']
    session_token = credentials['sessionToken']

    session = Session(aws_access_key_id=key_id,
                      aws_secret_access_key=key_secret,
                      aws_session_token=session_token)
    s3 = session.client('s3', config=botocore.client.Config(signature_version='s3v4'))

    return (job_id, s3, from_bucket, from_key, from_revision)
Example #2
Source File: clear_lambda_storage.py From clear-lambda-storage with MIT License | 6 votes |
def init_boto_client(client_name, region, args):
    """
    Initiates boto's client object
    :param client_name: client name
    :param region: region name
    :param args: arguments
    :return: Client
    """
    if args.token_key_id and args.token_secret:
        boto_client = boto3.client(
            client_name,
            aws_access_key_id=args.token_key_id,
            aws_secret_access_key=args.token_secret,
            region_name=region
        )
    elif args.profile:
        session = boto3.session.Session(profile_name=args.profile)
        boto_client = session.client(client_name, region_name=region)
    else:
        boto_client = boto3.client(client_name, region_name=region)

    return boto_client
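
A hedged usage sketch for init_boto_client, assuming it is in scope together with boto3: the attribute names it reads (token_key_id, token_secret, profile) come from the example above, but the argparse wiring and the profile value are illustrative placeholders.

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--token-key-id', dest='token_key_id', default=None)
parser.add_argument('--token-secret', dest='token_secret', default=None)
parser.add_argument('--profile', default=None)
args = parser.parse_args(['--profile', 'my-profile'])  # placeholder profile name

# No token pair was given, so this falls through to the profile branch.
client = init_boto_client('lambda', 'us-east-1', args)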
Example #3
Source File: upload.py From zulip with Apache License 2.0 | 6 votes |
def upload_export_tarball(self, realm: Optional[Realm], tarball_path: str) -> str:
    def percent_callback(bytes_transferred: Any) -> None:
        sys.stdout.write('.')
        sys.stdout.flush()

    # We use the avatar bucket, because it's world-readable.
    key = self.avatar_bucket.Object(os.path.join("exports", generate_random_token(32),
                                                 os.path.basename(tarball_path)))

    key.upload_file(tarball_path, Callback=percent_callback)

    session = botocore.session.get_session()
    config = Config(signature_version=botocore.UNSIGNED)

    public_url = session.create_client('s3', config=config).generate_presigned_url(
        'get_object',
        Params={
            'Bucket': self.avatar_bucket.name,
            'Key': key.key,
        },
        ExpiresIn=0,
    )
    return public_url
Example #4
Source File: MLOps-BIA-GetStatus.py From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0 | 6 votes |
def read_job_info(event):
    tmp_file = tempfile.NamedTemporaryFile()

    objectKey = event['CodePipeline.job']['data']['inputArtifacts'][0]['location']['s3Location']['objectKey']
    print("[INFO]Object Key:", objectKey)

    bucketname = event['CodePipeline.job']['data']['inputArtifacts'][0]['location']['s3Location']['bucketName']
    print("[INFO]Bucket Name:", bucketname)

    artifactCredentials = event['CodePipeline.job']['data']['artifactCredentials']

    session = Session(aws_access_key_id=artifactCredentials['accessKeyId'],
                      aws_secret_access_key=artifactCredentials['secretAccessKey'],
                      aws_session_token=artifactCredentials['sessionToken'])

    s3 = session.resource('s3')
    obj = s3.Object(bucketname, objectKey)
    item = json.loads(obj.get()['Body'].read().decode('utf-8'))

    return item
Example #5
Source File: MLOps-BIA-GetStatus.py From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0 | 6 votes |
def write_job_info_s3(event, writeData):
    objectKey = event['CodePipeline.job']['data']['outputArtifacts'][0]['location']['s3Location']['objectKey']
    bucketname = event['CodePipeline.job']['data']['outputArtifacts'][0]['location']['s3Location']['bucketName']
    artifactCredentials = event['CodePipeline.job']['data']['artifactCredentials']
    artifactName = event['CodePipeline.job']['data']['outputArtifacts'][0]['name']

    json_data = json.dumps(writeData, indent=4, sort_keys=True, default=str)
    print(json_data)

    session = Session(aws_access_key_id=artifactCredentials['accessKeyId'],
                      aws_secret_access_key=artifactCredentials['secretAccessKey'],
                      aws_session_token=artifactCredentials['sessionToken'])

    s3 = session.resource("s3")
    #object = s3.Object(bucketname, objectKey + '/event.json')
    object = s3.Object(bucketname, objectKey)
    print(object)
    # SSEKMSKeyId is assumed to be defined at module scope in the original
    # source file; sibling examples read it from os.environ['SSEKMSKeyIdIn'].
    object.put(Body=json_data, ServerSideEncryption='aws:kms', SSEKMSKeyId=SSEKMSKeyId)

    print('[INFO]event written to s3')
Example #6
Source File: MLOps-BIA-TrainModel.py From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0 | 6 votes |
def write_job_info_s3(event):
    print(event)

    objectKey = event['CodePipeline.job']['data']['outputArtifacts'][0]['location']['s3Location']['objectKey']
    bucketname = event['CodePipeline.job']['data']['outputArtifacts'][0]['location']['s3Location']['bucketName']
    artifactCredentials = event['CodePipeline.job']['data']['artifactCredentials']
    artifactName = event['CodePipeline.job']['data']['outputArtifacts'][0]['name']

    # S3 Managed Key for Encryption
    S3SSEKey = os.environ['SSEKMSKeyIdIn']

    json_data = json.dumps(event)
    print(json_data)

    session = Session(aws_access_key_id=artifactCredentials['accessKeyId'],
                      aws_secret_access_key=artifactCredentials['secretAccessKey'],
                      aws_session_token=artifactCredentials['sessionToken'])

    s3 = session.resource("s3")
    object = s3.Object(bucketname, objectKey)
    print(object)
    object.put(Body=json_data, ServerSideEncryption='aws:kms', SSEKMSKeyId=S3SSEKey)

    print('[SUCCESS]Job Information Written to S3')
Example #7
Source File: MLOps-BIA-EvaluateModel.py From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0 | 6 votes |
def read_job_info(event):
    print("[DEBUG]EVENT IN:", event)

    bucketname = event['CodePipeline.job']['data']['inputArtifacts'][0]['location']['s3Location']['bucketName']
    print("[INFO]Previous Job Info Bucket:", bucketname)

    objectKey = event['CodePipeline.job']['data']['inputArtifacts'][0]['location']['s3Location']['objectKey']
    print("[INFO]Previous Job Info Object:", objectKey)

    artifactCredentials = event['CodePipeline.job']['data']['artifactCredentials']

    session = Session(aws_access_key_id=artifactCredentials['accessKeyId'],
                      aws_secret_access_key=artifactCredentials['secretAccessKey'],
                      aws_session_token=artifactCredentials['sessionToken'])

    s3 = session.resource('s3')
    obj = s3.Object(bucketname, objectKey)
    item = json.loads(obj.get()['Body'].read().decode('utf-8'))
    print("[INFO]Previous CodePipeline Job Info Successfully Read:", item)

    return item
Example #8
Source File: MLOps-BYO-TrainModel.py From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0 | 6 votes |
def write_job_info_s3(event):
    print(event)

    objectKey = event['CodePipeline.job']['data']['outputArtifacts'][0]['location']['s3Location']['objectKey']
    bucketname = event['CodePipeline.job']['data']['outputArtifacts'][0]['location']['s3Location']['bucketName']
    artifactCredentials = event['CodePipeline.job']['data']['artifactCredentials']
    artifactName = event['CodePipeline.job']['data']['outputArtifacts'][0]['name']

    # S3 Managed Key for Encryption
    S3SSEKey = os.environ['SSEKMSKeyIdIn']

    json_data = json.dumps(event)
    print(json_data)

    session = Session(aws_access_key_id=artifactCredentials['accessKeyId'],
                      aws_secret_access_key=artifactCredentials['secretAccessKey'],
                      aws_session_token=artifactCredentials['sessionToken'])

    s3 = session.resource("s3")
    object = s3.Object(bucketname, objectKey)
    print(object)
    object.put(Body=json_data, ServerSideEncryption='aws:kms', SSEKMSKeyId=S3SSEKey)

    print('[SUCCESS]Job Information Written to S3')
Example #9
Source File: MLOps-BYO-EvaluateModel.py From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0 | 6 votes |
def read_job_info(event):
    objectKey = event['CodePipeline.job']['data']['inputArtifacts'][0]['location']['s3Location']['objectKey']
    bucketname = event['CodePipeline.job']['data']['inputArtifacts'][0]['location']['s3Location']['bucketName']
    artifactCredentials = event['CodePipeline.job']['data']['artifactCredentials']

    session = Session(aws_access_key_id=artifactCredentials['accessKeyId'],
                      aws_secret_access_key=artifactCredentials['secretAccessKey'],
                      aws_session_token=artifactCredentials['sessionToken'])

    s3 = session.resource('s3')
    obj = s3.Object(bucketname, objectKey)
    item = json.loads(obj.get()['Body'].read().decode('utf-8'))
    print("[INFO]Previous CodePipeline Job Info Successfully Read:", item)

    return item
Example #10
Source File: MLOps-BYO-GetStatus.py From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0 | 6 votes |
def read_job_info(event):
    tmp_file = tempfile.NamedTemporaryFile()

    objectKey = event['CodePipeline.job']['data']['inputArtifacts'][0]['location']['s3Location']['objectKey']
    print("[INFO]Object Key:", objectKey)

    bucketname = event['CodePipeline.job']['data']['inputArtifacts'][0]['location']['s3Location']['bucketName']
    print("[INFO]Bucket Name:", bucketname)

    artifactCredentials = event['CodePipeline.job']['data']['artifactCredentials']

    session = Session(aws_access_key_id=artifactCredentials['accessKeyId'],
                      aws_secret_access_key=artifactCredentials['secretAccessKey'],
                      aws_session_token=artifactCredentials['sessionToken'])

    s3 = session.resource('s3')
    obj = s3.Object(bucketname, objectKey)
    item = json.loads(obj.get()['Body'].read().decode('utf-8'))

    return item
Example #11
Source File: MLOps-BYO-GetStatus.py From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0 | 6 votes |
def write_job_info_s3(event, writeData):
    objectKey = event['CodePipeline.job']['data']['outputArtifacts'][0]['location']['s3Location']['objectKey']
    bucketname = event['CodePipeline.job']['data']['outputArtifacts'][0]['location']['s3Location']['bucketName']
    artifactCredentials = event['CodePipeline.job']['data']['artifactCredentials']
    artifactName = event['CodePipeline.job']['data']['outputArtifacts'][0]['name']

    json_data = json.dumps(writeData, indent=4, sort_keys=True, default=str)
    print(json_data)

    session = Session(aws_access_key_id=artifactCredentials['accessKeyId'],
                      aws_secret_access_key=artifactCredentials['secretAccessKey'],
                      aws_session_token=artifactCredentials['sessionToken'])

    s3 = session.resource("s3")
    #object = s3.Object(bucketname, objectKey + '/event.json')
    object = s3.Object(bucketname, objectKey)
    print(object)
    # SSEKMSKeyId is assumed to be defined at module scope in the original
    # source file; sibling examples read it from os.environ['SSEKMSKeyIdIn'].
    object.put(Body=json_data, ServerSideEncryption='aws:kms', SSEKMSKeyId=SSEKMSKeyId)

    print('[INFO]event written to s3')
Example #12
Source File: MLOps-BIA-GetStatus.py From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0 | 6 votes |
def read_job_info(event):
    tmp_file = tempfile.NamedTemporaryFile()

    objectKey = event['CodePipeline.job']['data']['inputArtifacts'][0]['location']['s3Location']['objectKey']
    print("[INFO]Object Key:", objectKey)

    bucketname = event['CodePipeline.job']['data']['inputArtifacts'][0]['location']['s3Location']['bucketName']
    print("[INFO]Bucket Name:", bucketname)

    artifactCredentials = event['CodePipeline.job']['data']['artifactCredentials']

    session = Session(aws_access_key_id=artifactCredentials['accessKeyId'],
                      aws_secret_access_key=artifactCredentials['secretAccessKey'],
                      aws_session_token=artifactCredentials['sessionToken'])

    s3 = session.resource('s3')
    obj = s3.Object(bucketname, objectKey)
    item = json.loads(obj.get()['Body'].read().decode('utf-8'))

    return item
Example #13
Source File: MLOps-BIA-GetStatus.py From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0 | 6 votes |
def write_job_info_s3(event, writeData):
    objectKey = event['CodePipeline.job']['data']['outputArtifacts'][0]['location']['s3Location']['objectKey']
    bucketname = event['CodePipeline.job']['data']['outputArtifacts'][0]['location']['s3Location']['bucketName']
    artifactCredentials = event['CodePipeline.job']['data']['artifactCredentials']
    artifactName = event['CodePipeline.job']['data']['outputArtifacts'][0]['name']

    json_data = json.dumps(writeData, indent=4, sort_keys=True, default=str)
    print(json_data)

    session = Session(aws_access_key_id=artifactCredentials['accessKeyId'],
                      aws_secret_access_key=artifactCredentials['secretAccessKey'],
                      aws_session_token=artifactCredentials['sessionToken'])

    s3 = session.resource("s3")
    # object = s3.Object(bucketname, objectKey + '/event.json')
    object = s3.Object(bucketname, objectKey)
    print(object)
    # SSEKMSKeyId is assumed to be defined at module scope in the original
    # source file; sibling examples read it from os.environ['SSEKMSKeyIdIn'].
    object.put(Body=json_data, ServerSideEncryption='aws:kms', SSEKMSKeyId=SSEKMSKeyId)

    print('[INFO]event written to s3')
Example #14
Source File: MLOps-BIA-DeployModel.py From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0 | 6 votes |
def read_job_info(event):
    tmp_file = tempfile.NamedTemporaryFile()

    objectKey = event['CodePipeline.job']['data']['inputArtifacts'][0]['location']['s3Location']['objectKey']
    print("[INFO]Object:", objectKey)

    bucketname = event['CodePipeline.job']['data']['inputArtifacts'][0]['location']['s3Location']['bucketName']
    print("[INFO]Bucket:", bucketname)

    artifactCredentials = event['CodePipeline.job']['data']['artifactCredentials']

    session = Session(aws_access_key_id=artifactCredentials['accessKeyId'],
                      aws_secret_access_key=artifactCredentials['secretAccessKey'],
                      aws_session_token=artifactCredentials['sessionToken'])

    s3 = session.resource('s3')
    obj = s3.Object(bucketname, objectKey)
    item = json.loads(obj.get()['Body'].read().decode('utf-8'))
    print("Item:", item)

    return item
Example #15
Source File: MLOps-BIA-EvaluateModel.py From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0 | 6 votes |
def write_job_info_s3(event):
    KMSKeyIdSSEIn = os.environ['SSEKMSKeyIdIn']

    objectKey = event['CodePipeline.job']['data']['outputArtifacts'][0]['location']['s3Location']['objectKey']
    bucketname = event['CodePipeline.job']['data']['outputArtifacts'][0]['location']['s3Location']['bucketName']
    artifactCredentials = event['CodePipeline.job']['data']['artifactCredentials']
    artifactName = event['CodePipeline.job']['data']['outputArtifacts'][0]['name']

    json_data = json.dumps(event)
    print(json_data)

    session = Session(aws_access_key_id=artifactCredentials['accessKeyId'],
                      aws_secret_access_key=artifactCredentials['secretAccessKey'],
                      aws_session_token=artifactCredentials['sessionToken'])

    s3 = session.resource("s3")
    # The first assignment below is dead code: it is overwritten on the next
    # line. Sibling examples in this repository keep it commented out.
    object = s3.Object(bucketname, objectKey + '/event.json')
    object = s3.Object(bucketname, objectKey)
    print(object)
    object.put(Body=json_data, ServerSideEncryption='aws:kms', SSEKMSKeyId=KMSKeyIdSSEIn)
Example #16
Source File: MLOps-BIA-EvaluateModel.py From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0 | 6 votes |
def read_job_info(event):
    print("[DEBUG]EVENT IN:", event)

    bucketname = event['CodePipeline.job']['data']['inputArtifacts'][0]['location']['s3Location']['bucketName']
    print("[INFO]Previous Job Info Bucket:", bucketname)

    objectKey = event['CodePipeline.job']['data']['inputArtifacts'][0]['location']['s3Location']['objectKey']
    print("[INFO]Previous Job Info Object:", objectKey)

    artifactCredentials = event['CodePipeline.job']['data']['artifactCredentials']

    session = Session(aws_access_key_id=artifactCredentials['accessKeyId'],
                      aws_secret_access_key=artifactCredentials['secretAccessKey'],
                      aws_session_token=artifactCredentials['sessionToken'])

    s3 = session.resource('s3')
    obj = s3.Object(bucketname, objectKey)
    item = json.loads(obj.get()['Body'].read().decode('utf-8'))
    print("[INFO]Previous CodePipeline Job Info Successfully Read:", item)

    return item
Example #17
Source File: metric.py From cloudwatch-fluent-metrics with Apache License 2.0 | 6 votes |
def __init__(self, client=None, **kwargs):
    self.dimensions = []
    self.timers = {}
    self.dimension_stack = []
    self.storage_resolution = 60
    self.use_stream_id = kwargs.get('UseStreamId', True)
    if self.use_stream_id:
        self.stream_id = str(uuid.uuid4())
        self.with_dimension('MetricStreamId', self.stream_id)
    else:
        self.stream_id = None
    if client:
        self.client = client
    else:
        profile = kwargs.get('Profile')
        if profile:
            session = boto3.session.Session(profile_name=profile)
            self.client = session.client('cloudwatch')
        else:
            self.client = boto3.client('cloudwatch')
Example #18
Source File: cfn_validate_lambda.py From automating-governance-sample with Apache License 2.0 | 6 votes |
def setup_s3_client(job_data):
    """Creates an S3 client

    Uses the credentials passed in the event by CodePipeline. These
    credentials can be used to access the artifact bucket.

    Args:
        job_data: The job data structure

    Returns:
        An S3 client with the appropriate credentials

    """
    key_id = job_data['artifactCredentials']['accessKeyId']
    key_secret = job_data['artifactCredentials']['secretAccessKey']
    session_token = job_data['artifactCredentials']['sessionToken']

    session = Session(
        aws_access_key_id=key_id,
        aws_secret_access_key=key_secret,
        aws_session_token=session_token)
    return session.client('s3', config=botocore.client.Config(signature_version='s3v4'))
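
A sketch of how a helper like setup_s3_client is typically invoked from a CodePipeline-triggered Lambda handler. Only the event shape ('CodePipeline.job' -> 'data' -> 'artifactCredentials') comes from these examples; the handler itself is an assumed harness for illustration.

def lambda_handler(event, context):
    # Hypothetical handler: pull the job data out of the CodePipeline event
    # and build an S3 client scoped to the artifact bucket.
    job_data = event['CodePipeline.job']['data']
    s3 = setup_s3_client(job_data)
    # ... read input artifacts / write output artifacts with `s3` ...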
Example #19
Source File: schedule.py From aws-batch-example with MIT License | 5 votes |
def __init__(self, profile=None, region='us-east-1'):
    """Schedule(profile=None, region='us-east-1')
    Creates the AWS boto3 client"""
    session = Session(profile_name=profile, region_name=region)
    self.client = session.client('batch')
Example #20
Source File: clear_lambda_storage.py From clear-lambda-storage with MIT License | 5 votes |
def list_available_lambda_regions():
    """
    Enumerates list of all Lambda regions
    :return: list of regions
    """
    session = Session()
    return session.get_available_regions('lambda')
Example #21
Source File: upload.py From zulip with Apache License 2.0 | 5 votes |
def get_bucket(session: Session, bucket_name: str) -> ServiceResource:
    # See https://github.com/python/typeshed/issues/2706
    # for why this return type is a `ServiceResource`.
    bucket = session.resource('s3').Bucket(bucket_name)
    return bucket
Example #22
Source File: upload.py From zulip with Apache License 2.0 | 5 votes |
def __init__(self) -> None:
    self.session = boto3.Session(settings.S3_KEY, settings.S3_SECRET_KEY)

    self.avatar_bucket = get_bucket(self.session, settings.S3_AVATAR_BUCKET)
    network_location = urllib.parse.urlparse(
        self.avatar_bucket.meta.client.meta.endpoint_url).netloc
    self.avatar_bucket_url = f"https://{self.avatar_bucket.name}.{network_location}"

    self.uploads_bucket = get_bucket(self.session, settings.S3_AUTH_UPLOADS_BUCKET)
Example #23
Source File: MLOps-BIA-DeployModel.py From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0 | 5 votes |
def read_job_info(event):
    tmp_file = tempfile.NamedTemporaryFile()

    #objectKey = event['CodePipeline.job']['data']['inputArtifacts'][0]['location']['s3Location']['objectKey']
    objectKey = event['CodePipeline.job']['data']['inputArtifacts'][0]['location']['s3Location']['objectKey']
    print("[INFO]Object:", objectKey)

    bucketname = event['CodePipeline.job']['data']['inputArtifacts'][0]['location']['s3Location']['bucketName']
    print("[INFO]Bucket:", bucketname)

    artifactCredentials = event['CodePipeline.job']['data']['artifactCredentials']

    session = Session(aws_access_key_id=artifactCredentials['accessKeyId'],
                      aws_secret_access_key=artifactCredentials['secretAccessKey'],
                      aws_session_token=artifactCredentials['sessionToken'])

    s3 = session.resource('s3')
    obj = s3.Object(bucketname, objectKey)
    item = json.loads(obj.get()['Body'].read().decode('utf-8'))
    print("Item:", item)

    return item
Example #24
Source File: MLOps-BIA-DeployModel.py From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0 | 5 votes |
def write_job_info_s3(event):
    print(event)

    objectKey = event['CodePipeline.job']['data']['outputArtifacts'][0]['location']['s3Location']['objectKey']
    bucketname = event['CodePipeline.job']['data']['outputArtifacts'][0]['location']['s3Location']['bucketName']
    artifactCredentials = event['CodePipeline.job']['data']['artifactCredentials']
    artifactName = event['CodePipeline.job']['data']['outputArtifacts'][0]['name']

    # S3 Managed Key for Encryption
    S3SSEKey = os.environ['SSEKMSKeyIdIn']

    json_data = json.dumps(event)
    print(json_data)

    session = Session(aws_access_key_id=artifactCredentials['accessKeyId'],
                      aws_secret_access_key=artifactCredentials['secretAccessKey'],
                      aws_session_token=artifactCredentials['sessionToken'])

    s3 = session.resource("s3")
    #object = s3.Object(bucketname, objectKey + '/event.json')
    object = s3.Object(bucketname, objectKey)
    print(object)
    object.put(Body=json_data, ServerSideEncryption='aws:kms', SSEKMSKeyId=S3SSEKey)

    print('event written to s3')
Example #25
Source File: MLOps-BYO-EvaluateModel.py From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0 | 5 votes |
def write_job_info_s3(event):
    objectKey = event['CodePipeline.job']['data']['outputArtifacts'][0]['location']['s3Location']['objectKey']
    bucketname = event['CodePipeline.job']['data']['outputArtifacts'][0]['location']['s3Location']['bucketName']
    artifactCredentials = event['CodePipeline.job']['data']['artifactCredentials']
    artifactName = event['CodePipeline.job']['data']['outputArtifacts'][0]['name']

    json_data = json.dumps(event)
    print(json_data)

    # S3 Managed Key for Encryption
    S3SSEKey = os.environ['SSEKMSKeyIdIn']

    session = Session(aws_access_key_id=artifactCredentials['accessKeyId'],
                      aws_secret_access_key=artifactCredentials['secretAccessKey'],
                      aws_session_token=artifactCredentials['sessionToken'])

    s3 = session.resource("s3")
    # The first assignment below is dead code: it is overwritten on the next
    # line. Sibling examples in this repository keep it commented out.
    object = s3.Object(bucketname, objectKey + '/event.json')
    object = s3.Object(bucketname, objectKey)
    print(object)
    object.put(Body=json_data, ServerSideEncryption='aws:kms', SSEKMSKeyId=S3SSEKey)
Example #26
Source File: MLOps-BYO-DeployModel.py From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0 | 5 votes |
def read_job_info(event):
    tmp_file = tempfile.NamedTemporaryFile()

    #objectKey = event['CodePipeline.job']['data']['inputArtifacts'][0]['location']['s3Location']['objectKey']
    objectKey = event['CodePipeline.job']['data']['inputArtifacts'][0]['location']['s3Location']['objectKey']
    print("[INFO]Object:", objectKey)

    bucketname = event['CodePipeline.job']['data']['inputArtifacts'][0]['location']['s3Location']['bucketName']
    print("[INFO]Bucket:", bucketname)

    artifactCredentials = event['CodePipeline.job']['data']['artifactCredentials']

    session = Session(aws_access_key_id=artifactCredentials['accessKeyId'],
                      aws_secret_access_key=artifactCredentials['secretAccessKey'],
                      aws_session_token=artifactCredentials['sessionToken'])

    s3 = session.resource('s3')
    obj = s3.Object(bucketname, objectKey)
    item = json.loads(obj.get()['Body'].read().decode('utf-8'))
    print("Item:", item)

    return item
Example #27
Source File: kmstool.py From kmstool with Apache License 2.0 | 5 votes |
def __init__(self, key_id=None, key_spec='AES_256', temp_dir='/var/tmp/kmstool',
             profile=None, region=None, key_length=32):
    self.key_id = key_id
    self.key_spec = key_spec
    self.key_length = key_length
    self.bs = AES.block_size
    self.temp_dir = '{}/{}/'.format(temp_dir.rstrip('/\\'), uuid.uuid4())
    self.profile = profile
    self.region = region
    try:
        makedirs(self.temp_dir)
    except:
        self.rm_rf(self.temp_dir)
        makedirs(self.temp_dir)
    self.enc_file = join(self.temp_dir, 'file.enc')
    self.cipher_file = join(self.temp_dir, 'key.enc')
    self.session = self.connect()
    self.kms = self.session.client('kms')
    self.s3 = self.session.client('s3')

# walk a directory structure and remove everything
Example #28
Source File: kmstool.py From kmstool with Apache License 2.0 | 5 votes |
def rm_rf(self, path):
    for root, dirs, files in walk(path, topdown=False):
        # if dir is empty skip files
        for name in files:
            remove(join(root, name))
        for name in dirs:
            rmdir(join(root, name))
    rmdir(path)

# create a session with profile optional region
Example #29
Source File: kmstool.py From kmstool with Apache License 2.0 | 5 votes |
def connect(self):
    if self.region is None:
        session = Session(profile_name=self.profile)
    else:
        session = Session(profile_name=self.profile, region_name=self.region)
    return session

# make a big messy md5
Example #30
Source File: index.py From aws-lambda-codepipeline-site-generator-hugo with Apache License 2.0 | 5 votes |
def setup(event):
    # Extract attributes passed in by CodePipeline
    job_id = event['CodePipeline.job']['id']
    job_data = event['CodePipeline.job']['data']
    input_artifact = job_data['inputArtifacts'][0]
    config = job_data['actionConfiguration']['configuration']
    credentials = job_data['artifactCredentials']
    from_bucket = input_artifact['location']['s3Location']['bucketName']
    from_key = input_artifact['location']['s3Location']['objectKey']
    from_revision = input_artifact['revision']
    output_artifact = job_data['outputArtifacts'][0]
    to_bucket = output_artifact['location']['s3Location']['bucketName']
    to_key = output_artifact['location']['s3Location']['objectKey']
    user_parameters = config['UserParameters']

    # Temporary credentials to access CodePipeline artifacts in S3
    key_id = credentials['accessKeyId']
    key_secret = credentials['secretAccessKey']
    session_token = credentials['sessionToken']

    session = Session(aws_access_key_id=key_id,
                      aws_secret_access_key=key_secret,
                      aws_session_token=session_token)
    s3 = session.client('s3', config=botocore.client.Config(signature_version='s3v4'))

    return (job_id, s3, from_bucket, from_key, from_revision,
            to_bucket, to_key, user_parameters)