Python boto.s3.connection.S3Connection() Examples

The following are 29 code examples of boto.s3.connection.S3Connection(). You can go to the original project or source file by following the links above each example. You may also want to check out all of the available functions and classes of the boto.s3.connection module.
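As a quick orientation before the examples, here is a minimal sketch of typical S3Connection usage; the bucket and key names are placeholders rather than values taken from any of the projects below.

from boto.s3.connection import S3Connection

conn = S3Connection()  # credentials are read from the environment or ~/.boto
bucket = conn.get_bucket('example-bucket')        # placeholder bucket name
key = bucket.get_key('path/to/object.txt')        # placeholder key name
if key is not None:
    print(key.get_contents_as_string())
conn.close()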
Example #1
Source File: test_bwa_alignment.py    From toil-scripts with Apache License 2.0
def test_bwa():
    work_dir = tempfile.mkdtemp()
    create_config(work_dir)
    create_manifest(work_dir)
    # Call Pipeline
    try:
        subprocess.check_call(['toil-bwa', 'run',
                               os.path.join(work_dir, 'jstore'),
                               '--manifest', os.path.join(work_dir, 'manifest.txt'),
                               '--config', os.path.join(work_dir, 'config.txt'),
                               '--retryCount', '1'])
    finally:
        shutil.rmtree(work_dir)
        conn = S3Connection()
        b = Bucket(conn, 'cgl-driver-projects')
        k = Key(b)
        k.key = 'test/ci/ci_test.bam'
        k.delete() 
Example #2
Source File: write_to_s3.py    From brain_segmentation with MIT License
def files_to_s3(files, bucket_name):
    '''
    INPUT   (1) list 'files': all files to upload to s3 bucket
            (2) string 'bucket_name': name of bucket to dump into
    writes all files to s3 bucket using threads
    '''
    AWS_KEY = os.environ['AWS_ACCESS_KEY_ID']
    AWS_SECRET = os.environ['AWS_SECRET_ACCESS_KEY']


    def upload(myfile):
        conn = S3Connection(aws_access_key_id = AWS_KEY, aws_secret_access_key = AWS_SECRET)
        bucket = conn.get_bucket(bucket_name)
        key = bucket.new_key(myfile).set_contents_from_filename(myfile) # , cb=percent_cb, num_cb=1)
        return myfile

    for fname in files:
        t = threading.Thread(target=upload, args=(fname,))
        t.start()  # fire-and-forget: the threads are never joined
Example #3
Source File: s3cache.py    From funfuzz with Mozilla Public License 2.0
def connect(self):  # pylint: disable=missing-return-doc,missing-return-type-doc
        """Connect to the S3 bucket, but only on non-WSL Linux systems."""
        if not (platform.system() == "Linux" and "Microsoft" not in platform.release()):
            return False

        EC2_PROFILE = None if isEC2VM() else "laniakea"  # pylint: disable=invalid-name
        try:
            conn = S3Connection(profile_name=EC2_PROFILE)
            self.bucket = conn.get_bucket(self.bucket_name)
            return True
        except boto.provider.ProfileNotFoundError:
            print(f'Unable to connect via boto using profile name "{EC2_PROFILE}" in ~/.boto')
            return False
        except boto.exception.S3ResponseError:
            print(f'Unable to connect to the following bucket "{self.bucket_name}", please check your credentials.')
            return False 
Example #4
Source File: s3.py    From learn_python3_spider with MIT License
def _get_boto_connection():
    from boto.s3.connection import S3Connection

    class _v19_S3Connection(S3Connection):
        """A dummy S3Connection wrapper that doesn't do any synchronous download"""
        def _mexe(self, method, bucket, key, headers, *args, **kwargs):
            return headers

    class _v20_S3Connection(S3Connection):
        """A dummy S3Connection wrapper that doesn't do any synchronous download"""
        def _mexe(self, http_request, *args, **kwargs):
            http_request.authorize(connection=self)
            return http_request.headers

    try:
        import boto.auth
    except ImportError:
        _S3Connection = _v19_S3Connection
    else:
        _S3Connection = _v20_S3Connection

    return _S3Connection 
Example #5
Source File: rnaseq_unc_pipeline.py    From toil-scripts with Apache License 2.0
def download_from_s3_url(file_path, url):
    from urlparse import urlparse
    from boto.s3.connection import S3Connection
    s3 = S3Connection()
    try:
        parsed_url = urlparse(url)
        if not parsed_url.netloc or not parsed_url.path.startswith('/'):
            raise RuntimeError("An S3 URL must be of the form s3:/BUCKET/ or "
                               "s3://BUCKET/KEY. '%s' is not." % url)
        bucket = s3.get_bucket(parsed_url.netloc)
        key = bucket.get_key(parsed_url.path[1:])
        key.get_contents_to_filename(file_path)
    finally:
        s3.close()


# Job Functions 
Example #6
Source File: test_exome.py    From toil-scripts with Apache License 2.0
def test_exome():
    workdir = tempfile.mkdtemp()
    create_config_and_manifest(workdir)
    # Call Pipeline
    try:
        base_command = ['toil-exome', 'run',
                        '--config', os.path.join(workdir, 'config-toil-exome.yaml'),
                        os.path.join(workdir, 'jstore')]
        # Run with manifest
        subprocess.check_call(base_command + ['--manifest', os.path.join(workdir, 'manifest-toil-exome.tsv')])
    finally:
        shutil.rmtree(workdir)
        conn = S3Connection()
        b = Bucket(conn, 'cgl-driver-projects')
        k = Key(b)
        k.key = 'test/ci/exome-ci-test.tar.gz'
        k.delete() 
Example #7
Source File: distribution.py    From aws-extender with MIT License
def _get_bucket(self):
        if isinstance(self.config.origin, S3Origin):
            if not self._bucket:
                bucket_dns_name = self.config.origin.dns_name
                bucket_name = bucket_dns_name.replace('.s3.amazonaws.com', '')
                from boto.s3.connection import S3Connection
                s3 = S3Connection(self.connection.aws_access_key_id,
                                  self.connection.aws_secret_access_key,
                                  proxy=self.connection.proxy,
                                  proxy_port=self.connection.proxy_port,
                                  proxy_user=self.connection.proxy_user,
                                  proxy_pass=self.connection.proxy_pass)
                self._bucket = s3.get_bucket(bucket_name)
                self._bucket.distribution = self
                self._bucket.set_key_class(self._object_class)
            return self._bucket
        else:
            raise NotImplementedError('Unable to get_objects on CustomOrigin') 
Example #8
Source File: text_aggregation.py    From aggregation with Apache License 2.0
def __s3_connect__(self):
        """
        connect to s3 - currently return both S3Connection and client because they seem
        to do offer different functionality - uploading files vs. generating signed urls
        seems pretty silly that this is the case - so feel free to fix it
        :return:
        """
        # Adam has created keys which always work - had trouble with sending out emails otherwise
        param_file = open("/app/config/aws.yml","rb")
        param_details = yaml.load(param_file)

        id_ = param_details["aws_access_key_id"]
        key = param_details["aws_secret_access_key"]

        conn = S3Connection(id_,key)

        # s3 = boto3.resource("s3",aws_access_key_id=id_,aws_secret_access_key=key)

        client = boto3.client(
            's3',
            aws_access_key_id=id_,
            aws_secret_access_key=key,
        )

        return conn,client 
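As the docstring notes, the boto3 client above is kept around mainly for generating signed URLs. A rough sketch of that use, with placeholder bucket and key names, might look like:

import boto3

client = boto3.client('s3')  # or pass aws_access_key_id / aws_secret_access_key as above
url = client.generate_presigned_url(
    'get_object',
    Params={'Bucket': 'example-bucket', 'Key': 'path/to/object.txt'},  # placeholders
    ExpiresIn=3600,  # URL validity in seconds
)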
Example #9
Source File: compress_css_js_files.py    From django-webpacker with MIT License
def upload_to_s3(css_file):
    bucket_name = settings.AWS_BUCKET_NAME
    conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)

    folder = 'webpack_bundles/'
    bucket = conn.get_bucket(bucket_name=bucket_name)

    filename = css_file.split('/')[-1]
    with open(css_file, 'r') as file_obj:
        content = file_obj.read()

    key = folder + filename
    mime = mimetypes.guess_type(filename)[0]
    k = Key(bucket)
    k.key = key  # folder + filename
    k.set_metadata("Content-Type", mime)
    k.set_contents_from_string(content)
    public_read = True
    if public_read:
        k.set_acl("public-read") 
Example #10
Source File: api_views.py    From open-humans with MIT License
def post(self, request):
        super().post(request)

        key = get_upload_path(self.project.id_label, self.form.cleaned_data["filename"])

        datafile = ProjectDataFile(
            user=self.project_member.member.user,
            file=key,
            metadata=self.form.cleaned_data["metadata"],
            direct_sharing_project=self.project,
        )

        datafile.save()
        datafile.datatypes.set(self.form.cleaned_data["datatypes"])

        s3 = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)

        url = s3.generate_url(
            expires_in=settings.INCOMPLETE_FILE_EXPIRATION_HOURS * 60 * 60,
            method="PUT",
            bucket=settings.AWS_STORAGE_BUCKET_NAME,
            key=key,
        )

        return Response({"id": datafile.id, "url": url}, status=status.HTTP_201_CREATED) 
Example #11
Source File: flask_admin_s3_upload.py    From flask-admin-s3-upload with Apache License 2.0
def _delete_file(self, filename, obj):
        storage_type = getattr(obj, self.storage_type_field, '')
        bucket_name = getattr(obj, self.bucket_name_field, '')

        if not (storage_type and bucket_name):
            return super(S3FileUploadField, self)._delete_file(filename)

        if storage_type != 's3':
            raise ValueError(
                'Storage type "%s" is invalid, the only supported storage type'
                ' (apart from default local storage) is s3.' % storage_type)

        conn = S3Connection(self.access_key_id, self.access_key_secret)
        bucket = conn.get_bucket(bucket_name)

        path = self._get_s3_path(filename)
        k = Key(bucket)
        k.key = path

        try:
            bucket.delete_key(k)
        except S3ResponseError:
            pass 
Example #12
Source File: flask_admin_s3_upload.py    From flask-admin-s3-upload with Apache License 2.0
def _delete_thumbnail(self, filename, storage_type, bucket_name):
        if not (storage_type and bucket_name):
            self._delete_thumbnail_local(filename)
            return

        if storage_type != 's3':
            raise ValueError(
                'Storage type "%s" is invalid, the only supported storage type'
                ' (apart from default local storage) is s3.' % storage_type)

        conn = S3Connection(self.access_key_id, self.access_key_secret)
        bucket = conn.get_bucket(bucket_name)

        path = self._get_s3_path(self.thumbnail_fn(filename))
        k = Key(bucket)
        k.key = path

        try:
            bucket.delete_key(k)
        except S3ResponseError:
            pass

    # Saving 
Example #13
Source File: blockstore.py    From sync-engine with GNU Affero General Public License v3.0
def _save_to_s3_bucket(data_sha256, bucket_name, data):
    assert 'AWS_ACCESS_KEY_ID' in config, 'Need AWS key!'
    assert 'AWS_SECRET_ACCESS_KEY' in config, 'Need AWS secret!'
    start = time.time()

    # Boto pools connections at the class level
    conn = S3Connection(config.get('AWS_ACCESS_KEY_ID'),
                        config.get('AWS_SECRET_ACCESS_KEY'))
    bucket = conn.get_bucket(bucket_name, validate=False)

    # See if it already exists; if so, don't recreate.
    key = bucket.get_key(data_sha256)
    if key:
        return

    key = Key(bucket)
    key.key = data_sha256
    key.set_contents_from_string(data)

    end = time.time()
    latency_millis = (end - start) * 1000
    statsd_client.timing('s3_blockstore.save_latency', latency_millis) 
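Because the object key is the SHA-256 digest of the data, a caller would normally compute the digest before storing. A short sketch, assuming a hex digest and a placeholder bucket name:

import hashlib

data = b'example blob contents'
data_sha256 = hashlib.sha256(data).hexdigest()  # assumed hex-digest convention
_save_to_s3_bucket(data_sha256, 'example-bucket', data)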
Example #14
Source File: _boto.py    From storefact with BSD 3-Clause "New" or "Revised" License
def _get_s3bucket(host, bucket, access_key, secret_key, force_bucket_suffix=True, create_if_missing=True):
    from boto.s3.connection import S3Connection, OrdinaryCallingFormat, S3ResponseError

    s3con = S3Connection(aws_access_key_id=access_key,
                         aws_secret_access_key=secret_key,
                         host=host, is_secure=False,
                         calling_format=OrdinaryCallingFormat())
    # add access key prefix to bucket name, unless explicitly prohibited
    if force_bucket_suffix and not bucket.lower().endswith('-' + access_key.lower()):
        bucket = bucket + '-' + access_key.lower()
    try:
        return s3con.get_bucket(bucket)
    except S3ResponseError as ex:
        if ex.status == 404:
            if create_if_missing:
                return s3con.create_bucket(bucket)
            else:
                raise IOError("Bucket {} does not exist".format(bucket))
        raise 
Example #15
Source File: backup_ugc.py    From canvas with BSD 3-Clause "New" or "Revised" License
def store_results(start_time, end_time, stored, skipped, failed, failed_list):
    keyname = 'ugc-backup-results-{0}'.format(datestr())
    conn = S3Connection()
    bucket = conn.get_bucket(results_bucket_name)
    key = bucket.new_key(keyname)
    backup_size_str = get_dir_size(destination_directory())
    report = {
        'start_time': start_time,
        'end_time': end_time,
        'stored': stored,
        'skipped': skipped,
        'failed': failed,
        'size': backup_size_str,
        'failed_list': failed_list,
    }
    key.set_contents_from_string(yaml.dump(report)) 
Example #16
Source File: __init__.py    From canvas with BSD 3-Clause "New" or "Revised" License
def connect_walrus(host, aws_access_key_id=None, aws_secret_access_key=None,
                   port=8773, path='/services/Walrus', is_secure=False,
                   **kwargs):
    """
    Connect to a Walrus service.

    :type host: string
    :param host: the host name or ip address of the Walrus server

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.s3.connection.S3Connection`
    :return: A connection to Walrus
    """
    from boto.s3.connection import S3Connection
    from boto.s3.connection import OrdinaryCallingFormat

    return S3Connection(aws_access_key_id, aws_secret_access_key,
                        host=host, port=port, path=path,
                        calling_format=OrdinaryCallingFormat(),
                        is_secure=is_secure, **kwargs) 
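For reference, a call to this helper might look like the following; the host name and credentials are placeholders:

walrus = connect_walrus('walrus.example.org',
                        aws_access_key_id='WALRUS_ACCESS_KEY',       # placeholder
                        aws_secret_access_key='WALRUS_SECRET_KEY')   # placeholder
bucket = walrus.get_bucket('example-bucket')                         # placeholder bucket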
Example #17
Source File: s3boto.py    From astrobin with GNU Affero General Public License v3.0
def __init__(self, bucket=STORAGE_BUCKET_NAME,
                       bucket_cname=STORAGE_BUCKET_CNAME,
                       access_key=None, secret_key=None, acl=DEFAULT_ACL,
                       headers=HEADERS, gzip=IS_GZIPPED,
                       gzip_content_types=GZIP_CONTENT_TYPES,
                       querystring_auth=QUERYSTRING_AUTH,
                       force_no_ssl=True):
        self.bucket_name = bucket
        self.bucket_cname = bucket_cname
        self.acl = acl
        self.headers = headers
        self.gzip = gzip
        self.gzip_content_types = gzip_content_types
        self.querystring_auth = querystring_auth
        self.force_no_ssl = force_no_ssl
        # This is called as chunks are uploaded to S3. Useful for getting
        # around limitations in eventlet for things like gunicorn.
        self.s3_callback_during_upload = None

        if not access_key and not secret_key:
            access_key, secret_key = self._get_access_keys()

        self.connection = S3Connection(access_key, secret_key) 
Example #18
Source File: find_bad.py    From label_generator with BSD 3-Clause "New" or "Revised" License
def run_s3(bucket_name, path, chunk, of):
    conn = S3Connection(config.access_key, config.secret_key, is_secure=False)
    bucket = conn.get_bucket(bucket_name)

    print >> sys.stderr, "Run {} of {}".format(chunk, of)

    start = time.time()

    for i, key in enumerate(bucket.list(path)):
        if i % 1000 == 0:
            so_far = time.time() - start
            logging.info("Processing number {} after {} seconds".format(i, so_far))

        if i % of == chunk:
            if key.name.strip('/') == path.strip('/'):
                # ignore the directory itself
                continue
            if os.path.splitext(key.name)[1] == '.json':
                if check(key.get_contents_as_string()):
                    groups = PATTERN.search(os.path.basename(key.name))
                    if groups:
                        print(groups.group(1))
            else:
                logging.error("Not a json file {}".format(key.name)) 
Example #19
Source File: maxswe.py    From FloodMapsWorkshop with Apache License 2.0
def CopyToS3( s3_folder, file_list ):
	aws_access_key 			= os.environ.get('AWS_ACCESSKEYID')
	aws_secret_access_key 	= os.environ.get('AWS_SECRETACCESSKEY')
	
	conn 		= S3Connection(aws_access_key, aws_secret_access_key)
	
	mybucket 	= conn.get_bucket(config.BUCKET)
	k 			= Key(mybucket)

	for f in file_list:
		fname	= os.path.basename(f)
		k.key 	= os.path.join(s3_folder, fname)
	
		# Check if it already exists
		possible_key = mybucket.get_key(k.key)
	
		if force or not possible_key:
			if verbose:
				print "storing to s3:", mybucket, k.key
	
			k.set_contents_from_filename(f)
			mybucket.set_acl('public-read', k.key ) 
Example #20
Source File: s3.py    From FloodMapsWorkshop with Apache License 2.0
def CopyToS3( s3_bucket, s3_folder, file_list, force, verbose ):
	aws_access_key 			= os.environ.get('AWS_ACCESSKEYID')
	aws_secret_access_key 	= os.environ.get('AWS_SECRETACCESSKEY')
	
	conn 		= S3Connection(aws_access_key, aws_secret_access_key)
	
	mybucket 	= conn.get_bucket(s3_bucket)
	k 			= Key(mybucket)

	for f in file_list:
		fname	= os.path.basename(f)
		k.key 	= os.path.join(s3_folder, fname)
	
		# Check if it already exists
		possible_key = mybucket.get_key(k.key)
	
		if force or not possible_key:
			if verbose:
				print "storing to s3:", mybucket, k.key
	
			k.set_contents_from_filename(f)
			mybucket.set_acl('public-read', k.key ) 
Example #21
Source File: awss3.py    From depot with MIT License
def __init__(self, access_key_id, secret_access_key, bucket=None, host=None,
                 policy=None, encrypt_key=False, prefix=''):
        policy = policy or CANNED_ACL_PUBLIC_READ
        assert policy in [CANNED_ACL_PUBLIC_READ, CANNED_ACL_PRIVATE], (
            "Key policy must be %s or %s" % (CANNED_ACL_PUBLIC_READ, CANNED_ACL_PRIVATE))
        self._policy = policy or CANNED_ACL_PUBLIC_READ
        self._encrypt_key = encrypt_key

        if bucket is None:
            bucket = 'filedepot-%s' % (access_key_id.lower(),)

        kw = {}
        if host is not None:
            kw['host'] = host
        self._conn = S3Connection(access_key_id, secret_access_key, **kw)
        bucket = self._conn.lookup(bucket) or self._conn.create_bucket(bucket)
        self._bucket_driver = BucketDriver(bucket, prefix) 
Example #22
Source File: flask_admin_s3_upload.py    From flask-admin-s3-upload with Apache License 2.0
def _save_file(self, temp_file, filename):
        if not (self.storage_type and self.bucket_name):
            return self._save_file_local(temp_file, filename)

        if self.storage_type != 's3':
            raise ValueError(
                'Storage type "%s" is invalid, the only supported storage type'
                ' (apart from default local storage) is s3.'
                % self.storage_type)

        conn = S3Connection(self.access_key_id, self.access_key_secret)
        bucket = conn.get_bucket(self.bucket_name)

        path = self._get_s3_path(filename)
        k = bucket.new_key(path)
        k.set_contents_from_string(temp_file.getvalue())
        k.set_acl(self.acl)

        return filename 
Example #23
Source File: amazon_s3.py    From django-distill with MIT License
def authenticate(self, calling_format=None):
        access_key_id = self.account_username()
        secret_access_key = self.options.get('SECRET_ACCESS_KEY', '')
        bucket = self.account_container()
        kwargs = {'calling_format': calling_format} if calling_format else {}
        try:
            self.d['connection'] = S3Connection(access_key_id,
                                                secret_access_key, **kwargs)
            self.d['bucket'] = self.d['connection'].get_bucket(bucket)
        except CertificateError as e:
            # work-around for upstream boto bug for buckets containing dots:
            # https://github.com/boto/boto/issues/2836
            if calling_format:
                raise e
            self.authenticate(calling_format=OrdinaryCallingFormat()) 
Example #24
Source File: compile_js.py    From canvas with BSD 3-Clause "New" or "Revised" License
def upload_compiled_js_to_s3(local_path, s3_path):
    with file(local_path, 'rb') as handle:
        raw_filedata = handle.read()

    filedata = gzip_string(raw_filedata)

    headers = {
        'Cache-Control': 'max-age=315360000, public',
        'Expires': 'Thu, 31 Dec 2037 23:55:55 GMT',
        'Content-Encoding': 'gzip',
        'Content-Type': 'text/javascript',
    }

    conn = S3Connection(*aws)
    bucket = conn.get_bucket(Config['compress_bucket'])

    key = Key(bucket)
    key.key = s3_path
    try:
        if key.exists():
            print "Skipping", s3_path, " already exists."
        else:
            print "Uploading %s (%s kb)" % (s3_path, len(filedata) // 1024)
            key.set_contents_from_string(filedata, headers=headers)
    except BotoServerError, bse:
        print bse 
Example #25
Source File: s3.py    From NEXT with Apache License 2.0
def create_bucket(AWS_BUCKET_NAME, AWS_ID, AWS_KEY):
    """
    Creates a bucket for an S3 account
    """
    conn = S3Connection(AWS_ID, AWS_KEY)
    bucket = conn.create_bucket(AWS_BUCKET_NAME)
    return bucket 
Example #26
Source File: label_gen.py    From label_generator with BSD 3-Clause "New" or "Revised" License
def run_s3(in_bucket_name, filename, out_bucket_name, path, ramtemp, debug_image):
    conn = S3Connection(config.access_key, config.secret_key, is_secure=False)
    in_bucket = conn.get_bucket(in_bucket_name)
    out_bucket = conn.get_bucket(out_bucket_name)

    dirpath = tempfile.mkdtemp(dir='/tmp/ram/' if ramtemp else None)
    logging.debug('Temp directory in {}'.format(dirpath))

    try:
        # copy into temp
        key = Key(in_bucket, filename)
        target = os.path.join(dirpath, os.path.basename(filename))
        key.get_contents_to_filename(target)

        # run algos
        files = run_local(target, dirpath, debug_image, True)

        # write files back to s3
        for f in files[0]:
            key = Key(out_bucket, os.path.join(path, 'json', os.path.basename(f)))
            key.set_contents_from_filename(f)
        for f in files[1]:
            key = Key(out_bucket, os.path.join(path, 'img', os.path.basename(f)))
            key.set_contents_from_filename(f)
        for f in files[2]:
            key = Key(out_bucket, os.path.join(
                path, 'text-masked', os.path.basename(f)))
            key.set_contents_from_filename(f)
    finally:
        shutil.rmtree(dirpath) 
Example #27
Source File: s3.py    From NEXT with Apache License 2.0
def get_bucket(AWS_BUCKET_NAME, AWS_ID, AWS_KEY):
    """
    Gets an existing bucket from an S3 account
    """
    conn = S3Connection(AWS_ID, AWS_KEY)
    bucket = conn.get_bucket(AWS_BUCKET_NAME, validate=False)
    return bucket 
Example #28
Source File: s3.py    From arbalest with MIT License
def __init__(self, aws_access_key_id, aws_secret_access_key, name,
                 s3_connection=None):
        self.name = name
        if s3_connection is None:
            self.connection = S3Connection(aws_access_key_id,
                                           aws_secret_access_key)
        else:
            self.connection = s3_connection

        self.bucket = self.connection.get_bucket(name) 
Example #29
Source File: load_to_mongo.py    From unshred-tag with MIT License
def __init__(self, config):
        self.conn = S3Connection(config["S3_ACCESS_KEY_ID"],
                                 config["S3_SECRET_ACCESS_KEY"])

        self.dst_bucket_name = config["S3_DST_BUCKET_NAME"]

        self.src_bucket = self.conn.get_bucket(config["S3_SRC_BUCKET_NAME"])
        self.dst_bucket = self.conn.get_bucket(self.dst_bucket_name)