Python django.conf.settings.AWS_STORAGE_BUCKET_NAME Examples

The following are 8 code examples showing how to use django.conf.settings.AWS_STORAGE_BUCKET_NAME. These examples are extracted from open source projects; you can go to the original project or source file by following the links above each example.

You may also want to check out all available functions and classes of the module django.conf.settings.
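AWS_STORAGE_BUCKET_NAME is not a built-in Django setting; it is a project-defined setting, conventionally read by the django-storages S3 backend. A minimal sketch of how it is typically declared and read back (the bucket name and credential values here are placeholders):

# settings.py -- a minimal sketch; values are placeholders
AWS_ACCESS_KEY_ID = "..."          # usually read from the environment
AWS_SECRET_ACCESS_KEY = "..."      # never commit real credentials
AWS_STORAGE_BUCKET_NAME = "my-example-bucket"

# django-storages reads the settings above when its S3 backend is active
DEFAULT_FILE_STORAGE = "storages.backends.s3boto3.S3Boto3Storage"

# elsewhere, any module reads the value back through the settings object
from django.conf import settings
bucket = settings.AWS_STORAGE_BUCKET_NAME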

Example 1
Project: open-humans   Author: OpenHumans   File: api_views.py    License: MIT License
def post(self, request):
    super().post(request)

    key = get_upload_path(self.project.id_label, self.form.cleaned_data["filename"])

    # Record the upload as a ProjectDataFile before the file itself exists on S3.
    datafile = ProjectDataFile(
        user=self.project_member.member.user,
        file=key,
        metadata=self.form.cleaned_data["metadata"],
        direct_sharing_project=self.project,
    )

    datafile.save()
    datafile.datatypes.set(self.form.cleaned_data["datatypes"])

    # Presign a PUT URL so the client can upload directly to the bucket.
    s3 = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)

    url = s3.generate_url(
        expires_in=settings.INCOMPLETE_FILE_EXPIRATION_HOURS * 60 * 60,
        method="PUT",
        bucket=settings.AWS_STORAGE_BUCKET_NAME,
        key=key,
    )

    return Response({"id": datafile.id, "url": url}, status=status.HTTP_201_CREATED)
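Note that S3Connection comes from the legacy boto library, not boto3. For reference, a hedged sketch of producing the same presigned PUT URL with boto3 (an equivalent under that assumption, not the project's code; the key is a placeholder):

import boto3
from django.conf import settings

key = "some/upload/path.csv"  # placeholder for the upload path from above

s3 = boto3.client(
    "s3",
    aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
    aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
)
# ClientMethod="put_object" mirrors boto's method="PUT"
url = s3.generate_presigned_url(
    ClientMethod="put_object",
    Params={"Bucket": settings.AWS_STORAGE_BUCKET_NAME, "Key": key},
    ExpiresIn=settings.INCOMPLETE_FILE_EXPIRATION_HOURS * 60 * 60,
)
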
Example 2
Project: OasisPlatform   Author: OasisLMF   File: views.py    License: BSD 3-Clause "New" or "Revised" License
def get(self, request):
    server_version = ""
    server_config = dict()

    try:
        with open('VERSION', 'r') as ver:
            server_version = ver.read().strip()
    except FileNotFoundError:
        server_version = ""

    server_config['DEBUG'] = settings.DEBUG
    server_config['LANGUAGE_CODE'] = settings.LANGUAGE_CODE
    server_config['TIME_ZONE'] = settings.TIME_ZONE

    # Backends
    server_config['WSGI_APPLICATION'] = settings.WSGI_APPLICATION
    server_config['DEFAULT_FILE_STORAGE'] = settings.DEFAULT_FILE_STORAGE
    server_config['DB_ENGINE'] = settings.DB_ENGINE

    # Storage
    server_config['STORAGE_TYPE'] = settings.STORAGE_TYPE
    server_config['MEDIA_ROOT'] = settings.MEDIA_ROOT
    server_config['AWS_STORAGE_BUCKET_NAME'] = settings.AWS_STORAGE_BUCKET_NAME
    server_config['AWS_LOCATION'] = settings.AWS_LOCATION
    server_config['AWS_SHARED_BUCKET'] = settings.AWS_SHARED_BUCKET
    server_config['AWS_QUERYSTRING_EXPIRE'] = settings.AWS_QUERYSTRING_EXPIRE
    server_config['AWS_QUERYSTRING_AUTH'] = settings.AWS_QUERYSTRING_AUTH

    # Token Conf
    server_config['ROTATE_REFRESH_TOKEN'] = settings.SIMPLE_JWT['ROTATE_REFRESH_TOKENS']
    server_config['ACCESS_TOKEN_LIFETIME'] = settings.SIMPLE_JWT['ACCESS_TOKEN_LIFETIME']
    server_config['REFRESH_TOKEN_LIFETIME'] = settings.SIMPLE_JWT['REFRESH_TOKEN_LIFETIME']

    return Response({
        'version': server_version,
        'config': server_config
    })
Example 3
Project: OasisPlatform   Author: OasisLMF   File: tasks.py    License: BSD 3-Clause "New" or "Revised" License
def log_worker_monitor(sender, **k):
    logger.info('DEBUG: {}'.format(settings.DEBUG))
    logger.info('DB_ENGINE: {}'.format(settings.DB_ENGINE))
    logger.info('STORAGE_TYPE: {}'.format(settings.STORAGE_TYPE))
    logger.info('DEFAULT_FILE_STORAGE: {}'.format(settings.DEFAULT_FILE_STORAGE))
    logger.info('MEDIA_ROOT: {}'.format(settings.MEDIA_ROOT))
    logger.info('AWS_STORAGE_BUCKET_NAME: {}'.format(settings.AWS_STORAGE_BUCKET_NAME))
    logger.info('AWS_LOCATION: {}'.format(settings.AWS_LOCATION))
    logger.info('AWS_S3_REGION_NAME: {}'.format(settings.AWS_S3_REGION_NAME))
    logger.info('AWS_QUERYSTRING_AUTH: {}'.format(settings.AWS_QUERYSTRING_AUTH))
    logger.info('AWS_QUERYSTRING_EXPIRE: {}'.format(settings.AWS_QUERYSTRING_EXPIRE))
    logger.info('AWS_SHARED_BUCKET: {}'.format(settings.AWS_SHARED_BUCKET))
    logger.info('AWS_IS_GZIPPED: {}'.format(settings.AWS_IS_GZIPPED))
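Judging from the (sender, **k) signature, this looks like a Celery signal handler. A minimal sketch of how it might be connected, assuming the worker_ready signal (which signal the project actually uses is not shown in this excerpt):

from celery.signals import worker_ready

# fire the config dump once the worker process has finished booting
worker_ready.connect(log_worker_monitor)
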
Example 4
Project: opentaps_seas   Author: opentaps   File: utils.py    License: GNU Lesser General Public License v3.0
def check_boto_config():
    if not settings.AWS_ACCESS_KEY_ID or not settings.AWS_SECRET_ACCESS_KEY or not settings.AWS_STORAGE_BUCKET_NAME:
        raise Exception('''AWS configuration is required, check your settings for
                           AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY and AWS_STORAGE_BUCKET_NAME''') 
Example 5
Project: opentaps_seas   Author: opentaps   File: checks.py    License: GNU Lesser General Public License v3.0
def check_aws_config(app_configs, **kwargs):
    errors = []
    if not settings.AWS_ACCESS_KEY_ID or not settings.AWS_SECRET_ACCESS_KEY or not settings.AWS_STORAGE_BUCKET_NAME:
        errors.append(
            Warning(
                'Missing AWS configuration, file storage will be unavailable',
                hint='''Make sure you set AWS_ACCESS_KEY_ID,
                        AWS_SECRET_ACCESS_KEY and AWS_STORAGE_BUCKET_NAME in secrets.json''',
                obj=settings,
                id='opentaps_seas.W002',
            )
        )
    return errors 
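A check function like this only runs if it is registered with Django's system-check framework, and the registration is not shown in the excerpt. A minimal sketch of the usual pattern, typically placed where it runs at startup (e.g. an AppConfig.ready()):

from django.core.checks import register

# after this, `manage.py check` (and each startup check pass) runs it
register(check_aws_config)
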
Example 6
Project: education-backend   Author: f213   File: s3.py    License: MIT License
def get_presigned_url(self, object_id: str, expires: int):
    return self.client.generate_presigned_url(
        ClientMethod='get_object',
        Params={
            'Bucket': settings.AWS_STORAGE_BUCKET_NAME,
            'Key': object_id,
            'ResponseContentDisposition': 'attachment',
        },
        ExpiresIn=expires,
    )
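The class around this method is not shown; self.client is presumably a boto3 S3 client. A hedged sketch of a wrapper that would make the method work as written (the class name and object key below are hypothetical):

import boto3
from django.conf import settings

class AppS3:  # hypothetical name; only self.client matters to the method
    def __init__(self):
        self.client = boto3.client(
            "s3",
            aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
            aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
        )

    # method from the example above, unchanged
    def get_presigned_url(self, object_id: str, expires: int):
        return self.client.generate_presigned_url(
            ClientMethod='get_object',
            Params={
                'Bucket': settings.AWS_STORAGE_BUCKET_NAME,
                'Key': object_id,
                'ResponseContentDisposition': 'attachment',
            },
            ExpiresIn=expires,
        )

# e.g. a five-minute download link (the key is a placeholder)
url = AppS3().get_presigned_url("invoices/42.pdf", expires=300)
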
Example 7
Project: arxiv-vanity   Author: arxiv-vanity   File: renderer.py    License: Apache License 2.0
def render_paper(
    source, output_path, webhook_url=None, output_bucket=None, extra_run_kwargs=None
):
    """
    Render a source directory using Engrafo.
    """
    client = create_client()

    renders_running = client.info()["ContainersRunning"]
    if renders_running >= settings.PAPERS_MAX_RENDERS_RUNNING:
        raise TooManyRendersRunningError(
            f"{renders_running} renders running, which is more than PAPERS_MAX_RENDERS_RUNNING"
        )

    labels = {}
    environment = {
        "BIBLIO_GLUTTON_URL": settings.BIBLIO_GLUTTON_URL,
        "GROBID_URL": settings.GROBID_URL,
        "SENTRY_DSN": settings.ENGRAFO_SENTRY_DSN,
    }
    volumes = {}
    network = None

    # Production
    if settings.MEDIA_USE_S3:
        if output_bucket is None:
            output_bucket = settings.AWS_STORAGE_BUCKET_NAME
        source = f"s3://{settings.AWS_STORAGE_BUCKET_NAME}/{source}"
        output_path = f"s3://{output_bucket}/{output_path}"
        environment["AWS_ACCESS_KEY_ID"] = settings.AWS_ACCESS_KEY_ID
        environment["AWS_SECRET_ACCESS_KEY"] = settings.AWS_SECRET_ACCESS_KEY
        environment["AWS_S3_REGION_NAME"] = settings.AWS_S3_REGION_NAME
    # Development
    else:
        # HACK(bfirsh): MEDIA_ROOT is an absolute path to something on
        # the host machine. We need to make this relative to a mount inside the
        # Docker container.
        docker_media_root = os.path.join("/mnt", os.path.basename(settings.MEDIA_ROOT))
        source = os.path.join(docker_media_root, source)
        output_path = os.path.join(docker_media_root, output_path)
        # HOST_PWD is set in docker-compose.yml
        volumes[os.environ["HOST_PWD"]] = {"bind": "/mnt", "mode": "rw"}

    # If running on the local machine, we need to add the container to the same network
    # as the web app so it can call the callback
    if os.environ.get("DOCKER_HOST") == "unix:///var/run/docker.sock":
        network = "arxiv-vanity_default"

    if extra_run_kwargs is None:
        extra_run_kwargs = {}
    return client.containers.run(
        settings.ENGRAFO_IMAGE,
        "sh -c "
        + shlex.quote("; ".join(make_command(source, output_path, webhook_url))),
        volumes=volumes,
        environment=environment,
        labels=labels,
        network=network,
        detach=True,
        **extra_run_kwargs,
    ) 
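Because the container is started with detach=True, client.containers.run returns a Container object rather than the command's output. A short usage sketch, with placeholder arguments, of following the render's logs through the Docker SDK:

# the source and output paths here are placeholders
container = render_paper(source="1234.05678", output_path="renders/1234")

# stream Engrafo's output as the render progresses
for line in container.logs(stream=True):
    print(line.decode().rstrip())
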
Example 8
Project: intake   Author: codeforamerica   File: download_data.py    License: MIT License
def handle(self, *args, **kwargs):
    """Downloads a single full-database fixture into db and syncs s3
    by ./manage.py download_data

    1. sync replica from origin
    2. pull fixture from bucket to local tempfile
    3. drop all tables in the public schema of the existing database
    4. load local fixture tempfile

    Relevant settings:
        ORIGIN_MEDIA_BUCKET_FOR_SYNC - bucket to pull from for sync
        AWS_STORAGE_BUCKET_NAME - bucket to overwrite with new files
        SYNC_BUCKET - bucket to pull fixture from
        SYNC_FIXTURE_LOCATION - filename used for fixture

    Assumes that a db fixture has already been dumped to SYNC_BUCKET
    by ./manage.py upload_data
    """
    if not settings.ORIGIN_MEDIA_BUCKET_FOR_SYNC:
        raise Exception(
            "Warning: ORIGIN_MEDIA_BUCKET_FOR_SYNC not set. "
            "It's likely this is production. This Error has protected you.")
    sync_s3 = [
        settings.AWS_CLI_LOCATION,
        's3', 'sync',
        's3://%s' % settings.ORIGIN_MEDIA_BUCKET_FOR_SYNC,  # sync from
        's3://%s' % settings.AWS_STORAGE_BUCKET_NAME,  # sync to
    ]  # syncs replica from origin
    aws_open(sync_s3)

    download_s3 = [
        settings.AWS_CLI_LOCATION,
        's3', 'mv',
        's3://%s/%s' % (
            settings.SYNC_BUCKET,  # bucket to pull from
            ntpath.basename(settings.SYNC_FIXTURE_LOCATION),  # filename
        ),
        settings.SYNC_FIXTURE_LOCATION,  # local temp filename
    ]  # command to pull down fixture to local file, with aws env vars
    aws_open(download_s3)
    table_names = run_sql(
        "select tablename from pg_tables where schemaname = 'public'")
    for table_name in table_names:
        print(table_name)
        drop_table(table_name[0])
    pg_load(settings.SYNC_FIXTURE_LOCATION)
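
aws_open, run_sql, drop_table and pg_load are project helpers not shown in this excerpt. As one illustration, a hedged guess at what aws_open might look like, assuming it shells out to the AWS CLI with credentials passed through the environment (a reconstruction, not the project's code):

import os
import subprocess

from django.conf import settings

def aws_open(command):
    """Hypothetical reconstruction: run an AWS CLI command with creds in env."""
    env = dict(
        os.environ,
        AWS_ACCESS_KEY_ID=settings.AWS_ACCESS_KEY_ID,
        AWS_SECRET_ACCESS_KEY=settings.AWS_SECRET_ACCESS_KEY,
    )
    subprocess.check_call(command, env=env)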