Python boto.exception Examples

The following are 30 code examples of the boto.exception module, drawn from open source projects. The originating project and source file are noted above each example. You may also want to check out the other available functions and classes of the boto module.
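Most of the examples below follow the same pattern: import one or more exception classes from boto.exception and wrap a boto service call in a try/except. Here is a minimal sketch of that pattern, assuming boto 2 and S3; the bucket name and messages are placeholders rather than code from any of the projects below.

import boto
from boto.exception import NoAuthHandlerFound, S3ResponseError

try:
    conn = boto.connect_s3()                    # picks up credentials from env/config
    bucket = conn.get_bucket('example-bucket')  # raises S3ResponseError on failure
except NoAuthHandlerFound:
    print('No AWS credentials could be found')
except S3ResponseError as err:
    # BotoServerError subclasses carry status, reason and error_code
    print('S3 error %s %s: %s' % (err.status, err.reason, err.error_code))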
Example #1
Source File: test_s3.py    From drf-to-s3 with MIT License
def setUp(self):
        import boto
        from boto.exception import NoAuthHandlerFound
        from boto.s3.key import Key

        keys = ['AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY']
        try:
            for k in keys:
                os.environ[k]
            self.bucket_name = os.environ.get('AWS_TEST_BUCKET', 'drf-to-s3-test')
        except KeyError:
            self.skipTest('To test s3, set %s in .env' % ' and '.join(keys))

        conn = boto.connect_s3()
        bucket = conn.get_bucket(self.bucket_name)
        k = Key(bucket)
        k.key = "%s%s.txt" % (str(uuid.uuid4()), self.prefix)
        k.set_contents_from_string('This is a test of S3')

        self.existing_key = k.key
        self.existing_key_etag = k.etag
        self.bucket = bucket

        self.nonexisting_key = "%s%s.txt" % (str(uuid.uuid4()), self.prefix)
        self.new_key = None 
Example #2
Source File: __init__.py    From brix with Apache License 2.0
def validate(self, quiet=False, full=False):
        error = False
        for name, data in self.templates.iteritems():
            if 'error' in data:
                error = True
                print("{} error: {}".format(name, data['error'][1]))
                continue
            if full:
                # Run server-based validation
                # Trying to use template_body fails randomly, probably due to
                # length limits.
                bucket = self.s3.get_bucket('balanced-cfn-us-east-1')
                key = bucket.get_key('validation_tmp', validate=False)
                key.set_contents_from_string(data['json'])
                try:
                    self.cfn.validate_template(template_url='https://balanced-cfn-us-east-1.s3.amazonaws.com/validation_tmp')
                except boto.exception.BotoServerError, e:
                    if e.status != 400:
                        raise
                    error = True
                    print("{} error: {}".format(name, e.message))
                    continue
                finally: 
Example #3
Source File: awsqueryservice.py    From canvas with BSD 3-Clause "New" or "Revised" License
def __init__(self, **args):
        self.args = args
        self.check_for_credential_file()
        self.check_for_env_url()
        if 'host' not in self.args:
            if self.Regions:
                region_name = self.args.get('region_name',
                                            self.Regions[0]['name'])
                for region in self.Regions:
                    if region['name'] == region_name:
                        self.args['host'] = region['endpoint']
        if 'path' not in self.args:
            self.args['path'] = self.Path
        if 'port' not in self.args:
            self.args['port'] = self.Port
        try:
            boto.connection.AWSQueryConnection.__init__(self, **self.args)
            self.aws_response = None
        except boto.exception.NoAuthHandlerFound:
            raise NoCredentialsError() 
Example #4
Source File: document.py    From aws-extender with MIT License
def _check_num_ops(self, type_, response_num):
        """Raise exception if number of ops in response doesn't match commit

        :type type_: str
        :param type_: Type of commit operation: 'add' or 'delete'

        :type response_num: int
        :param response_num: Number of adds or deletes in the response.

        :raises: :class:`boto.cloudsearch.document.CommitMismatchError`
        """
        commit_num = len([d for d in self.doc_service.documents_batch
            if d['type'] == type_])

        if response_num != commit_num:
            raise CommitMismatchError(
                'Incorrect number of {0}s returned. Commit: {1} Response: {2}'\
                .format(type_, commit_num, response_num)) 
Example #5
Source File: bucket.py    From aws-extender with MIT License
def set_xml_logging(self, logging_str, headers=None):
        """
        Set logging on a bucket directly to the given xml string.

        :type logging_str: unicode string
        :param logging_str: The XML for the bucketloggingstatus which
            will be set.  The string will be converted to utf-8 before
            it is sent.  Usually, you will obtain this XML from the
            BucketLogging object.

        :rtype: bool
        :return: True if ok or raises an exception.
        """
        body = logging_str
        if not isinstance(body, bytes):
            body = body.encode('utf-8')
        response = self.connection.make_request('PUT', self.name, data=body,
                query_args='logging', headers=headers)
        body = response.read()
        if response.status == 200:
            return True
        else:
            raise self.connection.provider.storage_response_error(
                response.status, response.reason, body) 
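A hedged usage sketch for set_xml_logging: as the docstring notes, the XML is usually produced by a BucketLogging object. The bucket names below are placeholders, not part of the original source file.

import boto
from boto.s3.bucketlogging import BucketLogging

conn = boto.connect_s3()
bucket = conn.get_bucket('example-bucket')
logging_status = BucketLogging(target='example-log-bucket', prefix='logs/')
bucket.set_xml_logging(logging_status.to_xml())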
Example #6
Source File: awsqueryservice.py    From aws-extender with MIT License
def __init__(self, **args):
        self.args = args
        self.check_for_credential_file()
        self.check_for_env_url()
        if 'host' not in self.args:
            if self.Regions:
                region_name = self.args.get('region_name',
                                            self.Regions[0]['name'])
                for region in self.Regions:
                    if region['name'] == region_name:
                        self.args['host'] = region['endpoint']
        if 'path' not in self.args:
            self.args['path'] = self.Path
        if 'port' not in self.args:
            self.args['port'] = self.Port
        try:
            super(AWSQueryService, self).__init__(**self.args)
            self.aws_response = None
        except boto.exception.NoAuthHandlerFound:
            raise NoCredentialsError() 
Example #7
Source File: connection.py    From aws-extender with MIT License
def get_domain_and_name(self, domain_or_name):
        """
        Given a ``str`` or :class:`boto.sdb.domain.Domain`, return a
        ``tuple`` with the following members (in order):

            * An instance of :class:`boto.sdb.domain.Domain` for the requested
              domain
            * The domain's name as a ``str``

        :type domain_or_name: ``str`` or :class:`boto.sdb.domain.Domain`
        :param domain_or_name: The domain or domain name to get the domain
            and name for.

        :raises: :class:`boto.exception.SDBResponseError` when an invalid
            domain name is specified.

        :rtype: tuple
        :return: A ``tuple`` with contents outlined as per above.
        """
        if (isinstance(domain_or_name, Domain)):
            return (domain_or_name, domain_or_name.name)
        else:
            return (self.get_domain(domain_or_name), domain_or_name) 
Example #8
Source File: connection.py    From aws-extender with MIT License
def get_domain(self, domain_name, validate=True):
        """
        Retrieves a :py:class:`boto.sdb.domain.Domain` object whose name
        matches ``domain_name``.

        :param str domain_name: The name of the domain to retrieve
        :keyword bool validate: When ``True``, check to see if the domain
            actually exists. If ``False``, blindly return a
            :py:class:`Domain <boto.sdb.domain.Domain>` object with the
            specified name set.

        :raises:
            :py:class:`boto.exception.SDBResponseError` if ``validate`` is
            ``True`` and no match could be found.

        :rtype: :py:class:`boto.sdb.domain.Domain`
        :return: The requested domain
        """
        domain = Domain(self, domain_name)
        if validate:
            self.select(domain, """select * from `%s` limit 1""" % domain_name)
        return domain 
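A minimal sketch of the validate=True behaviour described above: when the domain does not exist, the underlying select raises boto.exception.SDBResponseError. The domain name is a placeholder.

import boto
from boto.exception import SDBResponseError

sdb = boto.connect_sdb()
try:
    domain = sdb.get_domain('example-domain', validate=True)
except SDBResponseError:
    print('example-domain does not exist')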
Example #9
Source File: cf.py    From stacks with MIT License
def upload_template(config, tpl, stack_name):
    """Upload a template to S3 bucket and returns S3 key url"""
    bn = config.get('templates_bucket_name', '{}-stacks-{}'.format(config['env'], config['region']))

    try:
        b = config['s3_conn'].get_bucket(bn)
    except boto.exception.S3ResponseError as err:
        if err.code == 'NoSuchBucket':
            print('Bucket {} does not exist.'.format(bn))
        else:
            print(err)
        sys.exit(1)

    h = _calc_md5(tpl)
    k = boto.s3.key.Key(b)
    k.key = '{}/{}/{}'.format(config['env'], stack_name, h)
    k.set_contents_from_string(tpl)
    url = k.generate_url(expires_in=30)
    return url 
Example #10
Source File: connection.py    From aws-extender with MIT License
def lookup(self, domain_name, validate=True):
        """
        Lookup an existing SimpleDB domain. This differs from
        :py:meth:`get_domain` in that ``None`` is returned if ``validate`` is
        ``True`` and no match was found (instead of raising an exception).

        :param str domain_name: The name of the domain to retrieve

        :param bool validate: If ``True``, a ``None`` value will be returned
            if the specified domain can't be found. If ``False``, a
            :py:class:`Domain <boto.sdb.domain.Domain>` object will be dumbly
            returned, regardless of whether it actually exists.

        :rtype: :class:`boto.sdb.domain.Domain` object or ``None``
        :return: The Domain object or ``None`` if the domain does not exist.
        """
        try:
            domain = self.get_domain(domain_name, validate)
        except:
            domain = None
        return domain 
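Because lookup() swallows that exception, the same existence check can be written without a try/except. A small sketch, with a placeholder domain name:

import boto

sdb = boto.connect_sdb()
if sdb.lookup('example-domain') is None:
    print('example-domain does not exist')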
Example #11
Source File: test_bypass_s3.py    From exporters with BSD 3-Clause "New" or "Revised" License
def test_should_meet_conditions(self):
        config = create_s3_bypass_simple_config()
        # shouldn't raise any exception
        S3Bypass.meets_conditions(config) 
Example #12
Source File: __init__.py    From brix with Apache License 2.0
def update(self, stack_name, template_name=None, params={}):
        try:
            stack = self.cfn.describe_stacks(stack_name)[0]
            operation = 'update_stack'
            kwargs = {}
            if stack.parameters:
                existing_params = {p.key: p.value for p in stack.parameters}
                existing_params.update(params)
                params = existing_params
            # if not template_name:
            #     template_name = stack.tags.get('TemplateName')
            print('Updating stack {} in {}'.format(stack_name, self.region))
        except boto.exception.BotoServerError:
            operation = 'create_stack'
            kwargs = {'disable_rollback': True}#, 'tags': {'TemplateName': template_name}}
            print('Creating stack {} in {}'.format(stack_name, self.region))
        if not template_name:
            raise ValueError('Template name for stack {} is required'.format(stack_name))
        print()
        data = self._get_template(template_name)
        getattr(self.cfn, operation)(
            stack_name=stack_name,
            template_url='https://balanced-cfn-{}.s3.amazonaws.com/{}'.format(self.region, data['s3_key']),
            capabilities=['CAPABILITY_IAM'],
            parameters=params.items(),
            **kwargs) 
Example #13
Source File: s3.py    From drf-to-s3 with MIT License
def copy(src_bucket, src_key, dst_bucket, dst_key, src_etag=None, validate_src_etag=False):
    '''
    Copy a key from one bucket to another.

    If validate_src_etag is True, the ETag must match. Raises
    ObjectNotFoundException if the key does not exist,
    or the ETag doesn't match.

    We return the same error in either case, since a mismatched
    ETag might mean the user wasn't the last to upload the object.
    If the bucket is private they may not even know it exists.
    By returning the same error, we avoid giving out extra
    information.

    '''
    import boto
    from boto.exception import S3ResponseError
    conn = boto.connect_s3()
    bucket = conn.get_bucket(dst_bucket)
    if validate_src_etag:
        headers = {
            'x-amz-copy-source-if-match': src_etag,
        }
    else:
        headers = {}
    try:
        bucket.copy_key(
            new_key_name=dst_key,
            src_bucket_name=src_bucket,
            src_key_name=src_key,
            headers=headers
        )
    except S3ResponseError as e:
        if e.status in [status.HTTP_404_NOT_FOUND, status.HTTP_412_PRECONDITION_FAILED]:
            raise ObjectNotFoundException()
        else:
            raise 
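A hedged sketch of calling the copy() helper above; the import path, bucket names, keys and ETag are assumptions made for illustration only.

from drf_to_s3.s3 import copy, ObjectNotFoundException  # assumed import path

try:
    copy(src_bucket='uploads', src_key='tmp/report.txt',
         dst_bucket='archive', dst_key='reports/report.txt',
         src_etag='"d41d8cd98f00b204e9800998ecf8427e"',
         validate_src_etag=True)
except ObjectNotFoundException:
    # Missing key and mismatched ETag are deliberately indistinguishable here.
    pass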
Example #14
Source File: pack.py    From bcbio-nextgen-vm with MIT License
def _get_s3_bucket(conn, bucket_name):
    from boto.exception import S3ResponseError
    try:
        bucket = conn.get_bucket(bucket_name)
    except S3ResponseError as e:
        if e.status == 404:
            bucket = conn.create_bucket(bucket_name)
        else:
            raise
    return bucket 
Example #15
Source File: awsqueryservice.py    From canvas with BSD 3-Clause "New" or "Revised" License
def __init__(self):
        s = 'Unable to find credentials'
        boto.exception.BotoClientError.__init__(self, s) 
Example #16
Source File: awsqueryrequest.py    From canvas with BSD 3-Clause "New" or "Revised" License
def process_args(self, **args):
        """
        Responsible for walking through Params defined for the request and:

        * Matching them with keyword parameters passed to the request
          constructor or via the command line.
        * Checking to see if all required parameters have been specified
          and raising an exception, if not.
        * Encoding each value into the set of request parameters that will
          be sent in the request to the AWS service.
        """
        self.args.update(args)
        self.connection_args = copy.copy(self.args)
        if 'debug' in self.args and self.args['debug'] >= 2:
            boto.set_stream_logger(self.name())
        required = [p.name for p in self.Params+self.Args if not p.optional]
        for param in self.Params+self.Args:
            if param.long_name:
                python_name = param.long_name.replace('-', '_')
            else:
                python_name = boto.utils.pythonize_name(param.name, '_')
            value = None
            if python_name in self.args:
                value = self.args[python_name]
            if value is None:
                value = param.default
            if value is not None:
                if param.name in required:
                    required.remove(param.name)
                if param.request_param:
                    if param.encoder:
                        param.encoder(param, self.request_params, value)
                    else:
                        Encoder.encode(param, self.request_params, value)
            if python_name in self.args:
                del self.connection_args[python_name]
        if required:
            raise RequiredParamError(required)
        boto.log.debug('request_params: %s' % self.request_params)
        self.process_markers(self.Response) 
Example #17
Source File: awsqueryrequest.py    From canvas with BSD 3-Clause "New" or "Revised" License
def __init__(self, error_msg):
        s = 'Error encoding value (%s)' % error_msg
        boto.exception.BotoClientError.__init__(self, s) 
Example #18
Source File: awsqueryrequest.py    From canvas with BSD 3-Clause "New" or "Revised" License
def __init__(self, required):
        self.required = required
        s = 'Required parameters are missing: %s' % self.required
        boto.exception.BotoClientError.__init__(self, s) 
Example #19
Source File: sdbmanager.py    From canvas with BSD 3-Clause "New" or "Revised" License
def load_object(self, obj):
        if not obj._loaded:
            a = self.domain.get_attributes(obj.id,consistent_read=self.consistent)
            if a.has_key('__type__'):
                for prop in obj.properties(hidden=False):
                    if a.has_key(prop.name):
                        value = self.decode_value(prop, a[prop.name])
                        value = prop.make_value_from_datastore(value)
                        try:
                            setattr(obj, prop.name, value)
                        except Exception, e:
                            boto.log.exception(e)
            obj._loaded = True 
Example #20
Source File: sdbmanager.py    From canvas with BSD 3-Clause "New" or "Revised" License
def encode_string(self, value):
        """Convert ASCII, Latin-1 or UTF-8 to pure Unicode"""
        if not isinstance(value, str): return value
        try:
            return unicode(value, 'utf-8')
        except: # really, this should throw an exception.
                # in the interest of not breaking current
                # systems, however:
            arr = []
            for ch in value:
                arr.append(unichr(ord(ch)))
            return u"".join(arr) 
Example #21
Source File: awslauncher.py    From ferry with Apache License 2.0
def _delete_stack(self, cluster_uuid, service_uuid):
        # Find the relevant stack information. 
        ips = []
        stacks = self.apps.find( { "_cluster_uuid" : cluster_uuid,
                                   "_service_uuid" : service_uuid } )

        logging.warning("Deleting cluster %s" % str(cluster_uuid))
        for stack in stacks:
            for s in stack.values():
                if type(s) is dict and s["type"] == "AWS::CloudFormation::Stack":
                    stack_id = s["id"]

                    # Now try to delete the stack. Wrap this in a try-block so that
                    # we don't completely fail even if the stack doesn't exist. 
                    try:
                        logging.warning("Deleting stack %s" % str(stack_id))
                        self.cf.delete_stack(stack_id)
                    except boto.exception.BotoServerError as e:
                        logging.error(str(e))
                    except:
                        # We could not delete the stack. This probably means
                        # that the AWS service is temporarily down. 
                        logging.error("could not delete Cloudformation stack")

        self.apps.remove( { "_cluster_uuid" : cluster_uuid,
                            "_service_uuid" : service_uuid } ) 
Example #22
Source File: awslauncher.py    From ferry with Apache License 2.0
def _launch_cloudformation(self, stack_name, cloud_plan, stack_desc):
        """
        Launch the cluster plan.  
        """
        try:
            # Try to create the application stack. 
            stack_id = self.cf.create_stack(stack_name, template_body=json.dumps((cloud_plan)))
        except boto.exception.BotoServerError as e:
            logging.error(str(e))
            return None
        except:
            # We could not create the stack. This probably means
            # that the AWS service is temporarily down. 
            logging.error("could not create Cloudformation stack")
            return None

        # Now wait for the stack to be in a completed state
        # before returning. That way we'll know if the stack creation
        # has failed or not. 
        if not self._wait_for_stack(stack_id):
            logging.warning("Heat plan %s CREATE_FAILED" % stack_id)
            return None

        # Now find the physical IDs of all the resources. 
        resources = self._collect_resources(stack_id)
        for r in resources:
            if r.logical_resource_id in stack_desc:
                stack_desc[r.logical_resource_id]["id"] = r.physical_resource_id

        # Record the Stack ID in the description so that
        # we can refer back to it later. 
        stack_desc[stack_name] = { "id" : stack_id,
                                   "type": "AWS::CloudFormation::Stack" }

        return stack_desc 
Example #23
Source File: test_storage.py    From simpleflow with MIT License
def test_sanitize_bucket_and_host(self):
        self.create()

        # bucket where "get_location" works: return bucket+region
        self.assertEqual(
            storage.sanitize_bucket_and_host(self.bucket),
            (self.bucket, 'us-east-1'))

        # bucket where "get_location" doesn't work: return bucket + default region setting
        def _access_denied():
            from boto.exception import S3ResponseError
            err = S3ResponseError("reason", "resp")
            err.error_code = "AccessDenied"
            raise err

        with patch("boto.s3.bucket.Bucket.get_location", side_effect=_access_denied):
            with patch("simpleflow.settings.SIMPLEFLOW_S3_HOST") as default:
                self.assertEqual(
                    storage.sanitize_bucket_and_host(self.bucket),
                    (self.bucket, default))

        # bucket where we provided a host/bucket: return bucket+host
        self.assertEqual(
            storage.sanitize_bucket_and_host('s3.amazonaws.com/{}'.format(self.bucket)),
            (self.bucket, 's3.amazonaws.com'))

        # bucket trivially invalid: raise
        with self.assertRaises(ValueError):
            storage.sanitize_bucket_and_host('any/mybucket')

        # bucket with too many "/": raise
        with self.assertRaises(ValueError):
            storage.sanitize_bucket_and_host('s3-eu-west-1.amazonaws.com/mybucket/subpath') 
Example #24
Source File: document.py    From aws-extender with MIT License
def __init__(self, response, doc_service, sdf):
        self.response = response
        self.doc_service = doc_service
        self.sdf = sdf

        _body = response.content.decode('utf-8')

        try:
            self.content = json.loads(_body)
        except:
            boto.log.error('Error indexing documents.\nResponse Content:\n{0}\n\n'
                'SDF:\n{1}'.format(_body, self.sdf))
            raise boto.exception.BotoServerError(self.response.status_code, '',
                body=_body)

        self.status = self.content['status']
        if self.status == 'error':
            self.errors = [e.get('message') for e in self.content.get('errors',
                [])]
            for e in self.errors:
                if "Illegal Unicode character" in e:
                    raise EncodingError("Illegal Unicode character in document")
                elif e == "The Content-Length is too long":
                    raise ContentTooLongError("Content was too long")
            if 'adds' not in self.content or 'deletes' not in self.content:
                raise SearchServiceException("Error indexing documents"
                    " => %s" % self.content.get('message', ''))
        else:
            self.errors = []

        self.adds = self.content['adds']
        self.deletes = self.content['deletes']
        self._check_num_ops('add', self.adds)
        self._check_num_ops('delete', self.deletes) 
Example #25
Source File: bucket.py    From aws-extender with MIT License
def disable_logging(self, headers=None):
        """
        Disable logging on a bucket.

        :rtype: bool
        :return: True if ok or raises an exception.
        """
        blogging = BucketLogging()
        return self.set_xml_logging(blogging.to_xml(), headers=headers) 
Example #26
Source File: layer1.py    From aws-extender with MIT License
def delete_stream(self, stream_name):
        """
        Deletes a stream and all its shards and data. You must shut
        down any applications that are operating on the stream before
        you delete the stream. If an application attempts to operate
        on a deleted stream, it will receive the exception
        `ResourceNotFoundException`.

        If the stream is in the `ACTIVE` state, you can delete it.
        After a `DeleteStream` request, the specified stream is in the
        `DELETING` state until Amazon Kinesis completes the deletion.

        **Note:** Amazon Kinesis might continue to accept data read
        and write operations, such as PutRecord, PutRecords, and
        GetRecords, on a stream in the `DELETING` state until the
        stream deletion is complete.

        When you delete a stream, any shards in that stream are also
        deleted, and any tags are dissociated from the stream.

        You can use the DescribeStream operation to check the state of
        the stream, which is returned in `StreamStatus`.

        `DeleteStream` has a limit of 5 transactions per second per
        account.

        :type stream_name: string
        :param stream_name: The name of the stream to delete.

        """
        params = {'StreamName': stream_name, }
        return self.make_request(action='DeleteStream',
                                 body=json.dumps(params)) 
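A hedged usage sketch for delete_stream that handles the ResourceNotFoundException mentioned in the docstring; the region and stream name are placeholders.

import boto.kinesis
from boto.kinesis.exceptions import ResourceNotFoundException

kinesis = boto.kinesis.connect_to_region('us-east-1')
try:
    kinesis.delete_stream('example-stream')
except ResourceNotFoundException:
    # The stream was already deleted or never existed.
    pass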
Example #27
Source File: layer1.py    From aws-extender with MIT License
def make_request(self, action, body='', object_hook=None):
        """
        :raises: ``SWFResponseError`` if response status is not 200.
        """
        headers = {'X-Amz-Target': '%s.%s' % (self.ServiceName, action),
                   'Host': self.region.endpoint,
                   'Content-Type': 'application/json; charset=UTF-8',
                   'Content-Encoding': 'amz-1.0',
                   'Content-Length': str(len(body))}
        http_request = self.build_base_http_request('POST', '/', '/',
                                                    {}, headers, body, None)
        response = self._mexe(http_request, sender=None,
                              override_num_retries=10)
        response_body = response.read().decode('utf-8')
        boto.log.debug(response_body)
        if response.status == 200:
            if response_body:
                return json.loads(response_body, object_hook=object_hook)
            else:
                return None
        else:
            json_body = json.loads(response_body)
            fault_name = json_body.get('__type', None)
            # Certain faults get mapped to more specific exception classes.
            excp_cls = self._fault_excp.get(fault_name, self.ResponseError)
            raise excp_cls(response.status, response.reason, body=json_body)

    # Actions related to Activities 
Example #28
Source File: document.py    From aws-extender with MIT License
def _check_num_ops(self, type_, response_num):
        """Raise exception if number of ops in response doesn't match commit

        :type type_: str
        :param type_: Type of commit operation: 'add' or 'delete'

        :type response_num: int
        :param response_num: Number of adds or deletes in the response.

        :raises: :class:`boto.cloudsearch2.document.CommitMismatchError`
        """
        commit_num = len([d for d in self.doc_service.documents_batch
                          if d['type'] == type_])

        if response_num != commit_num:
            if self.signed_request:
                boto.log.debug(self.response)
            else:
                boto.log.debug(self.response.content)
            # There will always be a commit mismatch error if there is any
            # errors on cloudsearch. self.errors gets lost when this
            # CommitMismatchError is raised. Whoever is using boto has no idea
            # why their commit failed. They can't even notify the user of the
            # cause by parsing the error messages from amazon. So let's
            # attach the self.errors to the exceptions if we already spent
            # time and effort collecting them out of the response.
            exc = CommitMismatchError(
                'Incorrect number of {0}s returned. Commit: {1} Response: {2}'
                .format(type_, commit_num, response_num)
            )
            exc.errors = self.errors
            raise exc 
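A short sketch of consuming the errors attached to the exception above. Building the document service directly from an endpoint is an assumption for illustration; the endpoint and document are placeholders.

from boto.cloudsearch2.document import DocumentServiceConnection, CommitMismatchError

doc_service = DocumentServiceConnection(
    endpoint='doc-example.us-east-1.cloudsearch.amazonaws.com')
doc_service.add('doc-1', {'title': 'example'})
try:
    doc_service.commit()
except CommitMismatchError as exc:
    for message in getattr(exc, 'errors', []):
        print(message)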
Example #29
Source File: document.py    From aws-extender with MIT License
def __init__(self, response, doc_service, sdf, signed_request=False):
        self.response = response
        self.doc_service = doc_service
        self.sdf = sdf
        self.signed_request = signed_request

        if self.signed_request:
            self.content = response
        else:
            _body = response.content.decode('utf-8')

            try:
                self.content = json.loads(_body)
            except:
                boto.log.error('Error indexing documents.\nResponse Content:\n{0}'
                               '\n\nSDF:\n{1}'.format(_body, self.sdf))
                raise boto.exception.BotoServerError(self.response.status_code, '',
                                                     body=_body)

        self.status = self.content['status']
        if self.status == 'error':
            self.errors = [e.get('message') for e in self.content.get('errors',
                                                                      [])]
            for e in self.errors:
                if "Illegal Unicode character" in e:
                    raise EncodingError("Illegal Unicode character in document")
                elif e == "The Content-Length is too long":
                    raise ContentTooLongError("Content was too long")
        else:
            self.errors = []

        self.adds = self.content['adds']
        self.deletes = self.content['deletes']
        self._check_num_ops('add', self.adds)
        self._check_num_ops('delete', self.deletes) 
Example #30
Source File: sdbmanager.py    From aws-extender with MIT License
def encode_string(self, value):
        """Convert ASCII, Latin-1 or UTF-8 to pure Unicode"""
        if not isinstance(value, str):
            return value
        try:
            return six.text_type(value, 'utf-8')
        except:
            # really, this should throw an exception.
            # in the interest of not breaking current
            # systems, however:
            arr = []
            for ch in value:
                arr.append(six.unichr(ord(ch)))
            return u"".join(arr)