Python pymongo.errors Examples

The following are 30 code examples showing how to use the pymongo.errors module. They are extracted from open source projects; the originating project, author, file, and license are noted above each example.


You may also want to check out all available functions and classes of the pymongo module.
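Before the project excerpts, here is a minimal, self-contained sketch of the basic pattern (not taken from any of the projects below): import the exception classes from pymongo.errors and catch them around individual operations. It assumes a mongod reachable at the default mongodb://localhost:27017; the test_db database and items collection names are made up for illustration.

from pymongo import MongoClient
from pymongo.errors import ConnectionFailure, DuplicateKeyError, PyMongoError

# Assumes a local mongod; the database and collection names are arbitrary examples.
client = MongoClient('mongodb://localhost:27017', serverSelectionTimeoutMS=2000)

try:
    # 'ping' forces a round trip, so connectivity problems surface here.
    # ServerSelectionTimeoutError and AutoReconnect are subclasses of ConnectionFailure.
    client.admin.command('ping')
except ConnectionFailure as exc:
    print('Cannot reach MongoDB: {}'.format(exc))
else:
    items = client['test_db']['items']
    try:
        items.insert_one({'_id': 1, 'name': 'example'})
    except DuplicateKeyError:
        print('A document with _id=1 already exists')
    except PyMongoError as exc:
        # PyMongoError is the base class of all pymongo exceptions.
        print('Unexpected MongoDB error: {}'.format(exc))

Most of the examples below follow a similar shape, catching the narrowest exception the calling code can meaningfully handle (DuplicateKeyError, BulkWriteError, AutoReconnect, OperationFailure) or the broad PyMongoError base class when any failure should simply be logged.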

Example 1
Project: recruit   Author: Frank-qlu   File: helpers.py    License: Apache License 2.0
from pymongo.errors import DuplicateKeyError, OperationFailure, WTimeoutError


def _check_write_command_response(results):
    """Backward compatibility helper for write command error handling."""
    errors = [res for res in results
              if "writeErrors" in res[1] or "writeConcernError" in res[1]]
    if errors:
        # If multiple batches had errors
        # raise from the last batch.
        offset, result = errors[-1]
        # Prefer write errors over write concern errors
        write_errors = result.get("writeErrors")
        if write_errors:
            # If the last batch had multiple errors only report
            # the last error to emulate continue_on_error.
            error = write_errors[-1]
            error["index"] += offset
            if error.get("code") == 11000:
                raise DuplicateKeyError(error.get("errmsg"), 11000, error)
        else:
            error = result["writeConcernError"]
            if "errInfo" in error and error["errInfo"].get('wtimeout'):
                # Make sure we raise WTimeoutError
                raise WTimeoutError(error.get("errmsg"),
                                    error.get("code"), error)
        raise OperationFailure(error.get("errmsg"), error.get("code"), error) 
Example 2
Project: ChatterBot   Author: gunthercox   File: mongodb.py    License: BSD 3-Clause "New" or "Revised" License
def __init__(self, **kwargs):
        super().__init__(**kwargs)
        from pymongo import MongoClient
        from pymongo.errors import OperationFailure

        self.database_uri = kwargs.get(
            'database_uri', 'mongodb://localhost:27017/chatterbot-database'
        )

        # Use the default host and port
        self.client = MongoClient(self.database_uri)

        # Increase the sort buffer to 42M if possible
        try:
            self.client.admin.command({'setParameter': 1, 'internalQueryExecMaxBlockingSortBytes': 44040192})
        except OperationFailure:
            pass

        # Specify the name of the database
        self.database = self.client.get_database()

        # The mongo collection of statement documents
        self.statements = self.database['statements'] 
Example 3
Project: pytest-plugins   Author: man-group   File: mongo.py    License: MIT License
def check_server_up(self):
        """Test connection to the server."""
        import pymongo
        from pymongo.errors import AutoReconnect, ConnectionFailure

        # Hostname must exist before continuing
        # Some server class (e.g. Docker) will only allocate an IP after the
        # container has started.
        if not self.hostname:
            return False

        log.info("Connecting to Mongo at %s:%s" % (self.hostname, self.port))
        try:
            self.api = pymongo.MongoClient(self.hostname, self.port,
                                           serverselectiontimeoutms=200)
            self.api.list_database_names()
            # Configure the client with default timeouts in case the server goes slow
            self.api = pymongo.MongoClient(self.hostname, self.port)
            return True
        except (AutoReconnect, ConnectionFailure) as e:
            pass
        return False 
Example 4
Project: ceph-lcm   Author: Mirantis   File: task.py    License: Apache License 2.0
def create(self):
        """Creates model in database."""

        state = self.get_state()

        state.pop("_id", None)
        state["time"]["created"] = timeutils.current_unix_timestamp()
        state["time"]["updated"] = state["time"]["created"]
        state["update_marker"] = self.new_update_marker()

        collection = self.collection()
        insert_method = retryutils.mongo_retry()(collection.insert_one)
        find_method = retryutils.mongo_retry()(collection.find_one)

        try:
            document = insert_method(state)
        except pymongo.errors.DuplicateKeyError as exc:
            raise exceptions.UniqueConstraintViolationError from exc

        document = find_method({"_id": document.inserted_id})
        self.set_state(document)

        return self 
Example 5
Project: arctic   Author: man-group   File: arctic.py    License: GNU Lesser General Public License v2.1
def library_exists(self, library):
        """
        Check whether a given library exists.

        Parameters
        ----------
        library : `str`
            The name of the library. e.g. 'library' or 'user.library'

        Returns
        -------
        `bool`
            True if the library with the given name already exists, False otherwise
        """
        exists = False
        try:
            # This forces auth errors, and lets us fall back to the slower "list_collections"
            ArcticLibraryBinding(self, library).get_library_type()
            # This will obtain the library, if no exception thrown we have verified its existence
            self.get_library(library)
            exists = True
        except OperationFailure:
            exists = library in self.list_libraries()
        except LibraryNotFoundException:
            pass
        return exists 
Example 6
Project: arctic   Author: man-group   File: _ndarray_store.py    License: GNU Lesser General Public License v2.1
def _update_fw_pointers(collection, symbol, version, previous_version, is_append, shas_to_add=None):
    """
    This function will decide whether to update the version document with forward pointers to segments.
    It detects cases where no prior writes/appends have been performed with FW pointers, and extracts the segment IDs.
    It also sets the metadata which indicate the mode of operation at the time of the version creation.
    """
    version[FW_POINTERS_CONFIG_KEY] = ARCTIC_FORWARD_POINTERS_CFG.name  # get the str as enum is not BSON serializable

    if ARCTIC_FORWARD_POINTERS_CFG is FwPointersCfg.DISABLED:
        return

    version_shas = set()

    if is_append:
        # Appends are tricky, as we extract the SHAs from the previous version (assuming it has FW pointers info)
        prev_fw_cfg = get_fwptr_config(previous_version)
        if prev_fw_cfg is FwPointersCfg.DISABLED.name:
            # find() yields projected documents, so pull the 'sha' field out of each one
            version_shas.update(Binary(doc['sha']) for doc in collection.find(
                {'symbol': symbol,
                 'parent': version_base_or_id(previous_version),
                 'segment': {'$lt': previous_version['up_to']}},
                {'sha': 1}))
        else:
            version_shas.update(previous_version[FW_POINTERS_REFS_KEY])

    # It is a write (we always get the all-inclusive set of SHAs), so no need to obtain previous SHAs
    version_shas.update(shas_to_add)

    # Verify here the number of seen segments vs expected ones
    if len(version_shas) != version['segment_count']:
        raise pymongo.errors.OperationFailure("Mismatched number of forward pointers to segments for {}: {} != {})"
                                              "Is append: {}. Previous version: {}. "
                                              "Gathered forward pointers segment shas: {}.".format(
            symbol, len(version_shas), version['segment_count'], is_append, previous_version['_id'], version_shas))

    version[FW_POINTERS_REFS_KEY] = list(version_shas) 
Example 7
Project: arctic   Author: man-group   File: _ndarray_store.py    License: GNU Lesser General Public License v2.1
def _fw_pointers_convert_append_to_write(previous_version):
    """
    This method decides whether to convert an append to a full write in order to avoid data integrity errors.
    """
    # Switching from ENABLED --> DISABLED/HYBRID when appending can cause integrity errors for subsequent reads:
    #   - Assume the last write was done with ENABLED (segments don't have parent references updated).
    #   - Subsequent appends were done in DISABLED/HYBRID (append segments have parent references).
    #   - Reading with DISABLED won't "see" the first write's segments.
    prev_fw_config = get_fwptr_config(previous_version)
    # Convert to a full-write, which force-updates all segments with parent references.
    return prev_fw_config is FwPointersCfg.ENABLED and ARCTIC_FORWARD_POINTERS_CFG is not FwPointersCfg.ENABLED 
Example 8
Project: arctic   Author: man-group   File: _ndarray_store.py    License: GNU Lesser General Public License v2.1
def check_written(collection, symbol, version):
        # Currently only called from methods which guarantee 'base_version_id' is not populated.
        # Make it nonetheless safe for the general case.
        parent_id = version_base_or_id(version)

        # Check all the chunks are in place
        if version.get(FW_POINTERS_CONFIG_KEY) == FwPointersCfg.DISABLED.name:
            spec = {'symbol': symbol, 'parent': parent_id}
        else:
            spec = {'symbol': symbol, 'sha': {'$in': version[FW_POINTERS_REFS_KEY]}}

        seen_chunks = mongo_count(collection, filter=spec)

        if seen_chunks != version['segment_count']:
            raise pymongo.errors.OperationFailure("Failed to write all the chunks. Saw %s expecting %s. "
                                                  "Parent: %s. Segments: %s" %
                                                  (seen_chunks, version['segment_count'], parent_id,
                                                   list(collection.find(spec, projection={'_id': 1, 'segment': 1}))))

        if version.get(FW_POINTERS_CONFIG_KEY) == FwPointersCfg.HYBRID.name and ARCTIC_FORWARD_POINTERS_RECONCILE:
            seen_chunks_reverse_pointers = mongo_count(collection, filter={'symbol': symbol, 'parent': parent_id})
            if seen_chunks != seen_chunks_reverse_pointers:
                raise pymongo.errors.OperationFailure("Failed to reconcile forward pointer chunks ({}). "
                                                      "Parent {}. "
                                                      "Reverse pointers segments #: {}. "
                                                      "Forward pointers segments #: {}.".format(
                    symbol, parent_id, seen_chunks_reverse_pointers, seen_chunks)) 
Example 9
Project: arctic   Author: man-group   File: version_store.py    License: GNU Lesser General Public License v2.1
def _insert_version(self, version):
        try:
            # Keep the mongo_retry here to avoid incrementing versions and polluting the DB with garbage
            # segments upon intermittent Mongo errors.
            # If, however, we get a DuplicateKeyError, suppress it and raise OperationFailure instead, so that
            # the method-scoped mongo_retry retries and creates a new version, overcoming the issue.
            mongo_retry(self._versions.insert_one)(version)
        except DuplicateKeyError as err:
            logger.exception(err)
            raise OperationFailure("A version with the same _id exists, force a clean retry") 
Example 10
Project: apk_api_key_extractor   Author: alessandrodd   File: mongodb_dump.py    License: Apache License 2.0
def dump_strings(self, entries):
        operations = []
        for entry in entries:
            operations.append(pymongo.UpdateOne({'_id': entry.value}, {'$inc': {'count': 1}}, upsert=True))
        if len(operations) > 0:
            try:
                self.strings_collection.bulk_write(operations, ordered=False)
            except pymongo.errors.BulkWriteError as bwe:
                print(bwe.details)
                # filter out "key too large to index" exceptions, which have error code 17280
                # we don't care about them
                filtered_errors = filter(lambda x: x['code'] != 17280, bwe.details['writeErrors'])
                if len(list(filtered_errors)) > 0:
                    raise 
Example 11
Project: sacred   Author: IDSIA   File: failing_mongo_mock.py    License: MIT License
def __init__(
        self,
        max_calls_before_failure=2,
        exception_to_raise=pymongo.errors.AutoReconnect,
        **kwargs
    ):
        super().__init__(**kwargs)
        self._max_calls_before_failure = max_calls_before_failure
        self.exception_to_raise = exception_to_raise
        self._exception_to_raise = exception_to_raise 
Example 12
Project: sacred   Author: IDSIA   File: failing_mongo_mock.py    License: MIT License
def insert_one(self, document, session=None):
        self._calls += 1
        if self._calls > self._max_calls_before_failure:
            raise pymongo.errors.ConnectionFailure
        else:
            return super().insert_one(document) 
Example 13
Project: sacred   Author: IDSIA   File: failing_mongo_mock.py    License: MIT License
def update_one(self, filter, update, upsert=False, session=None):
        self._calls += 1
        if self._calls > self._max_calls_before_failure:
            raise pymongo.errors.ConnectionFailure
        else:
            return super().update_one(filter, update, upsert) 
Example 14
Project: sacred   Author: IDSIA   File: test_queue_mongo_observer.py    License: MIT License
def mongo_obs(monkeypatch):
    client = ReconnectingMongoClient(
        max_calls_before_reconnect=10,
        max_calls_before_failure=1,
        exception_to_raise=pymongo.errors.ServerSelectionTimeoutError,
    )
    fs = gridfs.GridFS(client.sacred)
    monkeypatch.setattr(pymongo, "MongoClient", lambda *args, **kwargs: client)
    monkeypatch.setattr(gridfs, "GridFS", lambda _: fs)

    return QueuedMongoObserver(interval=0.01, retry_interval=0.01) 
Example 15
Project: sacred   Author: IDSIA   File: test_queue_mongo_observer.py    License: MIT License
def test_mongo_observer_failed_event_updates_run(mongo_obs, sample_run):
    mongo_obs.started_event(**sample_run)

    fail_trace = "lots of errors and\nso\non..."
    mongo_obs.failed_event(fail_time=T2, fail_trace=fail_trace)

    assert mongo_obs.runs.count_documents({}) == 1
    db_run = mongo_obs.runs.find_one()
    assert db_run["stop_time"] == T2
    assert db_run["status"] == "FAILED"
    assert db_run["fail_trace"] == fail_trace 
Example 16
Project: sacred   Author: IDSIA   File: mongo.py    License: MIT License
def save(self):
        import pymongo.errors

        try:
            self.runs.update_one(
                {"_id": self.run_entry["_id"]}, {"$set": self.run_entry}
            )
        except pymongo.errors.AutoReconnect:
            pass  # just wait for the next save
        except pymongo.errors.InvalidDocument:
            raise ObserverError(
                "Run contained an unserializable entry." "(most likely in the info)"
            ) 
Example 17
Project: sacred   Author: IDSIA   File: mongo.py    License: MIT License
def final_save(self, attempts):
        import pymongo.errors

        for i in range(attempts):
            try:
                self.runs.update_one(
                    {"_id": self.run_entry["_id"]},
                    {"$set": self.run_entry},
                    upsert=True,
                )
                return
            except pymongo.errors.AutoReconnect:
                if i < attempts - 1:
                    time.sleep(1)
            except pymongo.errors.ConnectionFailure:
                pass
            except pymongo.errors.InvalidDocument:
                self.run_entry = force_bson_encodeable(self.run_entry)
                print(
                    "Warning: Some of the entries of the run were not "
                    "BSON-serializable!\n They have been altered such that "
                    "they can be stored, but you should fix your experiment!"
                    "Most likely it is either the 'info' or the 'result'.",
                    file=sys.stderr,
                )

        os.makedirs(self.failure_dir, exist_ok=True)
        with NamedTemporaryFile(
            suffix=".pickle",
            delete=False,
            prefix="sacred_mongo_fail_{}_".format(self.run_entry["_id"]),
            dir=self.failure_dir,
        ) as f:
            pickle.dump(self.run_entry, f)
            print(
                "Warning: saving to MongoDB failed! "
                "Stored experiment entry in '{}'".format(f.name),
                file=sys.stderr,
            ) 
Example 18
Project: sacred   Author: IDSIA   File: mongo.py    License: MIT License
def save(self):
        import pymongo

        try:
            self.runs.update_one(
                {"_id": self.run_entry["_id"]}, {"$set": self.run_entry}
            )
        except pymongo.errors.InvalidDocument as exc:
            raise ObserverError(
                "Run contained an unserializable entry. (most likely in the info)"
            ) from exc 
Example 19
Project: sacred   Author: IDSIA   File: mongo.py    License: MIT License
def final_save(self, attempts):
        import pymongo

        try:
            self.runs.update_one(
                {"_id": self.run_entry["_id"]}, {"$set": self.run_entry}, upsert=True
            )
            return

        except pymongo.errors.InvalidDocument:
            self.run_entry = force_bson_encodeable(self.run_entry)
            print(
                "Warning: Some of the entries of the run were not "
                "BSON-serializable!\n They have been altered such that "
                "they can be stored, but you should fix your experiment!"
                "Most likely it is either the 'info' or the 'result'.",
                file=sys.stderr,
            )

            with NamedTemporaryFile(
                suffix=".pickle", delete=False, prefix="sacred_mongo_fail_"
            ) as f:
                pickle.dump(self.run_entry, f)
                print(
                    "Warning: saving to MongoDB failed! "
                    "Stored experiment entry in '{}'".format(f.name),
                    file=sys.stderr,
                )

        raise ObserverError("Warning: saving to MongoDB failed!") 
Example 20
Project: armory   Author: twosixlabs   File: base.py    License: MIT License
def _send_to_mongo(self, mongo_host: str, output: dict):
        """
        Send results to a Mongo database at mongo_host
        """
        client = pymongo.MongoClient(mongo_host, MONGO_PORT)
        db = client[MONGO_DATABASE]
        col = db[MONGO_COLLECTION]
        logger.info(
            f"Sending evaluation results to MongoDB instance {mongo_host}:{MONGO_PORT}"
        )
        try:
            col.insert_one(output)
        except pymongo.errors.PyMongoError as e:
            logger.error(f"Encountered error {e} sending evaluation results to MongoDB") 
Example 21
def test_ignore_errors_deprovision_mysql(self, mock_drop_user, mock_drop_database, mock_get_cursor, mock_consul):
        """
        Test that mysql is marked as deprovisioned when errors are ignored.
        """
        self.instance = OpenEdXInstanceFactory()
        self.instance.mysql_provisioned = True
        self.instance.deprovision_mysql(ignore_errors=True)
        self.assertFalse(self.instance.mysql_provisioned) 
Example 22
def test_ignore_errors_deprovision_mongo(self, mock_mongo_client_cls, *mock_methods):
        """
        Test that mongo is marked as deprovisioned when errors are ignored.
        """
        self.instance = OpenEdXInstanceFactory()
        self.instance.mongo_provisioned = True
        self.instance.deprovision_mongo(ignore_errors=True)
        self.assertFalse(self.instance.mongo_provisioned) 
Example 23
def test_ignore_errors_deprovision_rabbitmq(self, mock_rabbitmq_request, mock_consul):
        """
        Test that rabbitmq is marked as deprovisioned when errors are ignored.
        """
        self.instance.rabbitmq_provisioned = True
        self.instance.deprovision_rabbitmq(ignore_errors=True)
        self.assertFalse(self.instance.rabbitmq_provisioned) 
Example 24
Project: ParadoxTrading   Author: ppaanngggg   File: ReceiveDailyAbstract.py    License: MIT License
def storeRaw(self, _tradingday: str, _raw_data: typing.Any):
        """
        store raw data into mongodb

        :param _tradingday: which day to store
        :param _raw_data: raw data from fetchRaw(...)
        :return: None
        """
        logging.info('{} storeRaw: {}'.format(
            self.COLLECTION_NAME, _tradingday
        ))
        try:
            self.mongo_coll.insert_one({
                'TradingDay': _tradingday,
                'Raw': _raw_data,
            })
        except pymongo.errors.DuplicateKeyError as e:
            logging.warning(e)
            if self.replace_all:
                self.mongo_coll.replace_one(
                    {'TradingDay': _tradingday},
                    {'TradingDay': _tradingday, 'Raw': _raw_data}
                )
            else:
                tmp = input('Replace existing data?(y/n/a): ')
                if tmp == 'y' or tmp == 'a':
                    self.mongo_coll.replace_one(
                        {'TradingDay': _tradingday},
                        {'TradingDay': _tradingday, 'Raw': _raw_data}
                    )
                    if tmp == 'a':
                        self.replace_all = True 
Example 25
Project: PyChemia   Author: MaterialsDiscovery   File: db.py    License: MIT License
def has_connection(host='localhost'):
    if not HAS_PYMONGO:
        return False
    import pymongo
    try:
        maxSevSelDelay = 2  # server selection timeout, in milliseconds
        client = pymongo.MongoClient(host, serverSelectionTimeoutMS=maxSevSelDelay)
        client.server_info()  # force a round trip to the server, since the
        # connect=True parameter of MongoClient does not verify connectivity here
        return True
    except pymongo.errors.ServerSelectionTimeoutError as err:
        # do whatever you need
        print(err)
        return False 
Example 26
Project: Fox-V3   Author: bobloy   File: mongodb.py    License: GNU Affero General Public License v3.0
def __init__(self, **kwargs):
        super(MongoDatabaseAdapter, self).__init__(**kwargs)
        from pymongo import MongoClient
        from pymongo.errors import OperationFailure

        self.database_name = self.kwargs.get(
            'database', 'chatterbot-database'
        )
        self.database_uri = self.kwargs.get(
            'database_uri', 'mongodb://localhost:27017/'
        )

        # Use the default host and port
        self.client = MongoClient(self.database_uri)

        # Increase the sort buffer to 42M if possible
        try:
            self.client.admin.command({'setParameter': 1, 'internalQueryExecMaxBlockingSortBytes': 44040192})
        except OperationFailure:
            pass

        # Specify the name of the database
        self.database = self.client[self.database_name]

        # The mongo collection of statement documents
        self.statements = self.database['statements']

        # The mongo collection of conversation documents
        self.conversations = self.database['conversations']

        # Set a requirement for the text attribute to be unique
        self.statements.create_index('text', unique=True)

        self.base_query = Query() 
Example 27
Project: Fox-V3   Author: bobloy   File: mongodb.py    License: GNU Affero General Public License v3.0
def update(self, statement):
        from pymongo import UpdateOne
        from pymongo.errors import BulkWriteError

        data = statement.serialize()

        operations = []

        update_operation = UpdateOne(
            {'text': statement.text},
            {'$set': data},
            upsert=True
        )
        operations.append(update_operation)

        # Make sure that an entry for each response is saved
        for response_dict in data.get('in_response_to', []):
            response_text = response_dict.get('text')

            # $setOnInsert does nothing if the document is not created
            update_operation = UpdateOne(
                {'text': response_text},
                {'$set': response_dict},
                upsert=True
            )
            operations.append(update_operation)

        try:
            self.statements.bulk_write(operations, ordered=False)
        except BulkWriteError as bwe:
            # Log the details of a bulk write error
            self.logger.error(str(bwe.details))

        return statement 
Example 28
Project: pastepwn   Author: d-Rickyy-b   File: mongodb.py    License: MIT License
def store(self, paste):
        self.logger.debug("Storing paste {0}".format(paste.key))

        try:
            self._insert_data(paste.to_dict())
        except pymongo.errors.DuplicateKeyError:
            self.logger.debug("Duplicate key '{0}' - Not storing paste".format(paste.key)) 
Example 29
Project: allura   Author: apache   File: auth.py    License: Apache License 2.0
def upsert(cls):
        r = cls.query.get()
        if r is not None:
            return r
        try:
            r = cls(_id=0)
            session(r).flush(r)
            return r
        except pymongo.errors.DuplicateKeyError:  # pragma no cover
            session(r).flush(r)
            r = cls.query.get()
            return r 
Example 30
Project: allura   Author: apache   File: auth.py    License: Apache License 2.0
def upsert(cls, username):
        u = cls.query.get(username=username)
        if u is not None:
            return u
        try:
            u = cls(username=username)
            session(u).flush(u)
        except pymongo.errors.DuplicateKeyError:
            session(u).expunge(u)
            u = cls.query.get(username=username)
        return u