Python pymongo.ASCENDING Examples
The following are 30 code examples showing how to use pymongo.ASCENDING. They are extracted from open source projects; the project, author, file, and license are noted above each example.
You may also want to check out all available functions and classes of the pymongo module.
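Before the project examples, here is a minimal standalone sketch of the two common roles of pymongo.ASCENDING: as an index direction passed to create_index and as a sort direction passed to sort. The connection URI, database name, and collection name (localhost, demo, trades) are illustrative placeholders, not taken from any of the projects below.

import pymongo
from pymongo import MongoClient, ASCENDING, DESCENDING

# Placeholder connection details -- adjust to your own deployment.
client = MongoClient("mongodb://localhost:27017/")
collection = client["demo"]["trades"]

# ASCENDING as an index direction: a compound unique index on symbol + timestamp.
collection.create_index([("symbol", ASCENDING), ("timestamp", ASCENDING)], unique=True)

# ASCENDING as a sort direction: oldest documents for a symbol first.
for doc in collection.find({"symbol": "AAPL"}).sort("timestamp", ASCENDING):
    print(doc["timestamp"], doc.get("price"))

# The constants are plain integers exposed by the pymongo module.
assert pymongo.ASCENDING == 1 and pymongo.DESCENDING == -1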
Example 1
Project: rate.sx Author: chubin File: mng.py License: MIT License | 6 votes |
def _get_collection(self, collection_name=None):
    if collection_name:
        if collection_name in self.allowed_collections:
            coins = self.client.ratesx[collection_name]
        else:
            raise KeyError("Not allowed collection name: %s" % collection_name)
    else:
        coins = self.coins

    if collection_name \
            and collection_name not in self.client.ratesx.collection_names():
        if collection_name.startswith('coins_'):
            coins.create_index([('symbol', ASCENDING)], unique=False)
            coins.create_index([('timestamp', ASCENDING)], unique=False)
            coins.create_index([('symbol', ASCENDING), ('timestamp', ASCENDING)], unique=True)
        if collection_name.startswith('currencies_'):
            coins.create_index([('timestamp', ASCENDING)], unique=False)

    return coins
Example 2
Project: arctic Author: man-group File: _ndarray_store.py License: GNU Lesser General Public License v2.1 | 6 votes |
def _ensure_index(collection):
    try:
        collection.create_index([('symbol', pymongo.HASHED)], background=True)
        # We keep it only for its uniqueness
        collection.create_index([('symbol', pymongo.ASCENDING),
                                 ('sha', pymongo.ASCENDING)], unique=True, background=True)
        # TODO: When/if we remove the segments->versions pointers implementation and keep only the forward pointers,
        #       we can remove the 'parent' from the index.
        collection.create_index([('symbol', pymongo.ASCENDING),
                                 ('parent', pymongo.ASCENDING),
                                 ('segment', pymongo.ASCENDING)], unique=True, background=True)
        # Used for efficient SHA-based read queries that have index ranges
        collection.create_index([('symbol', pymongo.ASCENDING),
                                 ('sha', pymongo.ASCENDING),
                                 ('segment', pymongo.ASCENDING)], unique=True, background=True)
    except OperationFailure as e:
        if "can't use unique indexes" in str(e):
            return
        raise
Example 3
Project: arctic Author: man-group File: test_chunkstore.py License: GNU Lesser General Public License v2.1 | 6 votes |
def test_rename(chunkstore_lib):
    df = create_test_data(size=10, cols=5)
    chunkstore_lib.write('test', df, chunk_size='D')
    assert_frame_equal(chunkstore_lib.read('test'), df)
    chunkstore_lib.rename('test', 'new_name')
    assert_frame_equal(chunkstore_lib.read('new_name'), df)

    with pytest.raises(Exception) as e:
        chunkstore_lib.rename('new_name', 'new_name')
    assert('already exists' in str(e.value))

    with pytest.raises(NoDataFoundException) as e:
        chunkstore_lib.rename('doesnt_exist', 'temp')
    assert('No data found for doesnt_exist' in str(e.value))

    assert('test' not in chunkstore_lib.list_symbols())

    # read out all chunks that have symbol set to 'test'. List should be empty
    chunks = []
    for x in chunkstore_lib._collection.find({SYMBOL: 'test'}, sort=[(START, pymongo.ASCENDING)],):
        chunks.append(x)

    assert(len(chunks) == 0)
Example 4
Project: aiohttp_admin Author: aio-libs File: mongo.py License: Apache License 2.0 | 6 votes |
async def list(self, request):
    await require(request, Permissions.view)
    possible_fields = [k.name for k in self._schema.keys]
    q = validate_query(request.query, possible_fields)
    paging = calc_pagination(q, self._primary_key)

    filters = q.get('_filters')
    query = {}
    if filters:
        query = create_filter(filters, self._schema)

    sort_direction = ASCENDING if paging.sort_dir == ASC else DESCENDING
    cursor = (self._collection.find(query)
              .skip(paging.offset)
              .limit(paging.limit)
              .sort(paging.sort_field, sort_direction))

    entities = await cursor.to_list(paging.limit)
    count = await self._collection.count_documents(query)
    headers = {'X-Total-Count': str(count)}
    return json_response(entities, headers=headers)
Example 5
Project: counterblock Author: CounterpartyXCP File: database.py License: MIT License | 6 votes |
def get_block_indexes_for_dates(start_dt=None, end_dt=None):
    """Returns a 2 tuple (start_block, end_block) result for the block range that encompasses
    the given start_date and end_date unix timestamps"""
    if start_dt is None:
        start_block_index = config.BLOCK_FIRST
    else:
        start_block = config.mongo_db.processed_blocks.find_one(
            {"block_time": {"$lte": start_dt}}, sort=[("block_time", pymongo.DESCENDING)])
        start_block_index = config.BLOCK_FIRST if not start_block else start_block['block_index']

    if end_dt is None:
        end_block_index = config.state['my_latest_block']['block_index']
    else:
        end_block = config.mongo_db.processed_blocks.find_one(
            {"block_time": {"$gte": end_dt}}, sort=[("block_time", pymongo.ASCENDING)])
        if not end_block:
            end_block_index = config.mongo_db.processed_blocks.find_one(
                sort=[("block_index", pymongo.DESCENDING)])['block_index']
        else:
            end_block_index = end_block['block_index']

    return (start_block_index, end_block_index)
Example 6
Project: DevilYuan Author: moyuanz File: DyStockMongoDbEngine.py License: MIT License | 6 votes |
def updateTradeDays(self, dates):
    collection = self._getTradeDayTableCollection()

    # create index
    collection.create_index([('datetime', pymongo.ASCENDING)], unique=True)

    # update into DB
    try:
        for date in dates:
            flt = {'datetime': date['datetime']}
            result = collection.update_one(flt, {'$set': {'tradeDay': date['tradeDay']}}, upsert=True)

            if not (result.acknowledged and (result.matched_count == 1 or result.upserted_id is not None)):
                self._info.print("更新交易日数据到MongoDB失败: date={}, raw_result={}".format(date, result.raw_result),
                                 DyLogData.error)
                return False
    except Exception as ex:
        self._info.print("更新交易日数据到MongoDB异常: {}".format(str(ex) + ', ' + str(ex.details)),
                         DyLogData.error)
        return False

    return True
Example 7
Project: DevilYuan Author: moyuanz File: DyStockMongoDbEngine.py License: MIT License | 6 votes |
def updateStockCodes(self, codes):
    collection = self._getCodeTableCollection()

    # create index
    collection.create_index([('code', pymongo.ASCENDING)], unique=True)

    # update into DB
    try:
        for code in codes:
            flt = {'code': code['code']}
            collection.update_one(flt, {'$set': {'name': code['name']}}, upsert=True)
    except Exception as ex:
        self._info.print("更新股票代码数据到MongoDB异常:{0}".format(str(ex) + ', ' + str(ex.details)),
                         DyLogData.error)
        return False

    return True
Example 8
Project: DevilYuan Author: moyuanz File: DyStockMongoDbEngine.py License: MIT License | 6 votes |
def getStockMarketDate(self, code, name=None):
    """
        Get the listing (IPO) date of a stock.
        Because of limits in the stored data, this may actually be the earliest record
        available for the stock in the database.
    """
    collection = self._getStockDaysDb()[code]

    flt = {'datetime': {'$lt': datetime.now()}}

    try:
        cursor = collection.find(flt).sort('datetime', pymongo.ASCENDING).limit(1)
    except Exception as ex:
        self._info.print("MongoDB Exception({0}): @getStockMarketDate{1}:{2}, 日线数据".format(str(ex) + ', ' + str(ex.details), code, name),
                         DyLogData.error)
        return None

    for d in cursor:
        return d['datetime'].strftime('%Y-%m-%d')

    return None
Example 9
Project: DevilYuan Author: moyuanz File: DyStockMongoDbEngine.py License: MIT License | 6 votes |
def updateSectorStockCodes(self, sectorCode, date, codes):
    collection = self._client[self.sectorCodeDbMap[sectorCode]][date]

    # create index
    collection.create_index([('code', pymongo.ASCENDING)], unique=True)

    # update into DB
    try:
        for code in codes:
            flt = {'code': code['code']}
            collection.update_one(flt, {'$set': {'name': code['name']}}, upsert=True)
    except Exception as ex:
        self._info.print("更新[{0}]股票代码数据[{1}]到MongoDB异常:{2}".format(DyStockCommon.sectors[sectorCode], date, str(ex) + ', ' + str(ex.details)),
                         DyLogData.error)
        return False

    return True
Example 10
Project: QUANTAXIS Author: QUANTAXIS File: save_position.py License: MIT License | 6 votes |
def save_position(message, collection=DATABASE.positions):
    """save account

    Arguments:
        message {[type]} -- [description]

    Keyword Arguments:
        collection {[type]} -- [description] (default: {DATABASE})
    """
    try:
        collection.create_index(
            [("account_cookie", ASCENDING),
             ("portfolio_cookie", ASCENDING),
             ("user_cookie", ASCENDING),
             ("position_id", ASCENDING)],
            unique=True)
    except:
        pass

    collection.update(
        {'account_cookie': message['account_cookie'],
         'position_id': message['position_id'],
         'portfolio_cookie': message['portfolio_cookie'],
         'user_cookie': message['user_cookie']},
        {'$set': message},
        upsert=True
    )
Example 11
Project: QUANTAXIS Author: QUANTAXIS File: save_account.py License: MIT License | 6 votes |
def save_account(message, collection=DATABASE.account):
    """save account

    Arguments:
        message {[type]} -- [description]

    Keyword Arguments:
        collection {[type]} -- [description] (default: {DATABASE})
    """
    try:
        collection.create_index(
            [("account_cookie", ASCENDING),
             ("user_cookie", ASCENDING),
             ("portfolio_cookie", ASCENDING)],
            unique=True)
    except:
        pass

    collection.update(
        {'account_cookie': message['account_cookie'],
         'portfolio_cookie': message['portfolio_cookie'],
         'user_cookie': message['user_cookie']},
        {'$set': message},
        upsert=True
    )
Example 12
Project: og-miner Author: opendns File: mongo.py License: MIT License | 6 votes |
def extract(self):
    # TODO : Should be an exporter plugin
    graph = {
        'meta': {},        # self.__meta,
        'properties': {}   # self.__properties
    }

    graph['nodes'] = list()
    for v in self.__vertices.find().sort('id', pymongo.ASCENDING):
        v.pop("_id")  # Remove MongoDB document ID
        graph['nodes'].append(v)

    graph['edges'] = list()
    for e in self.__edges.find().sort("src", pymongo.ASCENDING):
        e.pop("_id")  # Remove MongoDB document ID
        graph['edges'].append(e)

    graph['tokens'] = list()
    for t in self.__tokens.find().sort('id', pymongo.ASCENDING):
        t.pop("_id")  # Remove MongoDB document ID
        t['id'] = str(t['id'])
        t['ts'] = time.mktime(t['ts'].timetuple())
        graph['tokens'].append(t)

    return graph
Example 13
Project: NowTrade Author: edouardpoitras File: data_connection.py License: MIT License | 6 votes |
def get_data(self, symbol, start, end, symbol_in_column=True):
    """
    Returns a dataframe of the symbol data requested.
    """
    from pymongo import ASCENDING
    symbol = str(symbol).upper()
    results = self.database[symbol].find(
        {'_id': {'$gte': start, '$lte': end}}).sort('datetime', ASCENDING)
    ret = pd.DataFrame.from_dict(list(results))
    if len(ret) < 1:
        raise NoDataException()
    ret.rename(columns={'open': 'Open',
                        'high': 'High',
                        'low': 'Low',
                        'close': 'Close',
                        'volume': 'Volume',
                        'adj_close': 'Adj Close',
                        '_id': 'Date'},
               inplace=True)
    ret = ret.set_index('Date')
    if symbol_in_column:
        ret.rename(columns=lambda name: '%s_%s' % (symbol, name), inplace=True)
    return ret
Example 14
Project: arctic Author: man-group File: tickstore.py License: GNU Lesser General Public License v2.1 | 5 votes |
def _ensure_index(self):
    collection = self._collection
    collection.create_index([(SYMBOL, pymongo.ASCENDING), (START, pymongo.ASCENDING)], background=True)
    collection.create_index([(START, pymongo.ASCENDING)], background=True)
    self._metadata.create_index([(SYMBOL, pymongo.ASCENDING)], background=True, unique=True)
Example 15
Project: arctic Author: man-group File: tickstore.py License: GNU Lesser General Public License v2.1 | 5 votes |
def min_date(self, symbol):
    """
    Return the minimum datetime stored for a particular symbol

    Parameters
    ----------
    symbol : `str`
        symbol name for the item
    """
    res = self._collection.find_one({SYMBOL: symbol},
                                    projection={ID: 0, START: 1},
                                    sort=[(START, pymongo.ASCENDING)])
    if res is None:
        raise NoDataFoundException("No Data found for {}".format(symbol))
    return utc_dt_to_local_dt(res[START])
Example 16
Project: arctic Author: man-group File: toplevel.py License: GNU Lesser General Public License v2.1 | 5 votes |
def _ensure_index(self):
    collection = self._collection
    collection.create_index([('start', pymongo.ASCENDING)], background=True)
Example 17
Project: arctic Author: man-group File: toplevel.py License: GNU Lesser General Public License v2.1 | 5 votes |
def _get_library_metadata(self, date_range):
    """
    Retrieve the libraries for the given date range, the assumption is that the date ranges
    do not overlap and they are CLOSED_CLOSED.

    At the moment the date range is mandatory
    """
    if date_range is None:
        raise Exception("A date range must be provided")
    if not (date_range.start and date_range.end):
        raise Exception("The date range {0} must contain a start and end date".format(date_range))

    start = date_range.start if date_range.start.tzinfo is not None else date_range.start.replace(tzinfo=mktz())
    end = date_range.end if date_range.end.tzinfo is not None else date_range.end.replace(tzinfo=mktz())

    query = {'$or': [{'start': {'$lte': start}, 'end': {'$gte': start}},
                     {'start': {'$gte': start}, 'end': {'$lte': end}},
                     {'start': {'$lte': end}, 'end': {'$gte': end}}]}

    cursor = self._collection.find(query,
                                   projection={'library_name': 1, 'start': 1, 'end': 1},
                                   sort=[('start', pymongo.ASCENDING)])

    results = []
    for res in cursor:
        start = res['start']
        if date_range.start.tzinfo is not None and start.tzinfo is None:
            start = start.replace(tzinfo=mktz("UTC")).astimezone(tz=date_range.start.tzinfo)
        end = res['end']
        if date_range.end.tzinfo is not None and end.tzinfo is None:
            end = end.replace(tzinfo=mktz("UTC")).astimezone(tz=date_range.end.tzinfo)
        results.append(TickStoreLibrary(res['library_name'], DateRange(start, end, CLOSED_CLOSED)))

    return results
Example 18
Project: arctic Author: man-group File: chunkstore.py License: GNU Lesser General Public License v2.1 | 5 votes |
def _ensure_index(self):
    self._symbols.create_index([(SYMBOL, pymongo.ASCENDING)],
                               unique=True, background=True)

    self._collection.create_index([(SYMBOL, pymongo.HASHED)],
                                  background=True)
    self._collection.create_index([(SYMBOL, pymongo.ASCENDING),
                                   (SHA, pymongo.ASCENDING)],
                                  unique=True, background=True)
    self._collection.create_index([(SYMBOL, pymongo.ASCENDING),
                                   (START, pymongo.ASCENDING),
                                   (END, pymongo.ASCENDING),
                                   (SEGMENT, pymongo.ASCENDING)],
                                  unique=True, background=True)
    self._collection.create_index([(SYMBOL, pymongo.ASCENDING),
                                   (START, pymongo.ASCENDING),
                                   (SEGMENT, pymongo.ASCENDING)],
                                  unique=True, background=True)
    self._collection.create_index([(SEGMENT, pymongo.ASCENDING)],
                                  unique=False, background=True)

    self._mdata.create_index([(SYMBOL, pymongo.ASCENDING),
                              (START, pymongo.ASCENDING),
                              (END, pymongo.ASCENDING)],
                             unique=True, background=True)
Example 19
Project: arctic Author: man-group File: chunkstore.py License: GNU Lesser General Public License v2.1 | 5 votes |
def get_chunk_ranges(self, symbol, chunk_range=None, reverse=False):
    """
    Returns a generator of (Start, End) tuples for each chunk in the symbol

    Parameters
    ----------
    symbol: str
        the symbol for the given item in the DB
    chunk_range: None, or a range object
        allows you to subset the chunks by range
    reverse: boolean
        return the chunk ranges in reverse order

    Returns
    -------
    generator
    """
    sym = self._get_symbol_info(symbol)
    if not sym:
        raise NoDataFoundException("Symbol does not exist.")
    c = CHUNKER_MAP[sym[CHUNKER]]

    # all symbols have a segment 0
    spec = {SYMBOL: symbol, SEGMENT: 0}
    if chunk_range is not None:
        spec.update(CHUNKER_MAP[sym[CHUNKER]].to_mongo(chunk_range))

    for x in self._collection.find(spec,
                                   projection=[START, END],
                                   sort=[(START, pymongo.ASCENDING if not reverse else pymongo.DESCENDING)]):
        yield (c.chunk_to_str(x[START]), c.chunk_to_str(x[END]))
Example 20
Project: arctic Author: man-group File: metadata_store.py License: GNU Lesser General Public License v2.1 | 5 votes |
def _ensure_index(self):
    self.create_index([('symbol', pymongo.ASCENDING), ('start_time', pymongo.DESCENDING)],
                      unique=True, background=True)
Example 21
Project: arctic Author: man-group File: metadata_store.py License: GNU Lesser General Public License v2.1 | 5 votes |
def prepend(self, symbol, metadata, start_time=None):
    """
    Prepend a metadata entry for `symbol`

    Parameters
    ----------
    symbol : `str`
        symbol name for the item
    metadata : `dict`
        to be persisted
    start_time : `datetime.datetime`
        when metadata becomes effective
        Default: datetime.datetime.min
    """
    if metadata is None:
        return
    if start_time is None:
        start_time = dt.min
    old_metadata = self.find_one({'symbol': symbol}, sort=[('start_time', pymongo.ASCENDING)])
    if old_metadata is not None:
        if old_metadata['start_time'] <= start_time:
            raise ValueError('start_time={} is later than the first metadata @{}'.format(start_time,
                                                                                         old_metadata['start_time']))
        if old_metadata['metadata'] == metadata:
            self.find_one_and_update({'symbol': symbol}, {'$set': {'start_time': start_time}},
                                     sort=[('start_time', pymongo.ASCENDING)])
            old_metadata['start_time'] = start_time
            return old_metadata
        end_time = old_metadata.get('start_time')
    else:
        end_time = None

    document = {'_id': bson.ObjectId(), 'symbol': symbol, 'metadata': metadata, 'start_time': start_time}
    if end_time is not None:
        document['end_time'] = end_time
    mongo_retry(self.insert_one)(document)

    logger.debug('Finished writing metadata for %s', symbol)
    return document
Example 22
Project: arctic Author: man-group File: version_store.py License: GNU Lesser General Public License v2.1 | 5 votes |
def _ensure_index(self):
    collection = self._collection
    collection.snapshots.create_index([('name', pymongo.ASCENDING)], unique=True, background=True)
    collection.versions.create_index([('symbol', pymongo.ASCENDING), ('_id', pymongo.DESCENDING)],
                                     background=True)
    collection.versions.create_index([('symbol', pymongo.ASCENDING), ('version', pymongo.DESCENDING)],
                                     unique=True, background=True)
    collection.version_nums.create_index('symbol', unique=True, background=True)
    for th in _TYPE_HANDLERS:
        th._ensure_index(collection)
Example 23
Project: arctic Author: man-group File: test_chunkstore.py License: GNU Lesser General Public License v2.1 | 5 votes |
def test_quarterly_data(chunkstore_lib):
    df = DataFrame(data={'data': np.random.randint(0, 100, size=366)},
                   index=pd.date_range('2016-01-01', '2016-12-31'))
    df.index.name = 'date'

    chunkstore_lib.write('quarterly', df, chunk_size='Q')
    assert_frame_equal(df, chunkstore_lib.read('quarterly'))
    assert(len(chunkstore_lib.read('quarterly', chunk_range=(None, '2016-01-05'))) == 5)
    count = 0
    for _ in chunkstore_lib._collection.find({SYMBOL: 'quarterly'}, sort=[(START, pymongo.ASCENDING)],):
        count += 1

    assert(count == 4)
Example 24
Project: recruit Author: Frank-qlu File: helpers.py License: Apache License 2.0 | 5 votes |
def _index_list(key_or_list, direction=None):
    """Helper to generate a list of (key, direction) pairs.

    Takes such a list, or a single key, or a single key and direction.
    """
    if direction is not None:
        return [(key_or_list, direction)]
    else:
        if isinstance(key_or_list, str):
            return [(key_or_list, pymongo.ASCENDING)]
        elif not isinstance(key_or_list, (list, tuple)):
            raise TypeError("if no direction is specified, "
                            "key_or_list must be an instance of list")
        return key_or_list
Example 25
Project: recruit Author: Frank-qlu File: grid_file.py License: Apache License 2.0 | 5 votes |
def _ensure_index(self):
    if not object.__getattribute__(self, "_ensured_index"):
        self._coll.chunks.ensure_index(
            [("files_id", ASCENDING), ("n", ASCENDING)],
            unique=True)
        object.__setattr__(self, "_ensured_index", True)
Example 26
Project: avrae Author: avrae File: new_character.py License: GNU General Public License v3.0 | 5 votes |
async def from_db(mdb):
    import pymongo
    from bson import ObjectId

    coll_names = await mdb.list_collection_names()
    if "old_characters" not in coll_names:
        print("Renaming characters to old_characters...")
        await mdb.characters.rename("old_characters")
    else:
        print("Dropping characters_bak and making backup...")
        if "characters_bak" in coll_names:
            await mdb.characters_bak.drop()
        await mdb.characters.rename("characters_bak")

    num_old_chars = await mdb.old_characters.count_documents({})
    print(f"Migrating {num_old_chars} characters...")

    async for old_char in mdb.old_characters.find({}):
        new_char = migrate(old_char).to_dict()
        new_char['_id'] = ObjectId(old_char['_id'])
        try:
            await mdb.characters.insert_one(new_char)
        except:
            pass

    print("Creating compound index on owner|upstream...")
    await mdb.characters.create_index([("owner", pymongo.ASCENDING), ("upstream", pymongo.ASCENDING)],
                                      unique=True)

    num_chars = await mdb.characters.count_documents({})
    print(f"Done migrating {num_chars}/{num_old_chars} characters.")
    if num_chars == num_old_chars:
        print("It's probably safe to drop the collections old_characters and characters_bak now.")
Example 27
Project: avrae Author: avrae File: new_bestiary.py License: GNU General Public License v3.0 | 5 votes |
async def from_db(mdb):
    import pymongo
    from bson import ObjectId

    coll_names = await mdb.list_collection_names()
    if "old_bestiaries" not in coll_names:
        print("Renaming bestiaries to old_bestiaries...")
        await mdb.bestiaries.rename("old_bestiaries")
    else:
        print("Dropping bestiaries_bak and making backup...")
        if "bestiaries_bak" in coll_names:
            await mdb.bestiaries_bak.drop()
        await mdb.bestiaries.rename("bestiaries_bak")

    num_old_bestiaries = await mdb.old_bestiaries.count_documents({})
    print(f"Migrating {num_old_bestiaries} bestiaries...")

    async for old_bestiary in mdb.old_bestiaries.find({}):
        new_char = migrate(old_bestiary)
        new_char['_id'] = ObjectId(old_bestiary['_id'])
        await mdb.bestiaries.insert_one(new_char)

    print("Creating compound index on owner|critterdb_id...")
    await mdb.bestiaries.create_index([("owner", pymongo.ASCENDING), ("critterdb_id", pymongo.ASCENDING)],
                                      unique=True)

    num_bestiaries = await mdb.old_bestiaries.count_documents({})
    print(f"Done migrating {num_bestiaries}/{num_old_bestiaries} bestiaries.")
    if num_bestiaries == num_old_bestiaries:
        print("It's probably safe to drop the collections old_bestiaries and bestiaries_bak now.")
Example 28
Project: avrae Author: avrae File: customization.py License: GNU General Public License v3.0 | 5 votes |
async def migrate_aliases(rdb, mdb):
    num_aliases = 0
    num_users = 0
    aliases = rdb.jget("cmd_aliases")
    for user, useraliases in aliases.items():
        num_users += 1
        print(f"Migrating aliases for {user}...")
        for name, commands in useraliases.items():
            num_aliases += 1
            print(f"Migrating alias {name}...")
            data = {
                "owner": user,
                "name": name,
                "commands": commands
            }
            print("Inserting into aliases...")
            result = await mdb.aliases.insert_one(data)
            print(result.inserted_id)
        print()

    print("Creating compound index on owner|name...")
    await mdb.aliases.create_index([("owner", pymongo.ASCENDING), ("name", pymongo.ASCENDING)], unique=True)

    print(f"Done! Migrated {num_aliases} aliases for {num_users} users.\n\n")
Example 29
Project: avrae Author: avrae File: customization.py License: GNU General Public License v3.0 | 5 votes |
async def migrate_snippets(rdb, mdb):
    num_snippets = 0
    num_users = 0
    snippets = rdb.jget("damage_snippets")
    for user, usersnippets in snippets.items():
        num_users += 1
        print(f"Migrating snippets for {user}...")
        for name, snippet in usersnippets.items():
            num_snippets += 1
            print(f"Migrating snippet {name}...")
            data = {
                "owner": user,
                "name": name,
                "snippet": snippet
            }
            print("Inserting into snippets...")
            result = await mdb.snippets.insert_one(data)
            print(result.inserted_id)
        print()

    print("Creating compound index on owner|name...")
    await mdb.snippets.create_index([("owner", pymongo.ASCENDING), ("name", pymongo.ASCENDING)], unique=True)

    print(f"Done! Migrated {num_snippets} snippets for {num_users} users.\n\n")
Example 30
Project: avrae Author: avrae File: customization.py License: GNU General Public License v3.0 | 5 votes |
async def migrate_servsnippets(rdb, mdb):
    num_snippets = 0
    num_servers = 0
    snippets = rdb.jget("server_snippets", {})
    for server, servsnippets in snippets.items():
        num_servers += 1
        print(f"Migrating snippets for {server}...")
        for name, snippet in servsnippets.items():
            num_snippets += 1
            print(f"Migrating snippet {name}...")
            data = {
                "server": server,
                "name": name,
                "snippet": snippet
            }
            print("Inserting into servsnippets...")
            result = await mdb.servsnippets.insert_one(data)
            print(result.inserted_id)
        print()

    print("Creating compound index on server|name...")
    await mdb.servsnippets.create_index([("server", pymongo.ASCENDING), ("name", pymongo.ASCENDING)], unique=True)

    print(f"Done! Migrated {num_snippets} snippets for {num_servers} servers.\n\n")