Java Code Examples for com.mongodb.BasicDBObject#put()

The following examples show how to use com.mongodb.BasicDBObject#put(). Each example is taken from an open-source project; the source file, project, and license are noted above it.
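Before the project examples, here is a minimal, self-contained sketch of the pattern most of them share: put() adds a key/value pair to a BasicDBObject, which then serves either as a query filter or as a document to write with the legacy driver API. The class name, collection handle, and field names (qty, tags) below are illustrative assumptions, not taken from any of the projects that follow.

import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;
import com.mongodb.DBCollection;
import com.mongodb.DBCursor;
import com.mongodb.DBObject;

public class BasicDBObjectPutSketch {

  // Assumes 'collection' is an already-initialized DBCollection (legacy driver API).
  public static void findMatching(DBCollection collection) {
    // Build the filter { qty: { $gt: 10 }, tags: { $in: ["red", "blue"] } } with put().
    BasicDBObject query = new BasicDBObject();
    query.put("qty", new BasicDBObject("$gt", 10));

    BasicDBList tags = new BasicDBList();
    tags.add("red");
    tags.add("blue");
    query.put("tags", new BasicDBObject("$in", tags));

    DBCursor cur = collection.find(query);
    try {
      while (cur.hasNext()) {
        DBObject doc = cur.next();
        System.out.println(doc);
      }
    } finally {
      cur.close();
    }
  }
}
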
Example 1
Source File: MongoDB.java    From act with GNU General Public License v3.0
private List<DBObject> keywordInCascade(String in_field, String keyword) {
  List<DBObject> cascades = new ArrayList<DBObject>();
  BasicDBObject query = new BasicDBObject();
  query.put(in_field, keyword);

  BasicDBObject keys = new BasicDBObject(); // empty projection: return all fields

  DBCursor cur = this.dbCascades.find(query, keys);
  while (cur.hasNext()) {
    DBObject o = cur.next();
    cascades.add( convertDBObjectToCascade(o) );
  }
  cur.close();

  return cascades;
}
 
Example 2
Source File: EventParserTest.java    From ingestion with Apache License 2.0
private DBObject buildExpectedObject() {
    DBObject dbObject = new BasicDBObject();
    BasicDBObject object2 = new BasicDBObject();
    BasicDBList locList = new BasicDBList();
    locList.add(111.11);
    locList.add(222.22);
    dbObject.put("field1", "Point1");
    dbObject.put("field2", locList);
    dbObject.put("field3", object2);
    dbObject.put("field6", "Point3");

    object2.put("field4", "Point2");
    object2.put("field5", locList);

    return dbObject;
}
 
Example 3
Source File: MongoDB.java    From act with GNU General Public License v3.0
public List<Seq> getSeqWithSARConstraints() {
  List<Seq> seqs = new ArrayList<Seq>();
  BasicDBObject query = new BasicDBObject();
  query.put("sar_constraints", new BasicDBObject("$exists", true)); // only documents that have a sar_constraints field

  BasicDBObject keys = new BasicDBObject();

  DBCursor cur = this.dbSeq.find(query, keys);
  while (cur.hasNext()) {
    DBObject o = cur.next();
    seqs.add( convertDBObjectToSeq(o) );
  }
  cur.close();

  return seqs;
}
 
Example 4
Source File: BatchJobMongoDA.java    From secure-data-service with Apache License 2.0
@Override
public boolean createFileLatch(String jobId, List<String> fileEntries) {
    try {
        final BasicDBObject latchObject = new BasicDBObject();
        latchObject.put(BATCHJOBID_FIELDNAME, jobId);
        latchObject.put(FILES, fileEntries);

        RetryMongoCommand retry = new RetryMongoCommand() {

            @Override
            public Object execute() {
                batchJobMongoTemplate.getCollection(FILE_ENTRY_LATCH).insert(latchObject, WriteConcern.SAFE);
                return null;
            }
        };

        retry.executeOperation(numberOfRetries);

    } catch (MongoException me) {
        if (me.getCode() == DUP_KEY_CODE) {
            LOG.debug(me.getMessage());
        }
        return false;
    }
    return true;
}
 
Example 5
Source File: MarketDataServiceBasicImpl.java    From redtorch with MIT License
@Override
public List<BarField> queryTodayBar1MinList(long startTimestamp, long endTimestamp, String unifiedSymbol) {
	try {
		Document filter = new Document();
		Document dateDocument = new Document();
		dateDocument.put("$gte", startTimestamp);
		dateDocument.put("$lte", endTimestamp);
		filter.put("actionTimestamp", dateDocument);
		filter.put("unifiedSymbol", unifiedSymbol);

		BasicDBObject sortBO = new BasicDBObject();
		sortBO.put("actionTimestamp", 1);
		long beginTime = System.currentTimeMillis();
		List<Document> documentList = this.todayMarketDataDBClient.find(todayMarketDataDBName, COLLECTION_NAME_BAR_1_MIN, filter, sortBO);
		logger.info("Queried Bar data: database {}, collection {}, took {} ms, {} records", todayMarketDataDBName, COLLECTION_NAME_BAR_1_MIN, (System.currentTimeMillis() - beginTime), documentList.size());
		return documentListToBarList(documentList, MarketDataDBTypeEnum.MDDT_TD.getValueDescriptor().getName());
	} catch (Exception e) {
		logger.error("Error querying today's 1-minute bar data", e);
	}
	return new ArrayList<>();
}
 
Example 6
Source File: MarketDataServiceBasicImpl.java    From redtorch with MIT License
@Override
public List<TickField> queryTodayTickList(long startTimestamp, long endTimestamp, String unifiedSymbol) {
	try {
		Document filter = new Document();
		Document dateDocument = new Document();
		dateDocument.put("$gte", startTimestamp);
		dateDocument.put("$lte", endTimestamp);
		filter.put("actionTimestamp", dateDocument);
		filter.put("unifiedSymbol", unifiedSymbol);

		BasicDBObject sortBO = new BasicDBObject();
		sortBO.put("actionTimestamp", 1);
		long beginTime = System.currentTimeMillis();
		String collectionName = COLLECTION_NAME_TICK;
		List<Document> documentList = this.todayMarketDataDBClient.find(todayMarketDataDBName, collectionName, filter, sortBO);
		logger.info("Queried Tick data: database {}, collection {}, took {} ms, {} records", todayMarketDataDBName, collectionName, (System.currentTimeMillis() - beginTime), documentList.size());
		return documentListToTickList(documentList, MarketDataDBTypeEnum.MDDT_TD.getValueDescriptor().getName());
	} catch (Exception e) {
		logger.error("Error querying today's tick data", e);
	}
	return new ArrayList<>();
}
 
Example 7
Source File: UsageTermUrlSet.java    From act with GNU General Public License v3.0
public BasicDBObject getBasicDBObject() {
  BasicDBObject usageTermUrlSetBasicDBObject = new BasicDBObject("usage_term", usageTerm);
  BasicDBList urlsBasicDBList = new BasicDBList();
  for (String url : urlSet) {
    urlsBasicDBList.add(url);
  }
  usageTermUrlSetBasicDBObject.put("urls", urlsBasicDBList);
  return usageTermUrlSetBasicDBObject;
}
 
Example 8
Source File: Feature.java    From XBDD with Apache License 2.0
@PUT
@Path("/comments/{product}/{major}.{minor}.{servicePack}/{build}/{featureId:.+}")
@Consumes(MediaType.APPLICATION_JSON)
public Response updateCommentWithPatch(@BeanParam final Coordinates coordinates, @PathParam("featureId") final String featureId,
		final BasicDBObject patch) {
	try {
		final DBCollection collection = this.mongoLegacyDb.getCollection("features");
		final BasicDBObject example = coordinates.getReportCoordinatesQueryObject().append("id", featureId);
		final BasicDBObject storedFeature = (BasicDBObject) collection.findOne(example);

		final String scenarioId = (String) patch.get("scenarioId");
		final String label = (String) patch.get("label");
		final String content = (String) patch.get("content");

		final BasicDBObject featureToUpdate = (BasicDBObject) storedFeature.copy();
		final BasicDBObject scenarioToUpdate = getScenarioById(scenarioId, featureToUpdate);
		scenarioToUpdate.put(label, content);

		if (label.equals("testing-tips")) {
			final DBCollection tips = this.mongoLegacyDb.getCollection("testingTips");
			updateTestingTipsForScenario(tips, scenarioToUpdate, coordinates, featureId);
		}
		featureToUpdate.put("statusLastEditedBy", LoggedInUserUtil.getLoggedInUser().getDisplay());
		featureToUpdate.put("lastEditOn", new Date());
		featureToUpdate.put("calculatedStatus", calculateStatusForFeature(featureToUpdate));
		collection.save(featureToUpdate);
		if (label.equals("testing-tips")) {
			Feature.embedTestingTips(featureToUpdate, coordinates, this.mongoLegacyDb);
		}
		return Response.ok().build();
	} catch (final Throwable th) {
		th.printStackTrace();
		return Response.serverError().build();
	}

}
 
Example 9
Source File: MongoDBSnapshotStore.java    From swellrt with Apache License 2.0
protected void deleteSnapshot(WaveletName waveletName) throws PersistenceException {

  BasicDBObject criteria = new BasicDBObject();
  criteria.put(WAVE_ID_FIELD, ModernIdSerialiser.INSTANCE.serialiseWaveId(waveletName.waveId));
  criteria.put(WAVELET_ID_FIELD, ModernIdSerialiser.INSTANCE.serialiseWaveletId(waveletName.waveletId));

  try {
    // Using Journaled Write Concern
    // (http://docs.mongodb.org/manual/core/write-concern/#journaled)
    collection.withWriteConcern(WriteConcern.JOURNALED).deleteMany(criteria);
  } catch (MongoException e) {
    throw new PersistenceException(e);
  }
}
 
Example 10
Source File: Feature.java    From XBDD with Apache License 2.0
private void updateAllSteps(final BasicDBList steps, final String status) {
	for (final Object step : steps) {
		final BasicDBObject dbStep = (BasicDBObject) step;
		final BasicDBObject result = (BasicDBObject) dbStep.get("result");
		result.put("manualStatus", status);
	}
}
 
Example 11
Source File: MongoDB.java    From act with GNU General Public License v3.0
public List<Long> getRxnsWithEnzyme(String enzyme, Long org, List<Long> substrates) {
  BasicDBObject query = new BasicDBObject();
  query.put("ecnum", enzyme);
  query.put("organisms.id", org);
  for (Long substrate : substrates) {
    BasicDBObject mainQuery = new BasicDBObject();
    mainQuery.put("$ne", substrate);
    BasicDBList queryList = new BasicDBList();
    BasicDBObject productQuery = new BasicDBObject();
    productQuery.put("enz_summary.products.pubchem", mainQuery);
    BasicDBObject substrateQuery = new BasicDBObject();
    substrateQuery.put("enz_summary.substrates.pubchem", mainQuery);
    queryList.add(substrateQuery);
    queryList.add(productQuery);
    query.put("$or", queryList);
  }
  DBCursor cur = this.dbReactions.find(query);

  List<Long> reactions = new ArrayList<Long>();
  while (cur.hasNext()) {
    DBObject o = cur.next();
    long id = (Integer) o.get("_id"); // checked: db type IS int
    reactions.add(id);
  }
  cur.close();
  return reactions;
}
 
Example 12
Source File: ProfilingWriter.java    From mongodb-slow-operations-profiler with GNU Affero General Public License v3.0
public Date getNewest(MongoDbAccessor mongo, ServerAddress adr, String db) {
    try {
        final MongoCollection<Document> profileCollection = getProfileCollection(mongo);

        if(adr != null) {
            final BasicDBObject query = new BasicDBObject();
            final BasicDBObject fields = new BasicDBObject();
            final BasicDBObject sort = new BasicDBObject();
            query.put("adr", adr.getHost() + ":" + adr.getPort());
            query.put("db", db);
            fields.put("_id", Integer.valueOf(0));
            fields.put("ts", Integer.valueOf(1));
            sort.put("ts", Integer.valueOf(-1));
            
            final MongoCursor<Document> c = profileCollection.find(query).projection(fields).sort(sort).limit(1).iterator();
            try {
                if(c.hasNext()) {
                    final Document obj = c.next();
                    final Object ts = obj.get("ts");
                    if(ts != null) {
                        return (Date)ts;
                    }
                }
            } finally {
                c.close();
            }
        }
    }catch(Exception e) {
        LOG.error("Couldn't get newest entry for {}/{}", new Object[]{adr, db, e});

    }
    return null;
    
}
 
Example 13
Source File: MongoDB.java    From act with GNU General Public License v3.0
public void submitToActOrganismNameDB(Organism o) {
  BasicDBObject doc = new BasicDBObject();
  doc.put("org_id", o.getUUID());
  doc.put("name", o.getName());
  // TODO: support NCBI ids too.
  if(this.dbOrganismNames == null) {
    System.out.print("Organism: " + o);
  } else {
    this.dbOrganismNames.insert(doc);
  }
}
 
Example 14
Source File: MongoDB.java    From act with GNU General Public License v3.0
public List<Reaction> getRxnsWithAll(List<Long> reactants, List<Long> products) {

  if (reactants.size() == 0 && products.size() == 0) {
    throw new IllegalArgumentException("Reactants and products both empty! Query would return entire DB.");
  }
  BasicDBObject query = new BasicDBObject();

  if (!reactants.isEmpty()) {
    BasicDBList substrateIds = new BasicDBList();
    substrateIds.addAll(reactants);
    query.put("enz_summary.substrates.pubchem", new BasicDBObject("$all", substrateIds));
  }

  if (!products.isEmpty()) {
    BasicDBList productIds = new BasicDBList();
    productIds.addAll(products);
    query.put("enz_summary.products.pubchem", new BasicDBObject("$all", productIds));
  }

  DBCursor cur = this.dbReactions.find(query);
  List<Reaction> reactions = new ArrayList<Reaction>();

  try {
    while (cur.hasNext()) {
      DBObject o = cur.next();
      reactions.add(convertDBObjectToReaction(o));
    }
  } finally {
    cur.close();
  }

  return reactions;
}
 
Example 15
Source File: Recents.java    From XBDD with Apache License 2.0
@PUT
@Path("/build/{product}/{major}.{minor}.{servicePack}/{build}")
@Produces(MediaType.APPLICATION_JSON)
public Response addBuildToRecents(@BeanParam final Coordinates coordinates) {

	final DBObject buildCoords = coordinates.getReportCoordinates();

	final DBCollection collection = this.mongoLegacyDb.getCollection("users");

	final BasicDBObject user = new BasicDBObject();
	user.put("user_id", LoggedInUserUtil.getLoggedInUser().getUserId());

	final DBObject blank = new BasicDBObject();
	final DBObject doc = collection.findAndModify(user, blank, blank, false, new BasicDBObject("$set", user), true, true);

	if (doc.containsField("recentBuilds")) {
		final BasicDBList buildArray = (BasicDBList) doc.get("recentBuilds");
		if (buildArray.contains(buildCoords)) {
			// BasicDBObject toMove = (BasicDBObject) featureArray.get(featureArray.indexOf(featureDetails));
			buildArray.remove(buildCoords);
			buildArray.add(buildCoords);
			collection.update(user, new BasicDBObject("$set", new BasicDBObject("recentBuilds", buildArray)));
		} else {
			if (buildArray.size() >= 5) {
				collection.update(user, new BasicDBObject("$pop", new BasicDBObject("recentBuilds", "-1")));
			}
			collection.update(user, new BasicDBObject("$addToSet", new BasicDBObject("recentBuilds", buildCoords)));
		}
	} else {
		collection.update(user, new BasicDBObject("$addToSet", new BasicDBObject("recentBuilds", buildCoords)));
	}

	return Response.ok().build();
}
 
Example 16
Source File: BatchJobMongoDA.java    From secure-data-service with Apache License 2.0
@Override
public boolean updateFileEntryLatch(String batchJobId, String filename) {
    final BasicDBObject query = new BasicDBObject();
    query.put(BATCHJOBID_FIELDNAME, batchJobId);

    BasicDBObject files = new BasicDBObject("files", filename);
    final BasicDBObject update = new BasicDBObject("$pull", files);
    RetryMongoCommand retry = new RetryMongoCommand() {

        @Override
        public Object execute() {

            return batchJobMongoTemplate.getCollection(FILE_ENTRY_LATCH).findAndModify(query, null, null, false,
                    update, true, false);
        }

    };
    DBObject fileEntryLatch = (DBObject) retry.executeOperation(numberOfRetries);

    List<String> file = (List<String>) fileEntryLatch.get("files");

    if (file == null || file.isEmpty() ) {
        return true;
    }

    return false;
}
 
Example 17
Source File: RollupStorageInterceptor.java    From hvdf with Apache License 2.0
@Override
public void pushSample(DBObject sample, boolean isList, BasicDBList resultIds) {
	
	if(isList){
		
		// Use the batch API to send a number of samples
		updateBatch((BasicDBList)sample);			
	}
	else if(sample != null){
		
		// This is a document, place it straight in appropriate collection
		BasicDBObject doc = ((BasicDBObject) sample);
		long timestamp = this.rollupPeriod * (doc.getLong(Sample.TS_KEY) / this.rollupPeriod);			
		DBCollection collection = collectionAllocator.getCollection(timestamp);
		
		// Ask the id allocator for the query
		BasicDBObject query = this.idFactory.getQuery(sample.get(Sample.SOURCE_KEY), timestamp);
		
		// Build the update clause using the ops list
		BasicDBObject update = new BasicDBObject();
		for(RollupOperation rollupOp : this.rollupOps){
			
			DBObject updateClause = rollupOp.getUpdateClause(sample);
			
			// Check for top-level operators that already exist so they don't get overwritten
			for(String key : updateClause.keySet()){
				BasicDBObject existingClause = (BasicDBObject) update.get(key);
				if(existingClause != null){
					// Merge the arguments to the top level op
					existingClause.putAll((DBObject)updateClause.get(key));
				} else {
					update.put(key, updateClause.get(key));
				}
			}
		}
		
		collection.update(query, update, true, false);
	}
}
 
Example 18
Source File: MongoDB.java    From act with GNU General Public License v3.0 5 votes vote down vote up
public void updateChemicalWithBrenda(Chemical c, String brendaName) {
  long id = alreadyEntered(c);

  if(id < 0) {
    System.err.println("Update chemical with brenda: " + brendaName + " can't find matching inchi");
    return;
  }
  BasicDBObject query = new BasicDBObject();
  query.put("_id", id);
  BasicDBObject update = new BasicDBObject();
  update.put("$push", new BasicDBObject("names.brenda",brendaName.toLowerCase()));
  this.dbChemicals.update(query, update);

}
 
Example 19
Source File: BingCacheMongoDB.java    From act with GNU General Public License v3.0
public BasicDBObject getNameSearchResultDBObjectFromName(String formattedName) {
  BasicDBObject whereQuery = new BasicDBObject();
  BasicDBObject allFields = new BasicDBObject(); // empty projection object: return every field
  whereQuery.put("name", formattedName);
  return (BasicDBObject) dbBingCache.findOne(whereQuery, allFields);
}
 
Example 20
Source File: RollupStorageInterceptor.java    From hvdf with Apache License 2.0
private void updateBatch(BasicDBList sample) {
	
	// The batch may span collection splits, so maintain
	// a current collection and batch operation
	BulkWriteOperation currentOp = null;
	int currentOpOffset = 0;
	int sampleIdx = 0;
	DBCollection currentColl = null;	
	
	logger.debug("Received batch of size : {}", sample.size());
	
	try{
		for(; sampleIdx < sample.size(); ++sampleIdx){
			
			// prepare the sample to batch
			BasicDBObject doc = (BasicDBObject) (sample.get(sampleIdx));
			long timestamp = this.rollupPeriod * (doc.getLong(Sample.TS_KEY) / this.rollupPeriod);			
			DBCollection collection = collectionAllocator.getCollection(timestamp);
			
			// if the collection has changed, commit the current
			// batch to the collection and start new
			if(collection.equals(currentColl) == false){
				executeBatchUpdate(currentOp, sample);
				currentColl = collection;
				currentOp = collection.initializeUnorderedBulkOperation();
				currentOpOffset = sampleIdx;
			}
			
			// put the doc insert into the batch
			// Ask the id allocator for the query
			BasicDBObject query = this.idFactory.getQuery(doc.get(Sample.SOURCE_KEY), timestamp);
			
			// Build the update clause using the ops list
			BasicDBObject update = new BasicDBObject();
			for(RollupOperation rollupOp : this.rollupOps){
				
				DBObject updateClause = rollupOp.getUpdateClause(doc);
				
				// Check for top-level operators that already exist so they don't get overwritten
				for(String key : updateClause.keySet()){
					BasicDBObject existingClause = (BasicDBObject) update.get(key);
					if(existingClause != null){
						// Merge the arguments to the top level op
						existingClause.putAll((DBObject)updateClause.get(key));
					} else {
						update.put(key, updateClause.get(key));
					}
				}
			}
			
			currentOp.find(query).upsert().updateOne(update);
		}		
		
		// Finalize the last batch
		executeBatchUpdate(currentOp, sample);		
		
	} catch(Exception ex){
		
		// One of the bulk writes has failed
		BasicDBList failedDocs = new BasicDBList();
		if(ex instanceof BulkWriteException){
			
			// We need to figure out the failures and remove the writes
			// that worked from the batch
			int batchSize = sampleIdx - currentOpOffset;
			BulkWriteException bwex = (BulkWriteException)ex;
			int errorCount = bwex.getWriteErrors().size(); 
			if(errorCount < batchSize){
				
				for(BulkWriteError we : bwex.getWriteErrors()){
					failedDocs.add(sample.get(currentOpOffset + we.getIndex()));
				}
				
				// since we have accounted for the failures in the current
				// batch, move the offset forward to the last sample
				currentOpOffset = sampleIdx;					
			}
		}
		
		// If this happened part way through the batch, send remaining 
		// docs to failed list and update sample to contain only failed docs
		if(currentOpOffset > 0){
			for(; currentOpOffset < sample.size(); ++currentOpOffset)
				failedDocs.add(sample.get(currentOpOffset));
			sample.clear();
			sample.addAll(failedDocs);	
		}
		
		throw ex;
	}
}