Java Code Examples for com.mongodb.BasicDBList#addAll()

The following examples show how to use com.mongodb.BasicDBList#addAll(). They are drawn from open-source projects; the source file, project, and license are noted above each example.
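
Since BasicDBList extends BasicBSONList, which is itself a java.util.List, addAll() simply copies every element of an existing Java collection into a BSON array that can then be embedded in a document or query. A minimal, self-contained sketch (the class name, field name, and values here are illustrative only):

import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;

import java.util.Arrays;
import java.util.List;

public class BasicDBListAddAllSketch {
  public static void main(String[] args) {
    // A plain Java collection whose elements should end up in a BSON array
    List<String> inchis = Arrays.asList("InChI=1S/H2O/h1H2", "InChI=1S/CH4/h1H4");

    // addAll() copies the whole collection into the BasicDBList in one call
    BasicDBList inchiList = new BasicDBList();
    inchiList.addAll(inchis);

    // The resulting BSON array can be embedded in a query,
    // for example as the argument of an $in clause
    BasicDBObject query = new BasicDBObject("InChI", new BasicDBObject("$in", inchiList));
    System.out.println(query);
  }
}
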
Example 1
Source File: ReachablesProjectionUpdate.java    From act with GNU General Public License v3.0
public void updateDatabase(DBCollection reachables) {
  for (String product : products) {
    // The query object for this product
    BasicDBObject newProductQuery = new BasicDBObject().append(INCHI_KEY, product);

    // DB list of the substrates of this projection
    BasicDBList substrateList = new BasicDBList();
    substrateList.addAll(substrates);

    // DB list of the one RO associated with this projection
    BasicDBList roList = new BasicDBList();
    roList.addAll(ros);

    // The full entry to be added to the product's precursor list
    BasicDBObject precursorEntry = new BasicDBObject()
        .append(SUBSTRATES_KEY, substrateList)
        .append(RO_KEY, roList);

    // The command to push the precursor entry onto the precursor list
    BasicDBObject precursors = new BasicDBObject();
    precursors.append("$push", new BasicDBObject(PRECURSOR_KEY, precursorEntry));

    // Do the update!
    reachables.update(newProductQuery, precursors, UPSERT, NO_MULTI);
  }
}
 
Example 2
Source File: MongoDB.java    From act with GNU General Public License v3.0
public BasicDBObject createCofactorDoc(Cofactor c, Long ID) {
  BasicDBObject doc = new BasicDBObject();

  doc.put("_id", ID);
  doc.put("InChI", c.getInChI());

  BasicDBList names = new BasicDBList();
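  // addAll() copies each of the cofactor's names into the BSON array stored under "names"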
  names.addAll(c.getNames());
  doc.put("names", names);

  return doc;
}
 
Example 3
Source File: MongoDB.java    From act with GNU General Public License v3.0
public void updateChemicalWithRoBinningInformation(long id, List<Integer> matchedROs) {
  BasicDBObject query = new BasicDBObject("_id", id);
  BasicDBObject createDerivedDataContainer = new BasicDBObject("$set", new BasicDBObject("derived_data", new BasicDBObject()));
  this.dbChemicals.update(query, createDerivedDataContainer);

  BasicDBList listOfRos = new BasicDBList();
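  // Copy the matched RO ids into a BSON array so they can be stored under derived_data.matched_ros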
  listOfRos.addAll(matchedROs);

  BasicDBObject updateDerivedDataContainerWithMatchedRos =
      new BasicDBObject("$set", new BasicDBObject("derived_data.matched_ros", listOfRos));
  this.dbChemicals.update(query, updateDerivedDataContainerWithMatchedRos);
}
 
Example 4
Source File: MongoDB.java    From act with GNU General Public License v3.0
public List<Reaction> getRxnsWithAll(List<Long> reactants, List<Long> products) {

    if (reactants.size() == 0 && products.size() == 0) {
      throw new IllegalArgumentException("Reactants and products both empty! Query would return entire DB.");
    }
    BasicDBObject query = new BasicDBObject();

    if (!reactants.isEmpty()) {
      BasicDBList substrateIds = new BasicDBList();
      substrateIds.addAll(reactants);
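      // $all requires the reaction's substrate list to contain every one of the given reactant ids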
      query.put("enz_summary.substrates.pubchem", new BasicDBObject("$all", substrateIds));
    }

    if (!products.isEmpty()) {
      BasicDBList productIds = new BasicDBList();
      productIds.addAll(products);
      query.put("enz_summary.products.pubchem", new BasicDBObject("$all", productIds));
    }

    DBCursor cur = this.dbReactions.find(query);
    List<Reaction> reactions = new ArrayList<Reaction>();

    try {
      while (cur.hasNext()) {
        DBObject o = cur.next();
        reactions.add(convertDBObjectToReaction(o));
      }
    } finally {
      cur.close();
    }

    return reactions;
  }
 
Example 5
Source File: MongoDB.java    From act with GNU General Public License v3.0
public void updateReferences(Seq seq) {
  BasicDBObject query = new BasicDBObject().append("_id", seq.getUUID());
  DBObject obj = this.dbSeq.findOne(query);
  BasicDBList refs = new BasicDBList();
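  // References arrive as JSONObjects; convert each to a DBObject before copying them into the BSON array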

  List<DBObject> newReferences = new ArrayList<>();
  for (JSONObject ref : seq.getReferences()) {
    newReferences.add(MongoDBToJSON.conv(ref));
  }

  refs.addAll(newReferences);
  obj.put("references", refs);
  this.dbSeq.update(query, obj);
}
 
Example 6
Source File: MongoDB.java    From act with GNU General Public License v3.0
public DBCursor fetchNamesAndUsageForInchis(Set<String> inchis) {
  BasicDBList inchiList = new BasicDBList();
  inchiList.addAll(inchis);
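  // Match any chemical whose InChI appears in the supplied set and that has Bing cross-reference data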
  BasicDBObject inClause = new BasicDBObject("$in", inchiList);
  BasicDBObject whereQuery = new BasicDBObject("InChI", inClause);
  whereQuery.put("xref.BING", new BasicDBObject("$exists", true));
  BasicDBObject fields = new BasicDBObject();
  fields.put("InChI", true);
  fields.put("names.brenda", true);
  fields.put("xref", true);
  DBCursor cursor = dbChemicals.find(whereQuery, fields);
  return cursor;
}
 
Example 7
Source File: Search.java    From XBDD with Apache License 2.0
@GET
@Path("/{product}/{major}.{minor}.{servicePack}/{build}")
@Produces(MediaType.APPLICATION_JSON)
public Response getSearchResults(@BeanParam final Coordinates coordinates, @QueryParam("keywords") final String keyword) {
	final String[] searchCategories = { "name", "description", "tags.name", "elements.name", "elements.description",
			"elements.steps.name", "elements.tags.name" };
	final List<String> searchWords = Arrays.asList(keyword.split("\\s+"));
	final DBCollection collection = this.mongoLegacyDb.getCollection("features");
	final List<DBObject> searchResults = new ArrayList<>();

	final QueryBuilder queryBuilder = QueryBuilder.getInstance();
	final DBCursor results = collection.find(queryBuilder.getSearchQuery(searchWords, coordinates, searchCategories));

	while (results.hasNext()) {
		final DBObject doc = results.next();
		searchResults.add(doc);
	}

	searchResults.sort(new DBObjectComparator(searchWords));

	while (searchResults.size() > SEARCH_LIMIT) {
		searchResults.remove(searchResults.size() - 1);
	}

	final BasicDBList basicDBList = new BasicDBList();
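	// Copy the trimmed search results into a BasicDBList so they serialise as a single JSON array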
	basicDBList.addAll(searchResults);

	return Response.ok(SerializerUtil.serialise(basicDBList)).build();
}
 
Example 8
Source File: Feature.java    From XBDD with Apache License 2.0
private BasicDBList constructEditStepChanges(final DBObject currentVersion, final DBObject previousVersion) {
	final BasicDBList stepChanges = new BasicDBList();
	final BasicDBList scenarios = (BasicDBList) currentVersion.get("elements");
	final BasicDBList prevScenarios = (BasicDBList) previousVersion.get("elements");
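	// Compare each scenario with its counterpart in the previous version and collect the step changes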
	if (scenarios != null) {
		for (int i = 0; i < scenarios.size(); i++) {
			stepChanges.addAll(updateScenarioSteps((BasicDBObject) scenarios.get(i), (BasicDBObject) prevScenarios.get(i)));
		}
	}
	return stepChanges;
}
 
Example 9
Source File: RenderDao.java    From render with GNU General Public License v2.0
public void cloneStack(final StackId fromStackId,
                       final StackId toStackId,
                       final List<Double> zValues,
                       final Boolean skipTransforms)
        throws IllegalArgumentException, IllegalStateException {

    MongoUtil.validateRequiredParameter("fromStackId", fromStackId);
    MongoUtil.validateRequiredParameter("toStackId", toStackId);

    if ((skipTransforms == null) || (! skipTransforms)) {
        final MongoCollection<Document> fromTransformCollection = getTransformCollection(fromStackId);
        final MongoCollection<Document> toTransformCollection = getTransformCollection(toStackId);
        cloneCollection(fromTransformCollection, toTransformCollection, new Document());
    }

    final Document filterQuery = new Document();
    if ((zValues != null) && (zValues.size() > 0)) {
        final BasicDBList list = new BasicDBList();
        list.addAll(zValues);
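        // Restrict the tile clone to the requested z values (QueryOperators.IN is "$in")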
        final Document zFilter = new Document(QueryOperators.IN, list);
        filterQuery.append("z", zFilter);
    }

    final MongoCollection<Document> fromTileCollection = getTileCollection(fromStackId);
    final MongoCollection<Document> toTileCollection = getTileCollection(toStackId);
    cloneCollection(fromTileCollection, toTileCollection, filterQuery);
}
 
Example 10
Source File: MongoDB.java    From act with GNU General Public License v3.0
private static BasicDBList compare(BasicDBList l, BasicDBList refl, boolean listsAreSet) {
  boolean different = false;
  BasicDBList diff = new BasicDBList();

  if (!listsAreSet) {
    // lists are to be treated as ordered sets and so we can compare element by element
    for (int i = 0; i < l.size(); i++){
      Object val = l.get(i);
      Object refv = refl.get(i);
      Object d;
      if ((d = compare(val, refv, listsAreSet)) != null) {
        different = true;
        diff.add(d);
      } else {
        // elements at this index are identical, but we don't want to muck up the order
        // in case future elements are not identical... so add a null to the diff,
        // but, importantly, do not set the flag that the list is different
        diff.add(null);
      }
    }
  } else {
    // lists are to be treated as unordered sets: we try to match each element,
    // best effort, against any element of the reference list, and proceed
    // greedily once a match is found

    // we keep this as a list as opposed to a true set because the original (ref)
    // and the current (new) might have (identical) replicates, and so should not
    // be flagged different because of that.
    List<Object> refset = new ArrayList<Object>();
    refset.addAll(refl);

    for (Object e : l) {
      boolean matches_some = false;
      for (Object eref : refset) {
        if (compare(e, eref, listsAreSet) == null) {
          // this object matches something, great, let's move on to the next object
          // also remove the matched object from the ref list, so that we have
          // a 1-1 mapping between this and the ref list object
          matches_some = true;
          refset.remove(eref);
          break;
        }
      }
      if (!matches_some) {
        // if this object in the new list could not be matched against anything,
        // the lists are different
        different = true;
        diff.add(e);
      }
    }

    if (refset.size() != 0) {
      // still some elements remain in the ref list, i.e., sets different
      different = true;
      diff.addAll(refset);
    }

  }

  return different ? diff : null;
}
 
Example 11
Source File: MongoDB.java    From act with GNU General Public License v3.0
public static BasicDBObject createChemicalDoc(Chemical c, Long ID) {
  BasicDBObject doc = new BasicDBObject();

  doc.put("_id", ID);

  doc.put("canonical", c.getCanon());

  doc.put("SMILES", c.getSmiles());
  doc.put("InChI", c.getInChI());
  doc.put("InChIKey", c.getInChIKey());

  doc.put("isCofactor", c.isCofactor());
  doc.put("isNative", c.isNative());

  BasicDBObject names = new BasicDBObject();
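  // Nested "names" structure: synonyms, pubchem name records, and brenda names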
  BasicDBList synonyms = new BasicDBList();
  synonyms.addAll(c.getSynonyms());
  names.put("synonyms", synonyms);

  BasicDBList pubchemNames = new BasicDBList();

  for (String type : c.getPubchemNameTypes()) {
    String[] temp = c.getPubchemNames(type);
    BasicDBList dbNames = new BasicDBList();
    for (String t : temp) {
      dbNames.add(t);
    }
    BasicDBObject dbNameObj = new BasicDBObject();
    dbNameObj.put("type", type);
    dbNameObj.put("values", dbNames);
    pubchemNames.add(dbNameObj);
  }
  names.put("pubchem", pubchemNames);
  BasicDBList brendaNames = new BasicDBList(); // populated properly later during an initial install
  brendaNames.addAll(c.getBrendaNames()); // but when this is called post-install, we construct the full chemical entry here
  names.put("brenda", brendaNames);

  doc.put("names", names);

  BasicDBObject xrefs = new BasicDBObject();
  xrefs.put("pubchem", c.getPubchemID());
  int cnt = 0;
  for (REFS xrefTyp : Chemical.REFS.values()) {
    if (c.getRef(xrefTyp) != null) {
      xrefs.put(xrefTyp.name(), MongoDBToJSON.conv((JSONObject)c.getRef(xrefTyp)));
      cnt++;
    }
  }
  doc.put("xref", xrefs);

  doc.put("estimateEnergy", c.getEstimatedEnergy());

  doc.put("keywords", c.getKeywords());
  doc.put("keywords_case_insensitive", c.getCaseInsensitiveKeywords());

  doc.put("csid", c.getChemSpiderID());
  doc.put("num_vendors", c.getChemSpiderNumUniqueVendors());
  doc.put("vendors", MongoDBToJSON.conv(c.getChemSpiderVendorXrefs()));

  return doc;
}
 
Example 12
Source File: MongoDB.java    From act with GNU General Public License v3.0
<X> BasicDBList to_dblist(Set<X> set) {
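  // Copy an arbitrary set into a BSON array; a null set yields an empty list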
  BasicDBList dblist = new BasicDBList();
  if (set != null) dblist.addAll(set);
  return dblist;
}
 
Example 13
Source File: RollupStorageInterceptor.java    From hvdf with Apache License 2.0
private void updateBatch(BasicDBList sample) {
	
	// The batch may span collection splits, so maintain
	// a current collection and batch operation
	BulkWriteOperation currentOp = null;
	int currentOpOffset = 0;
	int sampleIdx = 0;
	DBCollection currentColl = null;	
	
	logger.debug("Received batch of size : {}", sample.size());
	
	try{
		for(; sampleIdx < sample.size(); ++sampleIdx){
			
			// prepare the sample to batch
			BasicDBObject doc = (BasicDBObject) (sample.get(sampleIdx));
			long timestamp = this.rollupPeriod * (doc.getLong(Sample.TS_KEY) / this.rollupPeriod);			
			DBCollection collection = collectionAllocator.getCollection(timestamp);
			
			// if the collection has changed, commit the current
			// batch to the collection and start new
			if(collection.equals(currentColl) == false){
				executeBatchUpdate(currentOp, sample);
				currentColl = collection;
				currentOp = collection.initializeUnorderedBulkOperation();
				currentOpOffset = sampleIdx;
			}
			
			// put the doc insert into the batch
			// Ask the id allocator for the query
			BasicDBObject query = this.idFactory.getQuery(doc.get(Sample.SOURCE_KEY), timestamp);
			
			// Build the update clause using the ops list
			BasicDBObject update = new BasicDBObject();
			for(RollupOperation rollupOp : this.rollupOps){
				
				DBObject updateClause = rollupOp.getUpdateClause(doc);
				
				// Check for top-level operators that already exist so they don't get overwritten
				for(String key : updateClause.keySet()){
					BasicDBObject existingClause = (BasicDBObject) update.get(key);
					if(existingClause != null){
						// Merge the arguments to the top level op
						existingClause.putAll((DBObject)updateClause.get(key));
					} else {
						update.put(key, updateClause.get(key));
					}
				}
			}
			
			currentOp.find(query).upsert().updateOne(update);
		}		
		
		// Finalize the last batch
		executeBatchUpdate(currentOp, sample);		
		
	} catch(Exception ex){
		
		// One of the bulk writes has failed
		BasicDBList failedDocs = new BasicDBList();
		if(ex instanceof BulkWriteException){
			
			// We need to figure out the failures and remove the writes
			// that worked from the batch
			int batchSize = sampleIdx - currentOpOffset;
			BulkWriteException bwex = (BulkWriteException)ex;
			int errorCount = bwex.getWriteErrors().size(); 
			if(errorCount < batchSize){
				
				for(BulkWriteError we : bwex.getWriteErrors()){
					failedDocs.add(sample.get(currentOpOffset + we.getIndex()));
				}
				
				// since we have accounted for the failures in the current
				// batch, move the offset forward to the last sample
				currentOpOffset = sampleIdx;					
			}
		}
		
		// If this happened part way through the batch, send remaining 
		// docs to failed list and update sample to contain only failed docs
		if(currentOpOffset > 0){
			for(; currentOpOffset < sample.size(); ++currentOpOffset)
				failedDocs.add(sample.get(currentOpOffset));
			sample.clear();
			sample.addAll(failedDocs);	
		}
		
		throw ex;
	}
}
 
Example 14
Source File: RawStorageInterceptor.java    From hvdf with Apache License 2.0
private void storeBatch(BasicDBList sample, BasicDBList resultList) {
	
	// The batch may span collection splits, so maintain
	// a current collection and batch operation
	BulkWriteOperation currentOp = null;
	int currentOpOffset = 0;
	int sampleIdx = 0;
	DBCollection currentColl = null;	
	
	logger.debug("Received batch of size : {}", sample.size());
	
	try{
		for(; sampleIdx < sample.size(); ++sampleIdx){
			
			// prepare the sample to batch
			BasicDBObject doc = (BasicDBObject) (sample.get(sampleIdx));
			SampleId _id = this.idFactory.createId(doc);
			doc.put(Sample.ID_KEY, _id.toObject());
			resultList.add(_id.toObject());
			long timestamp = doc.getLong(Sample.TS_KEY);
			DBCollection collection = collectionAllocator.getCollection(timestamp);
			
			// if the collection has changed, commit the current
			// batch to the collection and start new
			if(collection.equals(currentColl) == false){
				executeBatchWrite(currentOp, sample);
				currentColl = collection;
				currentOp = collection.initializeUnorderedBulkOperation();
				currentOpOffset = sampleIdx;
			}
			
			// put the doc insert into the batch
			currentOp.insert(doc);
		}		
		
		// Finalize the last batch
		executeBatchWrite(currentOp, sample);		
		
	} catch(Exception ex){
		
		// One of the bulk writes has failed
		BasicDBList failedDocs = new BasicDBList();
		if(ex instanceof BulkWriteException){
			
			// We need to figure out the failures and remove the writes
			// that worked from the batch
			int batchSize = sampleIdx - currentOpOffset;
			BulkWriteException bwex = (BulkWriteException)ex;
			int errorCount = bwex.getWriteErrors().size(); 
			if(errorCount < batchSize){
				
				for(BulkWriteError we : bwex.getWriteErrors()){
					failedDocs.add(sample.get(currentOpOffset + we.getIndex()));
				}
				
				// since we have accounted for the failures in the current
				// batch, move the offset forward to the last sample
				currentOpOffset = sampleIdx;					
			}
		}
		
		// If this happened part way through the batch, send remaining 
		// docs to failed list and update sample to contain only failed docs
		if(currentOpOffset > 0){
			for(; currentOpOffset < sample.size(); ++currentOpOffset)
				failedDocs.add(sample.get(currentOpOffset));
			sample.clear();
			sample.addAll(failedDocs);	
		}
		
		// TODO : we also need to handle the result Ids here as well,
		// the failed doc Ids must be pulled from the resultList
		throw ex;
	}
}