Java Code Examples for com.mongodb.BasicDBList#size()

The following examples show how to use com.mongodb.BasicDBList#size(). Each example is taken from an open-source project; the source file, project, and license are noted above it.
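As a quick orientation before the project examples, here is a minimal, self-contained sketch of the method in isolation (the demo class name and field values are illustrative, not taken from any project on this page). BasicDBList is the legacy driver's BSON array type and extends java.util.ArrayList<Object>, so size() reports the element count just like List#size(), and elements fetched with get(int) must be cast to their expected type.

import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

public class BasicDBListSizeDemo {
    public static void main(String[] args) {
        // BasicDBList is the legacy driver's representation of a BSON array.
        BasicDBList list = new BasicDBList();
        list.add(new BasicDBObject("name", "first"));
        list.add(new BasicDBObject("name", "second"));

        // size() behaves exactly like java.util.List#size().
        System.out.println("elements: " + list.size());

        // Elements come back as Object and must be cast before use.
        for (int i = 0; i < list.size(); i++) {
            DBObject element = (DBObject) list.get(i);
            System.out.println(element.get("name"));
        }
    }
}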
Example 1
Source File: QueryProperties.java    From birt with Eclipse Public License 1.0
static DBObject[] getSecondaryObjectSets( DBObject exprObj )
{
    if( !(exprObj instanceof BasicDBList) )
        return null;    // no secondary element(s)

    BasicDBList objList = (BasicDBList)exprObj;
    if( objList.size() <= 1 )
        return null;
    
    // return the second and remaining DBObject(s) from the list
    List<DBObject> secondaryObjList = new ArrayList<DBObject>(objList.size()-1);
    for( int i=1; i < objList.size(); i++ )
    {
        Object value = objList.get(i);
        if( value instanceof DBObject )
            secondaryObjList.add( (DBObject)value );
        else // ignore elements that are not DBObject
            logInvalidTagValue( value );
    }

    if( secondaryObjList.isEmpty() )
        return null;
    return (DBObject[])secondaryObjList.toArray( new DBObject[secondaryObjList.size()] );
}
 
Example 2
Source File: MongoFieldMapping.java    From bluima with Apache License 2.0
public static void readFieldFromDb(String fieldKey, String range,
        Annotation a, Feature f, BasicDBObject dbO, JCas jCas) {

    if (dbO.containsField(fieldKey)) {

        if (range.equals("String")) {
            a.setStringValue(f, dbO.getString(fieldKey));
        } else if (range.equals("StringArray")) {
            BasicDBList vals = (BasicDBList) dbO.get(fieldKey);
            StringArray sa = new StringArray(jCas, vals.size());
            for (int i = 0; i < vals.size(); i++) {
                sa.set(i, vals.get(i).toString());
            }
            a.setFeatureValue(f, sa);
        } else if (range.equals("Integer")) {
            a.setIntValue(f, dbO.getInt(fieldKey));
        } else if (range.equals("Float")) {
            a.setFloatValue(f, (float) dbO.getDouble(fieldKey));
        } else if (range.equals("Boolean")) {
            a.setBooleanValue(f, dbO.getBoolean(fieldKey));
        } else {
            LOG.warn("range not supported " + range);
        }
    }
}
 
Example 3
Source File: GameDataManager.java    From gameserver with Apache License 2.0
/**
 * Get the game data from database as a two-dimensional int array.
 * 
 * @param key
 * @return
 */
public int[][] getGameDataAsIntArrayArray(GameDataKey key) {
	Object obj = getValueFromDatabase(key);
	if ( obj == null ) {
		return new int[0][0];
	} else if ( obj instanceof int[][] ) {
		return (int[][])obj;
	} else {
		BasicDBList list = (BasicDBList)obj;
		int[][] array = new int[list.size()][];
		for ( int i=0; i<array.length; i++ ) {
			BasicDBList dbList = (BasicDBList)list.get(i);
			array[i] = new int[dbList.size()];
			for ( int j=0; j<array[i].length; j++ ) {
				array[i][j] = (Integer)dbList.get(j);
			}
		}
		return array;
	}
}
 
Example 4
Source File: MongoEntity.java    From secure-data-service with Apache License 2.0
private static Map<String, String[]> convertDBMap(Map<String, Object> dbMap) {
    Map<String, String[]> map = new HashMap<String, String[]>();

    for (String key : dbMap.keySet()) {
        Object value = dbMap.get(key);
        if (value instanceof BasicDBList) {
            BasicDBList list = (BasicDBList)value;
            String[] array = new String[list.size()];
            int index = 0;
            for (Object item : list) {
                array[index] = item.toString();
                index++;
            }
            map.put(key, array);
        }
    }

    return map;
}
 
Example 5
Source File: GameDataManager.java    From gameserver with Apache License 2.0
/**
 * Get the game data from database as a two-dimensional double array.
 * 
 * @param key
 * @return
 */
public double[][] getGameDataAsDoubleArrayArray(GameDataKey key) {
	Object obj = getValueFromDatabase(key);
	if ( obj == null ) {
		return new double[0][0];
	} else if ( obj instanceof double[][] ) {
		return (double[][])obj;
	} else {
		BasicDBList list = (BasicDBList)obj;
		double[][] array = new double[list.size()][];
		for ( int i=0; i<array.length; i++ ) {
			BasicDBList dbList = (BasicDBList)list.get(i);
			array[i] = new double[dbList.size()];
			for ( int j=0; j<array[i].length; j++ ) {
				array[i][j] = (Double)dbList.get(j);
			}
		}
		return array;
	}
}
 
Example 6
Source File: GameDataManager.java    From gameserver with Apache License 2.0
/**
 * Get the game data from database as a double value array.
 * If it does not exist in database, or is illegal, return an empty double array.
 * 
 * @param key
 * @return
 */
public double[] getGameDataAsDoubleArray(GameDataKey key) {
	Object obj = getValueFromDatabase(key);
	if ( obj == null ) {
		return new double[0];
	} else if ( obj instanceof double[] ) {
		return (double[])obj;
	} else {
		BasicDBList list = (BasicDBList)obj;
		double[] array = new double[list.size()];
		for ( int i=0; i<array.length; i++ ) {
			array[i] = (Double)list.get(i);
		}
		return array;
	}
}
 
Example 7
Source File: Feature.java    From XBDD with Apache License 2.0
private BasicDBList constructEditStepChanges(final DBObject currentVersion, final DBObject previousVersion) {
	final BasicDBList stepChanges = new BasicDBList();
	final BasicDBList scenarios = (BasicDBList) currentVersion.get("elements");
	final BasicDBList prevScenarios = (BasicDBList) previousVersion.get("elements");
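	// note: assumes previousVersion has an "elements" list that matches the current scenarios index for index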
	if (scenarios != null) {
		for (int i = 0; i < scenarios.size(); i++) {
			stepChanges.addAll(updateScenarioSteps((BasicDBObject) scenarios.get(i), (BasicDBObject) prevScenarios.get(i)));
		}
	}
	return stepChanges;
}
 
Example 8
Source File: Recents.java    From XBDD with Apache License 2.0
@PUT
@Path("/feature/{product}/{major}.{minor}.{servicePack}/{build}/{id:.+}")
@Produces(MediaType.APPLICATION_JSON)
public Response addFeatureToRecents(@QueryParam("name") final String featureName,
		@BeanParam final Coordinates coordinates,
		@PathParam("id") final String featureID) {

	final BasicDBObject featureDetails = new BasicDBObject("name", featureName);
	featureDetails.put("product", coordinates.getProduct());
	featureDetails.put("version", coordinates.getVersionString());
	featureDetails.put("build", coordinates.getBuild());
	featureDetails.put("id", featureID);

	final DBCollection collection = this.mongoLegacyDb.getCollection("users");

	final BasicDBObject user = new BasicDBObject();
	user.put("user_id", LoggedInUserUtil.getLoggedInUser().getUserId());

	final DBObject blank = new BasicDBObject();
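	// upsert the user document and return it (findAndModify args: query, fields, sort, remove, update, returnNew, upsert)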
	final DBObject doc = collection.findAndModify(user, blank, blank, false, new BasicDBObject("$set", user), true, true);

	if (doc.containsField("recentFeatures")) {
		final BasicDBList featureArray = (BasicDBList) doc.get("recentFeatures");
		if (featureArray.contains(featureDetails)) {
			featureArray.remove(featureDetails);
			featureArray.add(featureDetails);
			collection.update(user, new BasicDBObject("$set", new BasicDBObject("recentFeatures", featureArray)));
		} else {
			if (featureArray.size() >= 5) {
				collection.update(user, new BasicDBObject("$pop", new BasicDBObject("recentFeatures", "-1")));
			}
			collection.update(user, new BasicDBObject("$addToSet", new BasicDBObject("recentFeatures", featureDetails)));
		}
	} else {
		collection.update(user, new BasicDBObject("$addToSet", new BasicDBObject("recentFeatures", featureDetails)));
	}

	return Response.ok().build();
}
 
Example 9
Source File: Recents.java    From XBDD with Apache License 2.0
@PUT
@Path("/build/{product}/{major}.{minor}.{servicePack}/{build}")
@Produces(MediaType.APPLICATION_JSON)
public Response addBuildToRecents(@BeanParam final Coordinates coordinates) {

	final DBObject buildCoords = coordinates.getReportCoordinates();

	final DBCollection collection = this.mongoLegacyDb.getCollection("users");

	final BasicDBObject user = new BasicDBObject();
	user.put("user_id", LoggedInUserUtil.getLoggedInUser().getUserId());

	final DBObject blank = new BasicDBObject();
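	// upsert the user document and return it (findAndModify args: query, fields, sort, remove, update, returnNew, upsert)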
	final DBObject doc = collection.findAndModify(user, blank, blank, false, new BasicDBObject("$set", user), true, true);

	if (doc.containsField("recentBuilds")) {
		final BasicDBList buildArray = (BasicDBList) doc.get("recentBuilds");
		if (buildArray.contains(buildCoords)) {
			// BasicDBObject toMove = (BasicDBObject) featureArray.get(featureArray.indexOf(featureDetails));
			buildArray.remove(buildCoords);
			buildArray.add(buildCoords);
			collection.update(user, new BasicDBObject("$set", new BasicDBObject("recentBuilds", buildArray)));
		} else {
			if (buildArray.size() >= 5) {
				collection.update(user, new BasicDBObject("$pop", new BasicDBObject("recentBuilds", "-1")));
			}
			collection.update(user, new BasicDBObject("$addToSet", new BasicDBObject("recentBuilds", buildCoords)));
		}
	} else {
		collection.update(user, new BasicDBObject("$addToSet", new BasicDBObject("recentBuilds", buildCoords)));
	}

	return Response.ok().build();
}
 
Example 10
Source File: ContentCache.java    From socialite with Apache License 2.0
private List<Content> getContentFromDocument(final String contentKey){
    List<Content> result = null;
    if(this.dbCache != null){
        BasicDBList dbContent = (BasicDBList) this.dbCache.get(contentKey);
        if(dbContent != null){
            result = new ArrayList<Content>(dbContent.size());
            for(int i = 0; i < dbContent.size(); ++i){
                result.add(new Content((DBObject) dbContent.get(i)));
            }
        }            
    }
    return result;
}
 
Example 11
Source File: MongoDbDeltaStoreUtil.java    From swellrt with Apache License 2.0
public static TransformedWaveletDelta deserializeTransformedWaveletDelta(DBObject dbObject)
    throws PersistenceException {

  ParticipantId author = deserializeParicipantId((DBObject) dbObject.get(FIELD_AUTHOR));
  HashedVersion resultingVersion =
      deserializeHashedVersion((DBObject) dbObject.get(FIELD_RESULTINGVERSION));
  long applicationTimestamp = (Long) dbObject.get(FIELD_APPLICATIONTIMESTAMP);

  BasicDBList dbOps = (BasicDBList) dbObject.get(FIELD_OPS);
  ImmutableList.Builder<WaveletOperation> operations = ImmutableList.builder();

  int numOperations = dbOps.size();

  // Code analog to ProtoDeltaStoreDataSerializer.deserialize
  for (int i = 0; i < numOperations; i++) {

    WaveletOperationContext context;
    if (i == numOperations - 1) {
      context = new WaveletOperationContext(author, applicationTimestamp, 1, resultingVersion);
    } else {
      context = new WaveletOperationContext(author, applicationTimestamp, 1);
    }
    operations.add(deserializeWaveletOperation((DBObject) dbOps.get(i), context));
  }

  return new TransformedWaveletDelta(author, resultingVersion, applicationTimestamp,
      operations.build());
}
 
Example 12
Source File: ModelLifecycle.java    From secure-data-service with Apache License 2.0
private String[] toArray(BasicDBList dbList) {
    String[] array = new String[dbList.size()];
    
    int index = 0;
    for (Object item : dbList) {
        array[index] = item.toString();
        index++;
    }
    
    return array;
}
 
Example 13
Source File: MongoDbDeltaStoreUtil.java    From incubator-retired-wave with Apache License 2.0
public static TransformedWaveletDelta deserializeTransformedWaveletDelta(DBObject dbObject)
    throws PersistenceException {

  ParticipantId author = deserializeParicipantId((DBObject) dbObject.get(FIELD_AUTHOR));
  HashedVersion resultingVersion =
      deserializeHashedVersion((DBObject) dbObject.get(FIELD_RESULTINGVERSION));
  long applicationTimestamp = (Long) dbObject.get(FIELD_APPLICATIONTIMESTAMP);

  BasicDBList dbOps = (BasicDBList) dbObject.get(FIELD_OPS);
  ImmutableList.Builder<WaveletOperation> operations = ImmutableList.builder();

  int numOperations = dbOps.size();

  // Code analog to ProtoDeltaStoreDataSerializer.deserialize
  for (int i = 0; i < numOperations; i++) {

    WaveletOperationContext context;
    if (i == numOperations - 1) {
      context = new WaveletOperationContext(author, applicationTimestamp, 1, resultingVersion);
    } else {
      context = new WaveletOperationContext(author, applicationTimestamp, 1);
    }
    operations.add(deserializeWaveletOperation((DBObject) dbOps.get(i), context));
  }

  return new TransformedWaveletDelta(author, resultingVersion, applicationTimestamp,
      operations.build());
}
 
Example 14
Source File: MongoDB.java    From act with GNU General Public License v3.0
private static BasicDBList compare(BasicDBList l, BasicDBList refl, boolean listsAreSet) {
  boolean different = false;
  BasicDBList diff = new BasicDBList();

  if (!listsAreSet) {
    // lists are to be treated as ordered sets and so we can compare element by element
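    // (assumes l and refl have the same length; otherwise refl.get(i) will throw)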
    for (int i = 0; i < l.size(); i++){
      Object val = l.get(i);
      Object refv = refl.get(i);
      Object d;
      if ((d = compare(val, refv, listsAreSet)) != null) {
        different = true;
        diff.add(d);
      } else {
        // elements at this index are identical, but we don't want to muck up the order
        // in case future elements are not identical... so add a null to the diff,
        // BUT IMP: do not set the flag that the list is different
        diff.add(null);
      }
    }
  } else {
    // lists are to be treated as unordered sets: we try to match each element, best
    // effort, against any one of the ref list elements, and proceed greedily on a match

    // we keep this as a list as opposed to a true set because the original (ref)
    // and the current (new) might have (identical) replicates, and so should not
    // be flagged different because of that.
    List<Object> refset = new ArrayList<Object>();
    refset.addAll(refl);

    for (Object e : l) {
      boolean matches_some = false;
      for (Object eref : refset) {
        if (compare(e, eref, listsAreSet) == null) {
          // this object matches something, great, lets move to the next object
          // also remove the matched object from the ref list, so that we have
          // a 1-1 mapping between this and the ref list object
          matches_some = true;
          refset.remove(eref);
          break;
        }
      }
      if (!matches_some) {
        // if this object in new list could not be matched against something,
        // the lists are different
        different = true;
        diff.add(e);
      }
    }

    if (refset.size() != 0) {
      // still some elements remain in the ref list, i.e., sets different
      different = true;
      diff.addAll(refset);
    }

  }

  return different ? diff : null;
}
 
Example 15
Source File: MongoDB.java    From act with GNU General Public License v3.0
private String getReactantFromMongoDocument(BasicDBObject family, String which, int i) {
  BasicDBList o = (BasicDBList)((DBObject)family.get("enz_summary")).get(which);
  if (i >= o.size())
    return "";
  return "" + (Long)((DBObject)o.get(i)).get("pubchem");
}
 
Example 16
Source File: MongodbInputDiscoverFieldsImpl.java    From pentaho-mongodb-plugin with Apache License 2.0
private static void processList( BasicDBList list, String path, String name, Map<String, MongoField> lookup ) {

    if ( list.size() == 0 ) {
      return; // can't infer anything about an empty list
    }

    String nonPrimitivePath = path + "[-]"; //$NON-NLS-1$
    String primitivePath = path;

    for ( int i = 0; i < list.size(); i++ ) {
      Object element = list.get( i );

      if ( element instanceof BasicDBObject ) {
        processRecord( (BasicDBObject) element, nonPrimitivePath, name + "[" + i + //$NON-NLS-1$
            ":" + i + "]", lookup ); //$NON-NLS-1$ //$NON-NLS-2$
      } else if ( element instanceof BasicDBList ) {
        processList( (BasicDBList) element, nonPrimitivePath, name + "[" + i + //$NON-NLS-1$
            ":" + i + "]", lookup ); //$NON-NLS-1$ //$NON-NLS-2$
      } else {
        // some sort of primitive
        String finalPath = primitivePath + "[" + i + "]"; //$NON-NLS-1$ //$NON-NLS-2$
        String finalName = name + "[" + i + "]"; //$NON-NLS-1$ //$NON-NLS-2$
        if ( !lookup.containsKey( finalPath ) ) {
          MongoField newField = new MongoField();
          int kettleType = mongoToKettleType( element );
          // Following suit of mongoToKettleType by interpreting null as String type
          newField.m_mongoType = String.class;
          if ( element != null ) {
            newField.m_mongoType = element.getClass();
          }
          newField.m_fieldName = finalPath;
          newField.m_fieldPath = finalName;
          newField.m_kettleType = ValueMeta.getTypeDesc( kettleType );
          newField.m_percentageOfSample = 1;

          lookup.put( finalPath, newField );
        } else {
          // update max indexes in array parts of name
          MongoField m = lookup.get( finalPath );
          Class<?> elementClass = String.class;
          if ( element != null ) {
            elementClass = element.getClass();
          }
          if ( !m.m_mongoType.isAssignableFrom( elementClass ) ) {
            m.m_disparateTypes = true;
          }
          m.m_percentageOfSample++;
          updateMinMaxArrayIndexes( m, finalName );
        }
      }
    }
  }
 
Example 17
Source File: MongoField.java    From pentaho-mongodb-plugin with Apache License 2.0
/**
 * Convert a mongo array object to a Kettle field value (for the field defined in this path)
 * 
 * @param mongoList
 *          the array to convert
 * @return the kettle field value
 * @throws KettleException
 *           if a problem occurs
 */
public Object convertToKettleValue( BasicDBList mongoList ) throws KettleException {

  if ( mongoList == null ) {
    return null;
  }

  if ( m_tempParts.size() == 0 ) {
    throw new KettleException( BaseMessages.getString( PKG, "MongoDbInput.ErrorMessage.MalformedPathArray" ) ); //$NON-NLS-1$
  }

  String part = m_tempParts.remove( 0 );
  if ( !( part.charAt( 0 ) == '[' ) ) {
    // we're expecting an array at this point - this document does not
    // contain our field
    return null;
  }

  String index = part.substring( 1, part.indexOf( ']' ) );
  int arrayI = 0;
  try {
    arrayI = Integer.parseInt( index.trim() );
  } catch ( NumberFormatException e ) {
    throw new KettleException( BaseMessages.getString( PKG,
        "MongoDbInput.ErrorMessage.UnableToParseArrayIndex", index ) ); //$NON-NLS-1$
  }

  if ( part.indexOf( ']' ) < part.length() - 1 ) {
    // more dimensions to the array
    part = part.substring( part.indexOf( ']' ) + 1, part.length() );
    m_tempParts.add( 0, part );
  }

  if ( arrayI >= mongoList.size() || arrayI < 0 ) {
    return null;
  }

  Object element = mongoList.get( arrayI );

  if ( element == null ) {
    return null;
  }

  if ( m_tempParts.size() == 0 ) {
    // we're expecting a leaf primitive - let's see if that's what we have
    // here...
    return getKettleValue( element );
  }

  if ( element instanceof BasicDBObject ) {
    return convertToKettleValue( ( (BasicDBObject) element ) );
  }

  if ( element instanceof BasicDBList ) {
    return convertToKettleValue( ( (BasicDBList) element ) );
  }

  // must mean we have a primitive here, but we're expecting to process more
  // path so this doesn't match us - return null
  return null;
}
 
Example 18
Source File: MongoNativeExtractor.java    From deep-spark with Apache License 2.0
/**
 * Calculate splits.
 *
 * @param collection the collection
 * @return the deep partition [ ]
 */
private DeepPartition[] calculateSplits(DBCollection collection) {

    BasicDBList splitData = getSplitData(collection);
    List<ServerAddress> serverAddressList = collection.getDB().getMongo().getServerAddressList();

    if (splitData == null) {
        Pair<BasicDBList, List<ServerAddress>> pair = getSplitDataCollectionShardEnviroment(getShards(collection),
                collection.getDB().getName(),
                collection.getName());
        splitData = pair.left;
        serverAddressList = pair.right;
    }

    Object lastKey = null; // Lower boundary of the first min split

    List<String> stringHosts = new ArrayList<>();

    for (ServerAddress serverAddress : serverAddressList) {
        stringHosts.add(serverAddress.toString());
    }
    int i = 0;

    MongoPartition[] partitions = new MongoPartition[splitData.size() + 1];
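    // one partition per split boundary, plus a final open-ended range after the last boundary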

    for (Object aSplitData : splitData) {

        BasicDBObject currentKey = (BasicDBObject) aSplitData;

        Object currentO = currentKey.get(MONGO_DEFAULT_ID);

        partitions[i] = new MongoPartition(mongoDeepJobConfig.getRddId(), i, new DeepTokenRange(lastKey,
                currentO, stringHosts), MONGO_DEFAULT_ID);

        lastKey = currentO;
        i++;
    }
    QueryBuilder queryBuilder = QueryBuilder.start(MONGO_DEFAULT_ID);
    queryBuilder.greaterThanEquals(lastKey);
    partitions[i] = new MongoPartition(0, i, new DeepTokenRange(lastKey, null, stringHosts), MONGO_DEFAULT_ID);
    return partitions;
}
 
Example 19
Source File: RollupStorageInterceptor.java    From hvdf with Apache License 2.0
private void updateBatch(BasicDBList sample) {
	
	// The batch may span collection splits, so maintain
	// a current collection and batch operation
	BulkWriteOperation currentOp = null;
	int currentOpOffset = 0;
	int sampleIdx = 0;
	DBCollection currentColl = null;	
	
	logger.debug("Received batch of size : {}", sample.size());
	
	try{
		for(; sampleIdx < sample.size(); ++sampleIdx){
			
			// prepare the sample to batch
			BasicDBObject doc = (BasicDBObject) (sample.get(sampleIdx));
			long timestamp = this.rollupPeriod * (doc.getLong(Sample.TS_KEY) / this.rollupPeriod);			
			DBCollection collection = collectionAllocator.getCollection(timestamp);
			
			// if the collection has changed, commit the current
			// batch to the collection and start new
			if(collection.equals(currentColl) == false){
				executeBatchUpdate(currentOp, sample);
				currentColl = collection;
				currentOp = collection.initializeUnorderedBulkOperation();
				currentOpOffset = sampleIdx;
			}
			
			// put the doc insert into the batch
			// Ask the id allocator for the query
			BasicDBObject query = this.idFactory.getQuery(doc.get(Sample.SOURCE_KEY), timestamp);
			
			// Build the update clause using the ops list
			BasicDBObject update = new BasicDBObject();
			for(RollupOperation rollupOp : this.rollupOps){
				
				DBObject updateClause = rollupOp.getUpdateClause(doc);
				
				// Check for top-level operators that already exist so they don't overwrite each other
				for(String key : updateClause.keySet()){
					BasicDBObject existingClause = (BasicDBObject) update.get(key);
					if(existingClause != null){
						// Merge the arguments to the top level op
						existingClause.putAll((DBObject)updateClause.get(key));
					} else {
						update.put(key, updateClause.get(key));
					}
				}
			}
			
			currentOp.find(query).upsert().updateOne(update);
		}		
		
		// Finalize the last batch
		executeBatchUpdate(currentOp, sample);		
		
	} catch(Exception ex){
		
		// One of the bulk writes has failed
		BasicDBList failedDocs = new BasicDBList();
		if(ex instanceof BulkWriteException){
			
			// We need to figure out the failures and remove the writes
			// that worked from the batch
			int batchSize = sampleIdx - currentOpOffset;
			BulkWriteException bwex = (BulkWriteException)ex;
			int errorCount = bwex.getWriteErrors().size(); 
			if(errorCount < batchSize){
				
				for(BulkWriteError we : bwex.getWriteErrors()){
					failedDocs.add(sample.get(currentOpOffset + we.getIndex()));
				}
				
				// since we have accounted for the failures in the current
				// batch, move the offset forward to the last sample
				currentOpOffset = sampleIdx;					
			}
		}
		
		// If this happened part way through the batch, send remaining 
		// docs to failed list and update sample to contain only failed docs
		if(currentOpOffset > 0){
			for(; currentOpOffset < sample.size(); ++currentOpOffset)
				failedDocs.add(sample.get(currentOpOffset));
			sample.clear();
			sample.addAll(failedDocs);	
		}
		
		throw ex;
	}
}
 
Example 20
Source File: RawStorageInterceptor.java    From hvdf with Apache License 2.0
private void storeBatch(BasicDBList sample, BasicDBList resultList) {
	
	// The batch may span collection splits, so maintain
	// a current collection and batch operation
	BulkWriteOperation currentOp = null;
	int currentOpOffset = 0;
	int sampleIdx = 0;
	DBCollection currentColl = null;	
	
	logger.debug("Received batch of size : {}", sample.size());
	
	try{
		for(; sampleIdx < sample.size(); ++sampleIdx){
			
			// prepare the sample to batch
			BasicDBObject doc = (BasicDBObject) (sample.get(sampleIdx));
			SampleId _id = this.idFactory.createId(doc);
			doc.put(Sample.ID_KEY, _id.toObject());
			resultList.add(_id.toObject());
			long timestamp = doc.getLong(Sample.TS_KEY);
			DBCollection collection = collectionAllocator.getCollection(timestamp);
			
			// if the collection has changed, commit the current
			// batch to the collection and start new
			if(collection.equals(currentColl) == false){
				executeBatchWrite(currentOp, sample);
				currentColl = collection;
				currentOp = collection.initializeUnorderedBulkOperation();
				currentOpOffset = sampleIdx;
			}
			
			// put the doc insert into the batch
			currentOp.insert(doc);
		}		
		
		// Finalize the last batch
		executeBatchWrite(currentOp, sample);		
		
	} catch(Exception ex){
		
		// One of the bulk writes has failed
		BasicDBList failedDocs = new BasicDBList();
		if(ex instanceof BulkWriteException){
			
			// We need to figure out the failures and remove the writes
			// that worked from the batch
			int batchSize = sampleIdx - currentOpOffset;
			BulkWriteException bwex = (BulkWriteException)ex;
			int errorCount = bwex.getWriteErrors().size(); 
			if(errorCount < batchSize){
				
				for(BulkWriteError we : bwex.getWriteErrors()){
					failedDocs.add(sample.get(currentOpOffset + we.getIndex()));
				}
				
				// since we have accounted for the failures in the current
				// batch, move the offset forward to the last sample
				currentOpOffset = sampleIdx;					
			}
		}
		
		// If this happened part way through the batch, send remaining 
		// docs to failed list and update sample to contain only failed docs
		if(currentOpOffset > 0){
			for(; currentOpOffset < sample.size(); ++currentOpOffset)
				failedDocs.add(sample.get(currentOpOffset));
			sample.clear();
			sample.addAll(failedDocs);	
		}
		
		// TODO : the result Ids need handling here as well;
		// the failed doc Ids must be pulled from the resultList
		throw ex;
	}
}