Java Code Examples for com.mongodb.BasicDBObject#getLong()

The following examples show how to use com.mongodb.BasicDBObject#getLong(). You can vote up the examples you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: RawStorageInterceptor.java    From hvdf with Apache License 2.0 6 votes vote down vote up
/**
 * Stores a single sample (or delegates a whole list of samples) into the
 * time-sliced raw storage, reporting the generated id(s) via resultList.
 *
 * @param sample     a single sample document, or a BasicDBList when isList is true
 * @param isList     true when sample is actually a batch of samples
 * @param resultList receives the generated _id object for every stored sample
 */
public void pushSample(DBObject sample, boolean isList, BasicDBList resultList) {

	// A list payload is handed wholesale to the batch path
	if (isList) {
		storeBatch((BasicDBList) sample, resultList);
		return;
	}

	// A null single sample is silently ignored
	if (sample == null) {
		return;
	}

	// Stamp the document with a generated id that embeds the sample
	// time, and report that id back to the caller
	BasicDBObject doc = (BasicDBObject) sample;
	SampleId _id = this.idFactory.createId(doc);
	doc.put(Sample.ID_KEY, _id.toObject());
	resultList.add(_id.toObject());

	// Route the document to the collection slice owning its timestamp
	long timestamp = doc.getLong(Sample.TS_KEY);
	DBCollection collection = collectionAllocator.getCollection(timestamp);
	collection.insert(doc);
}
 
Example 2
Source File: TransmitterRawData.java    From xDrip with GNU General Public License v3.0 5 votes vote down vote up
/**
 * Rehydrates a TransmitterRawData record from its MongoDB document form.
 *
 * NOTE(review): BasicDBObject's getInt/getLong accessors throw when the key
 * is absent or not numeric, so {@code src} is assumed to contain every
 * field read below — TODO confirm against the code that writes these docs.
 *
 * @param src the raw-data document as stored in MongoDB
 */
public TransmitterRawData(BasicDBObject src) {
	TransmissionId = src.getInt("TransmissionId");
	TransmitterId  = src.getString("TransmitterId");
	RawValue       = src.getInt("RawValue");
	FilteredValue  = src.getInt("FilteredValue");
	BatteryLife    = src.getInt("BatteryLife");
	ReceivedSignalStrength = src.getInt("ReceivedSignalStrength");
	// 64-bit capture time (presumably epoch millis — confirm with uploader)
	CaptureDateTime = src.getLong("CaptureDateTime");
	UploaderBatteryLife = src.getInt("UploaderBatteryLife");
}
 
Example 3
Source File: LibreWifiData.java    From xDrip with GNU General Public License v3.0 5 votes vote down vote up
/**
 * Rebuilds a LibreWifiData sample from the MongoDB document {@code src}.
 *
 * NOTE(review): the int/long accessors fail on a missing or non-numeric
 * key, so the document is assumed complete — confirm against the writer.
 *
 * @param src the sensor-sample document as stored in MongoDB
 */
public LibreWifiData(BasicDBObject src) {

        BlockBytes = src.getString("BlockBytes");
        // 64-bit capture time (presumably epoch millis — TODO confirm)
        CaptureDateTime = src.getLong("CaptureDateTime");
        // int used as a boolean-ish flag in the stored document
        ChecksumOk = src.getInt("ChecksumOk");
        DebugInfo = src.getString("DebugInfo");
        TomatoBatteryLife = src.getInt("TomatoBatteryLife");
        UploaderBatteryLife = src.getInt("UploaderBatteryLife");
        Uploaded = src.getInt("Uploaded");
        HwVersion = src.getString("HwVersion");
        FwVersion = src.getString("FwVersion");
        SensorId = src.getString("SensorId");
        patchUid = src.getString("patchUid");
        patchInfo = src.getString("patchInfo");
    }
 
Example 4
Source File: TransmitterRawData.java    From xDrip-plus with GNU General Public License v3.0 5 votes vote down vote up
/**
 * Rehydrates a TransmitterRawData record from its MongoDB document form.
 *
 * NOTE(review): BasicDBObject's getInt/getLong accessors throw when the key
 * is absent or not numeric, so {@code src} is assumed to contain every
 * field read below — TODO confirm against the code that writes these docs.
 *
 * @param src the raw-data document as stored in MongoDB
 */
public TransmitterRawData(BasicDBObject src) {
	TransmissionId = src.getInt("TransmissionId");
	TransmitterId  = src.getString("TransmitterId");
	RawValue       = src.getInt("RawValue");
	FilteredValue  = src.getInt("FilteredValue");
	BatteryLife    = src.getInt("BatteryLife");
	ReceivedSignalStrength = src.getInt("ReceivedSignalStrength");
	// 64-bit capture time (presumably epoch millis — confirm with uploader)
	CaptureDateTime = src.getLong("CaptureDateTime");
	UploaderBatteryLife = src.getInt("UploaderBatteryLife");
}
 
Example 5
Source File: LibreWifiData.java    From xDrip-plus with GNU General Public License v3.0 5 votes vote down vote up
/**
 * Rebuilds a LibreWifiData sample from the MongoDB document {@code src}.
 *
 * NOTE(review): the int/long accessors fail on a missing or non-numeric
 * key, so the document is assumed complete — confirm against the writer.
 *
 * @param src the sensor-sample document as stored in MongoDB
 */
public LibreWifiData(BasicDBObject src) {

        BlockBytes = src.getString("BlockBytes");
        // 64-bit capture time (presumably epoch millis — TODO confirm)
        CaptureDateTime = src.getLong("CaptureDateTime");
        // int used as a boolean-ish flag in the stored document
        ChecksumOk = src.getInt("ChecksumOk");
        DebugInfo = src.getString("DebugInfo");
        TomatoBatteryLife = src.getInt("TomatoBatteryLife");
        UploaderBatteryLife = src.getInt("UploaderBatteryLife");
        Uploaded = src.getInt("Uploaded");
        HwVersion = src.getString("HwVersion");
        FwVersion = src.getString("FwVersion");
        SensorId = src.getString("SensorId");
        patchUid = src.getString("patchUid");
        patchInfo = src.getString("patchInfo");
    }
 
Example 6
Source File: TransmitterRawData.java    From xDrip-Experimental with GNU General Public License v3.0 5 votes vote down vote up
/**
 * Rehydrates a TransmitterRawData record from its MongoDB document form.
 *
 * NOTE(review): BasicDBObject's getInt/getLong accessors throw when the key
 * is absent or not numeric, so {@code src} is assumed to contain every
 * field read below — TODO confirm against the code that writes these docs.
 *
 * @param src the raw-data document as stored in MongoDB
 */
public TransmitterRawData(BasicDBObject src) {
	TransmissionId = src.getInt("TransmissionId");
	TransmitterId  = src.getString("TransmitterId");
	RawValue       = src.getInt("RawValue");
	FilteredValue  = src.getInt("FilteredValue");
	BatteryLife    = src.getInt("BatteryLife");
	ReceivedSignalStrength = src.getInt("ReceivedSignalStrength");
	// 64-bit capture time (presumably epoch millis — confirm with uploader)
	CaptureDateTime = src.getLong("CaptureDateTime");
	UploaderBatteryLife = src.getInt("UploaderBatteryLife");
}
 
Example 7
Source File: TransmitterRawData.java    From xDrip with GNU General Public License v3.0 5 votes vote down vote up
/**
 * Rehydrates a TransmitterRawData record from its MongoDB document form.
 *
 * NOTE(review): BasicDBObject's getInt/getLong accessors throw when the key
 * is absent or not numeric, so {@code src} is assumed to contain every
 * field read below — TODO confirm against the code that writes these docs.
 *
 * @param src the raw-data document as stored in MongoDB
 */
public TransmitterRawData(BasicDBObject src) {
	TransmissionId = src.getInt("TransmissionId");
	TransmitterId  = src.getString("TransmitterId");
	RawValue       = src.getInt("RawValue");
	FilteredValue  = src.getInt("FilteredValue");
	BatteryLife    = src.getInt("BatteryLife");
	ReceivedSignalStrength = src.getInt("ReceivedSignalStrength");
	// 64-bit capture time (presumably epoch millis — confirm with uploader)
	CaptureDateTime = src.getLong("CaptureDateTime");
	UploaderBatteryLife = src.getInt("UploaderBatteryLife");
}
 
Example 8
Source File: RollupStorageInterceptor.java    From hvdf with Apache License 2.0 5 votes vote down vote up
/**
 * Applies a single sample (or delegates a batch) as a rollup upsert
 * against the collection owning the sample's rollup period.
 *
 * @param sample    a single sample document, or a BasicDBList when isList is true
 * @param isList    true when sample is actually a batch of samples
 * @param resultIds unused by the rollup path (present to satisfy the interface)
 */
@Override
public void pushSample(DBObject sample, boolean isList, BasicDBList resultIds) {

	// Batched payload: hand the whole list to the bulk update path
	if (isList) {
		updateBatch((BasicDBList) sample);
		return;
	}

	// A null single sample is silently ignored
	if (sample == null) {
		return;
	}

	// Round the sample time down to the start of its rollup period and
	// resolve the collection that owns that period
	BasicDBObject doc = (BasicDBObject) sample;
	long timestamp = this.rollupPeriod * (doc.getLong(Sample.TS_KEY) / this.rollupPeriod);
	DBCollection collection = collectionAllocator.getCollection(timestamp);

	// The id factory addresses the rollup document for this source/period pair
	BasicDBObject query = this.idFactory.getQuery(doc.get(Sample.SOURCE_KEY), timestamp);

	// Fold every rollup operation's clause into one update document,
	// merging arguments when two ops share a top-level operator so a
	// later clause does not clobber an earlier one
	BasicDBObject update = new BasicDBObject();
	for (RollupOperation rollupOp : this.rollupOps) {
		DBObject updateClause = rollupOp.getUpdateClause(doc);

		for (String op : updateClause.keySet()) {
			BasicDBObject existing = (BasicDBObject) update.get(op);
			if (existing == null) {
				update.put(op, updateClause.get(op));
			} else {
				// Merge this clause's arguments into the existing operator
				existing.putAll((DBObject) updateClause.get(op));
			}
		}
	}

	// Upsert so the first sample of a period creates the rollup document
	collection.update(query, update, true, false);
}
 
Example 9
Source File: RollupStorageInterceptor.java    From hvdf with Apache License 2.0 4 votes vote down vote up
/**
 * Applies a list of samples as rollup upserts, batching consecutive
 * samples that target the same period collection into a single unordered
 * bulk operation.
 *
 * On failure the list {@code sample} is rewritten in place so it contains
 * only the documents that were NOT persisted, and the exception is
 * rethrown for the caller to handle.
 *
 * @param sample batch of sample documents (each a BasicDBObject)
 */
private void updateBatch(BasicDBList sample) {
	
	// The batch may span collection splits, so maintain
	// a current collection and batch operation
	BulkWriteOperation currentOp = null;
	int currentOpOffset = 0;
	int sampleIdx = 0;
	DBCollection currentColl = null;	
	
	logger.debug("Received batch of size : {}", sample.size());
	
	try{
		for(; sampleIdx < sample.size(); ++sampleIdx){
			
			// prepare the sample to batch
			BasicDBObject doc = (BasicDBObject) (sample.get(sampleIdx));
			// Round the sample time down to the start of its rollup period
			long timestamp = this.rollupPeriod * (doc.getLong(Sample.TS_KEY) / this.rollupPeriod);			
			DBCollection collection = collectionAllocator.getCollection(timestamp);
			
			// if the collection has changed, commit the current
			// batch to the collection and start new
			// NOTE(review): on the first iteration currentOp is null here —
			// presumably executeBatchUpdate tolerates a null op; confirm.
			if(collection.equals(currentColl) == false){
				executeBatchUpdate(currentOp, sample);
				currentColl = collection;
				currentOp = collection.initializeUnorderedBulkOperation();
				currentOpOffset = sampleIdx;
			}
			
			// put the doc insert into the batch
			// Ask the id allocator for the query
			BasicDBObject query = this.idFactory.getQuery(doc.get(Sample.SOURCE_KEY), timestamp);
			
			// Build the update clause using the ops list
			BasicDBObject update = new BasicDBObject();
			for(RollupOperation rollupOp : this.rollupOps){
				
				DBObject updateClause = rollupOp.getUpdateClause(doc);
				
				// Check for top level operators that already exist so they dont overwrite
				for(String key : updateClause.keySet()){
					BasicDBObject existingClause = (BasicDBObject) update.get(key);
					if(existingClause != null){
						// Merge the arguments to the top level op
						existingClause.putAll((DBObject)updateClause.get(key));
					} else {
						update.put(key, updateClause.get(key));
					}
				}
			}
			
			// Upsert so the first sample of a period creates the document
			currentOp.find(query).upsert().updateOne(update);
		}		
		
		// Finalize the last batch
		executeBatchUpdate(currentOp, sample);		
		
	} catch(Exception ex){
		
		// One of the bulk writes has failed
		BasicDBList failedDocs = new BasicDBList();
		if(ex instanceof BulkWriteException){
			
			// We need to figure out the failures and remove the writes
			// that worked from the batch
			int batchSize = sampleIdx - currentOpOffset;
			BulkWriteException bwex = (BulkWriteException)ex;
			int errorCount = bwex.getWriteErrors().size(); 
			if(errorCount < batchSize){
				
				// Write-error indexes are relative to the current bulk op,
				// so offset them back into the full sample list
				for(BulkWriteError we : bwex.getWriteErrors()){
					failedDocs.add(sample.get(currentOpOffset + we.getIndex()));
				}
				
				// since we have accounted for the failures in the current
				// batch, move the offset forward to the last sample
				currentOpOffset = sampleIdx;					
			}
		}
		
		// If this happened part way through the batch, send remaining 
		// docs to failed list and update sample to contain only failed docs
		// NOTE(review): when currentOpOffset == 0 (failure in the very first
		// batch) this rewrite is skipped and sample is left untouched —
		// confirm that is the intended contract.
		if(currentOpOffset > 0){
			for(; currentOpOffset < sample.size(); ++currentOpOffset)
				failedDocs.add(sample.get(currentOpOffset));
			sample.clear();
			sample.addAll(failedDocs);	
		}
		
		throw ex;
	}
}
 
Example 10
Source File: RawStorageInterceptor.java    From hvdf with Apache License 2.0 4 votes vote down vote up
/**
 * Inserts a list of samples into raw storage, batching consecutive
 * samples that fall into the same time-slice collection into a single
 * unordered bulk write. Each document is stamped with a generated id
 * (embedding the sample time) which is also appended to resultList.
 *
 * On failure the list {@code sample} is rewritten in place so it contains
 * only the documents that were NOT persisted, and the exception is
 * rethrown for the caller to handle.
 *
 * @param sample     batch of sample documents (each a BasicDBObject)
 * @param resultList receives the generated _id object for every sample
 */
private void storeBatch(BasicDBList sample, BasicDBList resultList) {
	
	// The batch may span collection splits, so maintain
	// a current collection and batch operation
	BulkWriteOperation currentOp = null;
	int currentOpOffset = 0;
	int sampleIdx = 0;
	DBCollection currentColl = null;	
	
	logger.debug("Received batch of size : {}", sample.size());
	
	try{
		for(; sampleIdx < sample.size(); ++sampleIdx){
			
			// prepare the sample to batch
			BasicDBObject doc = (BasicDBObject) (sample.get(sampleIdx));
			SampleId _id = this.idFactory.createId(doc);
			doc.put(Sample.ID_KEY, _id.toObject());
			resultList.add(_id.toObject());
			long timestamp = doc.getLong(Sample.TS_KEY);
			DBCollection collection = collectionAllocator.getCollection(timestamp);
			
			// if the collection has changed, commit the current
			// batch to the collection and start new
			// NOTE(review): on the first iteration currentOp is null here —
			// presumably executeBatchWrite tolerates a null op; confirm.
			if(collection.equals(currentColl) == false){
				executeBatchWrite(currentOp, sample);
				currentColl = collection;
				currentOp = collection.initializeUnorderedBulkOperation();
				currentOpOffset = sampleIdx;
			}
			
			// put the doc insert into the batch
			currentOp.insert(doc);
		}		
		
		// Finalize the last batch
		executeBatchWrite(currentOp, sample);		
		
	} catch(Exception ex){
		
		// One of the bulk writes has failed
		BasicDBList failedDocs = new BasicDBList();
		if(ex instanceof BulkWriteException){
			
			// We need to figure out the failures and remove the writes
			// that worked from the batch
			int batchSize = sampleIdx - currentOpOffset;
			BulkWriteException bwex = (BulkWriteException)ex;
			int errorCount = bwex.getWriteErrors().size(); 
			if(errorCount < batchSize){
				
				// Write-error indexes are relative to the current bulk op,
				// so offset them back into the full sample list
				for(BulkWriteError we : bwex.getWriteErrors()){
					failedDocs.add(sample.get(currentOpOffset + we.getIndex()));
				}
				
				// since we have accounted for the failures in the current
				// batch, move the offset forward to the last sample
				currentOpOffset = sampleIdx;					
			}
		}
		
		// If this happened part way through the batch, send remaining 
		// docs to failed list and update sample to contain only failed docs
		// NOTE(review): when currentOpOffset == 0 (failure in the very first
		// batch) this rewrite is skipped and sample is left untouched —
		// confirm that is the intended contract.
		if(currentOpOffset > 0){
			for(; currentOpOffset < sample.size(); ++currentOpOffset)
				failedDocs.add(sample.get(currentOpOffset));
			sample.clear();
			sample.addAll(failedDocs);	
		}
		
		// TODO : we also need to handle the result Ids here as well,
		// the failed doc Ids must be pulled from the resultList
		throw ex;
	}
}