org.influxdb.dto.Point Java Examples

The following examples show how to use org.influxdb.dto.Point. Each example is taken from an open-source project; the source file, project, and license are listed above each snippet.
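Before the individual examples, a minimal, self-contained sketch of the basic workflow may help. This is only an orientation sketch, assuming influxdb-java 2.x; the connection URL, credentials, database name, retention policy, and field values below are placeholders, not taken from any of the examples.

import java.util.concurrent.TimeUnit;

import org.influxdb.InfluxDB;
import org.influxdb.InfluxDBFactory;
import org.influxdb.dto.Point;

public class PointQuickStart {
    public static void main(String[] args) {
        // placeholder connection settings
        InfluxDB influxDB = InfluxDBFactory.connect("http://localhost:8086", "user", "password");

        // build a point: measurement name, timestamp, one tag, two fields
        Point point = Point.measurement("cpu")
                .time(System.currentTimeMillis(), TimeUnit.MILLISECONDS)
                .tag("host", "localhost")
                .addField("idle", 90.0)
                .addField("user", 9.0)
                .build();

        // write to the "mydb" database using the "autogen" retention policy
        influxDB.write("mydb", "autogen", point);
        influxDB.close();
    }
}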
Example #1
Source File: InfluxDbSourceTest.java    From hazelcast-jet-contrib with Apache License 2.0
private void fillCpuData(InfluxDB influxDB) {
    long lastTime = 0;
    for (int value = 0; value < VALUE_COUNT; value++) {
        // loop until the value of currentTimeMillis changes
        // workaround for https://github.com/influxdata/influxdb-java/issues/586 which prevents us from
        // assigning the time explicitly. If two items have the same time, the last one wins.
        while (System.currentTimeMillis() == lastTime) {
            sleepMillis(1);
        }
        lastTime = System.currentTimeMillis();
        Cpu cpu = new Cpu("localhost", (double) value);
        influxDB.write(DATABASE_NAME,
                "autogen",
                Point.measurementByPOJO(cpu.getClass())
                        .addFieldsFromPOJO(cpu)
                        .build()
        );
    }
}
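The Cpu class used above is not shown in this snippet. Below is a plausible sketch, assuming the influxdb-java annotation API (@Measurement/@Column) that measurementByPOJO and addFieldsFromPOJO rely on; the exact field and column names are assumptions, and the real class in hazelcast-jet-contrib may differ.

import org.influxdb.annotation.Column;
import org.influxdb.annotation.Measurement;

// Hypothetical POJO backing the example above.
@Measurement(name = "cpu")
public class Cpu {

    // stored as a tag on the measurement
    @Column(name = "host", tag = true)
    private String host;

    // stored as a field value
    @Column(name = "value")
    private Double value;

    public Cpu(String host, Double value) {
        this.host = host;
        this.value = value;
    }
}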
 
Example #2
Source File: BatchProcessorTest.java    From influxdb-java with MIT License
@Test
public void testBatchWriteWithDifferenctRp() throws InterruptedException, IOException {
    InfluxDB mockInfluxDB = mock(InfluxDBImpl.class);
    BatchProcessor batchProcessor = BatchProcessor.builder(mockInfluxDB).actions(Integer.MAX_VALUE)
        .interval(1, TimeUnit.NANOSECONDS).build();

    Point point = Point.measurement("cpu").field("6", "").build();
    BatchProcessor.HttpBatchEntry batchEntry1 = new BatchProcessor.HttpBatchEntry(point, "db1", "rp_1");
    BatchProcessor.HttpBatchEntry batchEntry2 = new BatchProcessor.HttpBatchEntry(point, "db1", "rp_2");

    batchProcessor.put(batchEntry1);
    batchProcessor.put(batchEntry2);

    Thread.sleep(200); // wait for scheduler
    // the same db name with different retention policies should result in two batches instead of one
    verify(mockInfluxDB, times(2)).write(any(BatchPoints.class));
}
 
Example #3
Source File: TicketTest.java    From influxdb-java with MIT License
/**
 * Test for ticket #303
 *
 */
@Test
public void testTicket303() {
	String dbName = "ticket303_" + System.currentTimeMillis();
	this.influxDB.query(new Query("CREATE DATABASE " + dbName));

	Date rundate1 = new Date();
	long rundate1Sec = rundate1.getTime() / 1000;

	Point point1 = Point
			.measurement("TestSlash")
			.time(rundate1Sec, TimeUnit.SECONDS)
			.tag("precision", "Second")
			.addField("MultipleSlash", "echo \\\".ll 12.0i\\\";")
			.build();
	this.influxDB.write(dbName, TestUtils.defaultRetentionPolicy(this.influxDB.version()), point1);
	this.influxDB.query(new Query("DROP DATABASE " + dbName));
}
 
Example #4
Source File: PerformanceTests.java    From influxdb-java with MIT License
@Test
public void testWriteSinglePointPerformance() {
	String dbName = "write_" + System.currentTimeMillis();
	this.influxDB.query(new Query("CREATE DATABASE " + dbName));
	this.influxDB.enableBatch(2000, 100, TimeUnit.MILLISECONDS);
	String rp = TestUtils.defaultRetentionPolicy(this.influxDB.version());
	long start = System.currentTimeMillis();
	for (int j = 0; j < SINGLE_POINT_COUNT; j++) {
		Point point = Point.measurement("cpu")
				.addField("idle", (double) j)
				.addField("user", 2.0 * j)
				.addField("system", 3.0 * j).build();
		this.influxDB.write(dbName, rp, point);
	}
	this.influxDB.disableBatch();
	System.out.println("Single Point Write for " + SINGLE_POINT_COUNT + " writes of Points took:" + (System.currentTimeMillis() - start));
	this.influxDB.query(new Query("DROP DATABASE " + dbName));
}
 
Example #5
Source File: BatchProcessorTest.java    From influxdb-java with MIT License
@Test
public void testSchedulerExceptionHandling() throws InterruptedException, IOException {
    InfluxDB mockInfluxDB = mock(InfluxDBImpl.class);
    BatchProcessor batchProcessor = BatchProcessor.builder(mockInfluxDB).actions(Integer.MAX_VALUE)
        .interval(1, TimeUnit.NANOSECONDS).build();

    doThrow(new RuntimeException()).when(mockInfluxDB).write(any(BatchPoints.class));

    Point point = Point.measurement("cpu").field("6", "").build();
    BatchProcessor.HttpBatchEntry batchEntry1 = new BatchProcessor.HttpBatchEntry(point, "db1", "");
    BatchProcessor.HttpBatchEntry batchEntry2 = new BatchProcessor.HttpBatchEntry(point, "db2", "");

    batchProcessor.put(batchEntry1);
    Thread.sleep(200); // wait for scheduler

    // first try throws an exception
    verify(mockInfluxDB, times(1)).write(any(BatchPoints.class));

    batchProcessor.put(batchEntry2);
    Thread.sleep(200); // wait for scheduler
    // without the try/catch in the scheduler, the 2nd write would not happen
    verify(mockInfluxDB, times(2)).write(any(BatchPoints.class));
}
 
Example #6
Source File: MetricMapper.java    From flink with Apache License 2.0
static Point map(MeasurementInfo info, Instant timestamp, Histogram histogram) {
	HistogramStatistics statistics = histogram.getStatistics();
	return builder(info, timestamp)
		.addField("count", statistics.size())
		.addField("min", statistics.getMin())
		.addField("max", statistics.getMax())
		.addField("mean", statistics.getMean())
		.addField("stddev", statistics.getStdDev())
		.addField("p50", statistics.getQuantile(.50))
		.addField("p75", statistics.getQuantile(.75))
		.addField("p95", statistics.getQuantile(.95))
		.addField("p98", statistics.getQuantile(.98))
		.addField("p99", statistics.getQuantile(.99))
		.addField("p999", statistics.getQuantile(.999))
		.build();
}
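The builder(info, timestamp) helper is defined elsewhere in MetricMapper and is not part of the snippet. Here is a minimal sketch of what it presumably does, assuming MeasurementInfo exposes a name and a tag map; those accessors are assumptions, not the actual Flink code.

// Hypothetical sketch; getName()/getTags() on MeasurementInfo are assumptions.
private static Point.Builder builder(MeasurementInfo info, Instant timestamp) {
    Point.Builder builder = Point
        .measurement(info.getName())
        .time(timestamp.toEpochMilli(), TimeUnit.MILLISECONDS);
    for (Map.Entry<String, String> tag : info.getTags().entrySet()) {
        builder.tag(tag.getKey(), tag.getValue());
    }
    return builder;
}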
 
Example #7
Source File: UDPInfluxDBTest.java    From influxdb-java with MIT License
/**
 * When the size of the batch of points exceeds the UDP limit, the expected exception is
 * java.lang.RuntimeException: java.net.SocketException: The message is
 * larger than the maximum supported by the underlying transport: Datagram
 * send failed
 *
 * @throws Exception
 */
@Test
public void testWriteMultipleStringDataLinesOverUDPLimit() throws Exception {
    //prepare data
    List<String> lineProtocols = new ArrayList<String>();
    int i = 0;
    int length = 0;
    while (true) {
        Point point = Point.measurement("udp_single_poit").addField("v", i).build();
        String lineProtocol = point.lineProtocol();
        length += (lineProtocol.getBytes("utf-8")).length;
        lineProtocols.add(lineProtocol);
        if (length > 65535) {
            break;
        }
    }
    // write a batch of strings whose total size is over 64K
    Assertions.assertThrows(RuntimeException.class, () -> {
        this.influxDB.write(UDP_PORT, lineProtocols);
    });
}
 
Example #8
Source File: InfluxDbSinkTest.java    From hazelcast-jet-contrib with Apache License 2.0
@Test
public void test_influxDbSink_nonExistingDb() {
    IList<Integer> measurements = jet.getList("mem_usage");
    IntStream.range(0, VALUE_COUNT).forEach(measurements::add);
    influxdbContainer.getNewInfluxDB();

    Pipeline p = Pipeline.create();
    int startTime = 0;
    p.readFrom(Sources.list(measurements))
     .map(index -> Point.measurement("mem_usage")
                        .time(startTime + index, TimeUnit.MILLISECONDS)
                        .addField("value", index)
                        .build())
     .writeTo(InfluxDbSinks.influxDb(influxdbContainer.getUrl(), "non-existing", USERNAME, PASSWORD));

    expected.expectMessage("database not found: \"non-existing\"");
    jet.newJob(p).join();
}
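The expected field used in the last two lines is a JUnit 4 ExpectedException rule declared elsewhere in the test class, presumably along these lines:

// Assumed declaration of the rule used above (JUnit 4).
@Rule
public final ExpectedException expected = ExpectedException.none();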
 
Example #9
Source File: InfluxDBSink.java    From flink-learning with Apache License 2.0
@Override
public void invoke(MetricEvent metricEvent, Context context) throws Exception {
    if (StringUtils.isNullOrWhitespaceOnly(metricEvent.getName())) {
        throw new RuntimeException("No measurement defined");
    }

    Point.Builder builder = Point.measurement(metricEvent.getName())
            .time(metricEvent.getTimestamp(), TimeUnit.MILLISECONDS);

    if (!CollectionUtil.isNullOrEmpty(metricEvent.getFields())) {
        builder.fields(metricEvent.getFields());
    }

    if (!CollectionUtil.isNullOrEmpty(metricEvent.getTags())) {
        builder.tag(metricEvent.getTags());
    }

    Point point = builder.build();
    influxDBClient.write(point);
}
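The influxDBClient field is initialized outside this snippet. A rough sketch of an open() method that could set it up follows; the address, credentials, database name, and batch settings are placeholders, not the project's actual configuration.

// Hypothetical initialization; connection settings are placeholders.
@Override
public void open(Configuration parameters) throws Exception {
    influxDBClient = InfluxDBFactory.connect("http://localhost:8086", "user", "password");
    influxDBClient.setDatabase("metrics");
    // buffer up to 2000 points, flush at least every 100 ms
    influxDBClient.enableBatch(2000, 100, TimeUnit.MILLISECONDS);
}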
 
Example #10
Source File: InfluxdbDao.java    From dapeng-soa with Apache License 2.0
public void writePoint(DataPoint dataPoint) {
    if (null == influxDB) {
        influxDB = getInfluxDBConnection();
    }
    long now = System.currentTimeMillis();
    Point.Builder commit = Point.measurement(dataPoint.bizTag);
    dataPoint.values.forEach(commit::addField);
    dataPoint.tags.forEach(commit::tag);
    commit.time(dataPoint.getTimestamp() == 0 ? now : dataPoint.getTimestamp(), TimeUnit.MILLISECONDS);
    try {
        influxDB.write(dataPoint.database, "", commit.build());
    } finally {
        if (influxDB != null) {
            influxDB.close();
        }
    }
}
 
Example #11
Source File: GenericRecordConverter.java    From datacollector with Apache License 2.0
@Override
public List<Point> getPoints(Record record) throws OnRecordErrorException {
  List<Point> points = new ArrayList<>();

  verifyRequireFieldsPresent(record);

  final String measurementName = record.get(conf.measurementField).getValueAsString();

  for (String fieldPath : conf.valueFields) {
    if (!record.has(fieldPath)) {
      continue;
    }
    Point.Builder point = Point
        .measurement(measurementName)
        .tag(RecordConverterUtil.getTags(conf.tagFields, record))
        .field(CollectdRecordConverter.stripPathPrefix(fieldPath), record.get(fieldPath).getValue());

    if (!conf.timeField.isEmpty()) {
      point.time(getTime(record), conf.timeUnit);
    }

    points.add(point.build());
  }
  return points;
}
 
Example #12
Source File: TicketTest.java    From influxdb-java with MIT License
/**
 * Test for ticket #39
 *
 */
@Test
public void testTicket39() {
	String dbName = "ticket39_" + System.currentTimeMillis();
	this.influxDB.query(new Query("CREATE DATABASE " + dbName));
	BatchPoints batchPoints = BatchPoints
			.database(dbName)
			.tag("async", "true")
			.retentionPolicy(TestUtils.defaultRetentionPolicy(this.influxDB.version()))
			.consistency(InfluxDB.ConsistencyLevel.ALL)
			.build();
	Point.Builder builder = Point.measurement("my_type");
	builder.addField("my_field", "string_value");
	Point point = builder.build();
	batchPoints.point(point);
	this.influxDB.write(batchPoints);
	this.influxDB.query(new Query("DROP DATABASE " + dbName));
}
 
Example #13
Source File: InfluxLogger.java    From Okra with Apache License 2.0
public static void main(String[] args) {
    String dbName = "centos_test_db";

    InfluxDB influxDB = InfluxDBFactory.connect("http://192.168.0.71:18086", "influxdbUser", "influxdbPsw");

    // Flush every 2000 Points, at least every 100ms
    influxDB.enableBatch(2000, 100, TimeUnit.MILLISECONDS);

    for (int i = 0; i < 50; i++) {
        try {
            Thread.sleep(1000);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        Point point2 = Point.measurement("disk")
                .time(System.currentTimeMillis(), TimeUnit.MILLISECONDS)
                .addField("used", Math.random() * 80L)
                .addField("free", Math.random() * 30L)
                .build();
        influxDB.write(dbName, "autogen", point2);
    }

    System.out.println();
}
 
Example #14
Source File: BatchOptionsTest.java    From influxdb-java with MIT License
/**
 * Test the implementation of {@link BatchOptions#exceptionHandler(BiConsumer)} }.
 * @throws InterruptedException
 */
@Test
public void testHandlerOnRetryImpossible() throws InterruptedException {

  String dbName = "write_unittest_" + System.currentTimeMillis();

  try {
    BiConsumer<Iterable<Point>, Throwable> mockHandler = mock(BiConsumer.class);
    BatchOptions options = BatchOptions.DEFAULTS.exceptionHandler(mockHandler).flushDuration(100);

    influxDB.setDatabase(dbName);
    influxDB.enableBatch(options);

    writeSomePoints(influxDB, 1);

    Thread.sleep(200);
    verify(mockHandler, times(1)).accept(any(), any());

    QueryResult result = influxDB.query(new Query("select * from weather", dbName));
    Assertions.assertNull(result.getResults().get(0).getSeries());
  } finally {
    influxDB.disableBatch();
  }

}
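writeSomePoints(influxDB, 1) is a helper from the surrounding test class and is not shown here. A plausible sketch follows, assuming it writes to the "weather" measurement that the test queries afterwards; the field names are assumptions.

// Hypothetical helper; the measurement name is implied by the
// "select * from weather" query above, the field is an assumption.
private void writeSomePoints(InfluxDB influxDB, int n) {
    for (int i = 0; i < n; i++) {
        Point point = Point.measurement("weather")
            .time(i, TimeUnit.HOURS)
            .addField("temperature", (double) i)
            .build();
        influxDB.write(point);
    }
}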
 
Example #15
Source File: TicketTest.java    From influxdb-java with MIT License
/**
 * Test for ticket #38
 *
 */
@Test
public void testTicket38() {
	String dbName = "ticket38_" + System.currentTimeMillis();
	this.influxDB.query(new Query("CREATE DATABASE " + dbName));
	Point point1 = Point
			.measurement("metric")
			.time(System.currentTimeMillis(), TimeUnit.MILLISECONDS)
			.addField("value", 5.0)
			.tag("host", "host A")
			.tag("host", "host-B")
			.tag("host", "host-\"C")
			.tag("region", "region")
			.build();
	this.influxDB.write(dbName, TestUtils.defaultRetentionPolicy(this.influxDB.version()), point1);
	this.influxDB.query(new Query("DROP DATABASE " + dbName));
}
 
Example #16
Source File: ManagementDAO.java    From skywalking with Apache License 2.0
@Override
public void insert(final Model model, final ManagementData managementData) throws IOException {
    final WhereQueryImpl<SelectQueryImpl> query = select()
        .column(ID_COLUMN).column(NAME)
        .from(client.getDatabase(), UITemplate.INDEX_NAME)
        .where(eq(ID_COLUMN, managementData.id()));
    QueryResult.Series series = client.queryForSingleSeries(query);
    if (log.isDebugEnabled()) {
        log.debug("SQL: {} result: {}", query.getCommand(), series);
    }
    if (series != null && series.getValues().size() > 0) {
        return;
    }

    Point point = Point.measurement(UITemplate.INDEX_NAME)
                       .tag(InfluxConstants.TagName.ID_COLUMN, managementData.id())
                       .time(1L, TimeUnit.NANOSECONDS)
                       .fields(storageBuilder.data2Map(managementData)).build();
    client.write(point);
}
 
Example #17
Source File: InfluxDBImpl.java    From influxdb-java with MIT License
private InfluxDB enableBatch(final int actions, final int flushDuration, final int jitterDuration,
                             final TimeUnit durationTimeUnit, final ThreadFactory threadFactory,
                             final BiConsumer<Iterable<Point>, Throwable> exceptionHandler) {
  if (this.batchEnabled.get()) {
    throw new IllegalStateException("BatchProcessing is already enabled.");
  }
  this.batchProcessor = BatchProcessor
          .builder(this)
          .actions(actions)
          .exceptionHandler(exceptionHandler)
          .interval(flushDuration, jitterDuration, durationTimeUnit)
          .threadFactory(threadFactory)
          .consistencyLevel(consistency)
          .build();
  this.batchEnabled.set(true);
  return this;
}
 
Example #18
Source File: InfluxDBConverter.java    From RuuviCollector with MIT License
public static BatchPoints toLegacyInflux(EnhancedRuuviMeasurement measurement) {
    List<Point> points = new ArrayList<>();
    createAndAddLegacyFormatPointIfNotNull(points, "temperature", measurement.getTemperature(), null, null);
    createAndAddLegacyFormatPointIfNotNull(points, "humidity", measurement.getHumidity(), null, null);
    createAndAddLegacyFormatPointIfNotNull(points, "pressure", measurement.getPressure(), null, null);
    createAndAddLegacyFormatPointIfNotNull(points, "acceleration", measurement.getAccelerationX(), "axis", "x");
    createAndAddLegacyFormatPointIfNotNull(points, "acceleration", measurement.getAccelerationY(), "axis", "y");
    createAndAddLegacyFormatPointIfNotNull(points, "acceleration", measurement.getAccelerationZ(), "axis", "z");
    createAndAddLegacyFormatPointIfNotNull(points, "acceleration", measurement.getAccelerationTotal(), "axis", "total");
    createAndAddLegacyFormatPointIfNotNull(points, "batteryVoltage", measurement.getBatteryVoltage(), null, null);
    createAndAddLegacyFormatPointIfNotNull(points, "rssi", measurement.getRssi(), null, null);
    return BatchPoints
        .database(Config.getInfluxDatabase())
        .tag("protocolVersion", String.valueOf(measurement.getDataFormat()))
        .tag("mac", measurement.getMac())
        .points(points.toArray(new Point[points.size()]))
        .build();
}
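The createAndAddLegacyFormatPointIfNotNull helper is not part of the snippet. A rough sketch of the behavior its name and call sites suggest is shown below (one single-field point per non-null value, with an optional tag); the real helper in RuuviCollector may differ.

// Hypothetical sketch of the helper used above.
private static void createAndAddLegacyFormatPointIfNotNull(
        List<Point> points, String measurement, Number value, String tagKey, String tagValue) {
    if (value == null) {
        return;
    }
    Point.Builder builder = Point.measurement(measurement)
            .addField("value", value.doubleValue());
    if (tagKey != null) {
        builder.tag(tagKey, tagValue);
    }
    points.add(builder.build());
}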
 
Example #19
Source File: InfluxDBUploader.java    From xDrip-plus with GNU General Public License v3.0
private Point createGlucosePoint(BgReading record) {
    // TODO DisplayGlucose option
    final BigDecimal delta = new BigDecimal(record.calculated_value_slope * 5 * 60 * 1000)
            .setScale(3, BigDecimal.ROUND_HALF_UP);

    return Point.measurement("glucose")
            .time(record.getEpochTimestamp(), TimeUnit.MILLISECONDS)
            .addField("value_mmol", record.calculated_value_mmol())
            .addField("value_mgdl", record.getMgdlValue())
            .addField("direction", record.slopeName())
            .addField("filtered", record.ageAdjustedFiltered() * 1000)
            .addField("unfiltered", record.usedRaw() * 1000)
            .addField("rssi", 100)
            .addField("noise", record.noiseValue())
            .addField("delta", delta)
            .build();
}
 
Example #20
Source File: InfluxDBUploader.java    From xDrip with GNU General Public License v3.0
private Point createCalibrationPoint(Calibration record) {
    Point.Builder builder = Point.measurement("calibration")
            .time(record.timestamp, TimeUnit.MILLISECONDS)
            .tag("device", "xDrip-" + prefs.getString("dex_collection_method", "BluetoothWixel"))
            .tag("type", "cal");

    if (record.check_in) {
        builder.addField("slope", record.first_slope)
                .addField("intercept", record.first_intercept)
                .addField("scale", record.first_scale);
    } else {
        builder.addField("slope", (1000 / record.slope))
                .addField("intercept", ((record.intercept * -1000) / record.slope))
                .addField("scale", 1);
    }

    return builder.build();
}
 
Example #21
Source File: CollectdRecordConverter.java    From datacollector with Apache License 2.0
@Override
public List<Point> getPoints(Record record) throws OnRecordErrorException {
  List<Point> points = new ArrayList<>();

  List<String> fieldNames = getValueFields(record);
  for (String fieldName : fieldNames) {
    points.add(Point
        .measurement(joiner.join(getMeasurementBaseName(record), fieldName))
        .tag(getTags(conf.tagFields, record))
        .time(getTime(record), getTimePrecision(record))
        .field("value", record.get(FIELD_PATH_PREFIX + fieldName).getValue())
        .build()
    );
  }
  return points;
}
 
Example #22
Source File: InfluxDBConverterTest.java    From RuuviCollector with MIT License
@Test
void toInfluxShouldWorkCorrectlyWithPerTagSettings() {
    final Properties props = new Properties();
    props.put("storage.values", "whitelist");
    props.put("storage.values.list", "pressure");
    props.put("tag.BBBBBBBBBBBB.storage.values", "blacklist");
    props.put("tag.BBBBBBBBBBBB.storage.values.list", "accelerationX,accelerationY,accelerationZ");
    props.put("tag.CCCCCCCCCCCC.storage.values", "whitelist");
    props.put("tag.CCCCCCCCCCCC.storage.values.list", "temperature,humidity");
    Config.readConfigFromProperties(props);

    final EnhancedRuuviMeasurement measurement = createMeasurement();
    final Point point = InfluxDBConverter.toInflux(measurement);
    assertPointContainsOnly(point, "pressure");

    final EnhancedRuuviMeasurement measurement2 = createMeasurement();
    measurement2.setMac("BBBBBBBBBBBB");
    final Point point2 = InfluxDBConverter.toInflux(measurement2);
    assertPointContainsAllValuesBut(point2, "accelerationX", "accelerationY", "accelerationZ");

    final EnhancedRuuviMeasurement measurement3 = createMeasurement();
    measurement3.setMac("CCCCCCCCCCCC");
    final Point point3 = InfluxDBConverter.toInflux(measurement3);
    assertPointContainsOnly(point3, "temperature", "humidity");
}
 
Example #23
Source File: StatsCollectorTest.java    From cloudstack with Apache License 2.0
@Test
public void writeBatchesTest() {
    InfluxDB influxDbConnection = Mockito.mock(InfluxDB.class);
    Mockito.doNothing().when(influxDbConnection).write(Mockito.any(Point.class));
    Builder builder = Mockito.mock(Builder.class);
    BatchPoints batchPoints = Mockito.mock(BatchPoints.class);
    PowerMockito.mockStatic(BatchPoints.class);
    PowerMockito.when(BatchPoints.database(DEFAULT_DATABASE_NAME)).thenReturn(builder);
    Mockito.when(builder.build()).thenReturn(batchPoints);
    Map<String, String> tagsToAdd = new HashMap<>();
    tagsToAdd.put("hostId", "1");
    Map<String, Object> fieldsToAdd = new HashMap<>();
    fieldsToAdd.put("total_memory_kbs", 10000000);
    Point point = Point.measurement("measure").tag(tagsToAdd).time(System.currentTimeMillis(), TimeUnit.MILLISECONDS).fields(fieldsToAdd).build();
    List<Point> points = new ArrayList<>();
    points.add(point);
    Mockito.when(batchPoints.point(point)).thenReturn(batchPoints);

    statsCollector.writeBatches(influxDbConnection, DEFAULT_DATABASE_NAME, points);

    Mockito.verify(influxDbConnection).write(batchPoints);
}
 
Example #24
Source File: InfluxDBMapper.java    From influxdb-java with MIT License
private void setField(
    final Point.Builder pointBuilder,
    final Class<?> fieldType,
    final String columnName,
    final Object value) {
  if (boolean.class.isAssignableFrom(fieldType) || Boolean.class.isAssignableFrom(fieldType)) {
    pointBuilder.addField(columnName, (boolean) value);
  } else if (long.class.isAssignableFrom(fieldType) || Long.class.isAssignableFrom(fieldType)) {
    pointBuilder.addField(columnName, (long) value);
  } else if (double.class.isAssignableFrom(fieldType)
      || Double.class.isAssignableFrom(fieldType)) {
    pointBuilder.addField(columnName, (double) value);
  } else if (int.class.isAssignableFrom(fieldType) || Integer.class.isAssignableFrom(fieldType)) {
    pointBuilder.addField(columnName, (int) value);
  } else if (String.class.isAssignableFrom(fieldType)) {
    pointBuilder.addField(columnName, (String) value);
  } else {
    throw new InfluxDBMapperException(
        "Unsupported type " + fieldType + " for column " + columnName);
  }
}
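Each branch above dispatches to one of the typed addField overloads on Point.Builder. For reference, the same overloads used directly look like this; the measurement and field names are just for illustration.

// Direct use of the typed addField overloads the mapper dispatches to.
Point point = Point.measurement("typed_fields")
    .addField("flag", true)      // boolean
    .addField("count", 42L)      // long
    .addField("ratio", 0.5)      // double
    .addField("label", "demo")   // String
    .build();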
 
Example #26
Source File: InfluxInsertRequest.java    From skywalking with Apache License 2.0
public InfluxInsertRequest(Model model, StorageData storageData, StorageBuilder storageBuilder) {
    Map<String, Object> objectMap = storageBuilder.data2Map(storageData);

    for (ModelColumn column : model.getColumns()) {
        Object value = objectMap.get(column.getColumnName().getName());

        if (value instanceof StorageDataComplexObject) {
            fields.put(
                column.getColumnName().getStorageName(),
                ((StorageDataComplexObject) value).toStorageData()
            );
        } else {
            fields.put(column.getColumnName().getStorageName(), value);
        }
    }
    builder = Point.measurement(model.getName())
                   .addField(InfluxConstants.ID_COLUMN, storageData.id())
                   .fields(fields);
}
 
Example #27
Source File: InfluxDBTest.java    From influxdb-java with MIT License
/**
 * When the size of the batch of points exceeds the UDP limit, the expected exception
 * is java.lang.RuntimeException: java.net.SocketException:
 * The message is larger than the maximum supported by the underlying transport: Datagram send failed
 *
 * @throws Exception
 */
@Test
public void testWriteMultipleStringDataLinesOverUDPLimit() throws Exception {
    // prepare data
    List<String> lineProtocols = new ArrayList<String>();
    int i = 0;
    int length = 0;
    while (true) {
        Point point = Point.measurement("udp_single_poit").addField("v", i).build();
        String lineProtocol = point.lineProtocol();
        length += (lineProtocol.getBytes("utf-8")).length;
        lineProtocols.add(lineProtocol);
        if (length > 65535) {
            break;
        }
    }
    // write a batch of strings whose total size is over 64K
    Assertions.assertThrows(RuntimeException.class, () -> {
        this.influxDB.write(UDP_PORT, lineProtocols);
    });
}
 
Example #28
Source File: InfluxdbGateway.java    From redis-rdb-cli with Apache License 2.0
@Override
public boolean save(List<MonitorPoint> points) {
    //
    if (points.isEmpty()) {
        return false;
    }

    //
    try {
        for (Point p : toPoints(points)) influxdb.write(p);
        return true;
    } catch (Throwable t) {
        logger.error("failed to save points.", t);
        return false;
    }
}
 
Example #29
Source File: InfluxDBConverterTest.java    From RuuviCollector with MIT License
private static void assertPointContainsOnly(final Point point, final String... values) {
    final Collection<String> shouldContain = new ArrayList<>(Arrays.asList(values));
    shouldContain.add("mac");
    shouldContain.add("dataFormat");
    shouldContain.add("time");
    final Collection<String> shouldNotContain = new ArrayList<>(allValues());
    shouldNotContain.removeIf(shouldContain::contains);
    assertPoint(point, shouldContain, shouldNotContain);
}
 
Example #30
Source File: InfluxDBConverterTest.java    From RuuviCollector with MIT License
@Test
void toInfluxFalseShouldGiveOnlyRawValues() {
    final EnhancedRuuviMeasurement measurement = createMeasurement();
    final Point point = InfluxDBConverter.toInflux(measurement, false);
    assertPointContainsAllValuesBut(point,
        "accelerationTotal",
        "absoluteHumidity",
        "dewPoint",
        "equilibriumVaporPressure",
        "airDensity",
        "accelerationAngleFromX",
        "accelerationAngleFromY",
        "accelerationAngleFromZ");
}