Java Code Examples for org.influxdb.InfluxDB#query()

The following examples show how to use org.influxdb.InfluxDB#query(). Each example is taken from an open source project; the source file, project, and license are listed above it.
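Before the project-specific examples below, here is a minimal, self-contained sketch of the basic blocking query() call. The connection URL, credentials, database name, and measurement are placeholders and are not taken from any of the projects that follow.

import org.influxdb.InfluxDB;
import org.influxdb.InfluxDBFactory;
import org.influxdb.dto.Query;
import org.influxdb.dto.QueryResult;

public class BasicQueryExample {

    public static void main(String[] args) {
        // Placeholder connection details; adjust for your environment
        InfluxDB influxDB = InfluxDBFactory.connect("http://localhost:8086", "admin", "admin");
        try {
            // Blocking query against a placeholder database and measurement
            QueryResult result = influxDB.query(new Query("SELECT * FROM cpu LIMIT 10", "mydb"));

            if (result.getError() != null) {
                System.err.println("Query failed: " + result.getError());
            } else {
                // Each Result may contain one or more Series holding the returned rows
                result.getResults().forEach(r -> System.out.println(r.getSeries()));
            }
        } finally {
            influxDB.close();
        }
    }
}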
Example 1
Source File: InfluxDBContainerWithUserTest.java    From testcontainers-java with MIT License
@Test
public void queryForWriteAndRead() {
    InfluxDB influxDB = influxDBContainer.getNewInfluxDB();

    Point point = Point.measurement("cpu")
        .time(System.currentTimeMillis(), TimeUnit.MILLISECONDS)
        .addField("idle", 90L)
        .addField("user", 9L)
        .addField("system", 1L)
        .build();
    influxDB.write(point);

    Query query = new Query("SELECT idle FROM cpu", DATABASE);
    QueryResult actual = influxDB.query(query);

    assertThat(actual, notNullValue());
    assertThat(actual.getError(), nullValue());
    assertThat(actual.getResults(), notNullValue());
    assertThat(actual.getResults().size(), is(1));

}
 
Example 2
Source File: ExecuteInfluxDBQuery.java    From nifi with Apache License 2.0
protected List<QueryResult> executeQuery(final ProcessContext context, String database, String query, TimeUnit timeunit,
                                         int chunkSize) throws InterruptedException {
    final CountDownLatch latch = new CountDownLatch(1);
    InfluxDB influx = getInfluxDB(context);
    Query influxQuery = new Query(query, database);

    if (chunkSize > 0) {
        List<QueryResult> results = new LinkedList<>();
        influx.query(influxQuery, chunkSize, result -> {
            if (isQueryDone(result.getError())) {
                latch.countDown();
            } else {
                results.add(result);
            }
        });
        latch.await();

        return results;
    } else {
        return Collections.singletonList(influx.query(influxQuery, timeunit));
    }
}
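In the chunked branch, query() streams partial QueryResult objects to the supplied consumer asynchronously; the CountDownLatch blocks executeQuery() until isQueryDone (a helper of the enclosing processor, not shown here) detects the marker that InfluxDB places in the error field once a chunked query has completed. The non-chunked branch simply returns the single result of a blocking query(query, timeunit) call.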
 
Example 3
Source File: InfluxdbResource.java    From camel-quarkus with Apache License 2.0
@Unremovable
@Singleton
@javax.enterprise.inject.Produces
InfluxDB createInfluxDbConnection() {
    InfluxDB influxDbConnection = InfluxDBFactory.connect(connectionUrl);
    influxDbConnection.query(new Query("CREATE DATABASE " + DB_NAME));

    return influxDbConnection;
}
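This producer is paired with the @Disposes method shown in Example 6 below: the database is created with a CREATE DATABASE statement when the connection bean is produced and dropped again when the bean is disposed.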
 
Example 4
Source File: InfluxDbSinkTest.java    From hazelcast-jet-contrib with Apache License 2.0
@Test
public void test_influxDbSink() {
    IList<Integer> measurements = jet.getList("mem_usage");
    for (int i = 0; i < VALUE_COUNT; i++) {
        measurements.add(i);
    }

    InfluxDB db = influxdbContainer.getNewInfluxDB();
    db.query(new Query("DROP SERIES FROM mem_usage"));

    Pipeline p = Pipeline.create();

    int startTime = 0;
    p.readFrom(Sources.list(measurements))
     .map(index -> Point.measurement("mem_usage")
                        .time(startTime + index, TimeUnit.MILLISECONDS)
                        .addField("value", index)
                        .build())
     .writeTo(InfluxDbSinks.influxDb(influxdbContainer.getUrl(), DATABASE_NAME, USERNAME, PASSWORD));

    jet.newJob(p).join();

    List<Result> results = db.query(new Query("SELECT * FROM mem_usage")).getResults();
    assertEquals(1, results.size());
    List<Series> seriesList = results.get(0).getSeries();
    assertEquals(1, seriesList.size());
    Series series = seriesList.get(0);
    assertEquals(SERIES, series.getName());
    assertEquals(VALUE_COUNT, series.getValues().size());
}
 
Example 5
Source File: InfluxDBConnectionLiveTest.java    From tutorials with MIT License
private List<MemoryPoint> getPoints(InfluxDB connection, String query, String databaseName) {

    // Run the query
    Query queryObject = new Query(query, databaseName);
    QueryResult queryResult = connection.query(queryObject);

    // Map the result rows onto MemoryPoint POJOs
    InfluxDBResultMapper resultMapper = new InfluxDBResultMapper();
    return resultMapper.toPOJO(queryResult, MemoryPoint.class);
}
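The MemoryPoint class that InfluxDBResultMapper#toPOJO maps onto is not shown in this snippet. A hypothetical sketch of such a POJO, with assumed measurement and column names, might look like the following:

import java.time.Instant;

import org.influxdb.annotation.Column;
import org.influxdb.annotation.Measurement;

// Hypothetical POJO; the measurement name and columns are assumptions,
// not taken from the original tutorial source.
@Measurement(name = "memory")
public class MemoryPoint {

    @Column(name = "time")
    private Instant time;

    @Column(name = "name")
    private String name;

    @Column(name = "free")
    private Long free;

    @Column(name = "used")
    private Long used;

    // Getters and setters omitted for brevity; InfluxDBResultMapper
    // populates the annotated fields reflectively.
}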
 
Example 6
Source File: InfluxdbResource.java    From camel-quarkus with Apache License 2.0
void disposeInfluxDbConnection(@Disposes InfluxDB influxDbConnection) {
    influxDbConnection.query(new Query("DROP DATABASE " + DB_NAME, ""));
    influxDbConnection.close();
}
 
Example 7
Source File: CollectionCommonStatHandler.java    From EserKnife with Apache License 2.0
@Override
public Boolean execute() {
    try {

        List<NodeCommonStatInfo> nodeCommonStatInfos = new ArrayList<NodeCommonStatInfo>();
        NodeCommonStatInfo nodeCommonStatInfo = new NodeCommonStatInfo();

        ClusterInfo clusterInfo = JestManager.CLUSTER_MAP.get(clusterName);
        nodeCommonStatInfo.setClusterId(clusterInfo.getId());

        JestResult healthInfo = (JestResult) jestService.httpProxy(clusterName, "/_cluster/health", "GET", null);
        JSONObject healthObject = JSONObject.parseObject(healthInfo.getJsonString());

        JestResult clusterStatInfo = (JestResult) jestService.httpProxy(clusterName, "/_cluster/stats", "GET", null);
        JSONObject clusterObject = JSONObject.parseObject(clusterStatInfo.getJsonString());

        nodeCommonStatInfo.setClusterStatus(healthObject.getString(Constant.STATUS));
        nodeCommonStatInfo.setNodeCount(healthObject.getLong(Constant.NUMBER_OF_NODES));
        nodeCommonStatInfo.setDataNodeCount(healthObject.getLong(Constant.NUMBER_OF_DATA_NODES));

        JSONObject indicesObject = clusterObject.getJSONObject(Constant.INDICES);
        nodeCommonStatInfo.setIndicesCount(indicesObject.getLong(Constant.COUNT));

        nodeCommonStatInfo.setShardCounts(healthObject.getLong(Constant.ACTIVE_SHARDS));

        JSONObject docObject = indicesObject.getJSONObject(Constant.DOCS);
        nodeCommonStatInfo.setDocCounts(docObject.getLong(Constant.COUNT));

        JSONObject storeObject = indicesObject.getJSONObject(Constant.STORE);
        nodeCommonStatInfo.setStoreSize(storeObject.getLong(Constant.SIZE_IN_BYTES));
        nodeCommonStatInfo.setClusterName(clusterName);
        nodeCommonStatInfo.setCreateTime(executeTime);
        nodeCommonStatInfos.add(nodeCommonStatInfo);
        QueryResult queryResult = null;
        NodeCommonStatInfo nodeCommonStatInfosParam = null;
        if (InflusDbUtil.FLAG) {
            InfluxDB influxDB = InflusDbUtil.getConnection();
            Query query = new Query("SELECT last(clusterStatus) as clusterStatus,last(createTime) as createTime FROM common where clusterName = '" + clusterName + "'", InflusDbUtil.DATABASE);
            queryResult = influxDB.query(query);
        } else {
            nodeCommonStatInfosParam = commonService.getLastByParams(nodeCommonStatInfo);
        }
        commonService.batchInsert(nodeCommonStatInfos);
        asyncService.submitFuture(new HealthAlarm(JobKey.buildFutureKey(clusterName, Constant.INDICES, executeTime), clusterName, healthInfo.getJsonString(), queryResult, nodeCommonStatInfosParam));
    } catch (Exception e) {
        LOGGER.error(clusterName + " thread statistics exception (commonStat)", e);
    }
    return true;
}