Java Code Examples for com.google.common.util.concurrent.Futures#inCompletionOrder()

The following examples show how to use com.google.common.util.concurrent.Futures#inCompletionOrder(). Each example is drawn from an open-source project; the source file, project, and license are noted above each snippet.
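Before the project examples, here is a minimal, self-contained sketch of the basic pattern, assuming only Guava and a plain JDK executor (the class and task names are hypothetical and not taken from any project below). Futures.inCompletionOrder() returns a list of futures that complete in the order the input futures finish, so each result can be consumed as soon as it is ready rather than in submission order.

import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Executors;

public class InCompletionOrderSketch {

  public static void main(String[] args) throws Exception {
    ListeningExecutorService pool =
        MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(3));

    // Submit tasks that finish at different times (longest first).
    List<ListenableFuture<String>> inputs = new ArrayList<>();
    for (int delay = 3; delay >= 1; delay--) {
      final int d = delay;
      inputs.add(pool.submit(() -> {
        Thread.sleep(d * 100L);
        return "task-" + d;
      }));
    }

    // inCompletionOrder yields futures in the order the inputs complete,
    // not the order they were submitted.
    for (ListenableFuture<String> future : Futures.inCompletionOrder(inputs)) {
      System.out.println(future.get()); // typically prints task-1, task-2, task-3
    }

    pool.shutdown();
  }
}

The project examples below apply the same idea to Cassandra queries, consuming ResultSetFutures as they complete.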
Example 1
Source File: CassandraRepo.java    From monasca-persister with Apache License 2.0
public int handleFlush(String id) throws RepoException {
  long startTime = System.nanoTime();

  int flushedCount = 0;
  List<ResultSetFuture> results = new ArrayList<>(queue.size());
  Statement query;
  while ((query = queue.poll()) != null) {
    flushedCount++;
    results.add(session.executeAsync(query));
  }

  // Reorder the futures so results can be consumed as each query completes.
  List<ListenableFuture<ResultSet>> futures = Futures.inCompletionOrder(results);

  // After the first failure, cancel any futures that have not completed yet.
  boolean cancel = false;
  Exception ex = null;
  for (ListenableFuture<ResultSet> future : futures) {
    if (cancel) {
      future.cancel(false);
      continue;
    }
    try {
      future.get();
    } catch (InterruptedException | ExecutionException e) {
      cancel = true;
      ex = e;
    }
  }

  commitTimer.update(System.nanoTime() - startTime, TimeUnit.NANOSECONDS);

  if (ex != null) {
    throw new RepoException(ex);
  }
  return flushedCount;
}
 
Example 2
Source File: CassandraMetricRepo.java    From monasca-persister with Apache License 2.0
@Override
public int flush(String id) throws RepoException {
  long startTime = System.nanoTime();
  List<ResultSetFuture> results = new ArrayList<>();
  List<Deque<BatchStatement>> list = batches.getAllBatches();
  for (Deque<BatchStatement> q : list) {
    BatchStatement b;
    while ((b = q.poll()) != null) {
      results.add(session.executeAsync(b));
    }
  }

  List<ListenableFuture<ResultSet>> futures = Futures.inCompletionOrder(results);

  boolean cancel = false;
  Exception ex = null;
  for (ListenableFuture<ResultSet> future : futures) {
    if (cancel) {
      future.cancel(false);
      continue;
    }
    try {
      future.get();
    } catch (InterruptedException | ExecutionException e) {
      cancel = true;
      ex = e;
    }
  }

  this.commitTimer.update(System.nanoTime() - startTime, TimeUnit.NANOSECONDS);

  if (ex != null) {
    metricFailed.inc(metricCount);
    throw new RepoException(ex);
  }

  batches.clear();
  int flushCnt = metricCount;
  metricCount = 0;
  metricCompleted.inc(flushCnt);
  return flushCnt;
}
 
Example 3
Source File: MetricService.java    From disthene-reader with MIT License
public String getMetricsAsJson(String tenant, List<String> wildcards, long from, long to) throws ExecutionException, InterruptedException, TooMuchDataExpectedException {
    List<String> paths = indexService.getPaths(tenant, wildcards);
    Collections.sort(paths);

    // Calculate rollup etc. (from/to and the rollup values are epoch seconds)
    Long now = System.currentTimeMillis() / 1000;
    Long effectiveTo = Math.min(to, now);
    Rollup bestRollup = getRollup(from);
    Long effectiveFrom = (from % bestRollup.getRollup()) == 0 ? from : from + bestRollup.getRollup() - (from % bestRollup.getRollup());
    effectiveTo = effectiveTo - (effectiveTo % bestRollup.getRollup());
    logger.debug("Effective from: " + effectiveFrom);
    logger.debug("Effective to: " + effectiveTo);

    // now build the weird data structures ("in the meanwhile")
    final Map<Long, Integer> timestampIndices = new HashMap<>();
    Long timestamp = effectiveFrom;
    int index = 0;
    while (timestamp <= effectiveTo) {
        timestampIndices.put(timestamp, index++);
        timestamp += bestRollup.getRollup();
    }

    final int length = timestampIndices.size();
    logger.debug("Expected number of data points in series is " + length);
    logger.debug("Expected number of series is " + paths.size());


    // Now let's query C*
    List<ListenableFuture<SinglePathResult>> futures = Lists.newArrayListWithExpectedSize(paths.size());
    for (final String path : paths) {
        Function<ResultSet, SinglePathResult> serializeFunction =
                new Function<ResultSet, SinglePathResult>() {
                    public SinglePathResult apply(ResultSet resultSet) {
                        SinglePathResult result = new SinglePathResult(path);
                        result.makeJson(resultSet, length, timestampIndices);
                        return result;
                    }
                };


        futures.add(
                Futures.transform(
                        cassandraService.executeAsync(tenant, path, bestRollup.getPeriod(), bestRollup.getRollup(), effectiveFrom, effectiveTo),
                        serializeFunction,
                        executorService
                )
        );
    }

    // Reorder so each path's result can be consumed as soon as it is ready.
    futures = Futures.inCompletionOrder(futures);

    // Build response content JSON
    List<String> singlePathJsons = new ArrayList<>();

    for (ListenableFuture<SinglePathResult> future : futures) {
        SinglePathResult singlePathResult = future.get();
        if (!singlePathResult.isAllNulls()) {
            singlePathJsons.add("\"" + singlePathResult.getPath() + "\":" + singlePathResult.getJson());
        }
    }


    return "{\"from\":" + effectiveFrom + ",\"to\":" + effectiveTo + ",\"step\":" + bestRollup.getRollup() +
            ",\"series\":{" + Joiner.on(",").skipNulls().join(singlePathJsons) + "}}";

}