org.HdrHistogram.DoubleHistogram Java Examples

The following examples show how to use org.HdrHistogram.DoubleHistogram. They are taken from open source projects; the source file, project, and license are noted above each example.
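
Before the project examples, a minimal self-contained sketch of the core DoubleHistogram API may help: constructing a histogram, recording values, and reading percentiles. The class name DoubleHistogramQuickStart and the sample values are illustrative only.

import org.HdrHistogram.DoubleHistogram;

public class DoubleHistogramQuickStart {
    public static void main(String[] args) {
        // 3 = number of significant value digits the histogram maintains
        DoubleHistogram histogram = new DoubleHistogram(3);
        histogram.setAutoResize(true);            // let the tracked dynamic range grow as values arrive
        histogram.recordValue(42.5);              // e.g. a latency sample in milliseconds
        histogram.recordValue(118.0);

        // percentile arguments are on a 0..100 scale
        System.out.println("median = " + histogram.getValueAtPercentile(50.0));
        System.out.println("p99    = " + histogram.getValueAtPercentile(99.0));
        System.out.println("count  = " + histogram.getTotalCount());
    }
}
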
Example #1
Source File: HistogramLogProcessor.java    From hazelcast-simulator with Apache License 2.0
protected Object[] buildDoubleHistogramStatistics(DoubleHistogram doubleIntervalHistogram, DoubleHistogram accumulatedDoubleHistogram) {
    return new Object[]{((doubleIntervalHistogram.getEndTimeStamp() / 1000.0) - logReader.getStartTimeSec()),
            // values recorded during the last reporting interval
            doubleIntervalHistogram.getTotalCount(),
            doubleIntervalHistogram.getValueAtPercentile(50.0) / config.outputValueUnitRatio,
            doubleIntervalHistogram.getValueAtPercentile(90.0) / config.outputValueUnitRatio,
            doubleIntervalHistogram.getMaxValue() / config.outputValueUnitRatio,
            // values recorded from the beginning until now
            accumulatedDoubleHistogram.getTotalCount(),
            accumulatedDoubleHistogram.getValueAtPercentile(50.0) / config.outputValueUnitRatio,
            accumulatedDoubleHistogram.getValueAtPercentile(90.0) / config.outputValueUnitRatio,
            accumulatedDoubleHistogram.getValueAtPercentile(99.0) / config.outputValueUnitRatio,
            accumulatedDoubleHistogram.getValueAtPercentile(99.9) / config.outputValueUnitRatio,
            accumulatedDoubleHistogram.getValueAtPercentile(99.99) / config.outputValueUnitRatio,
            accumulatedDoubleHistogram.getMaxValue() / config.outputValueUnitRatio};
}
 
Example #2
Source File: FeatureNumericHistogramStatistics.java    From geowave with Apache License 2.0
@Override
public void fromBinary(final byte[] bytes) {
  final ByteBuffer buffer = super.binaryBuffer(bytes);
  final int endPosition = buffer.getInt();
  try {
    positiveHistogram =
        DoubleHistogram.decodeFromCompressedByteBuffer(buffer, LocalInternalHistogram.class, 0);
    buffer.position(endPosition);
    positiveHistogram.setAutoResize(true);
    if (buffer.get() == (byte) 0x01) {
      negativeHistogram =
          DoubleHistogram.decodeFromCompressedByteBuffer(buffer, LocalInternalHistogram.class, 0);
      negativeHistogram.setAutoResize(true);
    }
  } catch (final DataFormatException e) {
    throw new RuntimeException("Cannot decode statistic", e);
  }
}
 
Example #3
Source File: SnapshotConverter.java    From vespa with Apache License 2.0
void outputHistograms(PrintStream output) {
    boolean gotHistogram = false;
    for (Map.Entry<Identifier, UntypedMetric> entry : snapshot.entrySet()) {
        if (entry.getValue().getHistogram() == null) {
            continue;
        }
        gotHistogram = true;
        DoubleHistogram histogram = entry.getValue().getHistogram();
        Identifier id = entry.getKey();
        String metricIdentifier = getIdentifierString(id);
        output.println("# start of metric " + metricIdentifier);
        histogram.outputPercentileDistribution(output, 4, 1.0d, true); // 4 percentile ticks per half-distance, value unit ratio 1.0, CSV output
        output.println("# end of metric " + metricIdentifier);
    }
    if (!gotHistogram) {
        output.println("# No histograms currently available.");
    }
}
 
Example #4
Source File: AbstractInternalHDRPercentiles.java    From Elasticsearch with Apache License 2.0
@Override
protected void doReadFrom(StreamInput in) throws IOException {
    valueFormatter = ValueFormatterStreams.readOptional(in);
    keys = new double[in.readInt()];
    for (int i = 0; i < keys.length; ++i) {
        keys[i] = in.readDouble();
    }
    long minBarForHighestToLowestValueRatio = in.readLong();
    final int serializedLen = in.readVInt();
    byte[] bytes = new byte[serializedLen];
    in.readBytes(bytes, 0, serializedLen);
    ByteBuffer stateBuffer = ByteBuffer.wrap(bytes);
    try {
        state = DoubleHistogram.decodeFromCompressedByteBuffer(stateBuffer, minBarForHighestToLowestValueRatio);
    } catch (DataFormatException e) {
        throw new IOException("Failed to decode DoubleHistogram for aggregation [" + name + "]", e);
    }
    keyed = in.readBoolean();
}
 
Example #5
Source File: AbstractInternalHDRPercentiles.java    From Elasticsearch with Apache License 2.0
public AbstractInternalHDRPercentiles(String name, double[] keys, DoubleHistogram state, boolean keyed, ValueFormatter formatter,
        List<PipelineAggregator> pipelineAggregators,
        Map<String, Object> metaData) {
    super(name, pipelineAggregators, metaData);
    this.keys = keys;
    this.state = state;
    this.keyed = keyed;
    this.valueFormatter = formatter;
}
 
Example #6
Source File: AbstractHDRPercentilesAggregator.java    From Elasticsearch with Apache License 2.0
protected DoubleHistogram getState(long bucketOrd) {
    if (bucketOrd >= states.size()) {
        return null;
    }
    final DoubleHistogram state = states.get(bucketOrd);
    return state;
}
 
Example #7
Source File: HDRPercentilesAggregator.java    From Elasticsearch with Apache License 2.0
@Override
public double metric(String name, long bucketOrd) {
    DoubleHistogram state = getState(bucketOrd);
    if (state == null) {
        return Double.NaN;
    } else {
        return state.getValueAtPercentile(Double.parseDouble(name));
    }
}
 
Example #8
Source File: HDRPercentilesAggregator.java    From Elasticsearch with Apache License 2.0
@Override
public InternalAggregation buildAggregation(long owningBucketOrdinal) {
    DoubleHistogram state = getState(owningBucketOrdinal);
    if (state == null) {
        return buildEmptyAggregation();
    } else {
        return new InternalHDRPercentiles(name, keys, state, keyed, formatter, pipelineAggregators(), metaData());
    }
}
 
Example #9
Source File: HDRPercentileRanksAggregator.java    From Elasticsearch with Apache License 2.0
@Override
public InternalAggregation buildAggregation(long owningBucketOrdinal) {
    DoubleHistogram state = getState(owningBucketOrdinal);
    if (state == null) {
        return buildEmptyAggregation();
    } else {
        return new InternalHDRPercentileRanks(name, keys, state, keyed, formatter, pipelineAggregators(), metaData());
    }
}
 
Example #10
Source File: HDRPercentileRanksAggregator.java    From Elasticsearch with Apache License 2.0
@Override
public double metric(String name, long bucketOrd) {
    DoubleHistogram state = getState(bucketOrd);
    if (state == null) {
        return Double.NaN;
    } else {
        return InternalHDRPercentileRanks.percentileRank(state, Double.valueOf(name));
    }
}
 
Example #11
Source File: DoubleHistogramSerializer.java    From SkaETL with Apache License 2.0
@Override
public void serialize(DoubleHistogram doubleHistogram, JsonGenerator jsonGenerator, SerializerProvider serializerProvider) throws IOException, JsonProcessingException {
    ByteBuffer allocate = ByteBuffer.allocate(doubleHistogram.getNeededByteBufferCapacity());
    doubleHistogram.encodeIntoByteBuffer(allocate);
    jsonGenerator.writeBinary(allocate.array());
}
 
Example #12
Source File: SimulatorHistogramLogProcessor.java    From hazelcast-simulator with Apache License 2.0
protected Object[] buildDoubleHistogramStatistics(DoubleHistogram intervalHistogram, DoubleHistogram accumulatedHistogram) {
    double intervalThroughput = ((double) (intervalHistogram.getTotalCount())
            / (intervalHistogram.getEndTimeStamp() - intervalHistogram.getStartTimeStamp()));

    double totalThroughput = ((double) accumulatedHistogram.getTotalCount())
            / (accumulatedHistogram.getEndTimeStamp() - accumulatedHistogram.getStartTimeStamp());

    return new Object[]{
            ((intervalHistogram.getEndTimeStamp() / 1000.0) - logReader.getStartTimeSec()),
            (intervalHistogram.getEndTimeStamp() / 1000.0),
            // values recorded during the last reporting interval
            intervalHistogram.getTotalCount(),
            intervalHistogram.getValueAtPercentile(25.0) / config.outputValueUnitRatio,
            intervalHistogram.getValueAtPercentile(50.0) / config.outputValueUnitRatio,
            intervalHistogram.getValueAtPercentile(75.0) / config.outputValueUnitRatio,
            intervalHistogram.getValueAtPercentile(90.0) / config.outputValueUnitRatio,
            intervalHistogram.getValueAtPercentile(99.0) / config.outputValueUnitRatio,
            intervalHistogram.getValueAtPercentile(99.9) / config.outputValueUnitRatio,
            intervalHistogram.getValueAtPercentile(99.99) / config.outputValueUnitRatio,
            intervalHistogram.getValueAtPercentile(99.999) / config.outputValueUnitRatio,
            intervalHistogram.getMinValue() / config.outputValueUnitRatio,
            intervalHistogram.getMaxValue() / config.outputValueUnitRatio,
            intervalHistogram.getMean() / config.outputValueUnitRatio,
            intervalHistogram.getStdDeviation() / config.outputValueUnitRatio,
            intervalThroughput / config.outputValueUnitRatio,

            // values recorded from the beginning until now
            accumulatedHistogram.getTotalCount(),
            accumulatedHistogram.getValueAtPercentile(25.0) / config.outputValueUnitRatio,
            accumulatedHistogram.getValueAtPercentile(50.0) / config.outputValueUnitRatio,
            accumulatedHistogram.getValueAtPercentile(75.0) / config.outputValueUnitRatio,
            accumulatedHistogram.getValueAtPercentile(90.0) / config.outputValueUnitRatio,
            accumulatedHistogram.getValueAtPercentile(99.0) / config.outputValueUnitRatio,
            accumulatedHistogram.getValueAtPercentile(99.9) / config.outputValueUnitRatio,
            accumulatedHistogram.getValueAtPercentile(99.99) / config.outputValueUnitRatio,
            accumulatedHistogram.getValueAtPercentile(99.999) / config.outputValueUnitRatio,
            accumulatedHistogram.getMinValue() / config.outputValueUnitRatio,
            accumulatedHistogram.getMaxValue() / config.outputValueUnitRatio,
            accumulatedHistogram.getMean() / config.outputValueUnitRatio,
            accumulatedHistogram.getStdDeviation() / config.outputValueUnitRatio,
            totalThroughput / config.outputValueUnitRatio,
    };
}
 
Example #13
Source File: FeatureNumericHistogramStatistics.java    From geowave with Apache License 2.0
@Override
public Pair<DoubleHistogram, DoubleHistogram> getResult() {
  return Pair.of(negativeHistogram, positiveHistogram);
}
 
Example #14
Source File: FeatureNumericHistogramStatistics.java    From geowave with Apache License 2.0
@Override
public InternalDataStatistics<SimpleFeature, Pair<DoubleHistogram, DoubleHistogram>, FieldStatisticsQueryBuilder<Pair<DoubleHistogram, DoubleHistogram>>> create(
    final Short internalDataAdapterId,
    final String fieldName) {
  return new FeatureNumericHistogramStatistics(internalDataAdapterId, fieldName);
}
 
Example #15
Source File: FeatureNumericHistogramStatistics.java    From geowave with Apache License 2.0
private DoubleHistogram getNegativeHistogram() {
  if (negativeHistogram == null) {
    negativeHistogram = new LocalDoubleHistogram();
  }
  return negativeHistogram;
}
 
Example #16
Source File: DoubleHistogramDeserializer.java    From SkaETL with Apache License 2.0
@Override
public DoubleHistogram deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException {
    ByteBuffer byteBuffer = ByteBuffer.wrap(jsonParser.getBinaryValue());
    DoubleHistogram doubleHistogram = DoubleHistogram.decodeFromByteBuffer(byteBuffer, 0);
    return doubleHistogram;
}
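
The deserializer above is the counterpart of the DoubleHistogramSerializer in Example #11. Assuming both SkaETL classes extend Jackson's JsonSerializer/JsonDeserializer as their method signatures suggest, they could be wired into an ObjectMapper roughly as follows; the module setup and round-trip below are an illustrative sketch, not SkaETL's own configuration.

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.module.SimpleModule;
import org.HdrHistogram.DoubleHistogram;

public class DoubleHistogramJsonRoundTrip {
    public static void main(String[] args) throws Exception {
        SimpleModule module = new SimpleModule();
        module.addSerializer(DoubleHistogram.class, new DoubleHistogramSerializer());
        module.addDeserializer(DoubleHistogram.class, new DoubleHistogramDeserializer());
        ObjectMapper mapper = new ObjectMapper().registerModule(module);

        DoubleHistogram original = new DoubleHistogram(3);
        original.recordValue(1.5);

        String json = mapper.writeValueAsString(original);        // histogram bytes are written as Base64
        DoubleHistogram restored = mapper.readValue(json, DoubleHistogram.class);
        System.out.println(restored.getTotalCount());             // 1
    }
}
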
 
Example #17
Source File: FeatureNumericHistogramStatistics.java    From geowave with Apache License 2.0
@Override
public InternalDataStatistics<SimpleFeature, Pair<DoubleHistogram, DoubleHistogram>, FieldStatisticsQueryBuilder<Pair<DoubleHistogram, DoubleHistogram>>> duplicate() {
  return new FeatureNumericHistogramStatistics(adapterId, getFieldName());
}
 
Example #18
Source File: Util.java    From maestro-java with Apache License 2.0
public static Histogram getAccumulated(final File histogramFile) throws FileNotFoundException {
    Histogram accumulatedHistogram = null;
    DoubleHistogram accumulatedDoubleHistogram = null;

    HistogramLogReader histogramLogReader = new HistogramLogReader(histogramFile);

    int i = 0;
    while (histogramLogReader.hasNext()) {
        EncodableHistogram eh = histogramLogReader.nextIntervalHistogram();
        if (eh == null) {
            logger.error("The histogram library returned an unexpected null value");
            break;
        }

        if (i == 0) {
            if (eh instanceof DoubleHistogram) {
                accumulatedDoubleHistogram = ((DoubleHistogram) eh).copy();
                accumulatedDoubleHistogram.reset();
                accumulatedDoubleHistogram.setAutoResize(true);
            }
            else {
                accumulatedHistogram = ((Histogram) eh).copy();
                accumulatedHistogram.reset();
                accumulatedHistogram.setAutoResize(true);
            }
        }

        logger.debug("Processing histogram from point in time {} to {}",
                Instant.ofEpochMilli(eh.getStartTimeStamp()), Instant.ofEpochMilli(eh.getEndTimeStamp()));

        if (eh instanceof DoubleHistogram) {
            Objects.requireNonNull(accumulatedDoubleHistogram).add((DoubleHistogram) eh);
        }
        else {
            Objects.requireNonNull(accumulatedHistogram).add((Histogram) eh);
        }

        i++;
    }

    if (accumulatedHistogram == null) {
        throw new EmptyDataSet("The HDR data file did not contain any histogram data");
    }

    return accumulatedHistogram;
}
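
Example #18 above reads interval histograms back out of an HDR histogram log. For context, a log file that getAccumulated could consume might be written roughly like this with HistogramLogWriter; the file name latency.hlog and the recorded values are illustrative only.

import org.HdrHistogram.DoubleHistogram;
import org.HdrHistogram.HistogramLogWriter;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.PrintStream;

public class HistogramLogWriteSketch {
    public static void main(String[] args) throws FileNotFoundException {
        PrintStream log = new PrintStream(new File("latency.hlog"));
        HistogramLogWriter writer = new HistogramLogWriter(log);
        writer.outputLogFormatVersion();
        writer.outputStartTime(System.currentTimeMillis());
        writer.outputLegend();

        DoubleHistogram interval = new DoubleHistogram(3);
        interval.setStartTimeStamp(System.currentTimeMillis());
        interval.recordValue(2.75);                               // e.g. a latency sample in milliseconds
        interval.setEndTimeStamp(System.currentTimeMillis());

        writer.outputIntervalHistogram(interval);                 // one interval line in the log
        log.close();
    }
}
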
 
Example #19
Source File: SnapshotConverter.java    From vespa with Apache License 2.0
private static List<Tuple2<String, Double>> buildPercentileList(DoubleHistogram histogram) {
    List<Tuple2<String, Double>> prefixAndValues = new ArrayList<>(2);
    prefixAndValues.add(new Tuple2<>("95", histogram.getValueAtPercentile(95.0d)));
    prefixAndValues.add(new Tuple2<>("99", histogram.getValueAtPercentile(99.0d)));
    return prefixAndValues;
}
 
Example #20
Source File: UntypedMetric.java    From vespa with Apache License 2.0
public DoubleHistogram getHistogram() {
    return histogram;
}
 
Example #21
Source File: InternalHDRPercentiles.java    From Elasticsearch with Apache License 2.0
public Iter(double[] percents, DoubleHistogram state) {
    this.percents = percents;
    this.state = state;
    i = 0;
}
 
Example #22
Source File: InternalHDRPercentiles.java    From Elasticsearch with Apache License 2.0
@Override
protected AbstractInternalHDRPercentiles createReduced(String name, double[] keys, DoubleHistogram merged, boolean keyed,
        List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
    return new InternalHDRPercentiles(name, keys, merged, keyed, valueFormatter, pipelineAggregators, metaData);
}
 
Example #23
Source File: InternalHDRPercentiles.java    From Elasticsearch with Apache License 2.0
public InternalHDRPercentiles(String name, double[] percents, DoubleHistogram state, boolean keyed, ValueFormatter formatter,
        List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
    super(name, percents, state, keyed, formatter, pipelineAggregators, metaData);
}
 
Example #24
Source File: DefaultHistogramHandler.java    From maestro-java with Apache License 2.0
private void doSave(final EncodableHistogram eh, final File histogramFile) throws IOException {
    Properties prop = new Properties();

    prop.setProperty("latencyStartTS", Long.toString(eh.getStartTimeStamp()));
    prop.setProperty("latencyEndTS", Long.toString(eh.getEndTimeStamp()));

    prop.setProperty("latencyMaxValue", Double.toString(eh.getMaxValueAsDouble() / unitRatio));


    if (eh instanceof AbstractHistogram) {
        AbstractHistogram ah = (AbstractHistogram) eh;

        prop.setProperty("latency50th", Long.toString(ah.getValueAtPercentile(50.0) / (long) unitRatio));
        prop.setProperty("latency90th", Long.toString(ah.getValueAtPercentile(90.0) / (long) unitRatio));
        prop.setProperty("latency95th", Long.toString(ah.getValueAtPercentile(95.0) / (long) unitRatio));
        prop.setProperty("latency99th", Long.toString(ah.getValueAtPercentile(99.0) / (long) unitRatio));
        prop.setProperty("latency999th", Long.toString(ah.getValueAtPercentile(99.9) / (long) unitRatio));
        prop.setProperty("latency9999th", Long.toString(ah.getValueAtPercentile(99.99) / (long) unitRatio));
        prop.setProperty("latency99999th", Long.toString(ah.getValueAtPercentile(99.999) / (long) unitRatio));
        prop.setProperty("latencyStdDeviation", Double.toString(ah.getStdDeviation() / unitRatio));
        prop.setProperty("latencyTotalCount", Long.toString(ah.getTotalCount()));
        prop.setProperty("latencyMean", Double.toString(ah.getMean() / unitRatio));


    }
    else {
        if (eh instanceof DoubleHistogram) {
            DoubleHistogram dh = (DoubleHistogram) eh;

            prop.setProperty("latency50th", Double.toString(dh.getValueAtPercentile(50.0) / unitRatio));
            prop.setProperty("latency90th", Double.toString(dh.getValueAtPercentile(90.0) / unitRatio));
            prop.setProperty("latency95th", Double.toString(dh.getValueAtPercentile(95.0) / unitRatio));
            prop.setProperty("latency99th", Double.toString(dh.getValueAtPercentile(99.0) / unitRatio));
            prop.setProperty("latency999th", Double.toString(dh.getValueAtPercentile(99.9) / unitRatio));
            prop.setProperty("latency9999th", Double.toString(dh.getValueAtPercentile(99.99) / unitRatio));
            prop.setProperty("latency99999th", Double.toString(dh.getValueAtPercentile(99.999) / unitRatio));
            prop.setProperty("latencyStdDeviation", Double.toString(dh.getStdDeviation() / unitRatio));
            prop.setProperty("latencyTotalCount", Long.toString(dh.getTotalCount()));
            prop.setProperty("latencyMean", Double.toString(dh.getMean() / unitRatio));
        }
    }

    File outFile = new File(histogramFile.getParentFile(), "latency.properties");
    try (OutputStream fos = new BufferedOutputStream(new FileOutputStream(outFile))) {
        prop.store(fos, "hdr-histogram-plotter");
    }
}
 
Example #25
Source File: AbstractInternalHDRPercentiles.java    From Elasticsearch with Apache License 2.0
protected abstract AbstractInternalHDRPercentiles createReduced(String name, double[] keys, DoubleHistogram merged, boolean keyed,
        List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData);
 
Example #26
Source File: HubMessageHandler.java    From arcusplatform with Apache License 2.0
@Override
public void run() {
   try {
      JsonObject metricMessage = new JsonObject();
      metricMessage.addProperty("ts", System.currentTimeMillis());
      metricMessage.addProperty("hst", IrisApplicationInfo.getHostName());
      metricMessage.addProperty("svc", "hub-agent");
      metricMessage.addProperty("ctn", IrisApplicationInfo.getContainerName());
      metricMessage.addProperty("svr", IrisApplicationInfo.getApplicationVersion());

      JsonArray histograms = new JsonArray();
      for (Map.Entry<String,DoubleRecorder> metric : metrics.entrySet()) {
         String name = metric.getKey();
         DoubleRecorder recorder = metric.getValue();

         DoubleHistogram hist = recorder.getIntervalHistogram(recycle.get(name));
         recycle.put(name, hist);


         JsonObject h = IrisMetricsFormat.toJson(
            name,
            hist.getTotalCount(),
            hist.getMinValue(),
            hist.getMaxValue(),
            hist.getMean(),
            hist.getStdDeviation(),
            hist.getValueAtPercentile(0.50),
            hist.getValueAtPercentile(0.75),
            hist.getValueAtPercentile(0.95),
            hist.getValueAtPercentile(0.98),
            hist.getValueAtPercentile(0.99),
            hist.getValueAtPercentile(0.999),
            ImmutableList.of()
         );

         histograms.add(h);
      }

      metricMessage.add("histograms", histograms);
      irisMetricsSender.send(new ProducerRecord<Void, JsonObject>(METRICS_TOPIC, metricMessage));
   } catch (Exception ex) {
      logger.warn("failed to report aggregated metrics:", ex);
   }
}
 
Example #27
Source File: TimeWindowPercentileHistogram.java    From micrometer with Apache License 2.0
public TimeWindowPercentileHistogram(Clock clock, DistributionStatisticConfig distributionStatisticConfig,
                                     boolean supportsAggregablePercentiles) {
    super(clock, distributionStatisticConfig, DoubleRecorder.class, supportsAggregablePercentiles);
    intervalHistogram = new DoubleHistogram(percentilePrecision(distributionStatisticConfig));
    initRingBuffer();
}
 
Example #28
Source File: InternalHDRPercentileRanks.java    From Elasticsearch with Apache License 2.0
public Iter(double[] values, DoubleHistogram state) {
    this.values = values;
    this.state = state;
    i = 0;
}
 
Example #29
Source File: InternalHDRPercentileRanks.java    From Elasticsearch with Apache License 2.0
@Override
protected AbstractInternalHDRPercentiles createReduced(String name, double[] keys, DoubleHistogram merged, boolean keyed,
        List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
    return new InternalHDRPercentileRanks(name, keys, merged, keyed, valueFormatter, pipelineAggregators, metaData);
}
 
Example #30
Source File: InternalHDRPercentileRanks.java    From Elasticsearch with Apache License 2.0
public InternalHDRPercentileRanks(String name, double[] cdfValues, DoubleHistogram state, boolean keyed, ValueFormatter formatter,
        List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
    super(name, cdfValues, state, keyed, formatter, pipelineAggregators, metaData);
}