org.apache.flink.metrics.Counter Java Examples

The following examples show how to use org.apache.flink.metrics.Counter. Each example is taken from an open-source project; the originating source file and license are noted above each snippet.
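Before the reporter and runtime internals below, here is a minimal sketch of the user-facing pattern most of these examples build on: register a Counter on the operator's metric group in open() and increment it per record. The class and metric names (CountingMapper, eventsProcessed) are illustrative and not taken from any of the examples.

import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.metrics.Counter;

public class CountingMapper extends RichMapFunction<String, String> {

	// Registered in open() on each parallel task; transient because the function
	// instance is serialized at job submission.
	private transient Counter eventsProcessed;

	@Override
	public void open(Configuration parameters) {
		this.eventsProcessed = getRuntimeContext()
			.getMetricGroup()
			.counter("eventsProcessed");
	}

	@Override
	public String map(String value) {
		eventsProcessed.inc();
		return value;
	}
}

A counter registered this way is scoped to the operator and is picked up by whichever reporters are configured (Prometheus, InfluxDB, Datadog, Dropwizard), which is what the reporter-side examples below exercise.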
Example #1
Source File: PrometheusReporterTaskScopeTest.java    From flink with Apache License 2.0
@Test
public void removingSingleInstanceOfMetricDoesNotBreakOtherInstances() throws UnirestException {
	Counter counter1 = new SimpleCounter();
	counter1.inc(1);
	Counter counter2 = new SimpleCounter();
	counter2.inc(2);

	taskMetricGroup1.counter("my_counter", counter1);
	taskMetricGroup2.counter("my_counter", counter2);

	assertThat(CollectorRegistry.defaultRegistry.getSampleValue("flink_taskmanager_job_task_my_counter", LABEL_NAMES, labelValues1),
		equalTo(1.));
	assertThat(CollectorRegistry.defaultRegistry.getSampleValue("flink_taskmanager_job_task_my_counter", LABEL_NAMES, labelValues2),
		equalTo(2.));

	taskMetricGroup2.close();
	assertThat(CollectorRegistry.defaultRegistry.getSampleValue("flink_taskmanager_job_task_my_counter", LABEL_NAMES, labelValues1),
		equalTo(1.));

	taskMetricGroup1.close();
	assertThat(CollectorRegistry.defaultRegistry.getSampleValue("flink_taskmanager_job_task_my_counter", LABEL_NAMES, labelValues1),
		nullValue());
}
 
Example #2
Source File: LocalInputChannel.java    From flink with Apache License 2.0
public LocalInputChannel(
	SingleInputGate inputGate,
	int channelIndex,
	ResultPartitionID partitionId,
	ResultPartitionManager partitionManager,
	TaskEventPublisher taskEventPublisher,
	int initialBackoff,
	int maxBackoff,
	Counter numBytesIn,
	Counter numBuffersIn) {

	super(inputGate, channelIndex, partitionId, initialBackoff, maxBackoff, numBytesIn, numBuffersIn);

	this.partitionManager = checkNotNull(partitionManager);
	this.taskEventPublisher = checkNotNull(taskEventPublisher);
}
 
Example #3
Source File: PrometheusReporterTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void metricIsRemovedWhenCollectorIsNotUnregisteredYet() throws UnirestException {
	TaskManagerMetricGroup tmMetricGroup = new TaskManagerMetricGroup(registry, HOST_NAME, TASK_MANAGER);

	String metricName = "metric";

	Counter metric1 = new SimpleCounter();
	FrontMetricGroup<TaskManagerJobMetricGroup> metricGroup1 = new FrontMetricGroup<>(0, new TaskManagerJobMetricGroup(registry, tmMetricGroup, JobID.generate(), "job_1"));
	reporter.notifyOfAddedMetric(metric1, metricName, metricGroup1);

	Counter metric2 = new SimpleCounter();
	FrontMetricGroup<TaskManagerJobMetricGroup> metricGroup2 = new FrontMetricGroup<>(0, new TaskManagerJobMetricGroup(registry, tmMetricGroup, JobID.generate(), "job_2"));
	reporter.notifyOfAddedMetric(metric2, metricName, metricGroup2);

	reporter.notifyOfRemovedMetric(metric1, metricName, metricGroup1);

	String response = pollMetrics(reporter.getPort()).getBody();

	assertThat(response, not(containsString("job_1")));
}
 
Example #4
Source File: AbstractPrometheusReporter.java    From flink with Apache License 2.0
private Collector createCollector(Metric metric, List<String> dimensionKeys, List<String> dimensionValues, String scopedMetricName, String helpString) {
	Collector collector;
	if (metric instanceof Gauge || metric instanceof Counter || metric instanceof Meter) {
		collector = io.prometheus.client.Gauge
			.build()
			.name(scopedMetricName)
			.help(helpString)
			.labelNames(toArray(dimensionKeys))
			.create();
	} else if (metric instanceof Histogram) {
		collector = new HistogramSummaryProxy((Histogram) metric, scopedMetricName, helpString, dimensionKeys, dimensionValues);
	} else {
		log.warn("Cannot create collector for unknown metric type: {}. This indicates that the metric type is not supported by this reporter.",
			metric.getClass().getName());
		collector = null;
	}
	return collector;
}
 
Example #5
Source File: InfluxdbReporterTest.java    From flink with Apache License 2.0
@Test
public void testMetricRegistration() throws Exception {
	MetricRegistryImpl metricRegistry = createMetricRegistry(InfluxdbReporterOptions.RETENTION_POLICY.defaultValue());
	try {
		String metricName = "TestCounter";
		Counter counter = registerTestMetric(metricName, metricRegistry);

		InfluxdbReporter reporter = (InfluxdbReporter) metricRegistry.getReporters().get(0);
		MeasurementInfo measurementInfo = reporter.counters.get(counter);
		assertNotNull("test metric must be registered in the reporter", measurementInfo);
		assertEquals("taskmanager_" + metricName, measurementInfo.getName());
		assertThat(measurementInfo.getTags(), hasEntry("host", METRIC_HOSTNAME));
		assertThat(measurementInfo.getTags(), hasEntry("tm_id", METRIC_TM_ID));
	} finally {
		metricRegistry.shutdown().get();
	}
}
 
Example #6
Source File: UnionWithTempOperator.java    From flink with Apache License 2.0
@Override
public void run() throws Exception {
	final Counter numRecordsIn = this.taskContext.getMetricGroup().getIOMetricGroup().getNumRecordsInCounter();
	final Counter numRecordsOut = this.taskContext.getMetricGroup().getIOMetricGroup().getNumRecordsOutCounter();
	
	final Collector<T> output = new CountingCollector<>(this.taskContext.getOutputCollector(), numRecordsOut);
	T reuse = this.taskContext.<T>getInputSerializer(STREAMED_INPUT).getSerializer().createInstance();
	T record;
	
	final MutableObjectIterator<T> input = this.taskContext.getInput(STREAMED_INPUT);
	while (this.running && ((record = input.next(reuse)) != null)) {
		numRecordsIn.inc();
		output.collect(record);
	}
	
	final MutableObjectIterator<T> cache = this.taskContext.getInput(CACHED_INPUT);
	while (this.running && ((record = cache.next(reuse)) != null)) {
		numRecordsIn.inc();
		output.collect(record);
	}
}
 
Example #7
Source File: DatadogHttpReporter.java    From flink with Apache License 2.0
@Override
public void notifyOfAddedMetric(Metric metric, String metricName, MetricGroup group) {
	final String name = group.getMetricIdentifier(metricName);

	List<String> tags = new ArrayList<>(configTags);
	tags.addAll(getTagsFromMetricGroup(group));
	String host = getHostFromMetricGroup(group);

	if (metric instanceof Counter) {
		Counter c = (Counter) metric;
		counters.put(c, new DCounter(c, name, host, tags, clock));
	} else if (metric instanceof Gauge) {
		Gauge g = (Gauge) metric;
		gauges.put(g, new DGauge(g, name, host, tags, clock));
	} else if (metric instanceof Meter) {
		Meter m = (Meter) metric;
		// Only consider rate
		meters.put(m, new DMeter(m, name, host, tags, clock));
	} else if (metric instanceof Histogram) {
		LOGGER.warn("Cannot add {} because Datadog HTTP API doesn't support Histogram", metricName);
	} else {
		LOGGER.warn("Cannot add unknown metric type {}. This indicates that the reporter " +
			"does not support this metric type.", metric.getClass().getName());
	}
}
 
Example #8
Source File: StreamTwoInputProcessor.java    From flink with Apache License 2.0
private void processRecord2(
		StreamRecord<IN2> record,
		TwoInputStreamOperator<IN1, IN2, ?> streamOperator,
		Counter numRecordsIn) throws Exception {

	streamOperator.setKeyContextElement2(record);
	streamOperator.processElement2(record);
	postProcessRecord(numRecordsIn);
}
 
Example #9
Source File: PrometheusReporterTest.java    From flink with Apache License 2.0
/**
 * {@link io.prometheus.client.Counter} may not decrease, so report {@link Counter} as {@link io.prometheus.client.Gauge}.
 *
 * @throws UnirestException Might be thrown on HTTP problems.
 */
@Test
public void counterIsReportedAsPrometheusGauge() throws UnirestException {
	Counter testCounter = new SimpleCounter();
	testCounter.inc(7);

	assertThatGaugeIsExported(testCounter, "testCounter", "7.0");
}
 
Example #10
Source File: AbstractPrometheusReporter.java    From Flink-CEPplus with Apache License 2.0
private void removeMetric(Metric metric, List<String> dimensionValues, Collector collector) {
	if (metric instanceof Gauge) {
		((io.prometheus.client.Gauge) collector).remove(toArray(dimensionValues));
	} else if (metric instanceof Counter) {
		((io.prometheus.client.Gauge) collector).remove(toArray(dimensionValues));
	} else if (metric instanceof Meter) {
		((io.prometheus.client.Gauge) collector).remove(toArray(dimensionValues));
	} else if (metric instanceof Histogram) {
		((HistogramSummaryProxy) collector).remove(dimensionValues);
	} else {
		log.warn("Cannot remove unknown metric type: {}. This indicates that the metric type is not supported by this reporter.",
			metric.getClass().getName());
	}
}
 
Example #11
Source File: AbstractPrometheusReporter.java    From Flink-CEPplus with Apache License 2.0
private void addMetric(Metric metric, List<String> dimensionValues, Collector collector) {
	if (metric instanceof Gauge) {
		((io.prometheus.client.Gauge) collector).setChild(gaugeFrom((Gauge) metric), toArray(dimensionValues));
	} else if (metric instanceof Counter) {
		((io.prometheus.client.Gauge) collector).setChild(gaugeFrom((Counter) metric), toArray(dimensionValues));
	} else if (metric instanceof Meter) {
		((io.prometheus.client.Gauge) collector).setChild(gaugeFrom((Meter) metric), toArray(dimensionValues));
	} else if (metric instanceof Histogram) {
		((HistogramSummaryProxy) collector).addChild((Histogram) metric, dimensionValues);
	} else {
		log.warn("Cannot add unknown metric type: {}. This indicates that the metric type is not supported by this reporter.",
			metric.getClass().getName());
	}
}
 
Example #12
Source File: InputFormatSourceFunction.java    From Flink-CEPplus with Apache License 2.0
@Override
public void run(SourceContext<OUT> ctx) throws Exception {
	try {

		Counter completedSplitsCounter = getRuntimeContext().getMetricGroup().counter("numSplitsProcessed");
		if (isRunning && format instanceof RichInputFormat) {
			((RichInputFormat) format).openInputFormat();
		}

		OUT nextElement = serializer.createInstance();
		while (isRunning) {
			format.open(splitIterator.next());

			// for each element we also check if cancel
			// was called by checking the isRunning flag

			while (isRunning && !format.reachedEnd()) {
				nextElement = format.nextRecord(nextElement);
				if (nextElement != null) {
					ctx.collect(nextElement);
				} else {
					break;
				}
			}
			format.close();
			completedSplitsCounter.inc();

			if (isRunning) {
				isRunning = splitIterator.hasNext();
			}
		}
	} finally {
		format.close();
		if (format instanceof RichInputFormat) {
			((RichInputFormat) format).closeInputFormat();
		}
		isRunning = false;
	}
}
 
Example #13
Source File: MetricRegistryImplTest.java    From flink with Apache License 2.0
/**
 * Verifies that reporters are notified of added/removed metrics.
 */
@Test
public void testReporterNotifications() throws Exception {
	Configuration config = new Configuration();
	config.setString(ConfigConstants.METRICS_REPORTER_PREFIX + "test1." + ConfigConstants.METRICS_REPORTER_CLASS_SUFFIX, TestReporter6.class.getName());
	config.setString(ConfigConstants.METRICS_REPORTER_PREFIX + "test2." + ConfigConstants.METRICS_REPORTER_CLASS_SUFFIX, TestReporter7.class.getName());

	MetricRegistryImpl registry = new MetricRegistryImpl(
		MetricRegistryConfiguration.defaultMetricRegistryConfiguration(),
		Arrays.asList(
			ReporterSetup.forReporter("test1", new TestReporter6()),
			ReporterSetup.forReporter("test2", new TestReporter7())));

	TaskManagerMetricGroup root = new TaskManagerMetricGroup(registry, "host", "id");
	root.counter("rootCounter");

	assertTrue(TestReporter6.addedMetric instanceof Counter);
	assertEquals("rootCounter", TestReporter6.addedMetricName);

	assertTrue(TestReporter7.addedMetric instanceof Counter);
	assertEquals("rootCounter", TestReporter7.addedMetricName);

	root.close();

	assertTrue(TestReporter6.removedMetric instanceof Counter);
	assertEquals("rootCounter", TestReporter6.removedMetricName);

	assertTrue(TestReporter7.removedMetric instanceof Counter);
	assertEquals("rootCounter", TestReporter7.removedMetricName);

	registry.shutdown().get();
}
 
Example #14
Source File: InputFormatSourceFunction.java    From flink with Apache License 2.0
@Override
public void run(SourceContext<OUT> ctx) throws Exception {
	try {

		Counter completedSplitsCounter = getRuntimeContext().getMetricGroup().counter("numSplitsProcessed");
		if (isRunning && format instanceof RichInputFormat) {
			((RichInputFormat) format).openInputFormat();
		}

		OUT nextElement = serializer.createInstance();
		while (isRunning) {
			format.open(splitIterator.next());

			// for each element we also check if cancel
			// was called by checking the isRunning flag

			while (isRunning && !format.reachedEnd()) {
				nextElement = format.nextRecord(nextElement);
				if (nextElement != null) {
					ctx.collect(nextElement);
				} else {
					break;
				}
			}
			format.close();
			completedSplitsCounter.inc();

			if (isRunning) {
				isRunning = splitIterator.hasNext();
			}
		}
	} finally {
		format.close();
		if (format instanceof RichInputFormat) {
			((RichInputFormat) format).closeInputFormat();
		}
		isRunning = false;
	}
}
 
Example #15
Source File: MetricUtils.java    From alibaba-flink-connectors with Apache License 2.0
public static Meter registerOutBps(RuntimeContext context, String connectorType) {
	Counter bpsCounter = context.getMetricGroup().addGroup(METRIC_GROUP_SINK)
								.counter(METRICS_SINK_OUT_BPS + "_counter", new SimpleCounter());
	String tag = "";
	if (!StringUtils.isNullOrWhitespaceOnly(connectorType)) {
		tag = ":" + METRICS_TAG_CONNECTOR_TYPE + "=" + connectorType;
	}
	return context.getMetricGroup().addGroup(METRIC_GROUP_SINK)
				.meter(METRICS_SINK_OUT_BPS + tag, new MeterView(bpsCounter, 60));
}
 
Example #16
Source File: PrometheusReporterTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void registeringSameMetricTwiceDoesNotThrowException() {
	Counter counter = new SimpleCounter();
	counter.inc();
	String counterName = "testCounter";

	reporter.notifyOfAddedMetric(counter, counterName, metricGroup);
	reporter.notifyOfAddedMetric(counter, counterName, metricGroup);
}
 
Example #17
Source File: MetricMapperTest.java    From flink with Apache License 2.0
@Test
public void testMapCounter() {
	Counter counter = new SimpleCounter();
	counter.inc(42L);

	verifyPoint(
		MetricMapper.map(INFO, TIMESTAMP, counter),
		"count=42");
}
 
Example #18
Source File: TaskIOMetricGroup.java    From Flink-CEPplus with Apache License 2.0
@Override
public long getCount() {
	long sum = super.getCount();
	for (Counter counter : internalCounters) {
		sum += counter.getCount();
	}
	return sum;
}
 
Example #19
Source File: AbstractCachedBuildSideJoinDriver.java    From flink with Apache License 2.0
@Override
public void run() throws Exception {
	final Counter numRecordsOut = taskContext.getMetricGroup().getIOMetricGroup().getNumRecordsOutCounter();
	final FlatJoinFunction<IT1, IT2, OT> matchStub = this.taskContext.getStub();
	final Collector<OT> collector = new CountingCollector<>(this.taskContext.getOutputCollector(), numRecordsOut);
	
	while (this.running && matchIterator != null && matchIterator.callWithNextKey(matchStub, collector)) {
	}
}
 
Example #20
Source File: MetricDumpSerializerTest.java    From flink with Apache License 2.0
@Test
public void testJavaSerialization() throws IOException {
	MetricDumpSerialization.MetricDumpSerializer serializer = new MetricDumpSerialization.MetricDumpSerializer();

	final ByteArrayOutputStream bos = new ByteArrayOutputStream(1024);
	final ObjectOutputStream oos = new ObjectOutputStream(bos);

	oos.writeObject(serializer.serialize(
		new HashMap<Counter, Tuple2<QueryScopeInfo, String>>(),
		new HashMap<Gauge<?>, Tuple2<QueryScopeInfo, String>>(),
		new HashMap<Histogram, Tuple2<QueryScopeInfo, String>>(),
		new HashMap<Meter, Tuple2<QueryScopeInfo, String>>()));
}
 
Example #21
Source File: MetricQueryService.java    From flink with Apache License 2.0
public void removeMetric(Metric metric) {
	runAsync(() -> {
		if (metric instanceof Counter) {
			this.counters.remove(metric);
		} else if (metric instanceof Gauge) {
			this.gauges.remove(metric);
		} else if (metric instanceof Histogram) {
			this.histograms.remove(metric);
		} else if (metric instanceof Meter) {
			this.meters.remove(metric);
		}
	});
}
 
Example #22
Source File: JoinDriver.java    From Flink-CEPplus with Apache License 2.0
@Override
public void run() throws Exception {
	final Counter numRecordsOut = this.taskContext.getMetricGroup().getIOMetricGroup().getNumRecordsOutCounter();
	final FlatJoinFunction<IT1, IT2, OT> joinStub = this.taskContext.getStub();
	final Collector<OT> collector = new CountingCollector<>(this.taskContext.getOutputCollector(), numRecordsOut);
	final JoinTaskIterator<IT1, IT2, OT> joinIterator = this.joinIterator;
	
	while (this.running && joinIterator.callWithNextKey(joinStub, collector)) {
	}
}
 
Example #23
Source File: ScheduledDropwizardReporter.java    From flink with Apache License 2.0
@Override
public void notifyOfAddedMetric(Metric metric, String metricName, MetricGroup group) {
	final String fullName = group.getMetricIdentifier(metricName, this);

	synchronized (this) {
		if (metric instanceof Counter) {
			counters.put((Counter) metric, fullName);
			registry.register(fullName, new FlinkCounterWrapper((Counter) metric));
		}
		else if (metric instanceof Gauge) {
			gauges.put((Gauge<?>) metric, fullName);
			registry.register(fullName, FlinkGaugeWrapper.fromGauge((Gauge<?>) metric));
		} else if (metric instanceof Histogram) {
			Histogram histogram = (Histogram) metric;
			histograms.put(histogram, fullName);

			if (histogram instanceof DropwizardHistogramWrapper) {
				registry.register(fullName, ((DropwizardHistogramWrapper) histogram).getDropwizardHistogram());
			} else {
				registry.register(fullName, new FlinkHistogramWrapper(histogram));
			}
		} else if (metric instanceof Meter) {
			Meter meter = (Meter) metric;
			meters.put(meter, fullName);

			if (meter instanceof DropwizardMeterWrapper) {
				registry.register(fullName, ((DropwizardMeterWrapper) meter).getDropwizardMeter());
			} else {
				registry.register(fullName, new FlinkMeterWrapper(meter));
			}
		} else {
			log.warn("Cannot add metric of type {}. This indicates that the reporter " +
				"does not support this metric type.", metric.getClass().getName());
		}
	}
}
 
Example #24
Source File: LocalInputChannel.java    From flink with Apache License 2.0
public LocalInputChannel(
	SingleInputGate inputGate,
	int channelIndex,
	ResultPartitionID partitionId,
	ResultPartitionManager partitionManager,
	TaskEventPublisher taskEventPublisher,
	Counter numBytesIn,
	Counter numBuffersIn) {

	this(inputGate, channelIndex, partitionId, partitionManager, taskEventPublisher, 0, 0, numBytesIn, numBuffersIn);
}
 
Example #25
Source File: ScheduledDropwizardReporter.java    From Flink-CEPplus with Apache License 2.0
@Override
public void notifyOfAddedMetric(Metric metric, String metricName, MetricGroup group) {
	final String fullName = group.getMetricIdentifier(metricName, this);

	synchronized (this) {
		if (metric instanceof Counter) {
			counters.put((Counter) metric, fullName);
			registry.register(fullName, new FlinkCounterWrapper((Counter) metric));
		}
		else if (metric instanceof Gauge) {
			gauges.put((Gauge<?>) metric, fullName);
			registry.register(fullName, FlinkGaugeWrapper.fromGauge((Gauge<?>) metric));
		} else if (metric instanceof Histogram) {
			Histogram histogram = (Histogram) metric;
			histograms.put(histogram, fullName);

			if (histogram instanceof DropwizardHistogramWrapper) {
				registry.register(fullName, ((DropwizardHistogramWrapper) histogram).getDropwizardHistogram());
			} else {
				registry.register(fullName, new FlinkHistogramWrapper(histogram));
			}
		} else if (metric instanceof Meter) {
			Meter meter = (Meter) metric;
			meters.put(meter, fullName);

			if (meter instanceof DropwizardMeterWrapper) {
				registry.register(fullName, ((DropwizardMeterWrapper) meter).getDropwizardMeter());
			} else {
				registry.register(fullName, new FlinkMeterWrapper(meter));
			}
		} else {
			log.warn("Cannot add metric of type {}. This indicates that the reporter " +
				"does not support this metric type.", metric.getClass().getName());
		}
	}
}
 
Example #26
Source File: MetricRegistryImplTest.java    From Flink-CEPplus with Apache License 2.0
/**
 * Verifies that reporters are notified of added/removed metrics.
 */
@Test
public void testReporterNotifications() throws Exception {
	Configuration config = new Configuration();
	config.setString(ConfigConstants.METRICS_REPORTER_PREFIX + "test1." + ConfigConstants.METRICS_REPORTER_CLASS_SUFFIX, TestReporter6.class.getName());
	config.setString(ConfigConstants.METRICS_REPORTER_PREFIX + "test2." + ConfigConstants.METRICS_REPORTER_CLASS_SUFFIX, TestReporter7.class.getName());

	MetricRegistryImpl registry = new MetricRegistryImpl(MetricRegistryConfiguration.fromConfiguration(config));

	TaskManagerMetricGroup root = new TaskManagerMetricGroup(registry, "host", "id");
	root.counter("rootCounter");

	assertTrue(TestReporter6.addedMetric instanceof Counter);
	assertEquals("rootCounter", TestReporter6.addedMetricName);

	assertTrue(TestReporter7.addedMetric instanceof Counter);
	assertEquals("rootCounter", TestReporter7.addedMetricName);

	root.close();

	assertTrue(TestReporter6.removedMetric instanceof Counter);
	assertEquals("rootCounter", TestReporter6.removedMetricName);

	assertTrue(TestReporter7.removedMetric instanceof Counter);
	assertEquals("rootCounter", TestReporter7.removedMetricName);

	registry.shutdown().get();
}
 
Example #27
Source File: JoinDriver.java    From flink with Apache License 2.0
@Override
public void run() throws Exception {
	final Counter numRecordsOut = this.taskContext.getMetricGroup().getIOMetricGroup().getNumRecordsOutCounter();
	final FlatJoinFunction<IT1, IT2, OT> joinStub = this.taskContext.getStub();
	final Collector<OT> collector = new CountingCollector<>(this.taskContext.getOutputCollector(), numRecordsOut);
	final JoinTaskIterator<IT1, IT2, OT> joinIterator = this.joinIterator;
	
	while (this.running && joinIterator.callWithNextKey(joinStub, collector)) {
	}
}
 
Example #28
Source File: OneInputStreamTaskTest.java    From flink with Apache License 2.0
@Test
public void testOperatorMetricReuse() throws Exception {
	final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(OneInputStreamTask::new, BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);

	testHarness.setupOperatorChain(new OperatorID(), new DuplicatingOperator())
		.chain(new OperatorID(), new DuplicatingOperator(), BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()))
		.chain(new OperatorID(), new DuplicatingOperator(), BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()))
		.finish();

	final TaskMetricGroup taskMetricGroup = new UnregisteredMetricGroups.UnregisteredTaskMetricGroup() {
		@Override
		public OperatorMetricGroup getOrAddOperator(OperatorID operatorID, String name) {
			return new OperatorMetricGroup(NoOpMetricRegistry.INSTANCE, this, operatorID, name);
		}
	};

	final StreamMockEnvironment env = new StreamMockEnvironment(
		testHarness.jobConfig, testHarness.taskConfig, testHarness.memorySize, new MockInputSplitProvider(), testHarness.bufferSize, new TestTaskStateManager()) {
		@Override
		public TaskMetricGroup getMetricGroup() {
			return taskMetricGroup;
		}
	};

	final Counter numRecordsInCounter = taskMetricGroup.getIOMetricGroup().getNumRecordsInCounter();
	final Counter numRecordsOutCounter = taskMetricGroup.getIOMetricGroup().getNumRecordsOutCounter();

	testHarness.invoke(env);
	testHarness.waitForTaskRunning();

	final int numRecords = 5;

	for (int x = 0; x < numRecords; x++) {
		testHarness.processElement(new StreamRecord<>("hello"));
	}
	testHarness.waitForInputProcessing();

	assertEquals(numRecords, numRecordsInCounter.getCount());
	assertEquals(numRecords * 2 * 2 * 2, numRecordsOutCounter.getCount());

	testHarness.endInput();
	testHarness.waitForTaskCompletion();
}