org.apache.flink.shaded.guava18.com.google.common.collect.Maps Java Examples

The following examples show how to use org.apache.flink.shaded.guava18.com.google.common.collect.Maps, Flink's relocated (shaded) copy of the Guava 18 Maps utility class. Each example notes the source file and project it was taken from, together with that project's license.
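Before the project-specific examples, here is a minimal, self-contained sketch of the three Maps methods that recur below: newHashMapWithExpectedSize, fromProperties, and transformValues. The class, keys, and values are illustrative and not taken from any of the projects.

import org.apache.flink.shaded.guava18.com.google.common.collect.Maps;

import java.util.Map;
import java.util.Properties;

public class ShadedMapsSketch {

    public static void main(String[] args) {
        // HashMap sized so the expected number of entries fits without rehashing.
        Map<String, Integer> counters = Maps.newHashMapWithExpectedSize(4);
        counters.put("records-in", 42);

        // Immutable Map<String, String> snapshot of a Properties instance.
        Properties props = new Properties();
        props.setProperty("topic", "events");
        Map<String, String> asMap = Maps.fromProperties(props);

        // Live view whose values are transformed lazily on access.
        Map<String, String> rendered = Maps.transformValues(counters, v -> "count=" + v);

        System.out.println(asMap);
        System.out.println(rendered);
    }
}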
Example #1
Source File: OperatorStateWriter.java    From bravo with Apache License 2.0
public OperatorStateWriter(long checkpointId, OperatorState baseOpState, Path newCheckpointBasePath) {
	this.baseOpState = baseOpState;
	this.newCheckpointBasePath = newCheckpointBasePath;
	this.checkpointId = checkpointId;

	proxy = StateMetadataUtils.getKeyedBackendSerializationProxy(baseOpState).orElse(null);
	metaSnapshots = new HashMap<>();
	if (proxy != null) {
		proxy.getStateMetaInfoSnapshots()
				.forEach(ms -> metaSnapshots.put(ms.getName(),
						new StateMetaInfoSnapshot(ms.getName(), ms.getBackendStateType(), ms.getOptionsImmutable(),
								ms.getSerializerSnapshotsImmutable(),
								Maps.transformValues(ms.getSerializerSnapshotsImmutable(),
										TypeSerializerSnapshot::restoreSerializer))));
	}
}
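
In the snippet above, Maps.transformValues wraps the serializer-snapshot map in a view that restores each serializer only when it is looked up. A minimal sketch of this view behaviour, using plain strings instead of Flink's state-metadata types (it assumes the shaded Maps import shown after the introduction, plus java.util.HashMap; all names are illustrative):

Map<String, Integer> source = new HashMap<>();
source.put("x", 1);

// transformValues returns a live, lazily evaluated view, not a copy.
Map<String, String> view = Maps.transformValues(source, v -> "value-" + v);

source.put("y", 2);                 // later changes to the source are visible through the view
System.out.println(view.get("y"));  // prints "value-2": the function runs on access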
 
Example #2
Source File: StackTraceSampleCoordinator.java    From Flink-CEPplus with Apache License 2.0
PendingStackTraceSample(
		int sampleId,
		ExecutionAttemptID[] tasksToCollect) {

	this.sampleId = sampleId;
	this.startTime = System.currentTimeMillis();
	this.pendingTasks = new HashSet<>(Arrays.asList(tasksToCollect));
	this.stackTracesByTask = Maps.newHashMapWithExpectedSize(tasksToCollect.length);
	this.stackTraceFuture = new CompletableFuture<>();
}
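
Maps.newHashMapWithExpectedSize, used here to hold one stack trace per task, is not the same as new HashMap<>(n): the argument is the expected number of entries, and Guava chooses an initial capacity large enough that those entries fit without rehashing under the default 0.75 load factor. A rough sketch with placeholder task names instead of Flink's ExecutionAttemptID (assuming the shaded Maps import shown after the introduction):

String[] tasksToCollect = {"task-0", "task-1", "task-2"};

// Sized for tasksToCollect.length entries; no rehash happens while the map is filled.
Map<String, String> stackTracesByTask = Maps.newHashMapWithExpectedSize(tasksToCollect.length);

for (String task : tasksToCollect) {
    stackTracesByTask.put(task, "<stack trace placeholder>");
}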
 
Example #3
Source File: UnionInputGate.java    From Flink-CEPplus with Apache License 2.0
public UnionInputGate(InputGate... inputGates) {
	this.inputGates = checkNotNull(inputGates);
	checkArgument(inputGates.length > 1, "Union input gate should union at least two input gates.");

	this.inputGateToIndexOffsetMap = Maps.newHashMapWithExpectedSize(inputGates.length);
	this.inputGatesWithRemainingData = Sets.newHashSetWithExpectedSize(inputGates.length);

	int currentNumberOfInputChannels = 0;

	for (InputGate inputGate : inputGates) {
		if (inputGate instanceof UnionInputGate) {
			// if we want to add support for this, we need to implement pollNextBufferOrEvent()
			throw new UnsupportedOperationException("Cannot union a union of input gates.");
		}

		// The offset to use for buffer or event instances received from this input gate.
		inputGateToIndexOffsetMap.put(checkNotNull(inputGate), currentNumberOfInputChannels);
		inputGatesWithRemainingData.add(inputGate);

		currentNumberOfInputChannels += inputGate.getNumberOfInputChannels();

		// Register the union gate as a listener for all input gates
		inputGate.registerListener(this);
	}

	this.totalNumberOfInputChannels = currentNumberOfInputChannels;
}
 
Example #4
Source File: PulsarTableSource.java    From pulsar-flink with Apache License 2.0
public PulsarTableSource(
        Optional<TableSchema> providedSchema,
        Optional<String> proctimeAttribute,
        List<RowtimeAttributeDescriptor> rowtimeAttributeDescriptors,
        String serviceUrl,
        String adminUrl,
        Properties properties,
        StartupMode startupMode,
        Map<String, MessageId> specificStartupOffsets,
        String externalSubscriptionName) {

    this.providedSchema = providedSchema;
    this.serviceUrl = checkNotNull(serviceUrl);
    this.adminUrl = checkNotNull(adminUrl);
    this.properties = checkNotNull(properties);
    this.startupMode = startupMode;
    this.specificStartupOffsets = specificStartupOffsets;
    this.externalSubscriptionName = externalSubscriptionName;

    this.caseInsensitiveParams =
            SourceSinkUtils.validateStreamSourceOptions(Maps.fromProperties(properties));

    this.schema = inferTableSchema();

    this.proctimeAttribute = validateProctimeAttribute(proctimeAttribute);
    this.rowtimeAttributeDescriptors = validateRowtimeAttributeDescriptors(rowtimeAttributeDescriptors);
}
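
Maps.fromProperties, used above to hand the connector options to SourceSinkUtils.validateStreamSourceOptions, copies a java.util.Properties instance into an immutable Map<String, String>. The result is a snapshot: later changes to the Properties object are not reflected. A small illustrative sketch (the property keys are made up, not real Pulsar connector options; it assumes the shaded Maps import shown after the introduction, plus java.util.Properties):

Properties properties = new Properties();
properties.setProperty("serviceUrl", "pulsar://localhost:6650");
properties.setProperty("topic", "persistent://public/default/events");

// Immutable snapshot of the current Properties content as Map<String, String>.
Map<String, String> options = Maps.fromProperties(properties);

properties.setProperty("topic", "changed-later");  // does not affect the snapshot
System.out.println(options.get("topic"));          // persistent://public/default/events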
 
Example #5
Source File: StackTraceSampleCoordinator.java    From flink with Apache License 2.0
PendingStackTraceSample(
		int sampleId,
		ExecutionAttemptID[] tasksToCollect) {

	this.sampleId = sampleId;
	this.startTime = System.currentTimeMillis();
	this.pendingTasks = new HashSet<>(Arrays.asList(tasksToCollect));
	this.stackTracesByTask = Maps.newHashMapWithExpectedSize(tasksToCollect.length);
	this.stackTraceFuture = new CompletableFuture<>();
}
 
Example #6
Source File: UnionInputGate.java    From flink with Apache License 2.0
public UnionInputGate(InputGate... inputGates) {
	this.inputGates = checkNotNull(inputGates);
	checkArgument(inputGates.length > 1, "Union input gate should union at least two input gates.");

	this.inputGateToIndexOffsetMap = Maps.newHashMapWithExpectedSize(inputGates.length);
	this.inputGatesWithRemainingData = Sets.newHashSetWithExpectedSize(inputGates.length);

	int currentNumberOfInputChannels = 0;

	synchronized (inputGatesWithData) {
		for (InputGate inputGate : inputGates) {
			if (inputGate instanceof UnionInputGate) {
				// if we want to add support for this, we need to implement pollNext()
				throw new UnsupportedOperationException("Cannot union a union of input gates.");
			}

			// The offset to use for buffer or event instances received from this input gate.
			inputGateToIndexOffsetMap.put(checkNotNull(inputGate), currentNumberOfInputChannels);
			inputGatesWithRemainingData.add(inputGate);

			currentNumberOfInputChannels += inputGate.getNumberOfInputChannels();

			CompletableFuture<?> available = inputGate.isAvailable();

			if (available.isDone()) {
				inputGatesWithData.add(inputGate);
			} else {
				available.thenRun(() -> queueInputGate(inputGate));
			}
		}

		if (!inputGatesWithData.isEmpty()) {
			isAvailable = AVAILABLE;
		}
	}

	this.totalNumberOfInputChannels = currentNumberOfInputChannels;
}
 
Example #7
Source File: BackPressureStatsTrackerImpl.java    From flink with Apache License 2.0
/**
 * Creates {@link OperatorBackPressureStats} from {@link BackPressureStats}.
 */
private OperatorBackPressureStats createOperatorBackPressureStats(BackPressureStats stats) {
	Map<ExecutionAttemptID, Double> backPressureRatiosByTask = stats.getBackPressureRatios();

	// Map task ID to subtask index, because the web interface expects
	// it like that.
	Map<ExecutionAttemptID, Integer> subtaskIndexMap = Maps
			.newHashMapWithExpectedSize(backPressureRatiosByTask.size());

	Set<ExecutionAttemptID> tasks = backPressureRatiosByTask.keySet();

	for (ExecutionVertex task : vertex.getTaskVertices()) {
		ExecutionAttemptID taskId = task.getCurrentExecutionAttempt().getAttemptId();
		if (tasks.contains(taskId)) {
			subtaskIndexMap.put(taskId, task.getParallelSubtaskIndex());
		} else {
			LOG.debug("Outdated stats. A task, which is part of the " +
					"request has been reset.");
		}
	}

	// Back pressure ratios of all tasks. Array position corresponds
	// to sub task index.
	double[] backPressureRatios = new double[backPressureRatiosByTask.size()];

	for (Entry<ExecutionAttemptID, Double> entry : backPressureRatiosByTask.entrySet()) {
		int subtaskIndex = subtaskIndexMap.get(entry.getKey());
		backPressureRatios[subtaskIndex] = entry.getValue();
	}

	return new OperatorBackPressureStats(
			stats.getRequestId(),
			stats.getEndTime(),
			backPressureRatios);
}
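
The method above first builds a pre-sized lookup from attempt ID to subtask index with Maps.newHashMapWithExpectedSize and then uses it to place each ratio at its subtask position in an array. The same pattern reduced to plain strings and doubles (ids and ratios are invented; assumes the shaded Maps import shown after the introduction, plus java.util.HashMap):

Map<String, Integer> subtaskIndexById = Maps.newHashMapWithExpectedSize(2);
subtaskIndexById.put("attempt-a", 0);
subtaskIndexById.put("attempt-b", 1);

Map<String, Double> ratiosById = new HashMap<>();
ratiosById.put("attempt-a", 0.25);
ratiosById.put("attempt-b", 0.75);

// Positions in the array correspond to subtask indices, which is the order the web UI expects.
double[] ratios = new double[ratiosById.size()];
for (Map.Entry<String, Double> entry : ratiosById.entrySet()) {
    ratios[subtaskIndexById.get(entry.getKey())] = entry.getValue();
}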
 
Example #8
Source File: BackPressureRequestCoordinator.java    From flink with Apache License 2.0
PendingBackPressureRequest(
		int requestId,
		ExecutionAttemptID[] tasksToCollect) {

	this.requestId = requestId;
	this.startTime = System.currentTimeMillis();
	this.pendingTasks = new HashSet<>(Arrays.asList(tasksToCollect));
	this.backPressureRatios = Maps.newHashMapWithExpectedSize(tasksToCollect.length);
	this.backPressureStatsFuture = new CompletableFuture<>();
}
 
Example #9
Source File: FlinkPulsarSinkBase.java    From pulsar-flink with Apache License 2.0
public FlinkPulsarSinkBase(
        String adminUrl,
        Optional<String> defaultTopicName,
        ClientConfigurationData clientConf,
        Properties properties,
        TopicKeyExtractor<T> topicKeyExtractor) {

    this.adminUrl = checkNotNull(adminUrl);

    if (defaultTopicName.isPresent()) {
        this.forcedTopic = true;
        this.defaultTopic = defaultTopicName.get();
        this.topicKeyExtractor = null;
    } else {
        this.forcedTopic = false;
        this.defaultTopic = null;
        ClosureCleaner.clean(
                topicKeyExtractor, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, true);
        this.topicKeyExtractor = checkNotNull(topicKeyExtractor);
    }

    this.clientConfigurationData = clientConf;

    this.properties = checkNotNull(properties);

    this.caseInsensitiveParams =
            SourceSinkUtils.toCaceInsensitiveParams(Maps.fromProperties(properties));

    this.producerConf =
            SourceSinkUtils.getProducerParams(Maps.fromProperties(properties));

    this.flushOnCheckpoint =
            SourceSinkUtils.flushOnCheckpoint(caseInsensitiveParams);

    this.failOnWrite =
            SourceSinkUtils.failOnWrite(caseInsensitiveParams);

    CachedPulsarClient.setCacheSize(SourceSinkUtils.getClientCacheSize(caseInsensitiveParams));

    if (this.clientConfigurationData.getServiceUrl() == null) {
        throw new IllegalArgumentException("ServiceUrl must be supplied in the client configuration");
    }
}