Java Code Examples for org.apache.flink.util.InstantiationUtil#clone()

The following examples show how to use org.apache.flink.util.InstantiationUtil#clone(). Each example is taken from an open-source project; the source file, project, and license are listed above each snippet.
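Before the project examples, here is a minimal sketch of the call pattern they all share. InstantiationUtil.clone() deep-copies an object by serializing and deserializing it, so the argument must implement java.io.Serializable; both overloads declare IOException and ClassNotFoundException, and the two-argument overload takes the ClassLoader (for example, a user-code class loader) used to resolve classes during deserialization. The MySerializableConfig class below is a hypothetical placeholder, not part of Flink.

import java.io.IOException;
import java.io.Serializable;

import org.apache.flink.util.InstantiationUtil;

public class CloneSketch {

	// Hypothetical Serializable type, used only for illustration.
	static class MySerializableConfig implements Serializable {
		private static final long serialVersionUID = 1L;
		String name = "default";
	}

	public static void main(String[] args) throws Exception {
		MySerializableConfig original = new MySerializableConfig();
		try {
			// Deep copy via a serialization round-trip.
			MySerializableConfig copy = InstantiationUtil.clone(original);

			// Overload that resolves classes against an explicit class loader,
			// as the connector examples below do with the user-code class loader.
			MySerializableConfig copyWithClassLoader =
					InstantiationUtil.clone(original, Thread.currentThread().getContextClassLoader());

			System.out.println(copy != original);            // true: a distinct instance
			System.out.println(copyWithClassLoader.name);    // "default"
		} catch (IOException | ClassNotFoundException e) {
			// Both overloads declare these checked exceptions.
			throw new RuntimeException("Could not clone object via serialization", e);
		}
	}
}
 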
Example 1
Source File: Kafka08Fetcher.java    From flink with Apache License 2.0
private SimpleConsumerThread<T> createAndStartSimpleConsumerThread(
		List<KafkaTopicPartitionState<TopicAndPartition>> seedPartitions,
		Node leader,
		ExceptionProxy errorHandler) throws IOException, ClassNotFoundException {
	// each thread needs its own copy of the deserializer, because the deserializer is
	// not necessarily thread safe
	final KafkaDeserializationSchema<T> clonedDeserializer =
			InstantiationUtil.clone(deserializer, runtimeContext.getUserCodeClassLoader());

	// seed the thread with the list of fetch partitions (otherwise it would shut down again immediately)
	SimpleConsumerThread<T> brokerThread = new SimpleConsumerThread<>(
			this, errorHandler, kafkaConfig, leader, seedPartitions, unassignedPartitionsQueue,
			clonedDeserializer, invalidOffsetBehavior);

	brokerThread.setName(String.format("SimpleConsumer - %s - broker-%s (%s:%d)",
			runtimeContext.getTaskName(), leader.id(), leader.host(), leader.port()));
	brokerThread.setDaemon(true);
	brokerThread.start();

	LOG.info("Starting thread {}", brokerThread.getName());
	return brokerThread;
}
 
Example 2
Source File: Kafka08Fetcher.java    From Flink-CEPplus with Apache License 2.0
private SimpleConsumerThread<T> createAndStartSimpleConsumerThread(
		List<KafkaTopicPartitionState<TopicAndPartition>> seedPartitions,
		Node leader,
		ExceptionProxy errorHandler) throws IOException, ClassNotFoundException {
	// each thread needs its own copy of the deserializer, because the deserializer is
	// not necessarily thread safe
	final KafkaDeserializationSchema<T> clonedDeserializer =
			InstantiationUtil.clone(deserializer, runtimeContext.getUserCodeClassLoader());

	// seed the thread with the list of fetch partitions (otherwise it would shut down again immediately)
	SimpleConsumerThread<T> brokerThread = new SimpleConsumerThread<>(
			this, errorHandler, kafkaConfig, leader, seedPartitions, unassignedPartitionsQueue,
			clonedDeserializer, invalidOffsetBehavior);

	brokerThread.setName(String.format("SimpleConsumer - %s - broker-%s (%s:%d)",
			runtimeContext.getTaskName(), leader.id(), leader.host(), leader.port()));
	brokerThread.setDaemon(true);
	brokerThread.start();

	LOG.info("Starting thread {}", brokerThread.getName());
	return brokerThread;
}
 
Example 3
Source File: HiveAggSqlFunction.java    From flink with Apache License 2.0
@Override
public AggregateFunction makeFunction(Object[] constantArguments, LogicalType[] argTypes) {
	AggregateFunction clone;
	try {
		clone = InstantiationUtil.clone(aggregateFunction);
	} catch (IOException | ClassNotFoundException e) {
		throw new RuntimeException(e);
	}
	return (AggregateFunction) invokeSetArgs(clone, constantArguments, argTypes);
}
 
Example 4
Source File: HiveScalarSqlFunction.java    From flink with Apache License 2.0
@Override
public ScalarFunction makeFunction(Object[] constantArguments, LogicalType[] argTypes) {
	ScalarFunction clone;
	try {
		clone = InstantiationUtil.clone(function);
	} catch (IOException | ClassNotFoundException e) {
		throw new RuntimeException(e);
	}
	return (ScalarFunction) invokeSetArgs(clone, constantArguments, argTypes);
}
 
Example 5
Source File: KinesisDataFetcher.java    From flink with Apache License 2.0
/**
 * Register a new subscribed shard state.
 *
 * @param newSubscribedShardState the new shard state that this fetcher is to be subscribed to
 */
public int registerNewSubscribedShardState(KinesisStreamShardState newSubscribedShardState) {
	synchronized (checkpointLock) {
		subscribedShardsState.add(newSubscribedShardState);

		// If a registered shard has initial state that is not SENTINEL_SHARD_ENDING_SEQUENCE_NUM (will be the case
		// if the consumer had already finished reading a shard before we failed and restored), we determine that
		// this subtask has a new active shard
		if (!newSubscribedShardState.getLastProcessedSequenceNum().equals(SentinelSequenceNumber.SENTINEL_SHARD_ENDING_SEQUENCE_NUM.get())) {
			this.numberOfActiveShards.incrementAndGet();
		}

		int shardStateIndex = subscribedShardsState.size() - 1;

		// track all discovered shards for watermark determination
		ShardWatermarkState sws = shardWatermarks.get(shardStateIndex);
		if (sws == null) {
			sws = new ShardWatermarkState();
			try {
				sws.periodicWatermarkAssigner = InstantiationUtil.clone(periodicWatermarkAssigner);
			} catch (Exception e) {
				throw new RuntimeException("Failed to instantiate new WatermarkAssigner", e);
			}
			sws.emitQueue = recordEmitter.getQueue(shardStateIndex);
			sws.lastUpdated = getCurrentTimeMillis();
			sws.lastRecordTimestamp = Long.MIN_VALUE;
			shardWatermarks.put(shardStateIndex, sws);
		}

		return shardStateIndex;
	}
}
 
Example 6
Source File: KinesisDataFetcher.java    From flink with Apache License 2.0
private KinesisDeserializationSchema<T> getClonedDeserializationSchema() {
	try {
		return InstantiationUtil.clone(deserializationSchema, runtimeContext.getUserCodeClassLoader());
	} catch (IOException | ClassNotFoundException ex) {
		// this really shouldn't happen; simply wrap it around a runtime exception
		throw new RuntimeException(ex);
	}
}
 
Example 7
Source File: TimestampedHiveInputSplit.java    From flink with Apache License 2.0
@Override
public TimestampedHiveInputSplit copy(TimestampedHiveInputSplit from) {
	try {
		return InstantiationUtil.clone(from, Thread.currentThread().getContextClassLoader());
	} catch (IOException | ClassNotFoundException e) {
		throw new FlinkRuntimeException("Could not copy element via serialization: " + from, e);
	}
}
 
Example 8
Source File: JavaSerializer.java    From flink with Apache License 2.0
@Override
public T copy(T from) {
	try {
		return InstantiationUtil.clone(from, Thread.currentThread().getContextClassLoader());
	} catch (IOException | ClassNotFoundException e) {
		throw new FlinkRuntimeException("Could not copy element via serialization: " + from, e);
	}
}
 
Example 9
Source File: KryoSerializer.java    From flink with Apache License 2.0
private ExecutionConfig.SerializableSerializer<? extends Serializer<?>> deepCopySerializer(
	ExecutionConfig.SerializableSerializer<? extends Serializer<?>> original) {
	try {
		return InstantiationUtil.clone(original, Thread.currentThread().getContextClassLoader());
	} catch (IOException | ClassNotFoundException ex) {
		throw new CloneFailedException(
			"Could not clone serializer instance of class " + original.getClass(),
			ex);
	}
}
 
Example 10
Source File: HiveTableSqlFunction.java    From flink with Apache License 2.0
@Override
public TableFunction makeFunction(Object[] constantArguments, LogicalType[] argTypes) {
	TableFunction clone;
	try {
		clone = InstantiationUtil.clone(hiveUdtf);
	} catch (IOException | ClassNotFoundException e) {
		throw new RuntimeException(e);
	}
	return (TableFunction) invokeSetArgs(clone, constantArguments, argTypes);
}
 
Example 11
Source File: JDBCAppendTableSink.java    From flink with Apache License 2.0
@Override
public TableSink<Row> configure(String[] fieldNames, TypeInformation<?>[] fieldTypes) {
	int[] types = outputFormat.getTypesArray();

	String sinkSchema =
		String.join(", ", IntStream.of(types).mapToObj(JDBCTypeUtil::getTypeName).collect(Collectors.toList()));
	String tableSchema =
		String.join(", ", Stream.of(fieldTypes).map(JDBCTypeUtil::getTypeName).collect(Collectors.toList()));
	String msg = String.format("Schema of output table is incompatible with JDBCAppendTableSink schema. " +
		"Table schema: [%s], sink schema: [%s]", tableSchema, sinkSchema);

	Preconditions.checkArgument(fieldTypes.length == types.length, msg);
	for (int i = 0; i < types.length; ++i) {
		Preconditions.checkArgument(
			JDBCTypeUtil.typeInformationToSqlType(fieldTypes[i]) == types[i],
			msg);
	}

	JDBCAppendTableSink copy;
	try {
		copy = new JDBCAppendTableSink(InstantiationUtil.clone(outputFormat));
	} catch (IOException | ClassNotFoundException e) {
		throw new RuntimeException(e);
	}

	copy.fieldNames = fieldNames;
	copy.fieldTypes = fieldTypes;
	return copy;
}
 
Example 12
Source File: KinesisDataFetcher.java    From flink with Apache License 2.0
protected KinesisDeserializationSchema<T> getClonedDeserializationSchema() {
	try {
		return InstantiationUtil.clone(deserializationSchema, runtimeContext.getUserCodeClassLoader());
	} catch (IOException | ClassNotFoundException ex) {
		// this really shouldn't happen; simply wrap it around a runtime exception
		throw new RuntimeException(ex);
	}
}
 
Example 13
Source File: KryoSerializer.java    From Flink-CEPplus with Apache License 2.0
private ExecutionConfig.SerializableSerializer<? extends Serializer<?>> deepCopySerializer(
	ExecutionConfig.SerializableSerializer<? extends Serializer<?>> original) {
	try {
		return InstantiationUtil.clone(original, Thread.currentThread().getContextClassLoader());
	} catch (IOException | ClassNotFoundException ex) {
		throw new CloneFailedException(
			"Could not clone serializer instance of class " + original.getClass(),
			ex);
	}
}
 
Example 14
Source File: JDBCAppendTableSink.java    From Flink-CEPplus with Apache License 2.0
@Override
public TableSink<Row> configure(String[] fieldNames, TypeInformation<?>[] fieldTypes) {
	int[] types = outputFormat.getTypesArray();

	String sinkSchema =
		String.join(", ", IntStream.of(types).mapToObj(JDBCTypeUtil::getTypeName).collect(Collectors.toList()));
	String tableSchema =
		String.join(", ", Stream.of(fieldTypes).map(JDBCTypeUtil::getTypeName).collect(Collectors.toList()));
	String msg = String.format("Schema of output table is incompatible with JDBCAppendTableSink schema. " +
		"Table schema: [%s], sink schema: [%s]", tableSchema, sinkSchema);

	Preconditions.checkArgument(fieldTypes.length == types.length, msg);
	for (int i = 0; i < types.length; ++i) {
		Preconditions.checkArgument(
			JDBCTypeUtil.typeInformationToSqlType(fieldTypes[i]) == types[i],
			msg);
	}

	JDBCAppendTableSink copy;
	try {
		copy = new JDBCAppendTableSink(InstantiationUtil.clone(outputFormat));
	} catch (IOException | ClassNotFoundException e) {
		throw new RuntimeException(e);
	}

	copy.fieldNames = fieldNames;
	copy.fieldTypes = fieldTypes;
	return copy;
}
 
Example 15
Source File: KinesisDataFetcher.java    From Flink-CEPplus with Apache License 2.0
protected KinesisDeserializationSchema<T> getClonedDeserializationSchema() {
	try {
		return InstantiationUtil.clone(deserializationSchema, runtimeContext.getUserCodeClassLoader());
	} catch (IOException | ClassNotFoundException ex) {
		// this really shouldn't happen; simply wrap it around a runtime exception
		throw new RuntimeException(ex);
	}
}