org.apache.flink.testutils.migration.MigrationVersion Java Examples

The following examples show how to use org.apache.flink.testutils.migration.MigrationVersion. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: TypeSerializerSnapshotMigrationITCase.java    From flink with Apache License 2.0 6 votes vote down vote up
/**
 * Parameterizes the test with every (savepoint version, state backend) pair to migrate from.
 */
@Parameterized.Parameters(name = "Migrate Savepoint / Backend: {0}")
public static Collection<Tuple2<MigrationVersion, String>> parameters () {
	final String memoryBackend = StateBackendLoader.MEMORY_STATE_BACKEND_NAME;
	final String rocksDbBackend = StateBackendLoader.ROCKSDB_STATE_BACKEND_NAME;
	// Versions 1.3 through 1.8, each paired with both state backends.
	return Arrays.asList(
		Tuple2.of(MigrationVersion.v1_3, memoryBackend),
		Tuple2.of(MigrationVersion.v1_3, rocksDbBackend),
		Tuple2.of(MigrationVersion.v1_4, memoryBackend),
		Tuple2.of(MigrationVersion.v1_4, rocksDbBackend),
		Tuple2.of(MigrationVersion.v1_5, memoryBackend),
		Tuple2.of(MigrationVersion.v1_5, rocksDbBackend),
		Tuple2.of(MigrationVersion.v1_6, memoryBackend),
		Tuple2.of(MigrationVersion.v1_6, rocksDbBackend),
		Tuple2.of(MigrationVersion.v1_7, memoryBackend),
		Tuple2.of(MigrationVersion.v1_7, rocksDbBackend),
		Tuple2.of(MigrationVersion.v1_8, memoryBackend),
		Tuple2.of(MigrationVersion.v1_8, rocksDbBackend));
}
 
Example #2
Source File: TypeSerializerSnapshotMigrationTestBase.java    From flink with Apache License 2.0 6 votes vote down vote up
/**
 * Adds a test specification, verified with the given schema-compatibility matcher, for all
 * specified test versions.
 *
 * <p>This method adds the specification with pre-defined snapshot and data filenames,
 * with the format "flink-&lt;testVersion&gt;-&lt;specName&gt;-&lt;data/snapshot&gt;",
 * and each specification's test data count is assumed to always be 10.
 *
 * @param name test specification name.
 * @param serializerClass class of the current serializer.
 * @param snapshotClass class of the current serializer snapshot class.
 * @param serializerProvider provider for an instance of the current serializer.
 * @param schemaCompatibilityMatcher matcher applied to the resolved schema compatibility.
 *
 * @param <T> type of the test data.
 */
public <T> void addWithCompatibilityMatcher(
		String name,
		Class<? extends TypeSerializer> serializerClass,
		Class<? extends TypeSerializerSnapshot> snapshotClass,
		Supplier<? extends TypeSerializer<T>> serializerProvider,
		Matcher<TypeSerializerSchemaCompatibility<T>> schemaCompatibilityMatcher) {
	for (MigrationVersion version : testVersions) {
		// Resolve the pre-defined resource locations for this version up front.
		String snapshotLocation = String.format(DEFAULT_SNAPSHOT_FILENAME_FORMAT, version, name);
		String testDataLocation = String.format(DEFAULT_TEST_DATA_FILENAME_FORMAT, version, name);

		TestSpecification<T> specification = TestSpecification.<T>builder(
				getSpecNameForVersion(name, version),
				serializerClass,
				snapshotClass,
				version)
			.withNewSerializerProvider(serializerProvider)
			.withSchemaCompatibilityMatcher(schemaCompatibilityMatcher)
			.withSnapshotDataLocation(snapshotLocation)
			.withTestData(testDataLocation, DEFAULT_TEST_DATA_COUNT);

		testSpecifications.add(specification);
	}
}
 
Example #3
Source File: TypeSerializerSnapshotMigrationTestBase.java    From Flink-CEPplus with Apache License 2.0 6 votes vote down vote up
/**
 * Adds a test specification to be tested for all specified test versions.
 *
 * @param name test specification name.
 * @param serializerClass class of the current serializer.
 * @param snapshotClass class of the current serializer snapshot class.
 * @param serializerProvider provider for an instance of the current serializer.
 * @param testSnapshotFilenameProvider provider for the filename of the test snapshot.
 * @param testDataFilenameProvider provider for the filename of the test data.
 * @param testDataCount expected number of records to be read in the test data files.
 *
 * @param <T> type of the test data.
 */
public <T> void add(
		String name,
		Class<? extends TypeSerializer> serializerClass,
		Class<? extends TypeSerializerSnapshot> snapshotClass,
		Supplier<? extends TypeSerializer<T>> serializerProvider,
		TestResourceFilenameSupplier testSnapshotFilenameProvider,
		TestResourceFilenameSupplier testDataFilenameProvider,
		int testDataCount) {
	for (MigrationVersion version : testVersions) {
		TestSpecification<T> specification = TestSpecification.<T>builder(
				getSpecNameForVersion(name, version),
				serializerClass,
				snapshotClass,
				version)
			.withNewSerializerProvider(serializerProvider)
			.withSnapshotDataLocation(testSnapshotFilenameProvider.get(version))
			.withTestData(testDataFilenameProvider.get(version), testDataCount);

		testSpecifications.add(specification);
	}
}
 
Example #4
Source File: TypeSerializerSnapshotMigrationTestBase.java    From Flink-CEPplus with Apache License 2.0 6 votes vote down vote up
/**
 * Adds a test specification to be tested for all specified test versions.
 *
 * <p>This method adds the specification with pre-defined snapshot and data filenames,
 * with the format "flink-&lt;testVersion&gt;-&lt;specName&gt;-&lt;data/snapshot&gt;",
 * and each specification's test data count is assumed to always be 10.
 *
 * @param name test specification name.
 * @param serializerClass class of the current serializer.
 * @param snapshotClass class of the current serializer snapshot class.
 * @param serializerProvider provider for an instance of the current serializer.
 * @param elementMatcher a Hamcrest matcher used to match the test data elements.
 *
 * @param <T> type of the test data.
 */
public <T> void add(
	String name,
	Class<? extends TypeSerializer> serializerClass,
	Class<? extends TypeSerializerSnapshot> snapshotClass,
	Supplier<? extends TypeSerializer<T>> serializerProvider,
	Matcher<T> elementMatcher) {
	for (MigrationVersion version : testVersions) {
		// Resolve the pre-defined resource locations for this version up front.
		String snapshotLocation = String.format(DEFAULT_SNAPSHOT_FILENAME_FORMAT, version, name);
		String testDataLocation = String.format(DEFAULT_TEST_DATA_FILENAME_FORMAT, version, name);

		TestSpecification<T> specification = TestSpecification.<T>builder(
				getSpecNameForVersion(name, version),
				serializerClass,
				snapshotClass,
				version)
			.withNewSerializerProvider(serializerProvider)
			.withSnapshotDataLocation(snapshotLocation)
			.withTestData(testDataLocation, DEFAULT_TEST_DATA_COUNT)
			.withTestDataMatcher(elementMatcher);

		testSpecifications.add(specification);
	}
}
 
Example #5
Source File: TypeSerializerSnapshotMigrationTestBase.java    From Flink-CEPplus with Apache License 2.0 6 votes vote down vote up
/**
 * Adds a test specification, verified with the given schema-compatibility matcher, for all
 * specified test versions.
 *
 * <p>This method adds the specification with pre-defined snapshot and data filenames,
 * with the format "flink-&lt;testVersion&gt;-&lt;specName&gt;-&lt;data/snapshot&gt;",
 * and each specification's test data count is assumed to always be 10.
 *
 * @param name test specification name.
 * @param serializerClass class of the current serializer.
 * @param snapshotClass class of the current serializer snapshot class.
 * @param serializerProvider provider for an instance of the current serializer.
 * @param schemaCompatibilityMatcher matcher applied to the resolved schema compatibility.
 *
 * @param <T> type of the test data.
 */
public <T> void addWithCompatibilityMatcher(
		String name,
		Class<? extends TypeSerializer> serializerClass,
		Class<? extends TypeSerializerSnapshot> snapshotClass,
		Supplier<? extends TypeSerializer<T>> serializerProvider,
		Matcher<TypeSerializerSchemaCompatibility<T>> schemaCompatibilityMatcher) {
	for (MigrationVersion migrationVersion : testVersions) {
		// Default resource names derived from the version and the spec name.
		String defaultSnapshotFile =
			String.format(DEFAULT_SNAPSHOT_FILENAME_FORMAT, migrationVersion, name);
		String defaultDataFile =
			String.format(DEFAULT_TEST_DATA_FILENAME_FORMAT, migrationVersion, name);

		testSpecifications.add(
			TestSpecification.<T>builder(
					getSpecNameForVersion(name, migrationVersion),
					serializerClass,
					snapshotClass,
					migrationVersion)
				.withNewSerializerProvider(serializerProvider)
				.withSchemaCompatibilityMatcher(schemaCompatibilityMatcher)
				.withSnapshotDataLocation(defaultSnapshotFile)
				.withTestData(defaultDataFile, DEFAULT_TEST_DATA_COUNT));
	}
}
 
Example #6
Source File: KryoSnapshotMigrationTest.java    From Flink-CEPplus with Apache License 2.0 6 votes vote down vote up
/**
 * Registers two parameterized test cases for {@code name} — one restoring a Kryo serializer
 * snapshot written by Flink 1.6 and one written by Flink 1.7 — and appends them to {@code all}.
 *
 * @param all collector of parameterized test cases; each entry is a single-element array.
 * @param name base name of the test case and of the snapshot/data resource files.
 * @param supplier provider for the serializer under test.
 * @param expected schema compatibility expected when restoring with the new serializer.
 */
private static void add(List<Object[]> all,
						String name, Supplier<TypeSerializer<Animal>> supplier,
						TypeSerializerSchemaCompatibility<Animal> expected) {
	// The two version-specific specifications differed only in the version constant;
	// build them through a single helper instead of duplicating the construction.
	all.add(new Object[]{specificationFor(MigrationVersion.v1_6, name, supplier, expected)});
	all.add(new Object[]{specificationFor(MigrationVersion.v1_7, name, supplier, expected)});
}

/**
 * Builds one Kryo snapshot-migration specification for the given version. Resource files
 * follow the "flink-&lt;version&gt;-&lt;name&gt;-{snapshot,data}" naming convention, and each
 * data file holds 2 records.
 */
private static TestSpecification<Animal> specificationFor(
		MigrationVersion version,
		String name,
		Supplier<TypeSerializer<Animal>> supplier,
		TypeSerializerSchemaCompatibility<Animal> expected) {
	// MigrationVersion's string form is the bare version number (e.g. "1.6"), as used by the
	// default filename formats in the test base, so this reproduces e.g. "flink-1.6-<name>".
	String resourcePrefix = "flink-" + version + "-" + name;
	return TestSpecification.<Animal>builder(
			version + " " + name,
			KryoSerializer.class,
			KryoSerializerSnapshot.class,
			version)
		.withNewSerializerProvider(supplier, expected)
		.withSnapshotDataLocation(resourcePrefix + "-snapshot")
		.withTestData(resourcePrefix + "-data", 2);
}
 
Example #7
Source File: TypeSerializerSnapshotMigrationTestBase.java    From flink with Apache License 2.0 6 votes vote down vote up
/**
 * Adds a test specification to be tested for all specified test versions.
 *
 * <p>This method adds the specification with pre-defined snapshot and data filenames,
 * with the format "flink-&lt;testVersion&gt;-&lt;specName&gt;-&lt;data/snapshot&gt;",
 * and each specification's test data count is assumed to always be 10.
 *
 * @param name test specification name.
 * @param serializerClass class of the current serializer.
 * @param snapshotClass class of the current serializer snapshot class.
 * @param serializerProvider provider for an instance of the current serializer.
 *
 * @param <T> type of the test data.
 */
public <T> void add(
		String name,
		Class<? extends TypeSerializer> serializerClass,
		Class<? extends TypeSerializerSnapshot> snapshotClass,
		Supplier<? extends TypeSerializer<T>> serializerProvider) {
	for (MigrationVersion version : testVersions) {
		// Resolve the pre-defined resource locations for this version up front.
		String snapshotLocation = String.format(DEFAULT_SNAPSHOT_FILENAME_FORMAT, version, name);
		String testDataLocation = String.format(DEFAULT_TEST_DATA_FILENAME_FORMAT, version, name);

		TestSpecification<T> specification = TestSpecification.<T>builder(
				getSpecNameForVersion(name, version),
				serializerClass,
				snapshotClass,
				version)
			.withNewSerializerProvider(serializerProvider)
			.withSnapshotDataLocation(snapshotLocation)
			.withTestData(testDataLocation, DEFAULT_TEST_DATA_COUNT);

		testSpecifications.add(specification);
	}
}
 
Example #8
Source File: TypeSerializerSnapshotMigrationTestBase.java    From flink with Apache License 2.0 6 votes vote down vote up
/**
 * Adds a test specification to be tested for all specified test versions.
 *
 * @param name test specification name.
 * @param serializerClass class of the current serializer.
 * @param snapshotClass class of the current serializer snapshot class.
 * @param serializerProvider provider for an instance of the current serializer.
 * @param testSnapshotFilenameProvider provider for the filename of the test snapshot.
 * @param testDataFilenameProvider provider for the filename of the test data.
 * @param testDataCount expected number of records to be read in the test data files.
 *
 * @param <T> type of the test data.
 */
public <T> void add(
		String name,
		Class<? extends TypeSerializer> serializerClass,
		Class<? extends TypeSerializerSnapshot> snapshotClass,
		Supplier<? extends TypeSerializer<T>> serializerProvider,
		TestResourceFilenameSupplier testSnapshotFilenameProvider,
		TestResourceFilenameSupplier testDataFilenameProvider,
		int testDataCount) {
	for (MigrationVersion migrationVersion : testVersions) {
		// Ask the suppliers for the version-specific resource filenames.
		String snapshotFile = testSnapshotFilenameProvider.get(migrationVersion);
		String dataFile = testDataFilenameProvider.get(migrationVersion);

		testSpecifications.add(
			TestSpecification.<T>builder(
					getSpecNameForVersion(name, migrationVersion),
					serializerClass,
					snapshotClass,
					migrationVersion)
				.withNewSerializerProvider(serializerProvider)
				.withSnapshotDataLocation(snapshotFile)
				.withTestData(dataFile, testDataCount));
	}
}
 
Example #9
Source File: WindowOperatorMigrationTest.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * The savepoint versions (Flink 1.2 through 1.8) to test migration from.
 */
@Parameterized.Parameters(name = "Migration Savepoint: {0}")
public static Collection<MigrationVersion> parameters () {
	MigrationVersion[] versions = {
		MigrationVersion.v1_2,
		MigrationVersion.v1_3,
		MigrationVersion.v1_4,
		MigrationVersion.v1_5,
		MigrationVersion.v1_6,
		MigrationVersion.v1_7,
		MigrationVersion.v1_8,
	};
	return Arrays.asList(versions);
}
 
Example #10
Source File: AbstractNonKeyedOperatorRestoreTestBase.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * The savepoint versions (Flink 1.2 through 1.8) to test restoring from.
 */
@Parameterized.Parameters(name = "Migrate Savepoint: {0}")
public static Collection<MigrationVersion> parameters () {
	MigrationVersion[] migrationVersions = {
		MigrationVersion.v1_2,
		MigrationVersion.v1_3,
		MigrationVersion.v1_4,
		MigrationVersion.v1_5,
		MigrationVersion.v1_6,
		MigrationVersion.v1_7,
		MigrationVersion.v1_8,
	};
	return Arrays.asList(migrationVersions);
}
 
Example #11
Source File: TypeSerializerSnapshotMigrationITCase.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Resolves the resource name of the savepoint taken with the given Flink version and
 * state backend.
 *
 * <p>NOTE(review): the case labels mirror the backend names supplied by this test's
 * {@code parameters()} method via {@code StateBackendLoader} — presumably "rocksdb" and
 * "jobmanager" are those constants' values; verify before changing either side.
 *
 * @param savepointVersion the Flink version the savepoint was written with.
 * @param backendType the state backend identifier the savepoint was written with.
 * @return the savepoint resource name.
 * @throws UnsupportedOperationException if the backend type is unknown.
 */
private String getSavepointPath(MigrationVersion savepointVersion, String backendType) {
	switch (backendType) {
		case "rocksdb":
			return "type-serializer-snapshot-migration-itcase-flink" + savepointVersion + "-rocksdb-savepoint";
		case "jobmanager":
			return "type-serializer-snapshot-migration-itcase-flink" + savepointVersion + "-savepoint";
		default:
			// Include the offending value so a bad parameterization is diagnosable.
			throw new UnsupportedOperationException("Unsupported state backend type: " + backendType);
	}
}
 
Example #12
Source File: TypeSerializerSnapshotMigrationTestBase.java    From Flink-CEPplus with Apache License 2.0 5 votes vote down vote up
/**
 * Creates a test specification; private, instances are presumably obtained through the
 * static {@code builder} factory in this class.
 *
 * @param specName name of this test specification.
 * @param serializerClass class of the serializer under test.
 * @param serializerSnapshotClass class of the serializer's snapshot.
 * @param migrationVersion the Flink version the tested snapshot was written with.
 */
private TestSpecification(
	String specName,
	Class<? extends TypeSerializer<T>> serializerClass,
	Class<? extends TypeSerializerSnapshot<T>> serializerSnapshotClass,
	MigrationVersion migrationVersion) {

	this.name = specName;
	this.serializerType = serializerClass;
	this.snapshotClass = serializerSnapshotClass;
	this.testMigrationVersion = migrationVersion;
}
 
Example #13
Source File: TypeSerializerSnapshotMigrationTestBase.java    From Flink-CEPplus with Apache License 2.0 5 votes vote down vote up
/**
 * Static factory for a {@code TestSpecification}.
 *
 * <p>The raw {@code Class} arguments are narrowed to their generic forms; the cast is
 * unchecked by nature, hence the suppression.
 *
 * @param <T> type of the test data.
 */
@SuppressWarnings("unchecked")
public static <T> TestSpecification<T> builder(
	String name,
	Class<? extends TypeSerializer> serializerClass,
	Class<? extends TypeSerializerSnapshot> snapshotClass,
	MigrationVersion testMigrationVersion) {

	Class<? extends TypeSerializer<T>> typedSerializerClass =
		(Class<? extends TypeSerializer<T>>) serializerClass;
	Class<? extends TypeSerializerSnapshot<T>> typedSnapshotClass =
		(Class<? extends TypeSerializerSnapshot<T>>) snapshotClass;

	return new TestSpecification<>(name, typedSerializerClass, typedSnapshotClass, testMigrationVersion);
}
 
Example #14
Source File: TypeSerializerSnapshotMigrationTestBase.java    From Flink-CEPplus with Apache License 2.0 5 votes vote down vote up
/**
 * Reads the serializer snapshot under test from the specification's snapshot resource.
 *
 * <p>Snapshots written by Flink 1.6 or earlier are read with the legacy (pre-1.7) format
 * reader; anything newer uses the current format.
 *
 * @return the deserialized serializer snapshot.
 */
private TypeSerializerSnapshot<ElementT> snapshotUnderTest() {
	DataInputView input = contentsOf(testSpecification.getSnapshotDataLocation());
	try {
		if (testSpecification.getTestMigrationVersion().isNewerVersionThan(MigrationVersion.v1_6)) {
			return readSnapshot(input);
		}
		return readPre17SnapshotFormat(input);
	}
	catch (IOException e) {
		throw new RuntimeException("Unable to read " + testSpecification.getSnapshotDataLocation(), e);
	}
}
 
Example #15
Source File: TypeSerializerSnapshotMigrationTestBase.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Static factory for a {@code TestSpecification}.
 *
 * <p>The raw {@code Class} arguments are narrowed to their generic forms; the cast is
 * unchecked by nature, hence the suppression.
 *
 * @param <T> type of the test data.
 */
@SuppressWarnings("unchecked")
public static <T> TestSpecification<T> builder(
	String name,
	Class<? extends TypeSerializer> serializerClass,
	Class<? extends TypeSerializerSnapshot> snapshotClass,
	MigrationVersion testMigrationVersion) {

	Class<? extends TypeSerializer<T>> castSerializerClass =
		(Class<? extends TypeSerializer<T>>) serializerClass;
	Class<? extends TypeSerializerSnapshot<T>> castSnapshotClass =
		(Class<? extends TypeSerializerSnapshot<T>>) snapshotClass;

	return new TestSpecification<>(name, castSerializerClass, castSnapshotClass, testMigrationVersion);
}
 
Example #16
Source File: StatefulJobWBroadcastStateMigrationITCase.java    From Flink-CEPplus with Apache License 2.0 5 votes vote down vote up
/**
 * Parameterizes the test with every (savepoint version, state backend) pair to migrate from.
 */
@Parameterized.Parameters(name = "Migrate Savepoint / Backend: {0}")
public static Collection<Tuple2<MigrationVersion, String>> parameters () {
	final String memory = StateBackendLoader.MEMORY_STATE_BACKEND_NAME;
	final String rocksDb = StateBackendLoader.ROCKSDB_STATE_BACKEND_NAME;
	// Versions 1.5 through 1.7, each paired with both state backends.
	return Arrays.asList(
			Tuple2.of(MigrationVersion.v1_5, memory),
			Tuple2.of(MigrationVersion.v1_5, rocksDb),
			Tuple2.of(MigrationVersion.v1_6, memory),
			Tuple2.of(MigrationVersion.v1_6, rocksDb),
			Tuple2.of(MigrationVersion.v1_7, memory),
			Tuple2.of(MigrationVersion.v1_7, rocksDb));
}
 
Example #17
Source File: StatefulJobSavepointMigrationITCase.java    From Flink-CEPplus with Apache License 2.0 5 votes vote down vote up
/**
 * Resolves the resource name of the savepoint taken with the given Flink version and
 * state backend.
 *
 * @param savepointVersion the Flink version the savepoint was written with.
 * @param backendType the state backend identifier the savepoint was written with.
 * @return the savepoint resource name.
 * @throws UnsupportedOperationException if the backend type is unknown.
 */
private String getSavepointPath(MigrationVersion savepointVersion, String backendType) {
	// Both resource names share the same version-dependent prefix.
	final String prefix = "new-stateful-udf-migration-itcase-flink" + savepointVersion;
	switch (backendType) {
		case StateBackendLoader.ROCKSDB_STATE_BACKEND_NAME:
			return prefix + "-rocksdb-savepoint";
		case StateBackendLoader.MEMORY_STATE_BACKEND_NAME:
			return prefix + "-savepoint";
		default:
			throw new UnsupportedOperationException();
	}
}
 
Example #18
Source File: AbstractNonKeyedOperatorRestoreTestBase.java    From Flink-CEPplus with Apache License 2.0 5 votes vote down vote up
/**
 * The savepoint versions (Flink 1.2 through 1.7) to test restoring from.
 */
@Parameterized.Parameters(name = "Migrate Savepoint: {0}")
public static Collection<MigrationVersion> parameters () {
	MigrationVersion[] versions = {
		MigrationVersion.v1_2,
		MigrationVersion.v1_3,
		MigrationVersion.v1_4,
		MigrationVersion.v1_5,
		MigrationVersion.v1_6,
		MigrationVersion.v1_7,
	};
	return Arrays.asList(versions);
}
 
Example #19
Source File: StatefulJobSavepointMigrationITCase.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Parameterizes the test with every (savepoint version, state backend) pair to migrate from.
 */
@Parameterized.Parameters(name = "Migrate Savepoint / Backend: {0}")
public static Collection<Tuple2<MigrationVersion, String>> parameters () {
	final String memoryBackend = StateBackendLoader.MEMORY_STATE_BACKEND_NAME;
	final String rocksDbBackend = StateBackendLoader.ROCKSDB_STATE_BACKEND_NAME;
	// Versions 1.4 through 1.8, each paired with both state backends.
	return Arrays.asList(
		Tuple2.of(MigrationVersion.v1_4, memoryBackend),
		Tuple2.of(MigrationVersion.v1_4, rocksDbBackend),
		Tuple2.of(MigrationVersion.v1_5, memoryBackend),
		Tuple2.of(MigrationVersion.v1_5, rocksDbBackend),
		Tuple2.of(MigrationVersion.v1_6, memoryBackend),
		Tuple2.of(MigrationVersion.v1_6, rocksDbBackend),
		Tuple2.of(MigrationVersion.v1_7, memoryBackend),
		Tuple2.of(MigrationVersion.v1_7, rocksDbBackend),
		Tuple2.of(MigrationVersion.v1_8, memoryBackend),
		Tuple2.of(MigrationVersion.v1_8, rocksDbBackend));
}
 
Example #20
Source File: BucketingSinkMigrationTest.java    From Flink-CEPplus with Apache License 2.0 5 votes vote down vote up
/**
 * The bucket file prefix is the absolute path to the part files, which is stored within the savepoint.
 */
@Parameterized.Parameters(name = "Migration Savepoint / Bucket Files Prefix: {0}")
public static Collection<Tuple2<MigrationVersion, String>> parameters () {
	// One (version, recorded bucket-file prefix) pair per savepoint generation run.
	Tuple2<MigrationVersion, String> v12 = Tuple2.of(
		MigrationVersion.v1_2, "/var/folders/v_/ry2wp5fx0y7c1rvr41xy9_700000gn/T/junit9160378385359106772/junit479663758539998903/1970-01-01--01/part-0-");
	Tuple2<MigrationVersion, String> v13 = Tuple2.of(
		MigrationVersion.v1_3, "/var/folders/tv/b_1d8fvx23dgk1_xs8db_95h0000gn/T/junit4273542175898623023/junit3801102997056424640/1970-01-01--01/part-0-");
	Tuple2<MigrationVersion, String> v14 = Tuple2.of(
		MigrationVersion.v1_4, "/var/folders/tv/b_1d8fvx23dgk1_xs8db_95h0000gn/T/junit3198043255809479705/junit8947526563966405708/1970-01-01--01/part-0-");
	Tuple2<MigrationVersion, String> v15 = Tuple2.of(
		MigrationVersion.v1_5, "/tmp/junit4927100426019463155/junit2465610012100182280/1970-01-01--00/part-0-");
	Tuple2<MigrationVersion, String> v16 = Tuple2.of(
		MigrationVersion.v1_6, "/tmp/junit3459711376354834545/junit5114611885650086135/1970-01-01--00/part-0-");
	Tuple2<MigrationVersion, String> v17 = Tuple2.of(
		MigrationVersion.v1_7, "/var/folders/r2/tdhx810x7yxb7q9_brnp49x40000gp/T/junit4288325607215628863/junit8132783417241536320/1970-01-01--08/part-0-");
	return Arrays.asList(v12, v13, v14, v15, v16, v17);
}
 
Example #21
Source File: FlinkKafkaConsumerBaseMigrationTest.java    From Flink-CEPplus with Apache License 2.0 5 votes vote down vote up
/**
 * The savepoint versions (Flink 1.2 through 1.7) to test migration from.
 */
@Parameterized.Parameters(name = "Migration Savepoint: {0}")
public static Collection<MigrationVersion> parameters () {
	MigrationVersion[] versions = {
		MigrationVersion.v1_2,
		MigrationVersion.v1_3,
		MigrationVersion.v1_4,
		MigrationVersion.v1_5,
		MigrationVersion.v1_6,
		MigrationVersion.v1_7,
	};
	return Arrays.asList(versions);
}
 
Example #22
Source File: BucketingSinkMigrationTest.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * The bucket file prefix is the absolute path to the part files, which is stored within the savepoint.
 */
@Parameterized.Parameters(name = "Migration Savepoint / Bucket Files Prefix: {0}")
public static Collection<Tuple2<MigrationVersion, String>> parameters () {
	// One (version, recorded bucket-file prefix) pair per savepoint generation run.
	Tuple2<MigrationVersion, String> v12 = Tuple2.of(
		MigrationVersion.v1_2, "/var/folders/v_/ry2wp5fx0y7c1rvr41xy9_700000gn/T/junit9160378385359106772/junit479663758539998903/1970-01-01--01/part-0-");
	Tuple2<MigrationVersion, String> v13 = Tuple2.of(
		MigrationVersion.v1_3, "/var/folders/tv/b_1d8fvx23dgk1_xs8db_95h0000gn/T/junit4273542175898623023/junit3801102997056424640/1970-01-01--01/part-0-");
	Tuple2<MigrationVersion, String> v14 = Tuple2.of(
		MigrationVersion.v1_4, "/var/folders/tv/b_1d8fvx23dgk1_xs8db_95h0000gn/T/junit3198043255809479705/junit8947526563966405708/1970-01-01--01/part-0-");
	Tuple2<MigrationVersion, String> v15 = Tuple2.of(
		MigrationVersion.v1_5, "/tmp/junit4927100426019463155/junit2465610012100182280/1970-01-01--00/part-0-");
	Tuple2<MigrationVersion, String> v16 = Tuple2.of(
		MigrationVersion.v1_6, "/tmp/junit3459711376354834545/junit5114611885650086135/1970-01-01--00/part-0-");
	Tuple2<MigrationVersion, String> v17 = Tuple2.of(
		MigrationVersion.v1_7, "/var/folders/r2/tdhx810x7yxb7q9_brnp49x40000gp/T/junit4288325607215628863/junit8132783417241536320/1970-01-01--08/part-0-");
	Tuple2<MigrationVersion, String> v18 = Tuple2.of(
		MigrationVersion.v1_8, "/var/folders/rc/84k970r94nz456tb9cdlt30s1j0k94/T/junit7271027454784776053/junit5108755539355247469/1970-01-01--08/part-0-");
	return Arrays.asList(v12, v13, v14, v15, v16, v17, v18);
}
 
Example #23
Source File: CEPMigrationTest.java    From Flink-CEPplus with Apache License 2.0 5 votes vote down vote up
/**
 * The savepoint versions (Flink 1.3 through 1.7) to test migration from.
 */
@Parameterized.Parameters(name = "Migration Savepoint: {0}")
public static Collection<MigrationVersion> parameters () {
	MigrationVersion[] versions = {
		MigrationVersion.v1_3,
		MigrationVersion.v1_4,
		MigrationVersion.v1_5,
		MigrationVersion.v1_6,
		MigrationVersion.v1_7,
	};
	return Arrays.asList(versions);
}
 
Example #24
Source File: ContinuousFileProcessingMigrationTest.java    From Flink-CEPplus with Apache License 2.0 5 votes vote down vote up
/**
 * Parameterizes the test with (savepoint version, recorded modification time) pairs.
 */
@Parameterized.Parameters(name = "Migration Savepoint / Mod Time: {0}")
public static Collection<Tuple2<MigrationVersion, Long>> parameters () {
	// Each mod time is the file modification timestamp recorded when that savepoint was created.
	Tuple2<MigrationVersion, Long> v12 = Tuple2.of(MigrationVersion.v1_2, 1493116191000L);
	Tuple2<MigrationVersion, Long> v13 = Tuple2.of(MigrationVersion.v1_3, 1496532000000L);
	Tuple2<MigrationVersion, Long> v14 = Tuple2.of(MigrationVersion.v1_4, 1516897628000L);
	Tuple2<MigrationVersion, Long> v15 = Tuple2.of(MigrationVersion.v1_5, 1533639934000L);
	Tuple2<MigrationVersion, Long> v16 = Tuple2.of(MigrationVersion.v1_6, 1534696817000L);
	Tuple2<MigrationVersion, Long> v17 = Tuple2.of(MigrationVersion.v1_7, 1544024599000L);
	return Arrays.asList(v12, v13, v14, v15, v16, v17);
}
 
Example #25
Source File: StatefulJobWBroadcastStateMigrationITCase.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Parameterizes the test with every (savepoint version, state backend) pair to migrate from.
 */
@Parameterized.Parameters(name = "Migrate Savepoint / Backend: {0}")
public static Collection<Tuple2<MigrationVersion, String>> parameters () {
	final String memory = StateBackendLoader.MEMORY_STATE_BACKEND_NAME;
	final String rocksDb = StateBackendLoader.ROCKSDB_STATE_BACKEND_NAME;
	// Versions 1.5 through 1.8, each paired with both state backends.
	return Arrays.asList(
			Tuple2.of(MigrationVersion.v1_5, memory),
			Tuple2.of(MigrationVersion.v1_5, rocksDb),
			Tuple2.of(MigrationVersion.v1_6, memory),
			Tuple2.of(MigrationVersion.v1_6, rocksDb),
			Tuple2.of(MigrationVersion.v1_7, memory),
			Tuple2.of(MigrationVersion.v1_7, rocksDb),
			Tuple2.of(MigrationVersion.v1_8, memory),
			Tuple2.of(MigrationVersion.v1_8, rocksDb));
}
 
Example #26
Source File: FlinkKafkaConsumerBaseMigrationTest.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Test restoring from savepoints before version Flink 1.3 should fail if discovery is enabled.
 */
@Test
public void testRestoreFailsWithNonEmptyPreFlink13StatesIfDiscoveryEnabled() throws Exception {
	// Only applicable to savepoints written by Flink 1.2 / 1.3; skip for newer versions.
	assumeTrue(testMigrateVersion == MigrationVersion.v1_3 || testMigrateVersion == MigrationVersion.v1_2);

	final List<KafkaTopicPartition> partitions = new ArrayList<>(PARTITION_STATE.keySet());

	final DummyFlinkKafkaConsumer<String> consumerFunction =
		new DummyFlinkKafkaConsumer<>(TOPICS, partitions, 1000L); // discovery enabled

	StreamSource<String, DummyFlinkKafkaConsumer<String>> consumerOperator =
		new StreamSource<>(consumerFunction);

	final AbstractStreamOperatorTestHarness<String> testHarness =
		new AbstractStreamOperatorTestHarness<>(consumerOperator, 1, 1, 0);

	testHarness.setTimeCharacteristic(TimeCharacteristic.ProcessingTime);

	testHarness.setup();

	// restore state from binary snapshot file; should fail since discovery is enabled
	try {
		testHarness.initializeState(
			OperatorSnapshotUtil.getResourceFilename(
				"kafka-consumer-migration-test-flink" + testMigrateVersion + "-snapshot"));

		fail("Restore from savepoints from version before Flink 1.3.x should have failed if discovery is enabled.");
	} catch (Exception e) {
		// Assert with the actual exception in the message so an unexpected exception
		// type is diagnosable from the test report instead of a bare "assertTrue failed".
		Assert.assertTrue(
			"Expected IllegalArgumentException, but got: " + e,
			e instanceof IllegalArgumentException);
	}
}
 
Example #27
Source File: FlinkKafkaConsumerBaseMigrationTest.java    From Flink-CEPplus with Apache License 2.0 5 votes vote down vote up
/**
 * Test restoring from savepoints before version Flink 1.3 should fail if discovery is enabled.
 */
@Test
public void testRestoreFailsWithNonEmptyPreFlink13StatesIfDiscoveryEnabled() throws Exception {
	// Only applicable to savepoints written by Flink 1.2 / 1.3; skip for newer versions.
	assumeTrue(testMigrateVersion == MigrationVersion.v1_3 || testMigrateVersion == MigrationVersion.v1_2);

	final List<KafkaTopicPartition> partitions = new ArrayList<>(PARTITION_STATE.keySet());

	final DummyFlinkKafkaConsumer<String> consumerFunction =
		new DummyFlinkKafkaConsumer<>(TOPICS, partitions, 1000L); // discovery enabled

	StreamSource<String, DummyFlinkKafkaConsumer<String>> consumerOperator =
		new StreamSource<>(consumerFunction);

	final AbstractStreamOperatorTestHarness<String> testHarness =
		new AbstractStreamOperatorTestHarness<>(consumerOperator, 1, 1, 0);

	testHarness.setTimeCharacteristic(TimeCharacteristic.ProcessingTime);

	testHarness.setup();

	// restore state from binary snapshot file; should fail since discovery is enabled
	try {
		testHarness.initializeState(
			OperatorSnapshotUtil.getResourceFilename(
				"kafka-consumer-migration-test-flink" + testMigrateVersion + "-snapshot"));

		fail("Restore from savepoints from version before Flink 1.3.x should have failed if discovery is enabled.");
	} catch (Exception e) {
		// Assert with the actual exception in the message so an unexpected exception
		// type is diagnosable from the test report instead of a bare "assertTrue failed".
		Assert.assertTrue(
			"Expected IllegalArgumentException, but got: " + e,
			e instanceof IllegalArgumentException);
	}
}
 
Example #28
Source File: FlinkKafkaConsumerBaseMigrationTest.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * The savepoint versions (Flink 1.2 through 1.8) to test migration from.
 */
@Parameterized.Parameters(name = "Migration Savepoint: {0}")
public static Collection<MigrationVersion> parameters () {
	MigrationVersion[] versions = {
		MigrationVersion.v1_2,
		MigrationVersion.v1_3,
		MigrationVersion.v1_4,
		MigrationVersion.v1_5,
		MigrationVersion.v1_6,
		MigrationVersion.v1_7,
		MigrationVersion.v1_8,
	};
	return Arrays.asList(versions);
}
 
Example #29
Source File: ContinuousFileProcessingMigrationTest.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Parameterizes the test with (savepoint version, recorded modification time) pairs.
 */
@Parameterized.Parameters(name = "Migration Savepoint / Mod Time: {0}")
public static Collection<Tuple2<MigrationVersion, Long>> parameters () {
	// Each mod time is the file modification timestamp recorded when that savepoint was created.
	Tuple2<MigrationVersion, Long> v12 = Tuple2.of(MigrationVersion.v1_2, 1493116191000L);
	Tuple2<MigrationVersion, Long> v13 = Tuple2.of(MigrationVersion.v1_3, 1496532000000L);
	Tuple2<MigrationVersion, Long> v14 = Tuple2.of(MigrationVersion.v1_4, 1516897628000L);
	Tuple2<MigrationVersion, Long> v15 = Tuple2.of(MigrationVersion.v1_5, 1533639934000L);
	Tuple2<MigrationVersion, Long> v16 = Tuple2.of(MigrationVersion.v1_6, 1534696817000L);
	Tuple2<MigrationVersion, Long> v17 = Tuple2.of(MigrationVersion.v1_7, 1544024599000L);
	Tuple2<MigrationVersion, Long> v18 = Tuple2.of(MigrationVersion.v1_8, 1555215710000L);
	return Arrays.asList(v12, v13, v14, v15, v16, v17, v18);
}
 
Example #30
Source File: AbstractKeyedOperatorRestoreTestBase.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * The savepoint versions (Flink 1.2 through 1.8) to test restoring from.
 */
@Parameterized.Parameters(name = "Migrate Savepoint: {0}")
public static Collection<MigrationVersion> parameters () {
	MigrationVersion[] versions = {
		MigrationVersion.v1_2,
		MigrationVersion.v1_3,
		MigrationVersion.v1_4,
		MigrationVersion.v1_5,
		MigrationVersion.v1_6,
		MigrationVersion.v1_7,
		MigrationVersion.v1_8,
	};
	return Arrays.asList(versions);
}