Java Code Examples for org.apache.flink.testutils.migration.MigrationVersion

The following examples show how to use org.apache.flink.testutils.migration.MigrationVersion. The examples are extracted from open source projects; where available, the originating project, source file, and license are noted above the example.
Example 1
/**
 * Adds a test specification to be tested for all specified test versions.
 *
 * @param name test specification name.
 * @param serializerClass class of the current serializer.
 * @param snapshotClass class of the current serializer's snapshot.
 * @param serializerProvider provider for an instance of the current serializer.
 * @param testSnapshotFilenameProvider provider for the filename of the test snapshot.
 * @param testDataFilenameProvider provider for the filename of the test data.
 * @param testDataCount expected number of records to be read in the test data files.
 *
 * @param <T> type of the test data.
 */
public <T> void add(
		String name,
		Class<? extends TypeSerializer> serializerClass,
		Class<? extends TypeSerializerSnapshot> snapshotClass,
		Supplier<? extends TypeSerializer<T>> serializerProvider,
		TestResourceFilenameSupplier testSnapshotFilenameProvider,
		TestResourceFilenameSupplier testDataFilenameProvider,
		int testDataCount) {
	for (MigrationVersion testVersion : testVersions) {
		testSpecifications.add(
			TestSpecification.<T>builder(
				getSpecNameForVersion(name, testVersion),
				serializerClass,
				snapshotClass,
				testVersion)
			.withNewSerializerProvider(serializerProvider)
			.withSnapshotDataLocation(testSnapshotFilenameProvider.get(testVersion))
			.withTestData(testDataFilenameProvider.get(testVersion), testDataCount)
		);
	}
}
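 
A caller supplies the two filename providers as lambdas that build a version-specific resource name. The snippet below is a minimal, hypothetical usage sketch: the container variable specs, the spec name, the serializer classes, and the resource-name patterns are illustrative assumptions, not taken from the Flink sources.

// Hypothetical usage of the overload above: register "int-serializer" specs
// for every configured test version. Resource-name patterns are assumptions.
specs.add(
	"int-serializer",
	IntSerializer.class,
	IntSerializer.IntSerializerSnapshot.class,
	() -> IntSerializer.INSTANCE,
	version -> "flink-" + version + "-int-serializer-snapshot",
	version -> "flink-" + version + "-int-serializer-data",
	10);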
 
Example 2
/**
 * Adds a test specification to be tested for all specified test versions.
 *
 * <p>This method adds the specification with pre-defined snapshot and data filenames,
 * with the format "flink-&lt;testVersion&gt;-&lt;specName&gt;-&lt;data/snapshot&gt;",
 * and each specification's test data count is assumed to always be 10.
 *
 * @param name test specification name.
 * @param serializerClass class of the current serializer.
 * @param snapshotClass class of the current serializer's snapshot.
 * @param serializerProvider provider for an instance of the current serializer.
 *
 * @param <T> type of the test data.
 */
public <T> void add(
		String name,
		Class<? extends TypeSerializer> serializerClass,
		Class<? extends TypeSerializerSnapshot> snapshotClass,
		Supplier<? extends TypeSerializer<T>> serializerProvider) {
	for (MigrationVersion testVersion : testVersions) {
		testSpecifications.add(
			TestSpecification.<T>builder(
				getSpecNameForVersion(name, testVersion),
				serializerClass,
				snapshotClass,
				testVersion)
				.withNewSerializerProvider(serializerProvider)
				.withSnapshotDataLocation(
					String.format(DEFAULT_SNAPSHOT_FILENAME_FORMAT, testVersion, name))
				.withTestData(
					String.format(DEFAULT_TEST_DATA_FILENAME_FORMAT, testVersion, name),
					DEFAULT_TEST_DATA_COUNT)
		);
	}
}
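 
The DEFAULT_* constants used above are not part of this excerpt. Declarations along the following lines would match the filename format and default count described in the Javadoc; the exact definitions in the test base may differ.

// Hypothetical declarations consistent with the Javadoc above; the two "%s"
// placeholders are filled with the MigrationVersion and the spec name.
private static final String DEFAULT_SNAPSHOT_FILENAME_FORMAT = "flink-%s-%s-snapshot";
private static final String DEFAULT_TEST_DATA_FILENAME_FORMAT = "flink-%s-%s-data";
private static final int DEFAULT_TEST_DATA_COUNT = 10;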
 
Example 3
Source Project: Flink-CEPplus   Source File: KryoSnapshotMigrationTest.java   License: Apache License 2.0
private static void add(List<Object[]> all,
						String name, Supplier<TypeSerializer<Animal>> supplier,
						TypeSerializerSchemaCompatibility<Animal> expected) {

	TestSpecification<Animal> flink16 = TestSpecification.<Animal>builder(
		MigrationVersion.v1_6 + " " + name,
		KryoSerializer.class,
		KryoSerializerSnapshot.class,
		MigrationVersion.v1_6)
		.withNewSerializerProvider(supplier, expected)
		.withSnapshotDataLocation("flink-1.6-" + name + "-snapshot")
		.withTestData("flink-1.6-" + name + "-data", 2);

	TestSpecification<Animal> flink17 = TestSpecification.<Animal>builder(
		MigrationVersion.v1_7 + " " + name,
		KryoSerializer.class,
		KryoSerializerSnapshot.class,
		MigrationVersion.v1_7)
		.withNewSerializerProvider(supplier, expected)
		.withSnapshotDataLocation("flink-1.7-" + name + "-snapshot")
		.withTestData("flink-1.7-" + name + "-data", 2);

	all.add(new Object[]{flink16});
	all.add(new Object[]{flink17});
}
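 
The helper above builds one Object[] entry per Flink version for a JUnit parameterized test. A factory method driving it might look like the following sketch; the method name, spec name, and expected compatibility are illustrative assumptions rather than the actual KryoSnapshotMigrationTest code.

@Parameterized.Parameters(name = "Test Specification = {0}")
public static Collection<Object[]> testSpecifications() {
	// Illustrative only: one spec with a Kryo serializer supplier and an
	// assumed "compatible as is" expectation.
	List<Object[]> all = new ArrayList<>();
	add(all, "kryo-type-serializer-animal",
		() -> new KryoSerializer<>(Animal.class, new ExecutionConfig()),
		TypeSerializerSchemaCompatibility.compatibleAsIs());
	return all;
}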
 
Example 4
/**
 * Adds a test specification to be tested for all specified test versions.
 *
 * <p>This method adds the specification with pre-defined snapshot and data filenames,
 * with the format "flink-&lt;testVersion&gt;-&lt;specName&gt;-&lt;data/snapshot&gt;",
 * and each specification's test data count is assumed to always be 10.
 *
 * @param <T> type of the test data.
 */
public <T> void addWithCompatibilityMatcher(
		String name,
		Class<? extends TypeSerializer> serializerClass,
		Class<? extends TypeSerializerSnapshot> snapshotClass,
		Supplier<? extends TypeSerializer<T>> serializerProvider,
		Matcher<TypeSerializerSchemaCompatibility<T>> schemaCompatibilityMatcher) {
	for (MigrationVersion testVersion : testVersions) {
		testSpecifications.add(
				TestSpecification.<T>builder(
						getSpecNameForVersion(name, testVersion),
						serializerClass,
						snapshotClass,
						testVersion)
						.withNewSerializerProvider(serializerProvider)
						.withSchemaCompatibilityMatcher(schemaCompatibilityMatcher)
						.withSnapshotDataLocation(
								String.format(DEFAULT_SNAPSHOT_FILENAME_FORMAT, testVersion, name))
						.withTestData(
								String.format(DEFAULT_TEST_DATA_FILENAME_FORMAT, testVersion, name),
								DEFAULT_TEST_DATA_COUNT)
		);
	}
}
 
Example 5
/**
 * Adds a test specification to be tested for all specified test versions.
 *
 * <p>This method adds the specification with pre-defined snapshot and data filenames,
 * with the format "flink-&lt;testVersion&gt;-&lt;specName&gt;-&lt;data/snapshot&gt;",
 * and each specification's test data count is assumed to always be 10.
 *
 * @param name test specification name.
 * @param serializerClass class of the current serializer.
 * @param snapshotClass class of the current serializer's snapshot.
 * @param serializerProvider provider for an instance of the current serializer.
 * @param elementMatcher a {@code hamcrest} matcher used to match the test data.
 *
 * @param <T> type of the test data.
 */
public <T> void add(
	String name,
	Class<? extends TypeSerializer> serializerClass,
	Class<? extends TypeSerializerSnapshot> snapshotClass,
	Supplier<? extends TypeSerializer<T>> serializerProvider,
	Matcher<T> elementMatcher) {
	for (MigrationVersion testVersion : testVersions) {
		testSpecifications.add(
			TestSpecification.<T>builder(
				getSpecNameForVersion(name, testVersion),
				serializerClass,
				snapshotClass,
				testVersion)
				.withNewSerializerProvider(serializerProvider)
				.withSnapshotDataLocation(
					String.format(DEFAULT_SNAPSHOT_FILENAME_FORMAT, testVersion, name))
				.withTestData(
					String.format(DEFAULT_TEST_DATA_FILENAME_FORMAT, testVersion, name),
					DEFAULT_TEST_DATA_COUNT)
			.withTestDataMatcher(elementMatcher)
		);
	}
}
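 
A call to this overload could look like the sketch below; the serializer, its nested snapshot class, and the Hamcrest matcher are assumptions chosen for illustration.

// Hypothetical call: the matcher is applied to each element read back from the
// test data file.
specs.add(
	"string-serializer",
	StringSerializer.class,
	StringSerializer.StringSerializerSnapshot.class,
	() -> StringSerializer.INSTANCE,
	org.hamcrest.Matchers.notNullValue(String.class));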
 
Example 6
/**
 * Adds a test specification to be tested for all specified test versions.
 *
 * @param name test specification name.
 * @param serializerClass class of the current serializer.
 * @param snapshotClass class of the current serializer's snapshot.
 * @param serializerProvider provider for an instance of the current serializer.
 * @param testSnapshotFilenameProvider provider for the filename of the test snapshot.
 * @param testDataFilenameProvider provider for the filename of the test data.
 * @param testDataCount expected number of records to be read in the test data files.
 *
 * @param <T> type of the test data.
 */
public <T> void add(
		String name,
		Class<? extends TypeSerializer> serializerClass,
		Class<? extends TypeSerializerSnapshot> snapshotClass,
		Supplier<? extends TypeSerializer<T>> serializerProvider,
		TestResourceFilenameSupplier testSnapshotFilenameProvider,
		TestResourceFilenameSupplier testDataFilenameProvider,
		int testDataCount) {
	for (MigrationVersion testVersion : testVersions) {
		testSpecifications.add(
			TestSpecification.<T>builder(
				getSpecNameForVersion(name, testVersion),
				serializerClass,
				snapshotClass,
				testVersion)
			.withNewSerializerProvider(serializerProvider)
			.withSnapshotDataLocation(testSnapshotFilenameProvider.get(testVersion))
			.withTestData(testDataFilenameProvider.get(testVersion), testDataCount)
		);
	}
}
 
Example 7
/**
 * Adds a test specification to be tested for all specified test versions.
 *
 * <p>This method adds the specification with pre-defined snapshot and data filenames,
 * with the format "flink-&lt;testVersion&gt;-&lt;specName&gt;-&lt;data/snapshot&gt;",
 * and each specification's test data count is assumed to always be 10.
 *
 * @param <T> type of the test data.
 */
public <T> void addWithCompatibilityMatcher(
		String name,
		Class<? extends TypeSerializer> serializerClass,
		Class<? extends TypeSerializerSnapshot> snapshotClass,
		Supplier<? extends TypeSerializer<T>> serializerProvider,
		Matcher<TypeSerializerSchemaCompatibility<T>> schemaCompatibilityMatcher) {
	for (MigrationVersion testVersion : testVersions) {
		testSpecifications.add(
				TestSpecification.<T>builder(
						getSpecNameForVersion(name, testVersion),
						serializerClass,
						snapshotClass,
						testVersion)
						.withNewSerializerProvider(serializerProvider)
						.withSchemaCompatibilityMatcher(schemaCompatibilityMatcher)
						.withSnapshotDataLocation(
								String.format(DEFAULT_SNAPSHOT_FILENAME_FORMAT, testVersion, name))
						.withTestData(
								String.format(DEFAULT_TEST_DATA_FILENAME_FORMAT, testVersion, name),
								DEFAULT_TEST_DATA_COUNT)
		);
	}
}
 
Example 8
@Parameterized.Parameters(name = "Migrate Savepoint / Backend: {0}")
public static Collection<Tuple2<MigrationVersion, String>> parameters () {
	return Arrays.asList(
		Tuple2.of(MigrationVersion.v1_3, StateBackendLoader.MEMORY_STATE_BACKEND_NAME),
		Tuple2.of(MigrationVersion.v1_3, StateBackendLoader.ROCKSDB_STATE_BACKEND_NAME),
		Tuple2.of(MigrationVersion.v1_4, StateBackendLoader.MEMORY_STATE_BACKEND_NAME),
		Tuple2.of(MigrationVersion.v1_4, StateBackendLoader.ROCKSDB_STATE_BACKEND_NAME),
		Tuple2.of(MigrationVersion.v1_5, StateBackendLoader.MEMORY_STATE_BACKEND_NAME),
		Tuple2.of(MigrationVersion.v1_5, StateBackendLoader.ROCKSDB_STATE_BACKEND_NAME),
		Tuple2.of(MigrationVersion.v1_6, StateBackendLoader.MEMORY_STATE_BACKEND_NAME),
		Tuple2.of(MigrationVersion.v1_6, StateBackendLoader.ROCKSDB_STATE_BACKEND_NAME),
		Tuple2.of(MigrationVersion.v1_7, StateBackendLoader.MEMORY_STATE_BACKEND_NAME),
		Tuple2.of(MigrationVersion.v1_7, StateBackendLoader.ROCKSDB_STATE_BACKEND_NAME),
		Tuple2.of(MigrationVersion.v1_8, StateBackendLoader.MEMORY_STATE_BACKEND_NAME),
		Tuple2.of(MigrationVersion.v1_8, StateBackendLoader.ROCKSDB_STATE_BACKEND_NAME));
}
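 
The parameterized runner passes each Tuple2 to the test constructor, which typically unpacks the migration version and the state backend name into fields. A hypothetical constructor (the class and field names are illustrative):

// Illustrative constructor; the runner invokes it once per Tuple2 above.
public MySavepointMigrationITCase(Tuple2<MigrationVersion, String> versionAndBackend) {
	this.testMigrateVersion = versionAndBackend.f0;
	this.testStateBackendName = versionAndBackend.f1;
}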
 
Example 9
@Parameterized.Parameters(name = "Migration Savepoint: {0}")
public static Collection<MigrationVersion> parameters () {
	return Arrays.asList(
		MigrationVersion.v1_2,
		MigrationVersion.v1_3,
		MigrationVersion.v1_4,
		MigrationVersion.v1_5,
		MigrationVersion.v1_6,
		MigrationVersion.v1_7);
}
 
Example 10
/**
 * Tests that restoring from savepoints taken before Flink 1.3 fails if discovery is enabled.
 */
@Test
public void testRestoreFailsWithNonEmptyPreFlink13StatesIfDiscoveryEnabled() throws Exception {
	assumeTrue(testMigrateVersion == MigrationVersion.v1_3 || testMigrateVersion == MigrationVersion.v1_2);

	final List<KafkaTopicPartition> partitions = new ArrayList<>(PARTITION_STATE.keySet());

	final DummyFlinkKafkaConsumer<String> consumerFunction =
		new DummyFlinkKafkaConsumer<>(TOPICS, partitions, 1000L); // discovery enabled

	StreamSource<String, DummyFlinkKafkaConsumer<String>> consumerOperator =
		new StreamSource<>(consumerFunction);

	final AbstractStreamOperatorTestHarness<String> testHarness =
		new AbstractStreamOperatorTestHarness<>(consumerOperator, 1, 1, 0);

	testHarness.setTimeCharacteristic(TimeCharacteristic.ProcessingTime);

	testHarness.setup();

	// restore state from binary snapshot file; should fail since discovery is enabled
	try {
		testHarness.initializeState(
			OperatorSnapshotUtil.getResourceFilename(
				"kafka-consumer-migration-test-flink" + testMigrateVersion + "-snapshot"));

		fail("Restore from savepoints from version before Flink 1.3.x should have failed if discovery is enabled.");
	} catch (Exception e) {
		Assert.assertTrue(e instanceof IllegalArgumentException);
	}
}
 
Example 11
Source Project: Flink-CEPplus   Source File: BucketingSinkMigrationTest.java   License: Apache License 2.0
/**
 * The bucket file prefix is the absolute path to the part files, which is stored within the savepoint.
 */
@Parameterized.Parameters(name = "Migration Savepoint / Bucket Files Prefix: {0}")
public static Collection<Tuple2<MigrationVersion, String>> parameters () {
	return Arrays.asList(
		Tuple2.of(MigrationVersion.v1_2, "/var/folders/v_/ry2wp5fx0y7c1rvr41xy9_700000gn/T/junit9160378385359106772/junit479663758539998903/1970-01-01--01/part-0-"),
		Tuple2.of(MigrationVersion.v1_3, "/var/folders/tv/b_1d8fvx23dgk1_xs8db_95h0000gn/T/junit4273542175898623023/junit3801102997056424640/1970-01-01--01/part-0-"),
		Tuple2.of(MigrationVersion.v1_4, "/var/folders/tv/b_1d8fvx23dgk1_xs8db_95h0000gn/T/junit3198043255809479705/junit8947526563966405708/1970-01-01--01/part-0-"),
		Tuple2.of(MigrationVersion.v1_5, "/tmp/junit4927100426019463155/junit2465610012100182280/1970-01-01--00/part-0-"),
		Tuple2.of(MigrationVersion.v1_6, "/tmp/junit3459711376354834545/junit5114611885650086135/1970-01-01--00/part-0-"),
		Tuple2.of(MigrationVersion.v1_7, "/var/folders/r2/tdhx810x7yxb7q9_brnp49x40000gp/T/junit4288325607215628863/junit8132783417241536320/1970-01-01--08/part-0-"));
}
 
Example 12
@Parameterized.Parameters(name = "Migrate Savepoint / Backend: {0}")
public static Collection<Tuple2<MigrationVersion, String>> parameters () {
	return Arrays.asList(
		Tuple2.of(MigrationVersion.v1_3, StateBackendLoader.MEMORY_STATE_BACKEND_NAME),
		Tuple2.of(MigrationVersion.v1_3, StateBackendLoader.ROCKSDB_STATE_BACKEND_NAME),
		Tuple2.of(MigrationVersion.v1_4, StateBackendLoader.MEMORY_STATE_BACKEND_NAME),
		Tuple2.of(MigrationVersion.v1_4, StateBackendLoader.ROCKSDB_STATE_BACKEND_NAME),
		Tuple2.of(MigrationVersion.v1_5, StateBackendLoader.MEMORY_STATE_BACKEND_NAME),
		Tuple2.of(MigrationVersion.v1_5, StateBackendLoader.ROCKSDB_STATE_BACKEND_NAME),
		Tuple2.of(MigrationVersion.v1_6, StateBackendLoader.MEMORY_STATE_BACKEND_NAME),
		Tuple2.of(MigrationVersion.v1_6, StateBackendLoader.ROCKSDB_STATE_BACKEND_NAME),
		Tuple2.of(MigrationVersion.v1_7, StateBackendLoader.MEMORY_STATE_BACKEND_NAME),
		Tuple2.of(MigrationVersion.v1_7, StateBackendLoader.ROCKSDB_STATE_BACKEND_NAME));
}
 
Example 13
private String getSavepointPath(MigrationVersion savepointVersion, String backendType) {
	switch (backendType) {
		case "rocksdb":
			return "type-serializer-snapshot-migration-itcase-flink" + savepointVersion + "-rocksdb-savepoint";
		case "jobmanager":
			return "type-serializer-snapshot-migration-itcase-flink" + savepointVersion + "-savepoint";
		default:
			throw new UnsupportedOperationException();
	}
}
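 
The returned value is a test-resource name rather than an absolute path. Assuming MigrationVersion.v1_7 renders as "1.7", a call resolves roughly as follows (illustrative):

// Illustrative resolution of the helper above.
String resource = getSavepointPath(MigrationVersion.v1_7, "rocksdb");
// resource == "type-serializer-snapshot-migration-itcase-flink1.7-rocksdb-savepoint"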
 
Example 14
@Parameterized.Parameters(name = "Migrate Savepoint: {0}")
public static Collection<MigrationVersion> parameters () {
	return Arrays.asList(
		MigrationVersion.v1_2,
		MigrationVersion.v1_3,
		MigrationVersion.v1_4,
		MigrationVersion.v1_5,
		MigrationVersion.v1_6,
		MigrationVersion.v1_7);
}
 
Example 15
@Parameterized.Parameters(name = "Migrate Savepoint: {0}")
public static Collection<MigrationVersion> parameters () {
	return Arrays.asList(
		MigrationVersion.v1_2,
		MigrationVersion.v1_3,
		MigrationVersion.v1_4,
		MigrationVersion.v1_5,
		MigrationVersion.v1_6,
		MigrationVersion.v1_7);
}
 
Example 16
private String getSavepointPath(MigrationVersion savepointVersion, String backendType) {
	switch (backendType) {
		case StateBackendLoader.ROCKSDB_STATE_BACKEND_NAME:
			return "stateful-udf-migration-itcase-flink" + savepointVersion + "-rocksdb-savepoint";
		case StateBackendLoader.MEMORY_STATE_BACKEND_NAME:
			return "stateful-udf-migration-itcase-flink" + savepointVersion + "-savepoint";
		default:
			throw new UnsupportedOperationException();
	}
}
 
Example 17
@Parameterized.Parameters(name = "Migrate Savepoint / Backend: {0}")
public static Collection<Tuple2<MigrationVersion, String>> parameters () {
	return Arrays.asList(
		Tuple2.of(MigrationVersion.v1_2, StateBackendLoader.MEMORY_STATE_BACKEND_NAME),
		Tuple2.of(MigrationVersion.v1_2, StateBackendLoader.ROCKSDB_STATE_BACKEND_NAME),
		Tuple2.of(MigrationVersion.v1_3, StateBackendLoader.MEMORY_STATE_BACKEND_NAME),
		Tuple2.of(MigrationVersion.v1_3, StateBackendLoader.ROCKSDB_STATE_BACKEND_NAME),
		Tuple2.of(MigrationVersion.v1_4, StateBackendLoader.MEMORY_STATE_BACKEND_NAME),
		Tuple2.of(MigrationVersion.v1_4, StateBackendLoader.ROCKSDB_STATE_BACKEND_NAME));
}
 
Example 18
private String getSavepointPath(MigrationVersion savepointVersion, String backendType) {
	switch (backendType) {
		case StateBackendLoader.ROCKSDB_STATE_BACKEND_NAME:
			return "stateful-udf-migration-itcase-flink" + savepointVersion + "-rocksdb-savepoint";
		case StateBackendLoader.MEMORY_STATE_BACKEND_NAME:
			return "stateful-udf-migration-itcase-flink" + savepointVersion + "-savepoint";
		default:
			throw new UnsupportedOperationException();
	}
}
 
Example 19
@Parameterized.Parameters(name = "Migrate Savepoint / Backend: {0}")
public static Collection<Tuple2<MigrationVersion, String>> parameters () {
	return Arrays.asList(
		Tuple2.of(MigrationVersion.v1_4, StateBackendLoader.MEMORY_STATE_BACKEND_NAME),
		Tuple2.of(MigrationVersion.v1_4, StateBackendLoader.ROCKSDB_STATE_BACKEND_NAME),
		Tuple2.of(MigrationVersion.v1_5, StateBackendLoader.MEMORY_STATE_BACKEND_NAME),
		Tuple2.of(MigrationVersion.v1_5, StateBackendLoader.ROCKSDB_STATE_BACKEND_NAME),
		Tuple2.of(MigrationVersion.v1_6, StateBackendLoader.MEMORY_STATE_BACKEND_NAME),
		Tuple2.of(MigrationVersion.v1_6, StateBackendLoader.ROCKSDB_STATE_BACKEND_NAME),
		Tuple2.of(MigrationVersion.v1_7, StateBackendLoader.MEMORY_STATE_BACKEND_NAME),
		Tuple2.of(MigrationVersion.v1_7, StateBackendLoader.ROCKSDB_STATE_BACKEND_NAME));
}
 
Example 20
private String getSavepointPath(MigrationVersion savepointVersion, String backendType) {
	switch (backendType) {
		case StateBackendLoader.ROCKSDB_STATE_BACKEND_NAME:
			return "new-stateful-udf-migration-itcase-flink" + savepointVersion + "-rocksdb-savepoint";
		case StateBackendLoader.MEMORY_STATE_BACKEND_NAME:
			return "new-stateful-udf-migration-itcase-flink" + savepointVersion + "-savepoint";
		default:
			throw new UnsupportedOperationException();
	}
}
 
Example 21
@Parameterized.Parameters(name = "Migrate Savepoint / Backend: {0}")
public static Collection<Tuple2<MigrationVersion, String>> parameters () {
	return Arrays.asList(
			Tuple2.of(MigrationVersion.v1_5, StateBackendLoader.MEMORY_STATE_BACKEND_NAME),
			Tuple2.of(MigrationVersion.v1_5, StateBackendLoader.ROCKSDB_STATE_BACKEND_NAME),
			Tuple2.of(MigrationVersion.v1_6, StateBackendLoader.MEMORY_STATE_BACKEND_NAME),
			Tuple2.of(MigrationVersion.v1_6, StateBackendLoader.ROCKSDB_STATE_BACKEND_NAME),
			Tuple2.of(MigrationVersion.v1_7, StateBackendLoader.MEMORY_STATE_BACKEND_NAME),
			Tuple2.of(MigrationVersion.v1_7, StateBackendLoader.ROCKSDB_STATE_BACKEND_NAME));
}
 
Example 22
@SuppressWarnings("unchecked")
public static <T> TestSpecification<T> builder(
	String name,
	Class<? extends TypeSerializer> serializerClass,
	Class<? extends TypeSerializerSnapshot> snapshotClass,
	MigrationVersion testMigrationVersion) {

	return new TestSpecification<>(
		name,
		(Class<? extends TypeSerializer<T>>) serializerClass,
		(Class<? extends TypeSerializerSnapshot<T>>) snapshotClass,
		testMigrationVersion);
}
 
Example 23
private TypeSerializerSnapshot<ElementT> snapshotUnderTest() {
	DataInputView input = contentsOf(testSpecification.getSnapshotDataLocation());
	try {
		if (!testSpecification.getTestMigrationVersion().isNewerVersionThan(MigrationVersion.v1_6)) {
			return readPre17SnapshotFormat(input);
		} else {
			return readSnapshot(input);
		}
	}
	catch (IOException e) {
		throw new RuntimeException("Unable to read " + testSpecification.getSnapshotDataLocation(),  e);
	}
}
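 
The version guard routes snapshots written with Flink 1.6 or older to the legacy read path. A minimal sketch of the comparison, assuming isNewerVersionThan follows the enum's version order:

// Illustrative: 1.6 is not newer than 1.6, so a 1.6 snapshot takes the pre-1.7
// path; 1.7 is newer than 1.6 and is read with the current snapshot format.
boolean legacyFormatFor16 = !MigrationVersion.v1_6.isNewerVersionThan(MigrationVersion.v1_6); // true
boolean legacyFormatFor17 = !MigrationVersion.v1_7.isNewerVersionThan(MigrationVersion.v1_6); // false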
 
Example 24
@SuppressWarnings("unchecked")
public static <T> TestSpecification<T> builder(
	String name,
	Class<? extends TypeSerializer> serializerClass,
	Class<? extends TypeSerializerSnapshot> snapshotClass,
	MigrationVersion testMigrationVersion) {

	return new TestSpecification<>(
		name,
		(Class<? extends TypeSerializer<T>>) serializerClass,
		(Class<? extends TypeSerializerSnapshot<T>>) snapshotClass,
		testMigrationVersion);
}
 
Example 25
private TestSpecification(
	String name,
	Class<? extends TypeSerializer<T>> serializerType,
	Class<? extends TypeSerializerSnapshot<T>> snapshotClass,
	MigrationVersion testMigrationVersion) {

	this.name = name;
	this.serializerType = serializerType;
	this.snapshotClass = snapshotClass;
	this.testMigrationVersion = testMigrationVersion;
}
 
Example 26
Source Project: flink   Source File: WindowOperatorMigrationTest.java   License: Apache License 2.0
@Parameterized.Parameters(name = "Migration Savepoint: {0}")
public static Collection<MigrationVersion> parameters () {
	return Arrays.asList(
		MigrationVersion.v1_2,
		MigrationVersion.v1_3,
		MigrationVersion.v1_4,
		MigrationVersion.v1_5,
		MigrationVersion.v1_6,
		MigrationVersion.v1_7,
		MigrationVersion.v1_8);
}
 
Example 27
Source Project: Flink-CEPplus   Source File: CEPMigrationTest.java   License: Apache License 2.0
@Parameterized.Parameters(name = "Migration Savepoint: {0}")
public static Collection<MigrationVersion> parameters () {
	return Arrays.asList(
		MigrationVersion.v1_3,
		MigrationVersion.v1_4,
		MigrationVersion.v1_5,
		MigrationVersion.v1_6,
		MigrationVersion.v1_7);
}
 
Example 28
@Parameterized.Parameters(name = "Migration Savepoint / Mod Time: {0}")
public static Collection<Tuple2<MigrationVersion, Long>> parameters () {
	return Arrays.asList(
		Tuple2.of(MigrationVersion.v1_2, 1493116191000L),
		Tuple2.of(MigrationVersion.v1_3, 1496532000000L),
		Tuple2.of(MigrationVersion.v1_4, 1516897628000L),
		Tuple2.of(MigrationVersion.v1_5, 1533639934000L),
		Tuple2.of(MigrationVersion.v1_6, 1534696817000L),
		Tuple2.of(MigrationVersion.v1_7, 1544024599000L));
}
 
Example 29
@Parameterized.Parameters(name = "Migrate Savepoint / Backend: {0}")
public static Collection<Tuple2<MigrationVersion, String>> parameters () {
	return Arrays.asList(
			Tuple2.of(MigrationVersion.v1_5, StateBackendLoader.MEMORY_STATE_BACKEND_NAME),
			Tuple2.of(MigrationVersion.v1_5, StateBackendLoader.ROCKSDB_STATE_BACKEND_NAME),
			Tuple2.of(MigrationVersion.v1_6, StateBackendLoader.MEMORY_STATE_BACKEND_NAME),
			Tuple2.of(MigrationVersion.v1_6, StateBackendLoader.ROCKSDB_STATE_BACKEND_NAME),
			Tuple2.of(MigrationVersion.v1_7, StateBackendLoader.MEMORY_STATE_BACKEND_NAME),
			Tuple2.of(MigrationVersion.v1_7, StateBackendLoader.ROCKSDB_STATE_BACKEND_NAME),
			Tuple2.of(MigrationVersion.v1_8, StateBackendLoader.MEMORY_STATE_BACKEND_NAME),
			Tuple2.of(MigrationVersion.v1_8, StateBackendLoader.ROCKSDB_STATE_BACKEND_NAME));
}
 
Example 30
@Parameterized.Parameters(name = "Migration Savepoint / Mod Time: {0}")
public static Collection<Tuple2<MigrationVersion, Long>> parameters () {
	return Arrays.asList(
		Tuple2.of(MigrationVersion.v1_2, 1493116191000L),
		Tuple2.of(MigrationVersion.v1_3, 1496532000000L),
		Tuple2.of(MigrationVersion.v1_4, 1516897628000L),
		Tuple2.of(MigrationVersion.v1_5, 1533639934000L),
		Tuple2.of(MigrationVersion.v1_6, 1534696817000L),
		Tuple2.of(MigrationVersion.v1_7, 1544024599000L),
		Tuple2.of(MigrationVersion.v1_8, 1555215710000L));
}