Java Code Examples for org.apache.flink.table.api.ValidationException

The following examples show how to use org.apache.flink.table.api.ValidationException. They are extracted from open-source projects. You can vote up the examples you like or vote down the ones you don't, and you can go to the original project or source file by following the link above each example. You may also check out the related API usage in the sidebar.
Example 1
Source Project: flink   Source File: FactoryUtil.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Discovers a {@link EncodingFormat} of the given type using the given option (if present) as factory
 * identifier.
 */
public <I, F extends EncodingFormatFactory<I>> Optional<EncodingFormat<I>> discoverOptionalEncodingFormat(
		Class<F> formatFactoryClass,
		ConfigOption<String> formatOption) {
	return discoverOptionalFormatFactory(formatFactoryClass, formatOption)
		.map(factory -> {
			final String prefix = formatPrefix(factory, formatOption);
			try {
				// Hand the factory only the options scoped to its own prefix.
				return factory.createEncodingFormat(context, projectOptions(prefix));
			} catch (Throwable t) {
				// Wrap any failure so the faulty factory and its option namespace are reported.
				final String message = String.format(
					"Error creating sink format '%s' in option space '%s'.",
					factory.factoryIdentifier(),
					prefix);
				throw new ValidationException(message, t);
			}
		});
}
 
Example 2
Source Project: flink   Source File: DecimalType.java    License: Apache License 2.0 6 votes vote down vote up
public DecimalType(boolean isNullable, int precision, int scale) {
	super(isNullable, LogicalTypeRoot.DECIMAL);
	if (precision < MIN_PRECISION || precision > MAX_PRECISION) {
		throw new ValidationException(
			String.format(
				"Decimal precision must be between %d and %d (both inclusive).",
				MIN_PRECISION,
				MAX_PRECISION));
	}
	if (scale < MIN_SCALE || scale > precision) {
		throw new ValidationException(
			String.format(
				"Decimal scale must be between %d and the precision %d (both inclusive).",
				MIN_SCALE,
				precision));
	}
	this.precision = precision;
	this.scale = scale;
}
 
Example 3
Source Project: flink   Source File: AggregateOperationFactory.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Resolves the window's time field expression and validates that it is a single,
 * plain field reference whose type passes the time attribute check.
 *
 * @throws ValidationException if the expression resolves to more than one column
 *         or is not a field reference
 */
private FieldReferenceExpression getValidatedTimeAttribute(GroupWindow window, ExpressionResolver resolver) {
	List<ResolvedExpression> timeFieldExprs = resolver.resolve(singletonList(window.getTimeField()));

	// The resolver may expand an expression into several columns; only one is allowed here.
	if (timeFieldExprs.size() != 1) {
		throw new ValidationException("A group window only supports a single time field column.");
	}

	Expression timeFieldExpr = timeFieldExprs.get(0);
	if (!(timeFieldExpr instanceof FieldReferenceExpression)) {
		throw new ValidationException("A group window expects a time attribute for grouping.");
	}

	FieldReferenceExpression timeField = (FieldReferenceExpression) timeFieldExpr;

	final LogicalType timeFieldType = timeField.getOutputDataType().getLogicalType();

	// Delegates the actual type validation to a shared helper.
	validateTimeAttributeType(timeFieldType);

	return timeField;
}
 
Example 4
Source Project: flink   Source File: FunctionITCase.java    License: Apache License 2.0 6 votes vote down vote up
// Verifies that invoking a scalar function through the TABLE() syntax is rejected
// with a ValidationException explaining that no matching table function signature exists.
@Test
public void testInvalidUseOfScalarFunction() {
	tEnv().executeSql("CREATE TABLE SinkTable(s STRING) WITH ('connector' = 'COLLECTION')");

	tEnv().createTemporarySystemFunction("PrimitiveScalarFunction", PrimitiveScalarFunction.class);
	try {
		tEnv().executeSql(
			"INSERT INTO SinkTable " +
			"SELECT * FROM TABLE(PrimitiveScalarFunction(1, 2, '3'))");
		// The statement above must not succeed.
		fail();
	} catch (ValidationException e) {
		assertThat(
			e,
			hasMessage(
				containsString(
					"No match found for function signature PrimitiveScalarFunction(<NUMERIC>, <NUMERIC>, <CHARACTER>)")));
	}
}
 
Example 5
Source Project: flink   Source File: KafkaDynamicTableFactoryTestBase.java    License: Apache License 2.0 6 votes vote down vote up
// 'timestamp' startup mode requires 'scan.startup.timestamp-millis'; omitting it must
// make table source creation fail with a descriptive ValidationException in the cause chain.
@Test
public void testMissingStartupTimestamp() {
	// Construct table source using DDL and table source factory
	ObjectIdentifier objectIdentifier = ObjectIdentifier.of(
			"default",
			"default",
			"scanTable");
	// Switch the startup mode to 'timestamp' WITHOUT providing the timestamp option.
	final Map<String, String> modifiedOptions = getModifiedOptions(
			getFullSourceOptions(),
			options -> {
				options.put("scan.startup.mode", "timestamp");
			});
	CatalogTable catalogTable = createKafkaSourceCatalogTable(modifiedOptions);

	thrown.expect(ValidationException.class);
	thrown.expect(containsCause(new ValidationException("'scan.startup.timestamp-millis' "
			+ "is required in 'timestamp' startup mode but missing.")));
	FactoryUtil.createTableSource(null,
			objectIdentifier,
			catalogTable,
			new Configuration(),
			Thread.currentThread().getContextClassLoader());
}
 
Example 6
Source Project: flink   Source File: DataType.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * This method should catch the most common errors. However, another validation is required in
 * deeper layers as we don't know whether the data type is used for input or output declaration.
 */
private static <C> Class<C> performEarlyClassValidation(
		LogicalType logicalType,
		Class<C> candidate) {

	// A null candidate means there is nothing to validate yet.
	if (candidate == null) {
		return candidate;
	}
	// The class is acceptable if the logical type can convert from OR to it.
	final boolean convertible =
		logicalType.supportsInputConversion(candidate)
			|| logicalType.supportsOutputConversion(candidate);
	if (!convertible) {
		throw new ValidationException(
			String.format(
				"Logical type '%s' does not support a conversion from or to class '%s'.",
				logicalType.asSummaryString(),
				candidate.getName()));
	}
	return candidate;
}
 
Example 7
Source Project: flink   Source File: FunctionCatalog.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Registers a catalog function as a temporary system function under its normalized name.
 * Fails if the function is invalid, or if a function with that name already exists and
 * {@code ignoreIfExists} is false.
 */
private void registerTemporarySystemFunction(
		String name,
		CatalogFunction function,
		boolean ignoreIfExists) {
	final String normalizedName = FunctionIdentifier.normalizeName(name);

	// Surface implementation problems as a validation error before touching the registry.
	try {
		validateAndPrepareFunction(function);
	} catch (Throwable t) {
		throw new ValidationException(
			String.format(
				"Could not register temporary system function '%s' due to implementation errors.",
				name),
			t);
	}

	if (tempSystemFunctions.containsKey(normalizedName)) {
		if (!ignoreIfExists) {
			throw new ValidationException(
				String.format(
					"Could not register temporary system function. A function named '%s' does already exist.",
					name));
		}
	} else {
		tempSystemFunctions.put(normalizedName, function);
	}
}
 
Example 8
Source Project: flink   Source File: TableEnvironmentImpl.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Registers a table source in the built-in catalog/database under the given name.
 *
 * <p>If an entry already exists: a ConnectorCatalogTable that only wraps a sink is upgraded
 * in place to wrap both source and sink; any other existing entry is a name clash.
 *
 * @throws ValidationException if a table with this name already provides a source
 */
private void registerTableSourceInternal(String name, TableSource<?> tableSource) {
	validateTableSource(tableSource);
	Optional<CatalogBaseTable> table = getCatalogTable(catalogManager.getBuiltInCatalogName(),
		catalogManager.getBuiltInDatabaseName(), name);

	if (table.isPresent()) {
		if (table.get() instanceof ConnectorCatalogTable<?, ?>) {
			ConnectorCatalogTable<?, ?> sourceSinkTable = (ConnectorCatalogTable<?, ?>) table.get();
			if (sourceSinkTable.getTableSource().isPresent()) {
				// A source is already registered under this name.
				throw new ValidationException(String.format(
					"Table '%s' already exists. Please choose a different name.", name));
			} else {
				// wrapper contains only sink (not source)
				replaceTableInternal(
					name,
					ConnectorCatalogTable
						.sourceAndSink(tableSource, sourceSinkTable.getTableSink().get(), !IS_STREAM_TABLE));
			}
		} else {
			// The existing entry is not a connector wrapper at all — plain name clash.
			throw new ValidationException(String.format(
				"Table '%s' already exists. Please choose a different name.", name));
		}
	} else {
		registerTableInternal(name, ConnectorCatalogTable.source(tableSource, !IS_STREAM_TABLE));
	}
}
 
Example 9
Source Project: flink   Source File: CsvRowFormatFactoryTest.java    License: Apache License 2.0 6 votes vote down vote up
// Setting a quote character AND disabling quoting at the same time is contradictory;
// the CSV descriptor must reject this combination with a ValidationException.
@Test
public void testDisableQuoteCharacterException() {
	thrown.expect(ValidationException.class);
	thrown.expectMessage("Format cannot define a quote character and disabled quote character at the same time.");
	final Map<String, String> properties = new Csv()
		.schema(SCHEMA)
		.fieldDelimiter(';')
		.lineDelimiter("\r\n")
		.allowComments()
		.ignoreParseErrors()
		.arrayElementDelimiter("|")
		.escapeCharacter('\\')
		.nullLiteral("n/a")
		.quoteCharacter('#')
		.disableQuoteCharacter()
		.toProperties();

	// Validation is triggered when the factory consumes the properties.
	TableFactoryService
		.find(SerializationSchemaFactory.class, properties)
		.createSerializationSchema(properties);
}
 
Example 10
Source Project: flink   Source File: YearMonthIntervalType.java    License: Apache License 2.0 6 votes vote down vote up
public YearMonthIntervalType(boolean isNullable, YearMonthResolution resolution, int yearPrecision) {
	super(isNullable, LogicalTypeRoot.INTERVAL_YEAR_MONTH);
	Preconditions.checkNotNull(resolution);
	if (resolution == YearMonthResolution.MONTH && yearPrecision != DEFAULT_PRECISION) {
		throw new ValidationException(
			String.format(
				"Year precision of sub-year intervals must be equal to the default precision %d.",
				DEFAULT_PRECISION));
	}
	if (yearPrecision < MIN_PRECISION || yearPrecision > MAX_PRECISION) {
		throw new ValidationException(
			String.format(
				"Year precision of year-month intervals must be between %d and %d (both inclusive).",
				MIN_PRECISION,
				MAX_PRECISION));
	}
	this.resolution = resolution;
	this.yearPrecision = yearPrecision;
}
 
Example 11
Source Project: flink   Source File: TypeInferenceUtil.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Infers the expected argument types for a call and records them in an adapted call context.
 *
 * <p>Explicitly typed arguments take precedence; the input type strategy is then consulted
 * and may refine them further.
 *
 * @throws ValidationException if inference fails and {@code throwOnFailure} is set
 */
private static AdaptedCallContext inferInputTypes(
		TypeInference typeInference,
		CallContext callContext,
		@Nullable DataType outputType,
		boolean throwOnFailure) {

	final AdaptedCallContext adaptedCallContext = new AdaptedCallContext(callContext, outputType);

	// typed arguments have highest priority
	typeInference.getTypedArguments().ifPresent(adaptedCallContext::setExpectedArguments);

	// The strategy may return empty to signal that no valid input types could be inferred.
	final List<DataType> inferredDataTypes = typeInference.getInputTypeStrategy()
		.inferInputTypes(adaptedCallContext, throwOnFailure)
		.orElse(null);

	if (inferredDataTypes != null) {
		adaptedCallContext.setExpectedArguments(inferredDataTypes);
	} else if (throwOnFailure) {
		throw new ValidationException("Invalid input arguments.");
	}

	return adaptedCallContext;
}
 
Example 12
Source Project: flink   Source File: ElasticsearchUpsertTableSinkBase.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Returns a copy of this sink configured with the given fields.
 *
 * <p>Reconfiguration must not change the schema: the requested names/types have to match
 * the sink's existing ones exactly, otherwise a ValidationException is thrown.
 */
@Override
public TableSink<Tuple2<Boolean, Row>> configure(String[] fieldNames, TypeInformation<?>[] fieldTypes) {
	if (!Arrays.equals(getFieldNames(), fieldNames) || !Arrays.equals(getFieldTypes(), fieldTypes)) {
		throw new ValidationException("Reconfiguration with different fields is not allowed. " +
			"Expected: " + Arrays.toString(getFieldNames()) + " / " + Arrays.toString(getFieldTypes()) + ". " +
			"But was: " + Arrays.toString(fieldNames) + " / " + Arrays.toString(fieldTypes));
	}
	// Same schema — return a fresh copy carrying over all connector settings.
	return copy(
		isAppendOnly,
		schema,
		hosts,
		index,
		docType,
		keyDelimiter,
		keyNullLiteral,
		serializationSchema,
		contentType,
		failureHandler,
		sinkOptions,
		requestFactory);
}
 
Example 13
Source Project: flink   Source File: AggregateOperationFactory.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Validates a session window definition and converts it into a resolved group window.
 *
 * @throws ValidationException if the gap is not a literal of a day-time interval type
 */
private ResolvedGroupWindow validateAndCreateSessionWindow(
		SessionWithGapOnTimeWithAlias window,
		String windowName,
		FieldReferenceExpression timeField) {
	ValueLiteralExpression windowGap = getAsValueLiteral(
		window.getGap(),
		"A session window expects a gap value literal.");

	final LogicalType windowGapType = windowGap.getOutputDataType().getLogicalType();

	// Session gaps are durations, so only day-time intervals make sense here.
	if (!hasRoot(windowGapType, INTERVAL_DAY_TIME)) {
		throw new ValidationException("A session window expects a gap literal of a day-time interval type.");
	}

	return ResolvedGroupWindow.sessionWindow(
		windowName,
		timeField,
		windowGap);
}
 
Example 14
Source Project: flink   Source File: InputTypeStrategiesTestBase.java    License: Apache License 2.0 6 votes vote down vote up
// Data-driven check of an input type strategy: verifies the generated signature,
// an optionally expected error, and the inferred argument types for each input set.
@Test
public void testStrategy() {
	if (testSpec.expectedSignature != null) {
		assertThat(
			generateSignature(),
			equalTo(testSpec.expectedSignature));
	}
	if (testSpec.expectedErrorMessage != null) {
		// The error is expected to surface as the cause of a ValidationException.
		thrown.expect(ValidationException.class);
		thrown.expectCause(containsCause(new ValidationException(testSpec.expectedErrorMessage)));
	}
	for (List<DataType> actualArgumentTypes : testSpec.actualArgumentTypes) {
		TypeInferenceUtil.Result result = runTypeInference(actualArgumentTypes);
		if (testSpec.expectedArgumentTypes != null) {
			assertThat(result.getExpectedArgumentTypes(), equalTo(testSpec.expectedArgumentTypes));
		}
	}
}
 
Example 15
Source Project: flink   Source File: AggregateOperationFactory.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Unwraps an aliased call: the first child is expected to be a table aggregate call,
 * the remaining children are string literals used as column aliases.
 *
 * <p>Side effect: stores the extracted aliases in the {@code alias} field.
 *
 * @throws ValidationException if an alias child is not a string literal
 */
private ResolvedExpression unwrapFromAlias(CallExpression call) {
	List<ResolvedExpression> children = call.getResolvedChildren();
	// Children after the first are the alias literals.
	List<String> aliases = children.subList(1, children.size())
		.stream()
		.map(alias -> ExpressionUtils.extractValue(alias, String.class)
			.orElseThrow(() -> new ValidationException("Unexpected alias: " + alias)))
		.collect(toList());

	if (!isFunctionOfKind(children.get(0), TABLE_AGGREGATE)) {
		throw fail();
	}

	validateAlias(
		aliases,
		(TableAggregateFunctionDefinition) ((CallExpression) children.get(0)).getFunctionDefinition());
	alias = aliases;
	return children.get(0);
}
 
Example 16
/**
 * For sink, routingkey-field-name information is mandatory.
 *
 * <p>Builds a Pravega sink descriptor without a routing key and expects the
 * table sink factory to reject it with a {@link ValidationException}.
 */
@Test (expected = ValidationException.class)
public void testMissingRoutingKeyForWriter() {
    Pravega pravega = new Pravega();
    Stream stream = Stream.of(SCOPE, STREAM);

    // Note: no routing key field is configured on the writer builder.
    pravega.tableSinkWriterBuilder()
            .forStream(stream)
            .withPravegaConfig(PRAVEGA_CONFIG);

    final TestTableDescriptor testDesc = new TestTableDescriptor(pravega)
            .withFormat(JSON)
            .withSchema(SCHEMA)
            .inAppendMode();

    final Map<String, String> propertiesMap = testDesc.toProperties();
    // Validation happens when the factory consumes the descriptor properties.
    TableFactoryService.find(StreamTableSinkFactory.class, propertiesMap)
            .createStreamTableSink(propertiesMap);
    fail("routingKey field name validation failed");
}
 
Example 17
Source Project: flink   Source File: SqlCreateTableConverter.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Resolves the source table referenced by a CREATE TABLE ... LIKE clause.
 *
 * @throws ValidationException if the referenced table does not exist in the catalog
 *         or resolves to a VIEW instead of a table
 */
private CatalogTable lookupLikeSourceTable(SqlTableLike sqlTableLike) {
	UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(sqlTableLike.getSourceTable()
		.toString());
	// Qualify with current catalog/database before the lookup.
	ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
	CatalogManager.TableLookupResult lookupResult = catalogManager.getTable(identifier)
		.orElseThrow(() -> new ValidationException(String.format(
			"Source table '%s' of the LIKE clause not found in the catalog, at %s",
			identifier,
			sqlTableLike.getSourceTable().getParserPosition())));
	if (!(lookupResult.getTable() instanceof CatalogTable)) {
		throw new ValidationException(String.format(
			"Source table '%s' of the LIKE clause can not be a VIEW, at %s",
			identifier,
			sqlTableLike.getSourceTable().getParserPosition()));
	}
	return (CatalogTable) lookupResult.getTable();
}
 
Example 18
Source Project: flink   Source File: CalculatedTableFactory.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Replaces the field names of the given schema with the provided aliases.
 *
 * <p>Returns the schema unchanged when no aliases were given; fails when the alias
 * count does not match the schema's field count.
 */
private TableSchema adjustNames(
		TableSchema tableSchema,
		List<String> aliases,
		String functionName) {
	final int aliasCount = aliases.size();
	if (aliasCount == 0) {
		// Nothing to rename.
		return tableSchema;
	}

	final int fieldCount = tableSchema.getFieldCount();
	if (fieldCount != aliasCount) {
		throw new ValidationException(String.format(
			"List of column aliases must have same degree as table; " +
				"the returned table of function '%s' has " +
				"%d columns, whereas alias list has %d columns",
			functionName,
			fieldCount,
			aliasCount));
	}

	return TableSchema.builder()
		.fields(aliases.toArray(new String[0]), tableSchema.getFieldDataTypes())
		.build();
}
 
Example 19
Source Project: flink   Source File: DescriptorProperties.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Validation for fixed indexed properties.
 *
 * <p>For example:
 *
 * <pre>
 *     schema.fields.0.type = INT, schema.fields.0.name = test
 *     schema.fields.1.type = LONG, schema.fields.1.name = test2
 * </pre>
 *
 * <p>The subKeyValidation map must define e.g. "type" and "name" and a validation logic for the given full key.
 *
 * @param key common key prefix (e.g. "schema.fields")
 * @param allowEmpty whether zero indexed entries is acceptable
 * @param subKeyValidation required sub-keys mapped to their per-key validators
 * @throws ValidationException if no entries exist (and allowEmpty is false) or a required sub-key is missing
 */
public void validateFixedIndexedProperties(String key, boolean allowEmpty, Map<String, Consumer<String>> subKeyValidation) {
	// determine max index
	final int maxIndex = extractMaxIndex(key, "\\.(.*)");

	// maxIndex < 0 means no indexed entry at all was found under the key.
	if (maxIndex < 0 && !allowEmpty) {
		throw new ValidationException("Property key '" + key + "' must not be empty.");
	}

	// validate
	for (int i = 0; i <= maxIndex; i++) {
		for (Map.Entry<String, Consumer<String>> subKey : subKeyValidation.entrySet()) {
			final String fullKey = key + '.' + i + '.' + subKey.getKey();
			if (properties.containsKey(fullKey)) {
				// run validation logic
				subKey.getValue().accept(fullKey);
			} else {
				// Every index must define every sub-key — "fixed" indexed properties.
				throw new ValidationException("Required property key '" + fullKey + "' is missing.");
			}
		}
	}
}
 
Example 20
Source Project: flink   Source File: FunctionServiceTest.java    License: Apache License 2.0 5 votes vote down vote up
// A function class whose constructor throws must be reported as a ValidationException
// when the FunctionService instantiates it from the descriptor.
@Test(expected = ValidationException.class)
public void testErrorConstructorClass() {
	FunctionDescriptor descriptor = new FunctionDescriptor()
			.fromClass(new ClassInstance()
					.of(ErrorConstructorClass.class.getName())
					.parameterString("arg"));

	FunctionService.createFunction(descriptor);
}
 
Example 21
Source Project: flink   Source File: Schema.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Specifies the previously defined field as a processing-time attribute.
 *
 * <p>E.g. field("proctime", Types.SQL_TIMESTAMP).proctime()
 *
 * @return this schema for fluent chaining
 * @throws ValidationException if no field was defined before this call
 */
public Schema proctime() {
	if (lastField == null) {
		throw new ValidationException("No field defined previously. Use field() before.");
	}
	tableSchema.get(lastField).put(SCHEMA_PROCTIME, "true");
	// Reset so the proctime marker cannot be applied twice to the same field.
	lastField = null;
	return this;
}
 
Example 22
Source Project: flink   Source File: DescriptorProperties.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Returns a Java {@link Duration} under the given key if it exists.
 *
 * @throws ValidationException if the stored value cannot be parsed as a duration
 */
public Optional<Duration> getOptionalDuration(String key) {
	return optionalGet(key).map(rawValue -> {
		final Duration parsed;
		try {
			parsed = TimeUtils.parseDuration(rawValue);
		} catch (Exception e) {
			// Re-throw as a validation error that names the offending key.
			throw new ValidationException("Invalid duration value for key '" + key + "'.", e);
		}
		return parsed;
	});
}
 
Example 23
Source Project: flink   Source File: OperationTreeBuilder.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Creates the operation tree for a map operation: validates that the expression is a
 * scalar function call, then flattens its result into individual columns via a projection.
 *
 * @throws ValidationException if the resolved expression is not a scalar function
 */
public QueryOperation map(Expression mapFunction, QueryOperation child) {

		Expression resolvedMapFunction = mapFunction.accept(lookupResolver);

		if (!ApiExpressionUtils.isFunctionOfKind(resolvedMapFunction, FunctionKind.SCALAR)) {
			throw new ValidationException("Only a scalar function can be used in the map operator.");
		}

		// FLATTEN expands the (possibly composite) function result into separate fields.
		Expression expandedFields = unresolvedCall(BuiltInFunctionDefinitions.FLATTEN, resolvedMapFunction);
		return project(Collections.singletonList(expandedFields), child, false);
	}
 
Example 24
Source Project: flink   Source File: TypeInferenceUtil.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Returns an exception for an invalid call to a function.
 */
public static ValidationException createInvalidCallException(
		CallContext callContext,
		ValidationException cause) {
	// Render the call as "name(argType1, argType2, ...)" for the error message.
	final String argumentList = callContext.getArgumentDataTypes().stream()
		.map(DataType::toString)
		.collect(Collectors.joining(", "));
	final String message = String.format(
		"Invalid function call:\n%s(%s)",
		callContext.getName(),
		argumentList);
	return new ValidationException(message, cause);
}
 
Example 25
Source Project: Flink-CEPplus   Source File: KafkaTableSinkBase.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Returns this sink configured with the given fields.
 *
 * <p>Reconfiguration must not change the schema: the requested names/types have to match
 * the sink's existing ones exactly, otherwise a ValidationException is thrown.
 */
@Override
public KafkaTableSinkBase configure(String[] fieldNames, TypeInformation<?>[] fieldTypes) {
	if (!Arrays.equals(getFieldNames(), fieldNames) || !Arrays.equals(getFieldTypes(), fieldTypes)) {
		throw new ValidationException("Reconfiguration with different fields is not allowed. " +
			"Expected: " + Arrays.toString(getFieldNames()) + " / " + Arrays.toString(getFieldTypes()) + ". " +
			"But was: " + Arrays.toString(fieldNames) + " / " + Arrays.toString(fieldTypes));
	}
	return this;
}
 
Example 26
Source Project: flink   Source File: DataGenTableSourceFactory.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Creates a {@link RandomGenerator} for the given field, reading the per-field
 * 'fields.&lt;name&gt;.length', '.min' and '.max' options from the configuration.
 *
 * @throws ValidationException for logical types without a random generator
 */
private DataGenerator createRandomGenerator(String name, DataType type, ReadableConfig options) {
	ConfigOption<Integer> lenKey = key(FIELDS + "." + name + "." + LENGTH)
			.intType().defaultValue(100);
	OptionBuilder minKey = key(FIELDS + "." + name + "." + MIN);
	OptionBuilder maxKey = key(FIELDS + "." + name + "." + MAX);
	switch (type.getLogicalType().getTypeRoot()) {
		case BOOLEAN:
			return RandomGenerator.booleanGenerator();
		case CHAR:
		case VARCHAR:
			int length = options.get(lenKey);
			return getRandomStringGenerator(length);
		case TINYINT:
			return RandomGenerator.byteGenerator(
					options.get(minKey.intType().defaultValue((int) Byte.MIN_VALUE)).byteValue(),
					options.get(maxKey.intType().defaultValue((int) Byte.MAX_VALUE)).byteValue());
		case SMALLINT:
			return RandomGenerator.shortGenerator(
					options.get(minKey.intType().defaultValue((int) Short.MIN_VALUE)).shortValue(),
					options.get(maxKey.intType().defaultValue((int) Short.MAX_VALUE)).shortValue());
		case INTEGER:
			return RandomGenerator.intGenerator(
					options.get(minKey.intType().defaultValue(Integer.MIN_VALUE)),
					options.get(maxKey.intType().defaultValue(Integer.MAX_VALUE)));
		case BIGINT:
			return RandomGenerator.longGenerator(
					options.get(minKey.longType().defaultValue(Long.MIN_VALUE)),
					options.get(maxKey.longType().defaultValue(Long.MAX_VALUE)));
		case FLOAT:
			// BUGFIX: Float.MIN_VALUE is the smallest POSITIVE float (~1.4e-45), not the most
			// negative one; using it as the default lower bound silently excluded all negative
			// values. The full float range is [-Float.MAX_VALUE, Float.MAX_VALUE].
			return RandomGenerator.floatGenerator(
					options.get(minKey.floatType().defaultValue(-Float.MAX_VALUE)),
					options.get(maxKey.floatType().defaultValue(Float.MAX_VALUE)));
		case DOUBLE:
			// BUGFIX: same as FLOAT — Double.MIN_VALUE is the smallest positive double.
			return RandomGenerator.doubleGenerator(
					options.get(minKey.doubleType().defaultValue(-Double.MAX_VALUE)),
					options.get(maxKey.doubleType().defaultValue(Double.MAX_VALUE)));
		default:
			throw new ValidationException("Unsupported type: " + type);
	}
}
 
Example 27
Source Project: flink   Source File: DescriptorProperties.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Validates a type property.
 *
 * @param key property key holding the type string
 * @param isOptional whether the property may be absent
 * @param requireRow whether the parsed type must be a row type
 * @throws ValidationException if the value is not a row type but one is required
 */
public void validateType(String key, boolean isOptional, boolean requireRow) {
	validateOptional(
		key,
		isOptional,
		(value) -> {
			// we don't validate the string but let the parser do the work for us
			// it throws a validation exception
			final TypeInformation<?> typeInfo = TypeStringUtils.readTypeInfo(value);
			if (requireRow && !(typeInfo instanceof RowTypeInfo)) {
				throw new ValidationException(
					"Row type information expected for key '" + key + "' but was: " + value);
			}
		});
}
 
Example 28
Source Project: flink   Source File: StreamTableEnvironmentImpl.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Extracts the {@link TypeInformation} for the given class, translating extraction
 * failures into a {@link ValidationException} that names the query and target class.
 */
private <T> TypeInformation<T> extractTypeInformation(Table table, Class<T> clazz) {
	try {
		return TypeExtractor.createTypeInfo(clazz);
	} catch (Exception ex) {
		final String message = String.format(
			"Could not convert query: %s to a DataStream of class %s",
			table.getQueryOperation().asSummaryString(),
			clazz.getSimpleName());
		throw new ValidationException(message, ex);
	}
}
 
Example 29
Source Project: flink   Source File: DescriptorProperties.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Returns a byte value under the given key if it exists.
 *
 * @throws ValidationException if the stored value cannot be parsed as a byte
 */
public Optional<Byte> getOptionalByte(String key) {
	return optionalGet(key).map(rawValue -> {
		final Byte parsed;
		try {
			parsed = Byte.valueOf(rawValue);
		} catch (Exception e) {
			// Re-throw as a validation error that names the offending key.
			throw new ValidationException("Invalid byte value for key '" + key + "'.", e);
		}
		return parsed;
	});
}
 
Example 30
Source Project: flink   Source File: CsvFormatFactoryTest.java    License: Apache License 2.0 5 votes vote down vote up
// An unsupported CSV line delimiter must be rejected with a ValidationException
// whose cause names the option and lists the supported values.
@Test
public void testInvalidLineDelimiter() {
	thrown.expect(ValidationException.class);
	thrown.expect(containsCause(new ValidationException("Invalid value for option 'csv.line-delimiter'. "
			+ "Supported values are [\\r, \\n, \\r\\n, \"\"], but was: abc")));

	// Override only the delimiter; all other options stay valid.
	final Map<String, String> options =
			getModifiedOptions(opts -> opts.put("csv.line-delimiter", "abc"));

	createTableSink(options);
}