Java Code Examples for org.apache.flink.table.descriptors.DescriptorProperties#getOptionalString()

The following examples show how to use org.apache.flink.table.descriptors.DescriptorProperties#getOptionalString(). Each example is taken from an open-source project; the source file and license are noted above the snippet.
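
Before the project examples, here is a minimal, self-contained sketch of the pattern they all share; the property keys in it are hypothetical. getOptionalString(key) returns an Optional<String> that is empty when the key is absent, so callers unwrap it with orElse, ifPresent, or an explicit isPresent() check.

import java.util.HashMap;
import java.util.Map;
import java.util.Optional;

import org.apache.flink.table.descriptors.DescriptorProperties;

public class GetOptionalStringSketch {

    public static void main(String[] args) {
        // Populate DescriptorProperties from a plain key/value map
        // (the boolean constructor flag enables key normalization).
        final Map<String, String> props = new HashMap<>();
        props.put("connector.some-option", "some-value"); // hypothetical key

        final DescriptorProperties descriptorProperties = new DescriptorProperties(true);
        descriptorProperties.putProperties(props);

        // Present key: the Optional carries the configured value.
        final Optional<String> present = descriptorProperties.getOptionalString("connector.some-option");
        System.out.println(present.orElse("<default>")); // prints "some-value"

        // Absent key: the Optional is empty and the fallback is used.
        final Optional<String> absent = descriptorProperties.getOptionalString("connector.missing-option");
        System.out.println(absent.orElse("<default>")); // prints "<default>"
    }
}

The catalog examples below (Examples 6, 7 and 8) use exactly this orElse pattern to fall back to a default database name.
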
Example 1
Source File: JDBCTableSourceSinkFactory.java    From flink with Apache License 2.0
private JDBCReadOptions getJDBCReadOptions(DescriptorProperties descriptorProperties) {
	final Optional<String> partitionColumnName =
		descriptorProperties.getOptionalString(CONNECTOR_READ_PARTITION_COLUMN);
	final Optional<Long> partitionLower = descriptorProperties.getOptionalLong(CONNECTOR_READ_PARTITION_LOWER_BOUND);
	final Optional<Long> partitionUpper = descriptorProperties.getOptionalLong(CONNECTOR_READ_PARTITION_UPPER_BOUND);
	final Optional<Integer> numPartitions = descriptorProperties.getOptionalInt(CONNECTOR_READ_PARTITION_NUM);

	final JDBCReadOptions.Builder builder = JDBCReadOptions.builder();
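	// when a partition column is configured, the bounds and partition count are
	// expected to be set as well (checked during validation), so get() is safe here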
	if (partitionColumnName.isPresent()) {
		builder.setPartitionColumnName(partitionColumnName.get());
		builder.setPartitionLowerBound(partitionLower.get());
		builder.setPartitionUpperBound(partitionUpper.get());
		builder.setNumPartitions(numPartitions.get());
	}
	descriptorProperties.getOptionalInt(CONNECTOR_READ_FETCH_SIZE).ifPresent(builder::setFetchSize);

	return builder.build();
}
 
Example 2
Source File: PravegaValidator.java    From flink-connectors with Apache License 2.0
private void validateReaderConfigurations(DescriptorProperties properties) {
    final Map<String, Consumer<String>> streamPropertyValidators = new HashMap<>();
    streamPropertyValidators.put(
            CONNECTOR_READER_STREAM_INFO_SCOPE,
            prefix -> properties.validateString(prefix, true, 1));
    streamPropertyValidators.put(
            CONNECTOR_READER_STREAM_INFO_STREAM,
            prefix -> properties.validateString(prefix, false, 0));
    streamPropertyValidators.put(
            CONNECTOR_READER_STREAM_INFO_START_STREAMCUT,
            prefix -> properties.validateString(prefix, true, 1));
    streamPropertyValidators.put(
            CONNECTOR_READER_STREAM_INFO_END_STREAMCUT,
            prefix -> properties.validateString(prefix, true, 1));
    properties.validateFixedIndexedProperties(CONNECTOR_READER_STREAM_INFO, true, streamPropertyValidators);

    // for readers we need default-scope from connection config or reader group scope
    Optional<String> readerGroupScope = properties.getOptionalString(CONNECTOR_READER_READER_GROUP_SCOPE);
    Optional<String> connConfigDefaultScope = properties.getOptionalString(CONNECTOR_CONNECTION_CONFIG_DEFAULT_SCOPE);
    if (!readerGroupScope.isPresent() && !connConfigDefaultScope.isPresent()) {
        throw new ValidationException("Must supply either " + CONNECTOR_READER_READER_GROUP_SCOPE + " or " + CONNECTOR_CONNECTION_CONFIG_DEFAULT_SCOPE);
    }
}
 
Example 3
Source File: PravegaValidator.java    From flink-connectors with Apache License 2.0
private void validateWriterConfigurations(DescriptorProperties properties) {
    properties.validateString(CONNECTOR_WRITER_SCOPE, true, 1, Integer.MAX_VALUE);
    properties.validateString(CONNECTOR_WRITER_STREAM, false, 1, Integer.MAX_VALUE);
    properties.validateString(CONNECTOR_WRITER_ROUTING_KEY_FILED_NAME, false, 1, Integer.MAX_VALUE);

    // for writers we need default-scope from connection config or scope from writer config
    Optional<String> scope = properties.getOptionalString(CONNECTOR_WRITER_SCOPE);
    Optional<String> connConfigDefaultScope = properties.getOptionalString(CONNECTOR_CONNECTION_CONFIG_DEFAULT_SCOPE);
    if (!scope.isPresent() && !connConfigDefaultScope.isPresent()) {
        throw new ValidationException("Must supply either " + CONNECTOR_WRITER_SCOPE + " or " + CONNECTOR_CONNECTION_CONFIG_DEFAULT_SCOPE);
    }

    // validate writer mode
    final Map<String, Consumer<String>> writerModeValidators = new HashMap<>();
    writerModeValidators.put(CONNECTOR_WRITER_MODE_VALUE_EXACTLY_ONCE, properties.noValidation());
    writerModeValidators.put(CONNECTOR_WRITER_MODE_VALUE_ATLEAST_ONCE, properties.noValidation());
    properties.validateEnum(CONNECTOR_WRITER_MODE, true, writerModeValidators);
}
 
Example 4
Source File: ConnectorConfigurations.java    From flink-connectors with Apache License 2.0
public void parseConfigurations(DescriptorProperties descriptorProperties, ConfigurationType configurationType) {
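    // only the controller URI is mandatory here; the remaining connection settings are optional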
    metrics = descriptorProperties.getOptionalBoolean(CONNECTOR_METRICS);
    controllerUri = descriptorProperties.getString(CONNECTOR_CONNECTION_CONFIG_CONTROLLER_URI);
    defaultScope = descriptorProperties.getOptionalString(CONNECTOR_CONNECTION_CONFIG_DEFAULT_SCOPE);

    authType = descriptorProperties.getOptionalString(CONNECTOR_CONNECTION_CONFIG_SECURITY_AUTH_TYPE);
    authToken = descriptorProperties.getOptionalString(CONNECTOR_CONNECTION_CONFIG_SECURITY_AUTH_TOKEN);
    validateHostName = descriptorProperties.getOptionalBoolean(CONNECTOR_CONNECTION_CONFIG_SECURITY_VALIDATE_HOSTNAME);
    trustStore = descriptorProperties.getOptionalString(CONNECTOR_CONNECTION_CONFIG_SECURITY_TRUST_STORE);

    createPravegaConfig();

    if (configurationType == ConfigurationType.READER) {
        populateReaderConfig(descriptorProperties);
    }

    if (configurationType == ConfigurationType.WRITER) {
        populateWriterConfig(descriptorProperties);
    }
}
 
Example 5
Source File: JdbcTableSourceSinkFactory.java    From flink with Apache License 2.0
private JdbcReadOptions getJdbcReadOptions(DescriptorProperties descriptorProperties) {
	final Optional<String> query = descriptorProperties.getOptionalString(CONNECTOR_READ_QUERY);
	final Optional<String> partitionColumnName =
		descriptorProperties.getOptionalString(CONNECTOR_READ_PARTITION_COLUMN);
	final Optional<Long> partitionLower = descriptorProperties.getOptionalLong(CONNECTOR_READ_PARTITION_LOWER_BOUND);
	final Optional<Long> partitionUpper = descriptorProperties.getOptionalLong(CONNECTOR_READ_PARTITION_UPPER_BOUND);
	final Optional<Integer> numPartitions = descriptorProperties.getOptionalInt(CONNECTOR_READ_PARTITION_NUM);

	final JdbcReadOptions.Builder builder = JdbcReadOptions.builder();
	if (query.isPresent()) {
		builder.setQuery(query.get());
	}
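	// as in Example 1, the partition bounds and count are expected to accompany the partition column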
	if (partitionColumnName.isPresent()) {
		builder.setPartitionColumnName(partitionColumnName.get());
		builder.setPartitionLowerBound(partitionLower.get());
		builder.setPartitionUpperBound(partitionUpper.get());
		builder.setNumPartitions(numPartitions.get());
	}
	descriptorProperties.getOptionalInt(CONNECTOR_READ_FETCH_SIZE).ifPresent(builder::setFetchSize);

	return builder.build();
}
 
Example 6
Source File: HiveCatalogFactory.java    From flink with Apache License 2.0
@Override
public Catalog createCatalog(String name, Map<String, String> properties) {
	final DescriptorProperties descriptorProperties = getValidatedProperties(properties);

	final String defaultDatabase =
		descriptorProperties.getOptionalString(CATALOG_DEFAULT_DATABASE)
			.orElse(HiveCatalog.DEFAULT_DB);

	final Optional<String> hiveConfDir = descriptorProperties.getOptionalString(CATALOG_HIVE_CONF_DIR);

	final String version = descriptorProperties.getOptionalString(CATALOG_HIVE_VERSION).orElse(HiveShimLoader.getHiveVersion());

	return new HiveCatalog(name, defaultDatabase, hiveConfDir.orElse(null), version);
}
 
Example 7
Source File: GenericInMemoryCatalogFactory.java    From flink with Apache License 2.0
@Override
public Catalog createCatalog(String name, Map<String, String> properties) {
	final DescriptorProperties descriptorProperties = getValidatedProperties(properties);

	final Optional<String> defaultDatabase = descriptorProperties.getOptionalString(CATALOG_DEFAULT_DATABASE);

	return new GenericInMemoryCatalog(name, defaultDatabase.orElse(GenericInMemoryCatalog.DEFAULT_DB));
}
 
Example 8
Source File: DependencyTest.java    From flink with Apache License 2.0
@Override
public Catalog createCatalog(String name, Map<String, String> properties) {
	final DescriptorProperties params = new DescriptorProperties(true);
	params.putProperties(properties);

	final Optional<String> defaultDatabase = params.getOptionalString(CATALOG_DEFAULT_DATABASE);

	return new TestCatalog(name, defaultDatabase.orElse(GenericInMemoryCatalog.DEFAULT_DB));
}
 
Example 9
Source File: ConnectorConfigurations.java    From flink-connectors with Apache License 2.0
private void populateWriterConfig(DescriptorProperties descriptorProperties) {
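    // the writer needs a scope either from its own config or from the connection config's default scope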
    Optional<String> streamScope = descriptorProperties.getOptionalString(CONNECTOR_WRITER_SCOPE);

    if (!defaultScope.isPresent() && !streamScope.isPresent()) {
        throw new ValidationException("Must supply either " + CONNECTOR_WRITER_SCOPE + " or " + CONNECTOR_CONNECTION_CONFIG_DEFAULT_SCOPE);
    }

    final String scopeVal = streamScope.isPresent() ? streamScope.get() : defaultScope.get();

    if (!descriptorProperties.containsKey(CONNECTOR_WRITER_STREAM)) {
        throw new ValidationException("Missing " + CONNECTOR_WRITER_STREAM + " configuration.");
    }
    final String streamName = descriptorProperties.getString(CONNECTOR_WRITER_STREAM);
    writerStream = Stream.of(scopeVal, streamName);

    txnLeaseRenewalInterval = descriptorProperties.getOptionalLong(CONNECTOR_WRITER_TXN_LEASE_RENEWAL_INTERVAL);

    if (!descriptorProperties.containsKey(CONNECTOR_WRITER_ROUTING_KEY_FILED_NAME)) {
        throw new ValidationException("Missing " + CONNECTOR_WRITER_ROUTING_KEY_FILED_NAME + " configuration.");
    }
    watermark = descriptorProperties.getOptionalBoolean(CONNECTOR_WRITER_ENABLE_WATERMARK);
    routingKey = descriptorProperties.getString(CONNECTOR_WRITER_ROUTING_KEY_FILED_NAME);

    Optional<String> optionalMode = descriptorProperties.getOptionalString(CONNECTOR_WRITER_MODE);
    if (optionalMode.isPresent()) {
        String mode = optionalMode.get();
        if (mode.equals(CONNECTOR_WRITER_MODE_VALUE_ATLEAST_ONCE)) {
            writerMode = Optional.of(PravegaWriterMode.ATLEAST_ONCE);
        } else if (mode.equals(CONNECTOR_WRITER_MODE_VALUE_EXACTLY_ONCE)) {
            writerMode = Optional.of(PravegaWriterMode.EXACTLY_ONCE);
        } else {
            throw new ValidationException("Invalid writer mode " + mode + " passed. Supported values: ("
                    + CONNECTOR_WRITER_MODE_VALUE_ATLEAST_ONCE + " or " + CONNECTOR_WRITER_MODE_VALUE_EXACTLY_ONCE + ")");
        }
    }
}
 
Example 10
Source File: ConnectorConfigurations.java    From flink-connectors with Apache License 2.0
@SuppressWarnings("unchecked")
private void populateReaderConfig(DescriptorProperties descriptorProperties) {
    uid = descriptorProperties.getOptionalString(CONNECTOR_READER_READER_GROUP_UID);
    rgScope = descriptorProperties.getOptionalString(CONNECTOR_READER_READER_GROUP_SCOPE);
    rgName = descriptorProperties.getOptionalString(CONNECTOR_READER_READER_GROUP_NAME);
    refreshInterval = descriptorProperties.getOptionalLong(CONNECTOR_READER_READER_GROUP_REFRESH_INTERVAL);
    eventReadTimeoutInterval = descriptorProperties.getOptionalLong(CONNECTOR_READER_READER_GROUP_EVENT_READ_TIMEOUT_INTERVAL);
    checkpointInitiateTimeoutInterval = descriptorProperties.getOptionalLong(CONNECTOR_READER_READER_GROUP_CHECKPOINT_INITIATE_TIMEOUT_INTERVAL);

    // instantiate the optional user-provided timestamp assigner reflectively
    final Optional<Class<AssignerWithTimeWindows>> assignerClass = descriptorProperties.getOptionalClass(
            CONNECTOR_READER_USER_TIMESTAMP_ASSIGNER, AssignerWithTimeWindows.class);
    if (assignerClass.isPresent()) {
        assignerWithTimeWindows = Optional.of((AssignerWithTimeWindows<Row>) InstantiationUtil.instantiate(assignerClass.get()));
    } else {
        assignerWithTimeWindows = Optional.empty();
    }

    if (!defaultScope.isPresent() && !rgScope.isPresent()) {
        throw new ValidationException("Must supply either " + CONNECTOR_READER_READER_GROUP_SCOPE + " or " + CONNECTOR_CONNECTION_CONFIG_DEFAULT_SCOPE);
    }

    final List<Map<String, String>> streamPropsList = descriptorProperties.getVariableIndexedProperties(
            CONNECTOR_READER_STREAM_INFO,
            Arrays.asList(CONNECTOR_READER_STREAM_INFO_STREAM));

    if (streamPropsList.isEmpty()) {
        throw new ValidationException(CONNECTOR_READER_STREAM_INFO + " cannot be empty");
    }

    // build one StreamWithBoundaries per configured stream, defaulting the scope
    // and the start/end stream cuts where they are not set
    int index = 0;
    for (Map<String, String> propsMap : streamPropsList) {
        if (!propsMap.containsKey(CONNECTOR_READER_STREAM_INFO_SCOPE) && !defaultScope.isPresent()) {
            throw new ValidationException("Must supply either " + CONNECTOR_READER_STREAM_INFO + "." + index + "." + CONNECTOR_READER_STREAM_INFO_SCOPE +
                    " or " + CONNECTOR_CONNECTION_CONFIG_DEFAULT_SCOPE);
        }
        String scopeName = (propsMap.containsKey(CONNECTOR_READER_STREAM_INFO_SCOPE)) ?
                descriptorProperties.getString(propsMap.get(CONNECTOR_READER_STREAM_INFO_SCOPE)) : defaultScope.get();

        if (!propsMap.containsKey(CONNECTOR_READER_STREAM_INFO_STREAM)) {
            throw new ValidationException(CONNECTOR_READER_STREAM_INFO + "." + index + "." + CONNECTOR_READER_STREAM_INFO_STREAM + " cannot be empty");
        }
        String streamName = descriptorProperties.getString(propsMap.get(CONNECTOR_READER_STREAM_INFO_STREAM));

        String startCut = StreamCut.UNBOUNDED.asText();
        if (propsMap.containsKey(CONNECTOR_READER_STREAM_INFO_START_STREAMCUT)) {
            startCut = descriptorProperties.getString(propsMap.get(CONNECTOR_READER_STREAM_INFO_START_STREAMCUT));
        }

        String endCut = StreamCut.UNBOUNDED.asText();
        if (propsMap.containsKey(CONNECTOR_READER_STREAM_INFO_END_STREAMCUT)) {
            endCut = descriptorProperties.getString(propsMap.get(CONNECTOR_READER_STREAM_INFO_END_STREAMCUT));
        }

        Stream stream = Stream.of(scopeName, streamName);
        readerStreams.add(new StreamWithBoundaries(stream, StreamCut.from(startCut), StreamCut.from(endCut)));
        index++;
    }
}
 
Example 11
Source File: CsvTableSinkFactoryBase.java    From flink with Apache License 2.0
protected CsvTableSink createTableSink(
		Boolean isStreaming,
		Map<String, String> properties) {

	DescriptorProperties params = new DescriptorProperties();
	params.putProperties(properties);

	// validate
	new FileSystemValidator().validate(params);
	new OldCsvValidator().validate(params);
	new SchemaValidator(isStreaming, false, false).validate(params);

	// build
	TableSchema tableSchema = TableSchemaUtils.getPhysicalSchema(params.getTableSchema(SCHEMA));

	// if a schema is defined, no matter derive schema is set or not, will use the defined schema
	final boolean hasSchema = params.hasPrefix(FORMAT_FIELDS);
	if (hasSchema) {
		TableSchema formatSchema = params.getTableSchema(FORMAT_FIELDS);
		if (!getFieldLogicalTypes(formatSchema).equals(getFieldLogicalTypes(tableSchema))) {
			throw new TableException(String.format(
					"Encodings that differ from the schema are not supported yet for" +
							" CsvTableSink, format schema is '%s', but table schema is '%s'.",
					formatSchema,
					tableSchema));
		}
	}

	// read the sink options; writeMode remains null when FORMAT_WRITE_MODE is not set
	String path = params.getString(CONNECTOR_PATH);
	String fieldDelimiter = params.getOptionalString(FORMAT_FIELD_DELIMITER).orElse(",");
	Optional<String> writeModeParm = params.getOptionalString(FORMAT_WRITE_MODE);
	FileSystem.WriteMode writeMode =
			(writeModeParm.isPresent()) ? FileSystem.WriteMode.valueOf(writeModeParm.get()) : null;
	int numFiles = params.getOptionalInt(FORMAT_NUM_FILES).orElse(-1);

	// bridge to java.sql.Timestamp/Time/Date
	DataType[] dataTypes = Arrays.stream(tableSchema.getFieldDataTypes())
		.map(dt -> {
			switch (dt.getLogicalType().getTypeRoot()) {
				case TIMESTAMP_WITHOUT_TIME_ZONE:
					return dt.bridgedTo(Timestamp.class);
				case TIME_WITHOUT_TIME_ZONE:
					return dt.bridgedTo(Time.class);
				case DATE:
					return dt.bridgedTo(Date.class);
				default:
					return dt;
			}
		})
		.toArray(DataType[]::new);

	return new CsvTableSink(
		path,
		fieldDelimiter,
		numFiles,
		writeMode,
		tableSchema.getFieldNames(),
		dataTypes);
}