Java Code Examples for org.apache.flink.table.descriptors.DescriptorProperties#putBoolean()

The following examples show how to use org.apache.flink.table.descriptors.DescriptorProperties#putBoolean(). Each example is taken from an open source project; the source file and project are noted above it.
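
Before the project examples, the snippet below is a minimal, self-contained sketch of the method itself. The property key connector.example-flag is made up for illustration; putBoolean(String, boolean) stores the value under the given key as its string form ("true"/"false"), and the boolean accessors read it back.

import org.apache.flink.table.descriptors.DescriptorProperties;

import java.util.Map;
import java.util.Optional;

public class PutBooleanSketch {

    public static void main(String[] args) {
        DescriptorProperties properties = new DescriptorProperties();

        // Store a flag under an illustrative key.
        properties.putBoolean("connector.example-flag", true);

        // getBoolean throws a ValidationException if the key is missing;
        // getOptionalBoolean returns an Optional instead.
        boolean flag = properties.getBoolean("connector.example-flag");
        Optional<Boolean> maybeFlag = properties.getOptionalBoolean("connector.example-flag");

        // The descriptor flattens to a plain string-to-string map,
        // where the flag appears as the string "true".
        Map<String, String> asMap = properties.asMap();
        System.out.println(flag + " / " + maybeFlag + " / " + asMap);
    }
}
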
Example 1
Source File: Pravega.java    From flink-connectors with Apache License 2.0
/**
 * Internal method for connector properties conversion.
 */
@Override
protected Map<String, String> toConnectorProperties() {
    final DescriptorProperties properties = new DescriptorProperties();
    properties.putString(CONNECTOR_VERSION, String.valueOf(CONNECTOR_VERSION_VALUE));

    if (tableSourceReaderBuilder == null && tableSinkWriterBuilder == null) {
        throw new ValidationException("Missing both reader and writer configurations.");
    }

    PravegaConfig pravegaConfig = tableSourceReaderBuilder != null ?
            tableSourceReaderBuilder.getPravegaConfig() : tableSinkWriterBuilder.getPravegaConfig();
    populateConnectionConfig(pravegaConfig, properties);

    boolean metrics = tableSourceReaderBuilder != null ?
            tableSourceReaderBuilder.isMetricsEnabled() : tableSinkWriterBuilder.isMetricsEnabled();
    properties.putBoolean(CONNECTOR_METRICS, metrics);

    if (tableSourceReaderBuilder != null) {
        populateReaderProperties(properties);
    }
    if (tableSinkWriterBuilder != null) {
        populateWriterProperties(properties);
    }
    return properties.asMap();
}
 
Example 2
Source File: Pravega.java    From flink-connectors with Apache License 2.0
/**
 * Populates all the writer configurations based on the values supplied through {@link TableSinkWriterBuilder}.
 * @param properties the supplied descriptor properties.
 */
private void populateWriterProperties(DescriptorProperties properties) {
    properties.putBoolean(CONNECTOR_WRITER, true);
    properties.putString(CONNECTOR_WRITER_SCOPE, tableSinkWriterBuilder.resolveStream().getScope());
    properties.putString(CONNECTOR_WRITER_STREAM, tableSinkWriterBuilder.resolveStream().getStreamName());

    if (tableSinkWriterBuilder.writerMode == PravegaWriterMode.ATLEAST_ONCE) {
        properties.putString(CONNECTOR_WRITER_MODE, CONNECTOR_WRITER_MODE_VALUE_ATLEAST_ONCE);
    } else if (tableSinkWriterBuilder.writerMode == PravegaWriterMode.EXACTLY_ONCE) {
        properties.putString(CONNECTOR_WRITER_MODE, CONNECTOR_WRITER_MODE_VALUE_EXACTLY_ONCE);
    }

    properties.putBoolean(CONNECTOR_WRITER_ENABLE_WATERMARK, tableSinkWriterBuilder.enableWatermark);
    properties.putLong(CONNECTOR_WRITER_TXN_LEASE_RENEWAL_INTERVAL, tableSinkWriterBuilder.txnLeaseRenewalPeriod.toMilliseconds());

    if (tableSinkWriterBuilder.routingKeyFieldName != null) {
        properties.putString(CONNECTOR_WRITER_ROUTING_KEY_FILED_NAME, tableSinkWriterBuilder.routingKeyFieldName);
    }
}
 
Example 3
Source File: CsvTableSinkFactoryTest.java    From flink with Apache License 2.0
private DescriptorProperties createDescriptor(TableSchema schema) {
	Map<String, String> properties = new HashMap<>();
	properties.put("connector.type", "filesystem");
	properties.put("connector.property-version", "1");
	properties.put("connector.path", "/path/to/csv");

	// format
	properties.put("format.type", "csv");
	properties.put("format.property-version", "1");
	properties.put("format.field-delimiter", ";");

	DescriptorProperties descriptor = new DescriptorProperties(true);
	descriptor.putProperties(properties);
	descriptor.putTableSchema(SCHEMA, schema);
	if (deriveSchema == TernaryBoolean.TRUE) {
		descriptor.putBoolean("format.derive-schema", true);
	} else if (deriveSchema == TernaryBoolean.FALSE) {
		descriptor.putTableSchema(FORMAT_FIELDS, testingSchema);
	} // nothing to put for UNDEFINED
	return descriptor;
}
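
For reference, a consuming factory can read such a flag back through the boolean accessors of DescriptorProperties. The lines below are only a sketch of that read path, not code from CsvTableSinkFactory; the orElse(false) default is an assumption for illustration.

	// Read-back sketch (illustrative): validate the optional flag, then
	// fall back to false when "format.derive-schema" is absent.
	DescriptorProperties descriptor = createDescriptor(schema);   // the method above
	descriptor.validateBoolean("format.derive-schema", true);     // true = key is optional
	boolean deriveSchema = descriptor.getOptionalBoolean("format.derive-schema").orElse(false);
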
 
Example 4
Source File: StreamPlannerFactory.java    From flink with Apache License 2.0
@Override
public Map<String, String> requiredContext() {
	DescriptorProperties properties = new DescriptorProperties();

	properties.putBoolean(EnvironmentSettings.STREAMING_MODE, true);
	return properties.asMap();
}
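
Because putBoolean stores the flag as a string, the context produced above survives the round trip through a plain map. The lines below only illustrate that round trip; they are not Flink's actual factory-discovery code.

	// Rebuild a DescriptorProperties from the returned map and query the flag again.
	Map<String, String> context = requiredContext();                 // the method above
	DescriptorProperties rebuilt = new DescriptorProperties(true);   // true = normalize keys
	rebuilt.putProperties(context);
	boolean streamingMode = rebuilt.getBoolean(EnvironmentSettings.STREAMING_MODE);   // true
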
 
Example 5
Source File: Pravega.java    From flink-connectors with Apache License 2.0
/**
 * Prepares the Pravega connection-specific configurations.
 * @param pravegaConfig the Pravega configuration to use.
 * @param properties the supplied descriptor properties.
 */
private void populateConnectionConfig(PravegaConfig pravegaConfig, DescriptorProperties properties) {

    String controllerUri = pravegaConfig.getClientConfig().getControllerURI().toString();
    properties.putString(CONNECTOR_CONNECTION_CONFIG_CONTROLLER_URI, controllerUri);

    String defaultScope = pravegaConfig.getDefaultScope();
    if (defaultScope != null && defaultScope.length() != 0) {
        properties.putString(CONNECTOR_CONNECTION_CONFIG_DEFAULT_SCOPE, defaultScope);
    }

    if (pravegaConfig.getClientConfig().getCredentials() != null) {
        String authType = pravegaConfig.getClientConfig().getCredentials().getAuthenticationType();
        if (authType != null && authType.length() != 0) {
            properties.putString(CONNECTOR_CONNECTION_CONFIG_SECURITY_AUTH_TYPE, authType);
        }
        String authToken = pravegaConfig.getClientConfig().getCredentials().getAuthenticationToken();
        if (authToken != null && authToken.length() != 0) {
            properties.putString(CONNECTOR_CONNECTION_CONFIG_SECURITY_AUTH_TOKEN, authToken);
        }
    }

    boolean validateHostName = pravegaConfig.getClientConfig().isValidateHostName();
    properties.putBoolean(CONNECTOR_CONNECTION_CONFIG_SECURITY_VALIDATE_HOSTNAME, validateHostName);

    String trustStore = pravegaConfig.getClientConfig().getTrustStore();
    if (trustStore != null && trustStore.length() != 0) {
        properties.putString(CONNECTOR_CONNECTION_CONFIG_SECURITY_TRUST_STORE, trustStore);
    }
}
 
Example 6
Source File: StreamExecutorFactory.java    From flink with Apache License 2.0
@Override
public Map<String, String> requiredContext() {
	DescriptorProperties properties = new DescriptorProperties();
	properties.putBoolean(EnvironmentSettings.STREAMING_MODE, true);
	return properties.asMap();
}
 
Example 7
Source File: Pravega.java    From flink-connectors with Apache License 2.0
/**
 * Populates all the reader configurations based on the values supplied through {@link TableSourceReaderBuilder}.
 * @param properties the supplied descriptor properties.
 */
private void populateReaderProperties(DescriptorProperties properties) {
    properties.putBoolean(CONNECTOR_READER, true);

    // reader stream information
    AbstractStreamingReaderBuilder.ReaderGroupInfo readerGroupInfo = tableSourceReaderBuilder.buildReaderGroupInfo();

    Map<Stream, StreamCut> startStreamCuts = readerGroupInfo.getReaderGroupConfig().getStartingStreamCuts();
    Map<Stream, StreamCut> endStreamCuts = readerGroupInfo.getReaderGroupConfig().getEndingStreamCuts();
    final List<List<String>> values = new ArrayList<>();
    startStreamCuts.keySet().stream().forEach(stream -> {
        StreamCut startStreamCut = startStreamCuts.get(stream);
        StreamCut endStreamCut = endStreamCuts.get(stream);
        values.add(Arrays.asList(stream.getScope(), stream.getStreamName(), startStreamCut.asText(), endStreamCut.asText()));
    });
    properties.putIndexedFixedProperties(
            CONNECTOR_READER_STREAM_INFO,
            Arrays.asList(
                    CONNECTOR_READER_STREAM_INFO_SCOPE,
                    CONNECTOR_READER_STREAM_INFO_STREAM,
                    CONNECTOR_READER_STREAM_INFO_START_STREAMCUT,
                    CONNECTOR_READER_STREAM_INFO_END_STREAMCUT
            ),
            values
    );

    // reader group information
    String uid = Optional.ofNullable(tableSourceReaderBuilder.uid).orElseGet(tableSourceReaderBuilder::generateUid);
    properties.putString(CONNECTOR_READER_READER_GROUP_UID, uid);
    properties.putString(CONNECTOR_READER_READER_GROUP_SCOPE, readerGroupInfo.getReaderGroupScope());
    properties.putString(CONNECTOR_READER_READER_GROUP_NAME, readerGroupInfo.getReaderGroupName());
    properties.putLong(CONNECTOR_READER_READER_GROUP_REFRESH_INTERVAL, readerGroupInfo.getReaderGroupConfig().getGroupRefreshTimeMillis());
    properties.putLong(CONNECTOR_READER_READER_GROUP_EVENT_READ_TIMEOUT_INTERVAL, tableSourceReaderBuilder.eventReadTimeout.toMilliseconds());
    properties.putLong(CONNECTOR_READER_READER_GROUP_CHECKPOINT_INITIATE_TIMEOUT_INTERVAL, tableSourceReaderBuilder.checkpointInitiateTimeout.toMilliseconds());

    // user information
    if (tableSourceReaderBuilder.getAssignerWithTimeWindows() != null) {
        try {
            @SuppressWarnings("unchecked")
            AssignerWithTimeWindows<Row> assigner = (AssignerWithTimeWindows<Row>) tableSourceReaderBuilder
                    .getAssignerWithTimeWindows().deserializeValue(getClass().getClassLoader());

            properties.putClass(CONNECTOR_READER_USER_TIMESTAMP_ASSIGNER, assigner.getClass());
        } catch (Exception e) {
            throw new TableException(e.getMessage());
        }
    }
}
 
Example 8
Source File: CatalogTableBuilder.java    From flink with Apache License 2.0
@Override
protected Map<String, String> additionalProperties() {
	DescriptorProperties descriptorProperties = new DescriptorProperties();

	descriptorProperties.putBoolean(CatalogConfig.IS_GENERIC, isGeneric);

	descriptorProperties.putProperties(this.properties);

	return descriptorProperties.asMap();
}
 