Java Code Examples for org.apache.flink.table.descriptors.DescriptorProperties#getOptionalLong()

The following examples show how to use org.apache.flink.table.descriptors.DescriptorProperties#getOptionalLong(). The source file, originating project, and license are noted above each example.
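Before the project examples, here is a minimal, self-contained sketch of the call itself: getOptionalLong(key) parses the value stored under key as a long and returns an Optional<Long> that is empty when the key is absent. The property keys below are made up for illustration only.

import java.util.Optional;

import org.apache.flink.table.descriptors.DescriptorProperties;

public class GetOptionalLongSketch {

	public static void main(String[] args) {
		final DescriptorProperties properties = new DescriptorProperties();
		// Illustrative key only; it does not come from a real connector.
		properties.putLong("connector.read.partition.lower-bound", 0L);

		// Present key: the Optional carries the parsed long value.
		Optional<Long> lowerBound = properties.getOptionalLong("connector.read.partition.lower-bound");
		System.out.println(lowerBound.orElse(-1L));      // prints 0

		// Absent key: an empty Optional is returned instead of an exception being thrown.
		Optional<Long> upperBound = properties.getOptionalLong("connector.read.partition.upper-bound");
		System.out.println(upperBound.isPresent());      // prints false
	}
}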
Example 1
Source File: JDBCTableSourceSinkFactory.java    From flink with Apache License 2.0
private JDBCReadOptions getJDBCReadOptions(DescriptorProperties descriptorProperties) {
	final Optional<String> partitionColumnName =
		descriptorProperties.getOptionalString(CONNECTOR_READ_PARTITION_COLUMN);
	final Optional<Long> partitionLower = descriptorProperties.getOptionalLong(CONNECTOR_READ_PARTITION_LOWER_BOUND);
	final Optional<Long> partitionUpper = descriptorProperties.getOptionalLong(CONNECTOR_READ_PARTITION_UPPER_BOUND);
	final Optional<Integer> numPartitions = descriptorProperties.getOptionalInt(CONNECTOR_READ_PARTITION_NUM);

	final JDBCReadOptions.Builder builder = JDBCReadOptions.builder();
	if (partitionColumnName.isPresent()) {
		builder.setPartitionColumnName(partitionColumnName.get());
		builder.setPartitionLowerBound(partitionLower.get());
		builder.setPartitionUpperBound(partitionUpper.get());
		builder.setNumPartitions(numPartitions.get());
	}
	descriptorProperties.getOptionalInt(CONNECTOR_READ_FETCH_SIZE).ifPresent(builder::setFetchSize);

	return builder.build();
}
 
Example 2
Source File: JdbcTableSourceSinkFactory.java    From flink with Apache License 2.0
private JdbcReadOptions getJdbcReadOptions(DescriptorProperties descriptorProperties) {
	final Optional<String> query = descriptorProperties.getOptionalString(CONNECTOR_READ_QUERY);
	final Optional<String> partitionColumnName =
		descriptorProperties.getOptionalString(CONNECTOR_READ_PARTITION_COLUMN);
	final Optional<Long> partitionLower = descriptorProperties.getOptionalLong(CONNECTOR_READ_PARTITION_LOWER_BOUND);
	final Optional<Long> partitionUpper = descriptorProperties.getOptionalLong(CONNECTOR_READ_PARTITION_UPPER_BOUND);
	final Optional<Integer> numPartitions = descriptorProperties.getOptionalInt(CONNECTOR_READ_PARTITION_NUM);

	final JdbcReadOptions.Builder builder = JdbcReadOptions.builder();
	if (query.isPresent()) {
		builder.setQuery(query.get());
	}
	if (partitionColumnName.isPresent()) {
		builder.setPartitionColumnName(partitionColumnName.get());
		builder.setPartitionLowerBound(partitionLower.get());
		builder.setPartitionUpperBound(partitionUpper.get());
		builder.setNumPartitions(numPartitions.get());
	}
	descriptorProperties.getOptionalInt(CONNECTOR_READ_FETCH_SIZE).ifPresent(builder::setFetchSize);

	return builder.build();
}
 
Example 3
Source File: ExternalCatalogTable.java    From flink with Apache License 2.0
/**
 * Reads table statistics from the descriptor's properties.
 *
 * @deprecated This method exists for backwards-compatibility only.
 */
@Deprecated
public Optional<TableStats> getTableStats() {
	DescriptorProperties normalizedProps = new DescriptorProperties();
	// Copy this table's own properties into the normalized set; the original snippet
	// passed normalizedProps into itself, which would leave the property set empty.
	// toProperties() is assumed here to expose the table's underlying key/value properties.
	normalizedProps.putProperties(toProperties());
	Optional<Long> rowCount = normalizedProps.getOptionalLong(STATISTICS_ROW_COUNT);
	if (rowCount.isPresent()) {
		Map<String, ColumnStats> columnStats = readColumnStats(normalizedProps, STATISTICS_COLUMNS);
		return Optional.of(new TableStats(rowCount.get(), columnStats));
	} else {
		return Optional.empty();
	}
}
 
Example 4
Source File: ConnectorConfigurations.java    From flink-connectors with Apache License 2.0
private void populateWriterConfig(DescriptorProperties descriptorProperties) {
    Optional<String> streamScope = descriptorProperties.getOptionalString(CONNECTOR_WRITER_SCOPE);

    if (!defaultScope.isPresent() && !streamScope.isPresent()) {
        throw new ValidationException("Must supply either " + CONNECTOR_WRITER_SCOPE + " or " + CONNECTOR_CONNECTION_CONFIG_DEFAULT_SCOPE);
    }

    final String scopeVal = streamScope.isPresent() ? streamScope.get() : defaultScope.get();

    if (!descriptorProperties.containsKey(CONNECTOR_WRITER_STREAM)) {
        throw new ValidationException("Missing " + CONNECTOR_WRITER_STREAM + " configuration.");
    }
    final String streamName = descriptorProperties.getString(CONNECTOR_WRITER_STREAM);
    writerStream = Stream.of(scopeVal, streamName);

    txnLeaseRenewalInterval = descriptorProperties.getOptionalLong(CONNECTOR_WRITER_TXN_LEASE_RENEWAL_INTERVAL);

    if (!descriptorProperties.containsKey(CONNECTOR_WRITER_ROUTING_KEY_FILED_NAME)) {
        throw new ValidationException("Missing " + CONNECTOR_WRITER_ROUTING_KEY_FILED_NAME + " configuration.");
    }
    watermark = descriptorProperties.getOptionalBoolean(CONNECTOR_WRITER_ENABLE_WATERMARK);
    routingKey = descriptorProperties.getString(CONNECTOR_WRITER_ROUTING_KEY_FILED_NAME);

    Optional<String> optionalMode = descriptorProperties.getOptionalString(CONNECTOR_WRITER_MODE);
    if (optionalMode.isPresent()) {
        String mode = optionalMode.get();
        if (mode.equals(CONNECTOR_WRITER_MODE_VALUE_ATLEAST_ONCE)) {
            writerMode = Optional.of(PravegaWriterMode.ATLEAST_ONCE);
        } else if (mode.equals(CONNECTOR_WRITER_MODE_VALUE_EXACTLY_ONCE)) {
            writerMode = Optional.of(PravegaWriterMode.EXACTLY_ONCE);
        } else {
            throw new ValidationException("Invalid writer mode " + mode + " passed. Supported values: ("
                    + CONNECTOR_WRITER_MODE_VALUE_ATLEAST_ONCE + " or " + CONNECTOR_WRITER_MODE_VALUE_EXACTLY_ONCE + ")");
        }
    }
}
 
Example 5
Source File: ConnectorConfigurations.java    From flink-connectors with Apache License 2.0
@SuppressWarnings("unchecked")
private void populateReaderConfig(DescriptorProperties descriptorProperties) {
    uid = descriptorProperties.getOptionalString(CONNECTOR_READER_READER_GROUP_UID);
    rgScope = descriptorProperties.getOptionalString(CONNECTOR_READER_READER_GROUP_SCOPE);
    rgName = descriptorProperties.getOptionalString(CONNECTOR_READER_READER_GROUP_NAME);
    refreshInterval = descriptorProperties.getOptionalLong(CONNECTOR_READER_READER_GROUP_REFRESH_INTERVAL);
    eventReadTimeoutInterval = descriptorProperties.getOptionalLong(CONNECTOR_READER_READER_GROUP_EVENT_READ_TIMEOUT_INTERVAL);
    checkpointInitiateTimeoutInterval = descriptorProperties.getOptionalLong(CONNECTOR_READER_READER_GROUP_CHECKPOINT_INITIATE_TIMEOUT_INTERVAL);

    final Optional<Class<AssignerWithTimeWindows>> assignerClass = descriptorProperties.getOptionalClass(
            CONNECTOR_READER_USER_TIMESTAMP_ASSIGNER, AssignerWithTimeWindows.class);
    if (assignerClass.isPresent()) {
        assignerWithTimeWindows = Optional.of((AssignerWithTimeWindows<Row>) InstantiationUtil.instantiate(assignerClass.get()));
    } else {
        assignerWithTimeWindows = Optional.empty();
    }

    if (!defaultScope.isPresent() && !rgScope.isPresent()) {
        throw new ValidationException("Must supply either " + CONNECTOR_READER_READER_GROUP_SCOPE + " or " + CONNECTOR_CONNECTION_CONFIG_DEFAULT_SCOPE);
    }

    final List<Map<String, String>> streamPropsList = descriptorProperties.getVariableIndexedProperties(
            CONNECTOR_READER_STREAM_INFO,
            Arrays.asList(CONNECTOR_READER_STREAM_INFO_STREAM));

    if (streamPropsList.isEmpty()) {
        throw new ValidationException(CONNECTOR_READER_STREAM_INFO + " cannot be empty");
    }

    int index = 0;
    for (Map<String, String> propsMap : streamPropsList) {
        if (!propsMap.containsKey(CONNECTOR_READER_STREAM_INFO_SCOPE) && !defaultScope.isPresent()) {
            throw new ValidationException("Must supply either " + CONNECTOR_READER_STREAM_INFO + "." + index + "." + CONNECTOR_READER_STREAM_INFO_SCOPE +
                    " or " + CONNECTOR_CONNECTION_CONFIG_DEFAULT_SCOPE);
        }
        String scopeName = (propsMap.containsKey(CONNECTOR_READER_STREAM_INFO_SCOPE)) ?
                descriptorProperties.getString(propsMap.get(CONNECTOR_READER_STREAM_INFO_SCOPE)) : defaultScope.get();

        if (!propsMap.containsKey(CONNECTOR_READER_STREAM_INFO_STREAM)) {
            throw new ValidationException(CONNECTOR_READER_STREAM_INFO + "." + index + "." +  CONNECTOR_READER_STREAM_INFO_STREAM + " cannot be empty");
        }
        String streamName = descriptorProperties.getString(propsMap.get(CONNECTOR_READER_STREAM_INFO_STREAM));

        String startCut = StreamCut.UNBOUNDED.asText();
        if (propsMap.containsKey(CONNECTOR_READER_STREAM_INFO_START_STREAMCUT)) {
            startCut = descriptorProperties.getString(propsMap.get(CONNECTOR_READER_STREAM_INFO_START_STREAMCUT));
        }

        String endCut = StreamCut.UNBOUNDED.asText();
        if (propsMap.containsKey(CONNECTOR_READER_STREAM_INFO_END_STREAMCUT)) {
            endCut = descriptorProperties.getString(propsMap.get(CONNECTOR_READER_STREAM_INFO_END_STREAMCUT));
        }

        Stream stream = Stream.of(scopeName, streamName);
        readerStreams.add(new StreamWithBoundaries(stream, StreamCut.from(startCut), StreamCut.from(endCut)));
        index++;
    }
}