Java Code Examples for org.apache.flink.table.descriptors.DescriptorProperties#putString()

The following examples show how to use org.apache.flink.table.descriptors.DescriptorProperties#putString(). They are taken from open source projects; the source file, project, and license are noted above each example.
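Before the individual examples, here is a minimal, self-contained sketch of the basic round trip. The key names are arbitrary placeholders rather than keys required by any particular connector: putString stores a non-null string value under a key, typed putters such as putLong stringify their values, and asMap() exposes the flattened string-to-string map.

import org.apache.flink.table.descriptors.DescriptorProperties;

import java.util.Map;

public class PutStringSketch {
	public static void main(String[] args) {
		DescriptorProperties properties = new DescriptorProperties();
		properties.putString("connector.type", "filesystem"); // plain string value
		properties.putLong("connector.property-version", 1);  // stored as the string "1"

		// All values are flattened to strings in the resulting map.
		Map<String, String> map = properties.asMap();
		System.out.println(map.get("connector.type"));                           // filesystem
		System.out.println(properties.getString("connector.property-version")); // 1
	}
}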
Example 1
Source File: DataGenTableSourceFactoryTest.java    From flink with Apache License 2.0
@Test
public void testLackEndForSequence() {
	try {
		DescriptorProperties descriptor = new DescriptorProperties();
		descriptor.putString(FactoryUtil.CONNECTOR.key(), "datagen");
		descriptor.putString(FIELDS + ".f0." + KIND, SEQUENCE);
		descriptor.putLong(FIELDS + ".f0." + START, 0);

		createSource(
				TableSchema.builder().field("f0", DataTypes.BIGINT()).build(),
				descriptor.asMap());
	} catch (ValidationException e) {
		Throwable cause = e.getCause();
		Assert.assertTrue(cause instanceof ValidationException);
		Assert.assertTrue(cause.getMessage().contains(
				"Could not find required property 'fields.f0.end' for sequence generator."));
		return;
	}
	Assert.fail("Should fail by ValidationException.");
}
 
Example 2
Source File: DataGenTableSourceFactoryTest.java    From flink with Apache License 2.0
@Test
public void testLackStartForSequence() {
	try {
		DescriptorProperties descriptor = new DescriptorProperties();
		descriptor.putString(FactoryUtil.CONNECTOR.key(), "datagen");
		descriptor.putString(FIELDS + ".f0." + KIND, SEQUENCE);
		descriptor.putLong(FIELDS + ".f0." + END, 100);

		createSource(
				TableSchema.builder().field("f0", DataTypes.BIGINT()).build(),
				descriptor.asMap());
	} catch (ValidationException e) {
		Throwable cause = e.getCause();
		Assert.assertTrue(cause instanceof ValidationException);
		Assert.assertTrue(cause.getMessage().contains(
				"Could not find required property 'fields.f0.start' for sequence generator."));
		return;
	}
	Assert.fail("Should fail by ValidationException.");
}
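For reference, the same sequence generator validates once both endpoints are supplied; a minimal sketch of the passing configuration, which is the shape Example 8 below builds:

DescriptorProperties descriptor = new DescriptorProperties();
descriptor.putString(FactoryUtil.CONNECTOR.key(), "datagen");
descriptor.putString(FIELDS + ".f0." + KIND, SEQUENCE); // fields.f0.kind = sequence
descriptor.putLong(FIELDS + ".f0." + START, 0);         // fields.f0.start
descriptor.putLong(FIELDS + ".f0." + END, 100);         // fields.f0.end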
 
Example 3
Source File: Pravega.java    From flink-connectors with Apache License 2.0
/**
 * Internal method for connector properties conversion.
 */
@Override
protected Map<String, String> toConnectorProperties() {
    final DescriptorProperties properties = new DescriptorProperties();
    properties.putString(CONNECTOR_VERSION, String.valueOf(CONNECTOR_VERSION_VALUE));

    if (tableSourceReaderBuilder == null && tableSinkWriterBuilder == null) {
        throw new ValidationException("Missing both reader and writer configurations.");
    }

    PravegaConfig pravegaConfig = tableSourceReaderBuilder != null ?
            tableSourceReaderBuilder.getPravegaConfig() : tableSinkWriterBuilder.getPravegaConfig();
    populateConnectionConfig(pravegaConfig, properties);

    boolean metrics = tableSourceReaderBuilder != null ?
            tableSourceReaderBuilder.isMetricsEnabled() : tableSinkWriterBuilder.isMetricsEnabled();
    properties.putBoolean(CONNECTOR_METRICS, metrics);

    if (tableSourceReaderBuilder != null) {
        populateReaderProperties(properties);
    }
    if (tableSinkWriterBuilder != null) {
        populateWriterProperties(properties);
    }
    return properties.asMap();
}
 
Example 4
Source File: Pravega.java    From flink-connectors with Apache License 2.0
/**
 * Populate all the writer configurations based on the values supplied through {@link TableSinkWriterBuilder}.
 * @param properties the supplied descriptor properties.
 */
private void populateWriterProperties(DescriptorProperties properties) {
    properties.putBoolean(CONNECTOR_WRITER, true);
    properties.putString(CONNECTOR_WRITER_SCOPE, tableSinkWriterBuilder.resolveStream().getScope());
    properties.putString(CONNECTOR_WRITER_STREAM, tableSinkWriterBuilder.resolveStream().getStreamName());

    if (tableSinkWriterBuilder.writerMode == PravegaWriterMode.ATLEAST_ONCE) {
        properties.putString(CONNECTOR_WRITER_MODE, CONNECTOR_WRITER_MODE_VALUE_ATLEAST_ONCE);
    } else if (tableSinkWriterBuilder.writerMode == PravegaWriterMode.EXACTLY_ONCE) {
        properties.putString(CONNECTOR_WRITER_MODE, CONNECTOR_WRITER_MODE_VALUE_EXACTLY_ONCE);
    }

    properties.putBoolean(CONNECTOR_WRITER_ENABLE_WATERMARK, tableSinkWriterBuilder.enableWatermark);
    properties.putLong(CONNECTOR_WRITER_TXN_LEASE_RENEWAL_INTERVAL, tableSinkWriterBuilder.txnLeaseRenewalPeriod.toMilliseconds());

    if (tableSinkWriterBuilder.routingKeyFieldName != null) {
        properties.putString(CONNECTOR_WRITER_ROUTING_KEY_FILED_NAME, tableSinkWriterBuilder.routingKeyFieldName);
    }
}
 
Example 5
Source File: PulsarCatalogDescriptor.java    From pulsar-flink with Apache License 2.0
@Override
protected Map<String, String> toCatalogProperties() {
    DescriptorProperties props = new DescriptorProperties();

    if (pulsarVersion != null) {
        props.putString(CATALOG_PULSAR_VERSION, pulsarVersion);
    }

    return props.asMap();
}
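Note the null guard: putString requires non-null arguments (both key and value are null-checked), so optional settings are only written when present. The Hive and taxi-data descriptors below follow the same pattern.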
 
Example 6
Source File: CsvTableSinkFactoryTest.java    From flink with Apache License 2.0
@Test
public void testAppendTableSourceFactory() {
	DescriptorProperties descriptor = createDescriptor(testingSchema);
	descriptor.putString("update-mode", "append");
	TableSource source = createTableSource(descriptor);

	assertTrue(source instanceof CsvTableSource);
	assertEquals(testingSchema.toRowDataType(), source.getProducedDataType());
}
 
Example 7
Source File: CsvTableSinkFactoryTest.java    From flink with Apache License 2.0
@Test
public void testAppendTableSinkFactory() {
	DescriptorProperties descriptor = createDescriptor(testingSchema);
	descriptor.putString("update-mode", "append");
	TableSink sink = createTableSink(descriptor);

	assertTrue(sink instanceof CsvTableSink);
	assertEquals(testingSchema.toRowDataType(), sink.getConsumedDataType());
}
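The createDescriptor helper used by these two tests is not shown in this excerpt. A hypothetical sketch of what such a helper might put, assuming the legacy filesystem/CSV descriptor keys (the actual keys are defined by the CSV factories, not confirmed here):

private static DescriptorProperties createDescriptor(TableSchema schema) {
	DescriptorProperties descriptor = new DescriptorProperties();
	descriptor.putString("connector.type", "filesystem");    // assumed key
	descriptor.putString("connector.path", "/tmp/data.csv"); // assumed key
	descriptor.putString("format.type", "csv");              // assumed key
	descriptor.putTableSchema("schema", schema);             // flattens into indexed schema.#.* entries
	return descriptor;
}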
 
Example 8
Source File: DataGenTableSourceFactoryTest.java    From flink with Apache License 2.0
@Test
public void testSequenceCheckpointRestore() throws Exception {
	DescriptorProperties descriptor = new DescriptorProperties();
	descriptor.putString(FactoryUtil.CONNECTOR.key(), "datagen");
	descriptor.putString(FIELDS + ".f0." + KIND, SEQUENCE);
	descriptor.putLong(FIELDS + ".f0." + START, 0);
	descriptor.putLong(FIELDS + ".f0." + END, 100);

	DynamicTableSource dynamicTableSource = createSource(
			TableSchema.builder().field("f0", DataTypes.BIGINT()).build(),
			descriptor.asMap());

	DataGenTableSource dataGenTableSource = (DataGenTableSource) dynamicTableSource;
	DataGeneratorSource<RowData> source = dataGenTableSource.createSource();

	final int initElement = 0;
	final int maxElement = 100;
	final Set<RowData> expectedOutput = new HashSet<>();
	for (long i = initElement; i <= maxElement; i++) {
		expectedOutput.add(GenericRowData.of(i));
	}
	DataGeneratorSourceTest.innerTestDataGenCheckpointRestore(
			() -> {
				try {
					return InstantiationUtil.clone(source);
				} catch (IOException | ClassNotFoundException e) {
					throw new RuntimeException(e);
				}
			}, expectedOutput);
}
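InstantiationUtil.clone round-trips the source through Java serialization, so every restore attempt in the checkpoint test starts from an identical, independent copy of the generator.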
 
Example 9
Source File: HiveModuleDescriptor.java    From flink with Apache License 2.0
@Override
protected Map<String, String> toModuleProperties() {
	final DescriptorProperties properties = new DescriptorProperties();

	if (hiveVersion != null) {
		properties.putString(MODULE_HIVE_VERSION, hiveVersion);
	}

	return properties.asMap();
}
 
Example 10
Source File: HiveCatalogDescriptor.java    From flink with Apache License 2.0
@Override
protected Map<String, String> toCatalogProperties() {
	final DescriptorProperties properties = new DescriptorProperties();

	if (hiveSitePath != null) {
		properties.putString(CATALOG_HIVE_CONF_DIR, hiveSitePath);
	}

	if (hiveVersion != null) {
		properties.putString(CATALOG_HIVE_VERSION, hiveVersion);
	}

	return properties.asMap();
}
 
Example 11
Source File: TaxiFares.java    From flink-training-exercises with Apache License 2.0
@Override
protected Map<String, String> toConnectorProperties() {
	DescriptorProperties properties = new DescriptorProperties();
	if (this.path != null) {
		properties.putString(CONNECTOR_PATH, this.path);
	}
	if (this.maxEventDelaySecs != null) {
		properties.putInt(CONNECTOR_MAX_EVENT_DELAY_SECS, this.maxEventDelaySecs);
	}
	if (this.servingSpeedFactor != null) {
		properties.putInt(CONNECTOR_SERVING_SPEED_FACTOR, this.servingSpeedFactor);
	}
	return properties.asMap();
}
 
Example 12
Source File: TaxiRides.java    From flink-training-exercises with Apache License 2.0
@Override
protected Map<String, String> toConnectorProperties() {
	DescriptorProperties properties = new DescriptorProperties();
	if (this.path != null) {
		properties.putString(CONNECTOR_PATH, this.path);
	}
	if (this.maxEventDelaySecs != null) {
		properties.putInt(CONNECTOR_MAX_EVENT_DELAY_SECS, this.maxEventDelaySecs);
	}
	if (this.servingSpeedFactor != null) {
		properties.putInt(CONNECTOR_SERVING_SPEED_FACTOR, this.servingSpeedFactor);
	}
	return properties.asMap();
}
 
Example 13
Source File: Pravega.java    From flink-connectors with Apache License 2.0
/**
 * Prepare Pravega connection-specific configurations.
 * @param pravegaConfig the Pravega configuration to use.
 * @param properties the supplied descriptor properties.
 */
private void populateConnectionConfig(PravegaConfig pravegaConfig, DescriptorProperties properties) {

    String controllerUri = pravegaConfig.getClientConfig().getControllerURI().toString();
    properties.putString(CONNECTOR_CONNECTION_CONFIG_CONTROLLER_URI, controllerUri);

    String defaultScope = pravegaConfig.getDefaultScope();
    if (defaultScope != null && defaultScope.length() != 0) {
        properties.putString(CONNECTOR_CONNECTION_CONFIG_DEFAULT_SCOPE, defaultScope);
    }

    if (pravegaConfig.getClientConfig().getCredentials() != null) {
        String authType = pravegaConfig.getClientConfig().getCredentials().getAuthenticationType();
        if (authType != null && authType.length() != 0) {
            properties.putString(CONNECTOR_CONNECTION_CONFIG_SECURITY_AUTH_TYPE, authType);
        }
        String authToken = pravegaConfig.getClientConfig().getCredentials().getAuthenticationToken();
        if (authToken != null && authToken.length() != 0) {
            properties.putString(CONNECTOR_CONNECTION_CONFIG_SECURITY_AUTH_TOKEN, authToken);
        }
    }

    boolean validateHostName = pravegaConfig.getClientConfig().isValidateHostName();
    properties.putBoolean(CONNECTOR_CONNECTION_CONFIG_SECURITY_VALIDATE_HOSTNAME, validateHostName);

    String trustStore = pravegaConfig.getClientConfig().getTrustStore();
    if (trustStore != null && trustStore.length() != 0) {
        properties.putString(CONNECTOR_CONNECTION_CONFIG_SECURITY_TRUST_STORE, trustStore);
    }
}
 
Example 14
Source File: KafkaJsonConnectorITest.java    From AthenaX with Apache License 2.0
private static ExternalCatalogTable mockExternalCatalogTable(String topic, String brokerAddress) {
  TableSchema schema = new TableSchema(new String[] {"foo"}, new TypeInformation[] {INT_TYPE_INFO});
  ConnectorDescriptor descriptor = new ConnectorDescriptor("kafka+json", 1, false) {
    @Override
    public void addConnectorProperties(DescriptorProperties properties) {
      properties.putTableSchema(TOPIC_SCHEMA_KEY, schema);
      properties.putString(TOPIC_NAME_KEY, topic);
      properties.putString(KAFKA_CONFIG_PREFIX + "." + ConsumerConfig.GROUP_ID_CONFIG, "foo");
      properties.putString(KAFKA_CONFIG_PREFIX + "." + ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerAddress);
      properties.putString(KAFKA_CONFIG_PREFIX + "." + ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    }
  };

  return new ExternalCatalogTable(descriptor, Option.empty(), Option.empty(), Option.empty(), Option.empty());
}
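The prefix pattern here namespaces raw Kafka client settings (group.id, bootstrap.servers, auto.offset.reset) under KAFKA_CONFIG_PREFIX inside the flat property map, presumably so the factory can strip the prefix and hand the remainder straight to the Kafka consumer configuration.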
 
Example 15
Source File: MockExternalCatalogTable.java    From AthenaX with Apache License 2.0
ExternalCatalogTable toExternalCatalogTable() {
  TableSchema tableSchema = new TableSchema(schema.getFieldNames(), schema.getFieldTypes());
  ConnectorDescriptor descriptor = new ConnectorDescriptor(CONNECTOR_TYPE, CONNECTOR_VERSION, false) {
    @Override
    public void addConnectorProperties(DescriptorProperties properties) {
      properties.putTableSchema(TABLE_SCHEMA_CONNECTOR_PROPERTY, tableSchema);
      properties.putString(TABLE_DATA_CONNECTOR_PROPERTY, serializeRows());
    }
  };
  return new ExternalCatalogTable(descriptor, Option.empty(), Option.empty(), Option.empty(), Option.empty());
}
 
Example 16
Source File: Pravega.java    From flink-connectors with Apache License 2.0
/**
 * Populate all the reader configurations based on the values supplied through {@link TableSourceReaderBuilder}.
 * @param properties the supplied descriptor properties.
 */
private void populateReaderProperties(DescriptorProperties properties) {
    properties.putBoolean(CONNECTOR_READER, true);

    // reader stream information
    AbstractStreamingReaderBuilder.ReaderGroupInfo readerGroupInfo = tableSourceReaderBuilder.buildReaderGroupInfo();

    Map<Stream, StreamCut> startStreamCuts = readerGroupInfo.getReaderGroupConfig().getStartingStreamCuts();
    Map<Stream, StreamCut> endStreamCuts = readerGroupInfo.getReaderGroupConfig().getEndingStreamCuts();
    final List<List<String>> values = new ArrayList<>();
    startStreamCuts.keySet().forEach(stream -> {
        StreamCut startStreamCut = startStreamCuts.get(stream);
        StreamCut endStreamCut = endStreamCuts.get(stream);
        values.add(Arrays.asList(stream.getScope(), stream.getStreamName(), startStreamCut.asText(), endStreamCut.asText()));
    });
    properties.putIndexedFixedProperties(
            CONNECTOR_READER_STREAM_INFO,
            Arrays.asList(
                    CONNECTOR_READER_STREAM_INFO_SCOPE,
                    CONNECTOR_READER_STREAM_INFO_STREAM,
                    CONNECTOR_READER_STREAM_INFO_START_STREAMCUT,
                    CONNECTOR_READER_STREAM_INFO_END_STREAMCUT),
            values);

    // reader group information
    String uid = Optional.ofNullable(tableSourceReaderBuilder.uid).orElseGet(tableSourceReaderBuilder::generateUid);
    properties.putString(CONNECTOR_READER_READER_GROUP_UID, uid);
    properties.putString(CONNECTOR_READER_READER_GROUP_SCOPE, readerGroupInfo.getReaderGroupScope());
    properties.putString(CONNECTOR_READER_READER_GROUP_NAME, readerGroupInfo.getReaderGroupName());
    properties.putLong(CONNECTOR_READER_READER_GROUP_REFRESH_INTERVAL, readerGroupInfo.getReaderGroupConfig().getGroupRefreshTimeMillis());
    properties.putLong(CONNECTOR_READER_READER_GROUP_EVENT_READ_TIMEOUT_INTERVAL, tableSourceReaderBuilder.eventReadTimeout.toMilliseconds());
    properties.putLong(CONNECTOR_READER_READER_GROUP_CHECKPOINT_INITIATE_TIMEOUT_INTERVAL, tableSourceReaderBuilder.checkpointInitiateTimeout.toMilliseconds());

    // user information
    if (tableSourceReaderBuilder.getAssignerWithTimeWindows() != null) {
        try {
            @SuppressWarnings("unchecked")
            AssignerWithTimeWindows<Row> assigner = (AssignerWithTimeWindows<Row>) tableSourceReaderBuilder
                    .getAssignerWithTimeWindows().deserializeValue(getClass().getClassLoader());

            properties.putClass(CONNECTOR_READER_USER_TIMESTAMP_ASSIGNER, assigner.getClass());
        } catch (Exception e) {
            throw new TableException(e.getMessage(), e);
        }
    }
}
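putIndexedFixedProperties, used above for the per-stream reader info, flattens a list of fixed-width rows into indexed keys of the form prefix.N.subkey. A small sketch of the resulting entries (the prefix and sub-keys here are illustrative):

DescriptorProperties properties = new DescriptorProperties();
properties.putIndexedFixedProperties(
        "connector.reader.stream-info",
        Arrays.asList("scope", "stream"),
        Arrays.asList(
                Arrays.asList("scopeA", "stream1"),
                Arrays.asList("scopeA", "stream2")));
// asMap() now contains:
//   connector.reader.stream-info.0.scope  -> scopeA
//   connector.reader.stream-info.0.stream -> stream1
//   connector.reader.stream-info.1.scope  -> scopeA
//   connector.reader.stream-info.1.stream -> stream2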
 
Example 17
Source File: ViewEntry.java    From flink with Apache License 2.0
public static ViewEntry create(String name, String query) {
	final DescriptorProperties properties = new DescriptorProperties(true);
	properties.putString(TABLES_QUERY, query);
	return new ViewEntry(name, properties);
}
 
Example 18
Source File: ViewEntry.java    From Flink-CEPplus with Apache License 2.0
public static ViewEntry create(String name, String query) {
	final DescriptorProperties properties = new DescriptorProperties(true);
	properties.putString(TABLES_QUERY, query);
	return new ViewEntry(name, properties);
}
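On the consuming side, the stored query can be read back with the matching getters; a minimal sketch, assuming the same TABLES_QUERY key used above:

String query = properties.getString(TABLES_QUERY);                        // throws ValidationException if absent
Optional<String> maybeQuery = properties.getOptionalString(TABLES_QUERY); // empty instead of throwing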
 