Java Code Examples for org.apache.flink.table.descriptors.DescriptorProperties#putProperties()

The following examples show how to use org.apache.flink.table.descriptors.DescriptorProperties#putProperties(). You can go to the original project or source file by following the links above each example, or check out the related API usage on the sidebar.
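Before the project examples, here is a minimal, self-contained sketch of the pattern they all share: collect plain string properties into a map, bulk-load them with putProperties(), then use the typed getters. It assumes the legacy flink-table descriptor API is on the classpath; the connector keys below are purely illustrative.

import java.util.HashMap;
import java.util.Map;

import org.apache.flink.table.descriptors.DescriptorProperties;

public class PutPropertiesSketch {

	public static void main(String[] args) {
		// plain string properties, e.g. collected from a descriptor or a YAML file
		Map<String, String> raw = new HashMap<>();
		raw.put("connector.type", "filesystem");    // illustrative keys, not tied to a real factory
		raw.put("connector.path", "/tmp/data.csv");

		// true = normalize keys, as most factories in the examples below do
		DescriptorProperties properties = new DescriptorProperties(true);
		properties.putProperties(raw);

		// typed read access after the bulk put
		properties.getOptionalString("connector.path").ifPresent(System.out::println);
		System.out.println(properties.containsKey("connector.type")); // true
	}
}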
Example 1
Source File: FlinkCalciteCatalogReader.java    From flink with Apache License 2.0
/**
 * Checks whether the {@link CatalogTable} uses legacy connector source options.
 */
private static boolean isLegacySourceOptions(
		CatalogTable catalogTable,
		CatalogSchemaTable schemaTable) {
	// normalize option keys
	DescriptorProperties properties = new DescriptorProperties(true);
	properties.putProperties(catalogTable.getOptions());
	if (properties.containsKey(ConnectorDescriptorValidator.CONNECTOR_TYPE)) {
		return true;
	} else {
		// try to create legacy table source using the options,
		// some legacy factories use the new 'connector' key
		try {
			TableFactoryUtil.findAndCreateTableSource(
				schemaTable.getCatalog(),
				schemaTable.getTableIdentifier(),
				catalogTable,
				new Configuration());
			// success, then we will use the legacy factories
			return true;
		} catch (Throwable e) {
			// fail, then we will use new factories
			return false;
		}
	}
}
 
Example 2
Source File: ExecutionEntry.java    From flink with Apache License 2.0
/**
 * Creates a new execution entry enriched with additional properties that are prefixed with
 * {@link Environment#EXECUTION_ENTRY}.
 */
public static ExecutionEntry enrich(ExecutionEntry execution, Map<String, String> prefixedProperties) {
	final Map<String, String> enrichedProperties = new HashMap<>(execution.asMap());

	prefixedProperties.forEach((k, v) -> {
		final String normalizedKey = k.toLowerCase();
		if (k.startsWith(EXECUTION_ENTRY + '.')) {
			enrichedProperties.put(normalizedKey.substring(EXECUTION_ENTRY.length() + 1), v);
		}
	});

	final DescriptorProperties properties = new DescriptorProperties(true);
	properties.putProperties(enrichedProperties);

	return new ExecutionEntry(properties);
}
 
Example 3
Source File: DeploymentEntry.java    From Flink-CEPplus with Apache License 2.0
/**
 * Creates a new deployment entry enriched with additional properties that are prefixed with
 * {@link Environment#DEPLOYMENT_ENTRY}.
 */
public static DeploymentEntry enrich(DeploymentEntry deployment, Map<String, String> prefixedProperties) {
	final Map<String, String> enrichedProperties = new HashMap<>(deployment.asMap());

	prefixedProperties.forEach((k, v) -> {
		final String normalizedKey = k.toLowerCase();
		if (k.startsWith(DEPLOYMENT_ENTRY + '.')) {
			enrichedProperties.put(normalizedKey.substring(DEPLOYMENT_ENTRY.length() + 1), v);
		}
	});

	final DescriptorProperties properties = new DescriptorProperties(true);
	properties.putProperties(enrichedProperties);

	return new DeploymentEntry(properties);
}
 
Example 4
Source File: CatalogTableImpTest.java    From flink with Apache License 2.0
@Test
public void testToProperties() {
	TableSchema schema = createTableSchema();
	Map<String, String> prop = createProperties();
	CatalogTable table = new CatalogTableImpl(
		schema,
		createPartitionKeys(),
		prop,
		TEST
	);

	DescriptorProperties descriptorProperties = new DescriptorProperties();
	descriptorProperties.putProperties(table.toProperties());

	assertEquals(schema, descriptorProperties.getTableSchema(Schema.SCHEMA));
}
 
Example 5
Source File: HBaseTableFactoryTest.java    From flink with Apache License 2.0
private DescriptorProperties createDescriptor(String[] columnNames, TypeInformation[] columnTypes) {
	TableSchema tableSchema = new TableSchema(columnNames, columnTypes);

	Map<String, String> tableProperties = new HashMap<>();
	tableProperties.put("connector.type", "hbase");
	tableProperties.put("connector.version", "1.4.3");
	tableProperties.put("connector.property-version", "1");
	tableProperties.put("connector.table-name", "testHBastTable");
	tableProperties.put("connector.zookeeper.quorum", "localhost:2181");
	tableProperties.put("connector.zookeeper.znode.parent", "/flink");
	tableProperties.put("connector.write.buffer-flush.max-size", "10mb");
	tableProperties.put("connector.write.buffer-flush.max-rows", "1000");
	tableProperties.put("connector.write.buffer-flush.interval", "10s");

	DescriptorProperties descriptorProperties = new DescriptorProperties(true);
	descriptorProperties.putTableSchema(SCHEMA, tableSchema);
	descriptorProperties.putProperties(tableProperties);
	return descriptorProperties;
}
 
Example 6
Source File: ConfigUtil.java    From flink with Apache License 2.0
/**
 * Normalizes key-value properties from Yaml in the normalized format of the Table API.
 */
public static DescriptorProperties normalizeYaml(Map<String, Object> yamlMap) {
	final Map<String, String> normalized = new HashMap<>();
	yamlMap.forEach((k, v) -> normalizeYamlObject(normalized, k, v));
	final DescriptorProperties properties = new DescriptorProperties(true);
	properties.putProperties(normalized);
	return properties;
}
 
Example 7
Source File: ExternalCatalogTable.java    From flink with Apache License 2.0
/**
 * Reads table statistics from the descriptor's properties.
 *
 * @deprecated This method exists for backwards-compatibility only.
 */
@Deprecated
public Optional<TableStats> getTableStats() {
	DescriptorProperties normalizedProps = new DescriptorProperties();
	// populate with this table's own descriptor properties before reading the statistics
	normalizedProps.putProperties(toProperties());
	Optional<Long> rowCount = normalizedProps.getOptionalLong(STATISTICS_ROW_COUNT);
	if (rowCount.isPresent()) {
		Map<String, ColumnStats> columnStats = readColumnStats(normalizedProps, STATISTICS_COLUMNS);
		return Optional.of(new TableStats(rowCount.get(), columnStats));
	} else {
		return Optional.empty();
	}
}
 
Example 8
Source File: CatalogTableImpl.java    From flink with Apache License 2.0
/**
 * Construct a {@link CatalogTableImpl} from complete properties that contains table schema.
 */
public static CatalogTableImpl fromProperties(Map<String, String> properties) {
	DescriptorProperties descriptorProperties = new DescriptorProperties();
	descriptorProperties.putProperties(properties);
	TableSchema tableSchema = descriptorProperties.getTableSchema(Schema.SCHEMA);
	List<String> partitionKeys = descriptorProperties.getPartitionKeys();
	return new CatalogTableImpl(
			tableSchema,
			partitionKeys,
			removeRedundant(properties, tableSchema, partitionKeys),
			""
	);
}
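For orientation, here is a minimal round-trip sketch of how fromProperties(...) pairs with CatalogTableImpl#toProperties() (the same round trip exercised in Example 4 above). The schema, option and comment are illustrative, and the sketch assumes the legacy flink-table-common classes are available.

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.catalog.CatalogTableImpl;

public class CatalogTableRoundTripSketch {

	public static void main(String[] args) {
		TableSchema schema = TableSchema.builder()
			.field("id", DataTypes.BIGINT())
			.field("name", DataTypes.STRING())
			.build();

		Map<String, String> options = new HashMap<>();
		options.put("connector.type", "filesystem"); // illustrative option

		CatalogTableImpl original = new CatalogTableImpl(
			schema, Collections.emptyList(), options, "demo table");

		// toProperties() flattens schema, partition keys and options into one map;
		// fromProperties() re-assembles an equivalent table from it
		CatalogTableImpl restored = CatalogTableImpl.fromProperties(original.toProperties());
		System.out.println(restored.getSchema().equals(schema)); // expected: true
	}
}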
 
Example 9
Source File: ConfigurationEntry.java    From flink with Apache License 2.0
/**
 * Merges two configuration entries. The properties of the first configuration entry might be
 * overwritten by the second one.
 */
public static ConfigurationEntry merge(ConfigurationEntry configuration1, ConfigurationEntry configuration2) {
	final Map<String, String> mergedProperties = new HashMap<>(configuration1.asMap());
	mergedProperties.putAll(configuration2.asMap());

	final DescriptorProperties properties = new DescriptorProperties(true);
	properties.putProperties(mergedProperties);

	return new ConfigurationEntry(properties);
}
 
Example 10
Source File: HBaseDescriptorTest.java    From flink with Apache License 2.0
@Test
public void testRequiredFields() {
	HBase hbaseDesc0 = new HBase();
	HBase hbaseDesc1 = new HBase()
		.version("1.4.3")
		.zookeeperQuorum("localhost:2181")
		.zookeeperNodeParent("/hbase/root"); // no table name
	HBase hbaseDesc2 = new HBase()
		.version("1.4.3")
		.tableName("ns:table")
		.zookeeperNodeParent("/hbase/root"); // no zookeeper quorum
	HBase hbaseDesc3 = new HBase()
		.tableName("ns:table")
		.zookeeperQuorum("localhost:2181"); // no version

	HBase[] testCases = new HBase[]{hbaseDesc0, hbaseDesc1, hbaseDesc2, hbaseDesc3};
	for (int i = 0; i < testCases.length; i++) {
		HBase hbaseDesc = testCases[i];
		DescriptorProperties properties = new DescriptorProperties();
		properties.putProperties(hbaseDesc.toProperties());
		boolean caughtExpectedException = false;
		try {
			validator().validate(properties);
		} catch (ValidationException e) {
			caughtExpectedException = true;
		}
		Assert.assertTrue("The case#" + i + " didn't get the expected error", caughtExpectedException);
	}
}
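By contrast, a descriptor with all three required fields set would be expected to pass validation. The following sketch only prints the flattened properties; it assumes the HBase descriptor class from the flink-hbase connector is on the classpath.

import org.apache.flink.table.descriptors.DescriptorProperties;
import org.apache.flink.table.descriptors.HBase;

public class HBaseDescriptorSketch {

	public static void main(String[] args) {
		// version, table name and ZooKeeper quorum are the fields
		// exercised by the negative cases in the test above
		HBase complete = new HBase()
			.version("1.4.3")
			.tableName("ns:table")
			.zookeeperQuorum("localhost:2181");

		DescriptorProperties properties = new DescriptorProperties();
		properties.putProperties(complete.toProperties());
		properties.asMap().forEach((k, v) -> System.out.println(k + " = " + v));
	}
}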
 
Example 11
Source File: ConfigurationEntry.java    From flink with Apache License 2.0
public static ConfigurationEntry enrich(ConfigurationEntry configuration, Map<String, String> prefixedProperties) {
	final Map<String, String> enrichedProperties = new HashMap<>(configuration.asMap());

	prefixedProperties.forEach((k, v) -> {
		final String normalizedKey = k.toLowerCase();
		if (k.startsWith(CONFIGURATION_ENTRY + ".")) {
			enrichedProperties.put(normalizedKey, v);
		}
	});

	final DescriptorProperties properties = new DescriptorProperties(true);
	properties.putProperties(enrichedProperties);

	return new ConfigurationEntry(properties);
}
 
Example 12
Source File: ExecutionEntry.java    From Flink-CEPplus with Apache License 2.0
/**
 * Merges two execution entries. The properties of the first execution entry might be
 * overwritten by the second one.
 */
public static ExecutionEntry merge(ExecutionEntry execution1, ExecutionEntry execution2) {
	final Map<String, String> mergedProperties = new HashMap<>(execution1.asMap());
	mergedProperties.putAll(execution2.asMap());

	final DescriptorProperties properties = new DescriptorProperties(true);
	properties.putProperties(mergedProperties);

	return new ExecutionEntry(properties);
}
 
Example 13
Source File: TestTableSourceFactoryBase.java    From flink with Apache License 2.0
@Override
public StreamTableSource<Row> createStreamTableSource(Map<String, String> properties) {
	final DescriptorProperties params = new DescriptorProperties(true);
	params.putProperties(properties);
	final Optional<String> proctime = SchemaValidator.deriveProctimeAttribute(params);
	final List<RowtimeAttributeDescriptor> rowtime = SchemaValidator.deriveRowtimeAttributes(params);
	return new TestTableSource(
		params.getTableSchema(SCHEMA),
		properties.get(testProperty),
		proctime.orElse(null),
		rowtime);
}
 
Example 14
Source File: TaxiRideTableSourceFactory.java    From flink-training-exercises with Apache License 2.0
private DescriptorProperties getValidatedProperties(Map<String, String> properties) {
	final DescriptorProperties descriptorProperties = new DescriptorProperties(true);
	descriptorProperties.putProperties(properties);

	new StreamTableDescriptorValidator(true, false, false).validate(descriptorProperties);
	new TaxiRidesValidator().validate(descriptorProperties);

	return descriptorProperties;
}
 
Example 15
Source File: CsvTableSourceFactoryBase.java    From flink with Apache License 2.0
protected CsvTableSource createTableSource(
		Boolean isStreaming,
		Map<String, String> properties) {

	DescriptorProperties params = new DescriptorProperties();
	params.putProperties(properties);

	// validate
	new FileSystemValidator().validate(params);
	new OldCsvValidator().validate(params);
	new SchemaValidator(isStreaming, false, false).validate(params);

	// build
	CsvTableSource.Builder csvTableSourceBuilder = new CsvTableSource.Builder();

	TableSchema formatSchema = params.getTableSchema(FORMAT_FIELDS);
	TableSchema tableSchema = params.getTableSchema(SCHEMA);

	// the CsvTableSource needs some rework first
	// for now the schema must be equal to the encoding
	if (!formatSchema.equals(tableSchema)) {
		throw new TableException(
				"Encodings that differ from the schema are not supported yet for CsvTableSources.");
	}

	params.getOptionalString(CONNECTOR_PATH).ifPresent(csvTableSourceBuilder::path);
	params.getOptionalString(FORMAT_FIELD_DELIMITER).ifPresent(csvTableSourceBuilder::fieldDelimiter);
	params.getOptionalString(FORMAT_LINE_DELIMITER).ifPresent(csvTableSourceBuilder::lineDelimiter);

	for (int i = 0; i < formatSchema.getFieldCount(); ++i) {
		csvTableSourceBuilder.field(formatSchema.getFieldNames()[i], formatSchema.getFieldTypes()[i]);
	}
	params.getOptionalCharacter(FORMAT_QUOTE_CHARACTER).ifPresent(csvTableSourceBuilder::quoteCharacter);
	params.getOptionalString(FORMAT_COMMENT_PREFIX).ifPresent(csvTableSourceBuilder::commentPrefix);
	params.getOptionalBoolean(FORMAT_IGNORE_FIRST_LINE).ifPresent(flag -> {
		if (flag) {
			csvTableSourceBuilder.ignoreFirstLine();
		}
	});

	params.getOptionalBoolean(FORMAT_IGNORE_PARSE_ERRORS).ifPresent(flag -> {
		if (flag) {
			csvTableSourceBuilder.ignoreParseErrors();
		}
	});

	return csvTableSourceBuilder.build();
}
 
Example 16
Source File: HiveCatalog.java    From flink with Apache License 2.0
private CatalogBaseTable instantiateCatalogTable(Table hiveTable, HiveConf hiveConf) {
	boolean isView = TableType.valueOf(hiveTable.getTableType()) == TableType.VIRTUAL_VIEW;

	// Table properties
	Map<String, String> properties = hiveTable.getParameters();

	boolean isGeneric = isGenericForGet(hiveTable.getParameters());

	TableSchema tableSchema;
	// Partition keys
	List<String> partitionKeys = new ArrayList<>();

	if (isGeneric) {
		properties = retrieveFlinkProperties(properties);
		DescriptorProperties tableSchemaProps = new DescriptorProperties(true);
		tableSchemaProps.putProperties(properties);
		ObjectPath tablePath = new ObjectPath(hiveTable.getDbName(), hiveTable.getTableName());
		tableSchema = tableSchemaProps.getOptionalTableSchema(Schema.SCHEMA)
				.orElseThrow(() -> new CatalogException("Failed to get table schema from properties for generic table " + tablePath));
		partitionKeys = tableSchemaProps.getPartitionKeys();
		// remove the schema from properties
		properties = CatalogTableImpl.removeRedundant(properties, tableSchema, partitionKeys);
	} else {
		properties.put(CatalogConfig.IS_GENERIC, String.valueOf(false));
		// Table schema
		List<FieldSchema> fields = getNonPartitionFields(hiveConf, hiveTable);
		Set<String> notNullColumns = client.getNotNullColumns(hiveConf, hiveTable.getDbName(), hiveTable.getTableName());
		Optional<UniqueConstraint> primaryKey = isView ? Optional.empty() :
				client.getPrimaryKey(hiveTable.getDbName(), hiveTable.getTableName(), HiveTableUtil.relyConstraint((byte) 0));
		// PK columns cannot be null
		primaryKey.ifPresent(pk -> notNullColumns.addAll(pk.getColumns()));
		tableSchema = HiveTableUtil.createTableSchema(fields, hiveTable.getPartitionKeys(), notNullColumns, primaryKey.orElse(null));

		if (!hiveTable.getPartitionKeys().isEmpty()) {
			partitionKeys = getFieldNames(hiveTable.getPartitionKeys());
		}
	}

	String comment = properties.remove(HiveCatalogConfig.COMMENT);

	if (isView) {
		return new CatalogViewImpl(
				hiveTable.getViewOriginalText(),
				hiveTable.getViewExpandedText(),
				tableSchema,
				properties,
				comment);
	} else {
		return new CatalogTableImpl(tableSchema, partitionKeys, properties, comment);
	}
}
 
Example 17
Source File: CsvTableSinkFactoryBase.java    From flink with Apache License 2.0
protected CsvTableSink createTableSink(
		Boolean isStreaming,
		Map<String, String> properties) {

	DescriptorProperties params = new DescriptorProperties();
	params.putProperties(properties);

	// validate
	new FileSystemValidator().validate(params);
	new OldCsvValidator().validate(params);
	new SchemaValidator(isStreaming, false, false).validate(params);

	// build
	TableSchema tableSchema = TableSchemaUtils.getPhysicalSchema(params.getTableSchema(SCHEMA));

	// if a schema is defined, no matter derive schema is set or not, will use the defined schema
	final boolean hasSchema = params.hasPrefix(FORMAT_FIELDS);
	if (hasSchema) {
		TableSchema formatSchema = params.getTableSchema(FORMAT_FIELDS);
		if (!getFieldLogicalTypes(formatSchema).equals(getFieldLogicalTypes(tableSchema))) {
			throw new TableException(String.format(
					"Encodings that differ from the schema are not supported yet for" +
							" CsvTableSink, format schema is '%s', but table schema is '%s'.",
					formatSchema,
					tableSchema));
		}
	}

	String path = params.getString(CONNECTOR_PATH);
	String fieldDelimiter = params.getOptionalString(FORMAT_FIELD_DELIMITER).orElse(",");
	Optional<String> writeModeParm = params.getOptionalString(FORMAT_WRITE_MODE);
	FileSystem.WriteMode writeMode =
			(writeModeParm.isPresent()) ? FileSystem.WriteMode.valueOf(writeModeParm.get()) : null;
	int numFiles = params.getOptionalInt(FORMAT_NUM_FILES).orElse(-1);

	// bridge to java.sql.Timestamp/Time/Date
	DataType[] dataTypes = Arrays.stream(tableSchema.getFieldDataTypes())
		.map(dt -> {
			switch (dt.getLogicalType().getTypeRoot()) {
				case TIMESTAMP_WITHOUT_TIME_ZONE:
					return dt.bridgedTo(Timestamp.class);
				case TIME_WITHOUT_TIME_ZONE:
					return dt.bridgedTo(Time.class);
				case DATE:
					return dt.bridgedTo(Date.class);
				default:
					return dt;
			}
		})
		.toArray(DataType[]::new);

	return new CsvTableSink(
		path,
		fieldDelimiter,
		numFiles,
		writeMode,
		tableSchema.getFieldNames(),
		dataTypes);
}
 
Example 18
Source File: CatalogTableBuilder.java    From flink with Apache License 2.0
@Override
protected Map<String, String> additionalProperties() {
	DescriptorProperties descriptorProperties = new DescriptorProperties();

	descriptorProperties.putBoolean(CatalogConfig.IS_GENERIC, isGeneric);

	descriptorProperties.putProperties(this.properties);

	return descriptorProperties.asMap();
}