Java Code Examples for org.apache.flink.table.descriptors.DescriptorProperties#containsKey()

The following examples show how to use org.apache.flink.table.descriptors.DescriptorProperties#containsKey(). They are taken from open source projects; the source file and originating project are noted above each example.
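Before the project examples, here is a minimal, self-contained sketch of the pattern they all share: load a plain property map into DescriptorProperties and branch on containsKey() before reading an optional key. The key names ("format.type", "format.json-schema") and the class below are illustrative only and are not taken from any of the projects listed.

import org.apache.flink.table.descriptors.DescriptorProperties;

import java.util.HashMap;
import java.util.Map;

public class ContainsKeySketch {

	public static void main(String[] args) {
		// hypothetical options; the keys are placeholders, not a real connector contract
		Map<String, String> options = new HashMap<>();
		options.put("format.type", "json");
		options.put("format.json-schema", "{\"type\": \"object\"}");

		// true = normalize option keys, as several examples below do
		DescriptorProperties properties = new DescriptorProperties(true);
		properties.putProperties(options);

		// guard the optional key with containsKey() before reading it
		if (properties.containsKey("format.json-schema")) {
			System.out.println(properties.getString("format.json-schema"));
		} else {
			System.out.println("no explicit schema configured");
		}
	}
}
 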
Example 1
Source File: FlinkCalciteCatalogReader.java    From flink with Apache License 2.0
/**
 * Checks whether the {@link CatalogTable} uses legacy connector source options.
 */
private static boolean isLegacySourceOptions(
		CatalogTable catalogTable,
		CatalogSchemaTable schemaTable) {
	// normalize option keys
	DescriptorProperties properties = new DescriptorProperties(true);
	properties.putProperties(catalogTable.getOptions());
	if (properties.containsKey(ConnectorDescriptorValidator.CONNECTOR_TYPE)) {
		return true;
	} else {
		// try to create legacy table source using the options,
		// some legacy factories use the new 'connector' key
		try {
			TableFactoryUtil.findAndCreateTableSource(
				schemaTable.getCatalog(),
				schemaTable.getTableIdentifier(),
				catalogTable,
				new Configuration());
			// success, then we will use the legacy factories
			return true;
		} catch (Throwable e) {
			// fail, then we will use new factories
			return false;
		}
	}
}
 
Example 2
Source File: JsonRowFormatFactory.java    From flink with Apache License 2.0
private TypeInformation<Row> createTypeInformation(DescriptorProperties descriptorProperties) {
	if (descriptorProperties.containsKey(JsonValidator.FORMAT_SCHEMA)) {
		return (RowTypeInfo) descriptorProperties.getType(JsonValidator.FORMAT_SCHEMA);
	} else if (descriptorProperties.containsKey(JsonValidator.FORMAT_JSON_SCHEMA)) {
		return JsonRowSchemaConverter.convert(descriptorProperties.getString(JsonValidator.FORMAT_JSON_SCHEMA));
	} else {
		return deriveSchema(descriptorProperties.asMap()).toRowType();
	}
}
 
Example 3
Source File: AvroRowFormatFactory.java    From flink with Apache License 2.0
@Override
public SerializationSchema<Row> createSerializationSchema(Map<String, String> properties) {
	final DescriptorProperties descriptorProperties = getValidatedProperties(properties);

	// create and configure
	if (descriptorProperties.containsKey(AvroValidator.FORMAT_RECORD_CLASS)) {
		return new AvroRowSerializationSchema(
			descriptorProperties.getClass(AvroValidator.FORMAT_RECORD_CLASS, SpecificRecord.class));
	} else {
		return new AvroRowSerializationSchema(descriptorProperties.getString(AvroValidator.FORMAT_AVRO_SCHEMA));
	}
}
 
Example 4
Source File: AvroRowFormatFactory.java    From flink with Apache License 2.0
@Override
public DeserializationSchema<Row> createDeserializationSchema(Map<String, String> properties) {
	final DescriptorProperties descriptorProperties = getValidatedProperties(properties);

	// create and configure
	if (descriptorProperties.containsKey(AvroValidator.FORMAT_RECORD_CLASS)) {
		return new AvroRowDeserializationSchema(
			descriptorProperties.getClass(AvroValidator.FORMAT_RECORD_CLASS, SpecificRecord.class));
	} else {
		return new AvroRowDeserializationSchema(descriptorProperties.getString(AvroValidator.FORMAT_AVRO_SCHEMA));
	}
}
 
Example 5
Source File: CsvRowFormatFactory.java    From flink with Apache License 2.0
private static TypeInformation<Row> createTypeInformation(DescriptorProperties descriptorProperties) {
	if (descriptorProperties.containsKey(CsvValidator.FORMAT_SCHEMA)) {
		return (RowTypeInfo) descriptorProperties.getType(CsvValidator.FORMAT_SCHEMA);
	} else {
		return deriveSchema(descriptorProperties.asMap()).toRowType();
	}
}
 
Example 6
Source File: ElasticsearchUpsertTableSinkFactoryBase.java    From flink with Apache License 2.0
private List<Host> getHosts(DescriptorProperties descriptorProperties) {
	if (descriptorProperties.containsKey(CONNECTOR_HOSTS)) {
		// hosts specified as a single connection string
		return validateAndParseHostsString(descriptorProperties);
	} else {
		// hosts specified as indexed properties with hostname/port/protocol per host
		final List<Map<String, String>> hosts = descriptorProperties.getFixedIndexedProperties(
			CONNECTOR_HOSTS,
			Arrays.asList(CONNECTOR_HOSTS_HOSTNAME, CONNECTOR_HOSTS_PORT, CONNECTOR_HOSTS_PROTOCOL));
		return hosts.stream()
			.map(host -> new Host(
				descriptorProperties.getString(host.get(CONNECTOR_HOSTS_HOSTNAME)),
				descriptorProperties.getInt(host.get(CONNECTOR_HOSTS_PORT)),
				descriptorProperties.getString(host.get(CONNECTOR_HOSTS_PROTOCOL))))
			.collect(Collectors.toList());
	}
}
 
Example 7
Source File: ConnectorConfigurations.java    From flink-connectors with Apache License 2.0
private void populateWriterConfig(DescriptorProperties descriptorProperties) {
    Optional<String> streamScope = descriptorProperties.getOptionalString(CONNECTOR_WRITER_SCOPE);

    if (!defaultScope.isPresent() && !streamScope.isPresent()) {
        throw new ValidationException("Must supply either " + CONNECTOR_WRITER_SCOPE + " or " + CONNECTOR_CONNECTION_CONFIG_DEFAULT_SCOPE);
    }

    final String scopeVal = streamScope.isPresent() ? streamScope.get() : defaultScope.get();

    if (!descriptorProperties.containsKey(CONNECTOR_WRITER_STREAM)) {
        throw new ValidationException("Missing " + CONNECTOR_WRITER_STREAM + " configuration.");
    }
    final String streamName = descriptorProperties.getString(CONNECTOR_WRITER_STREAM);
    writerStream = Stream.of(scopeVal, streamName);

    txnLeaseRenewalInterval = descriptorProperties.getOptionalLong(CONNECTOR_WRITER_TXN_LEASE_RENEWAL_INTERVAL);

    if (!descriptorProperties.containsKey(CONNECTOR_WRITER_ROUTING_KEY_FILED_NAME)) {
        throw new ValidationException("Missing " + CONNECTOR_WRITER_ROUTING_KEY_FILED_NAME + " configuration.");
    }
    watermark = descriptorProperties.getOptionalBoolean(CONNECTOR_WRITER_ENABLE_WATERMARK);
    routingKey = descriptorProperties.getString(CONNECTOR_WRITER_ROUTING_KEY_FILED_NAME);

    Optional<String> optionalMode = descriptorProperties.getOptionalString(CONNECTOR_WRITER_MODE);
    if (optionalMode.isPresent()) {
        String mode = optionalMode.get();
        if (mode.equals(CONNECTOR_WRITER_MODE_VALUE_ATLEAST_ONCE)) {
            writerMode = Optional.of(PravegaWriterMode.ATLEAST_ONCE);
        } else if (mode.equals(CONNECTOR_WRITER_MODE_VALUE_EXACTLY_ONCE)) {
            writerMode = Optional.of(PravegaWriterMode.EXACTLY_ONCE);
        } else {
            throw new ValidationException("Invalid writer mode " + mode + " passed. Supported values: ("
                    + CONNECTOR_WRITER_MODE_VALUE_ATLEAST_ONCE + " or " + CONNECTOR_WRITER_MODE_VALUE_EXACTLY_ONCE + ")");
        }
    }
}
 
Example 8
Source File: PravegaValidator.java    From flink-connectors with Apache License 2.0
@Override
public void validate(DescriptorProperties properties) {
    super.validate(properties);
    properties.validateValue(CONNECTOR_TYPE, CONNECTOR_TYPE_VALUE_PRAVEGA, false);
    validateConnectionConfig(properties);
    if (properties.containsKey(CONNECTOR_READER)) {
        validateReaderConfigurations(properties);
    }
    if (properties.containsKey(CONNECTOR_WRITER)) {
        validateWriterConfigurations(properties);
    }
}
 
Example 9
Source File: CsvRowFormatFactory.java    From Flink-CEPplus with Apache License 2.0
private static TypeInformation<Row> createTypeInformation(DescriptorProperties descriptorProperties) {
	if (descriptorProperties.containsKey(CsvValidator.FORMAT_SCHEMA)) {
		return (RowTypeInfo) descriptorProperties.getType(CsvValidator.FORMAT_SCHEMA);
	} else {
		return deriveSchema(descriptorProperties.asMap()).toRowType();
	}
}
 
Example 10
Source File: PulsarSchemaValidator.java    From pulsar-flink with Apache License 2.0
@Override
public void validate(DescriptorProperties properties) {
    Map<String, String> names = properties.getIndexedProperty(SCHEMA, SCHEMA_NAME);
    Map<String, String> types = properties.getIndexedProperty(SCHEMA, SCHEMA_TYPE);

    boolean proctimeFound = false;

    int fieldsCount = Math.max(names.size(), types.size());

    for (int i = 0; i < fieldsCount; i++) {
        properties.validateString(SCHEMA + "." + i + "." + SCHEMA_NAME, false, 1);
        properties.validateType(SCHEMA + "." + i + "." + SCHEMA_TYPE, false, false);
        properties.validateString(SCHEMA + "." + i + "." + SCHEMA_FROM, true, 1);
        // either proctime or rowtime
        String proctime = SCHEMA + "." + i + "." + SCHEMA_PROCTIME;
        String rowtime = SCHEMA + "." + i + "." + ROWTIME;

        if (properties.containsKey(proctime)) {
            if (!isStreamEnvironment) {
                throw new ValidationException(
                        "Property '" + proctime + "' is not allowed in a batch environment.");
            } else if (proctimeFound) {
                throw new ValidationException("A proctime attribute must only be defined once.");
            }
            // check proctime
            properties.validateBoolean(proctime, false);
            proctimeFound = properties.getBoolean(proctime);
            // no rowtime
            properties.validatePrefixExclusion(rowtime);
        } else if (properties.hasPrefix(rowtime)) {
            // check rowtime
            RowtimeValidator rowtimeValidator =
                    new RowtimeValidator(
                            supportsSourceTimestamps, supportsSourceWatermarks, SCHEMA + "." + i + ".");
            rowtimeValidator.validate(properties);
            // no proctime
            properties.validateExclusion(proctime);
        }
    }
}
 
Example 11
Source File: PulsarCatalogValidator.java    From pulsar-flink with Apache License 2.0
private void validateStartingOffsets(DescriptorProperties properties) {
    if (properties.containsKey(CATALOG_STARTUP_MODE)) {
        String v = properties.getString(CATALOG_STARTUP_MODE);
        if (!v.equals("earliest") && !v.equals("latest")) {
            throw new ValidationException(CATALOG_STARTUP_MODE + " should be either earliest or latest");
        }
    }
}
 
Example 12
Source File: JsonRowFormatFactory.java    From Flink-CEPplus with Apache License 2.0
private TypeInformation<Row> createTypeInformation(DescriptorProperties descriptorProperties) {
	if (descriptorProperties.containsKey(JsonValidator.FORMAT_SCHEMA)) {
		return (RowTypeInfo) descriptorProperties.getType(JsonValidator.FORMAT_SCHEMA);
	} else if (descriptorProperties.containsKey(JsonValidator.FORMAT_JSON_SCHEMA)) {
		return JsonRowSchemaConverter.convert(descriptorProperties.getString(JsonValidator.FORMAT_JSON_SCHEMA));
	} else {
		return deriveSchema(descriptorProperties.asMap()).toRowType();
	}
}
 
Example 13
Source File: AvroRowFormatFactory.java    From Flink-CEPplus with Apache License 2.0
@Override
public SerializationSchema<Row> createSerializationSchema(Map<String, String> properties) {
	final DescriptorProperties descriptorProperties = getValidatedProperties(properties);

	// create and configure
	if (descriptorProperties.containsKey(AvroValidator.FORMAT_RECORD_CLASS)) {
		return new AvroRowSerializationSchema(
			descriptorProperties.getClass(AvroValidator.FORMAT_RECORD_CLASS, SpecificRecord.class));
	} else {
		return new AvroRowSerializationSchema(descriptorProperties.getString(AvroValidator.FORMAT_AVRO_SCHEMA));
	}
}
 
Example 14
Source File: AvroRowFormatFactory.java    From Flink-CEPplus with Apache License 2.0
@Override
public DeserializationSchema<Row> createDeserializationSchema(Map<String, String> properties) {
	final DescriptorProperties descriptorProperties = getValidatedProperties(properties);

	// create and configure
	if (descriptorProperties.containsKey(AvroValidator.FORMAT_RECORD_CLASS)) {
		return new AvroRowDeserializationSchema(
			descriptorProperties.getClass(AvroValidator.FORMAT_RECORD_CLASS, SpecificRecord.class));
	} else {
		return new AvroRowDeserializationSchema(descriptorProperties.getString(AvroValidator.FORMAT_AVRO_SCHEMA));
	}
}