Java Code Examples for org.apache.flink.table.api.Types

The following examples show how to use org.apache.flink.table.api.Types. They are extracted from open source projects; the links above each example point to the original project and source file.
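
Before the project-specific examples, here is a minimal, self-contained sketch of the class in use. It assumes a Flink 1.x dependency on the legacy Table API; the class name TypesQuickStart is ours and not taken from any of the projects below. Each static factory method returns the TypeInformation for one SQL type, and Types.ROW combines field names and field types into a row type:

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.api.Types;
import org.apache.flink.types.Row;

public class TypesQuickStart {

	public static void main(String[] args) {
		// A composite row type built from parallel arrays of field names and field types.
		TypeInformation<Row> rowType = Types.ROW(
			new String[]{"id", "name", "ts"},
			new TypeInformation[]{Types.LONG(), Types.STRING(), Types.SQL_TIMESTAMP()});

		// The same factory methods are commonly used to declare a TableSchema.
		TableSchema schema = TableSchema.builder()
			.field("id", Types.LONG())
			.field("name", Types.STRING())
			.field("ts", Types.SQL_TIMESTAMP())
			.build();

		System.out.println(rowType); // prints something like: Row(id: Long, name: String, ts: Timestamp)
		System.out.println(schema);
	}
}

Note that later Flink releases deprecate this Types class in favor of DataTypes; the examples on this page reflect the older 1.x Table API.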
Example 1
Source Project: Flink-CEPplus   Source File: JsonTest.java    License: Apache License 2.0
@Override
public List<Descriptor> descriptors() {
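	// The JSON format can take a raw JSON-schema string, an explicit Types.ROW schema, or derive its schema from the table schema.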
	final Descriptor desc1 = new Json().jsonSchema("test");

	final Descriptor desc2 = new Json().jsonSchema(JSON_SCHEMA).failOnMissingField(true);

	final Descriptor desc3 = new Json()
		.schema(
			Types.ROW(
				new String[]{"test1", "test2"},
				new TypeInformation[]{Types.STRING(), Types.SQL_TIMESTAMP()}))
		.failOnMissingField(true);

	final Descriptor desc4 = new Json().deriveSchema();

	return Arrays.asList(desc1, desc2, desc3, desc4);
}
 
Example 2
Source Project: flink   Source File: JsonTest.java    License: Apache License 2.0
@Override
public List<Descriptor> descriptors() {
	final Descriptor desc1 = new Json().jsonSchema("test");

	final Descriptor desc2 = new Json().jsonSchema(JSON_SCHEMA).failOnMissingField(true);

	final Descriptor desc3 = new Json()
		.schema(
			Types.ROW(
				new String[]{"test1", "test2"},
				new TypeInformation[]{Types.STRING(), Types.SQL_TIMESTAMP()}))
		.failOnMissingField(true);

	final Descriptor desc4 = new Json().deriveSchema();

	return Arrays.asList(desc1, desc2, desc3, desc4);
}
 
Example 3
Source Project: sylph   Source File: JoinTest.java    License: Apache License 2.0
@Before
public void init()
{
    StreamExecutionEnvironment execEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    execEnv.setParallelism(4);
    execEnv.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime);
    tableEnv = (StreamTableEnvironmentImpl) StreamTableEnvironment.create(execEnv);

    tableEnv.registerFunction("from_unixtime", new TimeUtil.FromUnixTime());

    // --- create the stream source
    TypeInformation[] fieldTypes = {Types.STRING(), Types.STRING(), Types.LONG()};
    String[] fieldNames = {"topic", "user_id", "time"};
    RowTypeInfo rowTypeInfo = new RowTypeInfo(fieldTypes, fieldNames);
    DataStream<Row> dataSource = execEnv.fromCollection(new ArrayList<>(), rowTypeInfo);

    tableEnv.registerTableSource("tb1", new SylphTableSource(rowTypeInfo, dataSource));
    tableEnv.registerTableSource("tb0", new SylphTableSource(rowTypeInfo, dataSource));

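    // Parse the DDL of the dimension table that the stream will be joined against.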
    final AntlrSqlParser sqlParser = new AntlrSqlParser();
    this.dimTable = (CreateTable) sqlParser.createStatement(
            "create batch table users(id string, name string, city string) with(type = '"
                    + JoinOperator.class.getName() + "')");
}
 
Example 4
Source Project: Alink   Source File: GlmModelMapper.java    License: Apache License 2.0
/**
 * @return the output table schema.
 */
@Override
public TableSchema getOutputSchema() {
    String predResultColName = params.get(GlmPredictParams.PREDICTION_COL);
    String linkPredResultColName = params.get(GlmPredictParams.LINK_PRED_RESULT_COL);

    String[] colNames;
    TypeInformation[] colTypes;

    TableSchema dataSchema = getDataSchema();
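    // Append the prediction column; also append the link prediction column when one is configured.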
    if (linkPredResultColName == null || linkPredResultColName.isEmpty()) {
        colNames = ArrayUtils.add(dataSchema.getFieldNames(), predResultColName);
        colTypes = ArrayUtils.add(dataSchema.getFieldTypes(), Types.DOUBLE());
    } else {
        colNames = ArrayUtils.addAll(dataSchema.getFieldNames(),
            new String[]{predResultColName, linkPredResultColName});
        colTypes = ArrayUtils.addAll(dataSchema.getFieldTypes(),
            new TypeInformation[]{Types.DOUBLE(), Types.DOUBLE()});
    }
    return new TableSchema(colNames, colTypes);
}
 
Example 5
Source Project: Alink   Source File: Word2VecTest.java    License: Apache License 2.0
@Test
public void train() throws Exception {
	TableSchema schema = new TableSchema(
		new String[] {"docid", "content"},
		new TypeInformation <?>[] {Types.LONG(), Types.STRING()}
	);
	List <Row> rows = new ArrayList <>();
	rows.add(Row.of(0L, "老王 是 我们 团队 里 最胖 的"));
	rows.add(Row.of(1L, "老黄 是 第二 胖 的"));
	rows.add(Row.of(2L, "胖"));
	rows.add(Row.of(3L, "胖 胖 胖"));

	MemSourceBatchOp source = new MemSourceBatchOp(rows, schema);

	Word2Vec word2Vec = new Word2Vec()
		.setSelectedCol("content")
		.setOutputCol("output")
		.setMinCount(1);

	List<Row> result = word2Vec.fit(source).transform(source).collect();

	Assert.assertEquals(rows.size(), result.size());
}
 
Example 6
Source Project: Alink   Source File: FeatureHasherMapperTest.java    License: Apache License 2.0
@Test
public void test1() throws Exception {
    TableSchema schema = new TableSchema(new String[] {"double", "bool", "number", "str"},
        new TypeInformation<?>[] {Types.DOUBLE(), Types.BOOLEAN(), Types.STRING(), Types.STRING()});

    Params params = new Params()
        .set(FeatureHasherParams.SELECTED_COLS, new String[] {"double", "bool", "number", "str"})
        .set(FeatureHasherParams.OUTPUT_COL, "output")
        .set(FeatureHasherParams.RESERVED_COLS, new String[] {});

    FeatureHasherMapper mapper = new FeatureHasherMapper(schema, params);

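    // The default hash space here has 262144 (2^18) buckets; numeric columns contribute their value, string and boolean columns contribute 1.0.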
    assertEquals(mapper.map(Row.of(1.1, true, "2", "A")).getField(0),
        new SparseVector(262144, new int[]{62393, 85133, 120275, 214318}, new double[]{1.0, 1.0, 1.0, 1.1}));
    assertEquals(mapper.map(Row.of(2.1, true, "1", "A")).getField(0),
        new SparseVector(262144, new int[]{76287, 85133, 120275, 214318}, new double[]{1.0, 1.0, 1.0, 2.1}));
    assertEquals(mapper.getOutputSchema(),
        new TableSchema(new String[] {"output"}, new TypeInformation<?>[] {VectorTypes.VECTOR})
    );
}
 
Example 7
Source Project: Alink   Source File: FeatureHasherMapperTest.java    License: Apache License 2.0
@Test
public void test2() throws Exception {
    TableSchema schema = new TableSchema(new String[] {"double", "bool", "number", "str"},
        new TypeInformation<?>[] {Types.DOUBLE(), Types.BOOLEAN(), Types.STRING(), Types.STRING()});

    Params params = new Params()
        .set(FeatureHasherParams.SELECTED_COLS, new String[] {"double", "bool", "number", "str"})
        .set(FeatureHasherParams.OUTPUT_COL, "output")
        .set(FeatureHasherParams.NUM_FEATURES, 10);

    FeatureHasherMapper mapper = new FeatureHasherMapper(schema, params);

    assertEquals(mapper.map(Row.of(1.1, true, "2", "A")).getField(4),
        new SparseVector(10, new int[]{5, 8, 9}, new double[]{2.0, 1.1, 1.0}));
    assertEquals(mapper.map(Row.of(2.1, true, "1", "B")).getField(4),
        new SparseVector(10, new int[]{1, 5, 6, 8}, new double[]{1.0, 1.0, 1.0, 2.1}));
    assertEquals(mapper.getOutputSchema(),
        new TableSchema(new String[] {"double", "bool", "number", "str", "output"},
            new TypeInformation<?>[] {Types.DOUBLE(), Types.BOOLEAN(), Types.STRING(), Types.STRING(),
                VectorTypes.VECTOR}));
}
 
Example 8
Source Project: Alink   Source File: FeatureHasherMapperTest.java    License: Apache License 2.0
@Test
public void test3() throws Exception {
    TableSchema schema = new TableSchema(new String[] {"double", "bool", "number", "str"},
        new TypeInformation<?>[] {Types.DOUBLE(), Types.BOOLEAN(), Types.STRING(), Types.STRING()});

    Params params = new Params()
        .set(FeatureHasherParams.SELECTED_COLS, new String[] {"double", "bool", "number", "str"})
        .set(FeatureHasherParams.OUTPUT_COL, "output")
        .set(FeatureHasherParams.NUM_FEATURES, 10)
        .set(FeatureHasherParams.CATEGORICAL_COLS, new String[] {"double"});

    FeatureHasherMapper mapper = new FeatureHasherMapper(schema, params);

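    // Declaring "double" categorical makes its value select a hash bucket (contributing 1.0) rather than supply the entry value.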
    assertEquals(mapper.map(Row.of(1.1, true, "2", "A")).getField(4),
        new SparseVector(10, new int[]{0, 5, 9}, new double[]{1.0, 2.0, 1.0}));
    assertEquals(mapper.map(Row.of(2.1, true, "1", "B")).getField(4),
        new SparseVector(10, new int[]{1, 5, 6}, new double[]{2.0, 1.0, 1.0}));
}
 
Example 9
Source Project: Alink   Source File: DCTMapperTest.java    License: Apache License 2.0
@Test
public void test() throws Exception {
	TableSchema schema = new TableSchema(new String[] {"vec"}, new TypeInformation <?>[] {Types.STRING()});

	DCTMapper dctMapper = new DCTMapper(schema, new Params().set(DCTParams.SELECTED_COL, "vec"));

	DCTMapper inverseDCTMapper = new DCTMapper(schema,
		new Params().set(DCTParams.SELECTED_COL, "vec").set(DCTParams.INVERSE, true));

	String[] vectors = new String[] {
		"1.0 2.0 3.0 4.0 5.0",
		"1.0 2.0 1.0 2.0",
		"1.0 100000.0 -5000.0 0.1 0.0000005"
	};

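	// A forward DCT followed by the inverse DCT should reconstruct each vector up to numerical error.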
	for (String vector : vectors) {
		assertTrue(
			VectorUtil.parseDense((String) inverseDCTMapper.map(dctMapper.map(Row.of(vector))).getField(0))
				.minus(VectorUtil.parseDense(vector))
				.normL1() < 1e-10
		);
	}
}
 
Example 10
Source Project: Alink   Source File: LinearModelMapperTest.java    License: Apache License 2.0
@Test
public void test1() throws Exception {
	TableSchema dataSchema = new TableSchema(
		new String[] {"f0", "f1", "f2", "f3"},
		new TypeInformation <?>[] {Types.DOUBLE(), Types.DOUBLE(), Types.DOUBLE(), Types.DOUBLE()}
	);
	Params params = new Params()
		.set(LogisticRegressionPredictParams.PREDICTION_COL, "pred")
		.set(LogisticRegressionPredictParams.RESERVED_COLS, new String[] {});

	LinearModelMapper mapper = new LinearModelMapper(modelSchema, dataSchema, params);
	mapper.loadModel(model);

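	// RESERVED_COLS is empty, so the output contains only the prediction column (field 0).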
	assertEquals(mapper.map(Row.of(1.0, 1.0, 0.0, 1.0)).getField(0), 1);
	assertEquals(mapper.getOutputSchema(), new TableSchema(new String[] {"pred"},
		new TypeInformation <?>[] {Types.INT()}));
}
 
Example 11
Source Project: Alink   Source File: LinearModelMapperTest.java    License: Apache License 2.0
@Test
public void test2() throws Exception {
	TableSchema dataSchema = new TableSchema(
		new String[] {"f0", "f1", "f2", "f3"},
		new TypeInformation <?>[] {Types.DOUBLE(), Types.DOUBLE(), Types.DOUBLE(), Types.DOUBLE()}
	);
	Params params = new Params()
		.set(LogisticRegressionPredictParams.PREDICTION_COL, "pred");

	LinearModelMapper mapper = new LinearModelMapper(modelSchema, dataSchema, params);
	mapper.loadModel(model);

	assertEquals(mapper.map(Row.of(1.0, 1.0, 0.0, 1.0)).getField(4), 1);
	assertEquals(mapper.getOutputSchema(), new TableSchema(new String[] {"f0", "f1", "f2", "f3", "pred"},
		new TypeInformation <?>[] {Types.DOUBLE(), Types.DOUBLE(), Types.DOUBLE(), Types.DOUBLE(), Types.INT()}));
}
 
Example 12
Source Project: Alink   Source File: SegmentMapperTest.java    License: Apache License 2.0
@Test
public void test2() throws Exception {
	TableSchema schema = new TableSchema(new String[] {"sentence"}, new TypeInformation <?>[] {Types.STRING()});
	String[] dictArray = new String[] {"低风险"};

	Params params = new Params()
		.set(SegmentParams.SELECTED_COL, "sentence")
		.set(SegmentParams.USER_DEFINED_DICT, dictArray);

	SegmentMapper mapper = new SegmentMapper(schema, params);
	mapper.open();

	assertEquals(mapper.map(Row.of("我们辅助用户简单快速低成本低风险的实现系统权限安全管理")).getField(0),
		"我们 辅助 用户 简单 快速 低成本 低风险 的 实现 系统 权限 安全 管理");
	assertEquals(mapper.getOutputSchema(), schema);
}
 
Example 13
Source Project: Alink   Source File: SoftmaxModelMapperTest.java    License: Apache License 2.0
@Test
public void test1() throws Exception {
	TableSchema dataSchema = new TableSchema(
		new String[] {"f0", "f1", "f2"},
		new TypeInformation <?>[] {Types.DOUBLE(), Types.DOUBLE(), Types.DOUBLE()}
	);
	Params params = new Params()
		.set(SoftmaxPredictParams.PREDICTION_COL, "pred")
		.set(SoftmaxPredictParams.RESERVED_COLS, new String[] {});

	SoftmaxModelMapper mapper = new SoftmaxModelMapper(modelSchema, dataSchema, params);
	mapper.loadModel(model);

	assertEquals(mapper.map(Row.of(1.0, 7.0, 9.0)).getField(0), 2);
	assertEquals(mapper.getOutputSchema(), new TableSchema(new String[] {"pred"},
		new TypeInformation <?>[] {Types.INT()}));
}
 
Example 14
Source Project: Alink   Source File: SoftmaxModelMapperTest.java    License: Apache License 2.0
@Test
public void test2() throws Exception {
	TableSchema dataSchema = new TableSchema(
		new String[] {"f0", "f1", "f2"},
		new TypeInformation <?>[] {Types.DOUBLE(), Types.DOUBLE(), Types.DOUBLE()}
	);
	Params params = new Params()
		.set(SoftmaxPredictParams.PREDICTION_COL, "pred");

	SoftmaxModelMapper mapper = new SoftmaxModelMapper(modelSchema, dataSchema, params);
	mapper.loadModel(model);

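	// Without RESERVED_COLS, all input columns are kept and the prediction is appended as field 3.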
	assertEquals(mapper.map(Row.of(1.0, 7.0, 9.0)).getField(3), 2);
	assertEquals(mapper.getOutputSchema(), new TableSchema(new String[] {"f0", "f1", "f2", "pred"},
		new TypeInformation <?>[] {Types.DOUBLE(), Types.DOUBLE(), Types.DOUBLE(), Types.INT()}));
}
 
Example 15
Source Project: PoseidonX   Source File: FlinkTaskInstanceBusiness.java    License: Apache License 2.0
/**
 * Builds the business logic for the given FlinkKafka010InputComponent.
 *
 * @param flinkComponent the Kafka 0.10 input component to register as a table source
 */
private void dealFlinkKafka010InputComponent(FlinkKafka010InputComponent flinkComponent) {

    TypeInformation<Row> typeInfo = Types.ROW(
            flinkComponent.getColumnAliasNames(),
            flinkComponent.getColumnTypes()
    );

    Properties kafkaProperties = new Properties();
    kafkaProperties.setProperty("bootstrap.servers",flinkComponent.getBrokerHostPorts());
    kafkaProperties.setProperty("group.id",flinkComponent.getGroup());

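    // Wrap the topic in a JSON table source using the row type built above.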
    MyKafkaJsonTableSource kafkaTableSource = new MyKafkaJsonTableSource(
            flinkComponent.getTopic(),
            kafkaProperties,
            typeInfo,
            flinkComponent.getColumnNames());

    tEnv.registerTableSource(flinkComponent.getName(), kafkaTableSource);
}
 
Example 16
Source Project: PoseidonX   Source File: FlinkTaskInstanceBusiness.java    License: Apache License 2.0
/**
 * Builds the business logic for the given FlinkHBaseOutputComponent.
 *
 * @param flinkComponent the HBase output component to register as a table sink
 */
private void dealFlinkHBaseOutputComponent(FlinkHBaseOutputComponent flinkComponent) {

    TypeInformation<Row> typeInfo = Types.ROW(
            flinkComponent.getColumnNames(),
            flinkComponent.getDataTypeStrArray()
    );

    Properties sourceProperties = new Properties();
    sourceProperties.put(HBaseConstant.ZKHOST, flinkComponent.getZkHost());
    sourceProperties.put(HBaseConstant.ZKPORT, flinkComponent.getZkPort());
    sourceProperties.put(HBaseConstant.ZKPREFIX, flinkComponent.getZkPrefix());
    sourceProperties.put(HBaseConstant.TABLE_NAME, flinkComponent.getTableName());
    sourceProperties.put(HBaseConstant.COLFAMILY, flinkComponent.getColFamily());

    HBaseJsonTableSink hBaseJsonTableSink = new HBaseJsonTableSink(sourceProperties);

    tableSinkMap.put(flinkComponent.getName(), hBaseJsonTableSink);
}
 
Example 17
Source Project: flink   Source File: DependencyTest.java    License: Apache License 2.0
@Test
public void testTableFactoryDiscovery() throws Exception {
	final LocalExecutor executor = createExecutor();
	final SessionContext session = new SessionContext("test-session", new Environment());
	String sessionId = executor.openSession(session);
	try {
		final TableSchema result = executor.getTableSchema(sessionId, "TableNumber1");
		final TableSchema expected = TableSchema.builder()
			.field("IntegerField1", Types.INT())
			.field("StringField1", Types.STRING())
			.field("rowtimeField", Types.SQL_TIMESTAMP())
			.build();

		assertEquals(expected, result);
	} finally {
		executor.closeSession(sessionId);
	}
}
 
Example 18
Source Project: flink   Source File: JsonTest.java    License: Apache License 2.0
@Override
public List<Descriptor> descriptors() {
	final Descriptor desc1 = new Json().jsonSchema("test");

	final Descriptor desc2 = new Json().jsonSchema(JSON_SCHEMA).failOnMissingField(true);

	final Descriptor desc3 = new Json()
		.schema(
			Types.ROW(
				new String[]{"test1", "test2"},
				new TypeInformation[]{Types.STRING(), Types.SQL_TIMESTAMP()}))
		.failOnMissingField(true);

	final Descriptor desc4 = new Json().deriveSchema();

	final Descriptor desc5 = new Json().failOnMissingField(false);

	final Descriptor desc6 = new Json().jsonSchema(JSON_SCHEMA).ignoreParseErrors(false);

	final Descriptor desc7 = new Json().ignoreParseErrors(true);

	return Arrays.asList(desc1, desc2, desc3, desc4, desc5, desc6, desc7);
}
 
Example 19
protected TableSchema createTestSchema() {
	return TableSchema.builder()
		.field(FIELD_KEY, Types.LONG())
		.field(FIELD_FRUIT_NAME, Types.STRING())
		.field(FIELD_COUNT, Types.DECIMAL())
		.field(FIELD_TS, Types.SQL_TIMESTAMP())
		.build();
}
 
Example 20
Source Project: Flink-CEPplus   Source File: KafkaTableSourceBase.java    License: Apache License 2.0
/**
 * Validates a field of the schema to be the processing time attribute.
 *
 * @param proctimeAttribute The name of the field that becomes the processing time field.
 */
private Optional<String> validateProctimeAttribute(Optional<String> proctimeAttribute) {
	return proctimeAttribute.map((attribute) -> {
		// validate that field exists and is of correct type
		Optional<TypeInformation<?>> tpe = schema.getFieldType(attribute);
		if (!tpe.isPresent()) {
			throw new ValidationException("Processing time attribute '" + attribute + "' is not present in TableSchema.");
		} else if (tpe.get() != Types.SQL_TIMESTAMP()) {
			throw new ValidationException("Processing time attribute '" + attribute + "' is not of type SQL_TIMESTAMP.");
		}
		return attribute;
	});
}
 
Example 21
Source Project: Flink-CEPplus   Source File: KafkaTableSourceBase.java    License: Apache License 2.0
/**
 * Validates a list of fields to be rowtime attributes.
 *
 * @param rowtimeAttributeDescriptors The descriptors of the rowtime attributes.
 */
private List<RowtimeAttributeDescriptor> validateRowtimeAttributeDescriptors(List<RowtimeAttributeDescriptor> rowtimeAttributeDescriptors) {
	Preconditions.checkNotNull(rowtimeAttributeDescriptors, "List of rowtime attributes must not be null.");
	// validate that all declared fields exist and are of correct type
	for (RowtimeAttributeDescriptor desc : rowtimeAttributeDescriptors) {
		String rowtimeAttribute = desc.getAttributeName();
		Optional<TypeInformation<?>> tpe = schema.getFieldType(rowtimeAttribute);
		if (!tpe.isPresent()) {
			throw new ValidationException("Rowtime attribute '" + rowtimeAttribute + "' is not present in TableSchema.");
		} else if (tpe.get() != Types.SQL_TIMESTAMP()) {
			throw new ValidationException("Rowtime attribute '" + rowtimeAttribute + "' is not of type SQL_TIMESTAMP.");
		}
	}
	return rowtimeAttributeDescriptors;
}
 
Example 22
Source Project: Flink-CEPplus   Source File: DependencyTest.java    License: Apache License 2.0
@Test
public void testTableFactoryDiscovery() throws Exception {
	// create environment
	final Map<String, String> replaceVars = new HashMap<>();
	replaceVars.put("$VAR_CONNECTOR_TYPE", CONNECTOR_TYPE_VALUE);
	replaceVars.put("$VAR_CONNECTOR_PROPERTY", TEST_PROPERTY);
	replaceVars.put("$VAR_CONNECTOR_PROPERTY_VALUE", "test-value");
	final Environment env = EnvironmentFileUtil.parseModified(FACTORY_ENVIRONMENT_FILE, replaceVars);

	// create executor with dependencies
	final URL dependency = Paths.get("target", TABLE_FACTORY_JAR_FILE).toUri().toURL();
	final LocalExecutor executor = new LocalExecutor(
		env,
		Collections.singletonList(dependency),
		new Configuration(),
		new DefaultCLI(new Configuration()));

	final SessionContext session = new SessionContext("test-session", new Environment());

	final TableSchema result = executor.getTableSchema(session, "TableNumber1");
	final TableSchema expected = TableSchema.builder()
		.field("IntegerField1", Types.INT())
		.field("StringField1", Types.STRING())
		.field("rowtimeField", Types.SQL_TIMESTAMP())
		.build();

	assertEquals(expected, result);
}
 
Example 23
Source Project: Flink-CEPplus   Source File: CliResultViewTest.java    License: Apache License 2.0
private void testResultViewClearResult(TypedResult<?> typedResult, boolean isTableMode, int expectedCancellationCount) throws Exception {
	final CountDownLatch cancellationCounterLatch = new CountDownLatch(expectedCancellationCount);
	final SessionContext session = new SessionContext("test-session", new Environment());
	final MockExecutor executor = new MockExecutor(typedResult, cancellationCounterLatch);
	final ResultDescriptor descriptor = new ResultDescriptor(
		"result-id",
		TableSchema.builder().field("Null Field", Types.STRING()).build(),
		false);

	Thread resultViewRunner = null;
	CliClient cli = null;
	try {
		cli = new CliClient(TerminalUtils.createDummyTerminal(), session, executor);
		resultViewRunner = new Thread(new TestingCliResultView(cli, descriptor, isTableMode));
		resultViewRunner.start();
	} finally {
		if (resultViewRunner != null && !resultViewRunner.isInterrupted()) {
			resultViewRunner.interrupt();
		}
		if (cli != null) {
			cli.close();
		}
	}

	assertTrue(
		"Invalid number of cancellations.",
		cancellationCounterLatch.await(10, TimeUnit.SECONDS));
}
 
Example 24
Source Project: Flink-CEPplus   Source File: JsonRowFormatFactoryTest.java    License: Apache License 2.0
@Test
public void testSchemaDerivation() {
	final Map<String, String> properties = toMap(
		new Schema()
			.field("field1", Types.BOOLEAN())
			.field("field2", Types.INT())
			.field("proctime", Types.SQL_TIMESTAMP()).proctime(),
		new Json()
			.deriveSchema());

	testSchemaSerializationSchema(properties);

	testSchemaDeserializationSchema(properties);
}
 
Example 25
Source Project: pulsar-flink   Source File: PulsarTableSource.java    License: Apache License 2.0
/**
 * Validates a field of the schema to be the processing time attribute.
 *
 * @param proctimeAttribute The name of the field that becomes the processing time field.
 */
private Optional<String> validateProctimeAttribute(Optional<String> proctimeAttribute) {
    return proctimeAttribute.map((attribute) -> {
        // validate that field exists and is of correct type
        Optional<TypeInformation<?>> tpe = schema.getFieldType(attribute);
        if (!tpe.isPresent()) {
            throw new ValidationException("Processing time attribute '" + attribute + "' is not present in TableSchema.");
        } else if (tpe.get() != Types.SQL_TIMESTAMP()) {
            throw new ValidationException("Processing time attribute '" + attribute + "' is not of type SQL_TIMESTAMP.");
        }
        return attribute;
    });
}
 
Example 26
Source Project: pulsar-flink   Source File: PulsarTableSource.java    License: Apache License 2.0
/**
 * Validates a list of fields to be rowtime attributes.
 *
 * @param rowtimeAttributeDescriptors The descriptors of the rowtime attributes.
 */
private List<RowtimeAttributeDescriptor> validateRowtimeAttributeDescriptors(List<RowtimeAttributeDescriptor> rowtimeAttributeDescriptors) {
    Preconditions.checkNotNull(rowtimeAttributeDescriptors, "List of rowtime attributes must not be null.");
    // validate that all declared fields exist and are of correct type
    for (RowtimeAttributeDescriptor desc : rowtimeAttributeDescriptors) {
        String rowtimeAttribute = desc.getAttributeName();
        Optional<TypeInformation<?>> tpe = schema.getFieldType(rowtimeAttribute);
        if (!tpe.isPresent()) {
            throw new ValidationException("Rowtime attribute '" + rowtimeAttribute + "' is not present in TableSchema.");
        } else if (tpe.get() != Types.SQL_TIMESTAMP()) {
            throw new ValidationException("Rowtime attribute '" + rowtimeAttribute + "' is not of type SQL_TIMESTAMP.");
        }
    }
    return rowtimeAttributeDescriptors;
}
 
Example 27
Source Project: flink   Source File: KafkaTableSourceBase.java    License: Apache License 2.0
/**
 * Validates a field of the schema to be the processing time attribute.
 *
 * @param proctimeAttribute The name of the field that becomes the processing time field.
 */
private Optional<String> validateProctimeAttribute(Optional<String> proctimeAttribute) {
	return proctimeAttribute.map((attribute) -> {
		// validate that field exists and is of correct type
		Optional<TypeInformation<?>> tpe = schema.getFieldType(attribute);
		if (!tpe.isPresent()) {
			throw new ValidationException("Processing time attribute '" + attribute + "' is not present in TableSchema.");
		} else if (tpe.get() != Types.SQL_TIMESTAMP()) {
			throw new ValidationException("Processing time attribute '" + attribute + "' is not of type SQL_TIMESTAMP.");
		}
		return attribute;
	});
}
 
Example 28
Source Project: flink   Source File: KafkaTableSourceBase.java    License: Apache License 2.0
/**
 * Validates a list of fields to be rowtime attributes.
 *
 * @param rowtimeAttributeDescriptors The descriptors of the rowtime attributes.
 */
private List<RowtimeAttributeDescriptor> validateRowtimeAttributeDescriptors(List<RowtimeAttributeDescriptor> rowtimeAttributeDescriptors) {
	Preconditions.checkNotNull(rowtimeAttributeDescriptors, "List of rowtime attributes must not be null.");
	// validate that all declared fields exist and are of correct type
	for (RowtimeAttributeDescriptor desc : rowtimeAttributeDescriptors) {
		String rowtimeAttribute = desc.getAttributeName();
		Optional<TypeInformation<?>> tpe = schema.getFieldType(rowtimeAttribute);
		if (!tpe.isPresent()) {
			throw new ValidationException("Rowtime attribute '" + rowtimeAttribute + "' is not present in TableSchema.");
		} else if (tpe.get() != Types.SQL_TIMESTAMP()) {
			throw new ValidationException("Rowtime attribute '" + rowtimeAttribute + "' is not of type SQL_TIMESTAMP.");
		}
	}
	return rowtimeAttributeDescriptors;
}
 
Example 29
Source Project: flink   Source File: FieldInfoUtils.java    License: Apache License 2.0
private static void checkRowtimeType(TypeInformation<?> type) {
	if (!(type.equals(Types.LONG()) || type instanceof SqlTimeTypeInfo)) {
		throw new ValidationException(
			"The rowtime attribute can only replace a field with a valid time type, " +
				"such as Timestamp or Long. But was: " + type);
	}
}