org.apache.flink.api.java.typeutils.WritableTypeInfo Java Examples

The following examples show how to use org.apache.flink.api.java.typeutils.WritableTypeInfo, the TypeInformation that Flink's Hadoop compatibility layer uses for types implementing Hadoop's Writable interface. Wrapping a Writable class in a WritableTypeInfo lets Flink create a matching WritableSerializer (and, where the type is also Comparable, a comparator) at runtime.
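
As a quick orientation before the examples: a WritableTypeInfo can also be constructed directly from any class implementing Writable. The following minimal sketch uses Hadoop's built-in Text type, which does not appear in the examples below and is chosen purely for illustration.

import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.java.typeutils.WritableTypeInfo;
import org.apache.hadoop.io.Text;

public class WritableTypeInfoSketch {

	public static void main(String[] args) {
		// Wrap a Hadoop Writable class in Flink type information.
		WritableTypeInfo<Text> typeInfo = new WritableTypeInfo<>(Text.class);

		// The type information produces the serializer used at runtime;
		// for Writable types this is a WritableSerializer.
		TypeSerializer<Text> serializer = typeInfo.createSerializer(new ExecutionConfig());

		System.out.println(typeInfo.getTypeClass().getName());  // org.apache.hadoop.io.Text
		System.out.println(serializer.getLength());             // -1: variable-length records
	}
}
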
Example #1
Source File: WritableSerializerTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testStringArrayWritable() {
	StringArrayWritable[] data = new StringArrayWritable[]{
			new StringArrayWritable(new String[]{}),
			new StringArrayWritable(new String[]{""}),
			new StringArrayWritable(new String[]{"a", "a"}),
			new StringArrayWritable(new String[]{"a", "b"}),
			new StringArrayWritable(new String[]{"c", "c"}),
			new StringArrayWritable(new String[]{"d", "f"}),
			new StringArrayWritable(new String[]{"d", "m"}),
			new StringArrayWritable(new String[]{"z", "x"}),
			new StringArrayWritable(new String[]{"a", "a", "a"})
	};

	// Let Flink's type extraction derive the WritableTypeInfo from a sample object.
	WritableTypeInfo<StringArrayWritable> writableTypeInfo = (WritableTypeInfo<StringArrayWritable>) TypeExtractor.getForObject(data[0]);
	WritableSerializer<StringArrayWritable> writableSerializer = (WritableSerializer<StringArrayWritable>) writableTypeInfo.createSerializer(new ExecutionConfig());

	// The length argument -1 marks the serialized form as variable-length.
	SerializerTestInstance<StringArrayWritable> testInstance = new SerializerTestInstance<StringArrayWritable>(writableSerializer, writableTypeInfo.getTypeClass(), -1, data);

	testInstance.testAll();
}
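
The test relies on a StringArrayWritable helper class that is not shown on this page. The sketch below is a plausible reconstruction, not the original source: it assumes the type implements Hadoop's Writable contract, plus Comparable and value-based equals/hashCode so that the serializer test's equality assertions can pass.

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;

import org.apache.hadoop.io.Writable;

// Hypothetical reconstruction of the test's helper type: a Writable that
// (de)serializes a String[] and compares arrays element by element.
public class StringArrayWritable implements Writable, Comparable<StringArrayWritable> {

	private String[] array = new String[0];

	public StringArrayWritable() {
		// Hadoop's Writable contract requires a public no-arg constructor.
	}

	public StringArrayWritable(String[] array) {
		this.array = array;
	}

	@Override
	public void write(DataOutput out) throws IOException {
		out.writeInt(array.length);
		for (String str : array) {
			byte[] bytes = str.getBytes(StandardCharsets.UTF_8);
			out.writeInt(bytes.length);
			out.write(bytes);
		}
	}

	@Override
	public void readFields(DataInput in) throws IOException {
		this.array = new String[in.readInt()];
		for (int i = 0; i < this.array.length; i++) {
			byte[] bytes = new byte[in.readInt()];
			in.readFully(bytes);
			this.array[i] = new String(bytes, StandardCharsets.UTF_8);
		}
	}

	@Override
	public int compareTo(StringArrayWritable o) {
		if (this.array.length != o.array.length) {
			return this.array.length - o.array.length;
		}
		for (int i = 0; i < this.array.length; i++) {
			int comp = this.array[i].compareTo(o.array[i]);
			if (comp != 0) {
				return comp;
			}
		}
		return 0;
	}

	@Override
	public boolean equals(Object obj) {
		return obj instanceof StringArrayWritable && compareTo((StringArrayWritable) obj) == 0;
	}

	@Override
	public int hashCode() {
		return Arrays.hashCode(array);
	}
}
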
 
Example #2
Source File: HCatInputFormatBase.java    From Flink-CEPplus with Apache License 2.0
/**
 * Creates an HCatInputFormat for the given database, table, and
 * {@link org.apache.hadoop.conf.Configuration}.
 * By default, the InputFormat returns {@link org.apache.hive.hcatalog.data.HCatRecord}.
 * The return type of the InputFormat can be changed to Flink-native tuples by calling
 * {@link HCatInputFormatBase#asFlinkTuples()}.
 *
 * @param database The name of the database to read from.
 * @param table The name of the table to read.
 * @param config The Configuration for the InputFormat.
 * @throws java.io.IOException if the input cannot be set or the table schema cannot be read.
 */
public HCatInputFormatBase(String database, String table, Configuration config) throws IOException {
	super();
	this.configuration = config;
	HadoopUtils.mergeHadoopConf(this.configuration);

	this.hCatInputFormat = org.apache.hive.hcatalog.mapreduce.HCatInputFormat.setInput(this.configuration, database, table);
	this.outputSchema = org.apache.hive.hcatalog.mapreduce.HCatInputFormat.getTableSchema(this.configuration);

	// configure the output schema of the HCatInputFormat
	configuration.set("mapreduce.lib.hcat.output.schema", HCatUtil.serialize(outputSchema));
	// set type information
	this.resultType = new WritableTypeInfo(DefaultHCatRecord.class);
}
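
HCatInputFormatBase is the abstract base class; a job instantiates a concrete subclass. The sketch below assumes the Java-API subclass org.apache.flink.hcatalog.java.HCatInputFormat from the flink-hcatalog module; the database and table names are placeholders.

import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.hcatalog.java.HCatInputFormat;
import org.apache.hadoop.conf.Configuration;
import org.apache.hive.hcatalog.data.HCatRecord;

public class HCatReadJob {

	public static void main(String[] args) throws Exception {
		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

		// "mydb" and "mytable" are placeholders; the Configuration must
		// point at a reachable Hive metastore.
		HCatInputFormat<HCatRecord> source =
				new HCatInputFormat<>("mydb", "mytable", new Configuration());

		// Records arrive as HCatRecord by default, matching the
		// WritableTypeInfo(DefaultHCatRecord.class) set in the constructor above.
		DataSet<HCatRecord> records = env.createInput(source);

		records.print();
	}
}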
 