Java Code Examples for org.apache.avro.io.EncoderFactory

The following examples show how to use org.apache.avro.io.EncoderFactory. These examples are extracted from open source projects. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source Project: localization_nifi   Source File: JsonUtils.java    License: Apache License 2.0 7 votes vote down vote up
/**
 * Writes the provided {@link GenericRecord} into the provided
 * {@link OutputStream} as JSON.
 *
 * @param record the record to serialize; its own schema drives the encoding
 * @param out    destination stream; flushed but not closed by this method
 * @throws IllegalStateException if serialization fails, wrapping the cause
 */
public static void write(GenericRecord record, OutputStream out) {
    try {
        DatumWriter<GenericRecord> writer = new GenericDatumWriter<GenericRecord>(record.getSchema());
        JsonEncoder encoder = EncoderFactory.get().jsonEncoder(record.getSchema(), out);
        writer.write(record, encoder);
        encoder.flush();
    } catch (Exception e) {
        // FIX: message previously said "read" — this method writes the record.
        throw new IllegalStateException("Failed to write GenericRecord", e);
    }
}
 
Example 2
Source Project: nifi   Source File: WriteAvroResultWithExternalSchema.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Creates a writer that serializes records to Avro using an externally
 * managed schema.  A {@link BinaryEncoder} is taken from {@code recycleQueue}
 * when one is available so encoders can be reused across writer instances.
 */
public WriteAvroResultWithExternalSchema(final Schema avroSchema, final RecordSchema recordSchema, final SchemaAccessWriter schemaAccessWriter,
                                         final OutputStream out, final BlockingQueue<BinaryEncoder> recycleQueue, final ComponentLog logger) {
    super(out);
    this.avroSchema = avroSchema;
    this.recordSchema = recordSchema;
    this.schemaAccessWriter = schemaAccessWriter;
    this.recycleQueue = recycleQueue;
    this.buffered = new BufferedOutputStream(out);

    final BinaryEncoder pooledEncoder = recycleQueue.poll();
    if (pooledEncoder == null) {
        // Pool miss — expected until the pool warms up; persistent misses suggest a larger pool.
        logger.debug("Was not able to obtain a BinaryEncoder from reuse pool. This is normal for the first X number of iterations (where X is equal to the max size of the pool), " +
            "but if this continues, it indicates that increasing the size of the pool will likely yield better performance for this Avro Writer.");
    }

    encoder = EncoderFactory.get().blockingBinaryEncoder(buffered, pooledEncoder);
    datumWriter = new GenericDatumWriter<>(avroSchema);
}
 
Example 3
Source Project: mt-flume   Source File: Log4jAppender.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Avro-serializes {@code datum}, lazily (re)building the cached writer and
 * encoder whenever the incoming schema differs from the cached one.
 *
 * @throws FlumeException if the datum cannot be encoded
 */
private byte[] serialize(Object datum, Schema datumSchema) throws FlumeException {
  final boolean schemaChanged = schema == null || !datumSchema.equals(schema);
  if (schemaChanged) {
    schema = datumSchema;
    out = new ByteArrayOutputStream();
    writer = new ReflectDatumWriter<Object>(schema);
    encoder = EncoderFactory.get().binaryEncoder(out, null);
  }
  out.reset();
  try {
    writer.write(datum, encoder);
    encoder.flush();
  } catch (IOException e) {
    throw new FlumeException(e);
  }
  return out.toByteArray();
}
 
Example 4
Source Project: reef   Source File: AvroEvaluatorListSerializer.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Serializes the given {@link AvroEvaluatorList} to a JSON string using its
 * Avro schema.
 *
 * @throws RuntimeException wrapping any {@link IOException} from encoding
 */
@Override
public String toString(final AvroEvaluatorList avroEvaluatorList) {
  final DatumWriter<AvroEvaluatorList> writer = new SpecificDatumWriter<>(AvroEvaluatorList.class);
  try (final ByteArrayOutputStream stream = new ByteArrayOutputStream()) {
    final JsonEncoder jsonEncoder = EncoderFactory.get().jsonEncoder(avroEvaluatorList.getSchema(), stream);
    writer.write(avroEvaluatorList, jsonEncoder);
    jsonEncoder.flush();
    return stream.toString(AvroHttpSerializer.JSON_CHARSET);
  } catch (final IOException e) {
    throw new RuntimeException(e);
  }
}
 
Example 5
Source Project: pulsar-flume-ng-sink   Source File: PulsarSink.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Serializes a Flume event either as an Avro-encoded {@code AvroFlumeEvent}
 * or, when {@code useAvroEventFormat} is false, as the raw event body.
 * Reuses the buffered output stream, datum writer and encoder across calls.
 */
private byte[] serializeEvent(Event event, boolean useAvroEventFormat) throws IOException {
    if (!useAvroEventFormat) {
        return event.getBody();
    }
    if (!tempOutStream.isPresent()) {
        tempOutStream = Optional.of(new ByteArrayOutputStream());
    }
    if (!writer.isPresent()) {
        writer = Optional.of(new SpecificDatumWriter<AvroFlumeEvent>(AvroFlumeEvent.class));
    }
    ByteArrayOutputStream buffer = tempOutStream.get();
    buffer.reset();
    AvroFlumeEvent avroEvent = new AvroFlumeEvent(toCharSeqMap(event.getHeaders()),
            ByteBuffer.wrap(event.getBody()));
    // directBinaryEncoder reuses the previous encoder instance when non-null.
    encoder = EncoderFactory.get().directBinaryEncoder(buffer, encoder);
    writer.get().write(avroEvent, encoder);
    encoder.flush();
    return buffer.toByteArray();
}
 
Example 6
Source Project: components   Source File: DatasetContentWriter.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Returns a consumer that writes each {@link IndexedRecord} it receives.
 * On the first record it derives the schema, builds the datum writer, and
 * publishes a JSON or binary encoder through {@code encoder[0]} so callers
 * can flush it later.
 */
private Consumer<IndexedRecord> getWritingConsumer(Encoder[] encoder) {
    return new Consumer<IndexedRecord>() {

        private GenericDatumWriter<IndexedRecord> datumWriter;

        @Override
        public void accept(IndexedRecord record) {
            if (datumWriter == null) {
                datumWriter = new GenericDatumWriter<>(record.getSchema());
                try {
                    encoder[0] = json
                            ? EncoderFactory.get().jsonEncoder(record.getSchema(), output)
                            : EncoderFactory.get().binaryEncoder(output, null);
                } catch (IOException ioe) {
                    throw new RuntimeException(ioe);
                }
            }
            writeIndexedRecord(datumWriter, encoder[0], record);
        }
    };
}
 
Example 7
Source Project: SDA   Source File: AvroOneM2MDataPublish.java    License: BSD 2-Clause "Simplified" License 6 votes vote down vote up
/**
 * Serializes the given oneM2M event with Avro and publishes it to the Kafka
 * topic {@code TOPIC}.
 *
 * @param event the oneM2M record to send
 * @throws Exception if Avro encoding or the Kafka send fails
 */
public void send(COL_ONEM2M event) throws Exception {
	EncoderFactory avroEncoderFactory = EncoderFactory.get();
	SpecificDatumWriter<COL_ONEM2M> avroEventWriter = new SpecificDatumWriter<COL_ONEM2M>(COL_ONEM2M.SCHEMA$);

	ByteArrayOutputStream stream = new ByteArrayOutputStream();
	BinaryEncoder binaryEncoder = avroEncoderFactory.binaryEncoder(stream, null);

	try {
		avroEventWriter.write(event, binaryEncoder);
		binaryEncoder.flush();
	} finally {
		// FIX: the old code printed the stack trace and rethrew (duplicate
		// reporting) and only closed the stream on success; cleanup now
		// always runs and the exception propagates unaltered.
		IOUtils.closeQuietly(stream);
	}

	KeyedMessage<String, byte[]> data = new KeyedMessage<String, byte[]>(
			TOPIC, stream.toByteArray());

	producer.send(data);
}
 
Example 8
/**
 * Tests the Avro serializer: configures it with the test schema, encodes a
 * generated record to Avro binary, and asserts the serialized output matches
 * the expected JSON content.
 */
@Test
public void testSerializer() throws Exception {
    Context context = new Context();
    String schemaFile = getClass().getResource("/schema.avsc").getFile();
    context.put(ES_AVRO_SCHEMA_FILE, schemaFile);
    avroSerializer.configure(context);
    Schema schema = new Schema.Parser().parse(new File(schemaFile));
    GenericRecord user = generateGenericRecord(schema);
    DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(schema);
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    // FIX: use the shared singleton factory instead of allocating a fresh
    // EncoderFactory per call (recommended Avro usage; see EncoderFactory.get()).
    Encoder encoder = EncoderFactory.get().binaryEncoder(outputStream, null);
    datumWriter.write(user, encoder);
    encoder.flush();
    Event event = EventBuilder.withBody(outputStream.toByteArray());
    XContentBuilder expected = generateContentBuilder();
    XContentBuilder actual = avroSerializer.serialize(event);
    JsonParser parser = new JsonParser();
    assertEquals(parser.parse(Strings.toString(expected)), parser.parse(Strings.toString(actual)));
}
 
Example 9
Source Project: SDA   Source File: AvroLWM2MDataPublish.java    License: BSD 2-Clause "Simplified" License 6 votes vote down vote up
/**
 * Serializes the given LWM2M event with Avro and publishes it to the Kafka
 * topic {@code TOPIC}.
 *
 * @param event the LWM2M record to send
 * @throws Exception if Avro encoding or the Kafka send fails
 */
public void send(COL_LWM2M event) throws Exception {
	EncoderFactory avroEncoderFactory = EncoderFactory.get();
	SpecificDatumWriter<COL_LWM2M> avroEventWriter = new SpecificDatumWriter<COL_LWM2M>(COL_LWM2M.SCHEMA$);

	ByteArrayOutputStream stream = new ByteArrayOutputStream();
	BinaryEncoder binaryEncoder = avroEncoderFactory.binaryEncoder(stream, null);

	try {
		avroEventWriter.write(event, binaryEncoder);
		binaryEncoder.flush();
	} finally {
		// FIX: the old code printed the stack trace and rethrew (duplicate
		// reporting) and only closed the stream on success; cleanup now
		// always runs and the exception propagates unaltered.
		IOUtils.closeQuietly(stream);
	}

	KeyedMessage<String, byte[]> data = new KeyedMessage<String, byte[]>(
			TOPIC, stream.toByteArray());

	producer.send(data);
}
 
Example 10
/**
 * Writes the job-submission parameters for the given cluster submission to
 * {@code outputStream} as Avro JSON, after stamping in the DFS job folder.
 */
static void writeAvroYarnJobSubmissionParametersToOutputStream(
    final YarnClusterSubmissionFromCS yarnClusterSubmissionFromCS,
    final String jobFolderOnDFSPath,
    final OutputStream outputStream) throws IOException {
  final AvroYarnJobSubmissionParameters params =
      yarnClusterSubmissionFromCS.getYarnJobSubmissionParameters();
  params.setDfsJobSubmissionFolder(jobFolderOnDFSPath);

  final DatumWriter<AvroYarnJobSubmissionParameters> writer =
      new SpecificDatumWriter<>(AvroYarnJobSubmissionParameters.class);
  final JsonEncoder jsonEncoder =
      EncoderFactory.get().jsonEncoder(params.getSchema(), outputStream);
  writer.write(params, jsonEncoder);
  jsonEncoder.flush();
  outputStream.flush();
}
 
Example 11
/**
 * Serializes {@code data} with a generated fast specific serializer and
 * returns a binary decoder positioned over the produced bytes.
 */
private <T> Decoder serializeSpecificFast(T data, Schema schema) {
    final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    final BinaryEncoder encoder = EncoderFactory.get().directBinaryEncoder(buffer, null);
    try {
        final FastSerializer<T> serializer =
                new FastSpecificSerializerGenerator<T>(schema, tempDir, classLoader, null).generateSerializer();
        serializer.serialize(data, encoder);
        encoder.flush();
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
    return DecoderFactory.get().binaryDecoder(buffer.toByteArray(), null);
}
 
Example 12
Source Project: avro-fastserde   Source File: FastStringableTest.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Round-trip helper: fast-serializes {@code data} to Avro binary and hands
 * back a {@link Decoder} reading those bytes.
 */
private <T> Decoder serializeSpecificFast(T data, Schema schema) {
    final ByteArrayOutputStream out = new ByteArrayOutputStream();
    final BinaryEncoder enc = EncoderFactory.get().directBinaryEncoder(out, null);
    try {
        final FastSpecificSerializerGenerator<T> generator =
                new FastSpecificSerializerGenerator<>(schema, tempDir, classLoader, null);
        generator.generateSerializer().serialize(data, enc);
        enc.flush();
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
    return DecoderFactory.get().binaryDecoder(out.toByteArray(), null);
}
 
Example 13
Source Project: kite   Source File: Log4jAppender.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Encodes {@code datum} as Avro binary, re-initializing the reflective
 * writer/encoder pair whenever the schema differs from the cached one.
 *
 * @throws FlumeException if encoding fails
 */
private byte[] serialize(Object datum, Schema datumSchema) throws FlumeException {
  final boolean needsRebuild = (schema == null) || !datumSchema.equals(schema);
  if (needsRebuild) {
    schema = datumSchema;
    out = new ByteArrayOutputStream();
    writer = new ReflectDatumWriter<Object>(schema);
    encoder = EncoderFactory.get().binaryEncoder(out, null);
  }
  out.reset();
  try {
    writer.write(datum, encoder);
    encoder.flush();
  } catch (IOException e) {
    throw new FlumeException(e);
  }
  return out.toByteArray();
}
 
Example 14
Source Project: hadoop   Source File: AvroTestUtil.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Round-trips {@code value} through Avro reflect serialization, asserting
 * both that the reflectively derived schema matches {@code schema} and that
 * deserialization reproduces the original value.
 */
public static void testReflect(Object value, Type type, String schema)
  throws Exception {

  final Schema reflected = ReflectData.get().getSchema(type);
  assertEquals(Schema.parse(schema), reflected);

  final ByteArrayOutputStream bytes = new ByteArrayOutputStream();
  new ReflectDatumWriter<Object>(reflected)
      .write(value, EncoderFactory.get().directBinaryEncoder(bytes, null));

  final ReflectDatumReader<Object> reader = new ReflectDatumReader<Object>(reflected);
  final Object roundTripped =
      reader.read(null, DecoderFactory.get().binaryDecoder(bytes.toByteArray(), null));
  assertEquals(value, roundTripped);
}
 
Example 15
Source Project: jMetalSP   Source File: DataSerializer.java    License: MIT License 6 votes vote down vote up
/**
 * Serializes {@code clazz} to Avro binary using the schema file at
 * {@code path}.
 *
 * @param clazz the datum to serialize
 * @param path  filesystem path of the Avro schema (.avsc) file
 * @return the Avro-encoded bytes, or {@code null} if serialization fails
 *         (original best-effort contract preserved)
 */
public byte[] serializeMessage(S clazz, String path) {
    byte[] result = null;
    // FIX: try-with-resources so the stream is released on every path; the
    // old code skipped close() whenever an exception was thrown.
    try (ByteArrayOutputStream out = new ByteArrayOutputStream()) {
        Schema schema = new Schema.Parser().parse(new File(path));
        BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
        DatumWriter<S> datumWriter = new SpecificDatumWriter<S>(schema);
        datumWriter.write(clazz, encoder);
        encoder.flush();
        result = out.toByteArray();
    } catch (Exception ex) {
        // Callers rely on the null return; keep logging rather than rethrowing.
        ex.printStackTrace();
    }
    return result;
}
 
Example 16
Source Project: jMetalSP   Source File: AvroSerializationSchema.java    License: MIT License 6 votes vote down vote up
/**
 * Serializes {@code obj} to Avro binary bytes.  Returns {@code null} when an
 * {@link IOException} occurs, preserving the best-effort contract.
 */
@Override
public byte[] serialize(T obj) {
    byte[] bytes = null;
    try {
        ensureInitialized();
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        encoder = EncoderFactory.get().binaryEncoder(buffer, null);
        dataFileWriter.write(obj, encoder);
        encoder.flush();
        bytes = buffer.toByteArray();
        buffer.close();
    } catch (IOException ex) {
        ex.printStackTrace();
    }
    return bytes;
}
 
Example 17
Source Project: SDA   Source File: AvroLWM2MDataPublish.java    License: BSD 2-Clause "Simplified" License 6 votes vote down vote up
/**
 * Serializes the given LWM2M event with Avro and publishes it to the Kafka
 * topic {@code TOPIC}.
 *
 * @param event the LWM2M record to send
 * @throws Exception if Avro encoding or the Kafka send fails
 */
public void send(COL_LWM2M event) throws Exception {
	EncoderFactory avroEncoderFactory = EncoderFactory.get();
	SpecificDatumWriter<COL_LWM2M> avroEventWriter = new SpecificDatumWriter<COL_LWM2M>(COL_LWM2M.SCHEMA$);

	ByteArrayOutputStream stream = new ByteArrayOutputStream();
	BinaryEncoder binaryEncoder = avroEncoderFactory.binaryEncoder(stream, null);

	try {
		avroEventWriter.write(event, binaryEncoder);
		binaryEncoder.flush();
	} finally {
		// FIX: the old code printed the stack trace and rethrew (duplicate
		// reporting) and only closed the stream on success; cleanup now
		// always runs and the exception propagates unaltered.
		IOUtils.closeQuietly(stream);
	}

	KeyedMessage<String, byte[]> data = new KeyedMessage<String, byte[]>(
			TOPIC, stream.toByteArray());

	producer.send(data);
}
 
Example 18
Source Project: big-c   Source File: Display.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Opens the Avro data file described by {@code status} and wires up a JSON
 * re-encoding pipeline: records read via {@code fileReader} are re-emitted
 * as pretty-printed JSON into the in-memory {@code output} buffer.
 *
 * @param status file status of the Avro container file to read
 * @throws IOException if the file cannot be opened or the encoder created
 */
public AvroFileInputStream(FileStatus status) throws IOException {
  pos = 0;
  buffer = new byte[0];
  GenericDatumReader<Object> reader = new GenericDatumReader<Object>();
  FileContext fc = FileContext.getFileContext(new Configuration());
  fileReader =
    DataFileReader.openReader(new AvroFSInput(fc, status.getPath()),reader);
  // The writer re-encodes with the schema embedded in the file itself.
  Schema schema = fileReader.getSchema();
  writer = new GenericDatumWriter<Object>(schema);
  output = new ByteArrayOutputStream();
  JsonGenerator generator =
    new JsonFactory().createJsonGenerator(output, JsonEncoding.UTF8);
  // Separate root-level JSON values with the platform line separator so
  // each record lands on its own line.
  MinimalPrettyPrinter prettyPrinter = new MinimalPrettyPrinter();
  prettyPrinter.setRootValueSeparator(System.getProperty("line.separator"));
  generator.setPrettyPrinter(prettyPrinter);
  encoder = EncoderFactory.get().jsonEncoder(schema, generator);
}
 
Example 19
Source Project: big-c   Source File: AvroTestUtil.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Verifies Avro reflect handling of {@code value}: the reflectively derived
 * schema must equal {@code schema}, and a serialize/deserialize round trip
 * must be lossless.
 */
public static void testReflect(Object value, Type type, String schema)
  throws Exception {

  // Schema derived via reflection must match the expected schema string.
  Schema actual = ReflectData.get().getSchema(type);
  assertEquals(Schema.parse(schema), actual);

  // Serialize...
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  ReflectDatumWriter<Object> writer = new ReflectDatumWriter<Object>(actual);
  writer.write(value, EncoderFactory.get().directBinaryEncoder(out, null));

  // ...then deserialize and compare with the original.
  Object decoded = new ReflectDatumReader<Object>(actual)
      .read(null, DecoderFactory.get().binaryDecoder(out.toByteArray(), null));
  assertEquals(value, decoded);
}
 
Example 20
Source Project: flink   Source File: AvroSerializationSchema.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Lazily initializes the Avro serialization state (writer, schema, buffer,
 * encoder).  Specific-record classes use their generated schema; otherwise
 * {@code schemaString} is parsed and classes resolve through the thread's
 * context class loader.  No-op when already initialized.
 */
protected void checkAvroInitialized() {
	if (datumWriter != null) {
		return;
	}
	final ClassLoader cl = Thread.currentThread().getContextClassLoader();
	if (SpecificRecord.class.isAssignableFrom(recordClazz)) {
		final Schema specificSchema = SpecificData.get().getSchema(recordClazz);
		this.schema = specificSchema;
		this.datumWriter = new SpecificDatumWriter<>(specificSchema);
	} else {
		this.schema = new Schema.Parser().parse(this.schemaString);
		this.datumWriter = new GenericDatumWriter<>(schema, new GenericData(cl));
	}
	this.arrayOutputStream = new ByteArrayOutputStream();
	this.encoder = EncoderFactory.get().directBinaryEncoder(arrayOutputStream, null);
}
 
Example 21
Source Project: mt-flume   Source File: AvroEventDeserializer.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Opens an Avro container-file reader over the resettable input, re-aligns
 * it to the stream's current offset, and prepares a binary re-encoder plus
 * a CRC-64-AVRO fingerprint of the file's schema.
 */
private void initialize() throws IOException, NoSuchAlgorithmException {
  SeekableResettableInputBridge in = new SeekableResettableInputBridge(ris);
  // Remember where the stream currently is, then open the reader from the
  // start of the file (needed to read the header/schema).
  long pos = in.tell();
  in.seek(0L);
  fileReader = new DataFileReader<GenericRecord>(in,
      new GenericDatumReader<GenericRecord>());
  // Skip forward to the sync marker at/after the remembered offset.
  fileReader.sync(pos);

  schema = fileReader.getSchema();
  datumWriter = new GenericDatumWriter(schema);
  out = new ByteArrayOutputStream();
  // Reuses the existing encoder instance when one is already present.
  encoder = EncoderFactory.get().binaryEncoder(out, encoder);

  // Compact, normalized identifier for the schema (hex-encoded for headers).
  schemaHash = SchemaNormalization.parsingFingerprint("CRC-64-AVRO", schema);
  schemaHashString = Hex.encodeHexString(schemaHash);
}
 
Example 22
Source Project: celos   Source File: AvroToJsonConverter.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Converts an Avro container file into a stream of JSON-encoded records.
 * Empty inputs are returned unchanged.
 */
@Override
public FixFile convert(TestRun testRun, FixFile ff) throws IOException {
    byte[] content = IOUtils.toByteArray(ff.getContent());
    if (content.length == 0) {
        return ff;
    }
    ByteArrayOutputStream jsonOut = new ByteArrayOutputStream();
    GenericDatumReader<Object> datumReader = new GenericDatumReader<>();
    try (FileReader<Object> avroReader =
            DataFileReader.openReader(new SeekableByteArrayInput(content), datumReader)) {
        Schema schema = avroReader.getSchema();
        DatumWriter<Object> datumWriter = new GenericDatumWriter<>(schema);
        JsonEncoder jsonEncoder = EncoderFactory.get().jsonEncoder(schema, jsonOut);
        for (Object record : avroReader) {
            datumWriter.write(record, jsonEncoder);
        }
        jsonEncoder.flush();
    }
    return new FixFile(new ByteArrayInputStream(jsonOut.toByteArray()));
}
 
Example 23
Source Project: data-highway   Source File: TruckParkAppIntegrationTest.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Serializes a record with the magic byte and version prefix expected by
 * the deserializer, followed by the Avro binary payload.
 */
@Override
public byte[] serialize(String topic, Record record) {
  try (ByteArrayOutputStream out = new ByteArrayOutputStream()) {
    out.write(DataDeserializer.MAGIC_BYTE);
    out.write(Ints.toByteArray(VERSION));
    BinaryEncoder binaryEncoder = EncoderFactory.get().directBinaryEncoder(out, null);
    new GenericDatumWriter<Object>(SCHEMA).write(record, binaryEncoder);
    binaryEncoder.flush();
    return out.toByteArray();
  } catch (IOException unreachable) {
    // ByteArrayOutputStream does not actually throw; kept for the signature.
    throw new RuntimeException(unreachable);
  }
}
 
Example 24
Source Project: data-highway   Source File: DataDeserializerTest.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Builds a framed Avro-binary payload: one zero magic byte, a 4-byte
 * big-endian version, then the value encoded with {@code schema}.
 */
private byte[] toAvroBinary(Schema schema, Object value, int version) {
  try (ByteArrayOutputStream buffer = new ByteArrayOutputStream()) {
    buffer.write(0x0);
    buffer.write(Ints.toByteArray(version));
    Encoder avroEncoder = EncoderFactory.get().directBinaryEncoder(buffer, null);
    DatumWriter<Object> datumWriter = new GenericDatumWriter<>(schema);
    datumWriter.write(value, avroEncoder);
    avroEncoder.flush();
    return buffer.toByteArray();
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
}
 
Example 25
Source Project: components   Source File: Person.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Serializes this object's Avro-record form ({@code toAvroRecord()}) to
 * Avro binary bytes using the instance schema.
 */
public byte[] serToAvroBytes() throws IOException {
    try (ByteArrayOutputStream out = new ByteArrayOutputStream()) {
        BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
        new GenericDatumWriter<GenericRecord>(schema).write(toAvroRecord(), encoder);
        encoder.flush();
        return out.toByteArray();
    }
}
 
Example 26
Source Project: Flink-CEPplus   Source File: AvroTestUtils.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Writes the given record using the specified schema.
 *
 * @param record record to serialize
 * @param schema schema to use for serialization
 * @return serialized record as Avro binary bytes
 */
public static byte[] writeRecord(GenericRecord record, Schema schema) throws IOException {
	final ByteArrayOutputStream out = new ByteArrayOutputStream();
	final BinaryEncoder binaryEncoder = EncoderFactory.get().binaryEncoder(out, null);
	final GenericDatumWriter<GenericRecord> writer = new GenericDatumWriter<>(schema);
	writer.write(record, binaryEncoder);
	binaryEncoder.flush();
	return out.toByteArray();
}
 
Example 27
/**
 * Fake consumer that pre-materializes a partition's messages: unpacks the
 * bundled Avro tar, re-encodes each record belonging to {@code partition}
 * as Avro binary, and stores the bytes with contiguous offsets in
 * {@code messageBytes} / {@code messageOffsets}.  Failures are logged and
 * swallowed, leaving the consumer empty.
 */
FakePartitionLevelConsumer(int partition, StreamConfig streamConfig) {

    // TODO: this logic can move to a FakeStreamProducer instead of being inside the Consumer
    File tempDir = new File(FileUtils.getTempDirectory(), getClass().getSimpleName());
    File outputDir = new File(tempDir, String.valueOf(partition));

    int offset = 0;

    try (ByteArrayOutputStream outputStream = new ByteArrayOutputStream(65536)) {
      File avroFile = unpackAvroTarFile(outputDir).get(0);

      int numPartitions = FakeStreamConfigUtils.getNumPartitions(streamConfig);

      try (DataFileStream<GenericRecord> reader = AvroUtils.getAvroReader(avroFile)) {
        BinaryEncoder binaryEncoder = new EncoderFactory().directBinaryEncoder(outputStream, null);
        GenericDatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(reader.getSchema());

        int recordNumber = 0;
        // Round-robin style partition assignment: keep only the records
        // that getPartitionNumber maps to this consumer's partition.
        for (GenericRecord genericRecord : reader) {
          if (getPartitionNumber(recordNumber++, numPartitions) != partition) {
            continue;
          }
          // Reuse one buffer per record: reset, encode, then snapshot bytes.
          outputStream.reset();

          datumWriter.write(genericRecord, binaryEncoder);
          binaryEncoder.flush();

          byte[] bytes = outputStream.toByteArray();
          // contiguous offsets
          messageOffsets.add(offset++);
          messageBytes.add(bytes);
        }
      }
    } catch (Exception e) {
      LOGGER.error("Could not create {}", FakePartitionLevelConsumer.class.getName(), e);
    } finally {
      FileUtils.deleteQuietly(outputDir);
    }
  }
 
Example 28
Source Project: flink   Source File: AvroRowSerializationSchema.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Creates an Avro serialization schema for the given specific record class.
 *
 * @param recordClazz Avro record class used to serialize Flink's row to Avro's record
 */
public AvroRowSerializationSchema(Class<? extends SpecificRecord> recordClazz) {
	Preconditions.checkNotNull(recordClazz, "Avro record class must not be null.");
	this.recordClazz = recordClazz;
	this.schema = SpecificData.get().getSchema(recordClazz);
	this.schemaString = schema.toString();
	this.datumWriter = new SpecificDatumWriter<>(schema);
	this.arrayOutputStream = new ByteArrayOutputStream();
	this.encoder = EncoderFactory.get().binaryEncoder(arrayOutputStream, null);
}
 
Example 29
Source Project: flink   Source File: AvroRowSerializationSchema.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Creates an Avro serialization schema for the given Avro schema string.
 *
 * @param avroSchemaString Avro schema string used to serialize Flink's row to Avro's record
 */
public AvroRowSerializationSchema(String avroSchemaString) {
	Preconditions.checkNotNull(avroSchemaString, "Avro schema must not be null.");
	this.recordClazz = null;
	this.schemaString = avroSchemaString;
	try {
		this.schema = new Schema.Parser().parse(avroSchemaString);
	} catch (SchemaParseException e) {
		throw new IllegalArgumentException("Could not parse Avro schema string.", e);
	}
	this.datumWriter = new GenericDatumWriter<>(schema);
	this.arrayOutputStream = new ByteArrayOutputStream();
	this.encoder = EncoderFactory.get().binaryEncoder(arrayOutputStream, null);
}
 
Example 30
Source Project: flink   Source File: AvroRowSerializationSchema.java    License: Apache License 2.0 5 votes vote down vote up
@SuppressWarnings("unchecked")
private void readObject(ObjectInputStream inputStream) throws ClassNotFoundException, IOException {
	recordClazz = (Class<? extends SpecificRecord>) inputStream.readObject();
	schemaString = (String) inputStream.readObject();
	if (recordClazz != null) {
		schema = SpecificData.get().getSchema(recordClazz);
	} else {
		schema = new Schema.Parser().parse(schemaString);
	}
	datumWriter = new SpecificDatumWriter<>(schema);
	arrayOutputStream = new ByteArrayOutputStream();
	encoder = EncoderFactory.get().binaryEncoder(arrayOutputStream, null);
}