org.apache.avro.io.EncoderFactory Java Examples

The following examples show how to use org.apache.avro.io.EncoderFactory. They are drawn from a variety of open-source projects; the source file, project, and license are noted above each example.
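
Before the individual examples, here is a minimal, self-contained sketch of the pattern most of them follow: obtain the shared factory with EncoderFactory.get(), bind an encoder to an OutputStream, write with a DatumWriter, and flush. The class name, schema, and field values below are invented purely for illustration.

import java.io.ByteArrayOutputStream;

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.EncoderFactory;

public class EncoderFactorySketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical schema used only for this sketch.
        Schema schema = new Schema.Parser().parse(
                "{\"type\":\"record\",\"name\":\"User\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"}]}");
        GenericRecord record = new GenericData.Record(schema);
        record.put("name", "alice");

        // Encode: bind a BinaryEncoder to a stream, write the datum, then flush.
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
        new GenericDatumWriter<GenericRecord>(schema).write(record, encoder);
        encoder.flush(); // binaryEncoder buffers internally, so flush before using the bytes

        // Decode the bytes back with the matching DecoderFactory call.
        GenericRecord roundTripped = new GenericDatumReader<GenericRecord>(schema)
                .read(null, DecoderFactory.get().binaryDecoder(out.toByteArray(), null));
        System.out.println(roundTripped);
    }
}
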
Example #1
Source File: JsonUtils.java    From localization_nifi with Apache License 2.0
/**
 * Writes provided {@link GenericRecord} into the provided
 * {@link OutputStream} as JSON.
 */
public static void write(GenericRecord record, OutputStream out) {
    try {
        DatumWriter<GenericRecord> writer = new GenericDatumWriter<GenericRecord>(record.getSchema());
        JsonEncoder encoder = EncoderFactory.get().jsonEncoder(record.getSchema(), out);
        writer.write(record, encoder);
        encoder.flush();
    } catch (Exception e) {
        throw new IllegalStateException("Failed to write GenericRecord", e);
    }
}
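
For the reverse direction, here is a hedged sketch (not part of the original example) of how the JSON produced above could be read back into a GenericRecord, assuming the writer schema is available; the method name and placement are illustrative.

/**
 * Sketch only: reads a GenericRecord previously written as JSON with the writer schema.
 */
public static GenericRecord read(Schema schema, InputStream in) {
    try {
        DatumReader<GenericRecord> reader = new GenericDatumReader<>(schema);
        JsonDecoder decoder = DecoderFactory.get().jsonDecoder(schema, in);
        return reader.read(null, decoder);
    } catch (Exception e) {
        throw new IllegalStateException("Failed to read GenericRecord", e);
    }
}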
 
Example #2
Source File: WriteAvroResultWithExternalSchema.java    From nifi with Apache License 2.0
public WriteAvroResultWithExternalSchema(final Schema avroSchema, final RecordSchema recordSchema, final SchemaAccessWriter schemaAccessWriter,
                                         final OutputStream out, final BlockingQueue<BinaryEncoder> recycleQueue, final ComponentLog logger) {
    super(out);
    this.recordSchema = recordSchema;
    this.schemaAccessWriter = schemaAccessWriter;
    this.avroSchema = avroSchema;
    this.buffered = new BufferedOutputStream(out);
    this.recycleQueue = recycleQueue;

    BinaryEncoder reusableEncoder = recycleQueue.poll();
    if (reusableEncoder == null) {
        logger.debug("Was not able to obtain a BinaryEncoder from reuse pool. This is normal for the first X number of iterations (where X is equal to the max size of the pool), " +
            "but if this continues, it indicates that increasing the size of the pool will likely yield better performance for this Avro Writer.");
    }

    encoder = EncoderFactory.get().blockingBinaryEncoder(buffered, reusableEncoder);

    datumWriter = new GenericDatumWriter<>(avroSchema);
}
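
The recycle queue suggests that the BinaryEncoder is handed back once the writer is finished, so that a later writer can pass it as the reuse argument to blockingBinaryEncoder. A hedged sketch of what that return path might look like follows; the method name and exact flush ordering are assumptions, not the actual NiFi code.

// Sketch only: flush what was written, then offer the encoder back to the pool.
private void returnEncoderToPool() throws IOException {
    encoder.flush();
    buffered.flush();
    recycleQueue.offer(encoder); // non-blocking; if the pool is full the encoder is simply dropped
}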
 
Example #3
Source File: Log4jAppender.java    From kite with Apache License 2.0
private byte[] serialize(Object datum, Schema datumSchema) throws FlumeException {
  if (schema == null || !datumSchema.equals(schema)) {
    schema = datumSchema;
    out = new ByteArrayOutputStream();
    writer = new ReflectDatumWriter<Object>(schema);
    encoder = EncoderFactory.get().binaryEncoder(out, null);
  }
  out.reset();
  try {
    writer.write(datum, encoder);
    encoder.flush();
    return out.toByteArray();
  } catch (IOException e) {
    throw new FlumeException(e);
  }
}
 
Example #4
Source File: AvroTestUtil.java    From hadoop with Apache License 2.0
public static void testReflect(Object value, Type type, String schema)
  throws Exception {

  // check that schema matches expected
  Schema s = ReflectData.get().getSchema(type);
  assertEquals(Schema.parse(schema), s);

  // check that value is serialized correctly
  ReflectDatumWriter<Object> writer = new ReflectDatumWriter<Object>(s);
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  writer.write(value, EncoderFactory.get().directBinaryEncoder(out, null));
  ReflectDatumReader<Object> reader = new ReflectDatumReader<Object>(s);
  Object after =
    reader.read(null,
                DecoderFactory.get().binaryDecoder(out.toByteArray(), null));
  assertEquals(value, after);
}
 
Example #5
Source File: DataSerializer.java    From jMetalSP with MIT License
public byte[] serializeMessage(S clazz, String path) {
    byte[] result = null;
    try {
        File file = new File(path);
        Schema schema = new Schema.Parser().parse(file);

        ByteArrayOutputStream out = new ByteArrayOutputStream();
        BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
        DatumWriter<S> dataFileWriter = new SpecificDatumWriter<S>(schema);
        dataFileWriter.write(clazz, encoder);
        encoder.flush();
        result = out.toByteArray();
        out.close();
    } catch (Exception ex) {
        ex.printStackTrace();
    }
    return result;
}
 
Example #6
Source File: AvroSerializationSchema.java    From jMetalSP with MIT License
@Override
public byte[] serialize(T obj) {
    byte[] serializedBytes = null;
    try {
        ensureInitialized();
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        encoder = EncoderFactory.get().binaryEncoder(out, null);
        dataFileWriter.write(obj, encoder);
        encoder.flush();
        serializedBytes = out.toByteArray();
        out.close();
    } catch (IOException ex) {
        ex.printStackTrace();
    }

    return serializedBytes;
}
 
Example #7
Source File: AvroLWM2MDataPublish.java    From SDA with BSD 2-Clause "Simplified" License
/**
 * Sends the event data.
 * @param event
 * @throws Exception
 */
public void send(COL_LWM2M event) throws Exception {
	EncoderFactory avroEncoderFactory = EncoderFactory.get();
	SpecificDatumWriter<COL_LWM2M> avroEventWriter = new SpecificDatumWriter<COL_LWM2M>(COL_LWM2M.SCHEMA$);
	
	ByteArrayOutputStream stream = new ByteArrayOutputStream();
	BinaryEncoder binaryEncoder = avroEncoderFactory.binaryEncoder(stream,null);

	try {
		avroEventWriter.write(event, binaryEncoder);
		binaryEncoder.flush();
	} catch (IOException e) {
		e.printStackTrace();
		throw e;
	}
	IOUtils.closeQuietly(stream);

	KeyedMessage<String, byte[]> data = new KeyedMessage<String, byte[]>(
			TOPIC, stream.toByteArray());

	producer.send(data);
}
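
On the consuming side, a hedged sketch of how the Kafka payload produced by send() could be turned back into a COL_LWM2M record; this helper is not part of the original class.

// Sketch only: deserialize the byte[] payload written by send().
public static COL_LWM2M decode(byte[] payload) throws IOException {
	SpecificDatumReader<COL_LWM2M> avroEventReader = new SpecificDatumReader<COL_LWM2M>(COL_LWM2M.SCHEMA$);
	return avroEventReader.read(null, DecoderFactory.get().binaryDecoder(payload, null));
}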
 
Example #8
Source File: FastStringableTest.java    From avro-fastserde with Apache License 2.0
private <T> Decoder serializeSpecificFast(T data, Schema schema) {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    BinaryEncoder binaryEncoder = EncoderFactory.get().directBinaryEncoder(baos, null);

    try {
        FastSpecificSerializerGenerator<T> fastSpecificSerializerGenerator = new FastSpecificSerializerGenerator<>(
                schema, tempDir, classLoader, null);
        FastSerializer<T> fastSerializer = fastSpecificSerializerGenerator.generateSerializer();
        fastSerializer.serialize(data, binaryEncoder);
        binaryEncoder.flush();

    } catch (Exception e) {
        throw new RuntimeException(e);
    }

    return DecoderFactory.get().binaryDecoder(baos.toByteArray(), null);
}
 
Example #9
Source File: DatasetContentWriter.java    From components with Apache License 2.0
private Consumer<IndexedRecord> getWritingConsumer(Encoder[] encoder) {
    return new Consumer<IndexedRecord>() {

        GenericDatumWriter<IndexedRecord> writer = null;

        @Override
        public void accept(IndexedRecord ir) {
            if (writer == null) {
                writer = new GenericDatumWriter<>(ir.getSchema());
                try {
                    if (json) {
                        encoder[0] = EncoderFactory.get().jsonEncoder(ir.getSchema(), output);
                    } else {
                        encoder[0] = EncoderFactory.get().binaryEncoder(output, null);
                    }
                } catch (IOException ioe) {
                    throw new RuntimeException(ioe);
                }

            }
            writeIndexedRecord(writer, encoder[0], ir);
        }
    };
}
 
Example #10
Source File: Display.java    From big-c with Apache License 2.0
public AvroFileInputStream(FileStatus status) throws IOException {
  pos = 0;
  buffer = new byte[0];
  GenericDatumReader<Object> reader = new GenericDatumReader<Object>();
  FileContext fc = FileContext.getFileContext(new Configuration());
  fileReader =
    DataFileReader.openReader(new AvroFSInput(fc, status.getPath()),reader);
  Schema schema = fileReader.getSchema();
  writer = new GenericDatumWriter<Object>(schema);
  output = new ByteArrayOutputStream();
  JsonGenerator generator =
    new JsonFactory().createJsonGenerator(output, JsonEncoding.UTF8);
  MinimalPrettyPrinter prettyPrinter = new MinimalPrettyPrinter();
  prettyPrinter.setRootValueSeparator(System.getProperty("line.separator"));
  generator.setPrettyPrinter(prettyPrinter);
  encoder = EncoderFactory.get().jsonEncoder(schema, generator);
}
 
Example #11
Source File: AvroTestUtil.java    From big-c with Apache License 2.0
public static void testReflect(Object value, Type type, String schema)
  throws Exception {

  // check that schema matches expected
  Schema s = ReflectData.get().getSchema(type);
  assertEquals(Schema.parse(schema), s);

  // check that value is serialized correctly
  ReflectDatumWriter<Object> writer = new ReflectDatumWriter<Object>(s);
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  writer.write(value, EncoderFactory.get().directBinaryEncoder(out, null));
  ReflectDatumReader<Object> reader = new ReflectDatumReader<Object>(s);
  Object after =
    reader.read(null,
                DecoderFactory.get().binaryDecoder(out.toByteArray(), null));
  assertEquals(value, after);
}
 
Example #12
Source File: AvroSerializationSchema.java    From flink with Apache License 2.0
protected void checkAvroInitialized() {
	if (datumWriter != null) {
		return;
	}
	ClassLoader cl = Thread.currentThread().getContextClassLoader();
	if (SpecificRecord.class.isAssignableFrom(recordClazz)) {
		Schema schema = SpecificData.get().getSchema(recordClazz);
		this.datumWriter = new SpecificDatumWriter<>(schema);
		this.schema = schema;
	} else {
		this.schema = new Schema.Parser().parse(this.schemaString);
		GenericData genericData = new GenericData(cl);

		this.datumWriter = new GenericDatumWriter<>(schema, genericData);
	}
	this.arrayOutputStream = new ByteArrayOutputStream();
	this.encoder = EncoderFactory.get().directBinaryEncoder(arrayOutputStream, null);
}
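
Note the use of directBinaryEncoder here: unlike binaryEncoder, it writes straight through to the underlying stream with no internal buffer, which suits a serializer that needs the bytes immediately after each record. A hedged sketch of a serialize method built on the fields initialized above follows; the real Flink class has such a method, but this body is an illustration, assuming the enclosing class is generic over a record type T.

// Sketch only (not the original Flink code): serialize one record using the fields above.
public byte[] serialize(T record) {
	try {
		checkAvroInitialized();
		arrayOutputStream.reset();
		datumWriter.write(record, encoder);
		encoder.flush(); // effectively a no-op for the direct encoder, kept in case a buffered encoder is used
		return arrayOutputStream.toByteArray();
	} catch (IOException e) {
		throw new RuntimeException("Failed to serialize record", e);
	}
}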
 
Example #13
Source File: PulsarSink.java    From pulsar-flume-ng-sink with Apache License 2.0
private byte[] serializeEvent(Event event, boolean useAvroEventFormat) throws IOException {
    byte[] bytes;
    if (useAvroEventFormat) {
        if (!tempOutStream.isPresent()) {
            tempOutStream = Optional.of(new ByteArrayOutputStream());
        }
        if (!writer.isPresent()) {
            writer = Optional.of(new SpecificDatumWriter<AvroFlumeEvent>(AvroFlumeEvent.class));
        }
        tempOutStream.get().reset();
        AvroFlumeEvent e = new AvroFlumeEvent(toCharSeqMap(event.getHeaders()),
                ByteBuffer.wrap(event.getBody()));
        encoder = EncoderFactory.get().directBinaryEncoder(tempOutStream.get(), encoder);
        writer.get().write(e, encoder);
        encoder.flush();
        bytes = tempOutStream.get().toByteArray();
    } else {
        bytes = event.getBody();
    }
    return bytes;
}
 
Example #14
Source File: FastSpecificSerializerGeneratorTest.java    From avro-fastserde with Apache License 2.0
private <T> Decoder serializeSpecificFast(T data, Schema schema) {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    BinaryEncoder binaryEncoder = EncoderFactory.get().directBinaryEncoder(baos, null);

    try {
        FastSpecificSerializerGenerator<T> fastSpecificSerializerGenerator = new FastSpecificSerializerGenerator<>(
                schema, tempDir, classLoader, null);
        FastSerializer<T> fastSerializer = fastSpecificSerializerGenerator.generateSerializer();
        fastSerializer.serialize(data, binaryEncoder);
        binaryEncoder.flush();

    } catch (Exception e) {
        throw new RuntimeException(e);
    }

    return DecoderFactory.get().binaryDecoder(baos.toByteArray(), null);
}
 
Example #15
Source File: AvroOneM2MDataPublish.java    From SDA with BSD 2-Clause "Simplified" License
/**
 * Sends the event data.
 * @param event
 * @throws Exception
 */
public void send(COL_ONEM2M event) throws Exception {
	EncoderFactory avroEncoderFactory = EncoderFactory.get();
	SpecificDatumWriter<COL_ONEM2M> avroEventWriter = new SpecificDatumWriter<COL_ONEM2M>(COL_ONEM2M.SCHEMA$);
	
	ByteArrayOutputStream stream = new ByteArrayOutputStream();
	BinaryEncoder binaryEncoder = avroEncoderFactory.binaryEncoder(stream,null);

	try {
		avroEventWriter.write(event, binaryEncoder);
		binaryEncoder.flush();
	} catch (IOException e) {
		e.printStackTrace();
		throw e;
	}
	IOUtils.closeQuietly(stream);

	KeyedMessage<String, byte[]> data = new KeyedMessage<String, byte[]>(
			TOPIC, stream.toByteArray());

	producer.send(data);
}
 
Example #16
Source File: AvroEventDeserializer.java    From mt-flume with Apache License 2.0
private void initialize() throws IOException, NoSuchAlgorithmException {
  SeekableResettableInputBridge in = new SeekableResettableInputBridge(ris);
  long pos = in.tell();
  in.seek(0L);
  fileReader = new DataFileReader<GenericRecord>(in,
      new GenericDatumReader<GenericRecord>());
  fileReader.sync(pos);

  schema = fileReader.getSchema();
  datumWriter = new GenericDatumWriter(schema);
  out = new ByteArrayOutputStream();
  encoder = EncoderFactory.get().binaryEncoder(out, encoder);

  schemaHash = SchemaNormalization.parsingFingerprint("CRC-64-AVRO", schema);
  schemaHashString = Hex.encodeHexString(schemaHash);
}
 
Example #17
Source File: AvroEvaluatorListSerializer.java    From reef with Apache License 2.0
/**
 * Convert AvroEvaluatorList to JSON string.
 */
@Override
public String toString(final AvroEvaluatorList avroEvaluatorList) {
  final DatumWriter<AvroEvaluatorList> evaluatorWriter = new SpecificDatumWriter<>(AvroEvaluatorList.class);
  try (ByteArrayOutputStream out = new ByteArrayOutputStream()) {
    final JsonEncoder encoder = EncoderFactory.get().jsonEncoder(avroEvaluatorList.getSchema(), out);
    evaluatorWriter.write(avroEvaluatorList, encoder);
    encoder.flush();
    return out.toString(AvroHttpSerializer.JSON_CHARSET);
  } catch (final IOException e) {
    throw new RuntimeException(e);
  }
}
 
Example #18
Source File: TestAvroSerializer.java    From flume-elasticsearch-sink with Apache License 2.0
/**
 * tests Avro Serializer
 */
@Test
public void testSerializer() throws Exception {
    Context context = new Context();
    String schemaFile = getClass().getResource("/schema.avsc").getFile();
    context.put(ES_AVRO_SCHEMA_FILE, schemaFile);
    avroSerializer.configure(context);
    Schema schema = new Schema.Parser().parse(new File(schemaFile));
    GenericRecord user = generateGenericRecord(schema);
    DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(schema);
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    Encoder encoder = new EncoderFactory().binaryEncoder(outputStream, null);
    datumWriter.write(user, encoder);
    encoder.flush();
    Event event = EventBuilder.withBody(outputStream.toByteArray());
    XContentBuilder expected = generateContentBuilder();
    XContentBuilder actual = avroSerializer.serialize(event);
    JsonParser parser = new JsonParser();
    assertEquals(parser.parse(Strings.toString(expected)), parser.parse(Strings.toString(actual)));
}
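
This test builds its own factory with new EncoderFactory() instead of the shared EncoderFactory.get() instance. A private factory is how you get non-default settings without affecting other callers; a hedged sketch, with the buffer size chosen arbitrarily:

// Sketch only: a dedicated factory with a custom write buffer; the shared factory stays untouched.
EncoderFactory factory = new EncoderFactory().configureBufferSize(8 * 1024);
Encoder tunedEncoder = factory.binaryEncoder(outputStream, null);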
 
Example #19
Source File: Log4jAppender.java    From mt-flume with Apache License 2.0
private byte[] serialize(Object datum, Schema datumSchema) throws FlumeException {
  if (schema == null || !datumSchema.equals(schema)) {
    schema = datumSchema;
    out = new ByteArrayOutputStream();
    writer = new ReflectDatumWriter<Object>(schema);
    encoder = EncoderFactory.get().binaryEncoder(out, null);
  }
  out.reset();
  try {
    writer.write(datum, encoder);
    encoder.flush();
    return out.toByteArray();
  } catch (IOException e) {
    throw new FlumeException(e);
  }
}
 
Example #20
Source File: YarnSubmissionParametersFileGenerator.java    From reef with Apache License 2.0
static void writeAvroYarnJobSubmissionParametersToOutputStream(
    final YarnClusterSubmissionFromCS yarnClusterSubmissionFromCS,
    final String jobFolderOnDFSPath,
    final OutputStream outputStream) throws IOException {
  final DatumWriter<AvroYarnJobSubmissionParameters> datumWriter =
      new SpecificDatumWriter<>(AvroYarnJobSubmissionParameters.class);

  final AvroYarnJobSubmissionParameters jobSubmissionParameters =
      yarnClusterSubmissionFromCS.getYarnJobSubmissionParameters();
  jobSubmissionParameters.setDfsJobSubmissionFolder(jobFolderOnDFSPath);
  final JsonEncoder encoder = EncoderFactory.get().jsonEncoder(jobSubmissionParameters.getSchema(),
      outputStream);
  datumWriter.write(jobSubmissionParameters, encoder);
  encoder.flush();
  outputStream.flush();
}
 
Example #21
Source File: AvroToJsonConverter.java    From celos with Apache License 2.0
@Override
public FixFile convert(TestRun testRun, FixFile ff) throws IOException {
    byte[] bytes = IOUtils.toByteArray(ff.getContent());
    if (bytes.length == 0) {
        return ff;
    }
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    GenericDatumReader<Object> reader = new GenericDatumReader<>();
    FileReader<Object> fileReader =  DataFileReader.openReader(new SeekableByteArrayInput(bytes), reader);
    try {
        Schema schema = fileReader.getSchema();
        DatumWriter<Object> writer = new GenericDatumWriter<>(schema);
        JsonEncoder encoder = EncoderFactory.get().jsonEncoder(schema, os);

        for (Object datum : fileReader) {
            writer.write(datum, encoder);
        }
        encoder.flush();
    } finally {
        fileReader.close();
    }
    return new FixFile(new ByteArrayInputStream(os.toByteArray()));
}
 
Example #22
Source File: AvroRowSerializationSchema.java    From flink with Apache License 2.0
/**
 * Creates an Avro serialization schema for the given specific record class.
 *
 * @param recordClazz Avro record class used to serialize Flink's row to Avro's record
 */
public AvroRowSerializationSchema(Class<? extends SpecificRecord> recordClazz) {
	Preconditions.checkNotNull(recordClazz, "Avro record class must not be null.");
	this.recordClazz = recordClazz;
	this.schema = SpecificData.get().getSchema(recordClazz);
	this.schemaString = schema.toString();
	this.datumWriter = new SpecificDatumWriter<>(schema);
	this.arrayOutputStream = new ByteArrayOutputStream();
	this.encoder = EncoderFactory.get().binaryEncoder(arrayOutputStream, null);
}
 
Example #23
Source File: MessageSerializerImpl.java    From reef with Apache License 2.0
/**
 * Serialize a message of type TMessage to the provided outputStream.
 * @param outputStream A ByteArrayOutputStream where the message to
 *                     be serialized will be written.
 * @param message An Avro message class which implements the Avro SpecificRecord interface.
 * @param sequence The numerical position of the message in the outgoing message stream.
 * @throws IOException An error occurred writing the message to the outputStream.
 */
public void serialize(final ByteArrayOutputStream outputStream,
                      final SpecificRecord message, final long sequence) throws IOException {
  // Binary encoder for both the header and message.
  final BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(outputStream, null);
  // Write the header and the message.
  headerWriter.write(new Header(sequence, msgMetaClassName), encoder);
  messageWriter.write((TMessage)message, encoder);
  encoder.flush();
}
 
Example #24
Source File: WatcherAvroUtil.java    From reef with Apache License 2.0
public static String toString(final SpecificRecord record) {
  final String jsonEncodedRecord;
  try {
    final Schema schema = record.getSchema();
    final ByteArrayOutputStream bos = new ByteArrayOutputStream();
    final Encoder encoder = EncoderFactory.get().jsonEncoder(schema, bos);
    final SpecificDatumWriter datumWriter = new SpecificDatumWriter(record.getClass());
    datumWriter.write(record, encoder);
    encoder.flush();
    jsonEncodedRecord = new String(bos.toByteArray(), Charset.forName("UTF-8"));
  } catch (final IOException e) {
    throw new RuntimeException(e);
  }
  return jsonEncodedRecord;
}
 
Example #25
Source File: RecordBenchmarkBase.java    From avro-fastserde with Apache License 2.0
@Setup
public void init() throws Exception {
    final GenericDatumWriter<GenericData.Record> datumWriter = new GenericDatumWriter<>(specificRecordSchema);
    for (int i = 0; i < 1000; i++) {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        Encoder encoder = EncoderFactory.get().binaryEncoder(baos, null);

        genericRecords.add(FastSerdeBenchmarkSupport.generateRandomRecordData(specificRecordSchema));
        specificRecords
                .add(FastSerdeBenchmarkSupport.toSpecificRecord(genericRecords.get(genericRecords.size() - 1)));

        datumWriter.write(genericRecords.get(genericRecords.size() - 1), encoder);
        encoder.flush();

        recordBytes.add(baos.toByteArray());
    }
    fastGenericDatumReader = new FastGenericDatumReader<>(
            specificRecordSchema, cache);
    fastGenericDatumWriter = new FastGenericDatumWriter<>(specificRecordSchema, cache);

    genericDatumReader = new GenericDatumReader<>(specificRecordSchema);
    genericDatumWriter = new GenericDatumWriter<>(specificRecordSchema);

    fastSpecificDatumReader = new FastSpecificDatumReader<>(
            specificRecordSchema, cache);
    fastSpecificDatumWriter = new FastSpecificDatumWriter<>(specificRecordSchema, cache);

    specificDatumReader = new SpecificDatumReader<>(specificRecordSchema);
    specificDatumWriter = new SpecificDatumWriter<>(specificRecordSchema);
}
 
Example #26
Source File: AvroConsoleProducer.java    From HiveKa with Apache License 2.0
public static byte[] serializeAvro(Schema schema, GenericRecord event) throws IOException {
  ByteArrayOutputStream stream = new ByteArrayOutputStream();
  BinaryEncoder binaryEncoder = EncoderFactory.get().binaryEncoder(stream, null);
  DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<GenericRecord>(schema);
  datumWriter.write(event, binaryEncoder);
  binaryEncoder.flush();
  IOUtils.closeQuietly(stream);


  return stream.toByteArray();
}
 
Example #27
Source File: AbstractAvroSerializer.java    From jstorm with Apache License 2.0
public void write(Kryo kryo, Output output, GenericContainer record) {
    String fingerPrint = this.getFingerprint(record.getSchema());
    output.writeString(fingerPrint);
    GenericDatumWriter<GenericContainer> writer = new GenericDatumWriter<>(record.getSchema());

    BinaryEncoder encoder = EncoderFactory
            .get()
            .directBinaryEncoder(output, null);
    try {
        writer.write(record, encoder);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
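
For completeness, a hedged sketch of the matching read side; the getSchema(fingerprint) lookup is assumed to be provided by the same serializer class and is not shown in this example.

// Sketch only: the mirror-image read(), resolving the writer schema from the fingerprint.
public GenericContainer read(Kryo kryo, Input input, Class<GenericContainer> type) {
    Schema theSchema = this.getSchema(input.readString()); // getSchema(fingerprint) is assumed
    GenericDatumReader<GenericContainer> reader = new GenericDatumReader<>(theSchema);
    Decoder decoder = DecoderFactory
            .get()
            .directBinaryDecoder(input, null);
    try {
        return reader.read(null, decoder);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}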
 
Example #28
Source File: FastSerdeTestsSupport.java    From avro-fastserde with Apache License 2.0
public static <T> Decoder serializeSpecific(T record, Schema schema) {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    BinaryEncoder binaryEncoder = EncoderFactory.get().directBinaryEncoder(baos, null);

    try {
        SpecificDatumWriter<T> writer = new SpecificDatumWriter<>(schema);
        writer.write(record, binaryEncoder);
        binaryEncoder.flush();

    } catch (Exception e) {
        throw new RuntimeException(e);
    }

    return DecoderFactory.get().binaryDecoder(baos.toByteArray(), null);
}
 
Example #29
Source File: TestAvroEventSerializer.java    From mt-flume with Apache License 2.0
private byte[] serializeAvro(Object datum, Schema schema) throws IOException {
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  ReflectDatumWriter<Object> writer = new ReflectDatumWriter<Object>(schema);
  BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
  out.reset();
  writer.write(datum, encoder);
  encoder.flush();
  return out.toByteArray();
}