com.amazonaws.services.kinesisfirehose.model.Record Java Examples

The following examples show how to use com.amazonaws.services.kinesisfirehose.model.Record. Each example notes its source file and the open-source project it was taken from.
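Before the project examples, here is a minimal, self-contained sketch of the basic usage: wrap a payload in a Record and send it with a single PutRecord call. This assumes the SDK's default credential and region resolution; the delivery stream name my-delivery-stream is a placeholder.

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

import com.amazonaws.services.kinesisfirehose.AmazonKinesisFirehose;
import com.amazonaws.services.kinesisfirehose.AmazonKinesisFirehoseClientBuilder;
import com.amazonaws.services.kinesisfirehose.model.PutRecordRequest;
import com.amazonaws.services.kinesisfirehose.model.PutRecordResult;
import com.amazonaws.services.kinesisfirehose.model.Record;

public class PutSingleRecord {
	public static void main(String[] args) {
		AmazonKinesisFirehose firehose = AmazonKinesisFirehoseClientBuilder.defaultClient();

		// Wrap the payload in a Record; Firehose treats the data as an opaque blob.
		Record record = new Record()
				.withData(ByteBuffer.wrap("hello firehose\n".getBytes(StandardCharsets.UTF_8)));

		PutRecordResult result = firehose.putRecord(new PutRecordRequest()
				.withDeliveryStreamName("my-delivery-stream") // placeholder name
				.withRecord(record));

		System.out.println("record id: " + result.getRecordId());
	}
}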
Example #1
Source File: FirehoseSinkTask.java    From kinesis-kafka-connector with Apache License 2.0
/**
 * Performs a PutRecordBatch operation with the given record list.
 *
 * @param recordList
 *            the collection of records
 * @return the result of the PutRecordBatch call, or null if the request failed
 */
private PutRecordBatchResult putRecordBatch(List<Record> recordList) {
	PutRecordBatchRequest putRecordBatchRequest = new PutRecordBatchRequest();
	putRecordBatchRequest.setDeliveryStreamName(deliveryStreamName);
	putRecordBatchRequest.setRecords(recordList);

	// A single PutRecordBatch request accepts at most 500 records
	// and 4 MB of data in total.
	PutRecordBatchResult putRecordBatchResult = null;
	try {
		putRecordBatchResult = firehoseClient.putRecordBatch(putRecordBatchRequest);
	} catch (AmazonKinesisFirehoseException akfe) {
		System.out.println("Amazon Kinesis Firehose Exception: " + akfe.getLocalizedMessage());
	} catch (Exception e) {
		System.out.println("Connector Exception: " + e.getLocalizedMessage());
	}
	return putRecordBatchResult;
}
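Note that PutRecordBatch can succeed as a call while still rejecting individual records, and the method above returns the raw result (or null) without retrying. A caller that needs at-least-once delivery should inspect the result; below is a sketch of collecting the rejected records for resending. The helper name collectFailedRecords is ours, not the connector's, and it assumes an extra import of com.amazonaws.services.kinesisfirehose.model.PutRecordBatchResponseEntry.

// A sketch, not part of the connector: gather the records Firehose rejected
// so the caller can resend them. Responses are positional: entry i in
// getRequestResponses() corresponds to recordList.get(i).
private List<Record> collectFailedRecords(PutRecordBatchResult result, List<Record> recordList) {
	List<Record> failed = new ArrayList<>();
	if (result == null || result.getFailedPutCount() == null || result.getFailedPutCount() == 0) {
		return failed;
	}
	List<PutRecordBatchResponseEntry> entries = result.getRequestResponses();
	for (int i = 0; i < entries.size(); i++) {
		// A non-null error code marks a record that was not delivered.
		if (entries.get(i).getErrorCode() != null) {
			failed.add(recordList.get(i));
		}
	}
	return failed;
}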
 
Example #2
Source File: FirehoseSinkTask.java    From kinesis-kafka-connector with Apache License 2.0
/**
 * Converts the given Kafka records into Firehose records and sends a
 * PutRecordBatch whenever the configured record-count or byte-size limit is reached.
 *
 * @param sinkRecords
 *            the Kafka records to deliver
 */
private void putRecordsInBatch(Collection<SinkRecord> sinkRecords) {
	List<Record> recordList = new ArrayList<Record>();
	int recordsInBatch = 0;
	int recordsSizeInBytes = 0;

	for (SinkRecord sinkRecord : sinkRecords) {
		Record record = DataUtility.createRecord(sinkRecord);
		recordList.add(record);
		recordsInBatch++;
		recordsSizeInBytes += record.getData().capacity();

		// Flush once the batch reaches the configured record count or byte size.
		// Note: the size check runs after the record is added, so a flushed batch
		// may exceed batchSizeInBytes by up to one record.
		if (recordsInBatch == batchSize || recordsSizeInBytes > batchSizeInBytes) {
			putRecordBatch(recordList);
			recordList.clear();
			recordsInBatch = 0;
			recordsSizeInBytes = 0;
		}
	}

	if (recordsInBatch > 0) {
		putRecordBatch(recordList);
	}
}
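For context, a method like this is driven from the Kafka Connect SinkTask lifecycle. A minimal sketch of the wiring, assuming put() simply delegates each delivered collection to the batching helper (the real task may add more logic, such as a non-batched mode):

// A minimal sketch: Kafka Connect invokes put() with the latest polled
// records; here we simply hand them to the batching helper above.
@Override
public void put(Collection<SinkRecord> sinkRecords) {
	putRecordsInBatch(sinkRecords);
}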
 
Example #3
Source File: FirehoseTransportBufferSimple.java    From bender with Apache License 2.0
@Override
public boolean add(InternalEvent ievent) throws IllegalStateException, IOException {
  if (dataRecords.size() >= MAX_RECORDS) {
    logger.trace("hit record index max");
    throw new IllegalStateException("reached max payload size");
  }

  byte[] record = this.serializer.serialize(ievent);

  /*
   * Restrict size of individual record
   */
  if (record.length > MAX_RECORD_SIZE) {
    throw new IOException("serialized event is " + record.length
        + " bytes, larger than the max of " + MAX_RECORD_SIZE);
  }

  ByteBuffer data = ByteBuffer.wrap(record);
  dataRecords.add(new Record().withData(data));

  return true;
}
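MAX_RECORDS and MAX_RECORD_SIZE are defined elsewhere in the class. Plausible values, mirroring the Firehose service limits for PutRecordBatch (at most 500 records per request, up to 1,000 KB per record, 4 MB per request), would be:

  // Assumed values, matching the Firehose PutRecordBatch service limits.
  private static final int MAX_RECORDS = 500;              // records per batch request
  private static final int MAX_RECORD_SIZE = 1000 * 1000;  // bytes per individual Record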
 
Example #4
Source File: FirehoseTransportBufferBatch.java    From bender with Apache License 2.0
@Override
public void close() {
  // Flush any bytes still sitting in the byte-array buffer into a final
  // Firehose Record, provided there is room for one more record.
  if (this.cos.getByteCount() != 0 && this.dataRecords.size() < MAX_RECORDS) {
    logger.trace("flushing remainder of buffer");
    ByteBuffer data = ByteBuffer.wrap(baos.toByteArray());
    this.dataRecords.add(new Record().withData(data));
  }

  try {
    this.baos.close();
  } catch (IOException e) {
    // Ignore close failures; the buffered data has already been copied out.
  }
}
 
Example #5
Source File: FirehoseTransportBufferSimple.java    From bender with Apache License 2.0
@Override
public ArrayList<Record> getInternalBuffer() {
  return this.dataRecords;
}
 
Example #6
Source File: FirehoseTransportBufferBatch.java    From bender with Apache License 2.0
@Override
public boolean add(InternalEvent ievent) throws IllegalStateException, IOException {
  byte[] record = serializer.serialize(ievent);

  /*
   * Restrict size of individual record
   */
  if (record.length > MAX_RECORD_SIZE) {
    throw new IOException("serialized event is " + record.length
        + " bytes, larger than the max of " + MAX_RECORD_SIZE);
  }

  /*
   * Write record if there's room in buffer
   */
  if (dataRecords.size() >= MAX_RECORDS) {
    logger.trace("hit record index max");
    throw new IllegalStateException("reached max payload size");
  } else {
    if (cos.getByteCount() + record.length < MAX_RECORD_SIZE) {
      cos.write(record);
      return true;
    }

    /*
     * The buffer cannot fit this record; flush its contents into a Firehose
     * Record and start a new buffer.
     */
    logger.trace("creating new datarecord");
    ByteBuffer data = ByteBuffer.wrap(baos.toByteArray());
    this.dataRecords.add(new Record().withData(data));
    baos.reset();
    cos.resetByteCount();
    cos.resetCount();

    /*
     * If we have hit the max number of Firehose Records (4), notify the IPC
     * service that this buffer needs to be sent.
     */
    if (dataRecords.size() >= MAX_RECORDS) {
      logger.trace("hit record index max");
      throw new IllegalStateException("reached max payload size");
    }

    /*
     * Otherwise write the record to the empty internal buffer
     */
    cos.write(record);
  }

  return true;
}
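The add() contract matters to callers: IllegalStateException signals that the buffer is full and must be shipped before the event is retried, while IOException signals a single event that can never fit. A sketch of a calling loop honoring that contract, where sendBatch() is a hypothetical sender (e.g. wrapping PutRecordBatch) and buffer.clear() assumes a reset operation, neither taken from bender's actual transport code:

// Sketch of a caller driving the buffer under the add() contract.
private void drain(List<InternalEvent> events, FirehoseTransportBufferBatch buffer)
    throws IOException {
  for (InternalEvent event : events) {
    try {
      buffer.add(event);
    } catch (IllegalStateException full) {
      sendBatch(buffer.getInternalBuffer()); // hypothetical sender: ship the accumulated Records
      buffer.clear();                        // assumed reset; start a fresh buffer
      buffer.add(event);                     // retry the event that did not fit
    } catch (IOException oversized) {
      // The event alone exceeds MAX_RECORD_SIZE and can never be buffered.
      logger.warn("dropping oversized event", oversized);
    }
  }
}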
 
Example #7
Source File: FirehoseTransportBufferBatch.java    From bender with Apache License 2.0
@Override
public ArrayList<Record> getInternalBuffer() {
  return this.dataRecords;
}
 
Example #8
Source File: DataUtility.java    From kinesis-kafka-connector with Apache License 2.0
/**
 * Converts a Kafka record into a Kinesis Firehose record.
 *
 * @param sinkRecord
 *            the Kafka message to convert
 * @return the equivalent Firehose record
 */
public static Record createRecord(SinkRecord sinkRecord) {
	return new Record().withData(parseValue(sinkRecord.valueSchema(), sinkRecord.value()));
}
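createRecord delegates to DataUtility.parseValue (not shown), which serializes the Connect value into a ByteBuffer. A deliberately simplified, hypothetical stand-in that handles only STRING and BYTES schemas might look like the following; the real method covers the full range of Connect schema types.

// Simplified stand-in for DataUtility.parseValue, not the connector's code.
// Uses org.apache.kafka.connect.data.Schema, org.apache.kafka.connect.errors.DataException,
// java.nio.ByteBuffer, and java.nio.charset.StandardCharsets.
private static ByteBuffer parseValueSimplified(Schema schema, Object value) {
	switch (schema.type()) {
		case STRING:
			return ByteBuffer.wrap(((String) value).getBytes(StandardCharsets.UTF_8));
		case BYTES:
			return (value instanceof ByteBuffer)
					? (ByteBuffer) value
					: ByteBuffer.wrap((byte[]) value);
		default:
			throw new DataException("unsupported schema type: " + schema.type());
	}
}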
 
Example #9
Source File: FirehoseTransportBuffer.java    From bender with Apache License 2.0
public abstract ArrayList<Record> getInternalBuffer();