Java Code Examples for com.amazonaws.services.kinesis.model.PutRecordRequest#setData()

The following examples show how to use com.amazonaws.services.kinesis.model.PutRecordRequest#setData(). Each example comes from an open-source project; the source file and license are noted in the heading above the snippet.
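Before the project examples, here is a minimal sketch of the typical call pattern. The client construction, stream name, partition key, and payload below are placeholder assumptions for illustration, not taken from the projects that follow: setData() expects the record payload as a java.nio.ByteBuffer, which is then sent with AmazonKinesis#putRecord().

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

import com.amazonaws.services.kinesis.AmazonKinesis;
import com.amazonaws.services.kinesis.AmazonKinesisClientBuilder;
import com.amazonaws.services.kinesis.model.PutRecordRequest;
import com.amazonaws.services.kinesis.model.PutRecordResult;

public class PutRecordDataExample {

    public static void main(String[] args) {
        // Default client resolves region and credentials from the environment (assumption for this sketch)
        AmazonKinesis kinesis = AmazonKinesisClientBuilder.defaultClient();

        PutRecordRequest request = new PutRecordRequest();
        request.setStreamName("example-stream");            // placeholder stream name
        // setData() takes the record payload as a ByteBuffer
        request.setData(ByteBuffer.wrap("hello kinesis".getBytes(StandardCharsets.UTF_8)));
        request.setPartitionKey("example-partition-key");   // placeholder partition key

        PutRecordResult result = kinesis.putRecord(request);
        System.out.println("Sequence number: " + result.getSequenceNumber());
    }
}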
Example 1
Source File: AbstractKinesisOutputOperator.java    From attic-apex-malhar with Apache License 2.0
public void processTuple(T tuple)
{
  // Send out a single record, or add the tuple to the current batch
  try {
    if (isBatchProcessing) {
      if (putRecordsRequestEntryList.size() == batchSize) {
        flushRecords();
        logger.debug( "flushed {} records.", batchSize );
      }
      addRecord(tuple);
    } else {
      Pair<String, V> keyValue = tupleToKeyValue(tuple);
      PutRecordRequest requestRecord = new PutRecordRequest();
      requestRecord.setStreamName(streamName);
      requestRecord.setPartitionKey(keyValue.first);
      requestRecord.setData(ByteBuffer.wrap(getRecord(keyValue.second)));
      client.putRecord(requestRecord);
    }
    sendCount++;
  } catch (AmazonClientException e) {
    throw new RuntimeException(e);
  }
}
 
Example 2
Source File: StreamSource.java    From aws-big-data-blog with Apache License 2.0
/**
 * Process the input file and send PutRecordRequests to Amazon Kinesis.
 * 
 * This method isolates the StreamSource logic so subclasses
 * can process input files differently.
 * 
 * @param inputStream
 *        the input stream to process
 * @param iteration
 *        the current iteration when looping over the file
 * @throws IOException
 *         if an error occurs while processing the inputStream
 */
protected void processInputStream(InputStream inputStream, int iteration) throws IOException {
    try (BufferedReader br = new BufferedReader(new InputStreamReader(inputStream))) {
        String line;
        int lines = 0;
        while ((line = br.readLine()) != null) {
            KinesisMessageModel kinesisMessageModel = objectMapper.readValue(line, KinesisMessageModel.class);

            PutRecordRequest putRecordRequest = new PutRecordRequest();
            putRecordRequest.setStreamName(config.KINESIS_INPUT_STREAM);
            putRecordRequest.setData(ByteBuffer.wrap(line.getBytes()));
            putRecordRequest.setPartitionKey(Integer.toString(kinesisMessageModel.getUserid()));
            kinesisClient.putRecord(putRecordRequest);
            lines++;
        }
        LOG.info("Added " + lines + " records to stream source.");
    }
}
 
Example 3
Source File: ProducerBase.java    From aws-big-data-blog with Apache License 2.0
public void run() {

    while (true) {
        try {
            // get message from queue - blocking so code will wait here for work to do
            Event event = eventsQueue.take();

            PutRecordRequest put = new PutRecordRequest();
            put.setStreamName(this.streamName);

            put.setData(event.getData());
            put.setPartitionKey(event.getPartitionKey());

            PutRecordResult result = kinesisClient.putRecord(put);
            logger.info(result.getSequenceNumber() + ": {}", this);

        } catch (Exception e) {
            // didn't get record - move on to next
            e.printStackTrace();
        }
    }
}
 
Example 4
Source File: StreamSource.java    From amazon-kinesis-connectors with Apache License 2.0
/**
 * Process the input file and send PutRecordRequests to Amazon Kinesis.
 * 
 * This method isolates the StreamSource logic so subclasses
 * can process input files differently.
 * 
 * @param inputStream
 *        the input stream to process
 * @param iteration
 *        the current iteration when looping over the file
 * @throws IOException
 *         if an error occurs while processing the inputStream
 */
protected void processInputStream(InputStream inputStream, int iteration) throws IOException {
    try (BufferedReader br = new BufferedReader(new InputStreamReader(inputStream))) {
        String line;
        int lines = 0;
        while ((line = br.readLine()) != null) {
            KinesisMessageModel kinesisMessageModel = objectMapper.readValue(line, KinesisMessageModel.class);

            PutRecordRequest putRecordRequest = new PutRecordRequest();
            putRecordRequest.setStreamName(config.KINESIS_INPUT_STREAM);
            putRecordRequest.setData(ByteBuffer.wrap(line.getBytes()));
            putRecordRequest.setPartitionKey(Integer.toString(kinesisMessageModel.getUserid()));
            kinesisClient.putRecord(putRecordRequest);
            lines++;
        }
        LOG.info("Added " + lines + " records to stream source.");
    }
}
 
Example 5
Source File: KinesisSender.java    From zipkin-aws with Apache License 2.0
@Override public Call<Void> sendSpans(List<byte[]> list) {
  if (closeCalled) throw new IllegalStateException("closed");

  ByteBuffer message = ByteBuffer.wrap(BytesMessageEncoder.forEncoding(encoding()).encode(list));

  PutRecordRequest request = new PutRecordRequest();
  request.setStreamName(streamName);
  request.setData(message);
  request.setPartitionKey(getPartitionKey());

  return new KinesisCall(request);
}
 
Example 6
Source File: KinesisLogger.java    From json-data-generator with Apache License 2.0
@Override
public void logEvent(String event, Map<String, Object> producerConfig) {

    String streamName = (String) producerConfig.get("stream");
    if (streamName == null) {
        streamName = this.streamName;
    }

    sequenceNumber.getAndIncrement();
    try {
        PutRecordRequest putRecordRequest = new PutRecordRequest();
        putRecordRequest.setStreamName(streamName);
        putRecordRequest.setData(generateData(event));
        putRecordRequest.setPartitionKey(TIMESTAMP);
        PutRecordResult putRecordResult = kinesisClient.putRecord(putRecordRequest);
    } catch (Exception ex) {
        // got interrupted while waiting
        log.error("Error while publishing events : ", ex);
    }
    long totalTimeElapsed = System.currentTimeMillis() - startTimeFull;
    log.info("Events Published : " + sequenceNumber + " events in " + (totalTimeElapsed / 1000) + " secs");
    if (this.maxRecords != 0 && sequenceNumber.intValue() == maxRecords) {
        shutdown();
        System.exit(0);
    }
}
 
Example 7
Source File: KinesisTestProducer.java    From attic-apex-malhar with Apache License 2.0
@Override
public void run()
{
  if (records == null) {
    generateRecords();
  } else {
    for (String msg : records) {
      PutRecordRequest putRecordRequest = new PutRecordRequest();
      putRecordRequest.setStreamName(streamName);
      putRecordRequest.setData(ByteBuffer.wrap(msg.getBytes()));
      putRecordRequest.setPartitionKey(msg);
      client.putRecord(putRecordRequest);
    }
  }
}
 
Example 8
Source File: BatchedStreamSource.java    From aws-big-data-blog with Apache License 2.0
private void flushBuffer() throws IOException {
    PutRecordRequest putRecordRequest = new PutRecordRequest();
    putRecordRequest.setStreamName(config.KINESIS_INPUT_STREAM);
    putRecordRequest.setData(ByteBuffer.wrap(bufferToBytes()));
    putRecordRequest.setPartitionKey(String.valueOf(UUID.randomUUID()));
    kinesisClient.putRecord(putRecordRequest);
    buffer.clear();
}
 
Example 9
Source File: S3ManifestEmitter.java    From amazon-kinesis-connectors with Apache License 2.0
@Override
public List<byte[]> emit(final UnmodifiableBuffer<byte[]> buffer) throws IOException {
    // Store the contents of buffer.getRecords because superclass will
    // clear the buffer on success
    List<byte[]> failed = super.emit(buffer);
    // calls S3Emitter to write objects to Amazon S3
    if (!failed.isEmpty()) {
        return buffer.getRecords();
    }
    String s3File = getS3FileName(buffer.getFirstSequenceNumber(), buffer.getLastSequenceNumber());
    // wrap the name of the Amazon S3 file as the record data
    ByteBuffer data = ByteBuffer.wrap(s3File.getBytes());
    // Put the list of file names to the manifest Amazon Kinesis stream
    PutRecordRequest putRecordRequest = new PutRecordRequest();
    putRecordRequest.setData(data);
    putRecordRequest.setStreamName(manifestStream);
    // Use constant partition key to ensure file order
    putRecordRequest.setPartitionKey(manifestStream);
    try {
        kinesisClient.putRecord(putRecordRequest);
        LOG.info("S3ManifestEmitter emitted record downstream: " + s3File);
        return Collections.emptyList();
    } catch (Exception e) {
        LOG.error(e);
        return buffer.getRecords();
    }
}
 
Example 10
Source File: BatchedStreamSource.java    From amazon-kinesis-connectors with Apache License 2.0
private void flushBuffer() throws IOException {
    PutRecordRequest putRecordRequest = new PutRecordRequest();
    putRecordRequest.setStreamName(config.KINESIS_INPUT_STREAM);
    putRecordRequest.setData(ByteBuffer.wrap(bufferToBytes()));
    putRecordRequest.setPartitionKey(String.valueOf(UUID.randomUUID()));
    kinesisClient.putRecord(putRecordRequest);
    buffer.clear();
}