Java Code Examples for org.apache.flume.Event#getBody()

The following examples show how to use org.apache.flume.Event#getBody(). Each example is taken from an open-source project; the source file and license are noted above the code.
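Before diving into the project code, here is a minimal, self-contained sketch (written for this page, not taken from any of the projects below) that builds an event with org.apache.flume.event.EventBuilder and reads the payload back with getBody():

import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;

import org.apache.flume.Event;
import org.apache.flume.event.EventBuilder;

public class GetBodyExample {
  public static void main(String[] args) {
    Map<String, String> headers = new HashMap<>();
    headers.put("charset", "UTF-8");

    // EventBuilder encodes the String payload with the given charset.
    Event event = EventBuilder.withBody("hello flume", StandardCharsets.UTF_8, headers);

    // getBody() returns the payload as a raw byte array; decode it with an
    // explicit charset rather than relying on the platform default.
    byte[] body = event.getBody();
    String text = new String(body, StandardCharsets.UTF_8);
    System.out.println(event.getHeaders() + " -> " + text);
  }
}

Note that most of the examples below decode the body with an explicit charset (Charsets.UTF_8 or "UTF-8"); the ones that call new String(event.getBody()) with no charset argument fall back to the platform default encoding.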
Example 1
Source File: MorphlineHandlerImpl.java    From mt-flume with Apache License 2.0
@Override
public void process(Event event) {
  Record record = new Record();
  for (Entry<String, String> entry : event.getHeaders().entrySet()) {
    record.put(entry.getKey(), entry.getValue());
  }
  byte[] bytes = event.getBody();
  if (bytes != null && bytes.length > 0) {
    record.put(Fields.ATTACHMENT_BODY, bytes);
  }    
  try {
    Notifications.notifyStartSession(morphline);
    if (!morphline.process(record)) {
      LOG.warn("Morphline {} failed to process record: {}", morphlineFileAndId, record);
    }
  } catch (RuntimeException t) {
    morphlineContext.getExceptionHandler().handleException(t, record);
  }
}
 
Example 2
Source File: TestElasticSearchSink.java    From mt-flume with Apache License 2.0
@Override
protected void prepareIndexRequest(IndexRequestBuilder indexRequest,
    String indexName, String indexType, Event event) throws IOException {
  actualIndexName = indexName;
  actualIndexType = indexType;
  actualEventBody = event.getBody();
  indexRequest.setIndex(indexName).setType(indexType)
      .setSource(event.getBody());
}
 
Example 3
Source File: ApacheLogAvroEventSerializer.java    From flume-plugins with MIT License
@Override
protected ApacheEvent convert(Event event) {
    ApacheEvent apacheEvent = new ApacheEvent();

    String logline = new String(event.getBody(), Charsets.UTF_8);
    apacheEvent.setHeaders(event.getHeaders());
    apacheEvent.setOriginal(logline);

    Pattern pattern = Pattern.compile(REGEXP);
    Matcher m = pattern.matcher(logline);
    if (m.matches()){
        apacheEvent.setIp(m.group(1));
        apacheEvent.setIdentd(m.group(2));
        apacheEvent.setUser(m.group(3));
        apacheEvent.setTime(m.group(4));
        apacheEvent.setMethod(m.group(5));
        apacheEvent.setUri(m.group(6));
        apacheEvent.setProtocol(m.group(7));
        apacheEvent.setStatuscode(Integer.valueOf(m.group(8)));
        apacheEvent.setBytesSend(m.group(9));
        apacheEvent.setReferer(m.group(10));
        apacheEvent.setUseragent(m.group(11));
        apacheEvent.setServername(m.group(12));
        apacheEvent.setExtraservername(m.group(13));
        apacheEvent.setTimeSecond(m.group(14));
        apacheEvent.setTimeMicro(m.group(15));
        apacheEvent.setConnectionstatus(m.group(16));
        apacheEvent.setConnectiontype(m.group(17));
        apacheEvent.setSessioncookie(m.group(18));
        apacheEvent.setBytesIn(m.group(19));
        apacheEvent.setBytesOut(m.group(20));
    } else {
        log.warn("The event doesn't match the Apache LogFormat! [{}]", logline);
    }

    // log.debug("Serialized event as: {}", apacheEvent);

    return apacheEvent;
}
 
Example 4
Source File: TailSource.java    From flume-plugin with Apache License 2.0
private long getAllSize(List<Event> events) {
    long size = 0;
    for (Event e: events) {
        size += e.getBody().length;
    }
    return size;
}
 
Example 5
Source File: FlumeEventTransformer.java    From ignite-book-code-samples with GNU General Public License v3.0
/**
 * Format event - String:String
 * example - transactionId:amount [56102:232], where transactionId is the key and amount is the value
 * */
private Map<String, Integer> transform(Event event){
    final Map<String, Integer> map = new HashMap<>();
    String eventBody = new String(event.getBody());
    if(!eventBody.isEmpty()){
        // parse the string by :
        String[] tokens = eventBody.split(":");
        map.put(tokens[0].trim(), Integer.valueOf(tokens[1].trim()));
    }

    return map;
}
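To make the format comment in Example 5 concrete, here is a short, hypothetical snippet (not part of FlumeEventTransformer) that applies the same transactionId:amount parsing to an event built with EventBuilder:

// hypothetical driver for the parsing logic above (not from the original class)
org.apache.flume.Event event =
    org.apache.flume.event.EventBuilder.withBody("56102:232", java.nio.charset.StandardCharsets.UTF_8);
String eventBody = new String(event.getBody(), java.nio.charset.StandardCharsets.UTF_8);
String[] tokens = eventBody.split(":");
java.util.Map<String, Integer> map = new java.util.HashMap<>();
map.put(tokens[0].trim(), Integer.valueOf(tokens[1].trim()));
// map now contains {56102=232}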
 
Example 6
Source File: IRCSink.java    From mt-flume with Apache License 2.0
private void sendLine(Event event) {
  String body = new String(event.getBody());
  
  if (splitLines) {
    String[] lines = body.split(splitChars);
    for(String line: lines) {
      connection.doPrivmsg(IRC_CHANNEL_PREFIX + this.chan, line);
    }
  } else {
    connection.doPrivmsg(IRC_CHANNEL_PREFIX + this.chan, body);
  }
  
}
 
Example 7
Source File: FlumeEmbeddedAgentTest.java    From logging-log4j2 with Apache License 2.0
private String getBody(final Event event) throws IOException {
    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    final InputStream is = new GZIPInputStream(new ByteArrayInputStream(event.getBody()));
    int n = 0;
    while (-1 != (n = is.read())) {
        baos.write(n);
    }
    return new String(baos.toByteArray());

}
 
Example 8
Source File: EventHelper.java    From mt-flume with Apache License 2.0
public static String dumpEvent(Event event, int maxBytes) {
  StringBuilder buffer = new StringBuilder();
  if (event == null || event.getBody() == null) {
    buffer.append("null");
  } else if (event.getBody().length == 0) {
    // do nothing... in this case, HexDump.dump() will throw an exception
  } else {
    byte[] body = event.getBody();
    byte[] data = Arrays.copyOf(body, Math.min(body.length, maxBytes));
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    try {
      HexDump.dump(data, 0, out, 0);
      String hexDump = new String(out.toByteArray());
      // remove offset since it's not relevant for such a small dataset
      if(hexDump.startsWith(HEXDUMP_OFFSET)) {
        hexDump = hexDump.substring(HEXDUMP_OFFSET.length());
      }
      buffer.append(hexDump);
    } catch (Exception e) {
      if (LOGGER.isInfoEnabled()) {
        LOGGER.info("Exception while dumping event", e);
      }
      buffer.append("...Exception while dumping: ").append(e.getMessage());
    }
    // trim the trailing end-of-line that HexDump appends
    String result = buffer.toString();
    if (result.endsWith(EOL) && buffer.length() > EOL.length()) {
      buffer.delete(buffer.length() - EOL.length(), buffer.length());
    }
  }
  return "{ headers:" + event.getHeaders() + " body:" + buffer + " }";
}
 
Example 9
Source File: TestFlumeFailoverTarget.java    From datacollector with Apache License 2.0
@Test
public void testWriteJsonRecords() throws InterruptedException, StageException, IOException {

  DataGeneratorFormatConfig dataGeneratorFormatConfig = new DataGeneratorFormatConfig();
  FlumeTarget flumeTarget = FlumeTestUtil.createFlumeTarget(
    FlumeTestUtil.createDefaultFlumeConfig(port, false),
    DataFormat.SDC_JSON,
    dataGeneratorFormatConfig
  );

  TargetRunner targetRunner = new TargetRunner.Builder(FlumeDTarget.class, flumeTarget).build();

  targetRunner.runInit();
  List<Record> logRecords = FlumeTestUtil.createJsonRecords();
  targetRunner.runWrite(logRecords);
  targetRunner.runDestroy();

  ContextExtensions ctx = (ContextExtensions) ContextInfoCreator.createTargetContext("", false, OnRecordError.TO_ERROR);
  for(Record r : logRecords) {
    Transaction transaction = ch.getTransaction();
    transaction.begin();
    Event event = ch.take();
    Assert.assertNotNull(event);
    ByteArrayInputStream bais = new ByteArrayInputStream(event.getBody());
    RecordReader rr = ctx.createRecordReader(bais, 0, Integer.MAX_VALUE);
    Assert.assertEquals(r, rr.readRecord());
    Assert.assertTrue(event.getHeaders().containsKey("charset"));
    Assert.assertEquals("UTF-8", event.getHeaders().get("charset"));
    rr.close();
    transaction.commit();
    transaction.close();
  }
}
 
Example 10
Source File: FlumeAppenderTest.java    From logging-log4j2 with Apache License 2.0
private String getBody(final Event event) throws IOException {
    if (event == null) {
        return "";
    }
    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    final InputStream is = new GZIPInputStream(new ByteArrayInputStream(
            event.getBody()));
    int n = 0;
    while (-1 != (n = is.read())) {
        baos.write(n);
    }
    return new String(baos.toByteArray());

}
 
Example 11
Source File: TestSequenceGeneratorSource.java    From mt-flume with Apache License 2.0
@Test
public void testBatchProcessWithLifeCycle() throws InterruptedException, LifecycleException,
    EventDeliveryException {

  int batchSize = 10;

  Channel channel = new PseudoTxnMemoryChannel();
  Context context = new Context();

  context.put("logicalNode.name", "test");
  context.put("batchSize", Integer.toString(batchSize));

  Configurables.configure(source, context);
  Configurables.configure(channel, context);

  List<Channel> channels = new ArrayList<Channel>();
  channels.add(channel);

  ChannelSelector rcs = new ReplicatingChannelSelector();
  rcs.setChannels(channels);

  source.setChannelProcessor(new ChannelProcessor(rcs));

  source.start();

  for (long i = 0; i < 100; i++) {
    source.process();

    for (long j = batchSize; j > 0; j--) {
      Event event = channel.take();
      String expectedVal = String.valueOf(((i+1)*batchSize)-j);
      String resultedVal = new String(event.getBody());
      Assert.assertTrue("Expected " + expectedVal + " is not equals to " +
          resultedVal, expectedVal.equals(resultedVal));
    }
  }

  source.stop();
}
 
Example 12
Source File: MockHDFSWriter.java    From mt-flume with Apache License 2.0
public void append(Event e) throws IOException {
  eventsWritten++;
  bytesWritten += e.getBody().length;
}
 
Example 13
Source File: TestMultiportSyslogTCPSource.java    From mt-flume with Apache License 2.0
/**
 * Test the reassembly of a single line across multiple packets.
 */
@Test
public void testFragmented() throws CharacterCodingException {
  final int maxLen = 100;

  IoBuffer savedBuf = IoBuffer.allocate(maxLen);

  String origMsg = "<1>- - blah blam foo\n";
  IoBuffer buf1 = IoBuffer.wrap(
      origMsg.substring(0, 11).getBytes(Charsets.UTF_8));
  IoBuffer buf2 = IoBuffer.wrap(
      origMsg.substring(11, 16).getBytes(Charsets.UTF_8));
  IoBuffer buf3 = IoBuffer.wrap(
      origMsg.substring(16, 21).getBytes(Charsets.UTF_8));

  LineSplitter lineSplitter = new LineSplitter(maxLen);
  ParsedBuffer parsedLine = new ParsedBuffer();

  Assert.assertFalse("Incomplete line should not be parsed",
      lineSplitter.parseLine(buf1, savedBuf, parsedLine));
  Assert.assertFalse("Incomplete line should not be parsed",
      lineSplitter.parseLine(buf2, savedBuf, parsedLine));
  Assert.assertTrue("Completed line should be parsed",
      lineSplitter.parseLine(buf3, savedBuf, parsedLine));

  // the fragmented message should now be reconstructed
  Assert.assertEquals(origMsg.trim(),
      parsedLine.buffer.getString(Charsets.UTF_8.newDecoder()));
  parsedLine.buffer.rewind();

  MultiportSyslogTCPSource.MultiportSyslogHandler handler =
      new MultiportSyslogTCPSource.MultiportSyslogHandler(maxLen, 100, null,
      null, SyslogSourceConfigurationConstants.DEFAULT_PORT_HEADER,
      new ThreadSafeDecoder(Charsets.UTF_8),
      new ConcurrentHashMap<Integer, ThreadSafeDecoder>());

  Event event = handler.parseEvent(parsedLine, Charsets.UTF_8.newDecoder());
  String body = new String(event.getBody(), Charsets.UTF_8);
  Assert.assertEquals("Event body incorrect",
      origMsg.trim().substring(7), body);
}
 
Example 14
Source File: FlumeEventStringBodyAvroEventSerializer.java    From avro-flume-hive-example with Apache License 2.0
@Override
protected Container convert(Event event) {
  return new Container(event.getHeaders(), new String(event.getBody(), Charsets.UTF_8));
}
 
Example 15
Source File: TestLog4jAppender.java    From kite with Apache License 2.0
@Test
public void testLayout() throws IOException {
  props.put("log4j.appender.out2.layout", "org.apache.log4j.PatternLayout");
  props.put("log4j.appender.out2.layout.ConversionPattern",
      "%-5p [%t]: %m%n");
  PropertyConfigurator.configure(props);
  Logger logger = LogManager.getLogger(TestLog4jAppender.class);
  Thread.currentThread().setName("Log4jAppenderTest");
  for(int count = 0; count <= 100; count++){
    /*
     * Log4j internally defines levels as multiples of 10000. So if we
     * create levels directly using count, the level will be set as the
     * default.
     */
    int level = ((count % 5)+1)*10000;
    String msg = "This is log message number" + String.valueOf(count);

    logger.log(Level.toLevel(level), msg);
    Transaction transaction = ch.getTransaction();
    transaction.begin();
    Event event = ch.take();
    Assert.assertNotNull(event);
    StringBuilder builder = new StringBuilder();
    builder.append("[").append("Log4jAppenderTest").append("]: ")
        .append(msg);
    //INFO seems to insert an extra space, so let's split the string.
    String eventBody = new String(event.getBody(), "UTF-8");
    String eventLevel = eventBody.split("\\s+")[0];
    Assert.assertEquals(Level.toLevel(level).toString(), eventLevel);
    Assert.assertEquals(
        new String(event.getBody(), "UTF8").trim()
            .substring(eventLevel.length()).trim(), builder.toString());

    Map<String, String> hdrs = event.getHeaders();

    Assert.assertNotNull(hdrs.get(Log4jAvroHeaders.TIMESTAMP.toString()));

    Assert.assertEquals(Level.toLevel(level),
        Level.toLevel(Integer.parseInt(hdrs.get(Log4jAvroHeaders.LOG_LEVEL
            .toString()))));

    Assert.assertEquals(logger.getName(),
        hdrs.get(Log4jAvroHeaders.LOGGER_NAME.toString()));

    Assert.assertEquals("UTF8",
        hdrs.get(Log4jAvroHeaders.MESSAGE_ENCODING.toString()));
    transaction.commit();
    transaction.close();
  }


}
 
Example 16
Source File: TestFlumeFailoverTarget.java    From datacollector with Apache License 2.0
@Test
public void testWriteAvroRecordsSingleEvent() throws InterruptedException, StageException, IOException {

  DataGeneratorFormatConfig dataGeneratorFormatConfig = new DataGeneratorFormatConfig();
  dataGeneratorFormatConfig.avroSchema = SdcAvroTestUtil.AVRO_SCHEMA1;
  dataGeneratorFormatConfig.avroSchemaSource = INLINE;
  dataGeneratorFormatConfig.includeSchema = true;
  dataGeneratorFormatConfig.avroCompression = AvroCompression.NULL;
  FlumeTarget flumeTarget = FlumeTestUtil.createFlumeTarget(
    FlumeTestUtil.createDefaultFlumeConfig(port, true),
    DataFormat.AVRO,
    dataGeneratorFormatConfig
  );
  TargetRunner targetRunner = new TargetRunner.Builder(FlumeDTarget.class, flumeTarget).build();

  targetRunner.runInit();
  List<Record> records = SdcAvroTestUtil.getRecords1();
  targetRunner.runWrite(records);
  targetRunner.runDestroy();

  List<GenericRecord> genericRecords = new ArrayList<>();
  DatumReader<GenericRecord> datumReader = new GenericDatumReader<>(); //Reader schema argument is optional

  int eventCounter = 0;
  Transaction transaction = ch.getTransaction();
  transaction.begin();
  Event event = ch.take();
  while(event != null) {
    eventCounter++;
    DataFileReader<GenericRecord> dataFileReader = new DataFileReader<>(
      new SeekableByteArrayInput(event.getBody()), datumReader);
    while(dataFileReader.hasNext()) {
      genericRecords.add(dataFileReader.next());
    }
    event = ch.take();
  }
  transaction.commit();
  transaction.close();

  Assert.assertEquals(1, eventCounter);
  Assert.assertEquals(3, genericRecords.size());
  SdcAvroTestUtil.compare1(genericRecords);
}
 
Example 17
Source File: ElasticSearchLogStashEventSerializer.java    From mt-flume with Apache License 2.0
private void appendBody(XContentBuilder builder, Event event)
    throws IOException, UnsupportedEncodingException {
  byte[] body = event.getBody();
  ContentBuilderUtil.appendField(builder, "@message", body);
}
 
Example 18
Source File: TestSpoolingFileLineReader.java    From mt-flume with Apache License 2.0
static String bodyAsString(Event event) {
  return new String(event.getBody());
}
 
Example 19
Source File: ElasticSearchSerializerWithMapping.java    From ingestion with Apache License 2.0
private void appendBody(XContentBuilder builder, Event event)
		throws IOException, UnsupportedEncodingException {
	byte[] body = event.getBody();
	ContentBuilderUtil.appendField(builder, "@message", body);
}
 
Example 20
Source File: TestLog4jAppender.java    From mt-flume with Apache License 2.0
@Test
public void testLayout() throws IOException {
  configureSource();
  props.put("log4j.appender.out2.layout", "org.apache.log4j.PatternLayout");
  props.put("log4j.appender.out2.layout.ConversionPattern",
      "%-5p [%t]: %m%n");
  PropertyConfigurator.configure(props);
  Logger logger = LogManager.getLogger(TestLog4jAppender.class);
  Thread.currentThread().setName("Log4jAppenderTest");
  for(int count = 0; count <= 100; count++){
    /*
     * Log4j internally defines levels as multiples of 10000. So if we
     * create levels directly using count, the level will be set as the
     * default.
     */
    int level = ((count % 5)+1)*10000;
    String msg = "This is log message number" + String.valueOf(count);

    logger.log(Level.toLevel(level), msg);
    Transaction transaction = ch.getTransaction();
    transaction.begin();
    Event event = ch.take();
    Assert.assertNotNull(event);
    StringBuilder builder = new StringBuilder();
    builder.append("[").append("Log4jAppenderTest").append("]: ")
        .append(msg);
    //INFO seems to insert an extra space, so let's split the string.
    String eventBody = new String(event.getBody(), "UTF-8");
    String eventLevel = eventBody.split("\\s+")[0];
    Assert.assertEquals(Level.toLevel(level).toString(), eventLevel);
    Assert.assertEquals(
        new String(event.getBody(), "UTF8").trim()
            .substring(eventLevel.length()).trim(), builder.toString());

    Map<String, String> hdrs = event.getHeaders();

    Assert.assertNotNull(hdrs.get(Log4jAvroHeaders.TIMESTAMP.toString()));

    Assert.assertEquals(Level.toLevel(level),
        Level.toLevel(Integer.parseInt(hdrs.get(Log4jAvroHeaders.LOG_LEVEL
            .toString()))));

    Assert.assertEquals(logger.getName(),
        hdrs.get(Log4jAvroHeaders.LOGGER_NAME.toString()));

    Assert.assertEquals("UTF8",
        hdrs.get(Log4jAvroHeaders.MESSAGE_ENCODING.toString()));
    transaction.commit();
    transaction.close();
  }


}