Java Code Examples for org.apache.flume.Context

The following are top-voted examples showing how to use org.apache.flume.Context. They are extracted from open source projects.
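Before diving into the project examples, here is a minimal, self-contained sketch of the Context operations that recur throughout the snippets below: the Context(Map) constructor, put/putAll, the typed getters with optional defaults, and getSubProperties. The property names and values here are purely illustrative.

import java.util.HashMap;
import java.util.Map;

import org.apache.flume.Context;

public class ContextBasics {
  public static void main(String[] args) {
    // Build a Context from an existing property map (illustrative keys).
    Map<String, String> props = new HashMap<String, String>();
    props.put("hostname", "localhost");
    props.put("port", "4141");
    props.put("kafka.consumer.group.id", "example-group");
    Context context = new Context(props);

    // Add or override individual properties.
    context.put("batch-size", "100");

    // Typed getters; the two-argument forms fall back to a default value.
    String host = context.getString("hostname");
    Integer port = context.getInteger("port", 4141);
    Long connectTimeout = context.getLong("connect-timeout", 2000L);
    Boolean useSsl = context.getBoolean("ssl", false);

    // All properties under a prefix, with the prefix stripped from the keys
    // (the pattern used by the Taildir and Kafka examples below).
    Map<String, String> consumerProps = context.getSubProperties("kafka.consumer.");

    System.out.println(host + ":" + port + " connectTimeout=" + connectTimeout
        + " ssl=" + useSsl + " consumerProps=" + consumerProps);
  }
}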
Example 1
Project: flume-release-1.7.0   File: TestLog4jAppender.java
@Test(expected = EventDeliveryException.class)
public void testSlowness() throws Throwable {
  ch = new SlowMemoryChannel(2000);
  Configurables.configure(ch, new Context());
  configureSource();
  props.put("log4j.appender.out2.Timeout", "1000");
  props.put("log4j.appender.out2.layout", "org.apache.log4j.PatternLayout");
  props.put("log4j.appender.out2.layout.ConversionPattern",
      "%-5p [%t]: %m%n");
  PropertyConfigurator.configure(props);
  Logger logger = LogManager.getLogger(TestLog4jAppender.class);
  Thread.currentThread().setName("Log4jAppenderTest");
  int level = 10000;
  String msg = "This is log message number" + String.valueOf(1);
  try {
    logger.log(Level.toLevel(level), msg);
  } catch (FlumeException ex) {
    throw ex.getCause();
  }
}
 
Example 2
Project: flume-release-1.7.0   File: SinkGroupConfiguration.java
@Override
public void configure(Context context) throws ConfigurationException {
  super.configure(context);
  sinks = Arrays.asList(context.getString(
      BasicConfigurationConstants.CONFIG_SINKS).split("\\s+"));
  Map<String, String> params = context.getSubProperties(
      BasicConfigurationConstants.CONFIG_SINK_PROCESSOR_PREFIX);
  processorContext = new Context();
  processorContext.putAll(params);
  SinkProcessorType spType = getKnownSinkProcessor(processorContext.getString(
          BasicConfigurationConstants.CONFIG_TYPE));

  if (spType != null) {
    processorConf =
        (SinkProcessorConfiguration) ComponentConfigurationFactory.create(
            this.getComponentName() + "-processor",
            spType.toString(),
            ComponentType.SINK_PROCESSOR);
    if (processorConf != null) {
      processorConf.setSinks(new HashSet<String>(sinks));
      processorConf.configure(processorContext);
    }
  }
  setConfigured();
}
 
Example 3
Project: flume-release-1.7.0   File: TestHostInterceptor.java
/**
 * Ensure host is NOT overwritten when preserveExisting=true.
 */
@Test
public void testPreserve() throws Exception {
  Context ctx = new Context();
  ctx.put("preserveExisting", "true");

  Interceptor.Builder builder = InterceptorBuilderFactory.newInstance(
          InterceptorType.HOST.toString());
  builder.configure(ctx);
  Interceptor interceptor = builder.build();

  final String ORIGINAL_HOST = "originalhost";
  Event eventBeforeIntercept = EventBuilder.withBody("test event",
          Charsets.UTF_8);
  eventBeforeIntercept.getHeaders().put(Constants.HOST, ORIGINAL_HOST);
  Assert.assertEquals(ORIGINAL_HOST,
          eventBeforeIntercept.getHeaders().get(Constants.HOST));

  String expectedHost = ORIGINAL_HOST;
  Event eventAfterIntercept = interceptor.intercept(eventBeforeIntercept);
  String actualHost = eventAfterIntercept.getHeaders().get(Constants.HOST);

  Assert.assertNotNull(actualHost);
  Assert.assertEquals(expectedHost, actualHost);
}
 
Example 4
Project: flume-release-1.7.0   File: TestNetcatSource.java
@Before
public void setUp() {
  logger.info("Running setup");

  channel = new MemoryChannel();
  source = new NetcatSource();

  Context context = new Context();

  Configurables.configure(channel, context);
  List<Channel> channels = Lists.newArrayList(channel);
  ChannelSelector rcs = new ReplicatingChannelSelector();
  rcs.setChannels(channels);

  source.setChannelProcessor(new ChannelProcessor(rcs));
}
 
Example 5
Project: flume-release-1.7.0   File: TestKafkaSink.java
@Test
public void testEmptyChannel() throws UnsupportedEncodingException, EventDeliveryException {
  Sink kafkaSink = new KafkaSink();
  Context context = prepareDefaultContext();
  Configurables.configure(kafkaSink, context);
  Channel memoryChannel = new MemoryChannel();
  Configurables.configure(memoryChannel, context);
  kafkaSink.setChannel(memoryChannel);
  kafkaSink.start();

  Sink.Status status = kafkaSink.process();
  if (status != Sink.Status.BACKOFF) {
    fail("Error Occurred");
  }
  assertNull(testUtil.getNextMessageFromConsumer(DEFAULT_TOPIC));
}
 
Example 6
Project: flume-release-1.7.0   File: TestAvroSink.java
public void setUp(String compressionType, int compressionLevel) {
  if (sink != null) {
    throw new RuntimeException("double setup");
  }
  sink = new AvroSink();
  channel = new MemoryChannel();

  Context context = new Context();

  context.put("hostname", hostname);
  context.put("port", String.valueOf(port));
  context.put("batch-size", String.valueOf(2));
  context.put("connect-timeout", String.valueOf(2000L));
  context.put("request-timeout", String.valueOf(3000L));
  if (compressionType.equals("deflate")) {
    context.put("compression-type", compressionType);
    context.put("compression-level", Integer.toString(compressionLevel));
  }

  sink.setChannel(channel);

  Configurables.configure(sink, context);
  Configurables.configure(channel, context);
}
 
Example 7
Project: flume-release-1.7.0   File: TestTaildirSource.java
@Test
public void testPutFilenameHeader() throws IOException {
  File f1 = new File(tmpDir, "file1");
  Files.write("f1\n", f1, Charsets.UTF_8);

  Context context = new Context();
  context.put(POSITION_FILE, posFilePath);
  context.put(FILE_GROUPS, "fg");
  context.put(FILE_GROUPS_PREFIX + "fg", tmpDir.getAbsolutePath() + "/file.*");
  context.put(FILENAME_HEADER, "true");
  context.put(FILENAME_HEADER_KEY, "path");

  Configurables.configure(source, context);
  source.start();
  source.process();
  Transaction txn = channel.getTransaction();
  txn.begin();
  Event e = channel.take();
  txn.commit();
  txn.close();

  assertNotNull(e.getHeaders().get("path"));
  assertEquals(f1.getAbsolutePath(),
          e.getHeaders().get("path"));
}
 
Example 8
Project: Transwarp-Sample-Code   File: AbstractHDFSWriter.java
@Override
public void configure(Context context) {
  configuredMinReplicas = context.getInteger("hdfs.minBlockReplicas");
  if (configuredMinReplicas != null) {
    Preconditions.checkArgument(configuredMinReplicas >= 0,
        "hdfs.minBlockReplicas must be greater than or equal to 0");
  }
  numberOfCloseRetries = context.getInteger("hdfs.closeTries", 1) - 1;

  if (numberOfCloseRetries > 1) {
    try {
      timeBetweenCloseRetries = context.getLong("hdfs.callTimeout", 10000L);
    } catch (NumberFormatException e) {
      logger.warn("hdfs.callTimeout can not be parsed to a long: " + context.getLong("hdfs.callTimeout"));
    }
    timeBetweenCloseRetries = Math.max(timeBetweenCloseRetries/numberOfCloseRetries, 1000);
  }

}
 
Example 9
Project: flume-release-1.7.0   File: TestSyslogUdpSource.java
private void init(String keepFields) {
  source = new SyslogUDPSource();
  channel = new MemoryChannel();

  Configurables.configure(channel, new Context());

  List<Channel> channels = new ArrayList<Channel>();
  channels.add(channel);

  ChannelSelector rcs = new ReplicatingChannelSelector();
  rcs.setChannels(channels);

  source.setChannelProcessor(new ChannelProcessor(rcs));
  Context context = new Context();
  context.put("host", InetAddress.getLoopbackAddress().getHostAddress());
  context.put("port", String.valueOf(TEST_SYSLOG_PORT));
  context.put("keepFields", keepFields);

  source.configure(context);

}
 
Example 10
Project: flume-release-1.7.0   File: TestKafkaSource.java
@Test
public void testKafkaProperties() {
  Context context = new Context();
  context.put(TOPICS, "test1, test2");
  context.put(KAFKA_CONSUMER_PREFIX + ConsumerConfig.GROUP_ID_CONFIG,
              "override.default.group.id");
  context.put(KAFKA_CONSUMER_PREFIX + "fake.property", "kafka.property.value");
  context.put(BOOTSTRAP_SERVERS, "real-bootstrap-servers-list");
  context.put(KAFKA_CONSUMER_PREFIX + "bootstrap.servers", "bad-bootstrap-servers-list");
  KafkaSource source = new KafkaSource();
  source.doConfigure(context);
  Properties kafkaProps = source.getConsumerProps();

  //check that we have defaults set
  assertEquals(String.valueOf(DEFAULT_AUTO_COMMIT),
               kafkaProps.getProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG));
  //check that kafka properties override the default and get correct name
  assertEquals("override.default.group.id",
               kafkaProps.getProperty(ConsumerConfig.GROUP_ID_CONFIG));
  //check that any kafka property gets in
  assertEquals("kafka.property.value",
               kafkaProps.getProperty("fake.property"));
  //check that documented property overrides defaults
  assertEquals("real-bootstrap-servers-list",
               kafkaProps.getProperty("bootstrap.servers"));
}
 
Example 11
Project: flume-release-1.7.0   File: TestMorphlineInterceptor.java
@Test
/** morphline says route to southpole if it's an avro file, otherwise route to northpole */
public void testIfDetectMimeTypeRouteToSouthPole() throws Exception {
  Context context = new Context();
  context.put(MorphlineHandlerImpl.MORPHLINE_FILE_PARAM,
              RESOURCES_DIR + "/test-morphlines/ifDetectMimeType.conf");
  context.put(MorphlineHandlerImpl.MORPHLINE_VARIABLE_PARAM + ".MY.MIME_TYPE", "avro/binary");

  Event input = EventBuilder.withBody(Files.toByteArray(
      new File(RESOURCES_DIR + "/test-documents/sample-statuses-20120906-141433.avro")));
  Event actual = build(context).intercept(input);

  Map<String, String> expected = new HashMap<String, String>();
  expected.put(Fields.ATTACHMENT_MIME_TYPE, "avro/binary");
  expected.put("flume.selector.header", "goToSouthPole");
  Event expectedEvent = EventBuilder.withBody(input.getBody(), expected);
  assertEqualsEvent(expectedEvent, actual);
}
 
Example 12
Project: flume-release-1.7.0   File: TestHTTPMetricsServer.java
public void doTestForbiddenMethods(int port, String method) throws Exception {
  MonitorService srv = new HTTPMetricsServer();
  Context context = new Context();
  if (port > 1024) {
    context.put(HTTPMetricsServer.CONFIG_PORT, String.valueOf(port));
  } else {
    port = HTTPMetricsServer.DEFAULT_PORT;
  }
  srv.configure(context);
  srv.start();
  Thread.sleep(1000);
  URL url = new URL("http://0.0.0.0:" + String.valueOf(port) + "/metrics");
  HttpURLConnection conn = (HttpURLConnection) url.openConnection();
  conn.setRequestMethod(method);
  Assert.assertEquals(HttpServletResponse.SC_FORBIDDEN, conn.getResponseCode());
  srv.stop();
}
 
Example 13
Project: flume-release-1.7.0   File: TestFlumeEventAvroEventSerializer.java
public void createAvroFile(File file, String codec) throws FileNotFoundException, IOException {

  if (file.exists()) {
    FileUtils.forceDelete(file);
  }

  // serialize a few events using the reflection-based avro serializer
  OutputStream out = new FileOutputStream(file);

  Context ctx = new Context();
  if (codec != null) {
    ctx.put("compressionCodec", codec);
  }

  EventSerializer.Builder builder =
      new FlumeEventAvroEventSerializer.Builder();
  EventSerializer serializer = builder.build(ctx, out);

  serializer.afterCreate();
  serializer.write(EventBuilder.withBody("yo man!", Charsets.UTF_8));
  serializer.write(EventBuilder.withBody("2nd event!", Charsets.UTF_8));
  serializer.write(EventBuilder.withBody("last one!", Charsets.UTF_8));
  serializer.flush();
  serializer.beforeClose();
  out.flush();
  out.close();
}
 
Example 14
Project: flume-release-1.7.0   File: TestAbstractPollableSource.java
@Before
public void setUp() {
  source = spy(new AbstractPollableSource() {
    @Override
    protected Status doProcess() throws EventDeliveryException {
      return Status.BACKOFF;
    }
    @Override
    protected void doConfigure(Context context) throws FlumeException {
      throw new FlumeException("dummy");
    }
    @Override
    protected void doStart() throws FlumeException {

    }
    @Override
    protected void doStop() throws FlumeException {

    }
  });
}
 
Example 15
Project: flume-release-1.7.0   File: TestNetcatSource.java
/**
 * We set up the the Netcat source and Flume Memory Channel on localhost
 *
 * @throws UnknownHostException
 */
@Before
public void setUp() throws UnknownHostException {
  localhost = InetAddress.getByName("127.0.0.1");
  source = new NetcatSource();
  channel = new MemoryChannel();

  Configurables.configure(channel, new Context());

  List<Channel> channels = new ArrayList<Channel>();
  channels.add(channel);

  ChannelSelector rcs = new ReplicatingChannelSelector();
  rcs.setChannels(channels);

  source.setChannelProcessor(new ChannelProcessor(rcs));
}
 
Example 16
Project: flume-release-1.7.0   File: TestMemoryChannel.java
@Test(expected = ChannelException.class)
public void testCapacityOverload() {
  Context context = new Context();
  Map<String, String> parms = new HashMap<String, String>();
  parms.put("capacity", "5");
  parms.put("transactionCapacity", "3");
  context.putAll(parms);
  Configurables.configure(channel, context);

  Transaction transaction = channel.getTransaction();
  transaction.begin();
  channel.put(EventBuilder.withBody("test".getBytes()));
  channel.put(EventBuilder.withBody("test".getBytes()));
  channel.put(EventBuilder.withBody("test".getBytes()));
  transaction.commit();
  transaction.close();

  transaction = channel.getTransaction();
  transaction.begin();
  channel.put(EventBuilder.withBody("test".getBytes()));
  channel.put(EventBuilder.withBody("test".getBytes()));
  channel.put(EventBuilder.withBody("test".getBytes()));
  // this should kill it
  transaction.commit();
  Assert.fail();
}
 
Example 17
Project: flume-release-1.7.0   File: TestTaildirSource.java
@Test
public void testLifecycle() throws IOException, InterruptedException {
  File f1 = new File(tmpDir, "file1");
  Files.write("file1line1\nfile1line2\n", f1, Charsets.UTF_8);

  Context context = new Context();
  context.put(POSITION_FILE, posFilePath);
  context.put(FILE_GROUPS, "f1");
  context.put(FILE_GROUPS_PREFIX + "f1", tmpDir.getAbsolutePath() + "/file1$");
  Configurables.configure(source, context);

  for (int i = 0; i < 3; i++) {
    source.start();
    source.process();
    assertTrue("Reached start or error", LifecycleController.waitForOneOf(
        source, LifecycleState.START_OR_ERROR));
    assertEquals("Server is started", LifecycleState.START,
        source.getLifecycleState());

    source.stop();
    assertTrue("Reached stop or error",
        LifecycleController.waitForOneOf(source, LifecycleState.STOP_OR_ERROR));
    assertEquals("Server is stopped", LifecycleState.STOP,
        source.getLifecycleState());
  }
}
 
Example 18
Project: flume-release-1.7.0   File: TestMorphlineInterceptor.java
@Test
/** morphline says route to southpole if it's an avro file, otherwise route to northpole */
public void testIfDetectMimeTypeRouteToNorthPole() throws Exception {
  Context context = new Context();
  context.put(MorphlineHandlerImpl.MORPHLINE_FILE_PARAM,
              RESOURCES_DIR + "/test-morphlines/ifDetectMimeType.conf");
  context.put(MorphlineHandlerImpl.MORPHLINE_VARIABLE_PARAM + ".MY.MIME_TYPE", "avro/binary");

  Event input = EventBuilder.withBody(
      Files.toByteArray(new File(RESOURCES_DIR + "/test-documents/testPDF.pdf")));
  Event actual = build(context).intercept(input);

  Map<String, String> expected = new HashMap<String, String>();
  expected.put(Fields.ATTACHMENT_MIME_TYPE, "application/pdf");
  expected.put("flume.selector.header", "goToNorthPole");
  Event expectedEvent = EventBuilder.withBody(input.getBody(), expected);
  assertEqualsEvent(expectedEvent, actual);
}
 
Example 19
Project: flume-release-1.7.0   File: TestMemoryChannel.java
@Test
public void testNegativeCapacities() {
  Context context = new Context();
  Map<String, String> parms = new HashMap<String, String>();
  parms.put("capacity", "-3");
  parms.put("transactionCapacity", "-1");
  context.putAll(parms);
  Configurables.configure(channel, context);

  Assert.assertTrue(field("queue")
          .ofType(LinkedBlockingDeque.class)
          .in(channel).get()
          .remainingCapacity() > 0);

  Assert.assertTrue(field("transCapacity")
          .ofType(Integer.class)
          .in(channel).get() > 0);
}
 
Example 20
Project: flume-release-1.7.0   File: TestReplicatingChannelSelector.java
@Test
public void testOptionalChannels() throws Exception {
  Context context = new Context();
  context.put(ReplicatingChannelSelector.CONFIG_OPTIONAL, "ch1");
  Configurables.configure(selector, context);
  List<Channel> channels = selector.getRequiredChannels(new MockEvent());
  Assert.assertNotNull(channels);
  Assert.assertEquals(3, channels.size());
  Assert.assertEquals("ch2", channels.get(0).getName());
  Assert.assertEquals("ch3", channels.get(1).getName());
  Assert.assertEquals("ch4", channels.get(2).getName());

  List<Channel> optCh = selector.getOptionalChannels(new MockEvent());
  Assert.assertEquals(1, optCh.size());
  Assert.assertEquals("ch1", optCh.get(0).getName());

}
 
Example 21
Project: flume-release-1.7.0   File: TestTaildirSource.java
@Before
public void setUp() {
  source = new TaildirSource();
  channel = new MemoryChannel();

  Configurables.configure(channel, new Context());

  List<Channel> channels = new ArrayList<Channel>();
  channels.add(channel);

  ChannelSelector rcs = new ReplicatingChannelSelector();
  rcs.setChannels(channels);

  source.setChannelProcessor(new ChannelProcessor(rcs));
  tmpDir = Files.createTempDir();
  posFilePath = tmpDir.getAbsolutePath() + "/taildir_position_test.json";
}
 
Example 22
Project: flume-release-1.7.0   File: TestStaticInterceptor.java
@Test
public void testCustomKeyValue() throws ClassNotFoundException,
    InstantiationException, IllegalAccessException {
  Interceptor.Builder builder = InterceptorBuilderFactory.newInstance(
      InterceptorType.STATIC.toString());
  Context ctx = new Context();
  ctx.put(Constants.KEY, "myKey");
  ctx.put(Constants.VALUE, "myVal");

  builder.configure(ctx);
  Interceptor interceptor = builder.build();

  Event event = EventBuilder.withBody("test", Charsets.UTF_8);
  Assert.assertNull(event.getHeaders().get("myKey"));

  event = interceptor.intercept(event);
  String val = event.getHeaders().get("myKey");

  Assert.assertNotNull(val);
  Assert.assertEquals("myVal", val);
}
 
Example 23
Project: flume-release-1.7.0   File: TestStressSource.java
@Test
public void testMaxTotalEvents() throws InterruptedException,
    EventDeliveryException {
  StressSource source = new StressSource();
  source.setChannelProcessor(mockProcessor);
  Context context = new Context();
  context.put("maxTotalEvents", "35");
  source.configure(context);
  source.start();

  for (int i = 0; i < 50; i++) {
    source.process();
  }
  verify(mockProcessor, times(35)).processEvent(getEvent(source));
}
 
Example 24
Project: flume-release-1.7.0   File: TaildirSource.java
private Table<String, String, String> getTable(Context context, String prefix) {
  Table<String, String, String> table = HashBasedTable.create();
  for (Entry<String, String> e : context.getSubProperties(prefix).entrySet()) {
    String[] parts = e.getKey().split("\\.", 2);
    table.put(parts[0], parts[1], e.getValue());
  }
  return table;
}
 
Example 25
Project: flume-release-1.7.0   File: BucketWriter.java
BucketWriter(long rollInterval, long rollSize, long rollCount, long batchSize,
    Context context, String filePath, String fileName, String inUsePrefix,
    String inUseSuffix, String fileSuffix, CompressionCodec codeC,
    CompressionType compType, HDFSWriter writer,
    ScheduledExecutorService timedRollerPool, PrivilegedExecutor proxyUser,
    SinkCounter sinkCounter, int idleTimeout, WriterCallback onCloseCallback,
    String onCloseCallbackPath, long callTimeout,
    ExecutorService callTimeoutPool, long retryInterval,
    int maxCloseTries) {
  this.rollInterval = rollInterval;
  this.rollSize = rollSize;
  this.rollCount = rollCount;
  this.batchSize = batchSize;
  this.filePath = filePath;
  this.fileName = fileName;
  this.inUsePrefix = inUsePrefix;
  this.inUseSuffix = inUseSuffix;
  this.fileSuffix = fileSuffix;
  this.codeC = codeC;
  this.compType = compType;
  this.writer = writer;
  this.timedRollerPool = timedRollerPool;
  this.proxyUser = proxyUser;
  this.sinkCounter = sinkCounter;
  this.idleTimeout = idleTimeout;
  this.onCloseCallback = onCloseCallback;
  this.onCloseCallbackPath = onCloseCallbackPath;
  this.callTimeout = callTimeout;
  this.callTimeoutPool = callTimeoutPool;
  fileExtensionCounter = new AtomicLong(clock.currentTimeMillis());

  this.retryInterval = retryInterval;
  this.maxRenameTries = maxCloseTries;
  isOpen = false;
  isUnderReplicated = false;
  this.writer.configure(context);
}
 
Example 26
Project: flume-release-1.7.0   File: TestSpillableMemoryChannel.java
private void configureChannel(Map<String, String> overrides) {
  Context context = new Context();
  File checkPointDir = fileChannelDir.newFolder("checkpoint");
  File dataDir = fileChannelDir.newFolder("data");
  context.put(FileChannelConfiguration.CHECKPOINT_DIR, checkPointDir.getAbsolutePath());
  context.put(FileChannelConfiguration.DATA_DIRS, dataDir.getAbsolutePath());
  // Set checkpoint for 5 seconds otherwise test will run out of memory
  context.put(FileChannelConfiguration.CHECKPOINT_INTERVAL, "5000");

  if (overrides != null) {
    context.putAll(overrides);
  }

  Configurables.configure(channel, context);
}
 
Example 27
Project: flume-release-1.7.0   File: ChannelSelectorFactory.java
public static ChannelSelector create(List<Channel> channels,
    Map<String, String> config) {

  ChannelSelector selector = getSelectorForType(config.get(
      BasicConfigurationConstants.CONFIG_TYPE));

  selector.setChannels(channels);

  Context context = new Context();
  context.putAll(config);

  Configurables.configure(selector, context);
  return selector;
}
 
Example 28
Project: flume-release-1.7.0   File: TestKafkaSink.java
private Context prepareDefaultContext() {
  // Prepares a default context with Kafka Server Properties
  Context context = new Context();
  context.put(BOOTSTRAP_SERVERS_CONFIG, testUtil.getKafkaServerUrl());
  context.put(BATCH_SIZE, "1");
  return context;
}
 
Example 29
Project: flume-release-1.7.0   File: TestHDFSEventSink.java
@Test
public void testBadConfigurationForRetryCountZero() throws Exception {
  Context context = getContextForRetryTests();
  context.put("hdfs.closeTries" ,"0");

  Configurables.configure(sink, context);
  Assert.assertEquals(Integer.MAX_VALUE, sink.getTryCount());
}
 
Example 30
Project: flume-release-1.7.0   File: TestLoadBalancingSinkProcessor.java
private Context getContext(String selectorType) {
  Map<String, String> p = new HashMap<String, String>();
  p.put("selector", selectorType);
  Context ctx = new Context(p);

  return ctx;
}
 
Example 31
Project: flume-release-1.7.0   File: TestBLOBHandler.java
@SuppressWarnings({ "unchecked", "rawtypes" })
@Test
public void testCSVData() throws Exception {
  Map requestParameterMap = new HashMap();
  requestParameterMap.put("param1", new String[] { "value1" });
  requestParameterMap.put("param2", new String[] { "value2" });

  HttpServletRequest req = mock(HttpServletRequest.class);
  final String csvData = "a,b,c";

  ServletInputStream servletInputStream = new DelegatingServletInputStream(
      new ByteArrayInputStream(csvData.getBytes()));

  when(req.getInputStream()).thenReturn(servletInputStream);
  when(req.getParameterMap()).thenReturn(requestParameterMap);

  Context context = mock(Context.class);
  when(
      context.getString(BLOBHandler.MANDATORY_PARAMETERS,
          BLOBHandler.DEFAULT_MANDATORY_PARAMETERS)).thenReturn(
      "param1,param2");

  handler.configure(context);
  List<Event> deserialized = handler.getEvents(req);
  assertEquals(1, deserialized.size());
  Event e = deserialized.get(0);

  assertEquals(new String(e.getBody()), csvData);
  assertEquals(e.getHeaders().get("param1"), "value1");
  assertEquals(e.getHeaders().get("param2"), "value2");
}
 
Example 32
Project: flume-release-1.7.0   File: TestSpoolingFileLineReader.java
private ReliableSpoolingFileEventReader getParser(int maxLineLength) {
  Context ctx = new Context();
  ctx.put(LineDeserializer.MAXLINE_KEY, Integer.toString(maxLineLength));
  ReliableSpoolingFileEventReader parser;
  try {
    parser = new ReliableSpoolingFileEventReader.Builder()
        .spoolDirectory(tmpDir)
        .completedSuffix(completedSuffix)
        .deserializerContext(ctx)
        .build();
  } catch (IOException ioe) {
    throw Throwables.propagate(ioe);
  }
  return parser;
}
 
Example 33
Project: flume-release-1.7.0   File: TestSpoolDirectorySource.java
@Test
public void testValidSortOrder() {
  Context context = new Context();
  context.put(SpoolDirectorySourceConfigurationConstants.SPOOL_DIRECTORY,
      tmpDir.getAbsolutePath());
  context.put(SpoolDirectorySourceConfigurationConstants.CONSUME_ORDER,
      "oLdESt");
  Configurables.configure(source, context);
  context.put(SpoolDirectorySourceConfigurationConstants.CONSUME_ORDER,
      "yoUnGest");
  Configurables.configure(source, context);
  context.put(SpoolDirectorySourceConfigurationConstants.CONSUME_ORDER,
      "rAnDom");
  Configurables.configure(source, context);
}
 
Example 34
Project: flume-release-1.7.0   File: RegexFilteringInterceptor.java
@Override
public void configure(Context context) {
  String regexString = context.getString(REGEX, DEFAULT_REGEX);
  regex = Pattern.compile(regexString);
  excludeEvents = context.getBoolean(EXCLUDE_EVENTS,
      DEFAULT_EXCLUDE_EVENTS);
}
 
Example 35
Project: flume-release-1.7.0   File: TestLoadBalancingSinkProcessor.java
private Context getContext(String selectorType, boolean backoff) {
  Map<String, String> p = new HashMap<String, String>();
  p.put("selector", selectorType);
  p.put("backoff", String.valueOf(backoff));
  Context ctx = new Context(p);

  return ctx;
}
 
Example 36
Project: flume-release-1.7.0   File: TestThriftSink.java
@Test
public void testSslProcess() throws Exception {
  Event event = EventBuilder.withBody("test event 1", Charsets.UTF_8);
  src = new ThriftTestingSource(ThriftTestingSource.HandlerType.OK.name(), port,
          ThriftRpcClient.COMPACT_PROTOCOL, "src/test/resources/keystorefile.jks",
          "password", KeyManagerFactory.getDefaultAlgorithm(), "JKS");
  Context context = new Context();
  context.put("hostname", hostname);
  context.put("port", String.valueOf(port));
  context.put("ssl", String.valueOf(true));
  context.put("batch-size", String.valueOf(2));
  context.put("connect-timeout", String.valueOf(2000L));
  context.put("request-timeout", String.valueOf(3000L));
  context.put("truststore", "src/test/resources/truststorefile.jks");
  context.put("truststore-password", "password");
  context.put("trustmanager-type", TrustManagerFactory.getDefaultAlgorithm());

  Configurables.configure(sink, context);
  channel.start();
  sink.start();
  Transaction transaction = channel.getTransaction();
  transaction.begin();
  for (int i = 0; i < 11; i++) {
    channel.put(event);
  }
  transaction.commit();
  transaction.close();
  for (int i = 0; i < 6; i++) {
    Sink.Status status = sink.process();
    Assert.assertEquals(Sink.Status.READY, status);
  }
  Assert.assertEquals(Sink.Status.BACKOFF, sink.process());

  sink.stop();
  Assert.assertEquals(11, src.flumeEvents.size());
  Assert.assertEquals(6, src.batchCount);
  Assert.assertEquals(0, src.individualCount);
}
 
Example 37
Project: flume-release-1.7.0   File: SinkGroup.java
@Override
public void configure(Context context) {
  conf = new SinkGroupConfiguration("sinkgrp");
  try {
    conf.configure(context);
  } catch (ConfigurationException e) {
    throw new FlumeException("Invalid Configuration!", e);
  }
  configure(conf);

}
 
Example 38
Project: flume-release-1.7.0   File: TestLoadBalancingSinkProcessor.java
@Test
public void testCustomSelector() throws Exception {
  Channel ch = new MockChannel();
  int n = 10;
  int numEvents = n;
  for (int i = 0; i < numEvents; i++) {
    ch.put(new MockEvent("test" + i));
  }

  MockSink s1 = new MockSink(1);
  s1.setChannel(ch);

  // s1 always fails
  s1.setFail(true);

  MockSink s2 = new MockSink(2);
  s2.setChannel(ch);

  MockSink s3 = new MockSink(3);
  s3.setChannel(ch);

  List<Sink> sinks = new ArrayList<Sink>();
  sinks.add(s1);
  sinks.add(s2);
  sinks.add(s3);

  // This selector will result in all events going to s2
  Context ctx = getContext(FixedOrderSelector.class.getCanonicalName());
  ctx.put("selector." + FixedOrderSelector.SET_ME, "foo");
  LoadBalancingSinkProcessor lbsp = getProcessor(sinks, ctx);

  Sink.Status s = Sink.Status.READY;
  while (s != Sink.Status.BACKOFF) {
    s = lbsp.process();
  }

  Assert.assertTrue(s1.getEvents().size() == 0);
  Assert.assertTrue(s2.getEvents().size() == n);
  Assert.assertTrue(s3.getEvents().size() == 0);
}
 
Example 39
Project: flume-release-1.7.0   File: LoadBalancingSinkProcessor.java
@Override
public void configure(Context context) {
  Preconditions.checkState(getSinks().size() > 1,
      "The LoadBalancingSinkProcessor cannot be used for a single sink. "
      + "Please configure more than one sinks and try again.");

  String selectorTypeName = context.getString(CONFIG_SELECTOR,
      SELECTOR_NAME_ROUND_ROBIN);

  Boolean shouldBackOff = context.getBoolean(CONFIG_BACKOFF, false);

  selector = null;

  if (selectorTypeName.equalsIgnoreCase(SELECTOR_NAME_ROUND_ROBIN)) {
    selector = new RoundRobinSinkSelector(shouldBackOff);
  } else if (selectorTypeName.equalsIgnoreCase(SELECTOR_NAME_RANDOM)) {
    selector = new RandomOrderSinkSelector(shouldBackOff);
  } else {
    try {
      @SuppressWarnings("unchecked")
      Class<? extends SinkSelector> klass = (Class<? extends SinkSelector>)
          Class.forName(selectorTypeName);

      selector = klass.newInstance();
    } catch (Exception ex) {
      throw new FlumeException("Unable to instantiate sink selector: "
          + selectorTypeName, ex);
    }
  }

  selector.setSinks(getSinks());
  selector.configure(
      new Context(context.getSubProperties(CONFIG_SELECTOR_PREFIX)));

  LOGGER.debug("Sink selector: " + selector + " initialized");
}
 
Example 40
Project: flume-release-1.7.0   File: LoadBalancingSinkProcessor.java
@Override
public void configure(Context context) {
  super.configure(context);
  if (maxTimeOut != 0) {
    selector.setMaxTimeOut(maxTimeOut);
  }
}