Java Code Examples for com.datatorrent.api.DAG

The following examples show how to use com.datatorrent.api.DAG. They are extracted from open source projects; the source project, file, and license are noted above each example where available.
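
All of the examples below follow the same basic pattern: an application implements StreamingApplication, receives the DAG in populateDAG, registers operators with addOperator, and connects their ports with addStream. A minimal sketch of that pattern (RandomNumberGenerator and its out port are illustrative placeholders; ConsoleOutputOperator ships with Apache Apex Malhar):

public class MinimalApplication implements StreamingApplication
{
  @Override
  public void populateDAG(DAG dag, Configuration conf)
  {
    // addOperator registers an operator under a unique name and returns the instance.
    RandomNumberGenerator input = dag.addOperator("input", new RandomNumberGenerator());
    ConsoleOutputOperator console = dag.addOperator("console", new ConsoleOutputOperator());

    // addStream connects an output port to one or more downstream input ports.
    dag.addStream("numbers", input.out, console.input);
  }
}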
Example 1
@Override
public void populateDAG(DAG dag, Configuration configuration)
{
  cleanTable();
  insertEvents(10, true, 0);
  JdbcPOJOPollInputOperator inputOperator = dag.addOperator("JdbcPOJOPollInput", new JdbcPOJOPollInputOperator());
  JdbcStore store = new JdbcStore();
  store.setDatabaseDriver(DB_DRIVER);
  store.setDatabaseUrl(URL);
  inputOperator.setStore(store);
  inputOperator.setTableName(TABLE_POJO_NAME);
  inputOperator.setKey("id");
  inputOperator.setFetchSize(100);
  inputOperator.setBatchSize(100);
  inputOperator.setPartitionCount(2);
  dag.getMeta(inputOperator).getMeta(inputOperator.outputPort).getAttributes().put(Context.PortContext.TUPLE_CLASS, TestPOJOEvent.class);
  ResultCollector result = dag.addOperator("result", new ResultCollector());
  dag.addStream("pojo", inputOperator.outputPort, result.input);
}
 
Example 2
@Override
  public void populateDAG(DAG dag, Configuration conf)
  {
    TestStatsListener sl = new TestStatsListener();
    sl.adjustRate = conf.getBoolean("dt.ManagedStateBenchmark.adjustRate", false);

    G generator = createGenerator();
    dag.addOperator("Generator", generator);
    //generator.setRange(timeRange);
    dag.setAttribute(generator, OperatorContext.STATS_LISTENERS, Lists.newArrayList((StatsListener)sl));

    O windowedOperator = createWindowedOperator(conf);
    dag.addOperator("windowedOperator", windowedOperator);
    dag.setAttribute(windowedOperator, OperatorContext.STATS_LISTENERS, Lists.newArrayList((StatsListener)sl));
    //dag.addStream("Data", generator.data, windowedOperator.input).setLocality(Locality.CONTAINER_LOCAL);
    connectGeneratorToWindowedOperator(dag, generator, windowedOperator);

//    WatermarkGenerator watermarkGenerator = new WatermarkGenerator();
//    dag.addOperator("WatermarkGenerator", watermarkGenerator);
//    dag.addStream("Control", watermarkGenerator.control, windowedOperator.controlInput)
//      .setLocality(Locality.CONTAINER_LOCAL);

    DevNull output = dag.addOperator("output", new DevNull());
    dag.addStream("output", windowedOperator.output, output.data).setLocality(Locality.CONTAINER_LOCAL);
  }
 
Example 3
Source Project: attic-apex-malhar   Source File: SpillableBenchmarkApp.java    License: Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  // Create and configure the test input operator
  SpillableTestInputOperator input = new SpillableTestInputOperator();
  input.batchSize = 100;
  input.sleepBetweenBatch = 0;
  input = dag.addOperator("input", input);

  SpillableTestOperator testOperator = new SpillableTestOperator();
  testOperator.store = createStore(conf);
  testOperator.shutdownCount = -1;
  testOperator = dag.addOperator("test", testOperator);

  // Connect ports
  dag.addStream("stream", input.output, testOperator.input).setLocality(DAG.Locality.CONTAINER_LOCAL);
}
 
Example 4
Source Project: attic-apex-malhar   Source File: JdbcPollerApplication.java    License: Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  JdbcPOJOPollInputOperator poller = dag.addOperator("JdbcPoller", new JdbcPOJOPollInputOperator());

  JdbcStore store = new JdbcStore();
  poller.setStore(store);

  poller.setFieldInfos(addFieldInfos());

  FileLineOutputOperator writer = dag.addOperator("Writer", new FileLineOutputOperator());
  dag.setInputPortAttribute(writer.input, PortContext.PARTITION_PARALLEL, true);
  writer.setRotationWindows(60);

  dag.addStream("dbrecords", poller.outputPort, writer.input);
}
 
Example 5
Source Project: attic-apex-malhar   Source File: SQLExecEnvironment.java    License: Apache License 2.0
/**
 * This is the main method; it takes a SQL statement as input and constructs a DAG using the constructs
 * registered with this {@link SQLExecEnvironment}.
 *
 * @param dag DAG to populate from the SQL statement
 * @param sql SQL statement that should be converted to a DAG.
 */
public void executeSQL(DAG dag, String sql)
{
  FrameworkConfig config = buildFrameWorkConfig();
  Planner planner = Frameworks.getPlanner(config);
  try {
    logger.info("Parsing SQL statement: {}", sql);
    SqlNode parsedTree = planner.parse(sql);
    SqlNode validatedTree = planner.validate(parsedTree);
    RelNode relationalTree = planner.rel(validatedTree).rel;
    logger.info("RelNode relationalTree generate from SQL statement is:\n {}",
        Util.toLinux(RelOptUtil.toString(relationalTree)));
    RelNodeVisitor visitor = new RelNodeVisitor(dag, typeFactory);
    visitor.traverse(relationalTree);
  } catch (Exception e) {
    throw Throwables.propagate(e);
  } finally {
    planner.close();
  }
}
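
A caller registers the tables that the statement refers to and then hands the application's DAG to executeSQL, which expands the parsed and validated SQL into operators and streams. A minimal sketch, assuming an Endpoint named ordersEndpoint has been constructed elsewhere and that the table name ORDERS is illustrative:

@Override
public void populateDAG(DAG dag, Configuration conf)
{
  // Register the source table, then let the environment translate the
  // statement into operators and streams on this DAG.
  SQLExecEnvironment.getEnvironment()
      .registerTable("ORDERS", ordersEndpoint)
      .executeSQL(dag, "SELECT ID, NAME FROM ORDERS");
}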
 
Example 6
Source Project: examples   Source File: FileToJdbcCustomParser.java    License: Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration configuration) {
  // create operators
  FileReader fileReader = dag.addOperator("FileReader", FileReader.class);
  CustomParser customParser = dag.addOperator("CustomParser", CustomParser.class);
  JdbcPOJOInsertOutputOperator jdbcOutputOperator = dag.addOperator("JdbcOutput", JdbcPOJOInsertOutputOperator.class);

  // configure operators
  jdbcOutputOperator.setFieldInfos(addFieldInfos());
  JdbcTransactionalStore outputStore = new JdbcTransactionalStore();
  jdbcOutputOperator.setStore(outputStore);

  // add stream
  dag.addStream("Data", fileReader.output, customParser.input);
  dag.addStream("POJOs", customParser.output, jdbcOutputOperator.input);
}
 
Example 7
Source Project: attic-apex-malhar   Source File: FSSliceReaderTest.java    License: Apache License 2.0
@Override
protected void starting(org.junit.runner.Description description)
{
  output = "target/" + description.getClassName() + "/" + description.getMethodName();
  try {
    FileUtils.forceMkdir(new File(output));
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
  dataFile = new File("src/test/resources/reader_test_data.csv");
  blockReader = getBlockReader();

  Attribute.AttributeMap.DefaultAttributeMap readerAttr = new Attribute.AttributeMap.DefaultAttributeMap();
  readerAttr.put(DAG.APPLICATION_ID, Long.toHexString(System.currentTimeMillis()));
  readerAttr.put(Context.OperatorContext.SPIN_MILLIS, 10);
  readerContext = mockOperatorContext(1, readerAttr);

  blockReader.setup(readerContext);

  messageSink = new CollectorTestSink<>();
  blockReader.messages.setSink(messageSink);

  blockMetadataSink = new CollectorTestSink<>();
  blockReader.blocksMetadataOutput.setSink(blockMetadataSink);
}
 
Example 8
Source Project: attic-apex-malhar   Source File: Application.java    License: Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration configuration)
{
  WordGenerator inputOperator = new WordGenerator();
  KeyedWindowedOperatorImpl<String, Long, MutableLong, Long> windowedOperator = new KeyedWindowedOperatorImpl<>();
  Accumulation<Long, MutableLong, Long> sum = new SumAccumulation();

  windowedOperator.setAccumulation(sum);
  windowedOperator.setDataStorage(new InMemoryWindowedKeyedStorage<String, MutableLong>());
  windowedOperator.setRetractionStorage(new InMemoryWindowedKeyedStorage<String, Long>());
  windowedOperator.setWindowStateStorage(new InMemoryWindowedStorage<WindowState>());
  windowedOperator.setWindowOption(new WindowOption.TimeWindows(Duration.standardMinutes(1)));
  windowedOperator.setTriggerOption(TriggerOption.AtWatermark().withEarlyFiringsAtEvery(Duration.millis(1000)).accumulatingAndRetractingFiredPanes());
  //windowedOperator.setAllowedLateness(Duration.millis(14000));

  ConsoleOutputOperator outputOperator = new ConsoleOutputOperator();
  dag.addOperator("inputOperator", inputOperator);
  dag.addOperator("windowedOperator", windowedOperator);
  dag.addOperator("outputOperator", outputOperator);
  dag.addStream("input_windowed", inputOperator.output, windowedOperator.input);
  dag.addStream("windowed_output", windowedOperator.output, outputOperator.input);
}
 
Example 9
private static TestOutputOperator createOperator(ProcessingMode processingMode)
{
  JdbcNonTransactionalStore store = new JdbcNonTransactionalStore();
  store.setDatabaseDriver(JdbcNonTransactionalOutputOperatorTest.DB_DRIVER);
  store.setDatabaseUrl(JdbcNonTransactionalOutputOperatorTest.URL);

  TestOutputOperator outputOperator = new TestOutputOperator();

  com.datatorrent.api.Attribute.AttributeMap.DefaultAttributeMap attributeMap = new com.datatorrent.api.Attribute.AttributeMap.DefaultAttributeMap();
  attributeMap.put(OperatorContext.PROCESSING_MODE, processingMode);
  attributeMap.put(OperatorContext.ACTIVATION_WINDOW_ID, -1L);
  attributeMap.put(DAG.APPLICATION_ID, APP_ID);
  OperatorContext context = mockOperatorContext(OPERATOR_ID, attributeMap);
  outputOperator.setStore(store);
  outputOperator.setBatchSize(BATCH_SIZE);

  outputOperator.setup(context);

  return outputOperator;
}
 
Example 10
Source Project: examples   Source File: ApplicationCPPAppMetrics.java    License: Apache License 2.0
public void populateDAG(DAG dag, Configuration conf)
{
  POJOGenerator generator = dag.addOperator("POJOGenerator", POJOGenerator.class);
  CsvParser csvParser = dag.addOperator("csvParser", CsvParser.class);
  FilterOperator filterOperator = dag.addOperator("filter", new FilterOperator());
  TopNAccounts topN = dag.addOperator("topN", new TopNAccounts());
  CsvFormatter formatter = dag.addOperator("formatter", new CsvFormatter());
  ConsoleOutputOperator console = dag.addOperator("console", new ConsoleOutputOperator());

  dag.addStream("data", generator.out, csvParser.in).setLocality(DAG.Locality.THREAD_LOCAL);
  dag.addStream("pojo", csvParser.out, filterOperator.input, topN.in);
  dag.addStream("filtered", filterOperator.truePort, formatter.in);
  dag.addStream("string", formatter.out, console.input).setLocality(DAG.Locality.THREAD_LOCAL);

  dag.setAttribute(Context.DAGContext.METRICS_TRANSPORT, null);
  dag.setAttribute(topN, Context.OperatorContext.METRICS_AGGREGATOR, new TopNAggregator());
  dag.setAttribute(AppMetricProcessor.APP_METRIC_PROCESSOR, new AppMetricsService());
}
 
Example 11
/**
 * Populates the DAG that writes tuples into HBase.
 * @param dag the DAG to populate
 * @param conf the configuration
 */
protected void populateOutputDAG(DAG dag, Configuration conf)
{
  customerServiceGenerator = new SpecificCustomerServiceGenerateOperator();
  customerServiceGenerator.capacity = CAPACITY;
  
  dag.addOperator("CustomerService-Generator", customerServiceGenerator);

  cacheOperator = new TupleCacheOperator<>("cacheOperatorData");
  dag.addOperator("Cache", cacheOperator);
  
  dag.addStream("GenerateStream", customerServiceGenerator.outputPort, cacheOperator.inputPort).setLocality(Locality.CONTAINER_LOCAL);

  {
    CustomerServiceHbaseOutputOperator hbaseOutput = new CustomerServiceHbaseOutputOperator();
    hbaseOutput.setStartOver(true);  //remove old table and create new
    
    dag.addOperator("CustomerService-Output", hbaseOutput);

    dag.addStream("CustomerService", cacheOperator.outputPort, hbaseOutput.input).setLocality(Locality.CONTAINER_LOCAL);
  }
}
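
This helper builds only the write side of the topology, so the enclosing application still has to invoke it; a hedged sketch of the call site:

@Override
public void populateDAG(DAG dag, Configuration conf)
{
  // Build the HBase output branch; other branches of the topology
  // would be populated alongside this call.
  populateOutputDAG(dag, conf);
}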
 
Example 12
Source Project: attic-apex-malhar   Source File: StatefulUniqueCountTest.java    License: Apache License 2.0
@SuppressWarnings("unchecked")
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  KeyGen keyGen = dag.addOperator("KeyGenerator", new KeyGen());
  UniqueValueCount<Integer> valCount = dag.addOperator("ValueCounter", new UniqueValueCount<Integer>());
  IntegerUniqueValueCountAppender uniqueUnifier = dag.addOperator("Unique", new IntegerUniqueValueCountAppender());
  VerifyTable verifyTable = dag.addOperator("VerifyTable", new VerifyTable());

  @SuppressWarnings("rawtypes")
  DefaultOutputPort valOut = valCount.output;
  @SuppressWarnings("rawtypes")
  DefaultOutputPort uniqueOut = uniqueUnifier.output;
  dag.addStream("DataIn", keyGen.output, valCount.input);
  dag.addStream("UnifyWindows", valOut, uniqueUnifier.input);
  dag.addStream("ResultsOut", uniqueOut, verifyTable.input);
}
 
Example 13
Source Project: attic-apex-malhar   Source File: OldFaithfulApplication.java    License: Apache License 2.0
/**
 * Create the DAG for the Old Faithful eruption model example.
 */
@Override
public void populateDAG(DAG dag, Configuration conf)
{

  InputGenerator randomInputGenerator = dag.addOperator("rand", new InputGenerator());
  FaithfulRScript rScriptOp = dag.addOperator("rScriptOp", new FaithfulRScript("com/datatorrent/examples/r/oldfaithful/eruptionModel.R", "eruptionModel", "retVal"));
  ConsoleOutputOperator consoles = dag.addOperator("consoles", new ConsoleOutputOperator());

  Map<String, FaithfulRScript.REXP_TYPE> argTypeMap = new HashMap<String, FaithfulRScript.REXP_TYPE>();

  argTypeMap.put("ELAPSEDTIME", FaithfulRScript.REXP_TYPE.REXP_INT);
  argTypeMap.put("ERUPTIONS", FaithfulRScript.REXP_TYPE.REXP_ARRAY_DOUBLE);
  argTypeMap.put("WAITING", FaithfulRScript.REXP_TYPE.REXP_ARRAY_INT);

  rScriptOp.setArgTypeMap(argTypeMap);

  dag.addStream("ingen_faithfulRscript", randomInputGenerator.outputPort, rScriptOp.faithfulInput).setLocality(locality);
  dag.addStream("ingen_faithfulRscript_eT", randomInputGenerator.elapsedTime, rScriptOp.inputElapsedTime).setLocality(locality);
  dag.addStream("faithfulRscript_console_s", rScriptOp.strOutput, consoles.input).setLocality(locality);

}
 
Example 14
Source Project: attic-apex-malhar   Source File: Application.java    License: Apache License 2.0
private MultiWindowDimensionAggregation getSystemAggregationOper(String name, DAG dag)
{
  MultiWindowDimensionAggregation oper = dag.addOperator(name, MultiWindowDimensionAggregation.class);
  oper.setWindowSize(3);
  List<int[]> dimensionArrayList = new ArrayList<int[]>();
  int[] dimensionArray = {0};
  dimensionArrayList.add(dimensionArray);
  oper.setDimensionArray(dimensionArrayList);

  oper.setTimeBucket(TIME_BUCKETS.m.name());
  oper.setDimensionKeyVal("1");

  oper.setOperationType(AggregateOperation.AVERAGE);
  oper.setWindowSize(120); // 1 min window; overrides the setWindowSize(3) above

  return oper;
}
 
Example 15
Source Project: attic-apex-malhar   Source File: FSWindowDataManager.java    License: Apache License 2.0
@Override
public void setup(Context.OperatorContext context)
{
  serializationBuffer = new SerializationBuffer(new WindowedBlockStream());
  operatorId = context.getId();

  if (isStatePathRelativeToAppPath) {
    fullStatePath = context.getValue(DAG.APPLICATION_PATH) + Path.SEPARATOR + statePath;
  } else {
    fullStatePath = statePath;
  }

  try {
    fileContext = FileContextUtils.getFileContext(fullStatePath);
    setupWals(context.getValue(Context.OperatorContext.ACTIVATION_WINDOW_ID));
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
}
 
Example 16
Source Project: attic-apex-malhar   Source File: StatefulApplication.java    License: Apache License 2.0
@SuppressWarnings("unchecked")
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  RandomKeyValGenerator randGen = dag.addOperator("RandomGenerator", new RandomKeyValGenerator());
  UniqueValueCount<Integer> valCount = dag.addOperator("UniqueCounter", new UniqueValueCount<Integer>());
  ConsoleOutputOperator consOut = dag.addOperator("Console", new ConsoleOutputOperator());
  IntegerUniqueValueCountAppender uniqueUnifier = dag.addOperator("StatefulUniqueCounter", new IntegerUniqueValueCountAppender());
  dag.getOperatorMeta("StatefulUniqueCounter").getMeta(uniqueUnifier.input).getAttributes().put(Context.PortContext.STREAM_CODEC, new KeyBasedStreamCodec());

  @SuppressWarnings("rawtypes")
  DefaultOutputPort valOut = valCount.output;
  @SuppressWarnings("rawtypes")
  DefaultOutputPort uniqueOut = uniqueUnifier.output;

  dag.addStream("Events", randGen.outport, valCount.input);
  dag.addStream("Unified", valOut, uniqueUnifier.input);
  dag.addStream("Result", uniqueOut, consOut.input);
}
 
Example 17
Source Project: attic-apex-malhar   Source File: TrafficRoutes.java    License: Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  InfoGen infoGen = new InfoGen();
  Collector collector = new Collector();

  // Create a stream from the input operator.
  ApexStream<Tuple.TimestampedTuple<String>> stream = StreamFactory.fromInput(infoGen, infoGen.output, name("infoGen"))

      // Extract the timestamp from the input and wrap it into a TimestampedTuple.
      .map(new ExtractTimestamps(), name("ExtractTimestamps"));

  stream
      // Extract the average speed of a station.
      .flatMap(new ExtractStationSpeedFn(), name("ExtractStationSpeedFn"))

      // Apply window and trigger option.
      .window(new WindowOption.SlidingTimeWindows(Duration.standardMinutes(WINDOW_DURATION), Duration.standardMinutes(WINDOW_SLIDE_EVERY)), new TriggerOption().withEarlyFiringsAtEvery(Duration.millis(5000)).accumulatingFiredPanes())

      // Apply TrackSpeed composite transformation to compute the route information.
      .addCompositeStreams(new TrackSpeed())

      // print the result to console.
      .print(name("console"))
      .endWith(collector, collector.input, name("Collector"))
      .populateDag(dag);
}
 
Example 18
Source Project: attic-apex-malhar   Source File: FileSplitterBaseTest.java    License: Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration configuration)
{
  MockFileInput fileInput = dag.addOperator("Input", new MockFileInput());
  fileInput.filePaths = baseTestMeta.filePaths;

  FileSplitterBase splitter = dag.addOperator("Splitter", new FileSplitterBase());
  splitter.setFile(baseTestMeta.dataDirectory);

  receiver = dag.addOperator("Receiver", new MockReceiver());

  dag.addStream("files", fileInput.files, splitter.input);
  dag.addStream("file-metadata", splitter.filesMetadataOutput, receiver.fileMetadata);
}
 
Example 19
Source Project: examples   Source File: JdbcPollerApplication.java    License: Apache License 2.0
public void populateDAG(DAG dag, Configuration conf)
{
  JdbcPOJOPollInputOperator poller = dag.addOperator("JdbcPoller", new JdbcPOJOPollInputOperator());

  JdbcStore store = new JdbcStore();
  poller.setStore(store);

  poller.setFieldInfos(addFieldInfos());

  FileLineOutputOperator writer = dag.addOperator("Writer", new FileLineOutputOperator());
  dag.setInputPortAttribute(writer.input, PortContext.PARTITION_PARALLEL, true);
  writer.setRotationWindows(60);

  dag.addStream("dbrecords", poller.outputPort, writer.input);
}
 
Example 20
Source Project: examples   Source File: AtomicFileOutputApp.java    License: Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration configuration)
{
  KafkaSinglePortStringInputOperator kafkaInput = dag.addOperator("kafkaInput",
      new KafkaSinglePortStringInputOperator());
  kafkaInput.setWindowDataManager(new FSWindowDataManager());

  Application.UniqueCounterFlat count = dag.addOperator("count", new Application.UniqueCounterFlat());

  FileWriter fileWriter = dag.addOperator("fileWriter", new FileWriter());

  ConsoleOutputOperator cons = dag.addOperator("console", new ConsoleOutputOperator());
  dag.addStream("words", kafkaInput.outputPort, count.data);
  dag.addStream("counts", count.counts, fileWriter.input, cons.input);
}
 
Example 21
Source Project: examples   Source File: Application.java    License: Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  KafkaSinglePortStringInputOperator kafkaInput = dag.addOperator("kafkaInput", new KafkaSinglePortStringInputOperator());
  kafkaInput.setWindowDataManager(new FSWindowDataManager());
  UniqueCounterFlat count = dag.addOperator("count", new UniqueCounterFlat());
  CountStoreOperator store = dag.addOperator("store", new CountStoreOperator());
  store.setStore(new JdbcTransactionalStore());
  ConsoleOutputOperator cons = dag.addOperator("console", new ConsoleOutputOperator());
  dag.addStream("words", kafkaInput.outputPort, count.data);
  dag.addStream("counts", count.counts, store.input, cons.input);
}
 
Example 22
Source Project: attic-apex-malhar   Source File: WidgetOutputOperator.java    License: Apache License 2.0
@Override
public void setup(OperatorContext context)
{
  if (PubSubHelper.isGatewayConfigured(context)) {
    wsoo.setUri(PubSubHelper.getURI(context));
    wsoo.setup(context);
  } else {
    isWebSocketConnected = false;
    coo.setup(context);
  }
  appId = context.getValue(DAG.APPLICATION_ID);
  operId = context.getId();

}
 
Example 23
@Test
public void testPopulateDAG() throws Exception
{
  Configuration configuration = new Configuration(false);

  LocalMode lm = LocalMode.newInstance();
  DAG prepareDAG = lm.prepareDAG(new TwitterDumpApplication(), configuration);
  DAG clonedDAG = lm.cloneDAG();

  assertEquals("Serialization", prepareDAG, clonedDAG);
}
 
Example 24
Source Project: attic-apex-malhar   Source File: Application.java    License: Apache License 2.0
/**
 * This function sets up the DAG for calculating the average
 *
 * @param dag  the DAG instance
 * @param conf the configuration instance
 * @return MachineInfoAveragingPrerequisitesOperator
 */
private MachineInfoAveragingPrerequisitesOperator addAverageCalculation(DAG dag, Configuration conf)
{
  MachineInfoAveragingPrerequisitesOperator prereqAverageOper = dag.addOperator("Aggregator", MachineInfoAveragingPrerequisitesOperator.class);
  MachineInfoAveragingOperator averageOperator = dag.addOperator("AverageCalculator", MachineInfoAveragingOperator.class);
  RedisKeyValPairOutputOperator<MachineKey, Map<String, String>> redisAvgOperator = dag.addOperator("Persister", new RedisKeyValPairOutputOperator<MachineKey, Map<String, String>>());
  dag.addStream("Average", averageOperator.outputPort, redisAvgOperator.input);
  SmtpOutputOperator smtpOutputOperator = dag.addOperator("Alerter", new SmtpOutputOperator());
  dag.addStream("Aggregates", prereqAverageOper.outputPort, averageOperator.inputPort);
  dag.addStream("Alerts", averageOperator.smtpAlert, smtpOutputOperator.input);
  return prereqAverageOper;
}
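
The returned prerequisites operator still needs an upstream feed; a hedged sketch of how populateDAG might wire it (MachineInfoSource and the port names are illustrative):

@Override
public void populateDAG(DAG dag, Configuration conf)
{
  // A hypothetical source emitting machine data tuples.
  MachineInfoSource source = dag.addOperator("Source", new MachineInfoSource());
  MachineInfoAveragingPrerequisitesOperator aggregator = addAverageCalculation(dag, conf);
  dag.addStream("MachineData", source.outputPort, aggregator.inputPort);
}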
 
Example 25
Source Project: attic-apex-malhar   Source File: Application.java    License: Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration conf)
{

  BatchSequenceGenerator sequenceGenerator = dag.addOperator("sequenceGenerator", BatchSequenceGenerator.class);
  PassthroughFailOperator passthroughFailOperator = dag.addOperator("passthrough", PassthroughFailOperator.class);
  KafkaSinglePortExactlyOnceOutputOperator<String> kafkaExactlyOnceOutputOperator =
      dag.addOperator("kafkaExactlyOnceOutputOperator", KafkaSinglePortExactlyOnceOutputOperator.class);
  KafkaSinglePortOutputOperator kafkaOutputOperator =
      dag.addOperator("kafkaOutputOperator", KafkaSinglePortOutputOperator.class);

  dag.addStream("sequenceToPassthrough", sequenceGenerator.out, passthroughFailOperator.input);
  dag.addStream("linesToKafka", passthroughFailOperator.output, kafkaOutputOperator.inputPort,
      kafkaExactlyOnceOutputOperator.inputPort);

  KafkaSinglePortInputOperator kafkaInputTopicExactly = dag.addOperator("kafkaTopicExactly", KafkaSinglePortInputOperator.class);
  kafkaInputTopicExactly.setInitialOffset(KafkaSinglePortInputOperator.InitialOffset.EARLIEST.name());

  KafkaSinglePortInputOperator kafkaInputTopicAtLeast = dag.addOperator("kafkaTopicAtLeast", KafkaSinglePortInputOperator.class);
  kafkaInputTopicAtLeast.setInitialOffset(KafkaSinglePortInputOperator.InitialOffset.EARLIEST.name());

  ValidationToFile validationToFile = dag.addOperator("validationToFile", ValidationToFile.class);

  dag.addStream("messagesFromExactly", kafkaInputTopicExactly.outputPort, validationToFile.topicExactlyInput);
  dag.addStream("messagesFromAtLeast", kafkaInputTopicAtLeast.outputPort, validationToFile.topicAtLeastInput);

}
 
Example 26
Source Project: attic-apex-malhar   Source File: FileEndpoint.java    License: Apache License 2.0
@Override
public RelInfo populateOutputDAG(DAG dag, JavaTypeFactory typeFactory)
{
  RelInfo spec = messageFormat.populateOutputDAG(dag, typeFactory);

  GenericFileOutputOperator.StringFileOutputOperator fileOutput =
      dag.addOperator(OperatorUtils.getUniqueOperatorName("FileOutput"),
      GenericFileOutputOperator.StringFileOutputOperator.class);
  fileOutput.setFilePath((String)operands.get(FILE_OUT_PATH));
  fileOutput.setOutputFileName((String)operands.get(FILE_OUT_NAME));

  dag.addStream(OperatorUtils.getUniqueStreamName("Formatter", "File"), spec.getOutPort(), fileOutput.input);

  return new RelInfo("Output", spec.getInputPorts(), spec.getOperator(), null, messageFormat.getRowType(typeFactory));
}
 
Example 27
public void testTransactionOutputOperator() throws IOException
{
  TransactionableKeyValueStoreOperatorTest.TransactionOutputOperator<S> outputOperator = new TransactionableKeyValueStoreOperatorTest.TransactionOutputOperator<S>();
  String appId = "test_appid";
  int operatorId = 0;
  operatorStore.removeCommittedWindowId(appId, operatorId);

  com.datatorrent.api.Attribute.AttributeMap.DefaultAttributeMap attributes = new com.datatorrent.api.Attribute.AttributeMap.DefaultAttributeMap();
  attributes.put(DAG.APPLICATION_ID, appId);

  try {
    testStore.connect();
    outputOperator.setStore(operatorStore);
    outputOperator.setup(mockOperatorContext(operatorId, attributes));
    outputOperator.beginWindow(100);
    Map<String, String> m = new HashMap<String, String>();
    m.put("test_abc", "123");
    m.put("test_def", "456");
    outputOperator.input.process(m);
    Assert.assertNull(testStore.get("test_abc"));
    Assert.assertNull(testStore.get("test_def"));
    m = new HashMap<String, String>();
    m.put("test_ghi", "789");
    outputOperator.input.process(m);
    Assert.assertNull(testStore.get("test_ghi"));
    outputOperator.endWindow();
    outputOperator.teardown();
    Assert.assertEquals("123", testStore.get("test_abc"));
    Assert.assertEquals("456", testStore.get("test_def"));
    Assert.assertEquals("789", testStore.get("test_ghi"));
  } finally {
    testStore.remove("test_abc");
    testStore.remove("test_def");
    testStore.remove("test_ghi");
    testStore.disconnect();
  }
}
 
Example 28
public void testInputOperator() throws Exception
{
  testStore.connect();
  testStore.put("test_abc", "789");
  testStore.put("test_def", "456");
  testStore.put("test_ghi", "123");
  try {
    LocalMode lma = LocalMode.newInstance();
    DAG dag = lma.getDAG();
    @SuppressWarnings("unchecked")
    InputOperator<S> inputOperator = dag.addOperator("input", new InputOperator<S>());
    CollectorModule<Object> collector = dag.addOperator("collector", new CollectorModule<Object>());
    inputOperator.addKey("test_abc");
    inputOperator.addKey("test_def");
    inputOperator.addKey("test_ghi");
    inputOperator.setStore(operatorStore);
    dag.addStream("stream", inputOperator.outputPort, collector.inputPort);
    final LocalMode.Controller lc = lma.getController();
    lc.run(3000);
    lc.shutdown();
    Assert.assertEquals("789", CollectorModule.resultMap.get("test_abc"));
    Assert.assertEquals("456", CollectorModule.resultMap.get("test_def"));
    Assert.assertEquals("123", CollectorModule.resultMap.get("test_ghi"));

  } finally {
    testStore.remove("test_abc");
    testStore.remove("test_def");
    testStore.remove("test_ghi");
    testStore.disconnect();
  }
}
 
Example 29
Source Project: attic-apex-malhar   Source File: FSSliceReaderTest.java    License: Apache License 2.0
@Test
public void testBlockSize() throws IOException
{
  long blockSize = 1000;
  Path path = new Path(testMeta.output);
  when(fileSystem.getDefaultBlockSize(path)).thenReturn(blockSize);
  Attribute.AttributeMap.DefaultAttributeMap readerAttr = new Attribute.AttributeMap.DefaultAttributeMap();
  readerAttr.put(DAG.APPLICATION_ID, Long.toHexString(System.currentTimeMillis()));
  readerAttr.put(Context.OperatorContext.SPIN_MILLIS, 10);

  FSTestReader reader = new FSTestReader();
  reader.setBasePath(testMeta.output);
  reader.setup(mockOperatorContext(1, readerAttr));
  Assert.assertEquals("Block Size", blockSize, (long)((ReaderContext.FixedBytesReaderContext)reader.getReaderContext()).getLength());
}
 
Example 30
Source Project: examples   Source File: Application.java    License: Apache License 2.0
public void populateDAG(DAG dag, Configuration conf)
{
  FSRecordReaderModule recordReader = dag.addModule("lineInput", FSRecordReaderModule.class);
  S3BytesOutputModule s3StringOutputModule = dag.addModule("s3output", S3BytesOutputModule.class);
  dag.addStream("data", recordReader.records, s3StringOutputModule.input);
  
}