com.datatorrent.api.DAG Java Examples

The following examples show how to use com.datatorrent.api.DAG, the interface through which an Apache Apex application describes its topology of operators and streams. All examples are taken from open-source projects; the source file and project are noted above each example. A minimal sketch of the pattern they all share follows.
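Every example implements StreamingApplication.populateDAG(DAG, Configuration): operators are registered with dag.addOperator(name, operator) and their ports are wired with dag.addStream(name, outputPort, inputPort). A minimal sketch, using Malhar's ConsoleOutputOperator and a hypothetical RandomNumberGenerator input operator whose output port is named "out":

@Override
public void populateDAG(DAG dag, Configuration conf)
{
  // Register each operator under a name that is unique within the DAG.
  RandomNumberGenerator random = dag.addOperator("random", new RandomNumberGenerator());
  ConsoleOutputOperator console = dag.addOperator("console", new ConsoleOutputOperator());

  // A stream connects one output port to one or more input ports.
  dag.addStream("numbers", random.out, console.input);
}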
Example #1
Source File: StatefulApplication.java    From attic-apex-malhar with Apache License 2.0
@SuppressWarnings("unchecked")
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  RandomKeyValGenerator randGen = dag.addOperator("RandomGenerator", new RandomKeyValGenerator());
  UniqueValueCount<Integer> valCount = dag.addOperator("UniqueCounter", new UniqueValueCount<Integer>());
  ConsoleOutputOperator consOut = dag.addOperator("Console", new ConsoleOutputOperator());
  IntegerUniqueValueCountAppender uniqueUnifier = dag.addOperator("StatefulUniqueCounter", new IntegerUniqueValueCountAppender());
  dag.getOperatorMeta("StatefulUniqueCounter").getMeta(uniqueUnifier.input).getAttributes().put(Context.PortContext.STREAM_CODEC, new KeyBasedStreamCodec());

  @SuppressWarnings("rawtypes")
  DefaultOutputPort valOut = valCount.output;
  @SuppressWarnings("rawtypes")
  DefaultOutputPort uniqueOut = uniqueUnifier.output;

  dag.addStream("Events", randGen.outport, valCount.input);
  dag.addStream("Unified", valOut, uniqueUnifier.input);
  dag.addStream("Result", uniqueOut, consOut.input);
}
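The STREAM_CODEC attribute set on the StatefulUniqueCounter input port controls how upstream tuples are partitioned. The KeyBasedStreamCodec class itself is not shown on this page; a codec of that kind typically extends Malhar's KryoSerializableStreamCodec and overrides getPartition() so that all tuples with the same key reach the same partition. A hedged sketch, assuming the tuples are KeyValPair<Integer, Object>:

public static class KeyBasedStreamCodec extends KryoSerializableStreamCodec<KeyValPair<Integer, Object>>
{
  @Override
  public int getPartition(KeyValPair<Integer, Object> tuple)
  {
    // Route by key so each key is always handled by the same partition.
    return tuple.getKey().hashCode();
  }
}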
 
Example #2
Source File: SQLExecEnvironment.java    From attic-apex-malhar with Apache License 2.0
/**
 * This is the main method. It takes a SQL statement as input and constructs a DAG using the constructs registered with this
 * {@link SQLExecEnvironment}.
 *
 * @param dag DAG to be populated from the SQL statement.
 * @param sql SQL statement that should be converted to a DAG.
 */
public void executeSQL(DAG dag, String sql)
{
  FrameworkConfig config = buildFrameWorkConfig();
  Planner planner = Frameworks.getPlanner(config);
  try {
    logger.info("Parsing SQL statement: {}", sql);
    SqlNode parsedTree = planner.parse(sql);
    SqlNode validatedTree = planner.validate(parsedTree);
    RelNode relationalTree = planner.rel(validatedTree).rel;
    logger.info("RelNode relationalTree generate from SQL statement is:\n {}",
        Util.toLinux(RelOptUtil.toString(relationalTree)));
    RelNodeVisitor visitor = new RelNodeVisitor(dag, typeFactory);
    visitor.traverse(relationalTree);
  } catch (Exception e) {
    throw Throwables.propagate(e);
  } finally {
    planner.close();
  }
}
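Callers never touch the Calcite planner directly; they register tables and functions on the environment, then pass the application's DAG together with a SQL string to executeSQL(). A rough usage sketch inside populateDAG (ordersEndpoint stands for a previously constructed source endpoint and the SQL string is illustrative; both are assumptions, not code from this project):

@Override
public void populateDAG(DAG dag, Configuration conf)
{
  // ordersEndpoint: a source endpoint (e.g. file or Kafka) built elsewhere.
  SQLExecEnvironment.getEnvironment()
      .registerTable("ORDERS", ordersEndpoint)
      .executeSQL(dag, "SELECT * FROM ORDERS");
}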
 
Example #3
Source File: SpillableBenchmarkApp.java    From attic-apex-malhar with Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  // Create and configure the test input operator
  SpillableTestInputOperator input = new SpillableTestInputOperator();
  input.batchSize = 100;
  input.sleepBetweenBatch = 0;
  input = dag.addOperator("input", input);

  SpillableTestOperator testOperator = new SpillableTestOperator();
  testOperator.store = createStore(conf);
  testOperator.shutdownCount = -1;
  testOperator = dag.addOperator("test", testOperator);

  // Connect ports
  dag.addStream("stream", input.output, testOperator.input).setLocality(DAG.Locality.CONTAINER_LOCAL);
}
 
Example #4
Source File: JdbcPollerApplication.java    From attic-apex-malhar with Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  JdbcPOJOPollInputOperator poller = dag.addOperator("JdbcPoller", new JdbcPOJOPollInputOperator());

  JdbcStore store = new JdbcStore();
  poller.setStore(store);

  poller.setFieldInfos(addFieldInfos());

  FileLineOutputOperator writer = dag.addOperator("Writer", new FileLineOutputOperator());
  dag.setInputPortAttribute(writer.input, PortContext.PARTITION_PARALLEL, true);
  writer.setRotationWindows(60);

  dag.addStream("dbrecords", poller.outputPort, writer.input);
}
 
Example #5
Source File: AbstractWindowedOperatorBenchmarkApp.java    From attic-apex-malhar with Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  TestStatsListener sl = new TestStatsListener();
  sl.adjustRate = conf.getBoolean("dt.ManagedStateBenchmark.adjustRate", false);

  G generator = createGenerator();
  dag.addOperator("Generator", generator);
  //generator.setRange(timeRange);
  dag.setAttribute(generator, OperatorContext.STATS_LISTENERS, Lists.newArrayList((StatsListener)sl));

  O windowedOperator = createWindowedOperator(conf);
  dag.addOperator("windowedOperator", windowedOperator);
  dag.setAttribute(windowedOperator, OperatorContext.STATS_LISTENERS, Lists.newArrayList((StatsListener)sl));
  //dag.addStream("Data", generator.data, windowedOperator.input).setLocality(Locality.CONTAINER_LOCAL);
  connectGeneratorToWindowedOperator(dag, generator, windowedOperator);

  //WatermarkGenerator watermarkGenerator = new WatermarkGenerator();
  //dag.addOperator("WatermarkGenerator", watermarkGenerator);
  //dag.addStream("Control", watermarkGenerator.control, windowedOperator.controlInput)
  //  .setLocality(Locality.CONTAINER_LOCAL);

  DevNull output = dag.addOperator("output", new DevNull());
  dag.addStream("output", windowedOperator.output, output.data).setLocality(Locality.CONTAINER_LOCAL);
}
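connectGeneratorToWindowedOperator() is an abstract hook that concrete benchmark subclasses implement. Judging from the commented-out stream above, an implementation would look roughly like the following (the port names generator.data and windowedOperator.input are carried over from that comment and are otherwise an assumption):

protected void connectGeneratorToWindowedOperator(DAG dag, G generator, O windowedOperator)
{
  // Keep generator and windowed operator in one container to avoid
  // serialization overhead between them.
  dag.addStream("Data", generator.data, windowedOperator.input).setLocality(Locality.CONTAINER_LOCAL);
}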
 
Example #6
Source File: JdbcInputOperatorApplicationTest.java    From attic-apex-malhar with Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration configuration)
{
  cleanTable();
  insertEvents(10, true, 0);
  JdbcPOJOPollInputOperator inputOperator = dag.addOperator("JdbcPOJOPollInput", new JdbcPOJOPollInputOperator());
  JdbcStore store = new JdbcStore();
  store.setDatabaseDriver(DB_DRIVER);
  store.setDatabaseUrl(URL);
  inputOperator.setStore(store);
  inputOperator.setTableName(TABLE_POJO_NAME);
  inputOperator.setKey("id");
  inputOperator.setFetchSize(100);
  inputOperator.setBatchSize(100);
  inputOperator.setPartitionCount(2);
  dag.getMeta(inputOperator).getMeta(inputOperator.outputPort).getAttributes().put(Context.PortContext.TUPLE_CLASS, TestPOJOEvent.class);
  ResultCollector result = dag.addOperator("result", new ResultCollector());
  dag.addStream("pojo", inputOperator.outputPort, result.input);
}
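The dag.getMeta(operator).getMeta(port).getAttributes().put(...) chain used above for TUPLE_CLASS is the long form of setting a port attribute. DAG also provides setOutputPortAttribute (the counterpart of the setInputPortAttribute calls seen in other examples on this page), which should reduce that line to:

dag.setOutputPortAttribute(inputOperator.outputPort, Context.PortContext.TUPLE_CLASS, TestPOJOEvent.class);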
 
Example #7
Source File: FileToJdbcCustomParser.java    From examples with Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration configuration) {
  // create operators
  FileReader fileReader = dag.addOperator("FileReader", FileReader.class);
  CustomParser customParser = dag.addOperator("CustomParser", CustomParser.class);
  JdbcPOJOInsertOutputOperator jdbcOutputOperator = dag.addOperator("JdbcOutput", JdbcPOJOInsertOutputOperator.class);

  // configure operators
  jdbcOutputOperator.setFieldInfos(addFieldInfos());
  JdbcTransactionalStore outputStore = new JdbcTransactionalStore();
  jdbcOutputOperator.setStore(outputStore);

  // add stream
  dag.addStream("Data", fileReader.output, customParser.input);
  dag.addStream("POJOs", customParser.output, jdbcOutputOperator.input);
}
 
Example #8
Source File: FSSliceReaderTest.java    From attic-apex-malhar with Apache License 2.0
@Override
protected void starting(org.junit.runner.Description description)
{
  output = "target/" + description.getClassName() + "/" + description.getMethodName();
  try {
    FileUtils.forceMkdir(new File(output));
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
  dataFile = new File("src/test/resources/reader_test_data.csv");
  blockReader = getBlockReader();

  Attribute.AttributeMap.DefaultAttributeMap readerAttr = new Attribute.AttributeMap.DefaultAttributeMap();
  readerAttr.put(DAG.APPLICATION_ID, Long.toHexString(System.currentTimeMillis()));
  readerAttr.put(Context.OperatorContext.SPIN_MILLIS, 10);
  readerContext = mockOperatorContext(1, readerAttr);

  blockReader.setup(readerContext);

  messageSink = new CollectorTestSink<>();
  blockReader.messages.setSink(messageSink);

  blockMetadataSink = new CollectorTestSink<>();
  blockReader.blocksMetadataOutput.setSink(blockMetadataSink);
}
 
Example #9
Source File: Application.java    From attic-apex-malhar with Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration configuration)
{
  WordGenerator inputOperator = new WordGenerator();
  KeyedWindowedOperatorImpl<String, Long, MutableLong, Long> windowedOperator = new KeyedWindowedOperatorImpl<>();
  Accumulation<Long, MutableLong, Long> sum = new SumAccumulation();

  windowedOperator.setAccumulation(sum);
  windowedOperator.setDataStorage(new InMemoryWindowedKeyedStorage<String, MutableLong>());
  windowedOperator.setRetractionStorage(new InMemoryWindowedKeyedStorage<String, Long>());
  windowedOperator.setWindowStateStorage(new InMemoryWindowedStorage<WindowState>());
  windowedOperator.setWindowOption(new WindowOption.TimeWindows(Duration.standardMinutes(1)));
  windowedOperator.setTriggerOption(TriggerOption.AtWatermark().withEarlyFiringsAtEvery(Duration.millis(1000)).accumulatingAndRetractingFiredPanes());
  //windowedOperator.setAllowedLateness(Duration.millis(14000));

  ConsoleOutputOperator outputOperator = new ConsoleOutputOperator();
  dag.addOperator("inputOperator", inputOperator);
  dag.addOperator("windowedOperator", windowedOperator);
  dag.addOperator("outputOperator", outputOperator);
  dag.addStream("input_windowed", inputOperator.output, windowedOperator.input);
  dag.addStream("windowed_output", windowedOperator.output, outputOperator.input);
}
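SumAccumulation is defined elsewhere in this example's source. For reference, an Accumulation that sums Longs into a MutableLong would look roughly like this sketch, written against Malhar's Accumulation<InputT, AccumT, OutputT> interface (treat the exact method set as an assumption):

public static class SumAccumulation implements Accumulation<Long, MutableLong, Long>
{
  @Override
  public MutableLong defaultAccumulatedValue()
  {
    return new MutableLong(0);
  }

  @Override
  public MutableLong accumulate(MutableLong accumulated, Long input)
  {
    accumulated.add(input); // add the incoming value to the running sum
    return accumulated;
  }

  @Override
  public MutableLong merge(MutableLong accumulated1, MutableLong accumulated2)
  {
    accumulated1.add(accumulated2);
    return accumulated1;
  }

  @Override
  public Long getOutput(MutableLong accumulated)
  {
    return accumulated.longValue();
  }

  @Override
  public Long getRetraction(Long value)
  {
    // Negate the output so accumulating-and-retracting panes can undo prior firings.
    return -value;
  }
}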
 
Example #10
Source File: JdbcNonTransactionalBatchOutputOperatorTest.java    From attic-apex-malhar with Apache License 2.0
private static TestOutputOperator createOperator(ProcessingMode processingMode)
{
  JdbcNonTransactionalStore store = new JdbcNonTransactionalStore();
  store.setDatabaseDriver(JdbcNonTransactionalOutputOperatorTest.DB_DRIVER);
  store.setDatabaseUrl(JdbcNonTransactionalOutputOperatorTest.URL);

  TestOutputOperator outputOperator = new TestOutputOperator();

  com.datatorrent.api.Attribute.AttributeMap.DefaultAttributeMap attributeMap = new com.datatorrent.api.Attribute.AttributeMap.DefaultAttributeMap();
  attributeMap.put(OperatorContext.PROCESSING_MODE, processingMode);
  attributeMap.put(OperatorContext.ACTIVATION_WINDOW_ID, -1L);
  attributeMap.put(DAG.APPLICATION_ID, APP_ID);
  OperatorContext context = mockOperatorContext(OPERATOR_ID, attributeMap);
  outputOperator.setStore(store);
  outputOperator.setBatchSize(BATCH_SIZE);

  outputOperator.setup(context);

  return outputOperator;
}
 
Example #11
Source File: ApplicationCPPAppMetrics.java    From examples with Apache License 2.0
public void populateDAG(DAG dag, Configuration conf)
{
  POJOGenerator generator = dag.addOperator("POJOGenerator", POJOGenerator.class);
  CsvParser csvParser = dag.addOperator("csvParser", CsvParser.class);
  FilterOperator filterOperator = dag.addOperator("filter", new FilterOperator());
  TopNAccounts topN = dag.addOperator("topN", new TopNAccounts());
  CsvFormatter formatter = dag.addOperator("formatter", new CsvFormatter());
  ConsoleOutputOperator console = dag.addOperator("console", new ConsoleOutputOperator());

  dag.addStream("data", generator.out, csvParser.in).setLocality(DAG.Locality.THREAD_LOCAL);
  dag.addStream("pojo", csvParser.out, filterOperator.input, topN.in);
  dag.addStream("filtered", filterOperator.truePort, formatter.in);
  dag.addStream("string", formatter.out, console.input).setLocality(DAG.Locality.THREAD_LOCAL);

  dag.setAttribute(Context.DAGContext.METRICS_TRANSPORT, null);
  dag.setAttribute(topN, Context.OperatorContext.METRICS_AGGREGATOR, new TopNAggregator());
  dag.setAttribute(AppMetricProcessor.APP_METRIC_PROCESSOR, new AppMetricsService());
}
 
Example #12
Source File: CustomerServiceHbaseOutputOperatorTester.java    From examples with Apache License 2.0
/**
 * Populates the DAG that writes tuples into HBase.
 * @param dag
 * @param conf
 */
protected void populateOutputDAG(DAG dag, Configuration conf)
{
  customerServiceGenerator = new SpecificCustomerServiceGenerateOperator();
  customerServiceGenerator.capacity = CAPACITY;
  
  dag.addOperator("CustomerService-Generator", customerServiceGenerator);

  cacheOperator = new TupleCacheOperator<>("cacheOperatorData");
  dag.addOperator("Cache", cacheOperator);
  
  dag.addStream("GenerateStream", customerServiceGenerator.outputPort, cacheOperator.inputPort).setLocality(Locality.CONTAINER_LOCAL);

  {
    CustomerServiceHbaseOutputOperator hbaseOutput = new CustomerServiceHbaseOutputOperator();
    hbaseOutput.setStartOver(true);  //remove old table and create new
    
    dag.addOperator("CustomerService-Output", hbaseOutput);

    dag.addStream("CustomerService", cacheOperator.outputPort, hbaseOutput.input).setLocality(Locality.CONTAINER_LOCAL);
  }
}
 
Example #13
Source File: StatefulUniqueCountTest.java    From attic-apex-malhar with Apache License 2.0
@SuppressWarnings("unchecked")
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  KeyGen keyGen = dag.addOperator("KeyGenerator", new KeyGen());
  UniqueValueCount<Integer> valCount = dag.addOperator("ValueCounter", new UniqueValueCount<Integer>());
  IntegerUniqueValueCountAppender uniqueUnifier = dag.addOperator("Unique", new IntegerUniqueValueCountAppender());
  VerifyTable verifyTable = dag.addOperator("VerifyTable", new VerifyTable());

  @SuppressWarnings("rawtypes")
  DefaultOutputPort valOut = valCount.output;
  @SuppressWarnings("rawtypes")
  DefaultOutputPort uniqueOut = uniqueUnifier.output;
  dag.addStream("DataIn", keyGen.output, valCount.input);
  dag.addStream("UnifyWindows", valOut, uniqueUnifier.input);
  dag.addStream("ResultsOut", uniqueOut, verifyTable.input);
}
 
Example #14
Source File: OldFaithfulApplication.java    From attic-apex-malhar with Apache License 2.0
/**
 * Create the DAG
 */
@Override
public void populateDAG(DAG dag, Configuration conf)
{

  InputGenerator randomInputGenerator = dag.addOperator("rand", new InputGenerator());
  FaithfulRScript rScriptOp = dag.addOperator("rScriptOp", new FaithfulRScript("com/datatorrent/examples/r/oldfaithful/eruptionModel.R", "eruptionModel", "retVal"));
  ConsoleOutputOperator consoles = dag.addOperator("consoles", new ConsoleOutputOperator());

  Map<String, FaithfulRScript.REXP_TYPE> argTypeMap = new HashMap<String, FaithfulRScript.REXP_TYPE>();

  argTypeMap.put("ELAPSEDTIME", FaithfulRScript.REXP_TYPE.REXP_INT);
  argTypeMap.put("ERUPTIONS", FaithfulRScript.REXP_TYPE.REXP_ARRAY_DOUBLE);
  argTypeMap.put("WAITING", FaithfulRScript.REXP_TYPE.REXP_ARRAY_INT);

  rScriptOp.setArgTypeMap(argTypeMap);

  dag.addStream("ingen_faithfulRscript", randomInputGenerator.outputPort, rScriptOp.faithfulInput).setLocality(locality);
  dag.addStream("ingen_faithfulRscript_eT", randomInputGenerator.elapsedTime, rScriptOp.inputElapsedTime).setLocality(locality);
  dag.addStream("faithfulRscript_console_s", rScriptOp.strOutput, consoles.input).setLocality(locality);

}
 
Example #15
Source File: Application.java    From attic-apex-malhar with Apache License 2.0
private MultiWindowDimensionAggregation getSystemAggregationOper(String name, DAG dag)
{
  MultiWindowDimensionAggregation oper = dag.addOperator(name, MultiWindowDimensionAggregation.class);
  List<int[]> dimensionArrayList = new ArrayList<int[]>();
  int[] dimensionArray = {0};
  dimensionArrayList.add(dimensionArray);
  oper.setDimensionArray(dimensionArrayList);

  oper.setTimeBucket(TIME_BUCKETS.m.name());
  oper.setDimensionKeyVal("1");

  oper.setOperationType(AggregateOperation.AVERAGE);
  oper.setWindowSize(120); // 120 x 500 ms streaming windows = 1 min

  return oper;
}
 
Example #16
Source File: FSWindowDataManager.java    From attic-apex-malhar with Apache License 2.0
@Override
public void setup(Context.OperatorContext context)
{
  serializationBuffer = new SerializationBuffer(new WindowedBlockStream());
  operatorId = context.getId();

  if (isStatePathRelativeToAppPath) {
    fullStatePath = context.getValue(DAG.APPLICATION_PATH) + Path.SEPARATOR + statePath;
  } else {
    fullStatePath = statePath;
  }

  try {
    fileContext = FileContextUtils.getFileContext(fullStatePath);
    setupWals(context.getValue(Context.OperatorContext.ACTIVATION_WINDOW_ID));
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
}
 
Example #17
Source File: AbstractReconcilerTest.java    From attic-apex-malhar with Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  RandomWordGenerator generator = dag.addOperator("words", new RandomWordGenerator());
  TestReconciler reconciler = dag.addOperator("synchronizer", new TestReconciler());
  generator.setTuplesPerWindow(10);
  dag.addStream("toWriter", generator.output, reconciler.input);
}
 
Example #18
Source File: CombinePerKeyExamples.java    From attic-apex-malhar with Apache License 2.0
/**
 * Populate dag using High-Level API.
 * @param dag
 * @param conf
 */
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  SampleInput input = new SampleInput();
  Collector collector = new Collector();
  StreamFactory.fromInput(input, input.beanOutput, name("input"))
    .addCompositeStreams(new PlaysForWord())
    .print(name("console"))
    .endWith(collector, collector.input, name("Collector"))
    .populateDag(dag);
}
 
Example #19
Source File: AvroFileInputOperatorTest.java    From attic-apex-malhar with Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  AvroFileInputOperator avroInputOperator = dag.addOperator("avroInputOperator", getAvroFileInput());
  AvroToPojo avroToPojo = dag.addOperator("AvroToPojo", getAvroToPojo());
  ConsoleOutputOperator consoleOutput = dag.addOperator("GenericRecordOp", new ConsoleOutputOperator());
  dag.getMeta(avroToPojo).getMeta(avroToPojo.output).getAttributes().put(Context.PortContext.TUPLE_CLASS,
      SimpleOrder.class);

  dag.addStream("GenericRecords", avroInputOperator.output, avroToPojo.data).setLocality(Locality.THREAD_LOCAL);
  dag.addStream("POJO", avroToPojo.output, consoleOutput.input).setLocality(Locality.CONTAINER_LOCAL);
}
 
Example #20
Source File: FunctionOperatorTest.java    From attic-apex-malhar with Apache License 2.0
@Test
public void testFilterOperator() throws Exception
{
  LocalMode lma = LocalMode.newInstance();
  DAG dag = lma.getDAG();

  FunctionOperator.FilterFunctionOperator<Integer> filter0
      = new FunctionOperator.FilterFunctionOperator<Integer>(new Function.FilterFunction<Integer>()
      {
        @Override
        public boolean f(Integer in)
        {
          return in % divider == 0;
        }
      });

  NumberGenerator numGen = dag.addOperator("numGen", new NumberGenerator());
  FunctionOperator.FilterFunctionOperator<Integer> filter = dag.addOperator("filter", filter0);
  ResultCollector collector = dag.addOperator("collector", new ResultCollector());

  dag.addStream("raw numbers", numGen.output, filter.input);
  dag.addStream("filtered results", filter.output, collector.input);

  // Create local cluster
  LocalMode.Controller lc = lma.getController();
  lc.setHeartbeatMonitoringEnabled(false);

  ((StramLocalCluster)lc).setExitCondition(new Callable<Boolean>()
  {
    @Override
    public Boolean call() throws Exception
    {
      return TupleCount == NumTuples / divider;
    }
  });

  lc.run(5000);
  Assert.assertEquals(20, sum);
}
 
Example #21
Source File: AdsDimensionsDemoDifferentTimeBuckets.java    From examples with Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  this.appName = APP_NAME;
  this.eventSchemaLocation = EVENT_SCHEMA_LOCATION;
  super.populateDAG(dag, conf);
}
 
Example #22
Source File: XmlParserApplicationTest.java    From attic-apex-malhar with Apache License 2.0
@Test
public void testApplication()
{
  try {
    LocalMode lma = LocalMode.newInstance();
    DAG dag = lma.getDAG();
    XmlDataEmitterOperator input = dag.addOperator("data", new XmlDataEmitterOperator());
    XmlParser parser = dag.addOperator("xmlparser", new XmlParser());
    ResultCollector rc = dag.addOperator("rc", new ResultCollector());
    dag.getMeta(parser).getMeta(parser.out).getAttributes().put(Context.PortContext.TUPLE_CLASS, org.apache.apex.malhar.lib.parser.XmlParserTest.EmployeeBean.class);
    ConsoleOutputOperator xmlObjectOp = dag.addOperator("xmlObjectOp", new ConsoleOutputOperator());
    xmlObjectOp.setDebug(true);
    dag.addStream("input", input.output, parser.in);
    dag.addStream("output", parser.parsedOutput, xmlObjectOp.input);
    dag.addStream("pojo", parser.out,rc.input);
    LocalMode.Controller lc = lma.getController();
    lc.setHeartbeatMonitoringEnabled(false);
    ((StramLocalCluster)lc).setExitCondition(new Callable<Boolean>()
    {
      @Override
      public Boolean call() throws Exception
      {
        return TupleCount == 1;
      }
    });
    lc.run(10000); // runs for 10 seconds and quits
    Assert.assertEquals(1, TupleCount);
    Assert.assertEquals("john", obj.getName());
  } catch (ConstraintViolationException e) {
    Assert.fail("constraint violations: " + e.getConstraintViolations());
  }
}
 
Example #23
Source File: AdsDimensionsGenericBenchmark.java    From examples with Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  InputItemGenerator input = dag.addOperator("InputGenerator", InputItemGenerator.class);
  DimensionsComputationFlexibleSingleSchemaPOJO dimensions = dag.addOperator("DimensionsComputation",
      DimensionsComputationFlexibleSingleSchemaPOJO.class);
  dag.getMeta(dimensions).getAttributes().put(Context.OperatorContext.APPLICATION_WINDOW_COUNT, 10);
  DevNull<Object> devNull = dag.addOperator("DevNull", new DevNull<Object>());

  //Set input properties
  String eventSchema = SchemaUtils.jarResourceFileToString("adsBenchmarkSchema.json");
  input.setEventSchemaJSON(eventSchema);

  Map<String, String> keyToExpression = Maps.newHashMap();
  keyToExpression.put("publisher", "getPublisher()");
  keyToExpression.put("advertiser", "getAdvertiser()");
  keyToExpression.put("location", "getLocation()");
  keyToExpression.put("time", "getTime()");

  Map<String, String> aggregateToExpression = Maps.newHashMap();
  aggregateToExpression.put("cost", "getCost()");
  aggregateToExpression.put("revenue", "getRevenue()");
  aggregateToExpression.put("impressions", "getImpressions()");
  aggregateToExpression.put("clicks", "getClicks()");

  DimensionsComputationUnifierImpl<InputEvent, Aggregate> unifier = new DimensionsComputationUnifierImpl<InputEvent, Aggregate>();
  dimensions.setUnifier(unifier);
  dimensions.setKeyToExpression(keyToExpression);
  dimensions.setAggregateToExpression(aggregateToExpression);
  dimensions.setConfigurationSchemaJSON(eventSchema);

  dag.addStream("InputStream", input.outputPort, dimensions.input).setLocality(Locality.CONTAINER_LOCAL);
  dag.addStream("DimensionalData", dimensions.output, devNull.data);
}
 
Example #24
Source File: AdsDimensionsStatsBenchmark.java    From examples with Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration c)
{
  InputItemGenerator input = dag.addOperator("InputGenerator", InputItemGenerator.class);
  DimensionsComputation<AdInfo, AdInfo.AdInfoAggregateEvent> dimensions = dag.addOperator("DimensionsComputation", new DimensionsComputation<AdInfo, AdInfo.AdInfoAggregateEvent>());
  dag.getMeta(dimensions).getAttributes().put(Context.OperatorContext.APPLICATION_WINDOW_COUNT, 10);
  DevNull<Object> devNull = dag.addOperator("DevNull", new DevNull<Object>());

  input.setEventSchemaJSON(SchemaUtils.jarResourceFileToString("adsBenchmarkSchema.json"));

  String[] dimensionSpecs = new String[] {
      "time=" + TimeUnit.MINUTES, 
      "time=" + TimeUnit.MINUTES + ":location",
      "time=" + TimeUnit.MINUTES + ":advertiser", 
      "time=" + TimeUnit.MINUTES + ":publisher",
      "time=" + TimeUnit.MINUTES + ":advertiser:location", 
      "time=" + TimeUnit.MINUTES + ":publisher:location",
      "time=" + TimeUnit.MINUTES + ":publisher:advertiser",
      "time=" + TimeUnit.MINUTES + ":publisher:advertiser:location" };

  AdInfoAggregator[] aggregators = new AdInfoAggregator[dimensionSpecs.length];
  for (int index = 0; index < dimensionSpecs.length; index++) {
    AdInfoAggregator aggregator = new AdInfoAggregator();
    aggregator.init(dimensionSpecs[index], index);
    aggregators[index] = aggregator;
  }

  dimensions.setAggregators(aggregators);
  DimensionsComputationUnifierImpl<AdInfo, AdInfo.AdInfoAggregateEvent> unifier = new DimensionsComputationUnifierImpl<AdInfo, AdInfo.AdInfoAggregateEvent>();
  unifier.setAggregators(aggregators);
  dimensions.setUnifier(unifier);

  dag.addStream("InputStream", input.outputPort, dimensions.data).setLocality(Locality.CONTAINER_LOCAL);
  dag.addStream("DimensionalData", dimensions.output, devNull.data);
}
 
Example #25
Source File: AbstractManagedStateInnerJoinOperator.java    From attic-apex-malhar with Apache License 2.0
@Override
public void setup(Context.OperatorContext context)
{
  super.setup(context);
  // Each stream's managed state lives under its own base path in the application directory.
  ((FileAccessFSImpl)stream1Store.getFileAccess()).setBasePath(context.getValue(DAG.APPLICATION_PATH)
      + Path.SEPARATOR + stateDir + Path.SEPARATOR + String.valueOf(context.getId()) + Path.SEPARATOR + stream1State);
  ((FileAccessFSImpl)stream2Store.getFileAccess()).setBasePath(context.getValue(DAG.APPLICATION_PATH)
      + Path.SEPARATOR + stateDir + Path.SEPARATOR + String.valueOf(context.getId()) + Path.SEPARATOR + stream2State);
  stream1Store.getCheckpointManager().setStatePath("managed_state_" + stream1State);
  stream2Store.getCheckpointManager().setStatePath("managed_state_" + stream2State);
  stream1Store.setup(context);
  stream2Store.setup(context);
}
 
Example #26
Source File: JMSTransactionableStoreTestBase.java    From attic-apex-malhar with Apache License 2.0
@Override
protected void starting(org.junit.runner.Description description)
{
  //Create fresh operator context
  DefaultAttributeMap attributes = new DefaultAttributeMap();
  attributes.put(DAG.APPLICATION_ID, APP_ID);
  testOperatorContext = mockOperatorContext(OPERATOR_ID, attributes);
  testOperator2Context = mockOperatorContext(OPERATOR_2_ID, attributes);
  FileUtils.deleteQuietly(new File(FSPsuedoTransactionableStore.DEFAULT_RECOVERY_DIRECTORY));
}
 
Example #27
Source File: KafkaOutputBenchmark.java    From attic-apex-malhar with Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  dag.setAttribute(DAG.APPLICATION_NAME, "KafkaOutputBenchmark");
  BenchmarkPartitionableKafkaOutputOperator bpkoo = dag.addOperator(
      "KafkaBenchmarkProducer", BenchmarkPartitionableKafkaOutputOperator.class);
  bpkoo.setBrokerList(conf.get("kafka.brokerlist"));
  bpkoo.setPartitionCount(2);
}
 
Example #28
Source File: App.java    From attic-apex-malhar with Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  Gen gen         = dag.addOperator("gen",     Gen.class);
  DevNull devNull = dag.addOperator("devNull", DevNull.class);

  dag.addStream("data", gen.out, devNull.data);
}
 
Example #29
Source File: AtomicFileOutputApp.java    From examples with Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration configuration)
{
  KafkaSinglePortStringInputOperator kafkaInput = dag.addOperator("kafkaInput",
      new KafkaSinglePortStringInputOperator());
  kafkaInput.setWindowDataManager(new FSWindowDataManager());

  Application.UniqueCounterFlat count = dag.addOperator("count", new Application.UniqueCounterFlat());

  FileWriter fileWriter = dag.addOperator("fileWriter", new FileWriter());

  ConsoleOutputOperator cons = dag.addOperator("console", new ConsoleOutputOperator());
  dag.addStream("words", kafkaInput.outputPort, count.data);
  dag.addStream("counts", count.counts, fileWriter.input, cons.input);
}
 
Example #30
Source File: ApplicationWithDCWithoutDeserializer.java    From streaming-benchmarks with Apache License 2.0
public DefaultOutputPort<DimensionTuple> populateUpstreamDAG(DAG dag, Configuration configuration)
{
  JsonGenerator eventGenerator = dag.addOperator("eventGenerator", new JsonGenerator());
  FilterTuples filterTuples = dag.addOperator("filterTuples", new FilterTuples());
  FilterFields filterFields = dag.addOperator("filterFields", new FilterFields());
  
  // Connect the Ports in the Operators
  dag.addStream("filterTuples", eventGenerator.out, filterTuples.input).setLocality(DAG.Locality.CONTAINER_LOCAL);
  dag.addStream("filterFields", filterTuples.output, filterFields.input).setLocality(DAG.Locality.CONTAINER_LOCAL);
  
  TupleToDimensionTupleConverter converter = dag.addOperator("converter", new TupleToDimensionTupleConverter());
  
  if (includeRedisJoin) {
    RedisJoin redisJoin = dag.addOperator("redisJoin", new RedisJoin());
    dag.addStream("redisJoin", filterFields.output, redisJoin.input).setLocality(DAG.Locality.CONTAINER_LOCAL);
    dag.addStream("converter", redisJoin.output, converter.inputPort).setLocality(DAG.Locality.CONTAINER_LOCAL);

    dag.setInputPortAttribute(redisJoin.input, Context.PortContext.PARTITION_PARALLEL, true);

    setupRedis(eventGenerator.getCampaigns());
  } else {
    dag.addStream("convert", filterFields.output, converter.inputPort).setLocality(DAG.Locality.CONTAINER_LOCAL);
  }

  dag.setInputPortAttribute(filterTuples.input, Context.PortContext.PARTITION_PARALLEL, true);
  dag.setInputPortAttribute(filterFields.input, Context.PortContext.PARTITION_PARALLEL, true);
  dag.setInputPortAttribute(converter.inputPort, Context.PortContext.PARTITION_PARALLEL, true);

  dag.setAttribute(eventGenerator, Context.OperatorContext.PARTITIONER, new StatelessPartitioner<EventGenerator>(PARTITION_NUM));

  return converter.outputPort;
}