Java Code Examples for com.datatorrent.api.DAG#addStream()
The following examples show how to use com.datatorrent.api.DAG#addStream().
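Before the examples, here is a minimal sketch of the pattern they all share: operators are first registered with DAG#addOperator(), and DAG#addStream() then connects an output port to one or more input ports, returning a StreamMeta on which stream attributes such as locality can be set. The operator classes below come from the Apache Apex Malhar library; the application class name and the input directory are illustrative placeholders, not taken from any example on this page.

import org.apache.hadoop.conf.Configuration;

import com.datatorrent.api.DAG;
import com.datatorrent.api.StreamingApplication;
import com.datatorrent.lib.io.ConsoleOutputOperator;
import com.datatorrent.lib.io.fs.LineByLineFileInputOperator;

// Hypothetical application: reads lines from a directory and echoes them to the console.
public class MinimalStreamApp implements StreamingApplication
{
  @Override
  public void populateDAG(DAG dag, Configuration conf)
  {
    // Operators must be added to the DAG before their ports can be connected.
    LineByLineFileInputOperator reader = dag.addOperator("Reader", new LineByLineFileInputOperator());
    reader.setDirectory("/tmp/input"); // illustrative path
    ConsoleOutputOperator console = dag.addOperator("Console", new ConsoleOutputOperator());

    // addStream(id, sourceOutputPort, sinkInputPorts...) wires the ports together;
    // the returned StreamMeta lets you set attributes such as locality.
    dag.addStream("lines", reader.output, console.input)
        .setLocality(DAG.Locality.CONTAINER_LOCAL);
  }
}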
Example 1
Source File: HDFSFileCopyModule.java From attic-apex-malhar with Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  // BlockWriter persists incoming file blocks; Synchronizer signals when all blocks of a file are written.
  BlockWriter blockWriter = dag.addOperator("BlockWriter", new BlockWriter());
  Synchronizer synchronizer = dag.addOperator("BlockSynchronizer", new Synchronizer());

  dag.setInputPortAttribute(blockWriter.input, PortContext.PARTITION_PARALLEL, true);
  dag.setInputPortAttribute(blockWriter.blockMetadataInput, PortContext.PARTITION_PARALLEL, true);
  dag.addStream("CompletedBlockmetadata", blockWriter.blockMetadataOutput, synchronizer.blocksMetadataInput);

  // HDFSFileMerger stitches the completed blocks into the target file.
  HDFSFileMerger merger = new HDFSFileMerger();
  merger = dag.addOperator("FileMerger", merger);
  dag.addStream("MergeTrigger", synchronizer.trigger, merger.input);

  merger.setFilePath(outputDirectoryPath);
  merger.setOverwriteOnConflict(overwriteOnConflict);
  blockWriter.setBlocksDirectory(blocksDirectory);
  merger.setBlocksDirectory(blocksDirectory);

  // Expose the module's proxy input ports.
  filesMetadataInput.set(synchronizer.filesMetadataInput);
  blocksMetadataInput.set(blockWriter.blockMetadataInput);
  blockData.set(blockWriter.input);
}
Example 2
Source File: TwitterTrendingHashtagsApplication.java From attic-apex-malhar with Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  // Setup the operator to get the data from twitter sample stream injected into the system.
  TwitterSampleInput twitterFeed = new TwitterSampleInput();
  twitterFeed = dag.addOperator("TweetSampler", twitterFeed);

  // Setup a node to count the unique Hashtags within a window.
  UniqueCounter<String> uniqueCounter = dag.addOperator("UniqueHashtagCounter", new UniqueCounter<String>());

  // Get the aggregated Hashtag counts and count them over last 5 mins.
  WindowedTopCounter<String> topCounts = dag.addOperator("TopCounter", new WindowedTopCounter<String>());
  topCounts.setTopCount(10);
  topCounts.setSlidingWindowWidth(600);
  topCounts.setDagWindowWidth(1);

  dag.addStream("TwittedHashtags", twitterFeed.hashtag, uniqueCounter.data).setLocality(locality);
  // Count unique Hashtags
  dag.addStream("UniqueHashtagCounts", uniqueCounter.count, topCounts.input);

  TwitterTopCounterApplication.consoleOutput(dag, "topHashtags", topCounts.output, SNAPSHOT_SCHEMA, "hashtag");
}
Example 3
Source File: EventIncrementerApp.java From attic-apex-malhar with Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  EventIncrementer eventInc = dag.addOperator("eventInc", new EventIncrementer());
  ArrayList<String> keys = new ArrayList<String>(2);
  ArrayList<Double> low = new ArrayList<Double>(2);
  ArrayList<Double> high = new ArrayList<Double>(2);
  keys.add("x");
  keys.add("y");
  low.add(1.0);
  low.add(1.0);
  high.add(100.0);
  high.add(100.0);
  eventInc.setKeylimits(keys, low, high);
  eventInc.setDelta(1);

  HashMapOperator hmapOper = dag.addOperator("hmapOper", new HashMapOperator());
  dag.addStream("eventIncInput1", hmapOper.hmapList_data, eventInc.seed);
  dag.addStream("eventIncInput2", hmapOper.hmapMap_data, eventInc.increment);

  DevNull<HashMap<String, Integer>> dev1 = dag.addOperator("dev1", new DevNull());
  DevNull<HashMap<String, String>> dev2 = dag.addOperator("dev2", new DevNull());
  dag.addStream("eventIncOutput1", eventInc.count, dev1.data).setLocality(locality);
  dag.addStream("eventIncOutput2", eventInc.data, dev2.data).setLocality(locality);
}
Example 4
Source File: JdbcPollerApplication.java From attic-apex-malhar with Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  JdbcPOJOPollInputOperator poller = dag.addOperator("JdbcPoller", new JdbcPOJOPollInputOperator());
  JdbcStore store = new JdbcStore();
  poller.setStore(store);
  poller.setFieldInfos(addFieldInfos());

  FileLineOutputOperator writer = dag.addOperator("Writer", new FileLineOutputOperator());
  dag.setInputPortAttribute(writer.input, PortContext.PARTITION_PARALLEL, true);
  writer.setRotationWindows(60); // roll output files every 60 application windows

  dag.addStream("dbrecords", poller.outputPort, writer.input);
}
Example 5
Source File: Application.java From attic-apex-malhar with Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  // Generate JSON, parse it into a POJO, then format the POJO back to JSON.
  JsonGenerator generator = dag.addOperator("JsonGenerator", JsonGenerator.class);
  JsonParser parser = dag.addOperator("JsonParser", JsonParser.class);
  JsonFormatter formatter = dag.addOperator("JsonFormatter", JsonFormatter.class);

  // Console sinks for the formatted string, the parsed object, and parse errors.
  ConsoleOutputOperator jsonString = dag.addOperator("JsonString", ConsoleOutputOperator.class);
  ConsoleOutputOperator jsonObject = dag.addOperator("JsonObject", ConsoleOutputOperator.class);
  ConsoleOutputOperator error = dag.addOperator("Error", ConsoleOutputOperator.class);

  dag.addStream("json", generator.out, parser.in);
  dag.addStream("pojo", parser.out, formatter.in);
  dag.addStream("jsonString", formatter.out, jsonString.input);
  dag.addStream("jsonObject", parser.parsedOutput, jsonObject.input);
  dag.addStream("error", parser.err, error.input);
}
Example 6
Source File: Application.java From examples with Apache License 2.0
public void populateDAG(DAG dag, Configuration conf)
{
  int tuplesCount = conf.getInt("dt.application.CassandraOutputApplication.prop.rowsCount", DEFAULT_WRITE_COUNT);

  PojoGenerator tuplesGenerator = new PojoGenerator();
  tuplesGenerator.setTupleNum(tuplesCount);

  CassandraTransactionalStore transactionalStore = new CassandraTransactionalStore();
  CassandraPOJOOutputOperator cassandraOutput = new CassandraPOJOOutputOperator();
  cassandraOutput.setStore(transactionalStore);

  dag.addOperator("TuplesDataGenerator", tuplesGenerator);
  dag.addOperator("CassandraDataWriter", cassandraOutput);
  dag.addStream("tuplesToDatabase", tuplesGenerator.outputPort, cassandraOutput.input);
}
Example 7
Source File: Application.java From attic-apex-malhar with Apache License 2.0
public void populateDAG(DAG dag, Configuration conf)
{
  FSRecordReaderModule recordReader = dag.addModule("lineInput", FSRecordReaderModule.class);
  S3BytesOutputModule s3StringOutputModule = dag.addModule("s3TupleOutput", S3BytesOutputModule.class);
  dag.addStream("data", recordReader.records, s3StringOutputModule.input);
}
Example 8
Source File: S3RecordReaderMockTest.java From attic-apex-malhar with Apache License 2.0
public void populateDAG(DAG dag, Configuration conf)
{
  FSRecordReaderModule recordReader = dag.addModule("S3RecordReaderModuleMock", FSRecordReaderModule.class);
  recordReader.setMode("FIXED_WIDTH_RECORD");
  FixedWidthValidator validator = dag.addOperator("Validator", new FixedWidthValidator());
  dag.addStream("records", recordReader.records, validator.data);
}
Example 9
Source File: Application.java From attic-apex-malhar with Apache License 2.0
/**
 * This function sets up the DAG for calculating the average
 *
 * @param dag the DAG instance
 * @param conf the configuration instance
 * @return MachineInfoAveragingPrerequisitesOperator
 */
private MachineInfoAveragingPrerequisitesOperator addAverageCalculation(DAG dag, Configuration conf)
{
  MachineInfoAveragingPrerequisitesOperator prereqAverageOper = dag.addOperator("Aggregator", MachineInfoAveragingPrerequisitesOperator.class);
  MachineInfoAveragingOperator averageOperator = dag.addOperator("AverageCalculator", MachineInfoAveragingOperator.class);
  RedisKeyValPairOutputOperator<MachineKey, Map<String, String>> redisAvgOperator = dag.addOperator("Persister", new RedisKeyValPairOutputOperator<MachineKey, Map<String, String>>());
  dag.addStream("Average", averageOperator.outputPort, redisAvgOperator.input);

  SmtpOutputOperator smtpOutputOperator = dag.addOperator("Alerter", new SmtpOutputOperator());
  dag.addStream("Aggregates", prereqAverageOper.outputPort, averageOperator.inputPort);
  dag.addStream("Alerts", averageOperator.smtpAlert, smtpOutputOperator.input);

  return prereqAverageOper;
}
Example 10
Source File: TelecomHiveOutputOperatorTester.java From examples with Apache License 2.0
public void populateDAG(DAG dag, Configuration conf)
{
  CallDetailRecordGenerateOperator generator = new CallDetailRecordGenerateOperator();
  dag.addOperator("CDR-Generator", generator);

  TelecomHiveOutputOperator hiveOutput = new TelecomHiveOutputOperator();
  hiveOutput.setFilePath(FILE_DIR);
  hiveOutput.setOutputFileName(FILE_NAME);
  hiveOutput.setMaxLength(1024 * 1024);
  hiveOutput.setFilePermission((short)511);
  dag.addOperator("hiveOutput", hiveOutput);

  dag.addStream("CDR-Stream", generator.cdrOutputPort, hiveOutput.input);

  TelecomHiveExecuteOperator hiveOperator = new TelecomHiveExecuteOperator();
  {
    HiveStore hiveStore = createStore(null);
    hiveStore.setFilepath(FILE_DIR);
    hiveOperator.setHivestore(hiveStore);
  }
  hiveOperator.setHiveConfig(EnrichedCDRHiveConfig.instance());
  hiveOperator.setTablename(tablemap);
  ArrayList<String> hivePartitionColumns = new ArrayList<String>();
  hivePartitionColumns.add("dt");
  hiveOperator.setHivePartitionColumns(hivePartitionColumns);
  dag.addOperator("hiveOperator", hiveOperator);

  dag.addStream("hiveCmd", hiveOutput.hiveCmdOutput, hiveOperator.input);
}
Example 11
Source File: StramLocalClusterTest.java From attic-apex-core with Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  TestGeneratorInputOperator input = dag.addOperator("Input", new TestGeneratorInputOperator());
  test = dag.addOperator("Test", new DynamicLoader());

  dag.addStream("S1", input.outport, test.input);
  dag.setAttribute(Context.DAGContext.LIBRARY_JARS, generatedJar);
  dag.setInputPortAttribute(test.input, Context.PortContext.TUPLE_CLASS, pojo);
}
Example 12
Source File: ActiveMQApplication.java From examples with Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  // Consume strings from an ActiveMQ (JMS) destination and write them out as lines.
  JMSStringInputOperator amqInput = dag.addOperator("amqIn", new JMSStringInputOperator());
  LineOutputOperator out = dag.addOperator("fileOut", new LineOutputOperator());
  dag.addStream("data", amqInput.output, out.input);
}
Example 13
Source File: ModuleAppTest.java From attic-apex-core with Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  LOG.debug("Application - PopulateDAG");

  DummyInputOperator dummyInputOperator = dag.addOperator("DummyInputOperator", new DummyInputOperator());
  DummyOperatorAfterInput dummyOperatorAfterInput = dag.addOperator("DummyOperatorAfterInput", new DummyOperatorAfterInput());
  Module m1 = dag.addModule("TestModule1", new TestModule());
  Module m2 = dag.addModule("TestModule2", new TestModule());
  DummyOutputOperator dummyOutputOperator = dag.addOperator("DummyOutputOperator", new DummyOutputOperator());

  dag.addStream("Operator To Operator", dummyInputOperator.output, dummyOperatorAfterInput.input);
  dag.addStream("Operator To Module", dummyOperatorAfterInput.output, ((TestModule)m1).moduleInput);
  dag.addStream("Module To Module", ((TestModule)m1).moduleOutput, ((TestModule)m2).moduleInput);
  dag.addStream("Module To Operator", ((TestModule)m2).moduleOutput, dummyOutputOperator.input);
}
Example 14
Source File: Application.java From attic-apex-malhar with Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration entries)
{
  /* Generate random key-value pairs */
  RandomKeysGenerator randGen = dag.addOperator("randomgen", new RandomKeysGenerator());

  /* Initialize with three partition to start with */
  // UniqueCount1 uniqCount = dag.addOperator("uniqevalue", new UniqueCount1());
  UniqueCounter<Integer> uniqCount = dag.addOperator("uniqevalue", new UniqueCounter<Integer>());
  MapToKeyHashValuePairConverter<Integer, Integer> converter = dag.addOperator("converter", new MapToKeyHashValuePairConverter());
  uniqCount.setCumulative(false);
  dag.setAttribute(uniqCount, Context.OperatorContext.PARTITIONER, new StatelessPartitioner<UniqueCounter<Integer>>(3));

  CountVerifier<Integer> verifier = dag.addOperator("verifier", new CountVerifier<Integer>());
  StreamDuplicater<KeyHashValPair<Integer, Integer>> dup = dag.addOperator("dup", new StreamDuplicater<KeyHashValPair<Integer, Integer>>());
  ConsoleOutputOperator output = dag.addOperator("output", new ConsoleOutputOperator());
  ConsoleOutputOperator successOutput = dag.addOperator("successoutput", new ConsoleOutputOperator());
  successOutput.setStringFormat("Success %d");
  ConsoleOutputOperator failureOutput = dag.addOperator("failureoutput", new ConsoleOutputOperator());
  failureOutput.setStringFormat("Failure %d");

  // success and failure counters.
  Counter successcounter = dag.addOperator("successcounter", new Counter());
  Counter failurecounter = dag.addOperator("failurecounter", new Counter());

  dag.addStream("datain", randGen.outPort, uniqCount.data);
  dag.addStream("dataverification0", randGen.verificationPort, verifier.in1);
  dag.addStream("convert", uniqCount.count, converter.input).setLocality(Locality.THREAD_LOCAL);
  dag.addStream("split", converter.output, dup.data);
  dag.addStream("consoutput", dup.out1, output.input);
  dag.addStream("dataverification1", dup.out2, verifier.in2);
  dag.addStream("successc", verifier.successPort, successcounter.input);
  dag.addStream("failurec", verifier.failurePort, failurecounter.input);
  dag.addStream("succconsoutput", successcounter.output, successOutput.input);
  dag.addStream("failconsoutput", failurecounter.output, failureOutput.input);
}
Example 15
Source File: csvParserApplication.java From examples with Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  AdDataGenerator dataGenerator = dag.addOperator("dataGenerator", new AdDataGenerator());
  CsvParser parserOperator = dag.addOperator("csvParser", new CsvParser());
  FileOutputOperator dataOutput = dag.addOperator("dataOutput", new FileOutputOperator());
  FileOutputOperator errorOutput = dag.addOperator("errorOutput", new FileOutputOperator());
  ConsoleOutputOperator consoleOutput = dag.addOperator("consoleOutput", new ConsoleOutputOperator());

  dag.addStream("inputData", dataGenerator.out, parserOperator.in);
  dag.addStream("parsedData", parserOperator.parsedOutput, dataOutput.input);
  dag.addStream("errorData", parserOperator.err, errorOutput.input);
  dag.addStream("pojoData", parserOperator.out, consoleOutput.input);
}
Example 16
Source File: ThroughputBasedApplication.java From examples with Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  ThroughputBasedReader reader = dag.addOperator("read", ThroughputBasedReader.class);
  BytesFileWriter writer = dag.addOperator("write", BytesFileWriter.class);
  dag.setInputPortAttribute(writer.input, PARTITION_PARALLEL, true);
  dag.setInputPortAttribute(writer.control, PARTITION_PARALLEL, true);

  dag.addStream("data", reader.output, writer.input);
  dag.addStream("ctrl", reader.control, writer.control);
}
Example 17
Source File: AdsDimensionsDemoPerformant.java From examples with Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  //Declare operators
  InputItemGenerator input = dag.addOperator("InputGenerator", InputItemGenerator.class);
  DimensionsComputation<AdInfo, AdInfo.AdInfoAggregateEvent> dimensions = dag.addOperator("DimensionsComputation", new DimensionsComputation<AdInfo, AdInfo.AdInfoAggregateEvent>());
  DimensionsComputationUnifierImpl<AdInfo, AdInfo.AdInfoAggregateEvent> unifier = new DimensionsComputationUnifierImpl<AdInfo, AdInfo.AdInfoAggregateEvent>();
  dimensions.setUnifier(unifier);
  dag.getMeta(dimensions).getAttributes().put(Context.OperatorContext.APPLICATION_WINDOW_COUNT, 10);
  AdsConverter adsConverter = dag.addOperator("AdsConverter", new AdsConverter());
  AppDataSingleSchemaDimensionStoreHDHT store = dag.addOperator("Store", AppDataSingleSchemaDimensionStoreHDHT.class);

  String eventSchema = SchemaUtils.jarResourceFileToString(EVENT_SCHEMA);
  input.setEventSchemaJSON(eventSchema);

  String[] dimensionSpecs = new String[] {
    "time=" + TimeUnit.MINUTES,
    "time=" + TimeUnit.MINUTES + ":location",
    "time=" + TimeUnit.MINUTES + ":advertiser",
    "time=" + TimeUnit.MINUTES + ":publisher",
    "time=" + TimeUnit.MINUTES + ":advertiser:location",
    "time=" + TimeUnit.MINUTES + ":publisher:location",
    "time=" + TimeUnit.MINUTES + ":publisher:advertiser",
    "time=" + TimeUnit.MINUTES + ":publisher:advertiser:location"
  };

  //Set operator properties
  AdInfoAggregator[] aggregators = new AdInfoAggregator[dimensionSpecs.length];

  //Set input properties
  input.setEventSchemaJSON(eventSchema);

  for (int index = 0; index < dimensionSpecs.length; index++) {
    String dimensionSpec = dimensionSpecs[index];
    AdInfoAggregator aggregator = new AdInfoAggregator();
    aggregator.init(dimensionSpec, index);
    aggregators[index] = aggregator;
  }

  unifier.setAggregators(aggregators);
  dimensions.setAggregators(aggregators);
  dag.getMeta(dimensions).getMeta(dimensions.output).getUnifierMeta().getAttributes().put(OperatorContext.MEMORY_MB, 8092);

  //Configuring the converter
  adsConverter.setEventSchemaJSON(eventSchema);
  adsConverter.setDimensionSpecs(dimensionSpecs);

  //Set store properties
  String basePath = Preconditions.checkNotNull(conf.get(PROP_STORE_PATH), "a base path should be specified in the properties.xml");
  TFileImpl hdsFile = new TFileImpl.DTFileImpl();
  basePath += Path.SEPARATOR + System.currentTimeMillis();
  hdsFile.setBasePath(basePath);
  store.setFileStore(hdsFile);
  store.getResultFormatter().setContinuousFormatString("#.00");
  store.setConfigurationSchemaJSON(eventSchema);

  PubSubWebSocketAppDataQuery wsIn = new PubSubWebSocketAppDataQuery();
  store.setEmbeddableQueryInfoProvider(wsIn);
  PubSubWebSocketAppDataResult wsOut = dag.addOperator("QueryResult", new PubSubWebSocketAppDataResult());

  //Set remaining dag options
  dag.setAttribute(store, Context.OperatorContext.COUNTERS_AGGREGATOR, new BasicCounters.LongAggregator<MutableLong>());

  dag.addStream("InputStream", input.outputPort, dimensions.data).setLocality(Locality.CONTAINER_LOCAL);
  dag.addStream("DimensionalData", dimensions.output, adsConverter.inputPort);
  dag.addStream("Converter", adsConverter.outputPort, store.input);
  dag.addStream("QueryResult", store.queryResult, wsOut.input);
}
Example 18
Source File: Application.java From attic-apex-malhar with Apache License 2.0
@SuppressWarnings("unchecked") @Override public void populateDAG(DAG dag, Configuration conf) { try { URI duri = PubSubHelper.getURIWithDefault(dag, "localhost:9090"); PubSubWebSocketInputOperator userTxWsInput = getPubSubWebSocketInputOperator("userTxInput", dag, duri, "examples.app.frauddetect.submitTransaction"); PubSubWebSocketOutputOperator ccUserAlertWsOutput = getPubSubWebSocketOutputOperator("ccUserAlertQueryOutput", dag, duri, "examples.app.frauddetect.fraudAlert"); PubSubWebSocketOutputOperator avgUserAlertwsOutput = getPubSubWebSocketOutputOperator("avgUserAlertQueryOutput", dag, duri, "examples.app.frauddetect.fraudAlert"); PubSubWebSocketOutputOperator binUserAlertwsOutput = getPubSubWebSocketOutputOperator("binUserAlertOutput", dag, duri, "examples.app.frauddetect.fraudAlert"); PubSubWebSocketOutputOperator txSummaryWsOutput = getPubSubWebSocketOutputOperator("txSummaryWsOutput", dag, duri, "examples.app.frauddetect.txSummary"); SlidingWindowSumKeyVal<KeyValPair<MerchantKey, String>, Integer> smsOperator = dag.addOperator("movingSum", SlidingWindowSumKeyVal.class); MerchantTransactionGenerator txReceiver = dag.addOperator("txReceiver", MerchantTransactionGenerator.class); MerchantTransactionInputHandler txInputHandler = dag.addOperator("txInputHandler", new MerchantTransactionInputHandler()); BankIdNumberSamplerOperator binSampler = dag.addOperator("bankInfoFraudDetector", BankIdNumberSamplerOperator.class); MerchantTransactionBucketOperator txBucketOperator = dag.addOperator("txFilter", MerchantTransactionBucketOperator.class); RangeKeyVal rangeOperator = dag.addOperator("rangePerMerchant", new RangeKeyVal<MerchantKey, Long>()); SimpleMovingAverage<MerchantKey, Long> smaOperator = dag.addOperator("smaPerMerchant", SimpleMovingAverage.class); TransactionStatsAggregator txStatsAggregator = dag.addOperator("txStatsAggregator", TransactionStatsAggregator.class); AverageAlertingOperator avgAlertingOperator = dag.addOperator("avgAlerter", AverageAlertingOperator.class); CreditCardAmountSamplerOperator ccSamplerOperator = dag.addOperator("amountFraudDetector", CreditCardAmountSamplerOperator.class); HdfsStringOutputOperator hdfsOutputOperator = getHdfsOutputOperator("hdfsOutput", dag, "fraud"); MongoDBOutputOperator mongoTxStatsOperator = dag.addOperator("mongoTxStatsOutput", MongoDBOutputOperator.class); MongoDBOutputOperator mongoBinAlertsOperator = dag.addOperator("mongoBinAlertsOutput", MongoDBOutputOperator.class); MongoDBOutputOperator mongoCcAlertsOperator = dag.addOperator("mongoCcAlertsOutput", MongoDBOutputOperator.class); MongoDBOutputOperator mongoAvgAlertsOperator = dag.addOperator("mongoAvgAlertsOutput", MongoDBOutputOperator.class); dag.addStream("userTxStream", userTxWsInput.outputPort, txInputHandler.userTxInputPort); dag.addStream("transactions", txReceiver.txOutputPort, txBucketOperator.inputPort).setLocality(DAG.Locality.CONTAINER_LOCAL); dag.addStream("txData", txReceiver.txDataOutputPort, hdfsOutputOperator.input); // dump all tx into Hdfs dag.addStream("userTransactions", txInputHandler.txOutputPort, txBucketOperator.txUserInputPort); dag.addStream("bankInfoData", txBucketOperator.binCountOutputPort, smsOperator.data); dag.addStream("bankInfoCount", smsOperator.integerSum, binSampler.txCountInputPort); dag.addStream("filteredTransactions", txBucketOperator.txOutputPort, rangeOperator.data, smaOperator.data, avgAlertingOperator.txInputPort); KeyPartitionCodec<MerchantKey, Long> txCodec = new KeyPartitionCodec<MerchantKey, Long>(); 
dag.setInputPortAttribute(rangeOperator.data, Context.PortContext.STREAM_CODEC, txCodec); dag.setInputPortAttribute(smaOperator.data, Context.PortContext.STREAM_CODEC, txCodec); dag.setInputPortAttribute(avgAlertingOperator.txInputPort, Context.PortContext.STREAM_CODEC, txCodec); dag.addStream("creditCardData", txBucketOperator.ccAlertOutputPort, ccSamplerOperator.inputPort); dag.addStream("txnSummaryData", txBucketOperator.summaryTxnOutputPort, txSummaryWsOutput.input); dag.addStream("smaAlerts", smaOperator.doubleSMA, avgAlertingOperator.smaInputPort); dag.addStream("binAlerts", binSampler.countAlertOutputPort, mongoBinAlertsOperator.inputPort); dag.addStream("binAlertsNotification", binSampler.countAlertNotificationPort, binUserAlertwsOutput.input); dag.addStream("rangeData", rangeOperator.range, txStatsAggregator.rangeInputPort); dag.addStream("smaData", smaOperator.longSMA, txStatsAggregator.smaInputPort); dag.addStream("txStatsOutput", txStatsAggregator.txDataOutputPort, mongoTxStatsOperator.inputPort); dag.addStream("avgAlerts", avgAlertingOperator.avgAlertOutputPort, mongoAvgAlertsOperator.inputPort); dag.addStream("avgAlertsNotification", avgAlertingOperator.avgAlertNotificationPort, avgUserAlertwsOutput.input); dag.addStream("ccAlerts", ccSamplerOperator.ccAlertOutputPort, mongoCcAlertsOperator.inputPort); dag.addStream("ccAlertsNotification", ccSamplerOperator.ccAlertNotificationPort, ccUserAlertWsOutput.input); } catch (Exception exc) { DTThrowable.rethrow(exc); } }
Example 19
Source File: ApplicationWithQuerySupport.java From attic-apex-malhar with Apache License 2.0
/**
 * Populates the DAG with operators and connecting streams
 *
 * @param dag The directed acyclic graph of operators to populate
 * @param conf The configuration
 */
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  // create operators
  LineReader lineReader = dag.addOperator("lineReader", new LineReader());
  WordReader wordReader = dag.addOperator("wordReader", new WordReader());
  WindowWordCount windowWordCount = dag.addOperator("windowWordCount", new WindowWordCount());
  FileWordCount fileWordCount = dag.addOperator("fileWordCount", new FileWordCount());
  WordCountWriter wcWriter = dag.addOperator("wcWriter", new WordCountWriter());
  ConsoleOutputOperator console = dag.addOperator("console", new ConsoleOutputOperator());
  console.setStringFormat("wordCount: %s");

  // create streams
  dag.addStream("lines", lineReader.output, wordReader.input);
  dag.addStream("control", lineReader.control, fileWordCount.control);
  dag.addStream("words", wordReader.output, windowWordCount.input);
  dag.addStream("windowWordCounts", windowWordCount.output, fileWordCount.input);
  dag.addStream("fileWordCounts", fileWordCount.fileOutput, wcWriter.input);

  if (PubSubHelper.isGatewayConfigured(dag)) {
    // add query support
    URI uri = PubSubHelper.getURI(dag);
    AppDataSnapshotServerMap snapshotServerFile = dag.addOperator("snapshotServerFile", new AppDataSnapshotServerMap());
    AppDataSnapshotServerMap snapshotServerGlobal = dag.addOperator("snapshotServerGlobal", new AppDataSnapshotServerMap());

    String snapshotServerJSON = SchemaUtils.jarResourceFileToString(SNAPSHOT_SCHEMA);
    snapshotServerFile.setSnapshotSchemaJSON(snapshotServerJSON);
    snapshotServerGlobal.setSnapshotSchemaJSON(snapshotServerJSON);

    PubSubWebSocketAppDataQuery wsQueryFile = new PubSubWebSocketAppDataQuery();
    PubSubWebSocketAppDataQuery wsQueryGlobal = new PubSubWebSocketAppDataQuery();
    wsQueryFile.setUri(uri);
    wsQueryGlobal.setUri(uri);

    snapshotServerFile.setEmbeddableQueryInfoProvider(wsQueryFile);
    snapshotServerGlobal.setEmbeddableQueryInfoProvider(wsQueryGlobal);

    PubSubWebSocketAppDataResult wsResultFile = dag.addOperator("wsResultFile", new PubSubWebSocketAppDataResult());
    PubSubWebSocketAppDataResult wsResultGlobal = dag.addOperator("wsResultGlobal", new PubSubWebSocketAppDataResult());
    wsResultFile.setUri(uri);
    wsResultGlobal.setUri(uri);

    Operator.InputPort<String> queryResultFilePort = wsResultFile.input;
    Operator.InputPort<String> queryResultGlobalPort = wsResultGlobal.input;

    dag.addStream("WordCountsFile", fileWordCount.outputPerFile, snapshotServerFile.input, console.input);
    dag.addStream("WordCountsGlobal", fileWordCount.outputGlobal, snapshotServerGlobal.input);
    dag.addStream("ResultFile", snapshotServerFile.queryResult, queryResultFilePort);
    dag.addStream("ResultGlobal", snapshotServerGlobal.queryResult, queryResultGlobalPort);
  } else {
    //throw new RuntimeException("Error: No GATEWAY_CONNECT_ADDRESS");
    dag.addStream("WordCounts", fileWordCount.outputPerFile, console.input);
  }

  LOG.info("done with populateDAG, isDebugEnabled = " + LOG.isDebugEnabled());
  LOG.info("Returning from populateDAG");
}
Example 20
Source File: ApplicationWithDerbySQL.java From attic-apex-malhar with Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  String symbolStr = conf.get(ApplicationWithDerbySQL.class.getName() + ".tickerSymbols", "AABA,GOOG,AAPL,FB,AMZN,NFLX,IBM");
  String[] symbols = symbolStr.split(",");

  YahooFinanceCSVInputOperator input1 = dag.addOperator("input1", new YahooFinanceCSVInputOperator());
  YahooFinanceCSVInputOperator input2 = dag.addOperator("input2", new YahooFinanceCSVInputOperator());
  DerbySqlStreamOperator sqlOper = dag.addOperator("sqlOper", new DerbySqlStreamOperator());
  ConsoleOutputOperator consoleOperator = dag.addOperator("console", new ConsoleOutputOperator());

  for (String symbol : symbols) {
    input1.addSymbol(symbol);
    input2.addSymbol(symbol);
  }
  input1.addFormat("s0");
  input1.addFormat("l1");
  input2.addFormat("s0");
  input2.addFormat("e0");
  input2.addFormat("b4");

  AbstractSqlStreamOperator.InputSchema inputSchema1 = new AbstractSqlStreamOperator.InputSchema("t1");
  AbstractSqlStreamOperator.InputSchema inputSchema2 = new AbstractSqlStreamOperator.InputSchema("t2");
  inputSchema1.setColumnInfo("s0", "varchar(100)", true); // symbol
  inputSchema1.setColumnInfo("l1", "float", false);       // last trade
  inputSchema2.setColumnInfo("s0", "varchar(100)", true); // symbol
  inputSchema2.setColumnInfo("e0", "float", false);       // EPS
  inputSchema2.setColumnInfo("b4", "float", false);       // Book value
  sqlOper.setInputSchema(0, inputSchema1);
  sqlOper.setInputSchema(1, inputSchema2);

  // Calculate PE Ratio and PB Ratio using SQL
  sqlOper.addExecStatementString("SELECT SESSION.t1.s0 AS symbol, SESSION.t1.l1 / SESSION.t2.e0 AS pe_ratio, SESSION.t1.l1 / SESSION.t2.b4 AS pb_ratio FROM SESSION.t1,SESSION.t2 WHERE SESSION.t1.s0 = SESSION.t2.s0");

  dag.addStream("input1_sql", input1.outputPort, sqlOper.in1);
  dag.addStream("input2_sql", input2.outputPort, sqlOper.in2);
  dag.addStream("result_console", sqlOper.result, consoleOperator.input);
}