Java Code Examples for org.apache.commons.lang3.time.StopWatch#stop()

The following examples show how to use org.apache.commons.lang3.time.StopWatch#stop(). Each example is taken from an open-source project; the source file, project, and license are noted above it.
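Before the project examples, here is a minimal, self-contained sketch of the stop() lifecycle. The class name StopWatchDemo and the Thread.sleep() calls are illustrative stand-ins for real work, and StopWatch.createStarted() requires Commons Lang 3.5 or later:

import org.apache.commons.lang3.time.StopWatch;

public class StopWatchDemo {
    public static void main(String[] args) throws InterruptedException {
        // createStarted() builds the watch and starts it in one call (Lang 3.5+)
        StopWatch watch = StopWatch.createStarted();
        Thread.sleep(100); // stand-in for the work being timed
        // stop() freezes the elapsed time; stopping a watch that is not running throws IllegalStateException
        watch.stop();
        System.out.println("First phase took " + watch.getTime() + " ms");

        // reset() returns the watch to its unstarted state so it can be reused
        watch.reset();
        watch.start();
        Thread.sleep(50);
        watch.stop();
        System.out.println("Second phase took " + watch.getTime() + " ms");
    }
}

As several examples below show, stop() is typically paired with getTime() (milliseconds) or getNanoTime() for logging, and with reset() when the same watch times more than one phase.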
Example 1
Source File: ElasticSearchServer.java    From vind with Apache License 2.0
@Override
public GetResult execute(RealTimeGet search, DocumentFactory assets) {
    try {
        final StopWatch elapsedTime = StopWatch.createStarted();
        final MultiGetResponse response = elasticSearchClient.realTimeGet(search.getValues());
        elapsedTime.stop();

        if (response != null) {
            return ResultUtils.buildRealTimeGetResult(response, search, assets, elapsedTime.getTime()).setElapsedTime(elapsedTime.getTime());
        } else {
            log.error("Null result from ElasticClient");
            throw new SearchServerException("Null result from ElasticClient");
        }

    } catch (ElasticsearchException | IOException e) {
        log.error("Cannot execute realTime get query");
        throw new SearchServerException("Cannot execute realTime get query", e);
    }
}
 
Example 2
Source File: ElasticSearchServer.java    From vind with Apache License 2.0
private IndexResult indexSingleDocument(Document doc, int withinMs) {
    log.warn("Parameter 'within' not in use in elastic search backend");
    final StopWatch elapsedTime = StopWatch.createStarted();
    final Map<String,Object> document = DocumentUtil.createInputDocument(doc);

    try {
        if (elasticClientLogger.isTraceEnabled()) {
            elasticClientLogger.trace(">>> add({})", doc.getId());
        } else {
            elasticClientLogger.debug(">>> add({})", doc.getId());
        }

        final BulkResponse response = this.elasticSearchClient.add(document);
        elapsedTime.stop();
        return new IndexResult(response.getTook().getMillis()).setElapsedTime(elapsedTime.getTime());

    } catch (ElasticsearchException | IOException e) {
        log.error("Cannot index document {}", document.get(FieldUtil.ID) , e);
        throw new SearchServerException("Cannot index document", e);
    }
}
 
Example 3
Source File: AbstractTimerTask.java    From chronus with Apache License 2.0
@Override
public void run() {
    StopWatch stopWatch = new StopWatch();
    stopWatch.start();
    try {
        ServiceContext serviceContext = ServiceContext.getContext();
        serviceContext.setRequestNo(ServiceContext.genUniqueId());
        ContextLog4j2Util.addContext2ThreadContext();

        this.process();
    } catch (Throwable e) {
        log.error("{}:{} error", this.type, this.name, e);
    } finally {
        stopWatch.stop();
        log.info("{}:{} execution finished, took {} ms", this.type, this.name, stopWatch.getTime());
    }
}
 
Example 4
Source File: BaseUpdateChannelCommand.java    From uyuni with GNU General Public License v2.0
/**
 * Private helper method to create a new UpdateErrataCacheEvent and publish it to the
 * MessageQueue.
 */
private void publishUpdateErrataCacheEvent() {
    StopWatch sw = new StopWatch();
    if (log.isDebugEnabled()) {
        log.debug("Updating errata cache");
        sw.start();
    }

    UpdateErrataCacheEvent uece =
        new UpdateErrataCacheEvent(UpdateErrataCacheEvent.TYPE_ORG);
    uece.setOrgId(user.getOrg().getId());
    MessageQueue.publish(uece);

    if (log.isDebugEnabled()) {
        sw.stop();
        log.debug("Finished Updating errata cache. Took [" +
                sw.getTime() + "]");
    }
}
 
Example 5
Source File: MetaMinerExample.java    From AILibs with GNU Affero General Public License v3.0
public static void main(final String[] args) throws Exception {
	// Load data for a data set and create a train-test-split
	logger.info("Load data.");
	WekaInstances data = new WekaInstances(OpenMLDatasetReader.deserializeDataset(40984));
	List<IWekaInstances> split = WekaUtil.getStratifiedSplit(data, 0, .7f);

	// Initialize meta ML-Plan and let it run for 60 seconds
	logger.info("Configure ML-Plan");
	MetaMLPlan metaMLPlan = new MetaMLPlan(data);
	metaMLPlan.setCPUs(4);
	metaMLPlan.setTimeOutInSeconds(60);
	metaMLPlan.setMetaFeatureSetName("all");
	metaMLPlan.setDatasetSetName("metaminer_standard");

	// Limit results to 20 pipelines so that the conversion / downloading doesn't take too long
	logger.info("Build meta components");
	StopWatch watch = new StopWatch();
	watch.start();
	metaMLPlan.buildMetaComponents(args[0], args[1], args[2], 5);
	watch.stop();
	logger.info("Find solution");
	metaMLPlan.buildClassifier(split.get(0).getInstances());

	// Evaluate solution produced by meta mlplan
	logger.info("Evaluate.");
	double score = MLEvaluationUtil.getLossForTrainedClassifier(new WekaClassifier(metaMLPlan), split.get(1), EClassificationPerformanceMeasure.ERRORRATE);
	logger.info("Error Rate of the solution produced by Meta ML-Plan: {}", score);
	logger.info("Time in Seconds: {}",watch.getTime()/1000);
}
 
Example 6
Source File: TestBulkWithout.java    From sqlg with MIT License
@Test
public void testBulkWithinMultipleHasContainers() throws InterruptedException {
    StopWatch stopWatch = new StopWatch();
    stopWatch.start();
    Vertex god = this.sqlgGraph.addVertex(T.label, "God");
    Vertex person1 = this.sqlgGraph.addVertex(T.label, "Person", "idNumber", 1, "name", "pete");
    god.addEdge("creator", person1);
    Vertex person2 = this.sqlgGraph.addVertex(T.label, "Person", "idNumber", 2, "name", "pete");
    god.addEdge("creator", person2);
    Vertex person3 = this.sqlgGraph.addVertex(T.label, "Person", "idNumber", 3, "name", "john");
    god.addEdge("creator", person3);
    Vertex person4 = this.sqlgGraph.addVertex(T.label, "Person", "idNumber", 4, "name", "pete");
    god.addEdge("creator", person4);
    Vertex person5 = this.sqlgGraph.addVertex(T.label, "Person", "idNumber", 5, "name", "pete");
    god.addEdge("creator", person5);
    Vertex person6 = this.sqlgGraph.addVertex(T.label, "Person", "idNumber", 6, "name", "pete");
    god.addEdge("creator", person6);
    Vertex person7 = this.sqlgGraph.addVertex(T.label, "Person", "idNumber", 7, "name", "pete");
    god.addEdge("creator", person7);
    Vertex person8 = this.sqlgGraph.addVertex(T.label, "Person", "idNumber", 8, "name", "pete");
    god.addEdge("creator", person8);
    Vertex person9 = this.sqlgGraph.addVertex(T.label, "Person", "idNumber", 9, "name", "pete");
    god.addEdge("creator", person9);
    Vertex person10 = this.sqlgGraph.addVertex(T.label, "Person", "idNumber", 10, "name", "pete");
    god.addEdge("creator", person10);

    this.sqlgGraph.tx().commit();
    stopWatch.stop();
    System.out.println(stopWatch.toString());
    stopWatch.reset();
    stopWatch.start();
    testBulkWithinMultipleHasContainers_assert(this.sqlgGraph);
    if (this.sqlgGraph1 != null) {
        Thread.sleep(SLEEP_TIME);
        testBulkWithinMultipleHasContainers_assert(this.sqlgGraph1);
    }
    stopWatch.stop();
    System.out.println(stopWatch.toString());
}
 
Example 7
Source File: NetSuiteClientServiceIT.java    From components with Apache License 2.0
@Test
public void testRetrieveCustomRecordTypes() throws Exception {
    StopWatch stopWatch = new StopWatch();
    stopWatch.start();
    Collection<RecordTypeInfo> recordTypes = connection.getMetaDataSource().getRecordTypes();
    stopWatch.stop();

    for (RecordTypeInfo recordType : recordTypes) {
        logger.debug("Record type info: {}", recordType);
    }
}
 
Example 8
Source File: NodeComponent.java    From liteFlow with Apache License 2.0
public void execute() throws Exception{
	Slot slot = this.getSlot();
	LOG.info("[{}]:[O]start component[{}] execution",slot.getRequestId(),this.getClass().getSimpleName());
	slot.addStep(new CmpStep(nodeId, CmpStepType.START));
	StopWatch stopWatch = new StopWatch();
	stopWatch.start();

	process();

	stopWatch.stop();
	long timeSpent = stopWatch.getTime();

	slot.addStep(new CmpStep(nodeId, CmpStepType.END));

	// performance statistics
	CompStatistics statistics = new CompStatistics();
	statistics.setComponentClazzName(this.getClass().getSimpleName());
	statistics.setTimeSpent(timeSpent);
	MonitorBus.load().addStatistics(statistics);


	if(this instanceof NodeCondComponent){
		String condNodeId = slot.getCondResult(this.getClass().getName());
		if(StringUtils.isNotBlank(condNodeId)){
			Node thisNode = FlowBus.getNode(nodeId);
			Node condNode = thisNode.getCondNode(condNodeId);
			if(condNode != null){
				NodeComponent condComponent = condNode.getInstance();
				condComponent.setSlotIndex(slotIndexTL.get());
				condComponent.execute();
			}
		}
	}

	LOG.debug("[{}]:component[{}] finished in {} milliseconds", slot.getRequestId(), this.getClass().getSimpleName(), timeSpent);
}
 
Example 9
Source File: AeronNDArraySerdeTest.java    From deeplearning4j with Apache License 2.0
@Test
public void timeOldVsNew() throws Exception {
    int numTrials = 1000;
    long oldTotal = 0;
    long newTotal = 0;
    INDArray arr = Nd4j.create(100000);
    Nd4j.getCompressor().compressi(arr, "GZIP");
    for (int i = 0; i < numTrials; i++) {
        StopWatch oldStopWatch = new StopWatch();
        BufferedOutputStream bos = new BufferedOutputStream(new ByteArrayOutputStream((int) arr.length()));
        DataOutputStream dos = new DataOutputStream(bos);
        oldStopWatch.start();
        Nd4j.write(arr, dos);
        oldStopWatch.stop();
        // System.out.println("Old " + oldStopWatch.getNanoTime());
        oldTotal += oldStopWatch.getNanoTime();
        StopWatch newStopWatch = new StopWatch();
        newStopWatch.start();
        AeronNDArraySerde.toBuffer(arr);
        newStopWatch.stop();
        //  System.out.println("New " + newStopWatch.getNanoTime());
        newTotal += newStopWatch.getNanoTime();

    }

    oldTotal /= numTrials;
    newTotal /= numTrials;
    System.out.println("Old avg " + oldTotal + " New avg " + newTotal);

}
 
Example 10
Source File: SpecialPriceTokenGenerator.java    From alf.io with GNU General Public License v3.0
public void generatePendingCodes() {
    StopWatch stopWatch = new StopWatch();
    log.trace("start pending codes generation");
    stopWatch.start();
    specialPriceRepository.findWaitingElements().forEach(this::generateCode);
    stopWatch.stop();
    log.trace("end. Took {} ms", stopWatch.getTime());
}
 
Example 11
Source File: TelemetryServiceIT.java    From snowflake-jdbc with Apache License 2.0
@Ignore
@Test
public void stressTestCreateUrgentEvent()
{
  // this log will be delivered to snowflake
  TelemetryService service = TelemetryService.getInstance();
  // send one http request for each event
  StopWatch sw = new StopWatch();
  sw.start();
  int rate = 1;
  int sent = 0;
  int duration = 5;
  while (sw.getTime() < duration * 1000)
  {
    int toSend = (int) (sw.getTime() / 1000) * rate - sent;
    for (int i = 0; i < toSend; i++)
    {
      TelemetryEvent log = new TelemetryEvent.LogBuilder()
          .withName("StressUrgentTestLog")
          .withValue("This is an example urgent log for stress test " + sent)
          .build();
      System.out.println("stress test: " + sent++ + " sent.");
      service.report(log);
    }
  }
  sw.stop();
}
 
Example 12
Source File: TelemetryServiceIT.java    From snowflake-jdbc with Apache License 2.0
@Ignore
@Test
public void stressTestCreateLog()
{
  // this log will be delivered to snowflake
  TelemetryService service = TelemetryService.getInstance();
  // send one http request for each event
  StopWatch sw = new StopWatch();
  sw.start();
  int rate = 50;
  int sent = 0;
  int duration = 60;
  while (sw.getTime() < duration * 1000)
  {
    int toSend = (int) (sw.getTime() / 1000) * rate - sent;
    for (int i = 0; i < toSend; i++)
    {
      TelemetryEvent log = new TelemetryEvent.LogBuilder()
          .withName("StressTestLog")
          .withValue("This is an example log for stress test " + sent)
          .build();
      System.out.println("stress test: " + sent++ + " sent.");
      service.report(log);
    }
  }
  sw.stop();
}
 
Example 13
Source File: GoLoaderIntegrationRunner.java    From owltools with BSD 3-Clause "New" or "Revised" License
@Test
@Ignore("This test requires a missing resource.")
public void testLoadGoTaxon() throws Exception {
	ParserWrapper pw = new ParserWrapper();
	pw.addIRIMapper(new CatalogXmlIRIMapper(catalogXml));
	OWLGraphWrapper g = pw.parseToOWLGraph(goFile);
	
	GafSolrDocumentLoaderIntegrationRunner.printMemoryStats();
	GafSolrDocumentLoaderIntegrationRunner.gc();
	GafSolrDocumentLoaderIntegrationRunner.printMemoryStats();
	
	ConfigManager configManager = new ConfigManager();
	configManager.add("src/test/resources/test-ont-config.yaml");
	
	StopWatch watch = new StopWatch();
	watch.start();
	FlexCollection c = new FlexCollection(configManager, g);
	
	GafSolrDocumentLoaderIntegrationRunner.printMemoryStats();
	GafSolrDocumentLoaderIntegrationRunner.gc();
	GafSolrDocumentLoaderIntegrationRunner.printMemoryStats();
	
	FlexSolrDocumentLoader loader = new FlexSolrDocumentLoader((SolrServer)null, c) {

		@Override
		protected void addToServer(Collection<SolrInputDocument> docs)
				throws SolrServerException, IOException {
			solrCounter += docs.size();
		}
		
	};
	loader.load();
	watch.stop();
	GafSolrDocumentLoaderIntegrationRunner.printMemoryStats();
	System.out.println("Loaded "+solrCounter+" Solr docs in "+watch);
	assertTrue(solrCounter > 0);
}
 
Example 14
Source File: AeronNDArraySerdeTest.java    From nd4j with Apache License 2.0
@Test
public void timeOldVsNew() throws Exception {
    int numTrials = 1000;
    long oldTotal = 0;
    long newTotal = 0;
    INDArray arr = Nd4j.create(100000);
    Nd4j.getCompressor().compressi(arr, "GZIP");
    for (int i = 0; i < numTrials; i++) {
        StopWatch oldStopWatch = new StopWatch();
        // FIXME: int cast
        BufferedOutputStream bos = new BufferedOutputStream(new ByteArrayOutputStream((int) arr.length()));
        DataOutputStream dos = new DataOutputStream(bos);
        oldStopWatch.start();
        Nd4j.write(arr, dos);
        oldStopWatch.stop();
        // System.out.println("Old " + oldStopWatch.getNanoTime());
        oldTotal += oldStopWatch.getNanoTime();
        StopWatch newStopWatch = new StopWatch();
        newStopWatch.start();
        AeronNDArraySerde.toBuffer(arr);
        newStopWatch.stop();
        //  System.out.println("New " + newStopWatch.getNanoTime());
        newTotal += newStopWatch.getNanoTime();

    }

    oldTotal /= numTrials;
    newTotal /= numTrials;
    System.out.println("Old avg " + oldTotal + " New avg " + newTotal);

}
 
Example 15
Source File: DBPopulatorRunner.java    From gazpachoquest with GNU General Public License v3.0
public static void main(final String... args) {
    ClassPathXmlApplicationContext ctx = new ClassPathXmlApplicationContext("dbpopulator-datasource-context.xml",
            "dbpopulator-jpa-context.xml", "services-context.xml", "facades-context.xml", "components-context.xml");
    DBPopulator populator = ctx.getBean(DBPopulator.class);
    StopWatch stopWatch = new StopWatch();
    logger.info("Populating database");
    stopWatch.start();
    ShiroLogin.login();
    populator.populate();
    stopWatch.stop();
    ShiroLogin.logout();
    logger.info("Database populated in {} {}", stopWatch.getTime(), "ms");

    ctx.close();
}
 
Example 16
Source File: AmpStage.java    From RAMPART with GNU General Public License v3.0
/**
 * Dispatches amp stage to the specified environments
 *
 * @param executionContext The environment to dispatch jobs to
 * @throws ProcessExecutionException Thrown if there is an issue during execution of an external process
 * @throws InterruptedException Thrown if user has interrupted the process during execution
 */
@Override
public ExecutionResult execute(ExecutionContext executionContext) throws ProcessExecutionException, InterruptedException {

    try {

        StopWatch stopWatch = new StopWatch();
        stopWatch.start();

        // Make a shortcut to the args
        Args args = this.getArgs();

        log.info("Starting AMP stage " + args.getIndex());

        // Make sure reads file exists
        if (args.getInputAssembly() == null || !args.getInputAssembly().exists()) {
            throw new IOException("Input file for stage: " + args.getIndex() + " does not exist: " +
                    (args.getInputAssembly() == null ? "null" : args.getInputAssembly().getAbsolutePath()));
        }

        // Make sure the inputs are reasonable
        List<Library> selectedLibs = this.validateInputs(args.getIndex(), args.getInputs(), args.getSample());

        // Create output directory
        if (!args.getOutputDir().exists()) {
            args.getOutputDir().mkdir();
        }

        // Create the configuration for this stage
        AssemblyEnhancer ampProc = this.makeStage(args, selectedLibs);

        // Set a suitable execution context
        ExecutionContext ecCopy = executionContext.copy();
        ecCopy.setContext("AMP-" + args.getIndex(), true, new File(args.getOutputDir(), "amp-" + args.getIndex() + ".log"));
        if (ecCopy.usingScheduler()) {
            ecCopy.getScheduler().getArgs().setThreads(args.getThreads());
            ecCopy.getScheduler().getArgs().setMemoryMB(args.getMemory());
        }

        // Do any setup for this process
        ampProc.setup();

        // Execute the AMP stage
        ExecutionResult result = ampProc.execute(ecCopy);

        if (!ampProc.getOutputFile().exists()) {
            throw new ProcessExecutionException(2, "AMP stage \"" + args.index + "\" did not produce an output file");
        }

        // Create links for outputs from this assembler to known locations
        this.getConanProcessService().createLocalSymbolicLink(ampProc.getOutputFile(), args.getOutputFile());

        stopWatch.stop();

        log.info("Finished AMP stage " + args.getIndex());

        return new DefaultExecutionResult(
                Integer.toString(args.index) + "-" + args.tool,
                0,
                result.getOutput(),
                null,
                -1,
                new ResourceUsage(result.getResourceUsage() == null ? 0 : result.getResourceUsage().getMaxMem(),
                        stopWatch.getTime() / 1000,
                        result.getResourceUsage() == null ? 0 : result.getResourceUsage().getCpuTime()));
    }
    catch (IOException | ConanParameterException e) {
        throw new ProcessExecutionException(-1, e);
    }
}
 
Example 17
Source File: TestBatchedStreaming.java    From sqlg with MIT License
@Test
public void testStreamingWithBatchSize() throws InterruptedException {
    int BATCH_SIZE = 100;
    StopWatch stopWatch = new StopWatch();
    stopWatch.start();
    LinkedHashMap<String, Object> properties = new LinkedHashMap<>();
    this.sqlgGraph.tx().streamingWithLockBatchModeOn();
    List<Pair<Vertex, Vertex>> uids = new ArrayList<>();
    String uuidCache1 = null;
    String uuidCache2 = null;
    for (int i = 1; i <= 1000; i++) {
        String uuid1 = UUID.randomUUID().toString();
        String uuid2 = UUID.randomUUID().toString();
        if (i == 50) {
            uuidCache1 = uuid1;
            uuidCache2 = uuid2;
        }
        properties.put("id", uuid1);
        Vertex v1 = this.sqlgGraph.addVertex("Person", properties);
        properties.put("id", uuid2);
        Vertex v2 = this.sqlgGraph.addVertex("Person", properties);
        uids.add(Pair.of(v1, v2));
        if (i % (BATCH_SIZE / 2) == 0) {
            this.sqlgGraph.tx().flush();
            this.sqlgGraph.tx().streamingWithLockBatchModeOn();
            for (Pair<Vertex, Vertex> uid : uids) {
                uid.getLeft().addEdge("friend", uid.getRight());
            }
            //This is needed because the number of edges are less than the batch size so it will not be auto flushed
            this.sqlgGraph.tx().flush();
            uids.clear();
            this.sqlgGraph.tx().streamingWithLockBatchModeOn();
        }
    }
    this.sqlgGraph.tx().commit();
    stopWatch.stop();
    System.out.println(stopWatch.toString());

    testStreamingWithBatchSize(this.sqlgGraph, uuidCache1, uuidCache2);
    if (this.sqlgGraph1 != null) {
        Thread.sleep(1000);
        testStreamingWithBatchSize(this.sqlgGraph1, uuidCache1, uuidCache2);
    }
}
 
Example 18
Source File: HMLGradientBoostingTest.java    From pyramid with Apache License 2.0
    /**
     * add 2 fake labels in spam data set,
     * if x=spam and x_0<0.1, also label it as 2
     * if x=spam and x_1<0.1, also label it as 3
     * @throws Exception
     */
    static void test3_build() throws Exception{


        ClfDataSet singleLabeldataSet = TRECFormat.loadClfDataSet(new File(DATASETS,"spam/trec_data/train.trec"),
                DataSetType.CLF_DENSE, true);
        int numDataPoints = singleLabeldataSet.getNumDataPoints();
        int numFeatures = singleLabeldataSet.getNumFeatures();
        MultiLabelClfDataSet dataSet = MLClfDataSetBuilder.getBuilder()
                .numDataPoints(numDataPoints).numFeatures(numFeatures)
                .numClasses(4).build();
        int[] labels = singleLabeldataSet.getLabels();
        for (int i=0;i<numDataPoints;i++){
            dataSet.addLabel(i,labels[i]);
            if (labels[i]==1 && singleLabeldataSet.getRow(i).get(0)<0.1){
                dataSet.addLabel(i,2);
            }
            if (labels[i]==1 && singleLabeldataSet.getRow(i).get(1)<0.1){
                dataSet.addLabel(i,3);
            }
            for (int j=0;j<numFeatures;j++){
                double value = singleLabeldataSet.getRow(i).get(j);
                dataSet.setFeatureValue(i,j,value);
            }
        }


        List<String> extLabels = new ArrayList<>();
        extLabels.add("non_spam");
        extLabels.add("spam");
        extLabels.add("fake2");
        extLabels.add("fake3");
        LabelTranslator labelTranslator = new LabelTranslator(extLabels);
        dataSet.setLabelTranslator(labelTranslator);

        List<MultiLabel> assignments = new ArrayList<>();
        assignments.add(new MultiLabel().addLabel(0));
        assignments.add(new MultiLabel().addLabel(1));
        assignments.add(new MultiLabel().addLabel(1).addLabel(2));
        assignments.add(new MultiLabel().addLabel(1).addLabel(3));
        assignments.add(new MultiLabel().addLabel(1).addLabel(2).addLabel(3));
        HMLGradientBoosting boosting = new HMLGradientBoosting(4,assignments);


        HMLGBConfig trainConfig = new HMLGBConfig.Builder(dataSet)
                .numLeaves(2).learningRate(0.1).numSplitIntervals(1000).minDataPerLeaf(2)
                .dataSamplingRate(1).featureSamplingRate(1).build();
        System.out.println(Arrays.toString(trainConfig.getActiveFeatures()));


        HMLGBTrainer trainer = new HMLGBTrainer(trainConfig,boosting);

        StopWatch stopWatch = new StopWatch();
        stopWatch.start();
        for (int round =0;round<100;round++){
            System.out.println("round="+round);
            trainer.iterate();
            System.out.println("accuracy="+Accuracy.accuracy(boosting,dataSet));
//            System.out.println(Arrays.toString(boosting.getGradients(0)));
//            System.out.println(Arrays.toString(boosting.getGradients(1)));
//            System.out.println(Arrays.toString(boosting.getGradients(2)));
//            System.out.println(Arrays.toString(boosting.getGradients(3)));

        }
        stopWatch.stop();
        System.out.println(stopWatch);
//        System.out.println(boosting);
        for (int i=0;i<numDataPoints;i++){
            Vector featureRow = dataSet.getRow(i);
            MultiLabel label = dataSet.getMultiLabels()[i];
            MultiLabel prediction = boosting.predict(featureRow);
//            System.out.println("label="+label);
//            System.out.println(boosting.calAssignmentScores(featureRow,assignments.get(0)));
//            System.out.println(boosting.calAssignmentScores(featureRow,assignments.get(1)));
//            System.out.println("prediction="+prediction);
            if (!label.equals(prediction)){
                System.out.println(i);
                System.out.println("label="+label);
                System.out.println("prediction="+prediction);
            }
        }
        System.out.println("accuracy");
        System.out.println(Accuracy.accuracy(boosting,dataSet));
        boosting.serialize(new File(TMP,"/hmlgb/boosting.ser"));


    }
 
Example 19
Source File: ElasticSearchServer.java    From vind with Apache License 2.0
@Override
public SearchResult execute(FulltextSearch search, DocumentFactory factory) {
    final StopWatch elapsedtime = StopWatch.createStarted();
    final SearchSourceBuilder query = ElasticQueryBuilder.buildQuery(search, factory);

    //query
    try {
        elasticClientLogger.debug(">>> query({})", query.toString());
        final SearchResponse response = elasticSearchClient.query(query);
        if(Objects.nonNull(response)
                && Objects.nonNull(response.getHits())
                && Objects.nonNull(response.getHits().getHits())){
            //TODO: if nested doc search is implemented
            //final Map<String,Integer> childCounts = SolrUtils.getChildCounts(response);

            final List<Document> documents = Arrays.stream(response.getHits().getHits())
                    .map(hit -> DocumentUtil.buildVindDoc(hit, factory, search.getSearchContext()))
                    .collect(Collectors.toList());

            String spellcheckText = null;
            if ( search.isSpellcheck() && CollectionUtils.isEmpty(documents)) {

                //if no results, try spellchecker (if defined and if spellchecked query differs from original)
                final List<String> spellCheckedQuery = ElasticQueryBuilder.getSpellCheckedQuery(response);

                //query with checked query

                if(spellCheckedQuery != null && CollectionUtils.isNotEmpty(spellCheckedQuery)) {
                    final Iterator<String> iterator = spellCheckedQuery.iterator();
                    while(iterator.hasNext()) {
                        final String text = iterator.next();
                        final SearchSourceBuilder spellcheckQuery =
                                ElasticQueryBuilder.buildQuery(search.text(text).spellcheck(false), factory);
                        final SearchResponse spellcheckResponse = elasticSearchClient.query(spellcheckQuery);
                        documents.addAll(Arrays.stream(spellcheckResponse.getHits().getHits())
                                .map(hit -> DocumentUtil.buildVindDoc(hit, factory, search.getSearchContext()))
                                .collect(Collectors.toList()));
                    }
                }
            }

            // Building Vind Facet Results
            final FacetResults facetResults =
                    ResultUtils.buildFacetResults(
                            response.getAggregations(),
                            factory,
                            search.getFacets(),
                            search.getSearchContext());

            elapsedtime.stop();

            final long totalHits = response.getHits().getTotalHits().value;
            switch(search.getResultSet().getType()) {
                case page:{
                    return new PageResult(totalHits, response.getTook().getMillis(), documents, search, facetResults, this, factory).setElapsedTime(elapsedtime.getTime());
                }
                case slice: {
                    return new SliceResult(totalHits, response.getTook().getMillis(), documents, search, facetResults, this, factory).setElapsedTime(elapsedtime.getTime());
                }
                default:
                    return new PageResult(totalHits, response.getTook().getMillis(), documents, search, facetResults, this, factory).setElapsedTime(elapsedtime.getTime());
            }
        } else {
            throw new ElasticsearchException("Empty result from ElasticClient");
        }

    } catch (ElasticsearchException | IOException e) {
        throw new SearchServerException(String.format("Cannot issue query: %s",e.getMessage()), e);
    }
}
 
Example 20
Source File: Select.java    From RAMPART with GNU General Public License v3.0
@Override
public TaskResult executeSample(Mecq.Sample sample, ExecutionContext executionContext) throws ProcessExecutionException, InterruptedException, IOException {


    StopWatch stopWatch = new StopWatch();
    stopWatch.start();

    Args args = this.getArgs();


    List<ExecutionResult> results = new ArrayList<>();

    // If we have a reference run quast on it to get some stats
    if (args.getOrganism().getReference() != null && args.getOrganism().getReference().getPath() != null) {

        List<File> inputFiles = new ArrayList<>();
        inputFiles.add(args.getOrganism().getReference().getPath());
        QuastV23.Args refArgs = new QuastV23.Args();
        refArgs.setInputFiles(inputFiles);
        refArgs.setFindGenes(true);
        refArgs.setThreads(args.getThreads());
        refArgs.setEukaryote(args.getOrganism().getPloidy() > 1);
        refArgs.setOutputDir(args.getRefOutDir(sample));

        QuastV23 refQuast = new QuastV23(this.conanExecutorService, refArgs);

        results.add(this.conanExecutorService.executeProcess(refQuast, args.getRefOutDir(sample), args.jobPrefix + "-refquast", args.getThreads(), args.getMemoryMb(), args.runParallel));
    }

    stopWatch.stop();

    return new DefaultTaskResult(sample.name + "-" + args.stage.getOutputDirName(), true, results, stopWatch.getTime() / 1000L);
}