org.quartz.JobExecutionException Java Examples

The following examples show how to use org.quartz.JobExecutionException. Each example is taken from an open-source project; the source file, project, and license are noted above each snippet.
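Before diving into the project examples, here is a minimal sketch of the pattern they all build on: a Quartz Job signals failure by throwing JobExecutionException from execute(), typically wrapping the underlying cause. This sketch is for orientation only and is not taken from any of the projects below.

import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;

public class MinimalJob implements Job {
    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        try {
            // ... the actual work of the job goes here ...
        } catch (Exception e) {
            // Wrap the failure so the scheduler can record it and notify listeners.
            throw new JobExecutionException(e);
        }
    }
}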
Example #1
Source File: GlassJobListener.java    From quartz-glass with Apache License 2.0
@Override
public void jobWasExecuted(JobExecutionContext context, JobExecutionException exception) {
    JobExecution execution = CurrentJobExecution.get();

    if (exception != null) {
        execution.error();

        JobDetail jobDetail = context.getJobDetail();
        JobLogs.error("Exception occurred while executing job "
                + Jobs.jobClass(jobDetail).getName(), exception);
    }

    executions.jobEnds(execution, context);

    JobLogs.setDefaultLevel();
    CurrentJobExecution.unset();

    CurrentJobExecutionContext.unset();
}
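
Quartz hands the JobExecutionException (or null on success) to every registered JobListener, which is how jobWasExecuted above receives it. As a hedged sketch, registering such a listener on a plain Quartz 2.x scheduler looks like this, assuming the listener can be instantiated directly (quartz-glass normally wires it through its own Spring configuration):

import static org.quartz.impl.matchers.EverythingMatcher.allJobs;

import org.quartz.Scheduler;
import org.quartz.impl.StdSchedulerFactory;

Scheduler scheduler = StdSchedulerFactory.getDefaultScheduler();
// jobWasExecuted() will now be called after every job, with the exception or null.
scheduler.getListenerManager().addJobListener(new GlassJobListener(), allJobs());
scheduler.start();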
 
Example #2
Source File: NativeJob.java    From AsuraFramework with Apache License 2.0
public void execute(JobExecutionContext context)
    throws JobExecutionException {

    JobDataMap data = context.getMergedJobDataMap();
    
    String command = data.getString(PROP_COMMAND);

    String parameters = data.getString(PROP_PARAMETERS);

    if (parameters == null) {
        parameters = "";
    }

    boolean wait = true;
    if(data.containsKey(PROP_WAIT_FOR_PROCESS)) {
        wait = data.getBooleanValue(PROP_WAIT_FOR_PROCESS);
    }
    boolean consumeStreams = false;
    if(data.containsKey(PROP_CONSUME_STREAMS)) {
        consumeStreams = data.getBooleanValue(PROP_CONSUME_STREAMS);
    }
        
    Integer exitCode = this.runNativeCommand(command, parameters, wait, consumeStreams);
    context.setResult(exitCode);
    
}
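
On the scheduling side, the keys read above arrive through the job's JobDataMap. A hedged sketch using the stock Quartz 2.x builder API and the standard NativeJob constants, assuming a started Scheduler named scheduler; the command and identity names are illustrative, and the AsuraFramework copy of NativeJob may predate these builders:

import static org.quartz.JobBuilder.newJob;
import static org.quartz.TriggerBuilder.newTrigger;

import org.quartz.JobDetail;
import org.quartz.Trigger;
import org.quartz.jobs.NativeJob; // stock Quartz location; AsuraFramework bundles its own copy

JobDetail job = newJob(NativeJob.class)
        .withIdentity("listTmp", "native")
        .usingJobData(NativeJob.PROP_COMMAND, "ls")          // required
        .usingJobData(NativeJob.PROP_PARAMETERS, "/tmp")     // optional, defaults to ""
        .usingJobData(NativeJob.PROP_WAIT_FOR_PROCESS, true) // block until the process exits
        .build();

Trigger trigger = newTrigger().withIdentity("listTmpNow", "native").startNow().build();
scheduler.scheduleJob(job, trigger); // with wait=true, the exit code becomes the job result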
 
Example #3
Source File: FeedsScheduleJob.java    From rebuild with GNU General Public License v3.0
@Override
protected void executeInternalSafe() throws JobExecutionException {
    Calendar time = CalendarUtils.getInstance();
    time.set(Calendar.SECOND, 0);
    time.set(Calendar.MILLISECOND, 0);

    Object[][] array = Application.createQueryNoFilter(
            "select createdBy,feedsId,content,contentMore from Feeds where scheduleTime = ? and type = ?")
            .setParameter(1, time.getTime())
            .setParameter(2, FeedsType.SCHEDULE.getMask())
            .array();

    if (array.length > 0) {
        doInternal(array);
    }
}
 
Example #4
Source File: NodeCleanupJob.java    From alfresco-repository with GNU Lesser General Public License v3.0
public void execute(JobExecutionContext context) throws JobExecutionException
{
    JobDataMap jobData = context.getJobDetail().getJobDataMap();
    // extract the content Cleanup to use
    Object nodeCleanupWorkerObj = jobData.get("nodeCleanupWorker");
    if (nodeCleanupWorkerObj == null || !(nodeCleanupWorkerObj instanceof NodeCleanupWorker))
    {
        throw new AlfrescoRuntimeException(
                "NodeCleanupJob data must contain valid 'nodeCleanupWorker' reference");
    }
    NodeCleanupWorker nodeCleanupWorker = (NodeCleanupWorker) nodeCleanupWorkerObj;
    List<String> cleanupLog = nodeCleanupWorker.doClean();
    // Done
    if (logger.isDebugEnabled())
    {
        logger.debug("Node cleanup log:");
        for (String log : cleanupLog)
        {
            logger.debug(log);
        }
    }
}
 
Example #5
Source File: HarvestAgentHeartBeatJob.java    From webcurator with Apache License 2.0
@Override
protected void executeInternal(JobExecutionContext aJobContext) throws JobExecutionException {
    int triggerState = -2;
    try {
        triggerState = aJobContext.getScheduler().getTriggerState(null, "HeartBeatTriggerGroup");
        aJobContext.getScheduler().pauseTriggerGroup("HeartBeatTriggerGroup");

        HarvestAgentStatusDTO status = harvestAgent.getStatus();
        notifier.heartbeat(status);

        aJobContext.getScheduler().resumeTriggerGroup("HeartBeatTriggerGroup");
    }
    catch (SchedulerException e) {
        e.printStackTrace();
        if (e.getCause() != null)
            e.getCause().printStackTrace();
        throw new JobExecutionException("Heartbeat failed controlling the scheduler. (triggerState is: " + triggerState + ")");
    }
}
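
The snippet above uses the older Quartz 1.x scheduler API (pauseTriggerGroup, getTriggerState(name, group)). For reference, a hedged sketch of the same trigger-group control on Quartz 2.x, where group operations take a GroupMatcher and state is looked up by TriggerKey; the trigger name is illustrative:

import static org.quartz.impl.matchers.GroupMatcher.triggerGroupEquals;

import org.quartz.Trigger.TriggerState;
import org.quartz.TriggerKey;

TriggerKey key = new TriggerKey("HeartBeatTrigger", "HeartBeatTriggerGroup"); // illustrative name
TriggerState state = scheduler.getTriggerState(key);                  // state of a single trigger
scheduler.pauseTriggers(triggerGroupEquals("HeartBeatTriggerGroup")); // pause the whole group
// ... send the heartbeat ...
scheduler.resumeTriggers(triggerGroupEquals("HeartBeatTriggerGroup"));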
 
Example #6
Source File: HbaseResourceSensitivityPollingJob.java    From Eagle with Apache License 2.0
@Override
public void execute(JobExecutionContext context)
        throws JobExecutionException {
    JobDataMap jobDataMap = context.getJobDetail().getJobDataMap();
    try {
        List<HbaseResourceSensitivityAPIEntity> hbaseResourceSensitivity =
                load(jobDataMap, "HbaseResourceSensitivityService");
        if (hbaseResourceSensitivity == null) {
            LOG.warn("Hbase resource sensitivity information is empty");
            return;
        }
        Map<String, HbaseResourceSensitivityAPIEntity> map = Maps.uniqueIndex(
                hbaseResourceSensitivity,
                new Function<HbaseResourceSensitivityAPIEntity, String>() {
                    @Override
                    public String apply(HbaseResourceSensitivityAPIEntity input) {
                        return input.getTags().get("hbaseResource");
                    }
                });
        ExternalDataCache.getInstance().setJobResult(getClass(), map);
    } catch (Exception ex) {
        LOG.error("Fail to load hbase resource sensitivity data", ex);
    }
}
 
Example #7
Source File: ContentCheckJob.java    From sakai with Educational Community License v2.0
public void execute(JobExecutionContext jobExecutionContext) throws JobExecutionException {
	MessageDigest digest;
	try {
		digest = MessageDigest.getInstance(ALGORITHM);
	} catch (NoSuchAlgorithmException e) {
		throw new JobExecutionException("Can't get digest for "+ ALGORITHM);
	}
	String[] types = {
			ResourceType.TYPE_HTML, ResourceType.MIME_TYPE_TEXT, ResourceType.TYPE_UPLOAD
	};
	IteratorChain allFiles = new IteratorChain();
	for (String type : types) {
		Iterator<ContentResource> resourceIterator = new ContentHostingIterator<ContentResource>(type);
		allFiles.addIterator(resourceIterator);
	}
	// Now check all the files.
	ContentResourceChecker checker = new ContentResourceChecker(allFiles, digest);
	checker.check();
}
 
Example #8
Source File: TestPullActions.java    From syncope with Apache License 2.0
@Override
public void beforeUpdate(
        final ProvisioningProfile<?, ?> profile,
        final SyncDelta delta,
        final EntityTO entityTO,
        final AnyUR anyUR) throws JobExecutionException {

    AttrPatch fullnamePatch = null;
    for (AttrPatch attrPatch : anyUR.getPlainAttrs()) {
        if ("fullname".equals(attrPatch.getAttr().getSchema())) {
            fullnamePatch = attrPatch;
        }
    }
    if (fullnamePatch == null) {
        fullnamePatch = new AttrPatch.Builder(new Attr.Builder("fullname").build()).
                operation(PatchOperation.ADD_REPLACE).
                build();
    }

    fullnamePatch.getAttr().getValues().clear();
    fullnamePatch.getAttr().getValues().add(String.valueOf(counter++));
}
 
Example #9
Source File: CleanKeyLock.java    From o2oa with GNU Affero General Public License v3.0
@Override
public void schedule(JobExecutionContext jobExecutionContext) throws Exception {
	try {
		List<KeyLock> targets = new ArrayList<>();
		Integer count = 0;
		do {
			try (EntityManagerContainer emc = EntityManagerContainerFactory.instance().create()) {
				targets = emc.listLessThan(KeyLock.class, JpaObject.createTime_FIELDNAME,
						DateUtils.addMinutes(new Date(), -2));
				if (!targets.isEmpty()) {
					emc.beginTransaction(KeyLock.class);
					for (KeyLock o : targets) {
						emc.remove(o);
						count++;
					}
					emc.commit();
				}
			}
		} while (!targets.isEmpty());
		logger.debug("定时清理值锁定:{}条.", count);
	} catch (Exception e) {
		throw new JobExecutionException(e);
	}
}
 
Example #10
Source File: ScriptJob.java    From engine with GNU General Public License v3.0
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    JobDataMap dataMap = context.getJobDetail().getJobDataMap();
    String scriptUrl = dataMap.getString(SCRIPT_URL_DATA_KEY);
    SiteContext siteContext = (SiteContext)dataMap.get(SITE_CONTEXT_DATA_KEY);
    ServletContext servletContext = (ServletContext)dataMap.get(SERVLET_CONTEXT_DATA_KEY);
    ScriptFactory scriptFactory = siteContext.getScriptFactory();

    if (scriptFactory == null) {
        throw new JobExecutionException(
            "No script factory associated with site context '" + siteContext.getSiteName() + "'");
    }

    SiteContext.setCurrent(siteContext);
    try {
        Map<String, Object> variables = new HashMap<>();
        GroovyScriptUtils.addJobScriptVariables(variables, servletContext);

        scriptFactory.getScript(scriptUrl).execute(variables);
    } catch (Exception e) {
        throw new JobExecutionException("Error executing script job at " + scriptUrl, e);
    } finally {
        SiteContext.clear();
    }
}
 
Example #11
Source File: CommandExecutionJob.java    From roboconf-platform with Apache License 2.0
@Override
public void execute( JobExecutionContext context )
throws JobExecutionException {

	String appName = (String) context.getJobDetail().getJobDataMap().get( RoboconfScheduler.APP_NAME );
	String jobName = (String) context.getJobDetail().getJobDataMap().get( RoboconfScheduler.JOB_NAME );
	String commandsFileName = (String) context.getJobDetail().getJobDataMap().get( RoboconfScheduler.CMD_NAME );

	try {
		Manager manager = (Manager) context.getScheduler().getContext().get( RoboconfScheduler.MANAGER );
		Application app = manager.applicationMngr().findApplicationByName( appName );

		// The web console finds jobs by names, not IDs, which remain internal to Quartz
		manager.commandsMngr().execute( app, commandsFileName, CommandHistoryItem.ORIGIN_SCHEDULER, jobName );

	} catch( Exception e ) {
		this.logger.warning( "An error occurred while executing job " + jobName + " (command file =" + commandsFileName + ")." );
		Utils.logException( this.logger, e );
	}
}
 
Example #12
Source File: StateTimer.java    From o2oa with GNU Affero General Public License v3.0
@Override
public void schedule(JobExecutionContext jobExecutionContext) throws Exception {
	try (EntityManagerContainer emc = EntityManagerContainerFactory.instance().create()) {
		Business business = new Business(emc);
		new TimerUnitStubs().execute(business);
		new TimerPersonStubs().execute(business);
		new TimerApplicationStubs().execute(business);
		new TimerSummary().execute(business);
		new TimerRunning().execute(business);
		new TimerOrganization().execute(business);
		new TimerCategory().execute(business);
	} catch (Exception e) {
		logger.error(e);
		throw new JobExecutionException(e);
	}
}
 
Example #13
Source File: JobWithDB.java    From olat with Apache License 2.0
@Override
protected final void executeInternal(JobExecutionContext arg0) throws JobExecutionException {
    boolean success = false;
    try {
        // init logging framework
        ThreadLocalUserActivityLoggerInstaller.initEmptyUserActivityLogger();

        executeWithDB(arg0);
        DBFactory.getInstance(false).commitAndCloseSession();
        success = true;
    } catch (JobExecutionException e) {
        // for documentation purpose only
        throw e;
    } finally {
        // clean up logging
        ThreadLocalUserActivityLoggerInstaller.resetUserActivityLogger();
        if (!success) {
            DBFactory.getInstance(false).rollbackAndCloseSession();
        }

    }

}
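
The rethrow above is worth noting: besides reporting the error, a JobExecutionException tells Quartz what to do next. A hedged sketch of the standard options the class offers, not taken from the olat code (doWork() is a placeholder):

import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;

public class RetryingJob implements Job {
    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        try {
            doWork(); // placeholder for the real job body
        } catch (Exception e) {
            JobExecutionException jee = new JobExecutionException(e);
            jee.setRefireImmediately(true);          // re-run the job immediately, or:
            // jee.setUnscheduleFiringTrigger(true); // stop the trigger that fired this run
            // jee.setUnscheduleAllTriggers(true);   // stop every trigger pointing at this job
            throw jee;
        }
    }

    private void doWork() throws Exception { /* ... */ }
}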
 
Example #14
Source File: DataEnrichJob.java    From eagle with Apache License 2.0
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    JobDataMap jobDataMap = context.getJobDetail().getJobDataMap();

    DataEnrichLCM lcm = (DataEnrichLCM)jobDataMap.getOrDefault("dataEnrichLCM", null);
    if(lcm == null)
        throw new IllegalStateException("dataEnrichLCM implementation should be provided");
    try {
        Collection externalEntities = lcm.loadExternal();
        Map<Object, Object> map = Maps.uniqueIndex(
                externalEntities,
                entity -> lcm.getCacheKey(entity)
            );
        ExternalDataCache.getInstance().setJobResult(lcm.getClass(), map);
    } catch(Exception ex) {
        LOG.error("Fail to load sensitivity data", ex);
    }
}
 
Example #15
Source File: CourseSitePublishJob.java    From sakai with Educational Community License v2.0
/**
 * implement the quartz job interface, which is called by the scheduler when a trigger associated with the job fires.
 * this quartz job publishes course sites a configured number of days before their term starts.
 */
public void execute(JobExecutionContext context) throws JobExecutionException {
   synchronized (this) {
      log.info("execute()");

      if (user == null) {
         log.error("The scheduled job to remove course sites can not be run with an invalid user.  No courses were published.");
      } else {
         try {
            // switch the current user to the one specified to run the quartz job
            Session sakaiSession = sessionManager.getCurrentSession();
            sakaiSession.setUserId(user.getId());

            int numSitesPublished = courseSitePublishService.publishCourseSites(numDaysBeforeTermStarts);
            log.info(numSitesPublished + " course sites were published.");
         } catch (Exception ex) {
            log.error(ex.getMessage());
         }
      }
   }
}
 
Example #16
Source File: RebootActionCleanup.java    From spacewalk with GNU General Public License v2.0
/**
 * {@inheritDoc}
 */
public void execute(JobExecutionContext arg0In)
    throws JobExecutionException {
    List<Map<String, Long>> failedRebootActions = lookupRebootActionCleanup();
    for (Map<String, Long> fa : failedRebootActions) {
        Long sid = fa.get("server_id");
        Long aid = fa.get("action_id");
        List<Long> fAids = invalidateActionRecursive(sid, aid);
        for (Long fAid : fAids) {
            invalidateKickstartSession(sid, fAid);
        }
    }
    if (failedRebootActions.size() > 0) {
        log.info("Set " + failedRebootActions.size() +
                " reboot action(s) to failed. Running longer than 6 hours.");
    }
}
 
Example #17
Source File: StackStatusCheckerJobTest.java    From cloudbreak with Apache License 2.0
@Test
public void testNotRunningIfFlowInProgress() throws JobExecutionException {
    when(flowLogService.isOtherFlowRunning(anyLong())).thenReturn(Boolean.TRUE);
    underTest.executeInternal(jobExecutionContext);

    verify(stackService, times(0)).getByIdWithListsInTransaction(anyLong());
}
 
Example #18
Source File: ChangeLogCleanUp.java    From uyuni with GNU General Public License v2.0
/**
 * {@inheritDoc}
 */
public void execute(JobExecutionContext arg0In)
    throws JobExecutionException {
    int rowsDeleted = deleteOrphanedChangelogEntries();
    if (rowsDeleted > 0) {
        log.info("Deleted " + rowsDeleted +
                " row(s) of orphaned package changelog data.");
    }
}
 
Example #19
Source File: StockJob.java    From ChengFeng1.5 with MIT License
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
  log.info(String.join(":", new Date().toString(), "syncing Redis stock"));
  stringRedisTemplate.opsForHash().keys(RedisConstant.PRODUCT_STOCKS).parallelStream().forEach(key->{
      List<String> skusPrefix = splitter.splitToList(key.toString());
      String s = skusPrefix.get(1);
      log.info("skuId:"+s);
      String redisStock = (String) stringRedisTemplate.opsForHash().get(RedisConstant.PRODUCT_STOCKS, key);
      PurchaseProductSku productSku = productSkuMapper.selectByPrimaryKey(Integer.parseInt(s));
      productSku.setSales(productSku.getSales()+(productSku.getStock()-Integer.parseInt(redisStock)));
      productSku.setStock(Integer.parseInt(redisStock));
      productSkuMapper.updateByPrimaryKey(productSku);
  });
}
 
Example #20
Source File: ContentJobs.java    From entando-components with GNU Lesser General Public License v3.0
private ApplicationContext getApplicationContext(JobExecutionContext context) throws Exception {
	ApplicationContext appCtx = (ApplicationContext) context.getScheduler().getContext()
			.get(APPLICATION_CONTEXT_KEY);
	if (appCtx == null) {
		throw new JobExecutionException(
				ContentThreadConstants.APP_CTX_ERROR + "\"" + APPLICATION_CONTEXT_KEY + "\"");
	}
	return appCtx;
}
 
Example #21
Source File: SNMPPluginJob.java    From SuitAgent with Apache License 2.0
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    long timestamp = System.currentTimeMillis() / 1000;
    JobDataMap jobDataMap = context.getJobDetail().getJobDataMap();
    String pluginName = jobDataMap.getString("pluginName");
    try {
        SNMPV3Plugin plugin = (SNMPV3Plugin) jobDataMap.get("pluginObject");
        List<SNMPV3UserInfo> jobUsers = (List<SNMPV3UserInfo>) jobDataMap.get("userInfoList");
        MetricsCommon metricsValue = new SNMPV3MetricsValue(plugin,jobUsers,timestamp);
        // Fetching SNMP metrics can take a long time, so run the collection asynchronously
        ExecuteThreadUtil.execute(new JobThread(metricsValue,"snmp v3 job thread"));
    } catch (Exception e) {
        log.error("插件 {} 运行异常",pluginName,e);
    }
}
 
Example #22
Source File: ClientOneJob.java    From javabase with Apache License 2.0
public void executeJob(JobExecutionContext jobExecutionContext) throws JobExecutionException {
    log.info("Job execution started");
    try {
        Thread.sleep(1000 * 35);
    } catch (InterruptedException e) {
        e.printStackTrace();
    }
    log.info("Job execution finished");
}
 
Example #23
Source File: ClearJob.java    From JavaMonitor with Apache License 2.0
@Override
protected void executeInternal(JobExecutionContext jobExecutionContext) throws JobExecutionException {
    logger.warn("Clear all data on a regular basis");
    gcService.clearAll();
    classService.clearAll();
    threadService.clearAll();
}
 
Example #24
Source File: JobRunShell.java    From lams with GNU General Public License v2.0
private boolean notifyJobListenersComplete(JobExecutionContext jobExCtxt, JobExecutionException jobExEx) {
    try {
        qs.notifyJobListenersWasExecuted(jobExCtxt, jobExEx);
    } catch (SchedulerException se) {
        qs.notifySchedulerListenersError(
                "Unable to notify JobListener(s) of Job that was executed: "
                        + "(error will be ignored). trigger= "
                        + jobExCtxt.getTrigger().getKey() + " job= "
                        + jobExCtxt.getJobDetail().getKey(), se);

        return false;
    }

    return true;
}
 
Example #25
Source File: AbstractSuspendableJob.java    From Knowage-Server with GNU Affero General Public License v3.0
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    setTenant(context);
    // Execute internal only if trigger isn't paused
    if (!isTriggerPaused(context)) {
        internalExecute(context);
    }
    unsetTenant();
}
 
Example #26
Source File: CollectDataFromLWM2MJobService.java    From SDA with BSD 2-Clause "Simplified" License
public void execute(JobExecutionContext arg0)  throws JobExecutionException{
	String mongodb_server;
	int mongodb_port;
	String mongodb_db;
	String save_path;
	String user_name;
	String password;
	
	mongodb_server = Utils.getSdaProperty("com.pineone.icbms.sda.lwm2m.mongodb.server");
	mongodb_port = Integer.parseInt(Utils.getSdaProperty("com.pineone.icbms.sda.lwm2m.mongodb.port"));
	mongodb_db = Utils.getSdaProperty("com.pineone.icbms.sda.lwm2m.mongodb.db");
	
	save_path = Utils.getSdaProperty("com.pineone.icbms.sda.triple.save_path");
	user_name = Utils.getSdaProperty("com.pineone.icbms.sda.mongo.db.user_name");
	password = Utils.getSdaProperty("com.pineone.icbms.sda.mongo.db.password");

	// Create the save directory if it does not exist
	save_path = Utils.makeSavePath(save_path);

	try {
		collect(mongodb_server, mongodb_port, mongodb_db, save_path, arg0, user_name, password);
	} catch (Exception e) {
		e.printStackTrace();
		log.debug("Exception ("+this.getClass()+")  ....................................> "+e.toString());			
		throw new JobExecutionException(e);
	}
}
 
Example #27
Source File: RemoteHttpJobBean.java    From zuihou-admin-cloud with Apache License 2.0
@Override
protected void executeInternal(JobExecutionContext context)
        throws JobExecutionException {

    logger.info("最开始调度是从这里开始的-------");
    // load jobId
    JobKey jobKey = context.getTrigger().getJobKey();
    Integer jobId = Integer.valueOf(jobKey.getName());

    logger.info("job id={}, group={}", jobId, jobKey.getGroup());

    // trigger
    JobTriggerPoolHelper.trigger(jobId, TriggerTypeEnum.CRON, -1, null, null);
}
 
Example #28
Source File: DumpDataTask.java    From o2oa with GNU Affero General Public License v3.0
@Override
public void execute(JobExecutionContext jobExecutionContext) throws JobExecutionException {
	try {
		logger.print("schedule dump data task start.");
		DumpData action = new DumpData();
		action.execute(Config.currentNode().dumpData().path());
		if (Config.currentNode().dumpData().size() > 0) {
			File dir = new File(Config.base(), "local/dump");
			List<File> list = new ArrayList<>();
			if (dir.exists() && dir.isDirectory()) {
				for (File f : FileUtils.listFilesAndDirs(dir, FalseFileFilter.FALSE, new RegexFileFilter(
						"^dumpData_[12][09][0-9][0-9][01][0-9][0-3][0-9][0-5][0-9][0-5][0-9][0-5][0-9]$"))) {
					if (dir != f) {
						list.add(f);
					}
				}
				list = list.stream().sorted(Comparator.comparing(File::getName).reversed())
						.collect(Collectors.toList());
				if (list.size() > Config.currentNode().dumpData().size()) {
					for (int i = Config.currentNode().dumpData().size(); i < list.size(); i++) {
						File file = list.get(i);
						logger.print("dumpDataTask delete:{}.", file.getAbsolutePath());
						FileUtils.forceDelete(file);
					}
				}
			}
		}
	} catch (Exception e) {
		throw new JobExecutionException(e);
	}
}
 
Example #29
Source File: AnomalyDetectionInputContextBuilder.java    From incubator-pinot with Apache License 2.0
/**
 * Fetch time series data from Pinot over the given time ranges
 * @param startEndTimeRanges
 * the time ranges over which to actually fetch the time series
 * @param endTimeInclusive
 * whether the end of each range is inclusive
 * @return
 * the builder of the AnomalyDetectionInputContext
 * @throws JobExecutionException
 * @throws ExecutionException
 */
public AnomalyDetectionInputContextBuilder fetchTimeSeriesData(List<Pair<Long, Long>> startEndTimeRanges, boolean endTimeInclusive)
    throws JobExecutionException, ExecutionException {
  Map<DimensionKey, MetricTimeSeries> dimensionKeyMetricTimeSeriesMap =
      getTimeSeriesForAnomalyDetection(anomalyFunctionSpec, startEndTimeRanges, endTimeInclusive);

  Map<DimensionMap, MetricTimeSeries> dimensionMapMetricTimeSeriesMap = new HashMap<>();
  for (Map.Entry<DimensionKey, MetricTimeSeries> entry : dimensionKeyMetricTimeSeriesMap.entrySet()) {
    DimensionKey dimensionKey = entry.getKey();

    // If the current time series belongs to the OTHER dimension, which consists of time series
    // whose summed values fall below 1% of the sum of all time series values, then its anomalies
    // are meaningless and hence we don't want to detect anomalies on it.
    String[] dimensionValues = dimensionKey.getDimensionValues();
    boolean isOTHERDimension = false;
    for (String dimensionValue : dimensionValues) {
      if (dimensionValue.equalsIgnoreCase(ResponseParserUtils.OTHER) || dimensionValue.equalsIgnoreCase(
          ResponseParserUtils.UNKNOWN)) {
        isOTHERDimension = true;
        break;
      }
    }
    if (isOTHERDimension) {
      continue;
    }

    DimensionMap dimensionMap = DimensionMap.fromDimensionKey(dimensionKey, collectionDimensions);
    dimensionMapMetricTimeSeriesMap.put(dimensionMap, entry.getValue());

    if (entry.getValue().getTimeWindowSet().size() < 1) {
      LOG.warn("Insufficient data for {} to run anomaly detection function", dimensionMap);
    }
  }
  this.anomalyDetectionInputContext.setDimensionMapMetricTimeSeriesMap(dimensionMapMetricTimeSeriesMap);

  return this;
}