Java Code Examples for org.quartz.JobDataMap#put()
The following examples show how to use org.quartz.JobDataMap#put().
Each example is taken from an open-source project; the source file, project, and license are noted above each snippet.
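Before the project snippets, here is a minimal, self-contained sketch of the pattern they all share: populate a JobDataMap with put(), attach it to a JobDetail, and hand the job to a scheduler. It assumes Quartz 2.x; the job class MyJob and the keys "reportName" and "retryCount" are illustrative placeholders, not taken from any of the projects below.

import org.quartz.Job;
import org.quartz.JobBuilder;
import org.quartz.JobDataMap;
import org.quartz.JobDetail;
import org.quartz.JobExecutionContext;
import org.quartz.Scheduler;
import org.quartz.Trigger;
import org.quartz.TriggerBuilder;
import org.quartz.impl.StdSchedulerFactory;

public class JobDataMapSketch {

    // Hypothetical job that reads the values placed in the JobDataMap.
    public static class MyJob implements Job {
        @Override
        public void execute(JobExecutionContext context) {
            JobDataMap map = context.getJobDetail().getJobDataMap();
            String reportName = map.getString("reportName"); // stored via put(String, String)
            int retryCount = map.getInt("retryCount");       // stored via put(String, int)
            System.out.println("Running " + reportName + ", retries=" + retryCount);
        }
    }

    public static void main(String[] args) throws Exception {
        // Populate the map with put(); keys are arbitrary strings chosen by the caller.
        JobDataMap jobDataMap = new JobDataMap();
        jobDataMap.put("reportName", "daily-report");
        jobDataMap.put("retryCount", 3);

        // Attach the map to a JobDetail and schedule it to fire once, immediately.
        JobDetail jobDetail = JobBuilder.newJob(MyJob.class)
                .withIdentity("exampleJob", "exampleGroup")
                .usingJobData(jobDataMap)
                .build();
        Trigger trigger = TriggerBuilder.newTrigger()
                .withIdentity("exampleTrigger", "exampleGroup")
                .startNow()
                .build();

        Scheduler scheduler = StdSchedulerFactory.getDefaultScheduler();
        scheduler.start();
        scheduler.scheduleJob(jobDetail, trigger);
    }
}

The examples that follow show the same put()-then-schedule pattern in real projects, typically storing Spring bean names, serialized configuration, or whole service objects in the map.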
Example 1
Source File: SchedulerTool.java From sakai with Educational Community License v2.0

/**
 * Convenience method for creating a JobDetail object from a JobBeanWrapper. The JobDetail object is
 * used to actually create a job within Quartz, and is also tracked by the {@link getJobDetail()} property
 * for use during the property editing process.
 *
 * @param job
 * @return JobDetail object constructed from the job argument
 */
private JobDetail createJobDetail(JobBeanWrapper job) {
    jobName = escapeEntities(jobName);
    JobDetail jd = JobBuilder.newJob(job.getJobClass())
            .withIdentity(jobName, Scheduler.DEFAULT_GROUP)
            .requestRecovery()
            .storeDurably()
            .build();
    JobDataMap map = jd.getJobDataMap();
    map.put(JobBeanWrapper.SPRING_BEAN_NAME, job.getBeanId());
    map.put(JobBeanWrapper.JOB_NAME, job.getJobName());
    return jd;
}
Example 2
Source File: EntityMocksHelper.java From griffin with Apache License 2.0

public static JobDetailImpl createJobDetail(String measureJson, String predicatesJson) {
    JobDetailImpl jobDetail = new JobDetailImpl();
    JobKey jobKey = new JobKey("name", "group");
    jobDetail.setKey(jobKey);
    JobDataMap jobDataMap = new JobDataMap();
    jobDataMap.put(MEASURE_KEY, measureJson);
    jobDataMap.put(PREDICATES_KEY, predicatesJson);
    jobDataMap.put(JOB_NAME, "jobName");
    jobDataMap.put("jobName", "jobName");
    jobDataMap.put(PREDICATE_JOB_NAME, "predicateJobName");
    jobDataMap.put(GRIFFIN_JOB_ID, 1L);
    jobDetail.setJobDataMap(jobDataMap);
    return jobDetail;
}
Example 3
Source File: CronService.java From aion-germany with GNU General Public License v3.0

public void schedule(Runnable r, String cronExpression, boolean longRunning) {
    try {
        JobDataMap jdm = new JobDataMap();
        jdm.put(RunnableRunner.KEY_RUNNABLE_OBJECT, r);
        jdm.put(RunnableRunner.KEY_PROPERTY_IS_LONGRUNNING_TASK, longRunning);
        jdm.put(RunnableRunner.KEY_CRON_EXPRESSION, cronExpression);

        String jobId = "Started at ms" + System.currentTimeMillis() + "; ns" + System.nanoTime();
        JobKey jobKey = new JobKey("JobKey:" + jobId);
        JobDetail jobDetail = JobBuilder.newJob(runnableRunner).usingJobData(jdm).withIdentity(jobKey).build();

        CronScheduleBuilder csb = CronScheduleBuilder.cronSchedule(cronExpression);
        CronTrigger trigger = TriggerBuilder.newTrigger().withSchedule(csb).build();

        scheduler.scheduleJob(jobDetail, trigger);
    } catch (Exception e) {
        throw new CronServiceException("Failed to start job", e);
    }
}
Example 4
Source File: GetArchivesJob.java From sakai with Educational Community License v2.0

private void scheduleImport(String file, String siteId) {
    JobDataMap jobData = new JobDataMap();
    jobData.put("zip", file);
    if (siteId != null) {
        jobData.put("siteId", siteId);
    }
    JobDetail jobDetail = JobBuilder.newJob(ImportJob.class)
            .withIdentity("Import Job")
            .setJobData(jobData)
            .build();
    Scheduler scheduler = schedulerManager.getScheduler();
    try {
        scheduler.addJob(jobDetail, true, true);
        scheduler.triggerJob(jobDetail.getKey());
    } catch (SchedulerException e) {
        log.warn("Problem adding job to scheduler to import " + file, e);
    }
}
Example 5
Source File: QuartzManager.java From quartz-web with Apache License 2.0

public JobDetail updateStatefulMethodJob(String schedulerName, String jobName, String jobGroup,
        String description, MethodInvoker methodInvoker) throws SchedulerException {
    Assert.notNull(methodInvoker, "methodInvoker can not be null");
    JobDataMap jobDataMap = new JobDataMap();
    jobDataMap.put("methodInvoker", methodInvoker);
    JobDetail jobDetail = JobBuilder.newJob(StatefulMethodInvokeJob.class).withIdentity(jobName, jobGroup)
            .withDescription(description).setJobData(jobDataMap).storeDurably().build();
    updateJob(schedulerName, jobDetail);
    return jobDetail;
}
Example 6
Source File: Context.java From o2oa with GNU Affero General Public License v3.0

public <T extends AbstractJob> void scheduleLocal(Class<T> cls, Trigger existTrigger) throws Exception {
    JobDataMap jobDataMap = new JobDataMap();
    jobDataMap.put("context", this);
    JobDetail jobDetail = JobBuilder.newJob(cls).withIdentity(cls.getName(), clazz.getName())
            .usingJobData(jobDataMap).withDescription(Config.node()).build();
    Trigger trigger = TriggerBuilder.newTrigger().withIdentity(cls.getName(), clazz.getName())
            .withDescription("scheduleLocal").withSchedule(existTrigger.getScheduleBuilder()).build();
    scheduler.scheduleJob(jobDetail, trigger);
    this.scheduleLocalRequestList.add(new ScheduleLocalRequest(jobDetail, null, null, null));
}
Example 7
Source File: SchedulerUtil.java From webcurator with Apache License 2.0

/**
 * Schedule the harvest completion to run after a specified delay, to allow the
 * harvester to release all its resources, or after a failure to contact the
 * core or the digital asset store.
 * @param aHarvestName the name of the harvest job
 * @param aFailueStep the step that the completion failed at
 * @param aMessageSent a flag to indicate that the failure notification has been sent
 * @param aRetries the number of retries attempted
 * @throws SchedulerException thrown if there is a problem scheduling the quartz job
 */
public static final void scheduleHarvestCompleteJob(String aHarvestName, int aFailueStep, boolean aMessageSent, int aRetries) throws SchedulerException {
    ApplicationContext context = ApplicationContextFactory.getWebApplicationContext();
    Scheduler scheduler = (Scheduler) context.getBean(Constants.BEAN_SCHEDULER_FACTORY);
    HarvestCompleteConfig hcc = (HarvestCompleteConfig) context.getBean(Constants.BEAN_HARVEST_COMPLETE_CONFIG);

    JobDetail job = new JobDetail(JOB_NAME_COMPLETE + SEPERATOR + aHarvestName + SEPERATOR + aRetries,
            JOB_GROUP_COMPLETE + SEPERATOR + aHarvestName, HarvestCompleteJob.class);

    JobDataMap jdm = new JobDataMap();
    jdm.put(HarvestCompleteJob.PARAM_JOB_NAME, aHarvestName);
    jdm.put(HarvestCompleteJob.PARAM_FAILURE_STEP, new Integer(aFailueStep));
    jdm.put(HarvestCompleteJob.PARAM_MSG_SENT, new Boolean(aMessageSent));
    jdm.put(HarvestCompleteJob.PARAM_RETRIES, new Integer(aRetries));
    job.setJobDataMap(jdm);

    // Set the complete job to run xx seconds after we get the notification
    Calendar cal = Calendar.getInstance();
    if (aRetries == 0) {
        cal.add(Calendar.SECOND, hcc.getWaitOnCompleteSeconds());
    } else if (aRetries < hcc.getLevelRetryBand()) {
        cal.add(Calendar.SECOND, hcc.getWaitOnFailureLevelOneSecs());
    } else {
        cal.add(Calendar.SECOND, hcc.getWaitOnFailureLevelTwoSecs());
    }

    Trigger trigger = new SimpleTrigger(TRG_NAME_COMPLETE + SEPERATOR + aHarvestName + SEPERATOR + aRetries,
            TRG_GROUP_COMPLETE + SEPERATOR + aHarvestName, cal.getTime());

    scheduler.scheduleJob(job, trigger);
}
Example 8
Source File: QuartzManager.java From quartz-web with Apache License 2.0

public JobDetail updateMethodInovkeJob(String schedulerName, String jobName, String jobGroup,
        String description, MethodInvoker methodInvoker) throws SchedulerException {
    Assert.notNull(methodInvoker, "methodInvoker can not be null");
    Assert.notEmpty(schedulerName, "schedulerName can not be empty");
    Assert.notEmpty(jobName, "jobName can not be empty");
    Assert.notEmpty(jobGroup, "jobGroup can not be empty");
    JobDataMap jobDataMap = new JobDataMap();
    jobDataMap.put("methodInvoker", methodInvoker);
    JobDetail jobDetail = JobBuilder.newJob(MethodInvokeJob.class).withIdentity(jobName, jobGroup)
            .withDescription(description).setJobData(jobDataMap).storeDurably().build();
    updateJob(schedulerName, jobDetail);
    return jobDetail;
}
Example 9
Source File: SchedulerTestBase.java From iaf with Apache License 2.0

private JobDataMap createConfiguredJobDataMap() {
    JobDataMap jobDataMap = new JobDataMap();
    jobDataMap.put(ConfiguredJob.JOBDEF_KEY, (JobDef) null);
    jobDataMap.put(ConfiguredJob.MANAGER_KEY, (IbisManager) null);
    return jobDataMap;
}
Example 10
Source File: QuartzManager.java From quartz-web with Apache License 2.0

public JobDetail addStatefulMethodJob(String schedulerName, String jobName, String jobGroup,
        String description, MethodInvoker methodInvoker) throws SchedulerException {
    Assert.notNull(methodInvoker, "methodInvoker can not be null");
    JobDataMap jobDataMap = new JobDataMap();
    jobDataMap.put("methodInvoker", methodInvoker);
    JobDetail jobDetail = JobBuilder.newJob(StatefulMethodInvokeJob.class).withIdentity(jobName, jobGroup)
            .withDescription(description).setJobData(jobDataMap).storeDurably().build();
    addJob(schedulerName, jobDetail);
    return jobDetail;
}
Example 11
Source File: QuartzNativeObjectsConverter.java From Knowage-Server with GNU Affero General Public License v3.0

public static org.quartz.JobDetail convertJobToNativeObject(Job spagobiJob) {
    org.quartz.JobDetail quartzJob;

    quartzJob = new org.quartz.JobDetail();
    quartzJob.setName(spagobiJob.getName());
    quartzJob.setGroup(spagobiJob.getGroupName());
    quartzJob.setDescription(spagobiJob.getDescription());
    quartzJob.setJobClass(spagobiJob.getJobClass());
    quartzJob.setDurability(spagobiJob.isDurable());
    quartzJob.setRequestsRecovery(spagobiJob.isRequestsRecovery());
    quartzJob.setVolatility(spagobiJob.isVolatile());

    JobDataMap parameters = convertParametersToNativeObject(spagobiJob.getParameters());
    if (parameters.containsKey(MERGE_ALL_SNAPSHOTS)) {
        throw new SpagoBIRuntimeException("An unexpected error occured while converting Job to native object: "
                + MERGE_ALL_SNAPSHOTS + " property already defined");
    }
    parameters.put(MERGE_ALL_SNAPSHOTS, spagobiJob.isMergeAllSnapshots() ? "true" : "false");
    if (parameters.containsKey(COLLATE_SNAPSHOTS)) {
        throw new SpagoBIRuntimeException("An unexpected error occured while converting Job to native object: "
                + COLLATE_SNAPSHOTS + " property already defined");
    }
    parameters.put(COLLATE_SNAPSHOTS, spagobiJob.isCollateSnapshots() ? "true" : "false");
    quartzJob.setJobDataMap(parameters);

    return quartzJob;
}
Example 12
Source File: SakaiJob.java From sakai with Educational Community License v2.0

private JobDetail createJobDetail(JobBeanWrapper job, String jobName) {
    JobDetail jd = JobBuilder.newJob(job.getJobClass())
            .withIdentity(new JobKey(jobName, Scheduler.DEFAULT_GROUP))
            .storeDurably()
            .requestRecovery()
            .build();
    JobDataMap map = jd.getJobDataMap();
    map.put(JobBeanWrapper.SPRING_BEAN_NAME, job.getBeanId());
    map.put(JobBeanWrapper.JOB_NAME, job.getJobName());
    return jd;
}
Example 13
Source File: AutoRun.java From sakai with Educational Community License v2.0

public void init() {
    if (config == null || serverConfigurationService.getBoolean(config, false)) {
        log.info("AutoRun running");
        Scheduler scheduler = schedulerManager.getScheduler();
        for (JobBeanWrapper job : startup) {
            try {
                JobDataMap jobData = new JobDataMap();
                jobData.put(JobBeanWrapper.SPRING_BEAN_NAME, job.getBeanId());
                jobData.put(JobBeanWrapper.JOB_NAME, job.getJobName());
                JobDetail jobDetail = JobBuilder.newJob(job.getJobClass())
                        .withIdentity(job.getJobName(), null)
                        .setJobData(jobData)
                        .build();

                // Non durable job that will get removed
                scheduler.addJob(jobDetail, true, true);
                scheduler.triggerJob(jobDetail.getKey());
                log.info("Triggered job: {}", job.getJobName());
            } catch (SchedulerException se) {
                log.warn("Failed to run job: {}", startup, se);
            }
        }
    }
}
Example 14
Source File: ExportResource.java From Knowage-Server with GNU Affero General Public License v3.0

/**
 * Schedule a job to clean up old exports.
 *
 * @throws SchedulerException In case of error during scheduling
 */
private void scheduleCleanUp() throws SchedulerException {
    UserProfile userProfile = UserProfileManager.getProfile();
    String resoursePath = SpagoBIUtilities.getResourcePath();

    String jobName = String.format("delete-old-export-for-%s", userProfile.getUserId());
    String jobGroup = "delete-old-export";
    String jobDescription = String.format("Delete old exports for user %s", userProfile.getUserId());

    JobDataMap jobDataMap = new JobDataMap();
    jobDataMap.put(ExportDeleteOldJob.MAP_KEY_RESOURCE_PATH, resoursePath);
    jobDataMap.put(ExportDeleteOldJob.MAP_KEY_USER_PROFILE, userProfile);

    JobDetail job = new JobDetail(jobName, jobGroup, ExportDeleteOldJob.class);
    job.setDescription(jobDescription);
    job.setJobDataMap(jobDataMap);

    Scheduler scheduler = StdSchedulerFactory.getDefaultScheduler();
    scheduler.addJob(job, true);
    scheduler.triggerJob(job.getName(), job.getGroup());
}
Example 15
Source File: LockingJobTest.java From alfresco-repository with GNU Lesser General Public License v3.0

@Test
public void testJobLocking() throws Exception {
    HBBaseDataCollector simpleCollector = mock(HBBaseDataCollector.class);
    when(simpleCollector.getCollectorId()).thenReturn("c1");
    when(simpleCollector.getCronExpression()).thenReturn("0 0 0 ? * *");

    // mock the job context
    JobExecutionContext mockJobExecutionContext = mock(JobExecutionContext.class);
    JobDataMap jobDataMap = new JobDataMap();
    jobDataMap.put("collector", simpleCollector);
    jobDataMap.put("hbDataSenderService", mockDataSenderService);
    jobDataMap.put("jobLockService", mockJobLockService);
    JobDetail jobDetail = JobBuilder.newJob()
            .setJobData(jobDataMap)
            .ofType(LockingJob.class)
            .build();
    when(mockJobExecutionContext.getJobDetail()).thenReturn(jobDetail);

    // Simulate job lock service
    String lockToken = "token";
    when(mockJobLockService.getLock(isA(QName.class), anyLong()))
            .thenReturn(lockToken)                            // first job gets the lock
            .thenThrow(new LockAcquisitionException("", "")); // second job doesn't get the lock

    // Run two heart beat jobs
    new LockingJob().execute(mockJobExecutionContext);
    new LockingJob().execute(mockJobExecutionContext);

    // Verify that the collector only collects data once, since only one job got the lock
    verify(simpleCollector, Mockito.times(1)).collectData();

    // Verify that data was passed to data sender
    verify(mockDataSenderService, Mockito.times(1)).sendData(any(List.class));
    verify(mockDataSenderService, Mockito.times(0)).sendData(any(HBData.class));

    // Verify that both jobs tried to get the lock
    verify(mockJobLockService, Mockito.times(2)).getLock(any(QName.class), anyLong());

    // Verify that a callback was registered once
    verify(mockJobLockService, Mockito.times(1)).refreshLock(eq(lockToken), any(QName.class), anyLong(),
            any(JobLockService.JobLockRefreshCallback.class));
}
Example 16
Source File: StdJDBCDelegate.java From AsuraFramework with Apache License 2.0

/**
 * <p>
 * Select all of the triggers for jobs that are requesting recovery. The
 * returned trigger objects will have unique "recoverXXX" trigger names and
 * will be in the <code>{@link org.quartz.Scheduler}.DEFAULT_RECOVERY_GROUP</code>
 * trigger group.
 * </p>
 *
 * <p>
 * In order to preserve the ordering of the triggers, the fire time will be
 * set from the <code>COL_FIRED_TIME</code> column in the <code>TABLE_FIRED_TRIGGERS</code>
 * table. The caller is responsible for calling <code>computeFirstFireTime</code>
 * on each returned trigger. It is also up to the caller to insert the
 * returned triggers to ensure that they are fired.
 * </p>
 *
 * @param conn
 *          the DB Connection
 * @return an array of <code>{@link org.quartz.Trigger}</code> objects
 */
public Trigger[] selectTriggersForRecoveringJobs(Connection conn)
        throws SQLException, IOException, ClassNotFoundException {
    PreparedStatement ps = null;
    ResultSet rs = null;
    try {
        ps = conn.prepareStatement(rtp(SELECT_INSTANCES_RECOVERABLE_FIRED_TRIGGERS));
        ps.setString(1, instanceId);
        setBoolean(ps, 2, true);
        rs = ps.executeQuery();

        long dumId = System.currentTimeMillis();
        ArrayList list = new ArrayList();
        while (rs.next()) {
            String jobName = rs.getString(COL_JOB_NAME);
            String jobGroup = rs.getString(COL_JOB_GROUP);
            String trigName = rs.getString(COL_TRIGGER_NAME);
            String trigGroup = rs.getString(COL_TRIGGER_GROUP);
            long firedTime = rs.getLong(COL_FIRED_TIME);
            int priority = rs.getInt(COL_PRIORITY);
            SimpleTrigger rcvryTrig = new SimpleTrigger("recover_" + instanceId + "_" + String.valueOf(dumId++),
                    Scheduler.DEFAULT_RECOVERY_GROUP, new Date(firedTime));
            rcvryTrig.setJobName(jobName);
            rcvryTrig.setJobGroup(jobGroup);
            rcvryTrig.setPriority(priority);
            rcvryTrig.setMisfireInstruction(SimpleTrigger.MISFIRE_INSTRUCTION_FIRE_NOW);

            JobDataMap jd = selectTriggerJobDataMap(conn, trigName, trigGroup);
            jd.put(Scheduler.FAILED_JOB_ORIGINAL_TRIGGER_NAME, trigName);
            jd.put(Scheduler.FAILED_JOB_ORIGINAL_TRIGGER_GROUP, trigGroup);
            jd.put(Scheduler.FAILED_JOB_ORIGINAL_TRIGGER_FIRETIME_IN_MILLISECONDS, String.valueOf(firedTime));
            rcvryTrig.setJobDataMap(jd);

            list.add(rcvryTrig);
        }
        Object[] oArr = list.toArray();
        Trigger[] tArr = new Trigger[oArr.length];
        System.arraycopy(oArr, 0, tArr, 0, oArr.length);
        return tArr;
    } finally {
        closeResultSet(rs);
        closeStatement(ps);
    }
}
Example 17
Source File: TimerSchedulerService.java From WeEvent with Apache License 2.0

@SuppressWarnings("unchecked")
public RetCode createTimerScheduler(String jobName, String jobGroupName, String triggerName,
        String triggerGroupName, Class jobClass, JobDataMap params, TimerScheduler timerScheduler) {
    try {
        // get all the existing timers
        Iterator<JobKey> jobKeyIterator = scheduler.getJobKeys(GroupMatcher.groupEquals(jobGroupName)).iterator();
        List<TimerScheduler> timerSchedulerList = new ArrayList<>();
        Map<String, TimerScheduler> timerSchedulerMap = new HashMap<>();
        TimerScheduler currentTimer = JsonHelper.json2Object(params.get("timer").toString(), TimerScheduler.class);
        while (jobKeyIterator.hasNext()) {
            JobKey jobKey = jobKeyIterator.next();
            if (null != scheduler.getJobDetail(jobKey).getJobDataMap().get("timer")) {
                TimerScheduler timer = JsonHelper.json2Object(
                        scheduler.getJobDetail(jobKey).getJobDataMap().get("timer").toString(), TimerScheduler.class);
                // if the current is delete
                timerSchedulerList.add(timer);
                timerSchedulerMap.put(timer.getId(), timer);
            }
        }
        timerSchedulerMap.put(currentTimer.getId(), currentTimer);
        timerSchedulerList.add(currentTimer);
        params.put("timerMap", JsonHelper.object2Json(timerSchedulerMap));
        log.info("update the timer timerMap:{},ruleList:{}", timerSchedulerList.size(), timerSchedulerList.size());

        JobDetail job = JobBuilder.newJob(jobClass).withIdentity(jobName, jobGroupName).setJobData(params)
                .requestRecovery(true).storeDurably(true).build();

        // Trigger
        Trigger trigger = TriggerBuilder.newTrigger()
                .withIdentity(new Date().toString().concat(currentTimer.getSchedulerName()), triggerGroupName)
                .startNow()
                .withSchedule(CronScheduleBuilder.cronSchedule(timerScheduler.getPeriodParams()))
                .forJob(jobName, jobGroupName)
                .build();

        // check
        RetCode retCode = checkTimerTask(timerScheduler, params, jobName, jobGroupName, triggerName, triggerGroupName);
        if (1 == retCode.getErrorCode()) {
            return retCode;
        }

        scheduler.scheduleJob(job, trigger);
        if (!scheduler.isShutdown()) {
            scheduler.start();
        }
        if (scheduler.checkExists(JobKey.jobKey(jobName, jobGroupName))) {
            log.info("deal timer task:{} success", jobName);
            return ConstantsHelper.RET_SUCCESS;
        }
        return ConstantsHelper.RET_FAIL;
    } catch (Exception e) {
        log.error("e:{}", e.toString());
        return RetCode.mark(1, e.toString());
    }
}
Example 18
Source File: SchedulerNotificationManager.java From SO with BSD 2-Clause "Simplified" License

@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    //
    long time = System.currentTimeMillis();
    SimpleDateFormat timeFormat = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss");
    String timeStr = timeFormat.format(new Date(time));
    String profileId = context.getJobDetail().getKey().getName();
    String groupName = context.getJobDetail().getKey().getGroup();
    log.debug("### Checking Time by schedule({}) :{}", profileId, timeStr);

    SimpleTrigger st = (SimpleTrigger) context.getTrigger();
    int checkRate = (int) st.getRepeatInterval() / 1000;

    // send the profile id to the ServiceProcessor's ProfileInjector at the configured interval
    // ProfileInjector profileInJector = new ProfileIntjector();
    // profileInJector.sendProfile(profileId);

    //JobDataMap dataMap = context.getMergedJobDataMap();
    JobDataMap dataMap = context.getJobDetail().getJobDataMap();
    //JobDataMap dataMap = context.getTrigger().getJobDataMap();
    ProfileForDB profileForDB = (ProfileForDB) dataMap.get("profile");
    //log.debug("profileForDB.getPeriod()={}", profileForDB.getPeriod());
    int period = profileForDB.getPeriod();
    //int checkRate = dataMap.getInt("checkRate");
    boolean happened = dataMap.getBoolean("happened");               // initial value = not yet occurred
    long lastTimeExecutedCm = dataMap.getLong("lastTimeExecutedCm"); // time the CM last ran
    //context.getJobDetail().getJobDataMap().put("lastTimeExecutedCm", lastTimeExecutedCm);
    long currentTime = (new Date().getTime()) / 1000; // to sec
    boolean result;
    log.debug("## past={}, {} - {}", (currentTime - lastTimeExecutedCm), currentTime, lastTimeExecutedCm);

    if (happened) { // the CM has already been handled
        if (lastTimeExecutedCm == 0 || (lastTimeExecutedCm + period) <= currentTime) { // enough time has passed
            result = checkCm(profileId, CHECK_AND_RUN); // check the CM; if it occurred, run it and return whether it ran
            lastTimeExecutedCm = currentTime;           // store the last execution time
            dataMap.put("lastTimeExecutedCm", lastTimeExecutedCm);
        } else {
            result = checkCm(profileId, CHECK_ONLY);    // check the CM and return only whether it occurred
        }
        if (result == false) { // it did not occur
            happened = false;  // reset the execution flag
            dataMap.put("happened", happened);
        }
    } else { // the CM has not run yet
        result = checkCm(profileId, CHECK_AND_RUN); // run the CM and return whether it ran
        if (result == true) { // it was handled
            happened = true;  // it ran
            lastTimeExecutedCm = currentTime;
            dataMap.put("happened", happened);
            dataMap.put("lastTimeExecutedCm", lastTimeExecutedCm);
        }
    }
    dataMap.put("lastTimeExecutedCm", lastTimeExecutedCm);
    dataMap.put("profile", profileForDB);

    SimpleDateFormat sdf = new SimpleDateFormat("HH:mm:ss.SSS");
    String ctime = sdf.format(new Date());
    log.debug("{} - result={}, happened={}, lastTimeExecutedCm={}\n", ctime, result, happened, lastTimeExecutedCm);
}
Example 19
Source File: StdJDBCDelegate.java From lams with GNU General Public License v2.0

/**
 * <p>
 * Select all of the triggers for jobs that are requesting recovery. The
 * returned trigger objects will have unique "recoverXXX" trigger names and
 * will be in the <code>{@link org.quartz.Scheduler}.DEFAULT_RECOVERY_GROUP</code>
 * trigger group.
 * </p>
 *
 * <p>
 * In order to preserve the ordering of the triggers, the fire time will be
 * set from the <code>COL_FIRED_TIME</code> column in the <code>TABLE_FIRED_TRIGGERS</code>
 * table. The caller is responsible for calling <code>computeFirstFireTime</code>
 * on each returned trigger. It is also up to the caller to insert the
 * returned triggers to ensure that they are fired.
 * </p>
 *
 * @param conn
 *          the DB Connection
 * @return an array of <code>{@link org.quartz.Trigger}</code> objects
 */
public List<OperableTrigger> selectTriggersForRecoveringJobs(Connection conn)
        throws SQLException, IOException, ClassNotFoundException {
    PreparedStatement ps = null;
    ResultSet rs = null;
    try {
        ps = conn.prepareStatement(rtp(SELECT_INSTANCES_RECOVERABLE_FIRED_TRIGGERS));
        ps.setString(1, instanceId);
        setBoolean(ps, 2, true);
        rs = ps.executeQuery();

        long dumId = System.currentTimeMillis();
        LinkedList<OperableTrigger> list = new LinkedList<OperableTrigger>();
        while (rs.next()) {
            String jobName = rs.getString(COL_JOB_NAME);
            String jobGroup = rs.getString(COL_JOB_GROUP);
            String trigName = rs.getString(COL_TRIGGER_NAME);
            String trigGroup = rs.getString(COL_TRIGGER_GROUP);
            long firedTime = rs.getLong(COL_FIRED_TIME);
            long scheduledTime = rs.getLong(COL_SCHED_TIME);
            int priority = rs.getInt(COL_PRIORITY);
            @SuppressWarnings("deprecation")
            SimpleTriggerImpl rcvryTrig = new SimpleTriggerImpl("recover_" + instanceId + "_" + String.valueOf(dumId++),
                    Scheduler.DEFAULT_RECOVERY_GROUP, new Date(scheduledTime));
            rcvryTrig.setJobName(jobName);
            rcvryTrig.setJobGroup(jobGroup);
            rcvryTrig.setPriority(priority);
            rcvryTrig.setMisfireInstruction(SimpleTrigger.MISFIRE_INSTRUCTION_IGNORE_MISFIRE_POLICY);

            JobDataMap jd = selectTriggerJobDataMap(conn, trigName, trigGroup);
            jd.put(Scheduler.FAILED_JOB_ORIGINAL_TRIGGER_NAME, trigName);
            jd.put(Scheduler.FAILED_JOB_ORIGINAL_TRIGGER_GROUP, trigGroup);
            jd.put(Scheduler.FAILED_JOB_ORIGINAL_TRIGGER_FIRETIME_IN_MILLISECONDS, String.valueOf(firedTime));
            jd.put(Scheduler.FAILED_JOB_ORIGINAL_TRIGGER_SCHEDULED_FIRETIME_IN_MILLISECONDS, String.valueOf(scheduledTime));
            rcvryTrig.setJobDataMap(jd);

            list.add(rcvryTrig);
        }
        return list;
    } finally {
        closeResultSet(rs);
        closeStatement(ps);
    }
}
Example 20
Source File: ScriptExecution.java From smarthome with Eclipse Public License 2.0

/**
 * Schedules a block of code (with argument) for later execution
 *
 * @param instant the point in time when the code should be executed
 * @param arg1 the argument to pass to the code block
 * @param closure the code block to execute
 *
 * @return a handle to the created timer, so that it can be canceled or rescheduled
 * @throws ScriptExecutionException if an error occurs during the execution
 */
public static Timer createTimerWithArgument(AbstractInstant instant, Object arg1, Procedure1<Object> closure) {
    JobDataMap dataMap = new JobDataMap();
    dataMap.put("procedure1", closure);
    dataMap.put("argument1", arg1);
    return makeTimer(instant, closure.toString(), dataMap);
}