Java Code Examples for org.quartz.JobDataMap

The following examples show how to use org.quartz.JobDataMap. These examples are extracted from open-source projects. You can vote up the examples you find useful or vote down the ones you don't, and you can go to the original project or source file by following the link above each example. Related API usage can be found in the sidebar.
Example 1
Source Project: quartz-manager   Source File: EmailService.java    License: MIT License 8 votes vote down vote up
/**
 * Updates an existing email job's data map from the supplied descriptor and
 * re-registers the job with the scheduler (replacing the old definition).
 *
 * @param group      quartz group of the job to update
 * @param name       quartz name of the job to update
 * @param descriptor carries the new email fields (subject, body, recipients)
 */
public void updateJob(String group, String name, JobDescriptor descriptor) {
	try {
		JobDetail oldJobDetail = scheduler.getJobDetail(jobKey(name, group));
		if(Objects.nonNull(oldJobDetail)) {
			JobDataMap jobDataMap = oldJobDetail.getJobDataMap();
			jobDataMap.put("subject", descriptor.getSubject());
			jobDataMap.put("messageBody", descriptor.getMessageBody());
			jobDataMap.put("to", descriptor.getTo());
			jobDataMap.put("cc", descriptor.getCc());
			jobDataMap.put("bcc", descriptor.getBcc());
			JobBuilder jb = oldJobDetail.getJobBuilder();
			// storeDurably() keeps the job registered even without a trigger;
			// addJob(..., true) replaces the existing definition.
			JobDetail newJobDetail = jb.usingJobData(jobDataMap).storeDurably().build();
			scheduler.addJob(newJobDetail, true);
			log.info("Updated job with key - {}", newJobDetail.getKey());
			return;
		}
		log.warn("Could not find job with key - {}.{} to update", group, name);
	} catch (SchedulerException e) {
		// A SchedulerException here means the update failed, not that the job
		// was missing — say so, and keep the exception for the stack trace.
		log.error("Could not update job with key - {}.{} due to error - {}", group, name, e.getLocalizedMessage(), e);
	}
}
 
Example 2
Source Project: spring-boot-quartz-demo   Source File: JobUtil.java    License: MIT License 7 votes vote down vote up
/**
 * Builds a Quartz {@link JobDetail} via Spring's {@link JobDetailFactoryBean}.
 *
 * @param jobClass  class whose executeInternal() method is invoked when the job fires
 * @param isDurable true to keep the job persisted even after completion
 * @param context   Spring application context handed to the factory bean
 * @param jobName   job name
 * @param jobGroup  job group
 *
 * @return the fully initialised JobDetail
 */
protected static JobDetail createJob(Class<? extends QuartzJobBean> jobClass, boolean isDurable, 
		ApplicationContext context, String jobName, String jobGroup){
    JobDetailFactoryBean jobFactory = new JobDetailFactoryBean();
    jobFactory.setJobClass(jobClass);
    jobFactory.setDurability(isDurable);
    jobFactory.setApplicationContext(context);
    jobFactory.setName(jobName);
    jobFactory.setGroup(jobGroup);

    // Seed the job data map with a sample entry.
    JobDataMap data = new JobDataMap();
    data.put("myKey", "myValue");
    jobFactory.setJobDataMap(data);

    // Runs bean initialisation so getObject() returns a ready JobDetail.
    jobFactory.afterPropertiesSet();

    return jobFactory.getObject();
}
 
Example 3
Source Project: openhab1-addons   Source File: WeatherJob.java    License: Eclipse Public License 2.0 6 votes vote down vote up
/**
 * {@inheritDoc}
 */
@Override
public void execute(JobExecutionContext jobContext) throws JobExecutionException {
    JobDataMap data = jobContext.getJobDetail().getJobDataMap();
    String locationId = data.getString("locationId");
    logger.debug("Starting Weather job for location '{}'", locationId);

    try {
        // Resolve the provider configured for this location, fetch the
        // weather, cache it on the context, and publish the update.
        LocationConfig locationConfig = context.getConfig().getLocationConfig(locationId);
        WeatherProvider provider = WeatherProviderFactory
                .createWeatherProvider(locationConfig.getProviderName());
        context.setWeather(locationId, provider.getWeather(locationConfig));
        weatherPublisher.publish(locationId);
    } catch (Exception ex) {
        logger.error(ex.getMessage(), ex);
        throw new JobExecutionException(ex.getMessage(), ex);
    }
}
 
Example 4
Source Project: archiva   Source File: RepositoryTaskJob.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Enqueues a {@link RepositoryTask} for the repository named in the job's
 * data map; the actual discovery/indexing happens when the task queue
 * consumer picks the task up.
 *
 * @param context quartz context carrying the task queue and repository id
 * @throws org.quartz.JobExecutionException if the task cannot be queued
 */
@SuppressWarnings( "unchecked" )
@Override
public void execute( JobExecutionContext context )
    throws JobExecutionException
{
    JobDataMap dataMap = context.getJobDetail().getJobDataMap();
    setJobDataMap( dataMap );

    TaskQueue taskQueue = (TaskQueue) dataMap.get( DefaultRepositoryArchivaTaskScheduler.TASK_QUEUE );
    String repositoryId = (String) dataMap.get( DefaultRepositoryArchivaTaskScheduler.TASK_REPOSITORY );

    RepositoryTask repositoryTask = new RepositoryTask();
    repositoryTask.setRepositoryId( repositoryId );

    try
    {
        taskQueue.put( repositoryTask );
    }
    catch ( TaskQueueException e )
    {
        throw new JobExecutionException( e );
    }
}
 
Example 5
Source Project: FoxBPM   Source File: FoxbpmJobExecutionContext.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Copies all FoxBPM job parameters out of the quartz job's data map into
 * typed fields so the executing job can read them directly.
 *
 * @param jobExecutionContext quartz execution context for the fired job
 */
public FoxbpmJobExecutionContext(JobExecutionContext jobExecutionContext) {
	JobDataMap jobDataMap = jobExecutionContext.getJobDetail()
			.getJobDataMap();
	// The concrete job instance quartz created for this firing.
	scheduleJob = jobExecutionContext.getJobInstance();
	this.tokenId = jobDataMap.getString(TOKEN_ID);
	this.processInstanceId = jobDataMap.getString(PROCESS_INSTANCE_ID);
	this.nodeId = jobDataMap.getString(NODE_ID);
	this.processKey = jobDataMap.getString(PROCESS_DEFINITION_KEY);
	this.processId = jobDataMap.getString(PROCESS_DEFINITION_ID);
	this.processName = jobDataMap.getString(PROCESS_DEFINITION_NAME);
	this.bizKey = jobDataMap.getString(BUSINESS_KEY);
	// NOTE(review): "jobType" is the only key read via a raw string literal;
	// consider a named constant for consistency with the others.
	this.jobType = jobDataMap.getString("jobType");
	this.connectorId = jobDataMap.getString(CONNECTOR_ID);
	this.connectorInstanceId = jobDataMap.getString(CONNECTOR_INSTANCE_ID);
	this.connectorInstanceName = jobDataMap
			.getString(CONNECTOR_INSTANCE_NAME);
	this.eventType = jobDataMap.getString(EVENT_TYPE);
	this.eventName = jobDataMap.getString(EVENT_NAME);
	this.taskId = jobDataMap.getString(TASK_ID);

}
 
Example 6
Source Project: WeEvent   Source File: TimerSchedulerController.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * REST endpoint that registers a new timer scheduler as a quartz job.
 *
 * @param timerScheduler validated scheduler definition from the request body
 * @return success response, or the service's error code/message on failure
 * @throws BrokerException propagated from JSON serialization
 */
@RequestMapping("/insert")
public BaseRspEntity insertTimerScheduler(@Validated @RequestBody TimerScheduler timerScheduler) throws BrokerException {
    BaseRspEntity response = new BaseRspEntity(ConstantsHelper.RET_SUCCESS);

    // Hand the scheduler id, operation type, and serialized definition to the job.
    JobDataMap dataMap = new JobDataMap();
    dataMap.put("id", timerScheduler.getId());
    dataMap.put("type", "createTimerTask");
    dataMap.put("timer", JsonHelper.object2Json(timerScheduler));

    RetCode retCode = timerSchedulerService.createTimerScheduler(timerScheduler.getId(), "timer", "timer",
            "timer-trigger", TimerSchedulerJob.class, dataMap, timerScheduler);
    if (ConstantsHelper.RET_FAIL.getErrorCode() == retCode.getErrorCode()) {
        // Propagate the service failure to the caller.
        response.setErrorCode(retCode.getErrorCode());
        response.setErrorMsg(retCode.getErrorMsg());
    }

    return response;
}
 
Example 7
Source Project: openhab1-addons   Source File: IntervalJob.java    License: Eclipse Public License 2.0 6 votes vote down vote up
/**
 * Recomputes the current sun and moon positions and publishes both planets.
 *
 * @param jobDataMap quartz job data map (not read by this job)
 */
@Override
protected void executeJob(JobDataMap jobDataMap) {
    Calendar now = Calendar.getInstance();

    // Sun: recompute position for the configured coordinates, then publish.
    Sun sun = (Sun) context.getPlanet(PlanetName.SUN);
    new SunCalc().setSunPosition(now, context.getConfig().getLatitude(), context.getConfig().getLongitude(), sun);
    planetPublisher.publish(PlanetName.SUN);

    // Moon: recompute position for the configured coordinates, then publish.
    Moon moon = (Moon) context.getPlanet(PlanetName.MOON);
    new MoonCalc().setMoonPosition(now, context.getConfig().getLatitude(), context.getConfig().getLongitude(), moon);
    planetPublisher.publish(PlanetName.MOON);
}
 
Example 8
Source Project: AsuraFramework   Source File: StdJDBCDelegate.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * <p>
 * Remove the transient data from and then create a serialized <code>java.io.ByteArrayOutputStream</code>
 * version of a <code>{@link org.quartz.JobDataMap}</code>.
 * </p>
 * 
 * @param data
 *          the JobDataMap to serialize
 * @return the serialized ByteArrayOutputStream
 * @throws IOException
 *           if serialization causes an error
 */
protected ByteArrayOutputStream serializeJobData(JobDataMap data)
    throws IOException {
    // Property-based storage avoids Java serialization entirely when enabled.
    if (canUseProperties()) {
        return serializeProperties(data);
    }

    try {
        return serializeObject(data);
    } catch (NotSerializableException e) {
        // Re-throw with the offending key named, but keep the original
        // exception attached so the cause chain is not lost.
        NotSerializableException wrapped = new NotSerializableException(
            "Unable to serialize JobDataMap for insertion into " + 
            "database because the value of property '" + 
            getKeyOfNonSerializableValue(data) + 
            "' is not serializable: " + e.getMessage());
        wrapped.initCause(e);
        throw wrapped;
    }
}
 
Example 9
Source Project: ruoyiplus   Source File: ScheduleUtils.java    License: MIT License 6 votes vote down vote up
/**
 * Triggers the given job to run once, immediately.
 *
 * @param scheduler quartz scheduler
 * @param job       the job definition to fire
 * @return 1 when the trigger was submitted, 0 on scheduler error
 */
public static int run(Scheduler scheduler, SysJob job)
{
    try
    {
        // Pass the job entity along so the executing job can read it.
        JobDataMap dataMap = new JobDataMap();
        dataMap.put(ScheduleConstants.TASK_PROPERTIES, job);
        scheduler.triggerJob(getJobKey(job.getJobId()), dataMap);
        return 1;
    }
    catch (SchedulerException e)
    {
        log.error("run 异常:", e);
        return 0;
    }
}
 
Example 10
Source Project: Eagle   Source File: FileSensitivityPollingJob.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Loads file-sensitivity entities and caches them indexed by each entity's
 * "filedir" tag for use by other components.
 *
 * @param context quartz execution context carrying the job data map
 * @throws JobExecutionException declared by the Job interface; failures are
 *         logged rather than rethrown
 */
@Override
public void execute(JobExecutionContext context)
		throws JobExecutionException {
	JobDataMap jobDataMap = context.getJobDetail().getJobDataMap();
	try{
		List<FileSensitivityAPIEntity> ipZones = load(jobDataMap);
		if(ipZones == null){
			LOG.warn("File sensitivity information is empty");
			return;
		}
		// Index entities by their "filedir" tag; uniqueIndex throws if two
		// entities share the same tag value.
		Map<String, FileSensitivityAPIEntity> map = Maps.uniqueIndex(ipZones, new Function<FileSensitivityAPIEntity, String>(){
			@Override
			public String apply(FileSensitivityAPIEntity input) {
				return input.getTags().get("filedir");
			}
		});
		ExternalDataCache.getInstance().setJobResult(getClass(), map);
	}catch(Exception ex){
		LOG.error("Fail loading file sensitivity data", ex);
	}
}
 
Example 11
Source Project: eagle   Source File: DataEnrichJob.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Loads external enrichment data through the configured {@code DataEnrichLCM}
 * and caches the result indexed by the LCM's cache key.
 *
 * @param context quartz context; its job data map must contain "dataEnrichLCM"
 * @throws JobExecutionException declared by the Job interface; load failures
 *         are logged rather than rethrown
 */
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    JobDataMap dataMap = context.getJobDetail().getJobDataMap();

    DataEnrichLCM lcm = (DataEnrichLCM) dataMap.getOrDefault("dataEnrichLCM", null);
    if (lcm == null) {
        throw new IllegalStateException("dataEnrichLCM implementation should be provided");
    }
    try {
        Collection externalEntities = lcm.loadExternal();
        Map<Object, Object> indexed = Maps.uniqueIndex(externalEntities, lcm::getCacheKey);
        ExternalDataCache.getInstance().setJobResult(lcm.getClass(), indexed);
    } catch (Exception ex) {
        LOG.error("Fail to load sensitivity data", ex);
    }
}
 
Example 12
Source Project: incubator-gobblin   Source File: GobblinHelixJob.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Runs or schedules the Gobblin job described by the quartz data map,
 * depending on the JOB_EXECUTE_IN_SCHEDULING_THREAD property.
 *
 * @param context quartz context carrying the scheduler, job properties, and listener
 * @throws JobExecutionException wrapping any failure during run/schedule
 */
@Override
public void executeImpl(JobExecutionContext context) throws JobExecutionException {
  JobDataMap dataMap = context.getJobDetail().getJobDataMap();
  final JobScheduler jobScheduler = (JobScheduler) dataMap.get(JobScheduler.JOB_SCHEDULER_KEY);
  // the properties may get mutated during job execution and the scheduler reuses it for the next round of scheduling,
  // so clone it
  final Properties jobProps = (Properties)((Properties) dataMap.get(JobScheduler.PROPERTIES_KEY)).clone();
  final JobListener jobListener = (JobListener) dataMap.get(JobScheduler.JOB_LISTENER_KEY);

  try {
    // parseBoolean avoids the needless Boolean boxing of Boolean.valueOf().
    boolean runInSchedulingThread = Boolean.parseBoolean(jobProps.getProperty(
        GobblinClusterConfigurationKeys.JOB_EXECUTE_IN_SCHEDULING_THREAD,
        Boolean.toString(GobblinClusterConfigurationKeys.JOB_EXECUTE_IN_SCHEDULING_THREAD_DEFAULT)));
    if (runInSchedulingThread) {
      jobScheduler.runJob(jobProps, jobListener);
    } else {
      cancellable = jobScheduler.scheduleJobImmediately(jobProps, jobListener);
    }
  } catch (Throwable t) {
    throw new JobExecutionException(t);
  }
}
 
Example 13
/**
 * Schedules a one-shot quartz job that delivers a delayed asynchronous
 * message at the message's queue date.
 *
 * @param message the persisted message to deliver
 * @throws SchedulerException if the job cannot be scheduled
 */
protected void scheduleMessage(PersistedMessageBO message) throws SchedulerException {
    LOG.debug("Scheduling execution of a delayed asynchronous message.");
    Scheduler scheduler = KSBServiceLocator.getScheduler();
    JobDataMap jobData = new JobDataMap();
    jobData.put(MessageServiceExecutorJob.MESSAGE_KEY, message);

    // Use a UUID rather than Math.random() for the unique name suffix:
    // random doubles can collide, which would silently replace a job.
    String uniqueSuffix = java.util.UUID.randomUUID().toString();
    JobDetailImpl jobDetail = new JobDetailImpl("Delayed_Asynchronous_Call-" + uniqueSuffix,
            "Delayed_Asynchronous_Call", MessageServiceExecutorJob.class);
    jobDetail.setJobDataMap(jobData);

    scheduler.getListenerManager().addJobListener(new MessageServiceExecutorJobListener());

    SimpleTriggerImpl trigger = new SimpleTriggerImpl("Delayed_Asynchronous_Call_Trigger-" + uniqueSuffix,
            "Delayed_Asynchronous_Call", message.getQueueDate());

    // Quartz 1.6 workaround: the trigger needs its own copy of the data map
    // or Derby-backed job stores fail.
    trigger.setJobDataMap(jobData);
    scheduler.scheduleJob(jobDetail, trigger);
}
 
Example 14
/**
 * Runs the node cleanup worker configured in the job data map and logs each
 * line of its cleanup report at debug level.
 *
 * @param context quartz context whose data map must contain a
 *                {@code NodeCleanupWorker} under "nodeCleanupWorker"
 * @throws JobExecutionException declared by the Job interface; a missing or
 *         mistyped worker raises {@link AlfrescoRuntimeException} instead
 */
public void execute(JobExecutionContext context) throws JobExecutionException
{
    JobDataMap jobData = context.getJobDetail().getJobDataMap();
    // extract the content Cleanup to use; instanceof is false for null, so a
    // separate null check is redundant.
    Object nodeCleanupWorkerObj = jobData.get("nodeCleanupWorker");
    if (!(nodeCleanupWorkerObj instanceof NodeCleanupWorker))
    {
        throw new AlfrescoRuntimeException(
                "NodeCleanupJob data must contain valid 'nodeCleanupWorker' reference");
    }
    NodeCleanupWorker nodeCleanupWorker = (NodeCleanupWorker) nodeCleanupWorkerObj;
    List<String> cleanupLog = nodeCleanupWorker.doClean();
    // Done
    if (logger.isDebugEnabled())
    {
        logger.debug("Node cleanup log:");
        for (String log : cleanupLog)
        {
            logger.debug(log);
        }
    }
}
 
Example 15
Source Project: Eagle   Source File: HbaseResourceSensitivityPollingJob.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Loads Hbase resource-sensitivity entities and caches them indexed by each
 * entity's "hbaseResource" tag.
 *
 * @param context quartz execution context carrying the job data map
 * @throws JobExecutionException declared by the Job interface; failures are
 *         logged rather than rethrown
 */
@Override
public void execute(JobExecutionContext context)
        throws JobExecutionException {
    JobDataMap dataMap = context.getJobDetail().getJobDataMap();
    try {
        List<HbaseResourceSensitivityAPIEntity> entities =
                load(dataMap, "HbaseResourceSensitivityService");
        if (entities == null) {
            LOG.warn("Hbase resource sensitivity information is empty");
            return;
        }
        // Index by the "hbaseResource" tag; uniqueIndex throws on duplicates.
        Map<String, HbaseResourceSensitivityAPIEntity> indexed = Maps.uniqueIndex(
                entities,
                new Function<HbaseResourceSensitivityAPIEntity, String>() {
                    @Override
                    public String apply(HbaseResourceSensitivityAPIEntity input) {
                        return input.getTags().get("hbaseResource");
                    }
                });
        ExternalDataCache.getInstance().setJobResult(getClass(), indexed);
    } catch (Exception ex) {
        LOG.error("Fail to load hbase resource sensitivity data", ex);
    }
}
 
Example 16
Source Project: smarthome   Source File: ScriptExecution.java    License: Eclipse Public License 2.0 6 votes vote down vote up
/**
 * helper function to create the timer
 *
 * @param instant the point in time when the code should be executed
 * @param closure string for job id
 * @param dataMap job data map, preconfigured with arguments
 * @return the created timer handle, or {@code null} if scheduling failed
 */
private static Timer makeTimer(AbstractInstant instant, String closure, JobDataMap dataMap) {

    Logger logger = LoggerFactory.getLogger(ScriptExecution.class);
    JobKey jobKey = new JobKey(instant.toString() + ": " + closure.toString());
    Trigger trigger = newTrigger().startAt(instant.toDate()).build();
    Timer timer = new TimerImpl(jobKey, trigger.getKey(), dataMap, instant);
    try {
        JobDetail job = newJob(TimerExecutionJob.class).withIdentity(jobKey).usingJobData(dataMap).build();
        // Replace any job previously scheduled under the same key.
        if (TimerImpl.scheduler.checkExists(job.getKey())) {
            TimerImpl.scheduler.deleteJob(job.getKey());
            logger.debug("Deleted existing Job {}", job.getKey().toString());
        }
        TimerImpl.scheduler.scheduleJob(job, trigger);
        logger.debug("Scheduled code for execution at {}", instant.toString());
        return timer;
    } catch (SchedulerException e) {
        logger.error("Failed to schedule code for execution.", e);
        return null;
    }
}
 
Example 17
Source Project: syncope   Source File: SchedulingPullActions.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Registers a fresh job bean in the Spring context and schedules it to run
 * immediately with the supplied job data.
 *
 * @param reference job implementation class to instantiate and schedule
 * @param jobMap    job data entries; the current domain is added under
 *                  {@code JobManager.DOMAIN_KEY}
 * @throws JobExecutionException if quartz rejects the scheduling request
 */
protected <T extends Job> void schedule(final Class<T> reference, final Map<String, Object> jobMap)
        throws JobExecutionException {

    @SuppressWarnings("unchecked")
    T jobInstance = (T) ApplicationContextProvider.getBeanFactory().
            createBean(reference, AbstractBeanDefinition.AUTOWIRE_BY_TYPE, false);
    String jobName = getClass().getName() + SecureRandomUtils.generateRandomUUID();

    jobMap.put(JobManager.DOMAIN_KEY, AuthContextUtils.getDomain());

    // Expose the instance as a singleton so quartz can find it by name.
    ApplicationContextProvider.getBeanFactory().registerSingleton(jobName, jobInstance);

    JobDetail jobDetail = JobBuilder.newJob(reference).
            withIdentity(jobName).
            usingJobData(new JobDataMap(jobMap)).
            build();

    Trigger trigger = TriggerBuilder.newTrigger().
            withIdentity(JobNamer.getTriggerName(jobName)).
            startNow().
            build();

    try {
        scheduler.getScheduler().scheduleJob(jobDetail, trigger);
    } catch (SchedulerException e) {
        throw new JobExecutionException("Could not schedule, aborting", e);
    }
}
 
Example 18
Source Project: griffin   Source File: EntityMocksHelper.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Builds a quartz JobDetailImpl preloaded with the standard Griffin test
 * job-data entries.
 *
 * @param measureJson    serialized measure definition, stored under MEASURE_KEY
 * @param predicatesJson serialized predicates, stored under PREDICATES_KEY
 * @return the populated job detail with key ("name", "group")
 */
public static JobDetailImpl createJobDetail(
    String measureJson,
    String predicatesJson) {
    JobDetailImpl jobDetail = new JobDetailImpl();
    JobKey jobKey = new JobKey("name", "group");
    jobDetail.setKey(jobKey);
    JobDataMap jobDataMap = new JobDataMap();
    jobDataMap.put(MEASURE_KEY, measureJson);
    jobDataMap.put(PREDICATES_KEY, predicatesJson);
    jobDataMap.put(JOB_NAME, "jobName");
    // NOTE(review): if the JOB_NAME constant equals "jobName", the next put
    // is redundant — confirm against the constant's value.
    jobDataMap.put("jobName", "jobName");
    jobDataMap.put(PREDICATE_JOB_NAME, "predicateJobName");
    jobDataMap.put(GRIFFIN_JOB_ID, 1L);
    jobDetail.setJobDataMap(jobDataMap);
    return jobDetail;
}
 
Example 19
Source Project: iaf   Source File: ConfiguredJob.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Executes the configured IBIS job, temporarily renaming the current thread
 * to include the job name for easier log correlation.
 *
 * @param context quartz context carrying the IbisManager and JobDef entries
 * @throws JobExecutionException wrapping any failure; refire is disabled
 */
public void execute(JobExecutionContext context) throws JobExecutionException {
	String ctName = Thread.currentThread().getName();
	try {
		JobDataMap dataMap = context.getJobDetail().getJobDataMap();
		IbisManager ibisManager = (IbisManager)dataMap.get(MANAGER_KEY);
		JobDef jobDef = (JobDef)dataMap.get(JOBDEF_KEY);
		Thread.currentThread().setName(jobDef.getName() + "["+ctName+"]");
		log.info(getLogPrefix(jobDef) + "executing");
		jobDef.executeJob(ibisManager);
		log.debug(getLogPrefix(jobDef) + "completed");
	}
	catch (Exception e) {
		// Pass the throwable as a separate argument so the stack trace is
		// logged; log.error(e) alone logs only the message in many layouts.
		log.error(e.getMessage(), e);
		throw new JobExecutionException(e, false);
	}
	finally {
		// Always restore the original thread name.
		Thread.currentThread().setName(ctName);
	}
}
 
Example 20
Source Project: sakai   Source File: AutoRun.java    License: Educational Community License v2.0 5 votes vote down vote up
/**
 * On startup, registers and immediately triggers each configured job when
 * the enabling config flag is set (or no flag is configured).
 */
public void init() {
    if (config == null || serverConfigurationService.getBoolean(config, false)) {
        log.info("AutoRun running");
        Scheduler scheduler = schedulerManager.getScheduler();

        for (JobBeanWrapper job : startup) {
            try {
                JobDataMap jobData = new JobDataMap();
                jobData.put(JobBeanWrapper.SPRING_BEAN_NAME, job.getBeanId());
                jobData.put(JobBeanWrapper.JOB_NAME, job.getJobName());

                JobDetail jobDetail = JobBuilder.newJob(job.getJobClass())
                        .withIdentity(job.getJobName(), null)
                        .setJobData(jobData)
                        .build();

                // Non durable job that will get removed
                scheduler.addJob(jobDetail, true, true);
                scheduler.triggerJob(jobDetail.getKey());
                log.info("Triggered job: {}", job.getJobName());
            } catch (SchedulerException se) {
                // Log the job that failed, not the entire startup list.
                log.warn("Failed to run job: {}", job.getJobName(), se);
            }

        }
    }
}
 
Example 21
Source Project: MyCommunity   Source File: QuartzConfig.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Trigger that periodically fires the post-score refresh job.
 *
 * @param postScoreRefreshJobDetail the job detail to fire
 * @return the configured simple-trigger factory bean
 */
public SimpleTriggerFactoryBean postScoreRefreshTrigger(JobDetail postScoreRefreshJobDetail) {
    SimpleTriggerFactoryBean trigger = new SimpleTriggerFactoryBean();
    trigger.setJobDetail(postScoreRefreshJobDetail);
    trigger.setName("postScoreRefreshTrigger");
    trigger.setGroup("communityTriggerGroup");
    // Fire every 2 hours (an earlier revision used 5 minutes).
    trigger.setRepeatInterval(1000 * 60 * 120);
    trigger.setJobDataMap(new JobDataMap());
    return trigger;
}
 
Example 22
Source Project: SuitAgent   Source File: SNMPPluginJob.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Quartz entry point for SNMP v3 metric collection: reads the plugin and
 * user list from the job data map and hands collection off to a worker
 * thread.
 *
 * @param context quartz execution context
 * @throws JobExecutionException declared by the Job interface; not thrown here
 */
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    long timestamp = System.currentTimeMillis() / 1000;
    JobDataMap jobDataMap = context.getJobDetail().getJobDataMap();
    String pluginName = jobDataMap.getString("pluginName");
    try {
        SNMPV3Plugin plugin = (SNMPV3Plugin) jobDataMap.get("pluginObject");
        List<SNMPV3UserInfo> jobUsers = (List<SNMPV3UserInfo>) jobDataMap.get("userInfoList");
        MetricsCommon metricsValue = new SNMPV3MetricsValue(plugin,jobUsers,timestamp);
        // SNMP metric collection takes a long time, so run it asynchronously.
        ExecuteThreadUtil.execute(new JobThread(metricsValue,"snmp v3 job thread"));
    } catch (Exception e) {
        log.error("插件 {} 运行异常",pluginName,e);
    }
}
 
Example 23
Source Project: boubei-tss   Source File: AbstractJob.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Quartz entry point: resolves this job's config string and id from the data
 * map (both stored under keys derived from the job name) and runs the job.
 *
 * @param context quartz execution context
 * @throws JobExecutionException declared by the Job interface; not thrown here
 */
public void execute(JobExecutionContext context) throws JobExecutionException {
    initContext();
    auto = true;

    JobDetail jobDetail = context.getJobDetail();
    String jobName = jobDetail.getKey().getName();

    JobDataMap dataMap = jobDetail.getJobDataMap();
    String jobConfig = (String) dataMap.get(jobName);
    Long jobID = (Long) dataMap.get(jobName + "-ID");

    log.info("Job[" + jobName + "] starting...");

    excuting(jobName, jobConfig, jobID);
}
 
Example 24
Source Project: nexus-public   Source File: JobStoreImpl.java    License: Eclipse Public License 1.0 5 votes vote down vote up
/**
 * A {@link TriggerEntity} is orphaned if its owner isn't in the cluster OR it's limited to a node not in the cluster.
 * If there is no cluster, it's never orphaned.
 *
 * @param entity the trigger entity to test
 * @return true when clustered and neither the limiting node nor the owner is a cluster member
 */
private boolean isOrphaned(final TriggerEntity entity) {
  if (isClustered()) {
    Set<String> memberIds = nodeAccess.getMemberIds();
    JobDataMap triggerDetail = entity.getValue().getJobDataMap();
    String limitedNodeId = triggerDetail.getString(LIMIT_NODE_KEY);
    String owner = triggerDetail.getString(NODE_ID);
    // A node restriction, when present, takes precedence over ownership.
    return limitedNodeId != null ? !memberIds.contains(limitedNodeId) : !memberIds.contains(owner);
  }
  return false;
}
 
Example 25
/**
 * Creates the job instance, then populates its bean properties from the
 * scheduler context, the job detail's data map, and the trigger's data map
 * (later sources override earlier ones).
 *
 * @param bundle    fired trigger bundle
 * @param scheduler owning scheduler
 * @return the initialised job
 * @throws SchedulerException if instantiation or property binding fails
 */
@Override
public Job newJob(TriggerFiredBundle bundle, Scheduler scheduler) throws SchedulerException {
    Job job = super.newJob(bundle, scheduler);

    JobDataMap mergedData = new JobDataMap();
    mergedData.putAll(scheduler.getContext());
    mergedData.putAll(bundle.getJobDetail().getJobDataMap());
    mergedData.putAll(bundle.getTrigger().getJobDataMap());
    setBeanProps(job, mergedData);

    return job;
}
 
Example 26
Source Project: cachecloud   Source File: InspectorJob.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Resolves the InspectHandler named by the "inspectorType" job data entry
 * ("host" or "app") from the Spring context, runs it, and logs the run time.
 *
 * @param context quartz execution context
 */
@Override
public void action(JobExecutionContext context) {
    try {
        long start = System.currentTimeMillis();
        SchedulerContext schedulerContext = context.getScheduler().getContext();
        ApplicationContext applicationContext = (ApplicationContext) schedulerContext.get(APPLICATION_CONTEXT_KEY);

        // 应用相关
        JobDataMap jobDataMap = context.getMergedJobDataMap();
        String inspectorType = MapUtils.getString(jobDataMap, "inspectorType");
        if (StringUtils.isBlank(inspectorType)) {
            logger.error("=====================InspectorJob:inspectorType is null=====================");
            return;
        }

        // Map the inspector type onto its Spring-managed handler bean.
        InspectHandler inspectHandler;
        switch (inspectorType) {
            case "host":
                inspectHandler = applicationContext.getBean("hostInspectHandler", InspectHandler.class);
                break;
            case "app":
                inspectHandler = applicationContext.getBean("appInspectHandler", InspectHandler.class);
                break;
            default:
                logger.error("=====================InspectorJob:inspectorType not match:{}=====================", inspectorType);
                return;
        }

        inspectHandler.handle();
        long end = System.currentTimeMillis();
        logger.info("=====================InspectorJob {} Done! cost={} ms=====================",
                inspectHandler.getClass().getSimpleName(), (end - start));
    } catch (Exception e) {
        logger.error(e.getMessage(), e);
        throw new RuntimeException(e);
    }
}
 
Example 27
Source Project: sakai   Source File: SynchronizationJob.java    License: Educational Community License v2.0 5 votes vote down vote up
/**
 * Runs every configured DataProcessor through its init/pre/process/post
 * lifecycle, emailing results and resetting state even when a stage fails.
 *
 * @param jec quartz execution context; its merged data map seeds each processor
 * @throws JobExecutionException if no data processors have been configured
 */
public void executeInternal(JobExecutionContext jec) throws JobExecutionException {
    log.info("Starting Integration Job");

    JobDataMap jdm = jec.getMergedJobDataMap();

    if (dataProcessors == null) {
        throw new JobExecutionException("Data processors list has not been set.");
    }

    for (DataProcessor dp : dataProcessors) {
        ProcessorState state = null;
        try {
            state = dp.init(jdm);
            dp.preProcess(state);
            dp.process(state);
            dp.postProcess(state);
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        } finally {
            // Always report and reset, even when a stage failed.
            sendEmail(dp, state);
            if (state != null) {
                state.reset();
            }
        }
    }

    log.info("Integration Job Complete");
}
 
Example 28
/**
 * Get the job detail.
 *
 * @return - the job detail.
 */
private JobDetail getJobDetail()
{
    // Store this action instance in the data map so the job can call back.
    JobDataMap jobDataMap = new JobDataMap();
    jobDataMap.put(ACTION_JOB_DATA_MAP_KEY, this);

    return JobBuilder.newJob()
            .withIdentity(getJobName(), getJobGroup())
            .usingJobData(jobDataMap)
            .ofType(JobDefinition.class)
            .build();
}
 
Example 29
/**
 * Copies the TracEE context (TPIC header) into the quartz JobDataMap so the
 * context propagates to the asynchronously dispatched job.
 *
 * @param jobDataMap the job data map to enrich
 */
public void injectContext(JobDataMap jobDataMap) {
	final TraceeFilterConfiguration configuration = backend.getConfiguration(profile);

	if (!backend.isEmpty() && configuration.shouldProcessContext(AsyncDispatch)) {
		// Reuse the configuration fetched above instead of resolving it again.
		jobDataMap.put(TraceeConstants.TPIC_HEADER, configuration.filterDeniedParams(backend.copyToMap(), AsyncDispatch));
	}
}
 
Example 30
Source Project: spring-boot-quartz-demo   Source File: SimpleJob.java    License: MIT License 5 votes vote down vote up
/**
 * Demo job: prints job metadata, reads the "myKey" entry from the merged
 * data map, then polls (2 s sleeps) until {@code toStopFlag} is cleared.
 *
 * @param jobExecutionContext quartz execution context
 * @throws JobExecutionException declared by the base class; not thrown here
 */
@Override
protected void executeInternal(JobExecutionContext jobExecutionContext) throws JobExecutionException {
	JobKey key = jobExecutionContext.getJobDetail().getKey();
	System.out.println("Simple Job started with key :" + key.getName() + ", Group :"+key.getGroup() + " , Thread Name :"+Thread.currentThread().getName());
	
	System.out.println("======================================");
	System.out.println("Accessing annotation example: "+jobService.getAllJobs());
	List<Map<String, Object>> list = jobService.getAllJobs();
	System.out.println("Job list :"+list);
	System.out.println("======================================");
	
	//*********** For retrieving stored key-value pairs ***********/
	JobDataMap dataMap = jobExecutionContext.getMergedJobDataMap();
	String myValue = dataMap.getString("myKey");
	System.out.println("Value:" + myValue);

	while(toStopFlag){
		try {
			System.out.println("Test Job Running... Thread Name :"+Thread.currentThread().getName());
			Thread.sleep(2000);
		} catch (InterruptedException e) {
			// Restore the interrupt status so the scheduler (and any caller)
			// can observe the interruption; swallowing it would make this
			// loop effectively uninterruptible.
			Thread.currentThread().interrupt();
			e.printStackTrace();
		}
	}
	System.out.println("Thread: "+ Thread.currentThread().getName() +" stopped.");
}