Java Code Examples for org.quartz.JobExecutionContext#getMergedJobDataMap()

The following examples show how to use org.quartz.JobExecutionContext#getMergedJobDataMap(). Each example comes from an open-source project; the source file and license are noted above each listing.
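
For context, the "merged" map is the union of the JobDetail's JobDataMap and the firing Trigger's JobDataMap, with trigger-level entries overriding job-level entries of the same key. The merged map is a copy, so writes to it are not persisted; jobs that need stateful data write back through context.getJobDetail().getJobDataMap() instead, as the FileScanJob example below does. A minimal, self-contained sketch of the merge behavior (class and key names here are illustrative, not taken from any of the projects below):

import org.quartz.*;
import org.quartz.impl.StdSchedulerFactory;

public class MergedMapDemo implements Job {

    @Override
    public void execute(JobExecutionContext context) {
        JobDataMap merged = context.getMergedJobDataMap();
        // "source" was set on both the job and the trigger; the trigger's value wins
        System.out.println(merged.getString("source")); // -> "trigger"
        System.out.println(merged.getInt("retries"));   // -> 3 (job-level only)
    }

    public static void main(String[] args) throws SchedulerException {
        Scheduler scheduler = StdSchedulerFactory.getDefaultScheduler();

        JobDetail job = JobBuilder.newJob(MergedMapDemo.class)
                .withIdentity("demoJob")
                .usingJobData("source", "job-detail") // overridden by the trigger below
                .usingJobData("retries", 3)
                .build();

        Trigger trigger = TriggerBuilder.newTrigger()
                .withIdentity("demoTrigger")
                .usingJobData("source", "trigger")
                .startNow()
                .build();

        scheduler.scheduleJob(job, trigger);
        scheduler.start();
    }
}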
Example 1
Source File: SendMailJob.java    From AsuraFramework with Apache License 2.0
/**
 * @see org.quartz.Job#execute(org.quartz.JobExecutionContext)
 */
public void execute(JobExecutionContext context)
    throws JobExecutionException {

    JobDataMap data = context.getMergedJobDataMap();

    MailInfo mailInfo = populateMailInfo(data, createMailInfo());
    
    getLog().info("Sending message " + mailInfo);

    try {
        MimeMessage mimeMessage = prepareMimeMessage(mailInfo);
        
        Transport.send(mimeMessage);
    } catch (MessagingException e) {
        throw new JobExecutionException("Unable to send mail: " + mailInfo,
                e, false);
    }

}
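
A hedged sketch of how such a mail job might be scheduled so that populateMailInfo finds its settings in the merged map. It uses the Quartz 2.x builder API and the PROP_* constants of Quartz's stock SendMailJob; the AsuraFramework variant may expect different keys:

JobDetail mailJob = JobBuilder.newJob(SendMailJob.class)
        .withIdentity("nightlyReportMail", "mail")
        .usingJobData(SendMailJob.PROP_SMTP_HOST, "smtp.example.com")
        .usingJobData(SendMailJob.PROP_SENDER, "noreply@example.com")
        .usingJobData(SendMailJob.PROP_RECIPIENT, "ops@example.com")
        .usingJobData(SendMailJob.PROP_SUBJECT, "Nightly report")
        .usingJobData(SendMailJob.PROP_MESSAGE, "The nightly report finished.")
        .build();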
 
Example 2
Source File: Assignment12ConversionJob.java    From sakai with Educational Community License v2.0
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    log.info("<===== Assignment Conversion Job start =====>");

    // never run as a recovery
    if (context.isRecovering()) {
        log.warn("<===== Assignment Conversion Job doesn't support recovery, job will terminate... =====>");
    } else {
        JobDataMap map = context.getMergedJobDataMap();
        Integer size = Integer.parseInt((String) map.get(SIZE_PROPERTY));
        Integer number = Integer.parseInt((String) map.get(NUMBER_PROPERTY));
        assignmentConversionService.runConversion(number, size);
    }

    log.info("<===== Assignment Conversion Job end =====>");
}
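
SIZE_PROPERTY and NUMBER_PROPERTY are read back as strings and parsed by hand, so they must be seeded as strings. A sketch of the scheduling side (Quartz 2.x builder API, assuming the two constants are accessible; the values are arbitrary):

JobDetail conversionJob = JobBuilder.newJob(Assignment12ConversionJob.class)
        .withIdentity("assignmentConversion", "sakai")
        .usingJobData(Assignment12ConversionJob.SIZE_PROPERTY, "100")  // stored as String,
        .usingJobData(Assignment12ConversionJob.NUMBER_PROPERTY, "10") // hence Integer.parseInt above
        .build();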
 
Example 3
Source File: CronJob.java    From spring-boot-quartz-demo with MIT License
@Override
protected void executeInternal(JobExecutionContext jobExecutionContext) throws JobExecutionException {
	JobKey key = jobExecutionContext.getJobDetail().getKey();
	System.out.println("Cron Job started with key :" + key.getName() + ", Group :"+key.getGroup() + " , Thread Name :"+Thread.currentThread().getName() + " ,Time now :"+new Date());
	
	System.out.println("======================================");
	System.out.println("Accessing annotation example: "+jobService.getAllJobs());
	List<Map<String, Object>> list = jobService.getAllJobs();
	System.out.println("Job list :"+list);
	System.out.println("======================================");
	
	//*********** For retrieving stored key-value pairs ***********/
	JobDataMap dataMap = jobExecutionContext.getMergedJobDataMap();
	String myValue = dataMap.getString("myKey");
	System.out.println("Value:" + myValue);

	System.out.println("Thread: "+ Thread.currentThread().getName() +" stopped.");
}
 
Example 4
Source File: RetentionJob.java    From chronix.server with Apache License 2.0
/**
 * Executes the job that calls the retention plugin.
 *
 * @param context the current job context
 * @throws JobExecutionException if the solr server could not be reached.
 */
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    LOGGER.info("Starting retention job");
    JobDataMap data = context.getMergedJobDataMap();

    String url = data.getString(RetentionConstants.RETENTION_URL);
    HttpGet httpget = new HttpGet(url);

    // try-with-resources ensures the response is always closed
    try (CloseableHttpResponse response = httpClient.execute(httpget)) {
        LOGGER.info("Response was {}", response);
    } catch (IOException e) {
        throw new JobExecutionException("Could not execute http get request " + httpget, e);
    }

}
 
Example 5
Source File: NativeJob.java    From AsuraFramework with Apache License 2.0
public void execute(JobExecutionContext context)
    throws JobExecutionException {

    JobDataMap data = context.getMergedJobDataMap();
    
    String command = data.getString(PROP_COMMAND);

    String parameters = data.getString(PROP_PARAMETERS);

    if (parameters == null) {
        parameters = "";
    }

    boolean wait = true;
    if(data.containsKey(PROP_WAIT_FOR_PROCESS)) {
        wait = data.getBooleanValue(PROP_WAIT_FOR_PROCESS);
    }
    boolean consumeStreams = false;
    if(data.containsKey(PROP_CONSUME_STREAMS)) {
        consumeStreams = data.getBooleanValue(PROP_CONSUME_STREAMS);
    }
        
    Integer exitCode = this.runNativeCommand(command, parameters, wait, consumeStreams);
    context.setResult(exitCode);
    
}
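
NativeJob is Quartz's stock job for launching an external command. A sketch of how the data-map keys read above might be seeded (Quartz 2.x builder API; the PROP_* constants are the same ones the execute method uses):

JobDetail nativeJob = JobBuilder.newJob(NativeJob.class)
        .withIdentity("diskUsage", "native")
        .usingJobData(NativeJob.PROP_COMMAND, "df")          // executable to run
        .usingJobData(NativeJob.PROP_PARAMETERS, "-h")       // arguments
        .usingJobData(NativeJob.PROP_WAIT_FOR_PROCESS, true) // block until the process exits
        .usingJobData(NativeJob.PROP_CONSUME_STREAMS, true)  // drain stdout/stderr
        .build();

Because the job ends with context.setResult(exitCode), listeners can read the exit code from the JobExecutionContext after execution.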
 
Example 6
Source File: InspectorJob.java    From cachecloud with Apache License 2.0
@Override
public void action(JobExecutionContext context) {
    try {
        long start = System.currentTimeMillis();
        SchedulerContext schedulerContext = context.getScheduler().getContext();
        ApplicationContext applicationContext = (ApplicationContext) schedulerContext.get(APPLICATION_CONTEXT_KEY);
        // application-related: resolve the handler for the requested inspector type
        InspectHandler inspectHandler;
        JobDataMap jobDataMap = context.getMergedJobDataMap();
        String inspectorType = MapUtils.getString(jobDataMap, "inspectorType");
        if (StringUtils.isBlank(inspectorType)) {
            logger.error("=====================InspectorJob:inspectorType is null=====================");
            return;
        } else if (inspectorType.equals("host")) {
            inspectHandler = applicationContext.getBean("hostInspectHandler", InspectHandler.class);
        } else if (inspectorType.equals("app")) {
            inspectHandler = applicationContext.getBean("appInspectHandler", InspectHandler.class);
        } else {
            logger.error("=====================InspectorJob:inspectorType not match:{}=====================", inspectorType);
            return;
        }
        inspectHandler.handle();
        long end = System.currentTimeMillis();
        logger.info("=====================InspectorJob {} Done! cost={} ms=====================",
                inspectHandler.getClass().getSimpleName(), (end - start));
    } catch (Exception e) {
        logger.error(e.getMessage(), e);
        throw new RuntimeException(e);
    }
}
 
Example 7
Source File: SynchronizationJob.java    From sakai with Educational Community License v2.0
public void executeInternal(JobExecutionContext jec) throws JobExecutionException {
    log.info("Starting Integration Job");

    JobDataMap jdm = jec.getMergedJobDataMap();

    if (dataProcessors != null) {
        for (DataProcessor dp : dataProcessors) {
            ProcessorState state = null;

            try {
                state = dp.init(jdm);
                dp.preProcess(state);
                dp.process(state);
                dp.postProcess(state);
            } catch (Exception e) {
                log.error(e.getMessage(), e);
            } finally {
                sendEmail(dp, state);
                if (state != null) {
                    state.reset();
                }
            }
        }
    } else {
        throw new JobExecutionException("Data processors list has not been set.");
    }

    log.info("Integration Job Complete");
}
 
Example 8
Source File: EchoJob.java    From spring-boot-cookbook with Apache License 2.0
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    LOGGER.info("================job start================");
    JobDataMap jobDataMap = context.getMergedJobDataMap();
    echoBiz.echo(jobDataMap.getIntValue(RANDOM_VALUE));
    echoBiz.echo(jobDataMap.getString(CREATE_TIME));
    LOGGER.info("================job end================");
}
 
Example 9
Source File: MachineMonitorJob.java    From cachecloud with Apache License 2.0
@Override
public void action(JobExecutionContext context) {
    try {
        JobDataMap dataMap = context.getMergedJobDataMap();
        String ip = dataMap.getString(ConstUtils.HOST_KEY);
        long hostId = dataMap.getLong(ConstUtils.HOST_ID_KEY);

        SchedulerContext schedulerContext = context.getScheduler().getContext();
        ApplicationContext applicationContext = (ApplicationContext) schedulerContext.get(APPLICATION_CONTEXT_KEY);
        MachineCenter machineCenter = applicationContext.getBean("machineCenter", MachineCenter.class);
        machineCenter.asyncMonitorMachineStats(hostId, ip);
    } catch (SchedulerException e) {
        logger.error(e.getMessage(), e);
    }
}
 
Example 10
Source File: ZKJob.java    From zkdoctor with Apache License 2.0
@Override
public void action(JobExecutionContext context) {
    try {
        JobDataMap dataMap = context.getMergedJobDataMap();
        int clusterId = dataMap.getInt(SchedulerConstant.CLUSTER_KEY);
        collectService.collectZKInfo(clusterId);
    } catch (Exception e) {
        LOGGER.error("ZKJob execute failed.", e);
    }
}
 
Example 11
Source File: MailFlowJob.java    From elasticsearch-imap with Apache License 2.0
@Override
public void execute(final JobExecutionContext context) throws JobExecutionException {

    final JobKey key = context.getJobDetail().getKey();

    logger.debug("Executing mail flow job {}", key.toString());

    final JobDataMap data = context.getMergedJobDataMap();

    mailSource = (MailSource) data.get("mailSource");
    pattern = (Pattern) data.get("pattern");
    
    Client client = (Client) data.get("client");

    try {
        IMAPImporter.waitForYellowCluster(client);
        execute();
    } catch (final Exception e) {
        logger.error("Error in mail flow job {}: {}", key, e.toString(), e);
        final JobExecutionException e2 = new JobExecutionException(e);
        // uncomment to make this job refire immediately:
        // e2.refireImmediately();
        throw e2;
    }

    logger.debug("End of mail flow job with no errors {}", key.toString());

}
 
Example 12
Source File: SimpleJob.java    From spring-boot-quartz-demo with MIT License
@Override
protected void executeInternal(JobExecutionContext jobExecutionContext) throws JobExecutionException {
	JobKey key = jobExecutionContext.getJobDetail().getKey();
	System.out.println("Simple Job started with key :" + key.getName() + ", Group :"+key.getGroup() + " , Thread Name :"+Thread.currentThread().getName());
	
	System.out.println("======================================");
	System.out.println("Accessing annotation example: "+jobService.getAllJobs());
	List<Map<String, Object>> list = jobService.getAllJobs();
	System.out.println("Job list :"+list);
	System.out.println("======================================");
	
	//*********** For retrieving stored key-value pairs ***********/
	JobDataMap dataMap = jobExecutionContext.getMergedJobDataMap();
	String myValue = dataMap.getString("myKey");
	System.out.println("Value:" + myValue);

	//*********** For retrieving a stored object (the stored bytes get deserialized) ***********/
	/*
	SchedulerContext schedulerContext = null;
	try {
		schedulerContext = jobExecutionContext.getScheduler().getContext();
	} catch (SchedulerException e1) {
		e1.printStackTrace();
	}
	YourClass yourClassObject = (YourClass) schedulerContext.get("storedObjectKey");
	*/

	while(toStopFlag){
		try {
			System.out.println("Test Job Running... Thread Name :"+Thread.currentThread().getName());
			Thread.sleep(2000);
		} catch (InterruptedException e) {
			e.printStackTrace();
		}
	}
	System.out.println("Thread: "+ Thread.currentThread().getName() +" stopped.");
}
 
Example 13
Source File: FileScanJob.java    From AsuraFramework with Apache License 2.0
/** 
 * @see org.quartz.Job#execute(org.quartz.JobExecutionContext)
 */
public void execute(JobExecutionContext context) throws JobExecutionException {
    JobDataMap mergedJobDataMap = context.getMergedJobDataMap();
    SchedulerContext schedCtxt = null;
    try {
        schedCtxt = context.getScheduler().getContext();
    } catch (SchedulerException e) {
        throw new JobExecutionException("Error obtaining scheduler context.", e, false);
    }
    
    String fileName = mergedJobDataMap.getString(FILE_NAME);
    String listenerName = mergedJobDataMap.getString(FILE_SCAN_LISTENER_NAME);
    
    if(fileName == null) {
        throw new JobExecutionException("Required parameter '" + 
                FILE_NAME + "' not found in merged JobDataMap");
    }
    if(listenerName == null) {
        throw new JobExecutionException("Required parameter '" + 
                FILE_SCAN_LISTENER_NAME + "' not found in merged JobDataMap");
    }

    FileScanListener listener = (FileScanListener)schedCtxt.get(listenerName);
    
    if(listener == null) {
        throw new JobExecutionException("FileScanListener named '" + 
                listenerName + "' not found in SchedulerContext");
    }
    
    long lastDate = -1;
    if(mergedJobDataMap.containsKey(LAST_MODIFIED_TIME)) {
        lastDate = mergedJobDataMap.getLong(LAST_MODIFIED_TIME);
    }
    
    long newDate = getLastModifiedDate(fileName);

    if(newDate < 0) {
        log.warn("File '"+fileName+"' does not exist.");
        return;
    }
    
    if(lastDate > 0 && (newDate != lastDate)) {
        // notify call back...
        log.info("File '"+fileName+"' updated, notifying listener.");
        listener.fileUpdated(fileName); 
    } else if (log.isDebugEnabled()) {
        log.debug("File '"+fileName+"' unchanged.");
    }
    
    // It is the JobDataMap on the JobDetail which is actually stateful
    context.getJobDetail().getJobDataMap().put(LAST_MODIFIED_TIME, newDate);
}
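
Note that the FileScanListener is resolved by name from the SchedulerContext rather than being placed in the JobDataMap, whose contents should generally stay serializable. A sketch of the scheduling side (Quartz 2.x builder API; illustrative names, with MyFileScanListener standing in for a hypothetical FileScanListener implementation):

scheduler.getContext().put("myFileListener", new MyFileScanListener());

JobDetail scanJob = JobBuilder.newJob(FileScanJob.class)
        .withIdentity("configFileScan")
        .usingJobData(FileScanJob.FILE_NAME, "/etc/myapp/app.properties")
        .usingJobData(FileScanJob.FILE_SCAN_LISTENER_NAME, "myFileListener")
        .build();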
 
Example 14
Source File: DynamicExpressionObserverJob.java    From deltaspike with Apache License 2.0
@Override
public void execute(JobExecutionContext context) throws JobExecutionException
{
    JobDataMap jobDataMap = context.getMergedJobDataMap();
    String configExpression = jobDataMap.getString(CONFIG_EXPRESSION_KEY);
    String triggerId = jobDataMap.getString(TRIGGER_ID_KEY);
    String activeCronExpression = jobDataMap.getString(ACTIVE_CRON_EXPRESSION_KEY);

    String configKey = configExpression.substring(1, configExpression.length() - 1);
    String configuredValue = ConfigResolver.getPropertyAwarePropertyValue(configKey, activeCronExpression);

    if (!activeCronExpression.equals(configuredValue))
    {
        //both #put calls are needed currently
        context.getJobDetail().getJobDataMap().put(ACTIVE_CRON_EXPRESSION_KEY, configuredValue);
        context.getTrigger().getJobDataMap().put(ACTIVE_CRON_EXPRESSION_KEY, configuredValue);

        BeanProvider.injectFields(this);

        JobKey observerJobKey = context.getJobDetail().getKey();
        String observedJobName = observerJobKey.getName()
            .substring(0, observerJobKey.getName().length() - OBSERVER_POSTFIX.length());
        JobKey observedJobKey = new JobKey(observedJobName, observerJobKey.getGroup());

        Trigger trigger = TriggerBuilder.newTrigger()
                .withIdentity(triggerId)
                .forJob(observedJobName, observedJobKey.getGroup())
                .withSchedule(CronScheduleBuilder.cronSchedule(configuredValue))
                .build();

        //use rescheduleJob instead of delete + add
        //(unwrap is ok here, because this class will only get active in case of a quartz-scheduler)
        org.quartz.Scheduler quartzScheduler = scheduler.unwrap(org.quartz.Scheduler.class);
        try
        {
            quartzScheduler.rescheduleJob(trigger.getKey(), trigger);
        }
        catch (SchedulerException e)
        {
            LOG.warning("failed to update cron-expression for " + observedJobKey);
        }
    }
}
 
Example 15
Source File: NavigableEventLogListener.java    From sakai with Educational Community License v2.0
private void info (EVENTTYPE eventType, Trigger trig, JobExecutionContext context, JobExecutionException exception, CompletedExecutionInstruction instructionCode) {
    JobDetail
        detail = (context != null)?context.getJobDetail():null;
    final JobDataMap
        dataMap = (context != null)?context.getMergedJobDataMap():null;
    final String
        jobName = (detail != null)?detail.getKey().getName():null,
        jobDesc = (detail != null)?detail.getDescription():null;
    final Class
        jobClass = (detail != null)?detail.getJobClass():null;
    final Trigger
        trigger = (trig != null)?trig:((context != null)?context.getTrigger():null);
    final String
        trigName = (trigger != null)?trigger.getKey().getName():null,
        trigDesc = (trigger != null)?trigger.getDescription():null;
    final Date
        trigStart = (trigger != null)?trigger.getStartTime():null,
        trigEnd = (trigger != null)?trigger.getEndTime():null;

    StringBuilder
        sb = new StringBuilder();

    switch (eventType)
    {
        case JOB_EXECUTING:
        {
            sb.append("Job Executing: [");
            sb.append("name: ").append(jobName).append(", description: ").append((jobDesc != null)?jobDesc:"")
                    .append(", class: ").append(jobClass.getName());
            sb.append("]");
            break;
        }
        case JOB_VETOED:
        {
            sb.append("Job Vetoed: [");
            sb.append("name: ").append(jobName).append(", description: ").append((jobDesc != null)?jobDesc:"")
                    .append(", class: ").append(jobClass.getName());
            break;
        }
        case JOB_EXECUTED:
        {
            sb.append("Job Executed: [");
            sb.append("name: ").append(jobName).append(", description: ").append((jobDesc != null)?jobDesc:"")
              .append(", class: ").append(jobClass.getName());

            if (exception != null)
            {
                sb.append (", exception: ").append(exception.getMessage());
                if (exception.getCause() != null)
                {
                  sb.append(", exception cause: ").append(exception.getCause().getClass().getName());
                }
            }
            sb.append("]");

            break;
        }
        case TRIGGER_FIRED:
        {
            sb.append("Trigger Fired: [");
            sb.append("trigger: ").append(trigName).append(", trigger description: ").append((trigDesc != null)?trigDesc:"")
              .append(", start: ").append((trigStart != null)?trigStart.toString():null)
              .append(", end: ").append((trigEnd != null)?trigEnd.toString():null);
            sb.append(", job: ").append(jobName).append(", job description: ").append((jobDesc != null)?jobDesc:"")
                    .append(", class: ").append(jobClass.getName());
            sb.append("]");
            break;
        }
        case TRIGGER_MISFIRED:
        {
            sb.append("Trigger Misfired: [");
            sb.append("trigger: ").append(trigName).append(", trigger description: ").append((trigDesc != null)?trigDesc:"")
              .append(", start: ").append((trigStart!=null)?trigStart.toString():null)
              .append(", end: ").append((trigEnd!=null)?trigEnd.toString():null);
            sb.append("]");
            break;
        }
        case TRIGGER_COMPLETED:
        {
            sb.append("Trigger Completed: [");
            sb.append("trigger: ").append(trigName).append(", trigger description: ").append((trigDesc != null)?trigDesc:"")
              .append(", start: ").append((trigStart!=null)?trigStart.toString():null)
              .append(", end: ").append((trigEnd!=null)?trigEnd.toString():null);
            sb.append(", job: ").append(jobName).append(", job description: ").append((jobDesc != null)?jobDesc:"")
              .append(", class: ").append(jobClass.getName())
              .append(", execution result: ").append(instructionCode);
            sb.append("]");
            break;
        }
    }
    if (log.isDebugEnabled())
    {
    	log.debug(sb.toString());
    }
}
 
Example 16
Source File: MethodInvokeJob.java    From quartz-web with Apache License 2.0
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    Date startDate = new Date();
    LOG.debug("methodInvokeJob start : " + DateUtils.formart(startDate));
    long startTime = startDate.getTime();

    JobDataMap jobDataMap = context.getMergedJobDataMap();
    //Object methodInvokerObj = jobDataMap.get("methodInvoker");
    Object jobClassObj = jobDataMap.get("jobClass");
    Object constructorArgumentsObj = jobDataMap.get("constructorArguments");
    Object jobClassMethodNameObj = jobDataMap.get("jobClassMethodName");
    Object jobClassMethodArgsObj = jobDataMap.get("jobClassMethodArgs");
    try {
        String jobClass = (String) jobClassObj;
        Object[] constructorArguments = (Object[]) constructorArgumentsObj;
        String jobClassMethodName = (String) jobClassMethodNameObj;
        Object[] jobClassMethodArgs = (Object[]) jobClassMethodArgsObj;
        Object jobBean;

        LOG.debug("methodInvokeJob jobClass:" + jobClass);
        LOG.debug("methodInvokeJob jobClassMethodName:" + jobClassMethodName);

        QuartzBeanManagerFacade quartzBeanManagerFacade = QuartzBeanManagerFacade.getInstance();

        if (constructorArguments != null && constructorArguments.length > 0) {
            jobBean = quartzBeanManagerFacade.getBean(jobClass, constructorArguments);
        } else {
            jobBean = quartzBeanManagerFacade.getBean(jobClass);
        }

        MethodInvoker methodInvoker = new MethodInvoker();
        methodInvoker.setTargetMethod(jobClassMethodName);
        methodInvoker.setArguments(jobClassMethodArgs);

        methodInvoker.setTargetObject(jobBean);

        boolean prepared = methodInvoker.isPrepared();
        if (!prepared) {
            methodInvoker.prepare();
        }
        Object result = methodInvoker.invoke();
        context.setResult(result);
        Date endDate = new Date();
        long endTime = endDate.getTime();
        LOG.debug("methodInvokeJob end : " + DateUtils.formart(endDate) + "," + (endTime - startTime));

    } catch (Exception e) {
        LOG.error("MethodInvokeJob exception message:" + e.getMessage(), e);
        throw new JobExecutionException(e);
    }
}
 
Example 17
Source File: ClusterStatJob.java    From EserKnife with Apache License 2.0
/**
 * Periodically collects cluster statistics.
 */
@Override
public void action(JobExecutionContext context) {
    try {
        JobDataMap dataMap = context.getMergedJobDataMap();
        Date date = context.getTrigger().getPreviousFireTime();
        String clusterName = dataMap.getString(Constant.CLUSTER_NAME);
        LOGGER.info("Start collecting statistics for " + clusterName + "!");
        NodesStats nodesStats= new NodesStats.Builder().withJvm().withOs().withIndices()
                .withHttp().withTransport().withThreadPool().withFs().build();
        JestResult result = JestManager.getJestClient(clusterName).execute(nodesStats);
        JSONObject json = JSONObject.parseObject(result.getJsonString());
        if (json == null) {
            return;
        }
        asyncService.submitFuture(new CollectionCommonStatHandler(JobKey.buildFutureKey(clusterName, Constant.INDICES, date), clusterName, json, date));
        asyncService.submitFuture(new CollectionIndicesStatHandler(JobKey.buildFutureKey(clusterName, Constant.INDICES, date), clusterName, json, date));
        asyncService.submitFuture(new CollectionHttpStatHandler(JobKey.buildFutureKey(clusterName, Constant.HTTP, date), clusterName, json, date));
        asyncService.submitFuture(new CollectionJVMStatHandler(JobKey.buildFutureKey(clusterName, Constant.JVM_NAME, date), clusterName, json, date));
        asyncService.submitFuture(new CollectionTransportStatHandler(JobKey.buildFutureKey(clusterName, Constant.TRANSPORT, date), clusterName, json, date));
        asyncService.submitFuture(new CollectionThreadPoolStatHandler(JobKey.buildFutureKey(clusterName, Constant.THREAD_POOL, date), clusterName, json, date));
        asyncService.submitFuture(new CollectionOsStatHandler(JobKey.buildFutureKey(clusterName, Constant.OS, date), clusterName, json, date));
        asyncService.submitFuture(new CollectionFsStatHandler(JobKey.buildFutureKey(clusterName, Constant.FS, date), clusterName, json, date));

        // alarm handling
        List<AlarmRule> all = alarmRuleService.getList();
        List<AlarmRule> alarmFilterRule = new ArrayList<AlarmRule>();
        if (CollectionUtils.isEmpty(all)) {
            return;
        }
        for (AlarmRule alarmRule : all) {
            if (clusterName.equals(alarmRule.getClusterName()) && alarmRule.getEnable() > 0) {
                alarmFilterRule.add(alarmRule);
            }
        }
        asyncService.submitFuture(new ClusterNodeAlarm(JobKey.buildFutureKey(clusterName, Constant.NODE_ALARM, date),clusterName,json,alarmFilterRule));
    } catch (Exception e) {
        // pass the exception itself so the stack trace is actually logged
        LOGGER.error("Scheduled cluster stats collection failed", e);
    }
}