Java Code Examples for org.quartz.JobDataMap#containsKey()

The following examples show how to use org.quartz.JobDataMap#containsKey(). You can vote up the examples you find useful or vote down those you don't, and follow the links above each example to reach the original project or source file. You may also check out the related API usage in the sidebar.
Example 1
Source File: NativeJob.java    From AsuraFramework with Apache License 2.0 6 votes vote down vote up
public void execute(JobExecutionContext context)
    throws JobExecutionException {

    // Merged view of the job's and trigger's data maps for this firing.
    JobDataMap jobData = context.getMergedJobDataMap();

    String command = jobData.getString(PROP_COMMAND);

    // The parameter string is optional; treat a missing value as empty.
    String parameters = jobData.getString(PROP_PARAMETERS);
    if (parameters == null) {
        parameters = "";
    }

    // Optional flags: wait for the spawned process (defaults to true) and
    // consume its output streams (defaults to false).
    boolean wait = jobData.containsKey(PROP_WAIT_FOR_PROCESS)
            ? jobData.getBooleanValue(PROP_WAIT_FOR_PROCESS)
            : true;
    boolean consumeStreams = jobData.containsKey(PROP_CONSUME_STREAMS)
            && jobData.getBooleanValue(PROP_CONSUME_STREAMS);

    // Run the native command and expose its exit code as the job result.
    context.setResult(this.runNativeCommand(command, parameters, wait, consumeStreams));
}
 
Example 2
Source File: JobStoreImpl.java    From nexus-public with Eclipse Public License 1.0 6 votes vote down vote up
private boolean isLocal(final TriggerEntity entity) {
  // Non-clustered installations treat every trigger as local.
  if (!isClustered()) {
    return true;
  }

  String localId = nodeAccess.getId();
  JobDataMap triggerDetail = entity.getValue().getJobDataMap();

  // A trigger limited to a specific node is local only on that node.
  if (triggerDetail.containsKey(LIMIT_NODE_KEY)) {
    return localId.equals(triggerDetail.getString(LIMIT_NODE_KEY));
  }

  // All other triggers are local when "owned" by this node.
  return localId.equals(triggerDetail.getString(NODE_ID));
}
 
Example 3
Source File: JobStoreImpl.java    From nexus-public with Eclipse Public License 1.0 6 votes vote down vote up
/**
 * Helper to warn when a limited trigger won't fire because its node is missing.
 */
private boolean isLimitedToMissingNode(final TriggerEntity entity) {
  OperableTrigger trigger = entity.getValue();
  JobDataMap triggerDetail = trigger.getJobDataMap();

  // Only node-limited triggers can be orphaned.
  if (!triggerDetail.containsKey(LIMIT_NODE_KEY)) {
    return false;
  }

  String limitedNodeId = triggerDetail.getString(LIMIT_NODE_KEY);
  // can skip members check here because "local()" has already filtered limited triggers down to those
  // which are either limited to run on the current node, or on a missing node (ie. have been orphaned)
  if (nodeAccess.getId().equals(limitedNodeId)) {
    return false;
  }

  // not limited to this node, so must be an orphaned trigger
  String description = trigger.getDescription();
  if (Strings2.isBlank(description)) {
    description = trigger.getJobKey().getName();
  }
  if (Strings2.isBlank(limitedNodeId)) {
    log.warn("Cannot run task '{}' because it is not configured for HA", description);
  }
  else {
    log.warn("Cannot run task '{}' because it uses node {} which is not a member of this cluster", description,
        limitedNodeId);
  }
  return true;
}
 
Example 4
Source File: QuartzNativeObjectsConverter.java    From Knowage-Server with GNU Affero General Public License v3.0 5 votes vote down vote up
public static org.quartz.JobDetail convertJobToNativeObject(Job spagobiJob) {
	// Copy the SpagoBI job's descriptive attributes onto a native Quartz JobDetail.
	org.quartz.JobDetail quartzJob = new org.quartz.JobDetail();
	quartzJob.setName(spagobiJob.getName());
	quartzJob.setGroup(spagobiJob.getGroupName());
	quartzJob.setDescription(spagobiJob.getDescription());
	quartzJob.setJobClass(spagobiJob.getJobClass());
	quartzJob.setDurability(spagobiJob.isDurable());
	quartzJob.setRequestsRecovery(spagobiJob.isRequestsRecovery());
	quartzJob.setVolatility(spagobiJob.isVolatile());

	JobDataMap parameters = convertParametersToNativeObject(spagobiJob.getParameters());
	// The snapshot flags are reserved keys: fail fast rather than silently
	// overwrite a value the converted parameter map already defines.
	if (parameters.containsKey(MERGE_ALL_SNAPSHOTS)) {
		throw new SpagoBIRuntimeException(
				"An unexpected error occured while converting Job to native object: " + MERGE_ALL_SNAPSHOTS + " property already defined");
	}
	parameters.put(MERGE_ALL_SNAPSHOTS, String.valueOf(spagobiJob.isMergeAllSnapshots()));
	if (parameters.containsKey(COLLATE_SNAPSHOTS)) {
		throw new SpagoBIRuntimeException(
				"An unexpected error occured while converting Job to native object: " + COLLATE_SNAPSHOTS + " property already defined");
	}
	parameters.put(COLLATE_SNAPSHOTS, String.valueOf(spagobiJob.isCollateSnapshots()));
	quartzJob.setJobDataMap(parameters);

	return quartzJob;
}
 
Example 5
Source File: FileScanJob.java    From AsuraFramework with Apache License 2.0 4 votes vote down vote up
/** 
 * @see org.quartz.Job#execute(org.quartz.JobExecutionContext)
 */
public void execute(JobExecutionContext context) throws JobExecutionException {
    JobDataMap jobData = context.getMergedJobDataMap();

    // The scheduler context is where the callback listener is registered.
    SchedulerContext schedulerContext;
    try {
        schedulerContext = context.getScheduler().getContext();
    } catch (SchedulerException e) {
        throw new JobExecutionException("Error obtaining scheduler context.", e, false);
    }

    String fileName = jobData.getString(FILE_NAME);
    String listenerName = jobData.getString(FILE_SCAN_LISTENER_NAME);

    // Both the file to watch and the listener name are mandatory inputs.
    if (fileName == null) {
        throw new JobExecutionException(
                "Required parameter '" + FILE_NAME + "' not found in merged JobDataMap");
    }
    if (listenerName == null) {
        throw new JobExecutionException(
                "Required parameter '" + FILE_SCAN_LISTENER_NAME + "' not found in merged JobDataMap");
    }

    FileScanListener listener = (FileScanListener) schedulerContext.get(listenerName);
    if (listener == null) {
        throw new JobExecutionException(
                "FileScanListener named '" + listenerName + "' not found in SchedulerContext");
    }

    // Timestamp recorded by the previous run, or -1 on the first execution.
    long previousModified = jobData.containsKey(LAST_MODIFIED_TIME)
            ? jobData.getLong(LAST_MODIFIED_TIME)
            : -1L;

    long currentModified = getLastModifiedDate(fileName);
    if (currentModified < 0) {
        log.warn("File '" + fileName + "' does not exist.");
        return;
    }

    if (previousModified > 0 && (currentModified != previousModified)) {
        // notify call back...
        log.info("File '" + fileName + "' updated, notifying listener.");
        listener.fileUpdated(fileName);
    } else if (log.isDebugEnabled()) {
        log.debug("File '" + fileName + "' unchanged.");
    }

    // It is the JobDataMap on the JobDetail which is actually stateful
    context.getJobDetail().getJobDataMap().put(LAST_MODIFIED_TIME, currentModified);
}