org.springframework.batch.core.JobExecution Java Examples

The following examples show how to use org.springframework.batch.core.JobExecution. Each example is taken from an open-source project; the source file, project, and license are noted above the snippet.
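
Before looking at the individual examples, the snippet below is a minimal, self-contained sketch of the typical JobExecution workflow: a Job is launched through a JobLauncher and the returned JobExecution is inspected for its BatchStatus and ExitStatus. The class name, the jobLauncher and job fields, and the "run.id" parameter are placeholders for illustration only and are not taken from any of the projects listed below.

import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.launch.JobLauncher;

public class JobExecutionSketch {

    // jobLauncher and job would normally be injected Spring beans (placeholders here)
    private final JobLauncher jobLauncher;
    private final Job job;

    public JobExecutionSketch(JobLauncher jobLauncher, Job job) {
        this.jobLauncher = jobLauncher;
        this.job = job;
    }

    public boolean runOnce() throws Exception {
        // a unique parameter so the job can be launched repeatedly
        JobParameters parameters = new JobParametersBuilder()
                .addLong("run.id", System.currentTimeMillis())
                .toJobParameters();

        // JobExecution is the handle returned by the launcher; it carries the
        // status, exit status, timestamps and the per-step executions
        JobExecution execution = jobLauncher.run(job, parameters);
        System.out.println("status=" + execution.getStatus()
                + ", exitCode=" + execution.getExitStatus().getExitCode());
        return execution.getStatus() == BatchStatus.COMPLETED;
    }
}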
Example #1
Source File: AlarmJobTest.java    From pinpoint with Apache License 2.0
public static void main(String[] args) throws Exception {
    GenericXmlApplicationContext applicationContext = new GenericXmlApplicationContext("/applicationContext-test.xml");
    JobLauncherTestUtils testLauncher = applicationContext.getBean(JobLauncherTestUtils.class);

    JobExecution jobExecution = testLauncher.launchJob(getParameters());
    BatchStatus status = jobExecution.getStatus();
    assertEquals(BatchStatus.COMPLETED, status);

    applicationContext.close();
}
 
Example #2
Source File: JobCompletionPayRollListener.java    From Software-Architecture-with-Spring-5.0 with MIT License
@Override
public void afterJob(JobExecution jobExecution) {
    if (jobExecution.getStatus() == BatchStatus.COMPLETED) {
        log.info(">>>>> PAY ROLL JOB FINISHED! ");

        jdbcTemplate.query("SELECT PERSON_IDENTIFICATION, CURRENCY, TX_AMMOUNT, ACCOUNT_TYPE, ACCOUNT_ID, TX_DESCRIPTION, FIRST_LAST_NAME FROM PAYROLL",
                (rs, row) -> new PayrollTo(
                        rs.getInt(1),
                        rs.getString(2),
                        rs.getDouble(3),
                        rs.getString(4),
                        rs.getString(5),
                        rs.getString(6),
                        rs.getString(7))
        ).forEach(payroll -> log.info("Found <" + payroll + "> in the database."));
    }
}
 
Example #3
Source File: SettleJobListeners.java    From seed with Apache License 2.0
@Override
public void afterJob(JobExecution jobExecution) {
    if (jobExecution.getStatus() == BatchStatus.COMPLETED) {
        LogUtil.getLogger().info("Batch Job-->[{}-{}]-->completed, TotalDuration[{}]ms", jobExecution.getJobId(), jobExecution.getJobInstance().getJobName(), SystemClockUtil.INSTANCE.now() - jobExecution.getStartTime().getTime());
        LogUtil.getLogger().info("=======================================================================");
    }
}
 
Example #4
Source File: MapLightminJobExecutionDaoTest.java    From spring-batch-lightmin with Apache License 2.0
@Test
public void testFindJobExecutionsAllQueryParameters() {
    this.createJobExecutionsForQuery();
    final String jobName = "queryJob";
    final Integer size = 4;
    final Date startDate = new Date(System.currentTimeMillis() - 100000);
    final Date endDate = new Date(System.currentTimeMillis() + 100000);
    final String exitStatus = ExitStatus.COMPLETED.getExitCode();
    final Map<String, Object> queryParameters = new HashMap<>();
    queryParameters.put(QueryParameterKey.EXIT_STATUS, exitStatus);
    queryParameters.put(QueryParameterKey.START_DATE, startDate);
    queryParameters.put(QueryParameterKey.END_DATE, endDate);
    final List<JobExecution> result = this.mapLightminJobExecutionDao.findJobExecutions(jobName, queryParameters, size);
    assertThat(result).isNotNull();
    assertThat(result).isNotEmpty();
    assertThat(result).hasSize(size);
}
 
Example #5
Source File: App.java    From tutorials with MIT License
private static void runJob(AnnotationConfigApplicationContext context, String batchJobName) {
    final JobLauncher jobLauncher = (JobLauncher) context.getBean("jobLauncher");
    final Job job = (Job) context.getBean(batchJobName);

    LOGGER.info("Starting the batch job: {}", batchJobName);
    try {
        // To enable multiple execution of a job with the same parameters
        JobParameters jobParameters = new JobParametersBuilder().addString("jobID", String.valueOf(System.currentTimeMillis()))
            .toJobParameters();
        final JobExecution execution = jobLauncher.run(job, jobParameters);
        LOGGER.info("Job Status : {}", execution.getStatus());
    } catch (final Exception e) {
        e.printStackTrace();
        LOGGER.error("Job failed {}", e.getMessage());
    }
}
 
Example #6
Source File: JavaConfigIntegrationTest.java    From spring-boot-starter-batch-web with Apache License 2.0
@Test
public void testRunJob() throws InterruptedException {
	Long executionId = restTemplate.postForObject("http://localhost:" + port + "/batch/operations/jobs/simpleJob",
			"", Long.class);
	while (!restTemplate
			.getForObject("http://localhost:" + port + "/batch/operations/jobs/executions/{executionId}",
					String.class, executionId)
			.equals("COMPLETED")) {
		Thread.sleep(1000);
	}
	String log = restTemplate.getForObject(
			"http://localhost:" + port + "/batch/operations/jobs/executions/{executionId}/log", String.class,
			executionId);
	assertThat(log.length() > 20, is(true));
	JobExecution jobExecution = jobExplorer.getJobExecution(executionId);
	assertThat(jobExecution.getStatus(), is(BatchStatus.COMPLETED));
	String jobExecutionString = restTemplate.getForObject(
			"http://localhost:" + port + "/batch/monitoring/jobs/executions/{executionId}", String.class,
			executionId);
	assertThat(jobExecutionString.contains("COMPLETED"), is(true));
}
 
Example #7
Source File: JobCompleteNotificationListener.java    From CogStack-Pipeline with Apache License 2.0
@Override
public void beforeJob(JobExecution jobExecution) {
  columnRangePartitioner.setJobExecution(jobExecution);

  // *** Workaround -- re-initialising the ES REST service for scheduled jobs (see afterJob() )
  if (esRestService != null && esServiceAlreadyClosed) {
    try {
      esRestService.init();
      esServiceAlreadyClosed = false;
      log.debug("Re-initialising ElasticSearch REST service");
    }
    catch (Exception e) {
      log.warn("Cannot re-initialise ElasticSearch REST service");
    }
  }
}
 
Example #8
Source File: BatchMetricsFlatFileToDbIntegrationTest.java    From spring-boot-starter-batch-web with Apache License 2.0
@Test
public void testRunFlatFileToDbSkipJob_SkipInProcess_ProcessorNonTransactional() throws InterruptedException {
	JobExecution jobExecution = runJob("flatFileToDbSkipProcessorNonTransactionalJob",
			"metrics/flatFileToDbSkipJob_SkipInProcess.csv");
	assertThat(jobExecution.getStatus(), is(BatchStatus.COMPLETED));
	ExecutionContext executionContext = jobExecution.getStepExecutions().iterator().next().getExecutionContext();
	long writeCount = 7L;
	MetricValidator validator = MetricValidatorBuilder.metricValidator().withExecutionContext(executionContext)
			.withBeforeChunkCount(3L).withStreamOpenCount(1L).withStreamUpdateCount(4L).withStreamCloseCount(0L)
			.withBeforeReadCount(9L).withReadCount(9L).withAfterReadCount(8L).withReadErrorCount(0L)
			.withBeforeProcessCount(8L).withProcessCount(8L).withAfterProcessCount(7L).withProcessErrorCount(1L)
			.withBeforeWriteCount(7L).withWriteCount(writeCount).withAfterWriteCount(7L).withAfterChunkCount(3L)
			.withChunkErrorCount(1L).withSkipInReadCount(0L).withSkipInProcessCount(1L).withSkipInWriteCount(0L)
			.build();
	validator.validate();
	// if one is correct, all will be in the metricReader, so I check just one
	Gauge gauge = meterRegistry.find(MetricsListener.METRIC_NAME)//
			.tag("context", "flatFileToDbSkipProcessorNonTransactionalJob.step")//
			.tag("name", MetricNames.PROCESS_COUNT.getName())//
			.gauge();
	assertThat((Double) gauge.value(), is(notNullValue()));
	assertThat(jdbcTemplate.queryForObject("SELECT COUNT(*) FROM ITEM", Long.class), is(writeCount));
}
 
Example #9
Source File: TaxCalculationStepITest.java    From batchers with Apache License 2.0
@Test
public void taxCalculationStep_generatesCorrectCalculation() throws Exception {
    Employee employee = haveOneEmployee();

    JobParameters jobParameters = new JobParametersBuilder()
            .addLong("year", 2014L, true)
            .addLong("month", 5L, true)
            .toJobParameters();

    JobExecution jobExecution = jobLauncherTestUtils.launchStep(EmployeeJobConfigSingleJvm.TAX_CALCULATION_STEP, jobParameters);

    assertThat(jobExecution.getExitStatus()).isEqualTo(ExitStatus.COMPLETED);

    List<TaxCalculation> byEmployee = taxCalculationRepository.findByEmployee(employee);

    assertThat(byEmployee).hasSize(1);
    TaxCalculation taxCalculation = byEmployee.get(0);
    assertThat(taxCalculation.getEmployee().getId()).isEqualTo(employee.getId());
    assertThat(taxCalculation.getYear()).isEqualTo(2014);
    assertThat(taxCalculation.getMonth()).isEqualTo(5);

    List<TaxCalculation> byYearAndMonth = taxCalculationRepository.find(2014, 5, 1L);
    assertThat(byYearAndMonth).hasSize(1);
}
 
Example #10
Source File: SettleQuartzController.java    From seed with Apache License 2.0
/**
 * @param jobNameStr   the job name string
 * @param jobNameDesc  a description of the job name
 * @param time         random number: the unique identifier of the batch run (pass null when not resuming from a breakpoint)
 * @param parameterMap other parameters: not used as part of the batch run's unique identifier (pass null if there are none)
 * Comment by 玄玉<https://jadyer.cn/> on 2019/8/12 18:25.
 */
private JobExecution runJob(String jobNameStr, String jobNameDesc, String time, Map<String, String> parameterMap) throws Exception {
    // determine whether this is a resume (restart from the breakpoint)
    boolean isResume = false;
    long timeLong;
    if (StringUtils.isBlank(time)) {
        timeLong = SystemClockUtil.INSTANCE.now();
    } else {
        isResume = true;
        timeLong = Long.parseLong(time);
    }
    LogUtil.getLogger().info("{}==>{}:Starting...time={}", jobNameDesc, isResume ? " (resume from breakpoint)" : "", timeLong);
    // build the JobParameters
    JobParametersBuilder jobParametersBuilder = new JobParametersBuilder();
    jobParametersBuilder.addLong("time", timeLong);
    if (null != parameterMap && !parameterMap.isEmpty()) {
        for (Map.Entry<String, String> entry : parameterMap.entrySet()) {
            jobParametersBuilder.addString(entry.getKey(), entry.getValue(), false);
        }
    }
    // run the job
    Job xmlSettleJob = (Job) SpringContextHolder.getBean(jobNameStr);
    JobExecution execution = jobLauncher.run(xmlSettleJob, jobParametersBuilder.toJobParameters());
    LogUtil.getLogger().info("{}==>{}:Ending......jobInstance={}", jobNameDesc, isResume ? " (resume from breakpoint)" : "", execution.getJobInstance());
    return execution;
}
 
Example #11
Source File: AllJobExecutionProvider.java    From spring-batch-rest with Apache License 2.0
public Collection<JobExecution> getJobExecutions(Optional<String> jobNameRegexp,
                                                 Optional<String> exitCode,
                                                 int limitPerJob) {
    log.debug("Getting job executions from JobExplorer for jobNameRegexp={}, exitCode={}, limitPerJob={}", jobNameRegexp, exitCode, limitPerJob);
    Optional<Pattern> maybeJobNamePattern = jobNameRegexp.map(Pattern::compile);
    List<String> jobNames = jobExplorer.getJobNames().stream()
            .filter(n -> maybeJobNamePattern.map(p -> p.matcher(n).matches()).orElse(true)).collect(toList());
    TreeSet<JobExecution> result = new TreeSet<>(byDescendingTime());
    for (String jobName : jobNames)
        jobExplorer.getJobInstances(jobName, 0, limitPerJob).stream()
                .flatMap(ji -> jobExplorer.getJobExecutions(ji).stream())
                .filter(e -> exitCode.map(c -> e.getExitStatus().getExitCode().equals(c)).orElse(true))
                .sorted(byDescendingTime())
                .limit(limitPerJob).forEach(result::add);
    log.debug("Found {} job execution(s) for jobNameRegexp={}, exitCode={}, limitPerJob={}",
            jobNameRegexp, exitCode, limitPerJob, result.size());
    return result;
}
 
Example #12
Source File: SettleQuartzController.java    From seed with Apache License 2.0
/**
 * Spring Batch resume-from-breakpoint (restart after failure)
 * -----------------------------------------------------------------------------------------------
 * If an exception is thrown while a Step is executing and that exception is not configured as skippable, the whole Job is interrupted.
 * After the faulty data has been fixed manually, calling jobLauncher.run() again makes Spring Batch resume from where the exception occurred.
 * 1. For a read-process-write Step: given 10 records and chunk=3 (a write every three records), if record 5 fails,
 *    the first three records are written, record 4 is not persisted even though it was processed, and on the rerun reading resumes from record 4.
 * 2. For a Tasklet Step: the transaction is only committed, and data written to the database, once the whole Tasklet completes without an exception,
 *    so if any single record fails, nothing reaches the database no matter how many writes were issued beforehand.
 * 3. If one of a set of parallel Steps fails, the other parallel Steps are unaffected and run to completion before the Job is interrupted.
 *    On the rerun, only the failed parallel Step is executed again; the parallel Steps that already succeeded are not repeated.
 * Note: when resuming, the jobParameters passed in must be identical, otherwise it is treated as a different job and runs from the beginning rather than from the breakpoint.
 *       In other words, all of the above assumes the same jobParameters values are supplied.
 * Also: both JobOperator.start() and JobOperator.restart() were tried, and neither achieved resume-from-breakpoint.
 * (A minimal restart sketch follows this example.)
 * -----------------------------------------------------------------------------------------------
 */
@RequestMapping("/batch")
//@SeedQSSReg(qssHost="${qss.host}", appHost="${qss.appHost}", appname="${qss.appname}", name="${qss.name}", cron="${qss.cron}")
CommResult<JobInstance> batch(String bizDate) throws Exception {
    // determine whether this is a resume (restart from the breakpoint)
    boolean isResume = false;
    if (StringUtils.isBlank(bizDate)) {
        bizDate = DateFormatUtils.format(new Date(), "yyyyMMdd");
    } else {
        isResume = true;
    }
    LogUtil.getLogger().info("Settlement batch{}: Starting...bizDate={}", isResume ? " (resume from breakpoint)" : "", bizDate);
    // build the JobParameters
    JobParametersBuilder jobParametersBuilder = new JobParametersBuilder();
    jobParametersBuilder.addString("bizDate", bizDate);
    // run the job
    JobExecution execution = jobLauncher.run(settleJob, jobParametersBuilder.toJobParameters());
    LogUtil.getLogger().info("Settlement batch{}: Ending......", isResume ? " (resume from breakpoint)" : "");
    return CommResult.success(execution.getJobInstance());
}
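
The restart behaviour described in the comment above can be sketched as follows. This is a hypothetical, minimal example (not part of the seed project): the job is launched with a fixed identifying parameter, and if it fails it is launched again with exactly the same JobParameters, which is what makes Spring Batch continue the existing JobInstance from the failed step instead of starting a fresh run.

import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.launch.JobLauncher;

public class RestartSketch {

    private final JobLauncher jobLauncher;
    private final Job settleJob;   // placeholder for the settlement job bean

    public RestartSketch(JobLauncher jobLauncher, Job settleJob) {
        this.jobLauncher = jobLauncher;
        this.settleJob = settleJob;
    }

    public void runWithRestart(String bizDate) throws Exception {
        // identifying parameter: must be identical on the rerun, otherwise a new
        // JobInstance is created and the job starts from scratch
        JobParameters parameters = new JobParametersBuilder()
                .addString("bizDate", bizDate)
                .toJobParameters();

        JobExecution first = jobLauncher.run(settleJob, parameters);
        if (first.getStatus() == BatchStatus.FAILED) {
            // ... fix the bad data manually, then rerun with the SAME parameters;
            // Spring Batch resumes the JobInstance from the step (and chunk) that failed
            JobExecution second = jobLauncher.run(settleJob, parameters);
            System.out.println("restart finished with status " + second.getStatus());
        }
    }
}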
 
Example #13
Source File: XmlIntegrationTest.java    From spring-boot-starter-batch-web with Apache License 2.0
@Test
public void testRunJob() throws InterruptedException {
	Long executionId = restTemplate
			.postForObject("http://localhost:" + port + "/batch/operations/jobs/flatFile2JobXml", "", Long.class);
	while (!restTemplate
			.getForObject("http://localhost:" + port + "/batch/operations/jobs/executions/{executionId}",
					String.class, executionId)
			.equals("COMPLETED")) {
		Thread.sleep(1000);
	}
	String log = restTemplate.getForObject(
			"http://localhost:" + port + "/batch/operations/jobs/executions/{executionId}/log", String.class,
			executionId);
	assertThat(log.length() > 20, is(true));
	JobExecution jobExecution = jobExplorer.getJobExecution(executionId);
	assertThat(jobExecution.getStatus(), is(BatchStatus.COMPLETED));
	String jobExecutionString = restTemplate.getForObject(
			"http://localhost:" + port + "/batch/monitoring/jobs/executions/{executionId}", String.class,
			executionId);
	assertThat(jobExecutionString.contains("COMPLETED"), is(true));
}
 
Example #14
Source File: BatchMetricsFlatFileToDbIntegrationTest.java    From spring-boot-starter-batch-web with Apache License 2.0
@Test
public void testRunFlatFileToDbNoSkipJob_Success() throws InterruptedException {
	JobExecution jobExecution = runJob("flatFileToDbNoSkipJob", "metrics/flatFileToDbNoSkipJob_Success.csv");
	assertThat(jobExecution.getStatus(), is(BatchStatus.COMPLETED));
	ExecutionContext executionContext = jobExecution.getStepExecutions().iterator().next().getExecutionContext();
	long writeCount = 5L;
	MetricValidator validator = MetricValidatorBuilder.metricValidator().withExecutionContext(executionContext)
			.withBeforeChunkCount(2L).withStreamOpenCount(1L).withStreamUpdateCount(3L).withStreamCloseCount(0L)
			.withBeforeReadCount(6L).withReadCount(6L).withAfterReadCount(5L).withReadErrorCount(0L)
			.withBeforeProcessCount(5L).withProcessCount(5L).withAfterProcessCount(5L).withProcessErrorCount(0L)
			.withBeforeWriteCount(5L).withWriteCount(writeCount).withAfterWriteCount(5L).withAfterChunkCount(2L)
			.withChunkErrorCount(0L).withSkipInReadCount(0L).withSkipInProcessCount(0L).withSkipInWriteCount(0L)
			.build();
	validator.validate();
	// if one is correct, all will be in the metricReader, so I check just one
	Gauge gauge = meterRegistry.find(MetricsListener.METRIC_NAME)//
			.tag("context", "flatFileToDbNoSkipJob.step")//
			.tag("name", MetricNames.PROCESS_COUNT.getName())//
			.gauge();
	assertThat((Double) gauge.value(), is(notNullValue())); // TODO
	assertThat(jdbcTemplate.queryForObject("SELECT COUNT(*) FROM ITEM", Long.class), is(writeCount));
}
 
Example #15
Source File: DefaultTaskJobServiceTests.java    From spring-cloud-dataflow with Apache License 2.0
private void createSampleJob(JobRepository jobRepository, TaskBatchDao taskBatchDao,
		TaskExecutionDao taskExecutionDao, String jobName,
		int jobExecutionCount, BatchStatus status) {
	JobInstance instance = jobRepository.createJobInstance(jobName, new JobParameters());
	TaskExecution taskExecution = taskExecutionDao.createTaskExecution(jobName, new Date(), new ArrayList<>(), null);
	JobExecution jobExecution;

	for (int i = 0; i < jobExecutionCount; i++) {
		jobExecution = jobRepository.createJobExecution(instance,
				this.jobParameters, null);
		StepExecution stepExecution = new StepExecution("foo", jobExecution, 1L);
		stepExecution.setId(null);
		jobRepository.add(stepExecution);
		taskBatchDao.saveRelationship(taskExecution, jobExecution);
		jobExecution.setStatus(status);
		jobExecution.setStartTime(new Date());
		jobRepository.update(jobExecution);
	}
}
 
Example #16
Source File: JdbcSearchableJobExecutionDao.java    From spring-cloud-dataflow with Apache License 2.0
/**
 * @see SearchableJobExecutionDao#getJobExecutions(String, BatchStatus, int, int)
 */
@Override
public List<JobExecution> getJobExecutions(String jobName, BatchStatus status, int start, int count) {
    if (start <= 0) {
        return getJdbcTemplate().query(byJobNameAndStatusPagingQueryProvider.generateFirstPageQuery(count),
                new JobExecutionRowMapper(), jobName, status.name());
    }
    try {
        Long startAfterValue = getJdbcTemplate().queryForObject(
                byJobNameAndStatusPagingQueryProvider.generateJumpToItemQuery(start, count), Long.class, jobName, status.name());
        return getJdbcTemplate().query(byJobNameAndStatusPagingQueryProvider.generateRemainingPagesQuery(count),
                new JobExecutionRowMapper(), jobName, status.name(), startAfterValue);
    }
    catch (IncorrectResultSizeDataAccessException e) {
        return Collections.emptyList();
    }
}
 
Example #17
Source File: ComposedTaskRunnerConfigurationWithPropertiesTests.java    From spring-cloud-dataflow with Apache License 2.0
@Test
@DirtiesContext
public void testComposedConfiguration() throws Exception {
	JobExecution jobExecution = this.jobRepository.createJobExecution(
			"ComposedTest", new JobParameters());
	job.execute(jobExecution);

	Map<String, String> props = new HashMap<>(1);
	props.put("format", "yyyy");
	props.put("memory", "2048m");
	assertEquals(COMPOSED_TASK_PROPS, composedTaskProperties.getComposedTaskProperties());
	assertEquals(1010, composedTaskProperties.getMaxWaitTime());
	assertEquals(1100, composedTaskProperties.getIntervalTimeBetweenChecks());
	assertEquals("https://bar", composedTaskProperties.getDataflowServerUri().toASCIIString());
	List<String> args = new ArrayList<>(1);
	args.add("--baz=boo --foo=bar");
	Assert.notNull(job.getJobParametersIncrementer(), "JobParametersIncrementer must not be null.");
	verify(this.taskOperations).launch("ComposedTest-AAA", props, args, null);
}
 
Example #18
Source File: BatchConfiguration.java    From building-microservices with Apache License 2.0
CommandLineRunner runner(JobLauncher launcher,
                         Job job,
                         @Value("${file}") File in,
                         JdbcTemplate jdbcTemplate) {
    return args -> {

        JobExecution execution = launcher.run(job,
                new JobParametersBuilder()
                        .addString("file", in.getAbsolutePath())
                        .toJobParameters());

        System.out.println("execution status: " + execution.getExitStatus().toString());

        List<Person> personList = jdbcTemplate.query("select * from PEOPLE", (resultSet, i) -> new Person(resultSet.getString("first"),
                resultSet.getString("last"),
                resultSet.getString("email")));

        personList.forEach(System.out::println);

    };

}
 
Example #19
Source File: JobResultsServiceImplTest.java    From batchers with Apache License 2.0
@Test
public void testGetFinishedJobResults_SameDates_SortingIsDescOnDate() throws Exception {
    //ARRANGE
    JobInstance jobInstance1 = new JobInstance(1L, EmployeeJobConfigSingleJvm.EMPLOYEE_JOB);

    when(jobExplorer.findJobInstancesByJobName(EmployeeJobConfigSingleJvm.EMPLOYEE_JOB, 0, MAX_VALUE))
            .thenReturn(asList(jobInstance1));

    DateTime dateTime = new DateTime();
    JobExecution jobInstance1_jobExecution1 = new JobExecution(jobInstance1, 1L, createJobParameters(dateTime.getYear(), dateTime.getMonthOfYear()), null);
    jobInstance1_jobExecution1.setEndTime(getDateOfDay(3));
    JobExecution jobInstance1_jobExecution2 = new JobExecution(jobInstance1, 2L, createJobParameters(dateTime.getYear(), dateTime.getMonthOfYear()), null);
    jobInstance1_jobExecution2.setEndTime(getDateOfDay(4));

    when(jobExplorer.getJobExecutions(jobInstance1)).thenReturn(asList(jobInstance1_jobExecution1, jobInstance1_jobExecution2));
    //ACT
    List<JobResult> jobResults = jobResultsService.getJobResults();

    assertThat(jobResults.get(0).getJobExecutionResults().get(0).getEndTime()).isAfter(jobResults.get(0).getJobExecutionResults().get(1).getEndTime());
}
 
Example #20
Source File: IncrementalColumnRangePartitionerTests.java    From spring-cloud-task-app-starters with Apache License 2.0
@Test
public void testTwoPartitions() {
	jdbc.execute("insert into bar (foo) values (1), (2), (3), (4)");
	partitioner.setColumn("foo");
	partitioner.setTable("bar");
	partitioner.setPartitions(2);
	partitioner.beforeStep(new StepExecution("step1", new JobExecution(5L)));
	Map<String, ExecutionContext> partitions = partitioner.partition(1);
	assertEquals(2, partitions.size());
	assertTrue(partitions.containsKey("partition0"));
	assertEquals("WHERE (foo BETWEEN 1 AND 2)", partitions.get("partition0").get("partClause"));
	assertEquals("-p0", partitions.get("partition0").get("partSuffix"));
	assertTrue(partitions.containsKey("partition1"));
	assertEquals("WHERE (foo BETWEEN 3 AND 4)", partitions.get("partition1").get("partClause"));
	assertEquals("-p1", partitions.get("partition1").get("partSuffix"));
}
 
Example #21
Source File: JobOperationsController.java    From spring-boot-starter-batch-web with Apache License 2.0
/**
 * Borrowed from CommandLineJobRunner.
 *
 * @param job
 *            the job that we need to find the next parameters for
 * @return the next job parameters if they can be located
 * @throws JobParametersNotFoundException
 *             if there is a problem
 */
private JobParameters getNextJobParameters(Job job) throws JobParametersNotFoundException {
	String jobIdentifier = job.getName();
	JobParameters jobParameters;
	List<JobInstance> lastInstances = jobExplorer.getJobInstances(jobIdentifier, 0, 1);

	JobParametersIncrementer incrementer = job.getJobParametersIncrementer();

	if (lastInstances.isEmpty()) {
		jobParameters = incrementer.getNext(new JobParameters());
		if (jobParameters == null) {
			throw new JobParametersNotFoundException(
					"No bootstrap parameters found from incrementer for job=" + jobIdentifier);
		}
	} else {
		List<JobExecution> lastExecutions = jobExplorer.getJobExecutions(lastInstances.get(0));
		jobParameters = incrementer.getNext(lastExecutions.get(0).getJobParameters());
	}
	return jobParameters;
}
 
Example #22
Source File: DefaultJobService.java    From spring-batch-lightmin with Apache License 2.0
@Override
public Collection<JobExecution> getJobExecutions(final JobInstance jobInstance, final int start, final int count) {
    final Collection<JobExecution> jobExecutions = new LinkedList<>();
    final List<JobExecution> jobExecutionList = this.lightminJobExecutionDao.findJobExecutions(jobInstance, start, count);
    jobExecutions.addAll(jobExecutionList);
    return jobExecutions;
}
 
Example #23
Source File: SpringBatchBuildReportHandler.java    From spring-cloud-release-tools with Apache License 2.0
@Override
public void reportBuildSummary() {
	List<String> jobNames = this.jobExplorer.getJobNames();
	List<JobExecution> sortedJobExecutions = jobNames.stream()
			.flatMap(name -> this.jobExplorer.findJobInstancesByJobName(name, 0, 100)
					.stream())
			.flatMap(instance -> this.jobExplorer.getJobExecutions(instance).stream())
			.filter(j -> !j.isRunning())
			.sorted(Comparator.comparing(JobExecution::getCreateTime))
			.collect(Collectors.toList());
	List<StepExecution> stepContexts = sortedJobExecutions.stream()
			.flatMap(j -> j.getStepExecutions().stream())
			.collect(Collectors.toCollection(LinkedList::new));
	printTable(buildTable(stepContexts));
}
 
Example #24
Source File: ComposedRunnerVisitorTests.java    From spring-cloud-dataflow with Apache License 2.0
private Collection<StepExecution> getStepExecutions() {
	JobExplorer jobExplorer = this.applicationContext.getBean(JobExplorer.class);
	List<JobInstance> jobInstances = jobExplorer.findJobInstancesByJobName("job", 0, 1);
	assertEquals(1, jobInstances.size());
	JobInstance jobInstance = jobInstances.get(0);
	List<JobExecution> jobExecutions = jobExplorer.getJobExecutions(jobInstance);
	assertEquals(1, jobExecutions.size());
	JobExecution jobExecution = jobExecutions.get(0);
	return jobExecution.getStepExecutions();
}
 
Example #25
Source File: EmployeeBatchJobITest.java    From batchers with Apache License 2.0
@Test
public void whenWebServiceFailsForOneEmployee_thenSumOfTaxes_isCalculatedForFailedCalls() throws Exception {
    haveEmployees(3);

    respondWithServerError(3);
    respondOneTimeWithSuccess();
    respondOneTimeWithSuccess();

    JobExecution jobExecution = jobLauncherTestUtils.launchJob(jobParams);

    assertThat(sumOfTaxes.getFailedSum(YEAR.intValue(), MONTH.intValue())).isEqualTo(100D);
    verifyJob(jobExecution);
}
 
Example #26
Source File: BatchJobUtils.java    From CogStack-Pipeline with Apache License 2.0
public JobExecution getLastSuccessfulJobExecution() {
    JdbcTemplate template = new JdbcTemplate(jobRepositoryDataSource);
    String sql = "SELECT MAX(bje.job_execution_id) FROM batch_job_execution bje \n" +
            " JOIN batch_job_instance bji ON bje.job_instance_id = bji.job_instance_id \n" +
            " JOIN batch_job_execution_params bjep ON bje.job_execution_id = bjep.job_execution_id" +
            " WHERE bje.status = 'COMPLETED' \n" +
            " AND bji.job_name = '" + env.getProperty("job.jobName") + "'";
    LOG.info("Looking for last successful job");
    Long id = template.queryForObject(sql, Long.class);
    return jobExplorer.getJobExecution(id);
}
 
Example #27
Source File: SpringBatchIntegrationTest.java    From tutorials with MIT License
@Test
public void whenStep2Executed_thenSuccess() {

    // when
    JobExecution jobExecution = jobLauncherTestUtils.launchStep("step2", defaultJobParameters());
    Collection<StepExecution> actualStepExecutions = jobExecution.getStepExecutions();
    ExitStatus actualExitStatus = jobExecution.getExitStatus();

    // then
    assertThat(actualStepExecutions.size(), is(1));
    assertThat(actualExitStatus.getExitCode(), is("COMPLETED"));
    actualStepExecutions.forEach(stepExecution -> {
        assertThat(stepExecution.getWriteCount(), is(8));
    });
}
 
Example #28
Source File: DeployerStepExecutionHandlerTests.java    From spring-cloud-task with Apache License 2.0
@Test
public void testRunSuccessful() throws Exception {
	StepExecution workerStep = new StepExecution("workerStep", new JobExecution(1L),
			2L);

	when(this.environment.containsProperty(
			DeployerPartitionHandler.SPRING_CLOUD_TASK_JOB_EXECUTION_ID))
					.thenReturn(true);
	when(this.environment.containsProperty(
			DeployerPartitionHandler.SPRING_CLOUD_TASK_STEP_EXECUTION_ID))
					.thenReturn(true);
	when(this.environment
			.containsProperty(DeployerPartitionHandler.SPRING_CLOUD_TASK_STEP_NAME))
					.thenReturn(true);
	when(this.environment
			.getProperty(DeployerPartitionHandler.SPRING_CLOUD_TASK_STEP_NAME))
					.thenReturn("workerStep");
	when(this.beanFactory.getBeanNamesForType(Step.class))
			.thenReturn(new String[] { "workerStep", "foo", "bar" });
	when(this.environment.getProperty(
			DeployerPartitionHandler.SPRING_CLOUD_TASK_STEP_EXECUTION_ID))
					.thenReturn("2");
	when(this.environment
			.getProperty(DeployerPartitionHandler.SPRING_CLOUD_TASK_JOB_EXECUTION_ID))
					.thenReturn("1");
	when(this.jobExplorer.getStepExecution(1L, 2L)).thenReturn(workerStep);
	when(this.environment
			.getProperty(DeployerPartitionHandler.SPRING_CLOUD_TASK_STEP_NAME))
					.thenReturn("workerStep");
	when(this.beanFactory.getBean("workerStep", Step.class)).thenReturn(this.step);

	this.handler.run();

	verify(this.step).execute(workerStep);
	verifyZeroInteractions(this.jobRepository);
}
 
Example #29
Source File: EmployeeBatchJobITest.java    From batchers with Apache License 2.0
@Test
public void jobLaunched_NoEmployees_EmployeeRepositoryIsCalled_NoInteractionWithTheTaxCalculatorService() throws Exception {
    JobExecution jobExecution = jobLauncherTestUtils.launchJob(jobParams);

    assertThat(jobExecution.getStatus()).isEqualTo(COMPLETED);
    verifyJob(jobExecution);
}
 
Example #30
Source File: TaskBatchExecutionListener.java    From spring-cloud-task with Apache License 2.0
@Override
public void beforeJob(JobExecution jobExecution) {
	if (this.taskExecution == null) {
		logger.warn(
				"This job was executed outside the scope of a task but still used the task listener.");
	}
	else {
		logger.info(String.format(
				"The job execution id %s was run within the task execution %s",
				jobExecution.getId(), this.taskExecution.getExecutionId()));
		this.taskBatchDao.saveRelationship(this.taskExecution, jobExecution);
	}
}