Java Code Examples for org.springframework.batch.item.ItemReader

The following examples show how to use org.springframework.batch.item.ItemReader. These examples are extracted from open source projects. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source Project: spring-cloud   Source File: UserReader.java    License: Apache License 2.0 6 votes vote down vote up
@Bean(destroyMethod = "")
@Qualifier("jpaPagingItemReader")
public ItemReader<User> jpaPagingItemReader() {
    // Native query selecting all users; ":limit" is bound to the "%" wildcard,
    // so every row matches and the reader pages through the whole table.
    JpaNativeQueryProvider<User> provider = new JpaNativeQueryProvider<User>();
    provider.setSqlQuery("select * from user where id like :limit ");
    provider.setEntityClass(User.class);

    JpaPagingItemReader<User> pagingReader = new JpaPagingItemReader<User>();
    pagingReader.setEntityManagerFactory(emf);
    pagingReader.setPageSize(3);
    pagingReader.setQueryProvider(provider);
    pagingReader.setParameterValues(Collections.<String, Object>singletonMap("limit", "%"));
    pagingReader.setSaveState(true); // allow restart from the last committed page
    return pagingReader;
}
 
Example 2
@Bean(name = STEP_PROCESS_CSV_FILE)
public Step readCsvFileAndPopulateDbTable(
        StepBuilderFactory stepBuilderFactory,
        PlatformTransactionManager platformTransactionManager,
        @Qualifier(DATA_READER) ItemReader<JavaChampion> itemReader,
        @Qualifier(DATA_PROCESSOR) ItemProcessor<JavaChampion, JavaChampion> itemProcessor,
        @Qualifier(DATA_WRITER) ItemWriter<JavaChampion> itemWriter) {
    // Chunk-oriented step: read/process/write JavaChampion records,
    // committing a transaction every 10 items.
    return stepBuilderFactory
            .get(STEP_PROCESS_CSV_FILE)
            .<JavaChampion, JavaChampion>chunk(10)
            .reader(itemReader)
            .processor(itemProcessor)
            .writer(itemWriter)
            .transactionManager(platformTransactionManager)
            .build();
}
 
Example 3
Source Project: SpringBootBucket   Source File: BudgetVtollConfig.java    License: MIT License 6 votes vote down vote up
/**
     * Step wiring the ItemReader, ItemProcessor and ItemWriter for
     * BudgetVtoll records, with fault tolerance (retry + skip).
     *
     * @param stepBuilderFactory factory used to assemble the step
     * @param reader             supplies BudgetVtoll items
     * @param writer             persists processed items
     * @param processor          transforms each item before writing
     * @return the configured "vtollStep1" step bean
     */
    @Bean(name = "vtollStep1")
    public Step vtollStep1(StepBuilderFactory stepBuilderFactory,
                           @Qualifier("vtollReader") ItemReader<BudgetVtoll> reader,
                           @Qualifier("vtollWriter") ItemWriter<BudgetVtoll> writer,
                           @Qualifier("vtollProcessor") ItemProcessor<BudgetVtoll, BudgetVtoll> processor) {
        return stepBuilderFactory
                .get("vtollStep1")
                .<BudgetVtoll, BudgetVtoll>chunk(5000) // commit every 5000 items
                .reader(reader) // bind the reader to this step
                .processor(processor) // bind the processor
                .writer(writer) // bind the writer
                .faultTolerant()
                .retry(Exception.class)   // retry any failure...
                .noRetry(ParseException.class) // ...except parse failures
                .retryLimit(1)           // NOTE(review): retryLimit(1) allows only one attempt total, i.e. effectively no retry — confirm intent
                .skip(Exception.class)
                .skipLimit(200)         // tolerate at most 200 skipped failures overall
//                .taskExecutor(new SimpleAsyncTaskExecutor()) // would run the step concurrently; jobs are usually better kept serial
//                .throttleLimit(10)        // concurrency of 10 (default is 4)
                .build();
    }
 
Example 4
Source Project: SpringBootBucket   Source File: CantonConfig.java    License: MIT License 6 votes vote down vote up
/**
     * Step wiring the ItemReader, ItemProcessor and ItemWriter for
     * Canton records, with fault tolerance (retry + skip).
     *
     * @param stepBuilderFactory factory used to assemble the step
     * @param reader             supplies Canton items
     * @param writer             persists processed items
     * @param processor          transforms each item before writing
     * @return the configured "cantonStep1" step bean
     */
    @Bean(name = "cantonStep1")
    public Step cantonStep1(StepBuilderFactory stepBuilderFactory,
                           @Qualifier("cantonReader") ItemReader<Canton> reader,
                           @Qualifier("cantonWriter") ItemWriter<Canton> writer,
                           @Qualifier("cantonProcessor") ItemProcessor<Canton, Canton> processor) {
        return stepBuilderFactory
                .get("cantonStep1")
                .<Canton, Canton>chunk(5000) // commit every 5000 items
                .reader(reader) // bind the reader to this step
                .processor(processor) // bind the processor
                .writer(writer) // bind the writer
                .faultTolerant()
                .retry(Exception.class)   // retry any failure...
                .noRetry(ParseException.class) // ...except parse failures
                .retryLimit(1)           // NOTE(review): retryLimit(1) allows only one attempt total, i.e. effectively no retry — confirm intent
                .skip(Exception.class)
                .skipLimit(200)         // tolerate at most 200 skipped failures overall
//                .taskExecutor(new SimpleAsyncTaskExecutor()) // would run the step concurrently; jobs are usually better kept serial
//                .throttleLimit(10)        // concurrency of 10 (default is 4)
                .build();
    }
 
Example 5
Source Project: SpringBootBucket   Source File: LogConfig.java    License: MIT License 6 votes vote down vote up
/**
     * Step wiring the ItemReader, ItemProcessor and ItemWriter for
     * Log records, with fault tolerance (retry + skip).
     *
     * @param stepBuilderFactory factory used to assemble the step
     * @param reader             supplies Log items
     * @param writer             persists processed items
     * @param processor          transforms each item before writing
     * @return the configured "logStep1" step bean
     */
    @Bean(name = "logStep1")
    public Step logStep1(StepBuilderFactory stepBuilderFactory,
                         @Qualifier("logReader") ItemReader<Log> reader,
                         @Qualifier("logWriter") ItemWriter<Log> writer,
                         @Qualifier("logProcessor") ItemProcessor<Log, Log> processor) {
        return stepBuilderFactory
                .get("logStep1")
                .<Log, Log>chunk(5000) // commit every 5000 items
                .reader(reader) // bind the reader to this step
                .processor(processor) // bind the processor
                .writer(writer) // bind the writer
                .faultTolerant()
                .retry(Exception.class)   // retry any failure...
                .noRetry(ParseException.class) // ...except parse failures
                .retryLimit(1)           // NOTE(review): retryLimit(1) allows only one attempt total, i.e. effectively no retry — confirm intent
                .skip(Exception.class)
                .skipLimit(200)         // tolerate at most 200 skipped failures overall
//                .taskExecutor(new SimpleAsyncTaskExecutor()) // would run the step concurrently; jobs are usually better kept serial
//                .throttleLimit(10)        // concurrency of 10 (default is 4)
                .build();
    }
 
Example 6
Source Project: SpringBootBucket   Source File: AppConfig.java    License: MIT License 6 votes vote down vote up
/**
     * Step wiring the ItemReader, ItemProcessor and ItemWriter for
     * App records, with fault tolerance (retry + skip).
     *
     * @param stepBuilderFactory factory used to assemble the step
     * @param reader             supplies App items
     * @param writer             persists processed items
     * @param processor          transforms each item before writing
     * @return the configured "zappStep1" step bean
     */
    @Bean(name = "zappStep1")
    public Step zappStep1(StepBuilderFactory stepBuilderFactory,
                          @Qualifier("appReader") ItemReader<App> reader,
                          @Qualifier("appWriter") ItemWriter<App> writer,
                          @Qualifier("appProcessor") ItemProcessor<App, App> processor) {
        return stepBuilderFactory
                .get("zappStep1")
                .<App, App>chunk(5000) // commit every 5000 items
                .reader(reader) // bind the reader to this step
                .processor(processor) // bind the processor
                .writer(writer) // bind the writer
                .faultTolerant()
                .retry(Exception.class)   // retry any failure...
                .noRetry(ParseException.class) // ...except parse failures
                .retryLimit(1)           // NOTE(review): retryLimit(1) allows only one attempt total, i.e. effectively no retry — confirm intent
                .skip(Exception.class)
                .skipLimit(200)         // tolerate at most 200 skipped failures overall
//                .taskExecutor(new SimpleAsyncTaskExecutor()) // would run the step concurrently; jobs are usually better kept serial
//                .throttleLimit(10)        // concurrency of 10 (default is 4)
                .build();
    }
 
Example 7
@Bean
public ItemReader<Staff> staffItemReader() {
    // Reads staff records from a classpath CSV, skipping the header row.
    // Rewritten from double-brace initialization: the anonymous subclasses it
    // creates hold a hidden reference to the enclosing config instance and add
    // needless class files — plain setter calls are equivalent and safer.
    FlatFileItemReader<Staff> reader = new FlatFileItemReader<Staff>();
    reader.setResource(new ClassPathResource("sample_staffs.csv"));
    reader.setLinesToSkip(1); // skip the CSV header line

    // Comma-separated columns mapped by name onto Staff bean properties.
    DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer();
    tokenizer.setNames(new String[] { "firstName", "lastName", "email", "tel" });

    BeanWrapperFieldSetMapper<Staff> fieldSetMapper = new BeanWrapperFieldSetMapper<Staff>();
    fieldSetMapper.setTargetType(Staff.class);

    DefaultLineMapper<Staff> lineMapper = new DefaultLineMapper<Staff>();
    lineMapper.setLineTokenizer(tokenizer);
    lineMapper.setFieldSetMapper(fieldSetMapper);
    reader.setLineMapper(lineMapper);
    return reader;
}
 
Example 8
Source Project: CogStack-Pipeline   Source File: JobConfiguration.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Step-scoped JDBC paging reader for the "jdbc_in" profile. Each partition
 * reads only the row slice bounded by the injected min/max key values and
 * timestamps, which the step partitioner places in the execution context.
 *
 * @param minValue           lower partition key bound (from stepExecutionContext)
 * @param maxValue           upper partition key bound
 * @param minTimeStamp       lower timestamp bound for the partition
 * @param maxTimeStamp       upper timestamp bound for the partition
 * @param documentRowmapper  maps each result-set row to a Document
 * @param jdbcDocumentSource the source database to page through
 * @return a configured paging reader for this partition's slice
 * @throws Exception if the paging query provider cannot be built
 */
@Bean
@StepScope
@Qualifier("documentItemReader")
@Profile("jdbc_in")
public ItemReader<Document> documentItemReader(
        @Value("#{stepExecutionContext[minValue]}") String minValue,
        @Value("#{stepExecutionContext[maxValue]}") String maxValue,
        @Value("#{stepExecutionContext[min_time_stamp]}") String minTimeStamp,
        @Value("#{stepExecutionContext[max_time_stamp]}") String maxTimeStamp,
        @Qualifier("documentRowMapper")RowMapper<Document> documentRowmapper,
        @Qualifier("sourceDataSource") DataSource jdbcDocumentSource) throws Exception {

    JdbcPagingItemReader<Document> reader = new JdbcPagingItemReader<>();
    reader.setDataSource(jdbcDocumentSource);

    // read and set obligatory properties
    SqlPagingQueryProviderFactoryBean qp = new SqlPagingQueryProviderFactoryBean();
    qp.setSelectClause(env.getRequiredProperty("source.selectClause"));
    qp.setFromClause(env.getRequiredProperty("source.fromClause"));
    qp.setSortKey(env.getRequiredProperty("source.sortKey"));
    // WHERE clause restricts this reader to its partition's key/timestamp range
    qp.setWhereClause(stepPartitioner.getPartitioningLogic(minValue,maxValue, minTimeStamp,maxTimeStamp));
    qp.setDataSource(jdbcDocumentSource);

    // set optional properties
    if (env.containsProperty("source.pageSize")) {
        reader.setPageSize(Integer.parseInt(env.getProperty("source.pageSize")));
    }
    else { // it's a good idea to batch size and page size (commit interval) set to the same value
        LOG.info("property: 'source.pageSize' not specified -> setting DB reader page size to batch step chunk size: {}", chunkSize);
        reader.setPageSize(chunkSize);
    }
    reader.setQueryProvider(qp.getObject());
    reader.setRowMapper(documentRowmapper);
    return reader;
}
 
Example 9
@Bean
@ConditionalOnMissingBean
@ConditionalOnProperty(prefix = "spring.batch.job", name = "jobName")
public Job job(ItemReader<Map<Object, Object>> itemReader,
		ItemWriter<Map<Object, Object>> itemWriter) {

	// Assemble a single chunk-oriented step from the configured step name and
	// chunk size, wiring in the shared item processor held by this class.
	SimpleStepBuilder<Map<Object, Object>, Map<Object, Object>> builder =
			this.stepBuilderFactory
					.get(this.properties.getStepName())
					.<Map<Object, Object>, Map<Object, Object>>chunk(this.properties.getChunkSize())
					.reader(itemReader)
					.processor(this.itemProcessor);

	// Wrap the finished step in a job under the configured job name.
	Step step = builder.writer(itemWriter).build();
	return this.jobBuilderFactory.get(this.properties.getJobName()).start(step).build();
}
 
Example 10
Source Project: Demo   Source File: CsvBatchConfig.java    License: Apache License 2.0 5 votes vote down vote up
@Bean
public Step step1(StepBuilderFactory stepBuilderFactory, ItemReader<Student> reader, ItemWriter<Student> writer,
        ItemProcessor<Student,Student> processor) {
    // Chunk-oriented step over Student records, committing every 65000 items.
    return stepBuilderFactory
            .get("step1")
            .<Student, Student>chunk(65000)
            .reader(reader)   // source of Student items
            .writer(writer)   // sink for processed items
            .processor(processor) // per-item transformation
            .build();
}
 
Example 11
Source Project: spring-graalvm-native   Source File: BatchConfiguration.java    License: Apache License 2.0 5 votes vote down vote up
@Bean
public Step step1(ItemReader<CustomerCredit> itemReader,
		ItemProcessor<CustomerCredit, CustomerCredit> itemProcessor,
		ItemWriter<CustomerCredit> itemWriter) {
	// Two-item chunks: read credits, transform each, then write the chunk.
	return this.stepBuilderFactory
			.get("step1")
			.<CustomerCredit, CustomerCredit>chunk(2)
			.reader(itemReader)
			.writer(itemWriter)
			.processor(itemProcessor)
			.build();
}
 
Example 12
Source Project: SpringAll   Source File: FileItemReaderDemo.java    License: MIT License 5 votes vote down vote up
private ItemReader<TestData> fileItemReader() {
    // Tokenize each line with the default comma delimiter; the names act as
    // column headers so fields can be read by name below.
    DelimitedLineTokenizer lineTokenizer = new DelimitedLineTokenizer();
    lineTokenizer.setNames("id", "field1", "field2", "field3");

    // Convert each tokenized line into a fresh TestData instance.
    DefaultLineMapper<TestData> lineMapper = new DefaultLineMapper<>();
    lineMapper.setLineTokenizer(lineTokenizer);
    lineMapper.setFieldSetMapper(fields -> {
        TestData item = new TestData();
        item.setId(fields.readInt("id"));
        item.setField1(fields.readString("field1"));
        item.setField2(fields.readString("field2"));
        item.setField3(fields.readString("field3"));
        return item;
    });

    // Flat-file reader over the classpath resource, skipping the header row.
    FlatFileItemReader<TestData> flatFileReader = new FlatFileItemReader<>();
    flatFileReader.setResource(new ClassPathResource("file"));
    flatFileReader.setLinesToSkip(1);
    flatFileReader.setLineMapper(lineMapper);
    return flatFileReader;
}
 
Example 13
Source Project: SpringAll   Source File: XmlFileItemReaderDemo.java    License: MIT License 5 votes vote down vote up
private ItemReader<TestData> xmlFileItemReader() {
    // Alias the <test> element to TestData so XStream can unmarshal each
    // XML fragment into a typed object.
    Map<String, Class<TestData>> aliases = new HashMap<>(1);
    aliases.put("test", TestData.class);
    XStreamMarshaller xmlMarshaller = new XStreamMarshaller();
    xmlMarshaller.setAliases(aliases);

    // StAX reader that emits one item per <test> fragment of file.xml.
    StaxEventItemReader<TestData> staxReader = new StaxEventItemReader<>();
    staxReader.setResource(new ClassPathResource("file.xml"));
    staxReader.setFragmentRootElementName("test");
    staxReader.setUnmarshaller(xmlMarshaller);
    return staxReader;
}
 
Example 14
Source Project: SpringAll   Source File: DataSourceItemReaderDemo.java    License: MIT License 5 votes vote down vote up
private ItemReader<TestData> dataSourceItemReader() throws Exception {
    JdbcPagingItemReader<TestData> reader = new JdbcPagingItemReader<>();
    reader.setDataSource(dataSource); // the database to page through
    reader.setFetchSize(5); // JDBC fetch size: rows pulled per round trip
    reader.setPageSize(5); // rows per logical page

    // Builds the query: select id,field1,field2,field3 from TEST
    MySqlPagingQueryProvider provider = new MySqlPagingQueryProvider();
    provider.setSelectClause("id,field1,field2,field3"); // columns to select
    provider.setFromClause("from TEST"); // table to read from

    // Map each result-set row to a TestData object.
    // Column indices are 1-based: 1=id, 2=field1, 3=field2, 4=field3.
    reader.setRowMapper((resultSet, rowNum) -> {
        TestData data = new TestData();
        data.setId(resultSet.getInt(1));
        data.setField1(resultSet.getString(2)); // column 2 = field1 (String)
        data.setField2(resultSet.getString(3));
        data.setField3(resultSet.getString(4));
        return data;
    });

    // Paging requires a deterministic order: sort ascending by id.
    Map<String, Order> sort = new HashMap<>(1);
    sort.put("id", Order.ASCENDING);
    provider.setSortKeys(sort);

    reader.setQueryProvider(provider);

    // Initializes internal state (e.g. the NamedParameterJdbcTemplate).
    reader.afterPropertiesSet();
    return reader;
}
 
Example 15
Source Project: SpringAll   Source File: MultiFileIteamReaderDemo.java    License: MIT License 5 votes vote down vote up
private ItemReader<TestData> multiFileItemReader() {
    // Read file1 then file2 in sequence, delegating the per-line parsing to
    // the single-file reader defined elsewhere in this class.
    Resource[] inputFiles = {
            new ClassPathResource("file1"),
            new ClassPathResource("file2")
    };

    MultiResourceItemReader<TestData> multiReader = new MultiResourceItemReader<>();
    multiReader.setResources(inputFiles);
    multiReader.setDelegate(fileItemReader());
    return multiReader;
}
 
Example 16
Source Project: SpringAll   Source File: JSONFileItemReaderDemo.java    License: MIT License 5 votes vote down vote up
private ItemReader<TestData> jsonItemReader() {
    // Parse file.json with Jackson, yielding one TestData per JSON object.
    JsonItemReader<TestData> jsonReader = new JsonItemReader<>(
            new ClassPathResource("file.json"),
            new JacksonJsonObjectReader<>(TestData.class));
    // Component name used to key the reader's saved execution state.
    jsonReader.setName("testDataJsonItemReader");
    return jsonReader;
}
 
Example 17
Source Project: Spring-5.0-Cookbook   Source File: BatchConfig.java    License: MIT License 5 votes vote down vote up
@Bean("step1")
public Step step1(StepBuilderFactory stepBuilderFactory, ItemReader<Employee> reader,
                     ItemProcessor<Employee, Permanent> processor) {
      // Five-item chunks: read Employees, convert each to Permanent,
      // and write via the writer() bean defined in this configuration.
      return stepBuilderFactory
              .get("step1")
              .<Employee, Permanent>chunk(5)
              .reader(reader)
              .writer(writer())
              .processor(processor)
              .build();
}
 
Example 18
Source Project: Spring-5.0-Cookbook   Source File: BatchConfig.java    License: MIT License 5 votes vote down vote up
@Bean("step2")
public Step step2(StepBuilderFactory stepBuilderFactory, ItemReader<Employee> reader,
                   ItemProcessor<Employee, Permanent> processor) {
      // Two-item chunks: read Employees, convert each to Permanent,
      // and write via the xmlWriter() bean defined in this configuration.
      return stepBuilderFactory
              .get("step2")
              .<Employee, Permanent>chunk(2)
              .reader(reader)
              .writer(xmlWriter())
              .processor(processor)
              .build();
}
 
Example 19
@Bean
public Step step(ItemReader<PatientRecord> itemReader,
                 Function<PatientRecord, PatientEntity> processor,
                 JpaItemWriter<PatientEntity> writer) throws Exception {
    // Two-item chunks: map each PatientRecord to a PatientEntity and
    // persist the chunk through the JPA writer.
    return this.stepBuilderFactory
        .get(Constants.STEP_NAME)
        .<PatientRecord, PatientEntity>chunk(2)
        .reader(itemReader)
        .writer(writer)
        .processor(processor)
        .build();
}
 
Example 20
Source Project: CogStack-Pipeline   Source File: JobConfiguration.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Fault-tolerant slave step executed by each partition: reads Documents,
 * runs them through the composite processor, and writes via the combined
 * ES+JDBC writer on the slave task executor.
 *
 * Skip policy: up to {@code skipLimit} WebserviceProcessingFailedException
 * failures are skipped (plus InvalidDataAccessApiUsageException under the
 * "jdbc_out_map" profile); any other exception fails the step.
 */
@Bean
@Qualifier("compositeSlaveStep")
public Step compositeSlaveStep(
                    ItemReader<Document> reader,
        @Qualifier("compositeItemProcessor") ItemProcessor<Document, Document> processor,
        @Qualifier("compositeESandJdbcItemWriter") ItemWriter<Document> writer,
        @Qualifier("slaveTaskExecutor")TaskExecutor taskExecutor,
        @Qualifier("nonFatalExceptionItemProcessorListener")
                            ItemProcessListener nonFatalExceptionItemProcessorListener,
        //@Qualifier("targetDatasourceTransactionManager")PlatformTransactionManager manager,
        StepBuilderFactory stepBuilderFactory
) {
    FaultTolerantStepBuilder stepBuilder = stepBuilderFactory.get("compositeSlaveStep")
            .<Document, Document> chunk(chunkSize)
            .reader(reader)
            .processor(processor)
            .writer(writer)
            .faultTolerant()
            .skipLimit(skipLimit)
            .skip(WebserviceProcessingFailedException.class);
    // Mapped JDBC output can raise InvalidDataAccessApiUsageException for
    // unmappable rows; treat those as skippable under this profile only.
    if (env.acceptsProfiles("jdbc_out_map")) {
      stepBuilder = stepBuilder.skip(InvalidDataAccessApiUsageException.class);
    }
    // Everything not explicitly skippable fails the step.
    return stepBuilder.noSkip(Exception.class)
     //       .listener(nonFatalExceptionItemProcessorListener)
            .listener(new SkipListener())
            .taskExecutor(taskExecutor)
            .build();
}
 
Example 21
Source Project: messaging   Source File: BatchConfiguration.java    License: Apache License 2.0 5 votes vote down vote up
@Bean
Job job(JobBuilderFactory jobBuilderFactory,
 StepBuilderFactory stepBuilderFactory, JdbcTemplate template,
 ItemReader<Contact> fileReader,
 ItemProcessor<Contact, Contact> emailProcessor,
 ItemWriter<Contact> jdbcWriter) {

 // Step 1: wipe the CONTACT table so every run starts from a clean slate.
 Step cleanTable = stepBuilderFactory.get("clean-contact-table")
  .tasklet((contribution, chunkContext) -> {
   template.update("delete from CONTACT");
   return RepeatStatus.FINISHED;
  }).build();

 // Step 2: stream the file into JDBC, five contacts per chunk.
 // Invalid e-mail addresses are skipped (both via skip() and the explicit
 // skip policy below); transient HTTP failures are retried up to twice.
 Step loadContacts = stepBuilderFactory.get("file-to-jdbc-fileToJdbc")
  .<Contact, Contact>chunk(5)
  .reader(fileReader).processor(emailProcessor).writer(jdbcWriter)
  .faultTolerant().skip(InvalidEmailException.class)
  .skipPolicy((Throwable t, int skipCount) -> {
   LogFactory.getLog(getClass()).info("skipping ");
   return t.getClass().isAssignableFrom(InvalidEmailException.class);
  }).retry(HttpStatusCodeException.class)
  .retryLimit(2).build();

 // Run the cleanup step, then the load step, as one ETL job.
 return jobBuilderFactory.get("etl")
  .start(cleanTable).next(loadContacts).build();
}
 
Example 22
Source Project: wallride   Source File: UpdatePostViewsJobConfigurer.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Chunk step (10 items per commit) driven by the reader/writer fields
 * declared on this class. Those fields are not generically typed, so the
 * raw-type casts below are unavoidable; the warning suppression is scoped
 * to this one bean method rather than left as loose unchecked warnings.
 */
@SuppressWarnings({"unchecked", "rawtypes"})
public Step updatePostViewsStep() {
	return stepBuilders.get("updatePostViewsStep")
			.chunk(10)
			.reader((ItemReader) updatePostViewsItemReader)
			.writer((ItemWriter) updatePostViewsItemWriter)
			.build();
}
 
Example 23
@Bean
public ItemReader<String> reader() {
	// Fixed in-memory source: emits "1", "2", "3" then signals end of input.
	String[] items = {"1", "2", "3"};
	return new ListItemReader<String>(Arrays.asList(items));
}
 
Example 24
Source Project: tutorials   Source File: ChunksConfig.java    License: MIT License 5 votes vote down vote up
@Bean
protected Step processLines(ItemReader<Line> reader, ItemProcessor<Line, Line> processor, ItemWriter<Line> writer) {
    // Process the input two Line objects per chunk: read, transform, write.
    return steps
      .get("processLines")
      .<Line, Line>chunk(2)
      .reader(reader)
      .writer(writer)
      .processor(processor)
      .build();
}
 
Example 25
Source Project: tutorials   Source File: SpringBatchRetryConfig.java    License: MIT License 5 votes vote down vote up
public ItemReader<Transaction> itemReader(Resource inputData) throws ParseException {
    // Split each line on the default comma delimiter into the configured tokens.
    DelimitedLineTokenizer lineTokenizer = new DelimitedLineTokenizer();
    lineTokenizer.setNames(tokens);

    // Map every tokenized line to a Transaction via the record mapper.
    DefaultLineMapper<Transaction> mapper = new DefaultLineMapper<>();
    mapper.setLineTokenizer(lineTokenizer);
    mapper.setFieldSetMapper(new RecordFieldSetMapper());

    // Flat-file reader over the supplied resource, skipping the header row.
    FlatFileItemReader<Transaction> fileReader = new FlatFileItemReader<>();
    fileReader.setResource(inputData);
    fileReader.setLinesToSkip(1);
    fileReader.setLineMapper(mapper);
    return fileReader;
}
 
Example 26
Source Project: tutorials   Source File: SpringBatchConfig.java    License: MIT License 5 votes vote down vote up
public ItemReader<Transaction> itemReader(Resource inputData) throws UnexpectedInputException, ParseException {
    // Column names for the comma-delimited transaction file.
    String[] columnNames = {"username", "userid", "transactiondate", "amount"};
    DelimitedLineTokenizer lineTokenizer = new DelimitedLineTokenizer();
    lineTokenizer.setNames(columnNames);

    // Map each tokenized line to a Transaction via the record mapper.
    DefaultLineMapper<Transaction> mapper = new DefaultLineMapper<>();
    mapper.setLineTokenizer(lineTokenizer);
    mapper.setFieldSetMapper(new RecordFieldSetMapper());

    // Flat-file reader over the supplied resource, skipping the header row.
    FlatFileItemReader<Transaction> fileReader = new FlatFileItemReader<>();
    fileReader.setResource(inputData);
    fileReader.setLinesToSkip(1);
    fileReader.setLineMapper(mapper);
    return fileReader;
}
 
Example 27
Source Project: tutorials   Source File: SpringBatchConfiguration.java    License: MIT License 5 votes vote down vote up
@Bean
public Step step1(ItemReader<BookRecord> csvItemReader, ItemWriter<Book> jsonItemWriter) throws IOException {
    // @formatter:off
    return stepBuilderFactory
      .get("step1")
      .<BookRecord, Book> chunk(3)
      .reader(csvItemReader)
      .processor(bookItemProcessor())
      .writer(jsonItemWriter)
      .build();
    // @formatter:on
}
 
Example 28
Source Project: tutorials   Source File: SpringBatchConfiguration.java    License: MIT License 5 votes vote down vote up
@Bean
public Step step2(ItemReader<BookRecord> csvItemReader, ItemWriter<BookDetails> listItemWriter) {
    // @formatter:off
    return stepBuilderFactory
      .get("step2")
      .<BookRecord, BookDetails> chunk(3)
      .reader(csvItemReader)
      .processor(bookDetailsItemProcessor())
      .writer(listItemWriter)
      .build();
    // @formatter:on
}
 
Example 29
Source Project: SpringAll   Source File: MySimpleItemReaderDemo.java    License: MIT License 4 votes vote down vote up
private ItemReader<String> mySimpleItemReader() {
    // Back the custom reader with a fixed list of language names.
    return new MySimpleIteamReader(Arrays.asList("java", "c++", "javascript", "python"));
}
 
Example 30
Source Project: Spring-5.0-Cookbook   Source File: BatchConfig.java    License: MIT License 4 votes vote down vote up
@StepScope
@Bean
public ItemReader<Department> reader() {
    // Step-scoped: a fresh reader instance is created for each step execution,
    // sourced from the bundled depts.xml file.
    String sourceFile = "depts.xml";
    return new DepartmentItemReader(sourceFile);
}