Spring批处理不从数据库读取数据,也不会写入文件

问题描述:

我想使用两个数据源:一个用于 Spring Batch 的元数据,另一个用于业务数据。但我的批处理作业只是空跑了一遍就结束,根本不尝试连接 secondaryDataSource——既不从数据库读取数据,也不向文件写入任何内容。有人能指出我的配置哪里有问题吗?

@Configuration
@EnableBatchProcessing
public class BatchConfiguration extends DefaultBatchConfigurer {

    /**
     * Points Spring Batch's own infrastructure (job repository / metadata
     * tables) at the dedicated "batchDataSource" bean, so batch bookkeeping
     * stays separate from the business data source.
     */
    @Override
    @Autowired
    public void setDataSource(@Qualifier("batchDataSource") DataSource metadataSource) {
        super.setDataSource(metadataSource);
    }
}

public class SpringBatchConfig { 

    @Autowired 
    private JobBuilderFactory jobs; 

    @Autowired 
    private StepBuilderFactory steps; 

    private static final String QUERY_FIND_STUDENTS = "select * from ..."; 

    @Bean 
    ItemReader<DotDetailsDTO> reader(
      @Qualifier("secondaryDataSource") DataSource dataSource) 
      throws SQLException { 
     JdbcCursorItemReader<DotDetailsDTO> databaseReader = new JdbcCursorItemReader<>(); 

     databaseReader.setDataSource(dataSource); 
     databaseReader.setSql(QUERY_FIND_STUDENTS); 
     databaseReader.setRowMapper(new DOTRowMapper()); 

     return databaseReader; 
    } 

    @Bean 
    public ItemProcessor<DotDetailsDTO, DotDetailsDTO> itemProcessor() { 
     return new CustomItemProcessor(); 
    } 

    @Bean 
    public ItemWriter<DotDetailsDTO> writer() throws Exception { 
     FlatFileItemWriter<DotDetailsDTO> writer = new FlatFileItemWriter<DotDetailsDTO>(); 
     writer.setResource(new ClassPathResource("file:test.csv")); 
     DelimitedLineAggregator<DotDetailsDTO> delLineAgg = new DelimitedLineAggregator<DotDetailsDTO>(); 
     delLineAgg.setDelimiter(","); 
     BeanWrapperFieldExtractor<DotDetailsDTO> fieldExtractor = new BeanWrapperFieldExtractor<DotDetailsDTO>(); 
     fieldExtractor.setNames(new String[] { "airwayBillNumber", 
       "outboundDate", "orig", "dest", "lotNumber", 
       "lotFlightNumber", "lotOrig", "lotDest", "lotPcs", "lotWt", 
       "lotFlightDepartDate", "iataCode" }); 
     delLineAgg.setFieldExtractor(fieldExtractor); 
     writer.setLineAggregator(delLineAgg); 
     writer.afterPropertiesSet(); 
     return writer; 
    } 

    @Bean 
    protected Step step1(ItemReader<DotDetailsDTO> reader, 
      ItemProcessor<DotDetailsDTO, DotDetailsDTO> processor, 
      ItemWriter<DotDetailsDTO> writer) throws SQLException { 
     return steps.get("step1").<DotDetailsDTO, DotDetailsDTO> chunk(10) 
       .reader(reader).processor(processor).writer(writer).build(); 
    } 

    @Bean(name = "firstBatchJob") 
    public Job job(@Qualifier("step1") Step step1) { 
     return jobs.get("firstBatchJob").start(step1).build(); 
    } 

} 

// Declares the two data sources: "batchDataSource" for Spring Batch metadata
// and "secondaryDataSource" for business data.
// NOTE(review): this class has no @Configuration annotation — it is registered
// directly on the context in main(), which processes @Bean methods in "lite"
// mode; confirm this is intentional.
public class DataSourceConfiguration { 

    // Data source for Spring Batch's own metadata tables (job repository).
    // Connection details are elided ("...") in this snippet.
    @Bean(name="batchDataSource") 
    public DataSource dataSource() throws SQLException { 
     BasicDataSource dataSource = new BasicDataSource(); 
     ... 
     return dataSource; 
    } 

    // JdbcTemplate bound to the batch-metadata data source.
    @Bean 
    public JdbcTemplate jdbcTemplate(
      @Qualifier("batchDataSource") final DataSource dataSource) { 
     return new JdbcTemplate(dataSource); 
    } 

    // Business (Oracle) data source. @Primary makes THIS bean the default for
    // any unqualified DataSource injection elsewhere in the context — the
    // batch reader still selects it explicitly via @Qualifier.
    @Primary 
    @Bean(name="secondaryDataSource") 
    public DataSource secondaryDataSource() throws SQLException { 
     OracleDataSource secondaryDataSource = new OracleDataSource(); 
     ... 
     return secondaryDataSource; 
    } 

    // JdbcTemplate bound to the business data source.
    @Bean 
    public JdbcTemplate secondaryJdbcTemplate(
      @Qualifier("secondaryDataSource") final DataSource secondaryDataSource) { 
     return new JdbcTemplate(secondaryDataSource); 
    } 
} 

// ORIGINAL (broken) launcher — kept as-is to illustrate the bug described
// below: the job is launched with an EMPTY JobParameters every time. Spring
// Batch identifies a JobInstance by job name + parameters, so every run maps
// to the same already-completed instance and the step is never re-executed —
// which is why nothing was read or written.
public static void main(String[] args) { 
    // Spring Java config 
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(); 
    context.register(DataSourceConfiguration.class); 
    context.register(BatchConfiguration.class); 
    context.register(SpringBatchConfig.class); 
    context.refresh(); 

    JobLauncher jobLauncher = (JobLauncher) context.getBean("jobLauncher"); 
    Job job = (Job) context.getBean("firstBatchJob"); 
    System.out.println("Starting the batch job"); 
    try { 
     // BUG: new JobParameters() is identical on every run — see note above.
     JobExecution execution = jobLauncher.run(job, new JobParameters()); 
     System.out.println("Job Status : " + execution.getStatus()); 
     System.out.println("Job completed"); 
    } catch (Exception e) { 
     e.printStackTrace(); 
     System.out.println("Job failed"); 
    } 

} 

两天之后,我终于弄清楚了问题所在:我没有提供新的 JobParameters,而是一次又一次地用同一组(空的)参数运行同一个作业。Spring Batch 以「作业名 + 参数」来标识一个 JobInstance,参数不变就会被视为同一个已完成的实例,因此作业根本不会重新执行。

下面是 main 方法中的修复:每次启动时都传入一个唯一的 JobParameters。

/**
 * Fixed launcher: every run supplies a UNIQUE JobParameters value, so Spring
 * Batch creates a fresh JobInstance instead of reusing the completed one.
 *
 * Improvements over the posted fix:
 * - try-with-resources closes the AnnotationConfigApplicationContext (it is
 *   Closeable; the original leaked it).
 * - the legacy SimpleDateFormat/Date pair is replaced by a millisecond
 *   timestamp via addLong — same uniqueness guarantee, no legacy date API.
 */
public static void main(String[] args) {

    try (AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext()) {
        context.register(DataSourceConfiguration.class);
        context.register(BatchConfiguration.class);
        context.register(SpringBatchConfig.class);
        context.refresh();

        JobLauncher jobLauncher = (JobLauncher) context.getBean("jobLauncher");
        Job job = (Job) context.getBean("firstBatchJob");
        System.out.println("Starting the batch job");
        try {
            // Unique per run — this is what makes Spring Batch start a new
            // JobInstance instead of short-circuiting on the completed one.
            JobParameters jobParam =
                new JobParametersBuilder().addLong("runTime", System.currentTimeMillis()).toJobParameters();
            JobExecution execution = jobLauncher.run(job, jobParam);
            System.out.println("Job Status : " + execution.getStatus());
            System.out.println("Job completed : " + execution.getJobId());
        } catch (Exception e) {
            e.printStackTrace();
            System.out.println("Job failed");
        }
    }

}