请以此为主要参考:http://walkingtechie.blogspot.co.uk/2017/03/spring-batch-csv-file-to-mysql.html
这篇文章说明了如何使用 Spring Batch 将 CSV 文件导入 MySQL 数据库。
但是,正如您所说,所有示例都假设使用硬编码的文件路径,而这并不符合需求。
在下面的代码中,重要的部分(与我提供的链接中的示例不同之处)是 Controller:该控制器先将一个 multipart 文件保存到一个临时文件夹中,然后将文件名作为参数传递给 Job:
JobExecution jobExecution = jobLauncher.run(importUserJob, new JobParametersBuilder() .addString("fullPathFileName", fileToimport.getAbsolutePath()) .toJobParameters());最后,importReader使用参数fullPathFileName加载用户上传的文件:
@Bean public FlatFileItemReader<Person> importReader(@Value("#{jobParameters[fullPathFileName]}") String pathToFile) { FlatFileItemReader<Person> reader = new FlatFileItemReader<>(); reader.setResource(new FileSystemResource(pathToFile));下面是完整的代码(未经测试,但具有大多数组件),可以使您有所了解:
@Configuration@EnableBatchProcessingpublic class BatchConfig{ @Bean public ResourcelessTransactionManager batchTransactionManager(){ ResourcelessTransactionManager transactionManager = new ResourcelessTransactionManager(); return transactionManager; } @Bean protected JobRepository jobRepository(ResourcelessTransactionManager batchTransactionManager) throws Exception{ MapJobRepositoryFactoryBean jobRepository = new MapJobRepositoryFactoryBean(); jobRepository.setTransactionManager(batchTransactionManager); return (JobRepository)jobRepository.getObject(); } @Bean public JobLauncher jobLauncher(JobRepository jobRepository) throws Exception { SimpleJobLauncher jobLauncher = new SimpleJobLauncher(); jobLauncher.setJobRepository(jobRepository); return jobLauncher; }}@Configurationpublic class importJobConfig { @Bean public FlatFileItemReader<Person> importReader(@Value("#{jobParameters[fullPathFileName]}") String pathToFile) { FlatFileItemReader<Person> reader = new FlatFileItemReader<>(); reader.setResource(new FileSystemResource(pathToFile)); reader.setLineMapper(new DefaultLineMapper<Person>() {{ setLineTokenizer(new DelimitedLineTokenizer() {{ setNames(new String[]{"firstName", "lastName"}); }}); setFieldSetMapper(new BeanWrapperFieldSetMapper<Person>() {{ setTargetType(Person.class); }}); }}); return reader; } @Bean public PersonItemProcessor processor() { return new PersonItemProcessor(); } @Bean public JdbcBatchItemWriter<Person> writer() { JdbcBatchItemWriter<Person> writer = new JdbcBatchItemWriter<>(); writer.setItemSqlParameterSourceProvider( new BeanPropertyItemSqlParameterSourceProvider<Person>()); writer.setSql("INSERT INTO people (first_name, last_name) VALUES (:firstName, :lastName)"); writer.setDataSource(dataSource); return writer; } // end::readerwriterprocessor[] // tag::jobstep[] @Bean public Job importUserJob(JobCompletionNotificationListener listener) { return jobBuilderFactory.get("importUserJob").incrementer(new RunIdIncrementer()) 
.listener(listener).flow(step1()).end().build(); } @Bean public Step step1(@Qualifier("importReader") ItemReader<Person> importReader) { return stepBuilderFactory.get("step1").<Person, Person>chunk(10).reader(importReader) .processor(processor()).writer(writer()).build(); }}@RestControllerpublic class MyimportController { @Autowired private JobLauncher jobLauncher; @Autowired private Job importUserJob; @RequestMapping(value="/import/file", method=RequestMethod.POST) public String create(@RequestParam("file") MultipartFile multipartFile) throws IOException{ //Save multipartFile file in a temporary physical folder String path = new ClassPathResource("tmpuploads/").getURL().getPath();//it's assumed you have a folder called tmpuploads in the resources folder File fileToimport = new File(path + multipartFile.getOriginalFilename()); OutputStream outputStream = new FileOutputStream(fileToimport); IOUtils.copy(multipartFile.getInputStream(), outputStream); outputStream.flush(); outputStream.close(); //Launch the Batch Job JobExecution jobExecution = jobLauncher.run(importUserJob, new JobParametersBuilder() .addString("fullPathFileName", fileToimport.getAbsolutePath()) .toJobParameters()); return "OK"; }}


