예제 참고 전 개념 정리 : https://velog.io/@studyjun/Spring-Batch
프로젝트 환경
- Java 19
- Spring Boot 3.0.6
- Batch 5.0.1
logging:
  level:
    root: info
    com.studyjun.studyBatch: debug

spring:
  batch:
    job:
      # NOTE: Spring Boot 3 renamed this property to spring.batch.job.name (singular) — verify against the Boot version in use.
      names: ${job.name:NONE}
  jpa:
    hibernate:
      ddl-auto: update
      use-new-id-generator-mappings: true
    show-sql: true
  datasource:
    url: jdbc:mariadb://localhost:3306/studybatch
    username: studyjun
    password: 1234
    driver-class-name: org.mariadb.jdbc.Driver
@Slf4j
@Configuration
public class TaskletJob {

    /**
     * Defines the single-step batch job "taskletJob".
     *
     * @param jobRepository   repository used to persist job metadata
     * @param taskletJobStep1 the only step of this job
     * @return the assembled {@link Job}
     */
    @Bean
    public Job taskletJobBatchBuild(JobRepository jobRepository, Step taskletJobStep1) {
        JobBuilder jobBuilder = new JobBuilder("taskletJob", jobRepository);
        return jobBuilder.start(taskletJobStep1).build();
    }

    /**
     * Step that executes {@code testTasklet} once within a transaction.
     */
    @Bean
    public Step taskletJobStep1(JobRepository jobRepository, Tasklet testTasklet, PlatformTransactionManager platformTransactionManager) {
        StepBuilder stepBuilder = new StepBuilder("taskletJobStep1", jobRepository);
        return stepBuilder
                .tasklet(testTasklet, platformTransactionManager)
                .build();
    }

    /**
     * Tasklet that logs a marker line and completes immediately.
     */
    @Bean
    public Tasklet testTasklet() {
        return (contribution, chunkContext) -> {
            log.info("-> job -> [step1]");
            return RepeatStatus.FINISHED;
        };
    }
}
@Slf4j
@Configuration
public class TaskletJob {

    /**
     * Two-step sequential job: taskletJobStep1 runs first, then taskletJobStep2.
     *
     * @param jobRepository   repository used to persist job metadata
     * @param taskletJobStep1 first step
     * @param taskletJobStep2 second step, run after step1 completes
     * @return the assembled {@link Job}
     */
    @Bean
    public Job taskletJobBatchBuild(JobRepository jobRepository, Step taskletJobStep1, Step taskletJobStep2) {
        return new JobBuilder("taskletJob", jobRepository)
                .start(taskletJobStep1)
                .next(taskletJobStep2)
                .build();
    }

    /** Step wrapping {@code testTasklet1}. */
    @Bean
    public Step taskletJobStep1(JobRepository jobRepository, Tasklet testTasklet1, PlatformTransactionManager platformTransactionManager) {
        return new StepBuilder("taskletJobStep1", jobRepository)
                .tasklet(testTasklet1, platformTransactionManager)
                .build();
    }

    /** Step wrapping {@code testTasklet2}. */
    @Bean
    public Step taskletJobStep2(JobRepository jobRepository, Tasklet testTasklet2, PlatformTransactionManager platformTransactionManager) {
        // BUG FIX: the original called testTasklet2(new Date()), but testTasklet2()
        // declares no parameters, so that call does not compile. Use the injected
        // Tasklet bean instead, consistent with taskletJobStep1.
        return new StepBuilder("taskletJobStep2", jobRepository)
                .tasklet(testTasklet2, platformTransactionManager)
                .build();
    }

    /** Tasklet that logs a step1 marker and finishes. */
    @Bean
    public Tasklet testTasklet1() {
        return (contribution, chunkContext) -> {
            log.info("-> job -> [step1] ");
            return RepeatStatus.FINISHED;
        };
    }

    /**
     * Tasklet that logs a step2 marker and finishes.
     * @JobScope defers creation until the enclosing job execution starts.
     */
    @Bean
    @JobScope
    public Tasklet testTasklet2() {
        return (contribution, chunkContext) -> {
            log.info("-> step1 -> [step2] ");
            return RepeatStatus.FINISHED;
        };
    }
}
@Setter
@Getter
@ToString
@Entity
@AllArgsConstructor
@NoArgsConstructor
public class Dept {

    // Fields narrowed from package-private to private: external access already
    // goes through the Lombok-generated getters/setters, so this only tightens
    // encapsulation without changing the class's public interface.

    /** Department number; primary key. */
    @Id
    private Integer deptNo;

    /** Department name. */
    private String dName;

    /** Department location. */
    private String loc;
}
@Setter
@Getter
@ToString
@Entity
@AllArgsConstructor
@NoArgsConstructor
public class Dept2 {

    // Fields narrowed from package-private to private: external access already
    // goes through the Lombok-generated getters/setters, so this only tightens
    // encapsulation without changing the class's public interface.

    /** Department number; primary key. */
    @Id
    private Integer deptNo;

    /** Department name (written with a "NEW_" prefix by JpaPageJob1). */
    private String dName;

    /** Department location (written with a "NEW_" prefix by JpaPageJob1). */
    private String loc;
}
@SpringBootTest
public class TestRepository {

    @Autowired
    DeptRepository deptRepository;

    /**
     * Seeds the dept table with 100 rows (deptNo 1..100) and commits them
     * so the batch jobs have data to read.
     */
    @Test
    @Commit
    public void dept01() {
        for (int no = 1; no <= 100; no++) {
            String suffix = String.valueOf(no);
            deptRepository.save(new Dept(no, "dName_" + suffix, "loc_" + suffix));
        }
    }
}
@Slf4j
@RequiredArgsConstructor
@Configuration
public class JpaPageJob1 {

    private final EntityManagerFactory entityManagerFactory;

    /** Page size for the reader and commit interval for the chunk step. */
    private int chunkSize = 10;

    /** Job "JpaPageJob1": copies Dept rows into Dept2 in pages. */
    @Bean
    public Job jpaPageJob1BatchBuild(JobRepository jobRepository, Step jpaPageJobStep1) {
        return new JobBuilder("JpaPageJob1", jobRepository)
                .start(jpaPageJobStep1)
                .build();
    }

    /**
     * Chunk-oriented step: read Dept pages, map each to a Dept2, persist.
     */
    @Bean
    public Step jpaPageJobStep1(JobRepository jobRepository, PlatformTransactionManager platformTransactionManager) {
        return new StepBuilder("JpaPageJobStep1", jobRepository)
                // Spring Batch 5: pass the transaction manager with the chunk size
                // (chunk(int) alone is deprecated).
                .<Dept, Dept2>chunk(chunkSize, platformTransactionManager)
                .reader(jpaPageJob1DBItemReader())
                .processor(jpaPageJob1Processor())
                .writer(jpaPageJob1DBItemWriter())
                .build();
    }

    /** Maps a Dept to a Dept2 with "NEW_" prefixes on name and location. */
    @Bean
    public ItemProcessor<Dept, Dept2> jpaPageJob1Processor() {
        return dept -> new Dept2(dept.getDeptNo(), "NEW_" + dept.getDName(), "NEW_" + dept.getLoc());
    }

    /** Paging reader over Dept. */
    @Bean
    public JpaPagingItemReader<Dept> jpaPageJob1DBItemReader() {
        return new JpaPagingItemReaderBuilder<Dept>()
                .name("jpaPageJob1DBItemReader")
                .entityManagerFactory(entityManagerFactory)
                .pageSize(chunkSize)
                // BUG FIX: the original ordered by the string literal 'dept_no',
                // which sorts by a constant — i.e. no deterministic order at all.
                // A paging reader requires a stable ORDER BY on a unique key,
                // otherwise pages can skip or repeat rows.
                .queryString("SELECT d FROM Dept d ORDER BY d.deptNo ASC")
                .build();
    }

    /** JPA writer that persists the produced Dept2 entities. */
    @Bean
    public JpaItemWriter<Dept2> jpaPageJob1DBItemWriter() {
        JpaItemWriter<Dept2> jpaItemWriter = new JpaItemWriter<>();
        jpaItemWriter.setEntityManagerFactory(entityManagerFactory);
        return jpaItemWriter;
    }
}
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
public class OneDto {

    // Narrowed to private; the Lombok getter/setter keep external access intact.
    private String one;

    // Lombok's @ToString was removed: this explicit override already defines
    // toString(), so the annotation was redundant and misleading.

    /** Renders just the payload so the flat-file writer emits the line verbatim. */
    @Override
    public String toString() {
        return one;
    }
}
@Slf4j
@Configuration
public class TextJob {

    /** Commit interval for the chunk step. */
    private static final int chunkSize = 5;

    /** Job "textJob": reads lines from a classpath text file and rewrites them. */
    @Bean
    public Job textJobBatchBuild(JobRepository jobRepository, Step textJobBatchStep) {
        return new JobBuilder("textJob", jobRepository)
                .start(textJobBatchStep)
                .build();
    }

    /** Chunk step: file reader -> file writer, committing every chunkSize items. */
    @Bean
    public Step textJobBatchStep(JobRepository jobRepository, PlatformTransactionManager platformTransactionManager) {
        return new StepBuilder("textJobBatchStep", jobRepository)
                // Spring Batch 5: chunk(int) alone is deprecated; pass the
                // transaction manager together with the chunk size.
                .<OneDto, OneDto>chunk(chunkSize, platformTransactionManager)
                .reader(textJobFileReader())
                .writer(textJobFileWriter())
                .build();
    }

    /** Reads textJobInput.txt, prefixing each payload with its line number. */
    @Bean
    public FlatFileItemReader<OneDto> textJobFileReader() {
        FlatFileItemReader<OneDto> flatFileItemReader = new FlatFileItemReader<>();
        // BUG FIX: FlatFileItemReader saves restart state by default and
        // requires a component name on open(); the original never set one,
        // which fails at runtime with "The name of the component has to be specified".
        flatFileItemReader.setName("textJobFileReader");
        flatFileItemReader.setResource(new ClassPathResource("textJobInput.txt"));
        flatFileItemReader.setLineMapper((line, lineNumber) -> new OneDto(lineNumber + "==" + line));
        return flatFileItemReader;
    }

    /** Writes each OneDto (via its toString) to textJobOutPut.txt. */
    @Bean
    public FlatFileItemWriter<OneDto> textJobFileWriter() {
        // FIX: the original declared a raw FlatFileItemWriter; parameterize it.
        return new FlatFileItemWriterBuilder<OneDto>()
                .name("textJobFileWriter")
                .resource(new FileSystemResource("textJobOutPut.txt"))
                .lineAggregator(new CustomPassThroughLineAggregator<>())
                .build();
    }
}
@Getter
@Setter
@ToString
@NoArgsConstructor
@AllArgsConstructor
public class TwoDto {

    // Fields narrowed to private: BeanWrapperFieldSetMapper and the field
    // extractors use the Lombok getters/setters, so behavior is unchanged
    // while encapsulation is tightened.

    /** First column of the record. */
    private String one;

    /** Second column of the record. */
    private String two;
}
@Slf4j
@Configuration
public class CsvJob {

    /** Commit interval for the chunk step. */
    private static final int chunkSize = 5;

    /** Job "csvJob": reads a ':'-delimited CSV and writes it back '@'-delimited. */
    @Bean
    public Job csvJobBatchBuild(JobRepository jobRepository, Step csvJobBatchStep) {
        return new JobBuilder("csvJob", jobRepository)
                .start(csvJobBatchStep)
                .build();
    }

    /** Chunk step wiring the CSV reader to the delimited writer. */
    @Bean
    public Step csvJobBatchStep(JobRepository jobRepository, PlatformTransactionManager platformTransactionManager) {
        // NOTE(review): calling the @Bean method csvJobFileWriter(...) directly with
        // an argument relies on @Configuration CGLIB proxying; Spring still needs to
        // resolve the WritableResource parameter for the bean definition. Confirm a
        // WritableResource bean (or this call path) actually works at startup.
        return new StepBuilder("csvJobBatchStep", jobRepository)
                // Spring Batch 5: chunk(int) alone is deprecated; pass the
                // transaction manager together with the chunk size.
                .<TwoDto, TwoDto>chunk(chunkSize, platformTransactionManager)
                .reader(csvJobFileReader())
                .writer(csvJobFileWriter(new FileSystemResource("csvJobOutput")))
                .build();
    }

    /**
     * Reads csvJobInput.csv: skips the header line, splits each line on ':'
     * into fields "one" and "two", and maps them onto TwoDto via its setters.
     */
    @Bean
    public FlatFileItemReader<TwoDto> csvJobFileReader() {
        FlatFileItemReader<TwoDto> flatFileItemReader = new FlatFileItemReader<>();
        // FIX: FlatFileItemReader saves restart state by default and requires a
        // component name on open(); without it the step fails at runtime.
        flatFileItemReader.setName("csvJobFileReader");
        flatFileItemReader.setResource(new ClassPathResource("csvJobInput.csv"));
        flatFileItemReader.setLinesToSkip(1);

        DelimitedLineTokenizer delimitedLineTokenizer = new DelimitedLineTokenizer();
        delimitedLineTokenizer.setNames("one", "two");
        delimitedLineTokenizer.setDelimiter(":");

        BeanWrapperFieldSetMapper<TwoDto> beanWrapperFieldSetMapper = new BeanWrapperFieldSetMapper<>();
        beanWrapperFieldSetMapper.setTargetType(TwoDto.class);

        DefaultLineMapper<TwoDto> dtoDefaultLineMapper = new DefaultLineMapper<>();
        dtoDefaultLineMapper.setLineTokenizer(delimitedLineTokenizer);
        dtoDefaultLineMapper.setFieldSetMapper(beanWrapperFieldSetMapper);
        flatFileItemReader.setLineMapper(dtoDefaultLineMapper);
        return flatFileItemReader;
    }

    /**
     * Writes TwoDto records to the given resource, joining the "one" and "two"
     * properties with '@'.
     */
    @Bean
    public FlatFileItemWriter<TwoDto> csvJobFileWriter(WritableResource resource) {
        BeanWrapperFieldExtractor<TwoDto> beanWrapperFieldExtractor = new BeanWrapperFieldExtractor<>();
        beanWrapperFieldExtractor.setNames(new String[] {"one", "two"});
        beanWrapperFieldExtractor.afterPropertiesSet();

        DelimitedLineAggregator<TwoDto> dtoDelimitedLineAggregator = new DelimitedLineAggregator<>();
        dtoDelimitedLineAggregator.setDelimiter("@");
        dtoDelimitedLineAggregator.setFieldExtractor(beanWrapperFieldExtractor);

        return new FlatFileItemWriterBuilder<TwoDto>()
                .name("csvJobFileWriter")
                .resource(resource)
                .lineAggregator(dtoDelimitedLineAggregator)
                .build();
    }
}
@Slf4j
@Configuration
public class FixedLengthJob {

    /** Commit interval for the chunk step. */
    private static final int chunkSize = 5;

    /** Job "FixedLengthJob": reads fixed-width records and reformats them. */
    @Bean
    public Job fixedLengthJobBatchBuild(JobRepository jobRepository, Step fixedLengthJobBatchStep) {
        return new JobBuilder("FixedLengthJob", jobRepository)
                .start(fixedLengthJobBatchStep)
                .build();
    }

    /** Chunk step wiring the fixed-length reader to the formatted writer. */
    @Bean
    public Step fixedLengthJobBatchStep(JobRepository jobRepository, PlatformTransactionManager platformTransactionManager) {
        // NOTE(review): calling the @Bean method fixedLengthJobFileWriter(...) with
        // an inline resource relies on @Configuration proxying resolving the
        // FileSystemResource parameter; confirm this resolves at startup.
        return new StepBuilder("fixedLengthJobBatchStep", jobRepository)
                // Spring Batch 5: chunk(int) alone is deprecated; pass the
                // transaction manager together with the chunk size.
                .<TwoDto, TwoDto>chunk(chunkSize, platformTransactionManager)
                .reader(fixedLengthJobFileReader())
                .writer(fixedLengthJobFileWriter(new FileSystemResource("fixedLengthJobOutput.txt")))
                .build();
    }

    /**
     * Reads fixedLengthJobInput.txt: skips the header line, takes columns 1-5
     * as "one" and 6-10 as "two", and maps them onto TwoDto via its setters.
     */
    @Bean
    public FlatFileItemReader<TwoDto> fixedLengthJobFileReader() {
        FlatFileItemReader<TwoDto> flatFileItemReader = new FlatFileItemReader<>();
        // FIX: FlatFileItemReader saves restart state by default and requires a
        // component name on open(); without it the step fails at runtime.
        flatFileItemReader.setName("fixedLengthJobFileReader");
        flatFileItemReader.setResource(new ClassPathResource("fixedLengthJobInput.txt"));
        flatFileItemReader.setLinesToSkip(1);

        FixedLengthTokenizer fixedLengthTokenizer = new FixedLengthTokenizer();
        fixedLengthTokenizer.setNames("one", "two");
        fixedLengthTokenizer.setColumns(new Range(1, 5), new Range(6, 10));

        BeanWrapperFieldSetMapper<TwoDto> beanWrapperFieldSetMapper = new BeanWrapperFieldSetMapper<>();
        beanWrapperFieldSetMapper.setTargetType(TwoDto.class);

        DefaultLineMapper<TwoDto> dtoDefaultLineMapper = new DefaultLineMapper<>();
        dtoDefaultLineMapper.setLineTokenizer(fixedLengthTokenizer);
        dtoDefaultLineMapper.setFieldSetMapper(beanWrapperFieldSetMapper);
        flatFileItemReader.setLineMapper(dtoDefaultLineMapper);
        return flatFileItemReader;
    }

    /**
     * Writes TwoDto records using the fixed format "%-5s###%5s"
     * (left-padded "one", right-padded "two", separated by "###").
     */
    @Bean
    public FlatFileItemWriter<TwoDto> fixedLengthJobFileWriter(FileSystemResource resource) {
        BeanWrapperFieldExtractor<TwoDto> fieldExtractor = new BeanWrapperFieldExtractor<>();
        fieldExtractor.setNames(new String[] {"one", "two"});
        fieldExtractor.afterPropertiesSet();

        FormatterLineAggregator<TwoDto> lineAggregator = new FormatterLineAggregator<>();
        lineAggregator.setFormat("%-5s###%5s");
        lineAggregator.setFieldExtractor(fieldExtractor);

        return new FlatFileItemWriterBuilder<TwoDto>()
                .name("fixedLengthJobFileWriter")
                .resource(resource)
                .lineAggregator(lineAggregator)
                .build();
    }
}