Skip to content

Instantly share code, notes, and snippets.

View Edijun's full-sized avatar

Edi Djunaidi Edijun

View GitHub Profile
## Spring DATASOURCE (DataSourceAutoConfiguration & DataSourceProperties)
spring.datasource.url =
spring.datasource.username =
spring.datasource.password =
spring.datasource.platform = postgres
## Hibernate Properties
# The SQL dialect makes Hibernate generate better SQL for the chosen database
spring.jpa.properties.hibernate.dialect = org.hibernate.dialect.PostgreSQL94Dialect
spring.jpa.properties.hibernate.jdbc.lob.non_contextual_creation=true
## Spring DATASOURCE (DataSourceAutoConfiguration & DataSourceProperties)
spring.datasource.url =
spring.datasource.username =
spring.datasource.password =
spring.datasource.platform = postgres
## Hibernate Properties
# The SQL dialect makes Hibernate generate better SQL for the chosen database
spring.jpa.properties.hibernate.dialect = org.hibernate.dialect.PostgreSQL94Dialect
spring.jpa.properties.hibernate.jdbc.lob.non_contextual_creation=true
-- Table: public.product
-- DROP TABLE public.product;
CREATE TABLE public.product
(
id character varying(10) COLLATE pg_catalog."default" NOT NULL,
name character varying(50) COLLATE pg_catalog."default",
price integer,
description character varying(100) COLLATE pg_catalog."default",
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.listener.JobExecutionListenerSupport;
@Override
public void setDataSource(DataSource dataSource) {
// Intentionally left empty: by NOT forwarding the DataSource to the batch
// configurer, Spring Batch falls back to a Map-based (in-memory)
// JobRepository instead of persisting job metadata to the database.
}
/**
 * Assembles the CSV-to-database batch job as a single-step flow.
 *
 * <p>A {@code RunIdIncrementer} is attached so the job can be re-launched
 * with a fresh run id, and the supplied listener is notified of job
 * lifecycle events (e.g. completion).
 *
 * @param listener callback invoked on job execution events
 * @return the fully built {@code csvFileToDatabaseJob}
 */
@Bean
Job csvFileToDatabaseJob(JobCompletionNotificationListener listener) {
    return jobBuilderFactory.get("csvFileToDatabaseJob")
            .incrementer(new RunIdIncrementer())
            .listener(listener)
            .flow(csvFileToDatabaseStep())
            .end()
            .build();
}
/**
 * Defines the single chunk-oriented step of the CSV import job:
 * read a {@code Product} from the CSV file, pass it through the
 * processor, and write it to the database.
 *
 * @return the configured {@code csvFileToDatabaseStep}
 */
@Bean
public Step csvFileToDatabaseStep() {
    // NOTE(review): chunk(1) commits after every single item — fine for a
    // demo, but consider a larger chunk size for real batch throughput.
    return stepBuilderFactory.get("csvFileToDatabaseStep")
            .<Product, Product>chunk(1)
            .reader(csvProductReader())
            .processor(csvProductProcessor())
            .writer(csvProductWriter())
            .build();
}
/**
 * Creates the item writer that inserts each {@code Product} into the
 * {@code product} table via a named-parameter JDBC batch insert, with
 * bean properties (id, name, price, description) bound to the matching
 * SQL parameters by a {@code BeanPropertyItemSqlParameterSourceProvider}.
 *
 * @return the configured JDBC batch writer for {@code Product} items
 */
@Bean
public JdbcBatchItemWriter<Product> csvProductWriter() {
    JdbcBatchItemWriter<Product> writer = new JdbcBatchItemWriter<>();
    writer.setDataSource(dataSource);
    writer.setSql("INSERT INTO product (id, name, price, description) VALUES (:id, :name, :price, :description)");
    writer.setItemSqlParameterSourceProvider(new BeanPropertyItemSqlParameterSourceProvider<>());
    return writer;
}
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.item.ItemProcessor;
import id.edijun.example.springbatch.model.Product;
public class ProductProcessor implements ItemProcessor<Product, Product> {
private static final Logger log = LoggerFactory.getLogger(ProductProcessor.class);
@Override
/**
 * Exposes a {@code ProductProcessor} as the item processor applied to
 * each {@code Product} read from the CSV before it is written out.
 *
 * @return a new {@code ProductProcessor} instance
 */
@Bean
ItemProcessor<Product, Product> csvProductProcessor() {
    ItemProcessor<Product, Product> processor = new ProductProcessor();
    return processor;
}