SFTP @Poller not triggering, nothing gets polled - spring-integration

I am trying to set up a Spring Boot application that will poll a CSV file over SFTP. I see no activity in the Spring Boot application, nor on the FileZilla SFTP server, but if I change the same code to FTP then it works.
@Component
@EnableIntegration
public class IntegrationConfiguration {

    @Autowired
    FTPConfigProperties ftpConfigProperties;

    @Autowired
    private BeanFactory beanFactory;

    @Value("classpath:certificate.crt")
    Resource certificateFile;

    @Bean
    public SessionFactory<ChannelSftp.LsEntry> ftpSessionFactory() {
        DefaultSftpSessionFactory factory = new DefaultSftpSessionFactory();
        factory.setHost("127.0.0.1");
        factory.setPort(990);
        factory.setUser("abhinav");
        factory.setPassword("nssdw");
        factory.setPrivateKey(certificateFile);
        factory.setAllowUnknownKeys(true);
        return new CachingSessionFactory<ChannelSftp.LsEntry>(factory, 100000);
    }

    @Bean
    public SftpInboundFileSynchronizer ftpInboundFileSynchronizer() {
        SftpInboundFileSynchronizer fileSynchronizer = new SftpInboundFileSynchronizer(ftpSessionFactory());
        fileSynchronizer.setDeleteRemoteFiles(false);
        fileSynchronizer.setRemoteDirectory("/");
        fileSynchronizer.setFilter(filter());
        fileSynchronizer.setPreserveTimestamp(true);
        fileSynchronizer.setBeanFactory(beanFactory);
        return fileSynchronizer;
    }

    // here the poller is configured
    @Bean
    @InboundChannelAdapter(channel = "fromSftpChannel", poller = @Poller(fixedDelay = "10000"))
    public MessageSource<File> ftpMessageSource() throws Exception {
        SftpInboundFileSynchronizingMessageSource source = new SftpInboundFileSynchronizingMessageSource(
                ftpInboundFileSynchronizer());
        source.setLocalDirectory(new File("ftp-inbound"));
        source.setAutoCreateLocalDirectory(true);
        source.setMaxFetchSize(1);
        source.setBeanFactory(beanFactory);
        source.setUseWatchService(true);
        return source;
    }

    public CompositeFileListFilter<ChannelSftp.LsEntry> filter() {
        CompositeFileListFilter<ChannelSftp.LsEntry> filter = new CompositeFileListFilter<ChannelSftp.LsEntry>();
        filter.addFilter(new SftpSimplePatternFileListFilter("*.csv"));
        filter.addFilter(acceptOnceFilter());
        filter.addFilter(new LastModifiedLsEntryFileListFilter());
        return filter;
    }

    @Bean
    public SftpPersistentAcceptOnceFileListFilter acceptOnceFilter() {
        SftpPersistentAcceptOnceFileListFilter filter = new SftpPersistentAcceptOnceFileListFilter(metadataStore(), "ftpPersistentAcceptOnce");
        filter.setFlushOnUpdate(true);
        return filter;
    }

    @Bean
    public ConcurrentMetadataStore metadataStore() {
        PropertiesPersistingMetadataStore propertiesPersistingMetadataStore = new PropertiesPersistingMetadataStore();
        propertiesPersistingMetadataStore.setBaseDirectory("./metastore");
        propertiesPersistingMetadataStore.setFileName("ftpStream.properties");
        return propertiesPersistingMetadataStore;
    }

    @Bean
    @ServiceActivator(inputChannel = "jobChannel", outputChannel = "nullChannel")
    protected JobLaunchingMessageHandler launcher(JobLauncher jobLauncher) {
        return new JobLaunchingMessageHandler(jobLauncher);
    }
}
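One thing worth checking that the post itself does not mention: SFTP runs over SSH and defaults to port 22, while 990 is conventionally the implicit FTPS port, so a session factory pointed at 990 may be talking to a server that does not speak SSH at all (which would also fit the symptom that the same code works when switched to FTP). A minimal sketch of the session factory with the conventional port, purely as an assumption to verify against the actual server:

@Bean
public SessionFactory<ChannelSftp.LsEntry> sftpSessionFactory() {
    DefaultSftpSessionFactory factory = new DefaultSftpSessionFactory();
    factory.setHost("127.0.0.1");
    factory.setPort(22); // SSH/SFTP default; 990 is normally implicit FTPS
    factory.setUser("abhinav");
    factory.setPassword("nssdw");
    factory.setAllowUnknownKeys(true);
    return new CachingSessionFactory<>(factory);
}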
Here is the next piece, where I trigger the Spring Batch job; it then goes to the service activator:
@Component
public class FileToJobTransformer implements ApplicationContextAware {

    private ApplicationContext context;

    @Autowired
    private Job job;

    @Transformer(inputChannel = "fromSftpChannel", outputChannel = "jobChannel")
    public JobLaunchRequest transform(File aFile) throws Exception {
        String fileName = aFile.getName();
        JobParameters jobParameters = new JobParametersBuilder().addString(
                "input.file", aFile.getAbsolutePath()).toJobParameters();
        JobLaunchRequest request = new JobLaunchRequest(job, jobParameters);
        return request;
    }

    @Override
    public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
        this.context = applicationContext;
    }
}
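A side note on this transformer, not raised in the original post: Spring Batch identifies a job instance by its parameters, so a request built only from input.file will be refused as an already-complete instance if the same file path is ever launched twice. A common sketch is to add a unique parameter (the parameter name here is illustrative):

JobParameters jobParameters = new JobParametersBuilder()
        .addString("input.file", aFile.getAbsolutePath())
        .addLong("run.time", System.currentTimeMillis()) // makes each launch a new job instance
        .toJobParameters();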
The custom filter code is as follows:
public class LastModifiedLsEntryFileListFilter implements FileListFilter<ChannelSftp.LsEntry> {

    private static final long DEFAULT_AGE = 60;

    private volatile long age = DEFAULT_AGE;

    public long getAge() {
        return this.age;
    }

    public void setAge(long age) {
        setAge(age, TimeUnit.SECONDS);
    }

    public void setAge(long age, TimeUnit unit) {
        this.age = unit.toSeconds(age);
    }

    @Override
    public List<ChannelSftp.LsEntry> filterFiles(ChannelSftp.LsEntry[] files) {
        System.out.println("files = [" + files.length + "]");
        List<ChannelSftp.LsEntry> list = new ArrayList<ChannelSftp.LsEntry>();
        long now = System.currentTimeMillis() / 1000;
        for (ChannelSftp.LsEntry file : files) {
            if (file.getAttrs().isDir()) {
                continue;
            }
            int lastModifiedTime = file.getAttrs().getMTime();
            if (lastModifiedTime + this.age <= now) {
                list.add(file);
            }
        }
        Collections.reverse(list);
        if (list.isEmpty()) {
            return list; // guard: list.get(0) below would throw on an empty result
        }
        ArrayList<ChannelSftp.LsEntry> oneElementList = new ArrayList<ChannelSftp.LsEntry>(1);
        oneElementList.add(list.get(0));
        return oneElementList;
    }
}
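When nothing appears to happen at all, raising the framework log level usually shows whether the poller fires and what the synchronizer lists on each poll. These are standard Spring Boot logging properties (not part of the original post):

logging.level.org.springframework.integration=DEBUG
logging.level.org.springframework.integration.sftp=DEBUG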

Related

Spring Integration + Spring Batch: the job doesn't stop

I want to read a file from an FTP server, save it into a local repository and delete it from the server, then run a job that reads the file, finds one record in the DB, changes one parameter, and saves it.
What goes wrong: the job doesn't finish; it increments the salary and saves the employee many times.
Spring Integration configuration:
@Bean
public FtpInboundFileSynchronizer ftpInboundFileSynchronizer(DefaultFtpSessionFactory sessionFactory) {
    FtpInboundFileSynchronizer fileSynchronizer = new FtpInboundFileSynchronizer(sessionFactory);
    fileSynchronizer.setRemoteDirectory(remoteDirectory);
    fileSynchronizer.setDeleteRemoteFiles(true);
    return fileSynchronizer;
}

@Bean
@InboundChannelAdapter(value = "fileInputChannel", poller = @Poller(cron = "*/5 * * * * ?"))
public FtpInboundFileSynchronizingMessageSource ftpInboundFileSynchronizingMessageSource(FtpInboundFileSynchronizer fileSynchronizer) throws Exception {
    FtpInboundFileSynchronizingMessageSource messageSource = new FtpInboundFileSynchronizingMessageSource(fileSynchronizer);
    messageSource.setAutoCreateLocalDirectory(true);
    messageSource.setLocalDirectory(new File(localDirectory));
    messageSource.setLocalFilter(new AcceptOnceFileListFilter<>());
    return messageSource;
}

@Bean
@ServiceActivator(inputChannel = "fileInputChannel")
public FileWritingMessageHandler fileWritingMessageHandler() {
    FileWritingMessageHandler messageHandler = new FileWritingMessageHandler(new File(localDirectory));
    messageHandler.setOutputChannelName("jobLaunchRequestChannel");
    return messageHandler;
}

@ServiceActivator(inputChannel = "jobLaunchRequestChannel", outputChannel = "jobLaunchingGatewayChannel")
public JobLaunchRequest jobLaunchRequest(File file) throws IOException {
    String[] content = FileUtils.readFileToString(file, "UTF-8").split("\\s+");
    JobParameters jobParameters = new JobParametersBuilder()
            .addString("filename", file.getAbsolutePath())
            .addString("id", content[0]).addString("salary", content[1])
            // .addLong("time", System.currentTimeMillis())
            .toJobParameters();
    return new JobLaunchRequest(increaseSalaryJob, jobParameters);
}

@Bean
@ServiceActivator(inputChannel = "jobLaunchingGatewayChannel")
public JobLaunchingGateway jobLaunchingGateway(SimpleJobLauncher jobLauncher) {
    JobLaunchingGateway jobLaunchingGateway = new JobLaunchingGateway(jobLauncher);
    jobLaunchingGateway.setOutputChannelName("finish");
    return jobLaunchingGateway;
}

@ServiceActivator(inputChannel = "finish")
public void finish() {
    System.out.println("FINISH");
}
}
Spring Batch configuration:
@Bean
public Job increaseSalaryJob(CustomJobListener listener, Step step1) {
    return jobBuilderFactory.get("increaseSalaryJob")
            .preventRestart()
            .listener(listener)
            .start(step1)
            .build();
}

@Bean
public Step step1(ItemReader<Employee> reader) {
    return stepBuilderFactory.get("step1")
            .transactionManager(transactionManager)
            .<Employee, Employee> chunk(1)
            .reader(reader)
            .processor(processor())
            .writer(writer())
            .build();
}

@Bean
@StepScope
public ItemReader<Employee> reader(@Value("#{jobParameters[id]}") Integer id) {
    log.info("reader");
    return () -> employeeService.get(id);
}

@Bean
@StepScope
public ItemProcessor<Employee, Employee> processor() {
    log.info("processor");
    return employee -> {
        log.info(employee.getName() + " had salary " + employee.getSalary());
        Integer salary = employee.getSalary() + 1;
        employee.setSalary(salary);
        log.info(employee.getName() + " have salary " + employee.getSalary());
        return employee;
    };
}

@Bean
@StepScope
public ItemWriter<Employee> writer() {
    log.info("writer");
    return employees -> {
        for (Employee employee : employees) {
            try {
                employeeService.update(employee);
                log.info(employee.getName() + " updated with salary " + employee.getSalary());
            } catch (ValidationException e) {
                e.printStackTrace();
            }
        }
    };
}

@Bean
public MapJobRepositoryFactoryBean jobRepositoryFactoryBean(PlatformTransactionManager transactionManager) {
    return new MapJobRepositoryFactoryBean(transactionManager);
}

@Bean
public JobRepository jobRepository(MapJobRepositoryFactoryBean jobRepositoryFactoryBean) throws Exception {
    jobRepositoryFactoryBean.setTransactionManager(transactionManager);
    return jobRepositoryFactoryBean.getObject();
}

@Bean
public SimpleJobLauncher jobLauncher(JobRepository jobRepository) {
    SimpleJobLauncher jobLauncher = new SimpleJobLauncher();
    jobLauncher.setJobRepository(jobRepository);
    return jobLauncher;
}
I will be glad of any help.
You need to make sure that your reader returns null at some point. This is how the step interprets that there is no more data to process and exits (which in turn will stop the surrounding job if there are no more steps to run).
That said, I see the input of your chunk-oriented step is a single id. For this use case, a simple tasklet is enough, no need for a chunk-oriented tasklet with a single input record and chunkSize=1.
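For illustration, a minimal sketch of that tasklet alternative, reusing the stepBuilderFactory, employeeService, and the "id" job parameter from the question (the step name and parameter handling are assumptions, not code from the original post):

@Bean
public Step increaseSalaryStep() {
    return stepBuilderFactory.get("increaseSalaryStep")
            .tasklet((contribution, chunkContext) -> {
                // read the single id from the job parameters instead of driving a chunk loop
                String id = (String) chunkContext.getStepContext().getJobParameters().get("id");
                Employee employee = employeeService.get(Integer.valueOf(id));
                employee.setSalary(employee.getSalary() + 1);
                employeeService.update(employee);
                return RepeatStatus.FINISHED; // runs once, then the step (and job) completes
            })
            .build();
}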

spring batch job stuck in loop even when ItemReader returns null

I am converting XML to CSV using Spring Batch and Spring Integration. The approach is that the job starts by reading files from a landing dir; while processing, it moves files to an inprocess dir; on error, the file is moved to an error dir; on successful processing/writing, the file is moved to output/completed.
After searching a while I learned that the problem must be with the ItemReader, but it is returning null as well. I cannot see where the problem is.
Below is my batch configuration:
@Bean
public Job commExportJob(Step parseStep) throws IOException {
    return jobs.get("commExportJob")
            .incrementer(new RunIdIncrementer())
            .flow(parseStep)
            .end()
            .listener(listener())
            .build();
}

@Bean
public Step streamStep() throws IOException {
    CommReader itemReader = itemReader(null);
    if (itemReader != null) {
        return steps.get("streamStep")
                .<Object, Object>chunk(env.getProperty(Const.INPUT_READ_CHUNK_SIZE, Integer.class))
                .reader(itemReader)
                .processor(itemProcessor())
                .writer(itemWriter(null))
                .listener(getChunkListener())
                .build();
    }
    return null;
}

@Bean
@StepScope
public ItemWriter<Object> itemWriter(@Value("#{jobParameters['outputFilePath']}") String outputFilePath) {
    log.info("CommBatchConfiguration.itemWriter() : " + outputFilePath);
    CommItemWriter writer = new CommItemWriter();
    return writer;
}

@Bean
@StepScope
public CommReader itemReader(@Value("#{jobParameters['inputFilePath']}") String inputFilePath) {
    log.info("CommBatchConfiguration.itemReader() : " + inputFilePath);
    CommReader reader = new CommReader(inputFilePath);
    // reader.setEncoding(env.getProperty("char.encoding","UTF-8"));
    return reader;
}

@Bean
@StepScope
public CommItemProcessor itemProcessor() {
    log.info("CommBatchConfiguration.itemProcessor() : Entry");
    return new CommItemProcessor(ruleService);
}
CommReader.java
File inputFile = null;
private String jobName;

public CommReader(String inputFilePath) {
    inputFile = new File(inputFilePath);
}

@Value("#{stepExecution}")
private StepExecution stepExecution;

public String getJobName() {
    return jobName;
}

public void setJobName(String jobName) {
    this.jobName = jobName;
}

@Override
public Object read() throws IOException {
    DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
    DocumentBuilder builder;
    if (inputFile.exists()) {
        try {
            builder = factory.newDocumentBuilder();
            log.info("CommReader.read() :" + inputFile.getAbsolutePath());
            Document document = builder.parse(inputFile);
            return document;
        } catch (ParserConfigurationException | SAXException | TransformerFactoryConfigurationError e) {
            log.error("Exception while reading ", e);
        }
    }
    return null;
}

@Override
public void close() throws ItemStreamException {
}

@Override
public void open(ExecutionContext arg0) throws ItemStreamException {
}

@Override
public void update(ExecutionContext arg0) throws ItemStreamException {
}

@Override
public void setResource(Resource arg0) {
}
CommItemProcessor.java
@Autowired
CommExportService ruleService;

public CommItemProcessor(CommExportService ruleService) {
    this.ruleService = ruleService;
}

@Override
public Object process(Object bean) throws Exception {
    log.info("CommItemProcessor.process() : Item Processor : " + bean);
    return bean;
}
CommItemWriter.java
FlatFileItemWriter<byte[]> delegate;
ExecutionContext execContext;
FileOutputStream fileWrite;
File stylesheet;
StreamSource stylesource;
Transformer transformer;
List<List<?>> itemsTotal = null;
int recordCount = 0;

@Autowired
FileUtil fileUtil;

@Value("${input.completed.dir}")
String completedDir;

@Value("${input.inprocess.dir}")
String inprocessDir;

public void update(ExecutionContext arg0) throws ItemStreamException {
    this.delegate.update(arg0);
}

public void open(ExecutionContext arg0) throws ItemStreamException {
    this.execContext = arg0;
    this.delegate.open(arg0);
}

public void close() throws ItemStreamException {
    this.delegate.close();
}

@Override
public void write(List<? extends Object> items) throws Exception {
    log.info("CommItemWriter.write() : items.size() : " + items.size());
    stylesheet = new File("./config/style.xsl");
    stylesource = new StreamSource(stylesheet);
    String fileName = fileUtil.getFileName();
    try {
        transformer = TransformerFactory.newInstance().newTransformer(stylesource);
    } catch (TransformerConfigurationException | TransformerFactoryConfigurationError e) {
        log.error("Exception while writing", e);
    }
    for (Object object : items) {
        log.info("CommItemWriter.write() : Object : " + object.getClass().getName());
        log.info("CommItemWriter.write() : FileName : " + fileName);
        Source source = new DOMSource((Document) object);
        Result outputTarget = new StreamResult(new File(fileName));
        transformer.transform(source, outputTarget);
    }
}
In the ChunkListener I am not doing much.
Below is the job listener:
@Override
public void beforeJob(JobExecution jobExecution) {
    log.info("JK: CommJobListener.beforeJob()");
}

@Override
public void afterJob(JobExecution jobExecution) {
    log.info("JK: CommJobListener.afterJob()");
    JobParameters jobParams = jobExecution.getJobParameters();
    File inputFile = new File(jobParams.getString("inputFilePath"));
    File outputFile = new File(jobParams.getString("outputFilePath"));
    try {
        if (jobExecution.getStatus().isUnsuccessful()) {
            Files.move(inputFile.toPath(), Paths.get(inputErrorDir, inputFile.getName()),
                    StandardCopyOption.REPLACE_EXISTING);
            Files.move(outputFile.toPath(), Paths.get(outputErrorDir, outputFile.getName()),
                    StandardCopyOption.REPLACE_EXISTING);
        } else {
            String inputFileName = inputFile.getName();
            Files.move(inputFile.toPath(), Paths.get(inputCompletedDir, inputFileName),
                    StandardCopyOption.REPLACE_EXISTING);
            Files.move(outputFile.toPath(), Paths.get(outputCompletedDir, outputFile.getName()),
                    StandardCopyOption.REPLACE_EXISTING);
        }
    } catch (IOException ioe) {
        log.error("IOException occurred ", ioe);
    }
}
I am also using an integration flow.
@Bean
public IntegrationFlow messagesFlow(JobLauncher jobLauncher) {
    try {
        Map<String, Object> headers = new HashMap<>();
        headers.put("jobName", "commExportJob");
        return IntegrationFlows
                .from(Files.inboundAdapter(new File(env.getProperty(Const.INPUT_LANDING_DIR))),
                        e -> e.poller(Pollers
                                .fixedDelay(env.getProperty(Const.INPUT_POLLER_DELAY, Integer.class).intValue())
                                .maxMessagesPerPoll(
                                        env.getProperty(Const.INPUT_MAX_MESSAGES_PER_POLL, Integer.class).intValue())
                                .taskExecutor(getFileProcessExecutor())))
                .handle("moveFile", "moveFile")
                .enrichHeaders(headers)
                .transform(jobTransformer)
                .handle(jobLaunchingGw(jobLauncher))
                .channel("nullChannel").get();
    } catch (Exception e) {
        log.error("Exception in Integration flow", e);
    }
    return null;
}

@Autowired
private Environment env;

@Bean
public MessageHandler jobLaunchingGw(JobLauncher jobLauncher) {
    return new JobLaunchingGateway(jobLauncher);
}

@MessagingGateway
public interface IMoveFile {
    @Gateway(requestChannel = "moveFileChannel")
    Message<File> moveFile(Message<File> inputFileMessage);
}

@Bean(name = "fileProcessExecutor")
public Executor getFileProcessExecutor() {
    ThreadPoolTaskExecutor fileProcessExecutor = new ThreadPoolTaskExecutor();
    fileProcessExecutor.setCorePoolSize(env.getRequiredProperty(Const.INPUT_EXECUTOR_POOLSIZE, Integer.class));
    fileProcessExecutor.setMaxPoolSize(env.getRequiredProperty(Const.INPUT_EXECUTOR_MAXPOOLSIZE, Integer.class));
    fileProcessExecutor.setQueueCapacity(env.getRequiredProperty(Const.INPUT_EXECUTOR_QUEUECAPACITY, Integer.class));
    fileProcessExecutor.setRejectedExecutionHandler(new ThreadPoolExecutor.CallerRunsPolicy());
    fileProcessExecutor.initialize();
    return fileProcessExecutor;
}
Try returning the Document itself instead of Object from the read method of CommReader.java, and then check whether it is null in the writer to stop the job.
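For illustration, one way to make this reader exhaust after a single item: a sketch of CommReader.read() with a simple flag (the field name is illustrative, not from the original post):

private boolean documentRead = false;

@Override
public Object read() throws IOException {
    // return the parsed document exactly once, then signal end-of-data with null
    if (this.documentRead || !inputFile.exists()) {
        return null;
    }
    try {
        DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
        this.documentRead = true;
        return builder.parse(inputFile);
    } catch (ParserConfigurationException | SAXException e) {
        log.error("Exception while reading ", e);
        return null;
    }
}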

Spring Cloud App Starter, sftp source, recurse a directory for files

I am using the SFTP source in Spring Cloud Data Flow and it works for getting files defined in sftp:remote-dir:/home/someone/source. Now I have many subfolders under the remote-dir and I want to recursively get all the files under this directory that match the pattern. I am trying to use filename-regex: but so far it only works on one level. How do I recursively get the files I need?
The inbound channel adapter does not support recursion; use a custom source with the outbound gateway with an MGET command, with recursion (-R).
The doc is missing that option; fixed in the current docs.
I opened an issue to create a standard app starter.
EDIT
With the Java DSL...
@SpringBootApplication
@EnableBinding(Source.class)
public class So44710754Application {

    public static void main(String[] args) {
        SpringApplication.run(So44710754Application.class, args);
    }

    // should store in Redis or similar for persistence
    private final ConcurrentMap<String, Boolean> processed = new ConcurrentHashMap<>();

    @Bean
    public IntegrationFlow flow() {
        return IntegrationFlows.from(source(), e -> e.poller(Pollers.fixedDelay(30_000)))
                .handle(gateway())
                .split()
                .<File>filter(p -> this.processed.putIfAbsent(p.getAbsolutePath(), true) == null)
                .transform(Transformers.fileToByteArray())
                .channel(Source.OUTPUT)
                .get();
    }

    private MessageSource<String> source() {
        return () -> new GenericMessage<>("foo/*");
    }

    private AbstractRemoteFileOutboundGateway<LsEntry> gateway() {
        AbstractRemoteFileOutboundGateway<LsEntry> gateway = Sftp.outboundGateway(sessionFactory(), "mget", "payload")
                .localDirectory(new File("/tmp/foo"))
                .options(Option.RECURSIVE)
                .get();
        gateway.setFileExistsMode(FileExistsMode.IGNORE);
        return gateway;
    }

    private SessionFactory<LsEntry> sessionFactory() {
        DefaultSftpSessionFactory sf = new DefaultSftpSessionFactory();
        sf.setHost("10.0.0.3");
        sf.setUser("ftptest");
        sf.setPassword("ftptest");
        sf.setAllowUnknownKeys(true);
        return new CachingSessionFactory<>(sf);
    }
}
And with Java config...
@SpringBootApplication
@EnableBinding(Source.class)
public class So44710754Application {

    public static void main(String[] args) {
        SpringApplication.run(So44710754Application.class, args);
    }

    @InboundChannelAdapter(channel = "sftpGate", poller = @Poller(fixedDelay = "30000"))
    public String remoteDir() {
        return "foo/*";
    }

    @Bean
    @ServiceActivator(inputChannel = "sftpGate")
    public SftpOutboundGateway mgetGate() {
        SftpOutboundGateway sftpOutboundGateway = new SftpOutboundGateway(sessionFactory(), "mget", "payload");
        sftpOutboundGateway.setOutputChannelName("splitterChannel");
        sftpOutboundGateway.setFileExistsMode(FileExistsMode.IGNORE);
        sftpOutboundGateway.setLocalDirectory(new File("/tmp/foo"));
        sftpOutboundGateway.setOptions("-R");
        return sftpOutboundGateway;
    }

    @Bean
    @Splitter(inputChannel = "splitterChannel")
    public DefaultMessageSplitter splitter() {
        DefaultMessageSplitter splitter = new DefaultMessageSplitter();
        splitter.setOutputChannelName("filterChannel");
        return splitter;
    }

    // should store in Redis, Zookeeper, or similar for persistence
    private final ConcurrentMap<String, Boolean> processed = new ConcurrentHashMap<>();

    @Filter(inputChannel = "filterChannel", outputChannel = "toBytesChannel")
    public boolean filter(File payload) {
        return this.processed.putIfAbsent(payload.getAbsolutePath(), true) == null;
    }

    @Bean
    @Transformer(inputChannel = "toBytesChannel", outputChannel = Source.OUTPUT)
    public FileToByteArrayTransformer toBytes() {
        FileToByteArrayTransformer transformer = new FileToByteArrayTransformer();
        return transformer;
    }

    private SessionFactory<LsEntry> sessionFactory() {
        DefaultSftpSessionFactory sf = new DefaultSftpSessionFactory();
        sf.setHost("10.0.0.3");
        sf.setUser("ftptest");
        sf.setPassword("ftptest");
        sf.setAllowUnknownKeys(true);
        return new CachingSessionFactory<>(sf);
    }
}

Spring 4.1 to 4.2 migration: why does the persistence not work?

I used Spring 4.1.0 with Hibernate 4.3.6 and everything was OK.
After migrating Spring to 4.2.8, the persistence no longer works.
No exception, no trace; the persist method of the entity manager is called, but nothing reaches the database.
It is as if the transaction manager was not working.
This is my persistence configuration:
@Configuration
@EnableTransactionManagement
public class PersistenceConfiguration {

    @Bean
    public BasicDataSource driverManagerDataSource() {
        final BasicDataSource dataSource = new BasicDataSource();
        dataSource.setDriverClassName("com.mysql.jdbc.Driver");
        dataSource.setUrl("jdbc:mysql://localhost:3306/xxx");
        dataSource.setUsername("root");
        dataSource.setPassword("root");
        dataSource.setValidationQuery("SELECT 1");
        dataSource.setDefaultAutoCommit(false);
        dataSource.setInitialSize(10);
        dataSource.setMaxActive(20);
        dataSource.setMaxIdle(10);
        return dataSource;
    }

    @Bean
    public LocalContainerEntityManagerFactoryBean localContainerEntityManagerFactoryBean() {
        final LocalContainerEntityManagerFactoryBean localContainerEntityManagerFactoryBean = new LocalContainerEntityManagerFactoryBean();
        localContainerEntityManagerFactoryBean.setDataSource(driverManagerDataSource());
        localContainerEntityManagerFactoryBean.setPersistenceUnitName("xxxPersistenceUnitName");
        localContainerEntityManagerFactoryBean.setPackagesToScan("org.xxx.model");
        localContainerEntityManagerFactoryBean.setJpaVendorAdapter(new org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter());
        final HashMap<String, String> map = new HashMap<>();
        map.put("hibernate.dialect", "org.hibernate.dialect.MySQL5Dialect");
        map.put("hibernate.hbm2ddl.auto", "update");
        map.put("hibernate.show_sql", "false");
        map.put("hibernate.format_sql", "false");
        localContainerEntityManagerFactoryBean.setJpaPropertyMap(map);
        localContainerEntityManagerFactoryBean.setJpaDialect(new org.springframework.orm.jpa.vendor.HibernateJpaDialect());
        return localContainerEntityManagerFactoryBean;
    }

    @Bean
    public JpaTransactionManager transactionManager() {
        final JpaTransactionManager jpaTransactionManager = new JpaTransactionManager();
        jpaTransactionManager.setEntityManagerFactory(localContainerEntityManagerFactoryBean().getNativeEntityManagerFactory());
        return jpaTransactionManager;
    }
Dependency injection:
@Configuration
@Import({PersistenceConfiguration.class, UserConfiguration.class, SecurityConfiguration.class})
@ComponentScan(basePackages = "org.xxx")
@EnableWebMvc
public class XxxProjectConfiguration {

    private static Logger LOG = Logger.getLogger(XxxProjectConfiguration.class);

    @Autowired
    private Environment env;

    @PostConstruct
    public void initApp() {
        LOG.debug("Looking for Spring profiles...");
        if (env.getActiveProfiles().length == 0) {
            LOG.info("No Spring profile configured, running with default configuration.");
        } else {
            for (String profile : env.getActiveProfiles()) {
                LOG.info("Detected Spring profile: {}" + profile);
            }
        }
    }

    @Autowired
    private UserConfiguration userConfiguration;

    // DAO
    @Bean
    public RelationshipDAO relationshipDAO() {
        return new RelationshipDAOImpl();
    }

    @Bean
    public RelationshipStatusDAO relationshipStatusDAO() {
        return new RelationshipStatusDAOImpl();
    }

    @Bean
    public MessageDAO messageDAO() {
        return new MessageDAOImpl();
    }

    // Services
    @Bean
    public UserServiceImpl userService() {
        return new UserServiceImpl(userConfiguration.userDAO(), relationshipDAO(), relationshipStatusDAO(), messageDAO());
    }
}
And
@Configuration
@Import(PersistenceConfiguration.class)
public class UserConfiguration {

    @Bean
    public UserDAO userDAO() {
        return new UserDAOImpl();
    }
}
The service :
@Transactional(propagation = Propagation.SUPPORTS)
public class UserServiceImpl implements UserService, Serializable {

    private static final long serialVersionUID = 1L;

    private UserDAO userDAO;
    private RelationshipDAO relationshipDAO;
    private RelationshipStatusDAO relationshipStatusDAO;
    private MessageDAO messageDAO;

    public UserServiceImpl(final UserDAO userDAO, final RelationshipDAO relationshipDAO, final RelationshipStatusDAO relationshipStatusDAO, final MessageDAO messageDAO) {
        this.userDAO = userDAO;
        this.relationshipDAO = relationshipDAO;
        this.relationshipStatusDAO = relationshipStatusDAO;
        this.messageDAO = messageDAO;
    }

    @Override
    @Transactional(propagation = Propagation.REQUIRED, rollbackFor = UserServiceException.class)
    public RelationshipStatus wantsRelationship(final long fromUserId, final long toUserId) throws UserServiceException {
        try {
            final Relationship relationship = new Relationship(new Date());
            User fromUser = userDAO.get(fromUserId);
            User toUser = new User(toUserId);
            relationship.getUsers().add(fromUser);
            fromUser.getRelationships().add(relationship);
            relationship.getUsers().add(toUser);
            toUser.getRelationships().add(relationship);
            relationship.setWantsFromUserId(fromUserId);
            final Message message = new Message(fromUserId, "Hi ! My name is " + fromUser.getFirstName() + ", I want to meet you");
            relationship.getMessages().add(message);
            relationship.setStatus(new RelationshipStatus(Status.WANTS));
            relationshipDAO.persist(relationship);
            return relationship.getStatus();
        } catch (Exception e) {
            throw new UserServiceException(e);
        }
    }
    ...
}
I do not understand anything...
The missing code is a transaction manager wired against the injected, container-managed EntityManagerFactory rather than the native factory obtained via getNativeEntityManagerFactory(); with the native factory, the transaction manager and the shared EntityManager proxies no longer agree on the same factory, so the persist runs outside the transaction:
@Bean
public PlatformTransactionManager transactionManager(EntityManagerFactory emf) {
    JpaTransactionManager transactionManager = new JpaTransactionManager();
    transactionManager.setEntityManagerFactory(emf);
    return transactionManager;
}

Spring FTP Integration should look for modified file with same name

I am using FileSystemPersistentAcceptOnceFileListFilter and PropertiesPersistingMetadataStore to check whether there is a new file, a file with the same name, or the same file modified; in any of those cases the payload should deliver the file. But it does not detect when an existing file is modified over time.
Please suggest a good solution for resolving this issue.
@Bean
@InboundChannelAdapter(value = "inputChannel", poller = @Poller(fixedDelay = "1000", maxMessagesPerPoll = "1"))
public MessageSource<File> receive() throws Exception {
    FtpInboundFileSynchronizingMessageSource messageSource = new FtpInboundFileSynchronizingMessageSource(
            synchronizer());
    Logger.info(messageSource, "receive ");
    messageSource.setLocalDirectory(Temp);
    messageSource.setAutoCreateLocalDirectory(true);
    messageSource.setLocalFilter(fileSystemPersistentAcceptOnceFileListFilter());
    return messageSource;
}

@Bean
public FileListFilter<FTPFile> compositeFilter() throws Exception {
    Pattern pattern = Pattern.compile(".*\\.xml$");
    CompositeFileListFilter<FTPFile> compositeFileListFilter = new CompositeFileListFilter<FTPFile>();
    FileListFilter<FTPFile> fileListFilter = new FtpRegexPatternFileListFilter(pattern);
    compositeFileListFilter.addFilter(fileListFilter);
    compositeFileListFilter.addFilter(getAcceptOnceFileFilter());
    Logger.info(compositeFileListFilter.getClass().getName(), " compositeFilter ");
    return compositeFileListFilter;
}

@Bean
public FileListFilter<FTPFile> getAcceptOnceFileFilter() {
    FileListFilter<FTPFile> ftpPersistentAcceptOnceFileListFilter = null;
    try {
        ftpPersistentAcceptOnceFileListFilter = new FtpPersistentAcceptOnceFileListFilter(
                getMetadataStore(), "######");
    } catch (Exception e) {
        e.printStackTrace();
    }
    Logger.info(ftpPersistentAcceptOnceFileListFilter.getClass().getName(), " getAcceptOnceFileFilter ");
    return ftpPersistentAcceptOnceFileListFilter;
}

@Bean
public PropertiesPersistingMetadataStore getMetadataStore() throws Exception {
    PropertiesPersistingMetadataStore metadataStore = new PropertiesPersistingMetadataStore();
    metadataStore.setBaseDirectory("temp");
    metadataStore.afterPropertiesSet();
    Logger.info(metadataStore.getClass().getName(), " metadataStore ");
    return metadataStore;
}

@Bean
public AbstractInboundFileSynchronizer<FTPFile> synchronizer() throws Exception {
    AbstractInboundFileSynchronizer<FTPFile> fileSynchronizer = new FtpInboundFileSynchronizer(
            sessionFactory());
    fileSynchronizer.setRemoteDirectory("/RemoteFile/");
    fileSynchronizer.setDeleteRemoteFiles(false);
    fileSynchronizer.setFilter(compositeFilter());
    Logger.info(fileSynchronizer.getClass().getName(), " fileSynchronizer ");
    return fileSynchronizer;
}

@Bean
public FileSystemPersistentAcceptOnceFileListFilter fileSystemPersistentAcceptOnceFileListFilter() {
    ConcurrentMetadataStore metaDataStore;
    FileSystemPersistentAcceptOnceFileListFilter fileSystemPersistentFilter = null;
    try {
        metaDataStore = getMetadataStore();
        fileSystemPersistentFilter = new FileSystemPersistentAcceptOnceFileListFilter(
                metaDataStore, "######");
        fileSystemPersistentFilter.setFlushOnUpdate(true);
        return fileSystemPersistentFilter;
    } catch (Exception e) {
        e.printStackTrace();
    }
    Logger.info(fileSystemPersistentFilter.getClass().getName(), " fileSystemPersistentFilter ");
    return fileSystemPersistentFilter;
}

@Bean(name = "sessionFactory")
public SessionFactory<FTPFile> sessionFactory() throws SocketException, IOException {
    DefaultFtpSessionFactory ftp = new DefaultFtpSessionFactory();
    ftp.setHost(hostName);
    ftp.setUsername(username);
    ftp.setPassword(passWord);
    ftp.setBufferSize(1000);
    return ftp;
}

@Bean(name = "inputChannel")
public PollableChannel inputChannel() {
    QueueChannel channel = new QueueChannel();
    return channel;
}

@Bean(name = PollerMetadata.DEFAULT_POLLER)
public PollerMetadata defaultPoller() {
    PollerMetadata pollerMetadata = new PollerMetadata();
    pollerMetadata.setTrigger(new PeriodicTrigger(10));
    return pollerMetadata;
}

@ServiceActivator(inputChannel = "inputChannel")
public void foo(String payload) {
    System.out.println("payload: " + payload);
}
}
The FileSystemPersistentAcceptOnceFileListFilter is for local files, after the transfer has already happened. To meet your requirement there is the similar FtpPersistentAcceptOnceFileListFilter for the remote entries; see the sketch below.
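For illustration, a sketch of the synchronizer with the persistent accept-once logic applied on the remote side, reusing the session factory and metadata store beans from the question (the "remote-" prefix is illustrative). FtpPersistentAcceptOnceFileListFilter keys on the file name plus the remote last-modified time, so a file re-uploaded with a newer timestamp passes the filter again:

@Bean
public AbstractInboundFileSynchronizer<FTPFile> synchronizer() throws Exception {
    FtpInboundFileSynchronizer fileSynchronizer = new FtpInboundFileSynchronizer(sessionFactory());
    fileSynchronizer.setRemoteDirectory("/RemoteFile/");
    fileSynchronizer.setDeleteRemoteFiles(false);
    CompositeFileListFilter<FTPFile> filter = new CompositeFileListFilter<>();
    filter.addFilter(new FtpRegexPatternFileListFilter(".*\\.xml$"));
    // remote accept-once: re-accepts an entry when its remote mtime changes
    filter.addFilter(new FtpPersistentAcceptOnceFileListFilter(getMetadataStore(), "remote-"));
    fileSynchronizer.setFilter(filter);
    return fileSynchronizer;
}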
