Spring FTP Integration should look for modified file with same name - spring-integration

I am using a FileSystemPersistentAcceptOnceFileListFilter and a PropertiesPersistingMetadataStore to detect when a new file arrives, a file arrives with the same name, or the same file is modified, so that a payload is produced for the file in each case. But it does not notice when an existing file is modified (i.e. when its modified time changes).
Please suggest a good solution for resolving this issue.
@Bean
@InboundChannelAdapter(value = "inputChannel", poller = @Poller(fixedDelay = "1000", maxMessagesPerPoll = "1"))
public MessageSource<File> receive() throws Exception {
    FtpInboundFileSynchronizingMessageSource messageSource =
            new FtpInboundFileSynchronizingMessageSource(synchronizer());
    Logger.info(messageSource, "receive ");
    messageSource.setLocalDirectory(Temp);
    messageSource.setAutoCreateLocalDirectory(true);
    messageSource.setLocalFilter(fileSystemPersistentAcceptOnceFileListFilter());
    return messageSource;
}

@Bean
public FileListFilter<FTPFile> compositeFilter() throws Exception {
    Pattern pattern = Pattern.compile(".*\\.xml$");
    CompositeFileListFilter<FTPFile> compositeFileListFilter = new CompositeFileListFilter<FTPFile>();
    FileListFilter<FTPFile> fileListFilter = new FtpRegexPatternFileListFilter(pattern);
    compositeFileListFilter.addFilter(fileListFilter);
    compositeFileListFilter.addFilter(getAcceptOnceFileFilter());
    Logger.info(compositeFileListFilter.getClass().getName(), " compositeFilter ");
    return compositeFileListFilter;
}

@Bean
public FileListFilter<FTPFile> getAcceptOnceFileFilter() {
    FileListFilter<FTPFile> ftpPersistentAcceptOnceFileListFilter = null;
    try {
        ftpPersistentAcceptOnceFileListFilter =
                new FtpPersistentAcceptOnceFileListFilter(getMetadataStore(), "######");
    } catch (Exception e) {
        e.printStackTrace();
    }
    Logger.info(ftpPersistentAcceptOnceFileListFilter.getClass().getName(), " getAcceptOnceFileFilter ");
    return ftpPersistentAcceptOnceFileListFilter;
}

@Bean
public PropertiesPersistingMetadataStore getMetadataStore() throws Exception {
    PropertiesPersistingMetadataStore metadataStore = new PropertiesPersistingMetadataStore();
    metadataStore.setBaseDirectory("temp");
    metadataStore.afterPropertiesSet();
    Logger.info(metadataStore.getClass().getName(), " metadataStore ");
    return metadataStore;
}

@Bean
public AbstractInboundFileSynchronizer<FTPFile> synchronizer() throws Exception {
    AbstractInboundFileSynchronizer<FTPFile> fileSynchronizer = new FtpInboundFileSynchronizer(sessionFactory());
    fileSynchronizer.setRemoteDirectory("/RemoteFile/");
    fileSynchronizer.setDeleteRemoteFiles(false);
    fileSynchronizer.setFilter(compositeFilter());
    Logger.info(fileSynchronizer.getClass().getName(), " fileSynchronizer ");
    return fileSynchronizer;
}

@Bean
public FileSystemPersistentAcceptOnceFileListFilter fileSystemPersistentAcceptOnceFileListFilter() {
    ConcurrentMetadataStore metaDataStore;
    FileSystemPersistentAcceptOnceFileListFilter fileSystemPersistentFilter = null;
    try {
        metaDataStore = getMetadataStore();
        fileSystemPersistentFilter = new FileSystemPersistentAcceptOnceFileListFilter(metaDataStore, "######");
        fileSystemPersistentFilter.setFlushOnUpdate(true);
        return fileSystemPersistentFilter;
    } catch (Exception e) {
        e.printStackTrace();
    }
    Logger.info(fileSystemPersistentFilter.getClass().getName(), " fileSystemPersistentFilter ");
    return fileSystemPersistentFilter;
}

@Bean(name = "sessionFactory")
public SessionFactory<FTPFile> sessionFactory() throws SocketException, IOException {
    DefaultFtpSessionFactory ftp = new DefaultFtpSessionFactory();
    ftp.setHost(hostName);
    ftp.setUsername(username);
    ftp.setPassword(passWord);
    ftp.setBufferSize(1000);
    return ftp;
}

@Bean(name = "inputChannel")
public PollableChannel inputChannel() {
    QueueChannel channel = new QueueChannel();
    return channel;
}

@Bean(name = PollerMetadata.DEFAULT_POLLER)
public PollerMetadata defaultPoller() {
    PollerMetadata pollerMetadata = new PollerMetadata();
    pollerMetadata.setTrigger(new PeriodicTrigger(10));
    return pollerMetadata;
}

@ServiceActivator(inputChannel = "inputChannel")
public void foo(String payload) {
    System.out.println("payload: " + payload);
}

The FileSystemPersistentAcceptOnceFileListFilter is for local files, i.e. after the transfer has already happened. To meet your requirements there is a similar FtpPersistentAcceptOnceFileListFilter for the remote entries; it tracks the remote file's modified timestamp in the metadata store, so a file is accepted again when that timestamp changes.
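For example, a minimal sketch reusing the beans from the question (the "ftp-" key prefix is illustrative), applying the persistent filter to the remote listing and preserving the remote timestamp on the local copy:

@Bean
public AbstractInboundFileSynchronizer<FTPFile> synchronizer() throws Exception {
    FtpInboundFileSynchronizer fileSynchronizer = new FtpInboundFileSynchronizer(sessionFactory());
    fileSynchronizer.setRemoteDirectory("/RemoteFile/");
    fileSynchronizer.setDeleteRemoteFiles(false);
    // Remote accept-once filter: stores the entry's name and modified timestamp
    // in the metadata store, so a remote file is re-fetched when its timestamp changes.
    fileSynchronizer.setFilter(new FtpPersistentAcceptOnceFileListFilter(getMetadataStore(), "ftp-"));
    // Keep the remote timestamp on the local copy, so the local
    // FileSystemPersistentAcceptOnceFileListFilter sees the modification too.
    fileSynchronizer.setPreserveTimestamp(true);
    return fileSynchronizer;
}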


same file not picked up by multiple hosts

I have an app hosted on multiple hosts, all listening to a single remote SFTP location. How should I make sure that a file is not picked up by one host when it has already been picked up by another? I am pretty new to Spring Integration. I would appreciate it if someone could share examples.
EDIT:
Here is my integration flow: it gets a file from SFTP, places it in a local directory, performs business logic in a transformer, and sends the resulting file to a remote SFTP directory.
@Bean
public SessionFactory<ChannelSftp.LsEntry> sftpSessionFactory() {
    LOGGER.debug(" Creating SFTP Session Factory -Start");
    DefaultSftpSessionFactory factory = new DefaultSftpSessionFactory(true);
    factory.setHost(sftpHost);
    factory.setUser(sftpUser);
    factory.setPort(port);
    factory.setPassword(sftpPassword);
    factory.setAllowUnknownKeys(true);
    return new CachingSessionFactory<>(factory);
}

@Bean
public SftpInboundFileSynchronizer sftpInboundFileSynchronizer() {
    SftpInboundFileSynchronizer fileSynchronizer = new SftpInboundFileSynchronizer(sftpSessionFactory());
    fileSynchronizer.setDeleteRemoteFiles(true);
    fileSynchronizer.setRemoteDirectory(sftpInboundDirectory);
    fileSynchronizer.setFilter(new SftpPersistentAcceptOnceFileListFilter(store(), "*.json"));
    return fileSynchronizer;
}

@Bean(name = PollerMetadata.DEFAULT_POLLER)
public PollerMetadata defaultPoller() {
    PollerMetadata pollerMetadata = new PollerMetadata();
    pollerMetadata.setTrigger(new PeriodicTrigger(5000));
    return pollerMetadata;
}

@Bean
@InboundChannelAdapter(channel = "fileInputChannel", poller = @Poller(fixedDelay = "5000"))
public MessageSource<File> sftpMessageSource() {
    SftpInboundFileSynchronizingMessageSource source =
            new SftpInboundFileSynchronizingMessageSource(sftpInboundFileSynchronizer());
    source.setLocalDirectory(localDirectory);
    source.setAutoCreateLocalDirectory(true);
    source.setLocalFilter(new AcceptOnceFileListFilter<File>());
    source.setMaxFetchSize(1);
    return source;
}

@Bean
IntegrationFlow integrationFlow() {
    return IntegrationFlows.from(this.sftpMessageSource()).channel(fileInputChannel())
            .transform(this::messageTransformer).channel(fileOutputChannel())
            .handle(orderOutMessageHandler()).get();
}

@Bean
@ServiceActivator(inputChannel = "fileOutputChannel")
public SftpMessageHandler orderOutMessageHandler() {
    SftpMessageHandler handler = new SftpMessageHandler(sftpSessionFactory());
    LOGGER.debug(" Creating SFTP MessageHandler - Start ");
    handler.setRemoteDirectoryExpression(new LiteralExpression(sftpOutboundDirectory));
    handler.setFileNameGenerator(new FileNameGenerator() {
        @Override
        public String generateFileName(Message<?> message) {
            if (message.getPayload() instanceof File) {
                return ((File) message.getPayload()).getName();
            } else {
                throw new IllegalArgumentException("Expected Input is File.");
            }
        }
    });
    LOGGER.debug(" Creating SFTP MessageHandler - End ");
    return handler;
}

@Bean
@org.springframework.integration.annotation.Transformer(inputChannel = "fileInputChannel", outputChannel = "fileOutputChannel")
public Transformer messageTransformer() {
    return message -> {
        File file = orderTransformer.transformInboundMessage(message);
        return (Message<?>) file;
    };
}

@Bean
public ConcurrentMetadataStore store() {
    return new SimpleMetadataStore(hazelcastInstance().getMap("idempotentReceiverMetadataStore"));
}

@Bean
public HazelcastInstance hazelcastInstance() {
    return Hazelcast.newHazelcastInstance(new Config().setProperty("hazelcast.logging.type", "slf4j"));
}
See SftpPersistentAcceptOnceFileListFilter, to be injected into the SFTP inbound channel adapter. It has to be supplied with a MetadataStore backed by a database shared by all the hosts.
See more info in the docs:
https://docs.spring.io/spring-integration/docs/current/reference/html/system-management.html#metadata-store
https://docs.spring.io/spring-integration/docs/current/reference/html/sftp.html#sftp-inbound
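For example, a minimal sketch of a shared, database-backed store (assuming a DataSource bean is available; JdbcMetadataStore ships with spring-integration-jdbc and expects its INT_METADATA_STORE table to exist):

@Bean
public ConcurrentMetadataStore metadataStore(DataSource dataSource) {
    // All hosts read and write the same INT_METADATA_STORE table,
    // so a file accepted by one host is skipped by the others.
    return new JdbcMetadataStore(dataSource);
}

@Bean
public SftpPersistentAcceptOnceFileListFilter acceptOnceFilter(ConcurrentMetadataStore metadataStore) {
    // The second argument is a key prefix in the store, not a file pattern.
    return new SftpPersistentAcceptOnceFileListFilter(metadataStore, "sftp-");
}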

Spring Integration + Spring Batch: the job doesn't stop

I want to read a file from an FTP server, save it to a local directory and delete it from the server, then run a job that reads the file, finds one record in the DB, changes one parameter, and saves it.
What goes wrong: the job doesn't finish; it increments the salary and saves the employee many times.
Spring Integration configuration:
@Bean
public FtpInboundFileSynchronizer ftpInboundFileSynchronizer(DefaultFtpSessionFactory sessionFactory) {
    FtpInboundFileSynchronizer fileSynchronizer = new FtpInboundFileSynchronizer(sessionFactory);
    fileSynchronizer.setRemoteDirectory(remoteDirectory);
    fileSynchronizer.setDeleteRemoteFiles(true);
    return fileSynchronizer;
}

@Bean
@InboundChannelAdapter(value = "fileInputChannel", poller = @Poller(cron = "*/5 * * * * ?"))
public FtpInboundFileSynchronizingMessageSource ftpInboundFileSynchronizingMessageSource(FtpInboundFileSynchronizer fileSynchronizer) throws Exception {
    FtpInboundFileSynchronizingMessageSource messageSource = new FtpInboundFileSynchronizingMessageSource(fileSynchronizer);
    messageSource.setAutoCreateLocalDirectory(true);
    messageSource.setLocalDirectory(new File(localDirectory));
    messageSource.setLocalFilter(new AcceptOnceFileListFilter<>());
    return messageSource;
}

@Bean
@ServiceActivator(inputChannel = "fileInputChannel")
public FileWritingMessageHandler fileWritingMessageHandler() {
    FileWritingMessageHandler messageHandler = new FileWritingMessageHandler(new File(localDirectory));
    messageHandler.setOutputChannelName("jobLaunchRequestChannel");
    return messageHandler;
}

@ServiceActivator(inputChannel = "jobLaunchRequestChannel", outputChannel = "jobLaunchingGatewayChannel")
public JobLaunchRequest jobLaunchRequest(File file) throws IOException {
    String[] content = FileUtils.readFileToString(file, "UTF-8").split("\\s+");
    JobParameters jobParameters = new JobParametersBuilder()
            .addString("filename", file.getAbsolutePath())
            .addString("id", content[0]).addString("salary", content[1])
            // .addLong("time", System.currentTimeMillis())
            .toJobParameters();
    return new JobLaunchRequest(increaseSalaryJob, jobParameters);
}

@Bean
@ServiceActivator(inputChannel = "jobLaunchingGatewayChannel")
public JobLaunchingGateway jobLaunchingGateway(SimpleJobLauncher jobLauncher) {
    JobLaunchingGateway jobLaunchingGateway = new JobLaunchingGateway(jobLauncher);
    jobLaunchingGateway.setOutputChannelName("finish");
    return jobLaunchingGateway;
}

@ServiceActivator(inputChannel = "finish")
public void finish() {
    System.out.println("FINISH");
}
Spring Batch configuration:
@Bean
public Job increaseSalaryJob(CustomJobListener listener, Step step1) {
    return jobBuilderFactory.get("increaseSalaryJob")
            .preventRestart()
            .listener(listener)
            .start(step1)
            .build();
}

@Bean
public Step step1(ItemReader<Employee> reader) {
    return stepBuilderFactory.get("step1")
            .transactionManager(transactionManager)
            .<Employee, Employee> chunk(1)
            .reader(reader)
            .processor(processor())
            .writer(writer())
            .build();
}

@Bean
@StepScope
public ItemReader<Employee> reader(@Value("#{jobParameters[id]}") Integer id) {
    log.info("reader");
    return () -> employeeService.get(id);
}

@Bean
@StepScope
public ItemProcessor<Employee, Employee> processor() {
    log.info("processor");
    return employee -> {
        log.info(employee.getName() + " had salary " + employee.getSalary());
        Integer salary = employee.getSalary() + 1;
        employee.setSalary(salary);
        log.info(employee.getName() + " have salary " + employee.getSalary());
        return employee;
    };
}

@Bean
@StepScope
public ItemWriter<Employee> writer() {
    log.info("writer");
    return employees -> {
        for (Employee employee : employees) {
            try {
                employeeService.update(employee);
                log.info(employee.getName() + " updated with salary " + employee.getSalary());
            } catch (ValidationException e) {
                e.printStackTrace();
            }
        }
    };
}

@Bean
public MapJobRepositoryFactoryBean jobRepositoryFactoryBean(PlatformTransactionManager transactionManager) {
    return new MapJobRepositoryFactoryBean(transactionManager);
}

@Bean
public JobRepository jobRepository(MapJobRepositoryFactoryBean jobRepositoryFactoryBean) throws Exception {
    jobRepositoryFactoryBean.setTransactionManager(transactionManager);
    return jobRepositoryFactoryBean.getObject();
}

@Bean
public SimpleJobLauncher jobLauncher(JobRepository jobRepository) {
    SimpleJobLauncher jobLauncher = new SimpleJobLauncher();
    jobLauncher.setJobRepository(jobRepository);
    return jobLauncher;
}
I would be glad of any help.
You need to make sure that your reader returns null at some point. This is how the step interprets that there is no more data to process and exits (which in turn will stop the surrounding job if there are no more steps to run).
That said, I see the input of your chunk-oriented step is a single id. For this use case, a simple tasklet is enough, no need for a chunk-oriented tasklet with a single input record and chunkSize=1.
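As an illustration, here is a minimal sketch of that tasklet alternative; it reuses the employeeService and the "id" job parameter from the question, and the rest of the names are illustrative:

@Bean
public Step step1(StepBuilderFactory stepBuilderFactory, PlatformTransactionManager transactionManager) {
    return stepBuilderFactory.get("step1")
            .tasklet((contribution, chunkContext) -> {
                // The "id" parameter was added with addString(), so it arrives as a String.
                String id = (String) chunkContext.getStepContext().getJobParameters().get("id");
                Employee employee = employeeService.get(Integer.valueOf(id));
                employee.setSalary(employee.getSalary() + 1);
                employeeService.update(employee);
                // RepeatStatus.FINISHED ends the tasklet after one execution,
                // so the step (and with it the job) can complete.
                return RepeatStatus.FINISHED;
            })
            .transactionManager(transactionManager)
            .build();
}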

SFTP @Poller not triggering; nothing happens

I am trying to set up a Spring Boot application that will poll for CSV files. I do not see any activity happening in the Spring Boot application, nor on the FileZilla SFTP server, but if I change the same code to FTP then it works.
@Component
@EnableIntegration
public class IntegrationConfiguration {

    @Autowired
    FTPConfigProperties ftpConfigProperties;

    @Autowired
    private BeanFactory beanFactory;

    @Value("classpath:certificate.crt")
    Resource certficateFile;

    @Bean
    public SessionFactory<ChannelSftp.LsEntry> ftpSessionFactory() {
        DefaultSftpSessionFactory factory = new DefaultSftpSessionFactory();
        factory.setHost("127.0.0.1");
        factory.setPort(990);
        factory.setUser("abhinav");
        factory.setPassword("nssdw");
        factory.setPrivateKey(certficateFile);
        factory.setAllowUnknownKeys(true);
        return new CachingSessionFactory<ChannelSftp.LsEntry>(factory, 100000);
    }

    @Bean
    public SftpInboundFileSynchronizer ftpInboundFileSynchronizer() {
        SftpInboundFileSynchronizer fileSynchronizer = new SftpInboundFileSynchronizer(ftpSessionFactory());
        fileSynchronizer.setDeleteRemoteFiles(false);
        fileSynchronizer.setRemoteDirectory("/");
        fileSynchronizer.setFilter(filter());
        fileSynchronizer.setDeleteRemoteFiles(false);
        fileSynchronizer.setPreserveTimestamp(true);
        fileSynchronizer.setBeanFactory(beanFactory);
        return fileSynchronizer;
    }

    // here the poller is configured
    @Bean
    @InboundChannelAdapter(channel = "fromSftpChannel", poller = @Poller(fixedDelay = "10000"))
    public MessageSource<File> ftpMessageSource() throws Exception {
        SftpInboundFileSynchronizingMessageSource source =
                new SftpInboundFileSynchronizingMessageSource(ftpInboundFileSynchronizer());
        source.setLocalDirectory(new File("ftp-inbound"));
        source.setAutoCreateLocalDirectory(true);
        source.setMaxFetchSize(1);
        source.setBeanFactory(beanFactory);
        source.setUseWatchService(true);
        return source;
    }

    public CompositeFileListFilter<ChannelSftp.LsEntry> filter() {
        CompositeFileListFilter<ChannelSftp.LsEntry> filter = new CompositeFileListFilter<ChannelSftp.LsEntry>();
        filter.addFilter(new SftpSimplePatternFileListFilter("*.csv"));
        filter.addFilter(acceptOnceFilter());
        filter.addFilter(new LastModifiedLsEntryFileListFilter());
        return filter;
    }

    @Bean
    public SftpPersistentAcceptOnceFileListFilter acceptOnceFilter() {
        SftpPersistentAcceptOnceFileListFilter filter = new SftpPersistentAcceptOnceFileListFilter(metadataStore(), "ftpPersistentAcceptOnce");
        filter.setFlushOnUpdate(true);
        return filter;
    }

    @Bean
    public ConcurrentMetadataStore metadataStore() {
        PropertiesPersistingMetadataStore propertiesPersistingMetadataStore = new PropertiesPersistingMetadataStore();
        propertiesPersistingMetadataStore.setBaseDirectory("./metastore");
        propertiesPersistingMetadataStore.setFileName("ftpStream.properties");
        return propertiesPersistingMetadataStore;
    }

    @Bean
    @ServiceActivator(inputChannel = "jobChannel", outputChannel = "nullChannel")
    protected JobLaunchingMessageHandler launcher(JobLauncher jobLauncher) {
        return new JobLaunchingMessageHandler(jobLauncher);
    }
}
Here is the next part, where I trigger the Spring Batch job; it then goes to the service activator.
@Component
public class FileToJobTransformer implements ApplicationContextAware {

    private ApplicationContext context;

    @Autowired
    private Job job;

    @Transformer(inputChannel = "fromSftpChannel", outputChannel = "jobChannel")
    public JobLaunchRequest transform(File aFile) throws Exception {
        String fileName = aFile.getName();
        JobParameters jobParameters = new JobParametersBuilder().addString(
                "input.file", aFile.getAbsolutePath()).toJobParameters();
        JobLaunchRequest request = new JobLaunchRequest(job, jobParameters);
        return request;
    }

    @Override
    public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
        this.context = applicationContext;
    }
}
The custom code is as follows:
public class LastModifiedLsEntryFileListFilter implements FileListFilter<ChannelSftp.LsEntry> {

    private static final long DEFAULT_AGE = 60;

    private volatile long age = DEFAULT_AGE;

    public long getAge() {
        return this.age;
    }

    public void setAge(long age) {
        setAge(age, TimeUnit.SECONDS);
    }

    public void setAge(long age, TimeUnit unit) {
        this.age = unit.toSeconds(age);
    }

    @Override
    public List<ChannelSftp.LsEntry> filterFiles(ChannelSftp.LsEntry[] files) {
        System.out.println("files = [" + files.length + "]");
        List<ChannelSftp.LsEntry> list = new ArrayList<ChannelSftp.LsEntry>();
        long now = System.currentTimeMillis() / 1000;
        for (ChannelSftp.LsEntry file : files) {
            if (file.getAttrs().isDir()) {
                continue;
            }
            int lastModifiedTime = file.getAttrs().getMTime();
            if (lastModifiedTime + this.age <= now) {
                list.add(file);
            }
        }
        Collections.reverse(list);
        // Guard against an empty listing; an unconditional list.get(0) would throw
        // IndexOutOfBoundsException when no file is old enough yet.
        ArrayList<ChannelSftp.LsEntry> oneElementList = new ArrayList<ChannelSftp.LsEntry>(1);
        if (!list.isEmpty()) {
            oneElementList.add(list.get(0));
        }
        return oneElementList;
    }
}

spring batch job stuck in loop even when ItemReader returns null

I am converting XML to CSV using Spring Batch and Spring Integration. My approach: the job starts by reading files from a landing dir; while processing, it moves files to an inprocess dir; on error, the file is moved to an error dir; on successful processing/writing, the file is moved to output/completed.
After searching a while I gathered that there must be a problem with the ItemReader, but it does return null eventually. I cannot see where the problem is.
Below is my batch configuration
@Bean
public Job commExportJob(Step parseStep) throws IOException {
    return jobs.get("commExportJob")
            .incrementer(new RunIdIncrementer())
            .flow(parseStep)
            .end()
            .listener(listener())
            .build();
}

@Bean
public Step streamStep() throws IOException {
    CommReader itemReader = itemReader(null);
    if (itemReader != null) {
        return steps.get("streamStep")
                .<Object, Object>chunk(env.getProperty(Const.INPUT_READ_CHUNK_SIZE, Integer.class))
                .reader(itemReader)
                .processor(itemProcessor())
                .writer(itemWriter(null))
                .listener(getChunkListener())
                .build();
    }
    return null;
}

@Bean
@StepScope
public ItemWriter<Object> itemWriter(@Value("#{jobParameters['outputFilePath']}") String outputFilePath) {
    log.info("CommBatchConfiguration.itemWriter() : " + outputFilePath);
    CommItemWriter writer = new CommItemWriter();
    return writer;
}

@Bean
@StepScope
public CommReader itemReader(@Value("#{jobParameters['inputFilePath']}") String inputFilePath) {
    log.info("CommBatchConfiguration.itemReader() : " + inputFilePath);
    CommReader reader = new CommReader(inputFilePath);
    // reader.setEncoding(env.getProperty("char.encoding","UTF-8"));
    return reader;
}

@Bean
@StepScope
public CommItemProcessor itemProcessor() {
    log.info("CommBatchConfiguration.itemProcessor() : Entry");
    return new CommItemProcessor(ruleService);
}
CommReader.java
File inputFile = null;

private String jobName;

public CommReader(String inputFilePath) {
    inputFile = new File(inputFilePath);
}

@Value("#{stepExecution}")
private StepExecution stepExecution;

public String getJobName() {
    return jobName;
}

public void setJobName(String jobName) {
    this.jobName = jobName;
}

@Override
public Object read() throws IOException {
    DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
    DocumentBuilder builder;
    if (inputFile.exists()) {
        try {
            builder = factory.newDocumentBuilder();
            log.info("CommReader.read() :" + inputFile.getAbsolutePath());
            Document document = builder.parse(inputFile);
            return document;
        } catch (ParserConfigurationException | SAXException | TransformerFactoryConfigurationError e) {
            log.error("Exception while reading ", e);
        }
    }
    return null;
}

@Override
public void close() throws ItemStreamException {
}

@Override
public void open(ExecutionContext arg0) throws ItemStreamException {
}

@Override
public void update(ExecutionContext arg0) throws ItemStreamException {
}

@Override
public void setResource(Resource arg0) {
}
CommItemProcessor.java
@Autowired
CommExportService ruleService;

public CommItemProcessor(CommExportService ruleService) {
    this.ruleService = ruleService;
}

@Override
public Object process(Object bean) throws Exception {
    log.info("CommItemProcessor.process() : Item Processor : " + bean);
    return bean;
}
CommItemWriter.java
FlatFileItemWriter<byte[]> delegate;
ExecutionContext execContext;
FileOutputStream fileWrite;
File stylesheet;
StreamSource stylesource;
Transformer transformer;
List<List<?>> itemsTotal = null;
int recordCount = 0;

@Autowired
FileUtil fileUtil;

@Value("${input.completed.dir}")
String completedDir;

@Value("${input.inprocess.dir}")
String inprocessDir;

public void update(ExecutionContext arg0) throws ItemStreamException {
    this.delegate.update(arg0);
}

public void open(ExecutionContext arg0) throws ItemStreamException {
    this.execContext = arg0;
    this.delegate.open(arg0);
}

public void close() throws ItemStreamException {
    this.delegate.close();
}

@Override
public void write(List<? extends Object> items) throws Exception {
    log.info("CommItemWriter.write() : items.size() : " + items.size());
    stylesheet = new File("./config/style.xsl");
    stylesource = new StreamSource(stylesheet);
    String fileName = fileUtil.getFileName();
    try {
        transformer = TransformerFactory.newInstance().newTransformer(stylesource);
    } catch (TransformerConfigurationException | TransformerFactoryConfigurationError e) {
        log.error("Exception while writing", e);
    }
    for (Object object : items) {
        log.info("CommItemWriter.write() : Object : " + object.getClass().getName());
        log.info("CommItemWriter.write() : FileName : " + fileName);
        Source source = new DOMSource((Document) object);
        Result outputTarget = new StreamResult(new File(fileName));
        transformer.transform(source, outputTarget);
    }
}
In the ChunkListener I am not doing much.
Below is the job listener.
@Override
public void beforeJob(JobExecution jobExecution) {
    log.info("JK: CommJobListener.beforeJob()");
}

@Override
public void afterJob(JobExecution jobExecution) {
    log.info("JK: CommJobListener.afterJob()");
    JobParameters jobParams = jobExecution.getJobParameters();
    File inputFile = new File(jobParams.getString("inputFilePath"));
    File outputFile = new File(jobParams.getString("outputFilePath"));
    try {
        if (jobExecution.getStatus().isUnsuccessful()) {
            Files.move(inputFile.toPath(), Paths.get(inputErrorDir, inputFile.getName()),
                    StandardCopyOption.REPLACE_EXISTING);
            Files.move(outputFile.toPath(), Paths.get(outputErrorDir, outputFile.getName()),
                    StandardCopyOption.REPLACE_EXISTING);
        } else {
            String inputFileName = inputFile.getName();
            Files.move(inputFile.toPath(), Paths.get(inputCompletedDir, inputFileName),
                    StandardCopyOption.REPLACE_EXISTING);
            Files.move(outputFile.toPath(), Paths.get(outputCompletedDir, outputFile.getName()),
                    StandardCopyOption.REPLACE_EXISTING);
        }
    } catch (IOException ioe) {
        log.error("IOException occured ", ioe);
    }
}
I am also using an integration flow.
@Bean
public IntegrationFlow messagesFlow(JobLauncher jobLauncher) {
    try {
        Map<String, Object> headers = new HashMap<>();
        headers.put("jobName", "commExportJob");
        return IntegrationFlows
                .from(Files.inboundAdapter(new File(env.getProperty(Const.INPUT_LANDING_DIR))),
                        e -> e.poller(Pollers
                                .fixedDelay(env.getProperty(Const.INPUT_POLLER_DELAY, Integer.class).intValue())
                                .maxMessagesPerPoll(env.getProperty(Const.INPUT_MAX_MESSAGES_PER_POLL, Integer.class).intValue())
                                .taskExecutor(getFileProcessExecutor())))
                .handle("moveFile", "moveFile")
                .enrichHeaders(headers)
                .transform(jobTransformer)
                .handle(jobLaunchingGw(jobLauncher))
                .channel("nullChannel").get();
    } catch (Exception e) {
        log.error("Exception in Integration flow", e);
    }
    return null;
}

@Autowired
private Environment env;

@Bean
public MessageHandler jobLaunchingGw(JobLauncher jobLauncher) {
    return new JobLaunchingGateway(jobLauncher);
}

@MessagingGateway
public interface IMoveFile {

    @Gateway(requestChannel = "moveFileChannel")
    Message<File> moveFile(Message<File> inputFileMessage);
}

@Bean(name = "fileProcessExecutor")
public Executor getFileProcessExecutor() {
    ThreadPoolTaskExecutor fileProcessExecutor = new ThreadPoolTaskExecutor();
    fileProcessExecutor.setCorePoolSize(env.getRequiredProperty(Const.INPUT_EXECUTOR_POOLSIZE, Integer.class));
    fileProcessExecutor.setMaxPoolSize(env.getRequiredProperty(Const.INPUT_EXECUTOR_MAXPOOLSIZE, Integer.class));
    fileProcessExecutor.setQueueCapacity(env.getRequiredProperty(Const.INPUT_EXECUTOR_QUEUECAPACITY, Integer.class));
    fileProcessExecutor.setRejectedExecutionHandler(new ThreadPoolExecutor.CallerRunsPolicy());
    fileProcessExecutor.initialize();
    return fileProcessExecutor;
}
Try returning the Document itself instead of Object from the read method of CommReader.java, and then check in the writer whether it is coming through as null, in order to stop.
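More generally, the step keeps looping because read() returns the same Document on every call while the file exists; a reader must eventually return null. A minimal sketch of a one-shot variant, assuming the same inputFile field from the question (the consumed flag is illustrative):

private boolean consumed = false;

@Override
public Object read() throws IOException {
    // Return the document exactly once; afterwards signal "no more input"
    // with null so the chunk-oriented step can finish.
    if (consumed || !inputFile.exists()) {
        return null;
    }
    consumed = true;
    try {
        DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
        return builder.parse(inputFile);
    } catch (ParserConfigurationException | SAXException e) {
        throw new IOException("Exception while reading " + inputFile.getAbsolutePath(), e);
    }
}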

retrofit2.0.0+okhttp3 download logging output OOM

addInterceptor -> HttpLoggingInterceptor bug -> OOM
HttpLoggingInterceptor interceptor = new HttpLoggingInterceptor();
interceptor.setLevel(HttpLoggingInterceptor.Level.BODY);
// set the cache path
File httpCacheDirectory = new File(MyApplication.mContext.getCacheDir(), "responses");
// set the cache size to 10 MB
Cache cache = new Cache(httpCacheDirectory, 10 * 1024 * 1024);
OkHttpClient client = null;
final TrustManager[] trustManager = new TrustManager[]{
        new X509TrustManager() {
            @Override
            public void checkClientTrusted(java.security.cert.X509Certificate[] chain, String authType) throws java.security.cert.CertificateException {
            }

            @Override
            public void checkServerTrusted(java.security.cert.X509Certificate[] chain, String authType) throws java.security.cert.CertificateException {
            }

            @Override
            public java.security.cert.X509Certificate[] getAcceptedIssuers() {
                return new java.security.cert.X509Certificate[0];
            }
        }
};
try {
    SSLContext sslContext = SSLContext.getInstance("SSL");
    sslContext.init(null, trustManager, new SecureRandom());
    SSLSocketFactory sslSocketFactory = sslContext.getSocketFactory();
    client = new OkHttpClient.Builder().addInterceptor(interceptor).sslSocketFactory(sslSocketFactory).addInterceptor(new BaseInterceptor()).hostnameVerifier(new HostnameVerifier() {
        @Override
        public boolean verify(String hostname, SSLSession session) {
            return true;
        }
    }).cache(cache).build();
} catch (NoSuchAlgorithmException e) {
    e.printStackTrace();
} catch (KeyManagementException e) {
    e.printStackTrace();
}
_instance = new Retrofit.Builder().baseUrl(ConstantUtils.HOST)
        .addConverterFactory(GsonConverterFactory.create())
        .addCallAdapterFactory(RxJavaCallAdapterFactory.create()).client(client).build();
return _instance.create(ICommonService.class);
RetrofitUtils.generateCommonService().down().subscribeOn(Schedulers.newThread())
        .observeOn(AndroidSchedulers.mainThread())
        .subscribe(new Subscriber<ResponseBody>() {
            @Override
            public void onCompleted() {
            }

            @Override
            public void onError(Throwable e) {
                LogUtils.e("errorrrrrrrrrr");
            }

            @Override
            public void onNext(ResponseBody responseBody) {
                BufferedOutputStream bos = null;
                FileOutputStream fos = null;
                try {
                    byte[] by = responseBody.bytes();
                    File file = new File(BaseApplication.getContext().getFilesDir(), "download.apk");
                    fos = new FileOutputStream(file);
                    bos = new BufferedOutputStream(fos);
                    bos.write(by);
                    LogUtils.e("length=======>", file.length() + "");
                    mainView.updateApk(file);
                } catch (IOException e) {
                    e.printStackTrace();
                } finally {
                    if (bos != null) {
                        try {
                            bos.close();
                        } catch (IOException e) {
                            e.printStackTrace();
                        }
                    }
                    if (fos != null) {
                        try {
                            fos.close();
                        } catch (IOException e) {
                            e.printStackTrace();
                        }
                    }
                }
                LogUtils.e("success=========");
            }
        });
When you use HttpLoggingInterceptor.Level.BODY and then try to download a large file, the interceptor keeps the whole body in memory for the log.
That easily leads to an OOM.
Remove the body logging, or log at NONE, BASIC, or HEADERS level, and try again.
HttpLoggingInterceptor interceptor = new HttpLoggingInterceptor();
interceptor.setLevel(HttpLoggingInterceptor.Level.BODY);
// set the cache path
Try this:
HttpLoggingInterceptor interceptor = new HttpLoggingInterceptor();
interceptor.setLevel(HttpLoggingInterceptor.Level.NONE);
// set the cache path
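Note that even with logging turned down, responseBody.bytes() in the subscriber above still buffers the entire file in memory. A minimal sketch of streaming the body to disk instead (assuming the same download.apk target; the Retrofit service method should also be annotated with @Streaming so Retrofit does not buffer the response either):

// Inside onNext(ResponseBody responseBody): copy the stream in small chunks.
File file = new File(BaseApplication.getContext().getFilesDir(), "download.apk");
try (InputStream in = responseBody.byteStream();
     OutputStream out = new BufferedOutputStream(new FileOutputStream(file))) {
    byte[] buffer = new byte[8192];
    int read;
    while ((read = in.read(buffer)) != -1) {
        out.write(buffer, 0, read);
    }
} catch (IOException e) {
    e.printStackTrace();
}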
