Streaming from remote SFTP directories and sub-directories with Spring Integration - spring-integration

I am using the Spring Integration Streaming Inbound Channel Adapter to get a stream from a remote SFTP server and process every line of the content.
I use :
// Streaming inbound adapter: polls "test_dir" every fetchInt ms and emits one
// InputStream per remote file; Files.splitter(true, true) then splits the
// stream into lines (with markers). Fragment is truncated by the author ("....").
IntegrationFlows.from(Sftp.inboundStreamingAdapter(template)
.filter(remoteFileFilter)
.remoteDirectory("test_dir"),
e -> e.id("sftpInboundAdapter")
.autoStartup(true)
.poller(Pollers.fixedDelay(fetchInt)))
.handle(Files.splitter(true, true))
....
And it can work now. But I can only get file from test_dir directory, but I need to recursively get files from this dir and sub-directory and parse every line.
I noticed that the Inbound Channel Adapter which is Sftp.inboundAdapter(sftpSessionFactory).scanner(...) . It can scan sub-directory. But I didn't see anything for Streaming Inbound Channel Adapter.
So, how can I implement the 'recursively get files from dir' in Streaming Inbound Channel Adapter?
Thanks.

You can use two outbound gateways - the first doing ls -R (recursive list); split the result and use a gateway configured with mget -stream to get each file.
EDIT
#SpringBootApplication
public class So60987851Application {
public static void main(String[] args) {
SpringApplication.run(So60987851Application.class, args);
}
#Bean
IntegrationFlow flow(SessionFactory<LsEntry> csf) {
return IntegrationFlows.from(() -> "foo", e -> e.poller(Pollers.fixedDelay(5_000)))
.handle(Sftp.outboundGateway(csf, Command.LS, "payload")
.options(Option.RECURSIVE, Option.NAME_ONLY)
// need a more robust metadata store for persistence, unless the files are removed
.filter(new SftpPersistentAcceptOnceFileListFilter(new SimpleMetadataStore(), "test")))
.split()
.log()
.enrichHeaders(headers -> headers.headerExpression("fileToRemove", "'foo/' + payload"))
.handle(Sftp.outboundGateway(csf, Command.GET, "'foo/' + payload")
.options(Option.STREAM))
.split(new FileSplitter())
.log()
// instead of a filter, we can remove the remote file.
// but needs some logic to wait until all lines read
// .handle(Sftp.outboundGateway(csf, Command.RM, "headers['fileToRemove']"))
// .log()
.get();
}
#Bean
CachingSessionFactory<LsEntry> csf(DefaultSftpSessionFactory sf) {
return new CachingSessionFactory<>(sf);
}
#Bean
DefaultSftpSessionFactory sf() {
DefaultSftpSessionFactory sf = new DefaultSftpSessionFactory();
sf.setHost("10.0.0.8");
sf.setUser("gpr");
sf.setPrivateKey(new FileSystemResource(new File("/Users/grussell/.ssh/id_rsa")));
sf.setAllowUnknownKeys(true);
return sf;
}
}

It works for me, this is my full code
#Configuration
public class SftpIFConfig {
#InboundChannelAdapter(value = "sftpMgetInputChannel",
poller = #Poller(fixedDelay = "5000"))
public String filesForMGET(){
return "/upload/done";
}
#Bean
public IntegrationFlow sftpMGetFlow(SessionFactory<ChannelSftp.LsEntry> csf) {
return IntegrationFlows.from("sftpMgetInputChannel")
.handle(Sftp.outboundGateway(csf,
AbstractRemoteFileOutboundGateway.Command.LS, "payload")
.options(AbstractRemoteFileOutboundGateway.Option.RECURSIVE, AbstractRemoteFileOutboundGateway.Option.NAME_ONLY)
//Persistent file list filter using the server's file timestamp to detect if we've already 'seen' this file.
.filter(new SftpPersistentAcceptOnceFileListFilter(new SimpleMetadataStore(), "test")))
.split()
.log(message -> "file path -> "+message.getPayload())
.enrichHeaders(headers -> headers.headerExpression("fileToRemove", "'/upload/done/' + payload"))
.log(message -> "Heder file info -> "+message.getHeaders())
.handle(Sftp.outboundGateway(csf, AbstractRemoteFileOutboundGateway.Command.GET, "'/upload/done/' + payload")
.options(AbstractRemoteFileOutboundGateway.Option.STREAM))
.split(new FileSplitter())
.log(message -> "File content -> "+message.getPayload())
.get();
}
#Bean
CachingSessionFactory<ChannelSftp.LsEntry> csf(DefaultSftpSessionFactory sf) {
return new CachingSessionFactory<>(sf);
}
#Bean
DefaultSftpSessionFactory sf() {
DefaultSftpSessionFactory factory = new DefaultSftpSessionFactory();
factory.setHost("0.0.0.0");
factory.setPort(2222);
factory.setAllowUnknownKeys(true);
factory.setUser("xxxx");
factory.setPassword("xxx");
return factory;
}

Like dsillman2000 commented, I also found this answer could take more of an explanation.
After figuring this out based on the examples here, here is my extended example that works for me, with extracted variables and methods that (hopefully) clearly say what the individual parts are or do.
Dependencies: Mostly org.springframework.integration:spring-integration-sftp:2.6.6
package com.example.sftp.incoming;

import com.jcraft.jsch.ChannelSftp;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.integration.annotation.InboundChannelAdapter;
import org.springframework.integration.annotation.Poller;
import org.springframework.integration.dsl.HeaderEnricherSpec;
import org.springframework.integration.dsl.IntegrationFlow;
import org.springframework.integration.dsl.IntegrationFlows;
import org.springframework.integration.file.filters.AbstractFileListFilter;
import org.springframework.integration.file.remote.gateway.AbstractRemoteFileOutboundGateway;
import org.springframework.integration.file.remote.session.CachingSessionFactory;
import org.springframework.integration.file.remote.session.SessionFactory;
import org.springframework.integration.file.splitter.FileSplitter;
import org.springframework.integration.metadata.SimpleMetadataStore;
import org.springframework.integration.sftp.dsl.Sftp;
import org.springframework.integration.sftp.dsl.SftpOutboundGatewaySpec;
import org.springframework.integration.sftp.filters.SftpPersistentAcceptOnceFileListFilter;
import org.springframework.integration.sftp.session.DefaultSftpSessionFactory;
import org.springframework.messaging.Message;
import java.util.function.Consumer;
import java.util.function.Function;

/**
 * Extended, self-documenting version of the recursive-streaming flow:
 * LS (recursive, names only) -> split -> GET (stream) -> FileSplitter (lines),
 * with each DSL step extracted into a named private method.
 */
@Configuration
public class SftpIncomingRecursiveConfiguration {

    private static final String BASE_REMOTE_FOLDER_TO_GET_FILES_FROM = "/tmp/sftptest/";
    // SpEL string literal form of the folder, used inside gateway expressions.
    private static final String REMOTE_FOLDER_PATH_AS_EXPRESSION = "'" + BASE_REMOTE_FOLDER_TO_GET_FILES_FROM + "'";
    private static final String INBOUND_CHANNEL_NAME = "sftpGetInputChannel";

    @Value("${demo.sftp.host}")
    private String sftpHost;

    @Value("${demo.sftp.user}")
    private String sftpUser;

    @Value("${demo.sftp.password}")
    private String sftpPassword;

    @Value("${demo.sftp.port}")
    private String sftpPort;

    @Bean
    public SessionFactory<ChannelSftp.LsEntry> sftpSessionFactory() {
        DefaultSftpSessionFactory factory = new DefaultSftpSessionFactory(true);
        factory.setHost(sftpHost);
        factory.setPort(Integer.parseInt(sftpPort));
        factory.setUser(sftpUser);
        factory.setPassword(sftpPassword);
        factory.setAllowUnknownKeys(true);
        return new CachingSessionFactory<>(factory);
    }

    // poll for new files every 500ms
    @InboundChannelAdapter(value = INBOUND_CHANNEL_NAME, poller = @Poller(fixedDelay = "500"))
    public String filesForSftpGet() {
        return BASE_REMOTE_FOLDER_TO_GET_FILES_FROM;
    }

    @Bean
    public IntegrationFlow sftpGetFlow(SessionFactory<ChannelSftp.LsEntry> sessionFactory) {
        return IntegrationFlows
                .from(INBOUND_CHANNEL_NAME)
                .handle(listRemoteFiles(sessionFactory))
                .split()
                .log(logTheFilePath())
                .enrichHeaders(addAMessageHeader())
                .log(logTheMessageHeaders())
                .handle(getTheFile(sessionFactory))
                .split(splitContentIntoLines())
                .log(logTheFileContent())
                .get();
    }

    // LS gateway: recursive, file names only, filtered to new .txt files.
    private SftpOutboundGatewaySpec listRemoteFiles(SessionFactory<ChannelSftp.LsEntry> sessionFactory) {
        return Sftp.outboundGateway(sessionFactory,
                AbstractRemoteFileOutboundGateway.Command.LS, REMOTE_FOLDER_PATH_AS_EXPRESSION)
                .options(AbstractRemoteFileOutboundGateway.Option.RECURSIVE,
                        AbstractRemoteFileOutboundGateway.Option.NAME_ONLY)
                .filter(onlyFilesWeHaveNotSeenYet())
                .filter(onlyTxtFiles());
    }

    /* Persistent file list filter using the server's file timestamp to detect if we've already 'seen' this file.
    Without it, the program would report the same file over and over again. */
    private SftpPersistentAcceptOnceFileListFilter onlyFilesWeHaveNotSeenYet() {
        return new SftpPersistentAcceptOnceFileListFilter(new SimpleMetadataStore(), "keyPrefix");
    }

    private AbstractFileListFilter<ChannelSftp.LsEntry> onlyTxtFiles() {
        return new AbstractFileListFilter<>() {
            @Override
            public boolean accept(ChannelSftp.LsEntry file) {
                return file.getFilename().endsWith(".txt");
            }
        };
    }

    private Function<Message<Object>, Object> logTheFilePath() {
        return message -> "### File path: " + message.getPayload();
    }

    // Stores the full remote path so a later RM step could delete the file.
    private Consumer<HeaderEnricherSpec> addAMessageHeader() {
        return headers -> headers.headerExpression("fileToRemove", REMOTE_FOLDER_PATH_AS_EXPRESSION + " + payload");
    }

    private Function<Message<Object>, Object> logTheMessageHeaders() {
        return message -> "### Header file info: " + message.getHeaders();
    }

    // GET gateway with STREAM: the payload becomes an InputStream, not a local copy.
    private SftpOutboundGatewaySpec getTheFile(SessionFactory<ChannelSftp.LsEntry> sessionFactory) {
        return Sftp.outboundGateway(sessionFactory, AbstractRemoteFileOutboundGateway.Command.GET,
                REMOTE_FOLDER_PATH_AS_EXPRESSION + " + payload")
                .options(AbstractRemoteFileOutboundGateway.Option.STREAM);
    }

    private FileSplitter splitContentIntoLines() {
        return new FileSplitter();
    }

    private Function<Message<Object>, Object> logTheFileContent() {
        return message -> "### File content line: '" + message.getPayload() + "'";
    }
}
EDIT: Please note, there is a difference here. The other example uses a poller to generate the message with the remote file path from "filesForMGET" over and over again, and that message payload (file path) gets used as an argument to "ls". I'm hard-coding it here, ignoring the message content from the poller.

Related

How to poll for multiple files at once with Spring Integration with WebFlux?

I have the following configuration below for file monitoring using Spring Integration and WebFlux.
It works well, but if I drop in 100 files it will pick up one file at a time with a 10 second gap between the "Received a notification of new file" log messages.
How do I poll for multiple files at once, so I don't have to wait 1000 seconds for all my files to finally register?
#Configuration
#EnableIntegration
public class FileMonitoringConfig {
private static final Logger logger =
LoggerFactory.getLogger(FileMonitoringConfig.class.getName());
#Value("${monitoring.folder}")
private String monitoringFolder;
#Value("${monitoring.polling-in-seconds:10}")
private int pollingInSeconds;
#Bean
Publisher<Message<Object>> myMessagePublisher() {
return IntegrationFlows.from(
Files.inboundAdapter(new File(monitoringFolder))
.useWatchService(false),
e -> e.poller(Pollers.fixedDelay(pollingInSeconds, TimeUnit.SECONDS)))
.channel(myChannel())
.toReactivePublisher();
}
#Bean
Function<Flux<Message<Object>>, Publisher<Message<Object>>> myReactiveSource() {
return flux -> myMessagePublisher();
}
#Bean
FluxMessageChannel myChannel() {
return new FluxMessageChannel();
}
#Bean
#ServiceActivator(
inputChannel = "myChannel",
async = "true",
reactive = #Reactive("myReactiveSource"))
ReactiveMessageHandler myMessageHandler() {
return new ReactiveMessageHandler() {
#Override
public Mono<Void> handleMessage(Message<?> message) throws MessagingException {
return Mono.fromFuture(doHandle(message));
}
private CompletableFuture<Void> doHandle(Message<?> message) {
return CompletableFuture.runAsync(
() -> {
logger.info("Received a notification of new file: {}", message.getPayload());
File file = (File) message.getPayload();
});
}
};
}
}
The Inbound Channel Adapter polls a single data record from the source per poll cycle.
Consider to add maxMessagesPerPoll(-1) to your poller() configuration.
See more in docs: https://docs.spring.io/spring-integration/docs/current/reference/html/core.html#channel-adapter-namespace-inbound

How to write unit test for connection factory in Spring webFlux

#Component
public class DisplayOverrideRowMapper {
public DisplayOverride mapRow(Row row, RowMetadata meta) {
return new DisplayOverride(
row.get("store_auth_override_id",Integer.class),
row.get("item_no",String.class),
row.get("item_dsc",String.class),
toForceOnlineOffline(row.get("display_override_type",String.class)),
row.get("billing_division_no",String.class),
row.get("store_no",String.class),
row.get("in_store_flg",Boolean.class),
row.get("pickup_flg",Boolean.class),
row.get("delivery_flg",Boolean.class),
row.get("ship_flg",Boolean.class),
row.get("reason_cd",Integer.class),
row.get("effective_dt", LocalDate.class),
row.get("expiration_dt",LocalDate.class),
row.get("last_update_ts",LocalDate.class),
row.get("last_update_user_id",String.class));
}
private DisplayOverrideType toForceOnlineOffline(String displayOverridetype) {
switch (displayOverridetype) {
case "offline":
return DisplayOverrideType.FORCE_OFFLINE;
case "online":
return DisplayOverrideType.FORCE_ONLINE;
default:
return null;
}
}
}
The below func method I want to write unit test for
// Constructor injection of the row mapper and the R2DBC connection factory.
@Autowired
public OverridesRepositoryHelperImpl(DisplayOverrideRowMapper displayOverrideRowMapper,
        ConnectionFactory connectionFactory) {
    this.displayOverrideRowMapper = displayOverrideRowMapper;
    this.connectionFactory = connectionFactory;
}

/**
 * Builds the SELECT statement for the request and streams each matching row
 * through the row mapper, closing the connection when the stream terminates.
 * NOTE(review): the WHERE clause is assembled by string concatenation
 * (buildWhereClauseForGet) — verify it binds parameters rather than
 * interpolating user input, otherwise this is SQL-injectable.
 */
@Override
public Flux<DisplayOverride> func(DisplayOverrideRequest displayOverrideRequest) {
    StringBuilder sql = new StringBuilder(SELECT_GET_OVERRIDE);
    String results = buildWhereClauseForGet(displayOverrideRequest);
    sql.append(results);
    return Flux.from(connectionFactory.create())
            .flatMap(c ->
                    Flux.from(c.createStatement(sql.toString()).execute())
                            .map(result -> result.map((row, meta) -> displayOverrideRowMapper.mapRow(row, meta)))
                            .flatMap(p -> Flux.from(p))
                            // Release the connection on complete, error, or cancel.
                            .doFinally(st -> c.close())
            );
}
Can anyone help me write the unit test for this func method? I wrote the test below, but it does not cover all the lines of the func method: when I stub the connectionFactory.create() method, the actual code shows no coverage.
When I use the debugger it goes inside this line connectionFactory.create()).thenReturn((Publisher) Flux.just(connection) but it does not go any further.
// Mockito/Spring test fixtures for OverridesRepositoryHelperImpl.func(...)
@MockBean
DisplayOverrideRowMapper displayOverrideRowMapper;

@MockBean
ConnectionFactory connectionFactory;

@MockBean
Connection connection;

@MockBean
Result result;

Flux<DisplayOverride> displayOverrideResponseFlux = Flux.just(displayOverrideResponse); // I already created this so you don’t need to worry about it

Mockito.when(displayOverrideRowMapper.mapRow(any(), any())).thenCallRealMethod();
// Stub create() to emit the mocked connection; the mapping pipeline mirrors
// the production code but runs against the mocked Result.
Mockito.when(connectionFactory.create()).thenReturn((Publisher) Flux.just(connection).flatMap(
        c -> Flux.just(result)
                .map(result -> result.map((row, meta) -> displayOverrideRowMapper.mapRow(row, meta)))
                .flatMap(p -> Flux.from(p)).doFinally(signalType -> c.close())));

OverridesRepositoryHelperImpl overridesRepositoryHelper =
        new OverridesRepositoryHelperImpl(displayOverrideRowMapper, connectionFactory);
// Renamed from 'Result' (which shadowed the io.r2dbc Result type) and
// terminated with the missing semicolon.
Flux<DisplayOverride> actual = overridesRepositoryHelper.func(displayOverrideRequest);

rename a file in source directory after successfully sending to jms queue , in spring integration. using ExpressionEvaluatingRequestHandlerAdvice

getting following exception
AdviceMessage [payload=org.springframework.expression.spel.SpelEvaluationException: EL1008E: Property or field 'absolutePath' cannot be found on object of type 'java.lang.String' - maybe not public or not valid?
see the source code..
#Bean
#InboundChannelAdapter(value = "fileInputChannel", poller = #Poller(fixedDelay = "1000"))
public MessageSource<File> fileReadingMessageSource(#Value("${file.poller.path}") final String path,
#Value("${file.poller.fileName-pattern}") final String fileExt) {
CompositeFileListFilter<File> filters = new CompositeFileListFilter<>();
filters.addFilter(new SimplePatternFileListFilter(fileExt));
// filters.addFilter(new AcceptOnceFileListFilter<File>());
FileReadingMessageSource source = new FileReadingMessageSource();
source.setAutoCreateDirectory(false);
source.setDirectory(new File(path));
source.setFilter(filters);
source.setUseWatchService(true);
source.setWatchEvents(WatchEventType.CREATE);
System.out.println(path);
return source;
}
#Bean
public IntegrationFlow processFile() {
return IntegrationFlows
.from("fileInputChannel")
.transform(fileToStringTransformer())
.handle("fileProcessor", "process")//added some data manupilation code and returns Message<String>//
.log(LoggingHandler.Level.INFO, "process file", m -> m.getHeaders().get("Message_Type"))
.channel(this.jmsOutboundChannel())
.get();
}
#Bean
public IntegrationFlow sendToJmsQueue(JmsTemplate wlsJmsTemplate) {
return IntegrationFlows.from(this.jmsOutboundChannel())
.log(LoggingHandler.Level.INFO, "sending to queue", m ->
m.getHeaders().get("Message_Type"))
.handle(Jms.outboundAdapter(wlsJmsTemplate).destination(inboundDataQueue),
e -> e.advice(expressionAdvice()))
}
#Bean
public Advice expressionAdvice() {
ExpressionEvaluatingRequestHandlerAdvice advice = new ExpressionEvaluatingRequestHandlerAdvice();
advice.setSuccessChannelName("success.input");
advice.setOnSuccessExpressionString("payload.delete()");
//advice.setOnSuccessExpressionString("payload.renameTo(new java.io.File(payload.absolutePath + '.Done'))");
advice.setFailureChannelName("failure.input");
advice.setOnFailureExpressionString("payload.renameTo(new java.io.File(payload.absolutePath + '.FailedToSend'))");
advice.setTrapException(true);
return advice;
}
When using a FileReadingMessageSource, as well as putting the File in the payload, the file is also added as a header FileHeaders.ORIGINAL_FILE so that it is available later if a transformation occurs.
So your expressions need to use
headers['file_originalFile'].renameTo(new java.io.File(headers['file_originalFile'].absolutePath + '.failed'))
and
headers['file_originalFile'].delete()

Watch remote directory for added files and stream it for reading data over SFTP

I want to add a watch on the remote machine for newly added CSV files or unread. Once files are identified read it according to their timestamp which will be there in the file name. The file will be read using streaming rather coping to the local machine. While the file is getting read, append _reading to the filename and append _read once the file is read. The file will be read over SFTP protocol and I am planning to use spring integration sftp. In case of error while reading file or data in the file is not as per expectation I want to move that file in sub-directory.
I have tried to poll the remote directory and reading once CSV file. Once read I am removing the file from the directory.
<dependency>
<groupId>org.springframework.integration</groupId>
<artifactId>spring-integration-sftp</artifactId>
<version>5.1.0.RELEASE</version>
</dependency>
<dependency>
<groupId>org.springframework.integration</groupId>
<artifactId>spring-integration-core</artifactId>
<version>5.0.6.RELEASE</version>
</dependency>
Spring boot version 2.0.3.RELEASE
#Bean
public SessionFactory<ChannelSftp.LsEntry> sftpSessionFactory() {
DefaultSftpSessionFactory factory = new DefaultSftpSessionFactory(true);
factory.setHost(hostname);
factory.setPort(22);
factory.setUser(username);
factory.setPassword(password);
factory.setAllowUnknownKeys(true);
return new CachingSessionFactory<ChannelSftp.LsEntry>(factory);
}
#Bean
public MessageSource<InputStream> sftpMessageSource() {
SftpStreamingMessageSource messageSource = new SftpStreamingMessageSource(template());
messageSource.setRemoteDirectory(path);
messageSource.setFilter(compositeFilters());
return messageSource;
}
public CompositeFileListFilter compositeFilters() {
return new CompositeFileListFilter()
.addFilter(new SftpRegexPatternFileListFilter(".*csv"));
}
#Bean
public SftpRemoteFileTemplate template() {
return new SftpRemoteFileTemplate(sftpSessionFactory());
}
#Bean
public IntegrationFlow sftpOutboundListFlow() {
return IntegrationFlows.from(this.sftpMessageSource(), e -> e.poller(Pollers.fixedDelay(5, TimeUnit.SECONDS)))
.handle(Sftp.outboundGateway(template(), NLST, path).options(Option.RECURSIVE)))
.filter(compositeFilters())
.transform(sorter())
.split()
.handle(Sftp.outboundGateway(template(), GET, "headers['file_remoteDirectory'] + headers['file_remoteFile']").options(STREAM))
.transform(csvToPojoTransformer())
.handle(service())
.handle(Sftp.outboundGateway(template(), MV, "headers['file_remoteDirectory'] + headers['file_remoteFile'] + _read"))
.handle(after())
.get();
}
#Bean
public MessageHandler sorter() {
return new MessageHandler() {
#Override
public void handleMessage(Message<?> message) throws MessagingException {
List<String> fileNames = (List<String>) message.getPayload();
Collections.sort(fileNames);
}
};
}
#Bean
public MessageHandler csvToPojoTransformer() {
return new MessageHandler() {
#Override
public void handleMessage(Message<?> message) throws MessagingException {
InputStream streamData = (InputStream) message.getPayload();
convertStreamtoObject(streamData, Class.class);
}
};
}
public List<?> convertStreamtoObject(InputStream inputStream, Class clazz) {
HeaderColumnNameMappingStrategy ms = new HeaderColumnNameMappingStrategy();
ms.setType(clazz);
Reader reader = new InputStreamReader(inputStream);
CsvToBean cb = new CsvToBeanBuilder(reader)
.withType(clazz)
.withMappingStrategy(ms)
.withSkipLines(0)
.withSeparator('|')
.withThrowExceptions(true)
.build();
return cb.parse();
}
#Bean
public MessageHandler service() {
return new MessageHandler() {
#Override
public void handleMessage(Message<?> message) throws MessagingException {
List<Class> csvDataAsListOfPojo = List < Class > message.getPayload();
// use this
}
};
}
#Bean
public ExpressionEvaluatingRequestHandlerAdvice after() {
ExpressionEvaluatingRequestHandlerAdvice advice = new ExpressionEvaluatingRequestHandlerAdvice();
advice.setSuccessChannelName("success.input");
advice.setOnSuccessExpressionString("payload + ' was successful'");
advice.setFailureChannelName("failure.input");
advice.setOnFailureExpressionString("payload + ' was bad, with reason: ' + #exception.cause.message");
advice.setTrapException(true);
return advice;
}
#Bean
public IntegrationFlow success() {
return f -> f.handle(System.out::println);
}
#Bean
public IntegrationFlow failure() {
return f -> f.handle(System.out::println);
}
Updated Code
For complex scenarios (list, move, fetch, remove, etc), you should use SFTP remote file gateways instead.
The SFTP outbound gateway provides a limited set of commands that let you interact with a remote SFTP server:
ls (list files)
nlst (list file names)
get (retrieve a file)
mget (retrieve multiple files)
rm (remove file(s))
mv (move and rename file)
put (send a file)
mput (send multiple files)
Or use the SftpRemoteFileTemplate directly from your code.
EDIT
In response to your comments; you need something like this
Inbound Channel Adapter (with poller) - returns directory name
LS Gateway
Filter (remove any files already fetched)
Transformer (sort the list)
Splitter
GET Gateway(stream option)
Transformer (csv to POJO)
Service (process POJO)
If you add
RM Gateway
after your service (to remove the remote file), you don't need the filter step.
You might find the Java DSL simpler to assemble this flow...
// Sketch of the recommended flow; the "..." placeholders are to be filled in
// with the LS/GET gateway specs shown earlier.
@Bean
public IntegrationFlow flow() {
    return IntegrationFlows.from(() -> "some/dir", e -> e.poller(...))
            .handle(...) // LS Gateway
            .filter(...)
            .transform(sorter())
            .split() // was '.split' — the method call needs parentheses
            .handle(...) // GET Gateway
            .transform(csvToPojoTransformer())
            .handle(myService())
            .get(); // terminating semicolon was missing
}

How to route using message headers in Spring Integration DSL Tcp

I have 2 server side services and I would like route messages to them using message headers, where remote clients put service identification into field type.
Is the code snippet, from server side config, the correct way? It throws cast exception indicating that route() see only payload, but not the message headers. Also all example in the Spring Integration manual shows only payload based decisioning.
#Bean
public IntegrationFlow serverFlow( // common flow for all my services, currently 2
TcpNetServerConnectionFactory serverConnectionFactory,
HeartbeatServer heartbeatServer,
FeedServer feedServer) {
return IntegrationFlows
.from(Tcp.inboundGateway(serverConnectionFactory))
.<Message<?>, String>route((m) -> m.getHeaders().get("type", String.class),
(routeSpec) -> routeSpec
.subFlowMapping("hearbeat", subflow -> subflow.handle(heartbeatServer::processRequest))
.subFlowMapping("feed", subflow -> subflow.handle(feedServer::consumeFeed)))
.get();
}
Client side config:
#Bean
public IntegrationFlow heartbeatClientFlow(
TcpNetClientConnectionFactory clientConnectionFactory,
HeartbeatClient heartbeatClient) {
return IntegrationFlows.from(heartbeatClient::send, e -> e.poller(Pollers.fixedDelay(Duration.ofSeconds(5))))
.enrichHeaders(c -> c.header("type", "heartbeat"))
.log()
.handle(outboundGateway(clientConnectionFactory))
.handle(heartbeatClient::receive)
.get();
}
#Bean
public IntegrationFlow feedClientFlow(
TcpNetClientConnectionFactory clientConnectionFactory) {
return IntegrationFlows.from(FeedClient.MessageGateway.class)
.enrichHeaders(c -> c.header("type", "feed"))
.log()
.handle(outboundGateway(clientConnectionFactory))
.get();
}
And as usual here is the full demo project code, ClientConfig and ServerConfig.
There is no standard way to send headers over raw TCP. You need to encode them into the payload somehow (and extract them on the server side).
The framework provides a mechanism to do this for you, but it requires extra configuration.
See the documentation.
Specifically...
The MapJsonSerializer uses a Jackson ObjectMapper to convert between a Map and JSON. You can use this serializer in conjunction with a MessageConvertingTcpMessageMapper and a MapMessageConverter to transfer selected headers and the payload in JSON.
I'll try to find some time to create an example of how to use it.
But, of course, you can roll your own encoding/decoding.
EDIT
Here's an example configuration to use JSON to convey message headers over TCP...
#SpringBootApplication
public class TcpWithHeadersApplication {
public static void main(String[] args) {
SpringApplication.run(TcpWithHeadersApplication.class, args);
}
// Client side
public interface TcpExchanger {
public String exchange(String data, #Header("type") String type);
}
#Bean
public IntegrationFlow client(#Value("${tcp.port:1234}") int port) {
return IntegrationFlows.from(TcpExchanger.class)
.handle(Tcp.outboundGateway(Tcp.netClient("localhost", port)
.deserializer(jsonMapping())
.serializer(jsonMapping())
.mapper(mapper())))
.get();
}
// Server side
#Bean
public IntegrationFlow server(#Value("${tcp.port:1234}") int port) {
return IntegrationFlows.from(Tcp.inboundGateway(Tcp.netServer(port)
.deserializer(jsonMapping())
.serializer(jsonMapping())
.mapper(mapper())))
.log(Level.INFO, "exampleLogger", "'Received type header:' + headers['type']")
.route("headers['type']", r -> r
.subFlowMapping("upper",
subFlow -> subFlow.transform(String.class, p -> p.toUpperCase()))
.subFlowMapping("lower",
subFlow -> subFlow.transform(String.class, p -> p.toLowerCase())))
.get();
}
// Common
#Bean
public MessageConvertingTcpMessageMapper mapper() {
MapMessageConverter converter = new MapMessageConverter();
converter.setHeaderNames("type");
return new MessageConvertingTcpMessageMapper(converter);
}
#Bean
public MapJsonSerializer jsonMapping() {
return new MapJsonSerializer();
}
// Console
#Bean
#DependsOn("client")
public ApplicationRunner runner(TcpExchanger exchanger,
ConfigurableApplicationContext context) {
return args -> {
System.out.println("Enter some text; if it starts with a lower case character,\n"
+ "it will be uppercased by the server; otherwise it will be lowercased;\n"
+ "enter 'quit' to end");
Scanner scanner = new Scanner(System.in);
String request = scanner.nextLine();
while (!"quit".equals(request.toLowerCase())) {
if (StringUtils.hasText(request)) {
String result = exchanger.exchange(request,
Character.isLowerCase(request.charAt(0)) ? "upper" : "lower");
System.out.println(result);
}
request = scanner.nextLine();
}
scanner.close();
context.close();
};
}
}

Resources