How to use the Kafka 0.8 Log4j appender

I am trying to use the Kafka 0.8 Log4j appender, but I am unable to make it work.
I want my application to send logs directly to Kafka via the Log4j appender.
Here is my log4j.properties.
I couldn't find a proper encoder, so I just configured it to use the default encoder (i.e., I commented out that line).
log4j.rootLogger=INFO, stdout, KAFKA
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%5p [%t] (%F:%L) - %m%n
log4j.appender.KAFKA=kafka.producer.KafkaLog4jAppender
log4j.appender.KAFKA.layout=org.apache.log4j.PatternLayout
log4j.appender.KAFKA.layout.ConversionPattern=%-5p: %c - %m%n
log4j.appender.KAFKA.BrokerList=hnode01:9092
log4j.appender.KAFKA.Topic=DKTestEvent
#log4j.appender.KAFKA.SerializerClass=kafka.log4j.AppenderStringEncoder
And this is my sample application.
import org.apache.log4j.Logger;
import org.apache.log4j.BasicConfigurator;
import org.apache.log4j.PropertyConfigurator;

public class HelloWorld {
    static Logger logger = Logger.getLogger(HelloWorld.class.getName());

    public static void main(String[] args) {
        PropertyConfigurator.configure(args[0]);
        logger.info("Entering application.");
        logger.debug("Debugging!.");
        logger.info("Exiting application.");
    }
}
I used Maven for compiling, and I included kafka_2.8.2-0.8.0 and log4j_1.2.17 in my pom.xml.
And I am getting this error:
INFO [main] (Logging.scala:67) - Verifying properties
INFO [main] (Logging.scala:67) - Property metadata.broker.list is overridden to hnode01:9092
INFO [main] (Logging.scala:67) - Property serializer.class is overridden to kafka.serializer.StringEncoder
INFO [main] (HelloWorld.java:14) - Entering application.
INFO [main] (HelloWorld.java:14) - Fetching metadata from broker id:0,host:hnode01,port:9092 with correlation id 0 for 1 topic(s) Set(DKTestEvent)
INFO [main] (HelloWorld.java:14) - Fetching metadata from broker id:0,host:hnode01,port:9092 with correlation id 1 for 1 topic(s) Set(DKTestEvent)
INFO [main] (HelloWorld.java:14) - Fetching metadata from broker id:0,host:hnode01,port:9092 with correlation id 2 for 1 topic(s) Set(DKTestEvent)
INFO [main] (HelloWorld.java:14) - Fetching metadata from broker id:0,host:hnode01,port:9092 with correlation id 3 for 1 topic(s) Set(DKTestEvent)
INFO [main] (HelloWorld.java:14) - Fetching metadata from broker id:0,host:hnode01,port:9092 with correlation id 4 for 1 topic(s) Set(DKTestEvent)
INFO [main] (HelloWorld.java:14) - Fetching metadata from broker id:0,host:hnode01,port:9092 with correlation id 5 for 1 topic(s) Set(DKTestEvent)
.
.
.
INFO [main] (HelloWorld.java:14) - Fetching metadata from broker id:0,host:hnode01,port:9092 with correlation id 60 for 1 topic(s) Set(DKTestEvent)
INFO [main] (HelloWorld.java:14) - Fetching metadata from broker id:0,host:hnode01,port:9092 with correlation id 61 for 1 topic(s) Set(DKTestEvent)
INFO [main] (HelloWorld.java:14) - Fetching metadata from broker id:0,host:hnode01,port:9092 with correlation id 62 for 1 topic(s) Set(DKTestEvent)
INFO [main] (Logging.scala:67) - Fetching metadata from broker id:0,host:hnode01,port:9092 with correlation id 63 for 1 topic(s) Set(DKTestEvent)
INFO [main] (Logging.scala:67) - Fetching metadata from broker id:0,host:hnode01,port:9092 with correlation id 64 for 1 topic(s) Set(DKTestEvent)
INFO [main] (Logging.scala:67) - Fetching metadata from broker id:0,host:hnode01,port:9092 with correlation id 65 for 1 topic(s) Set(DKTestEvent)
INFO [main] (Logging.scala:67) - Fetching metadata from broker id:0,host:hnode01,port:9092 with correlation id 66 for 1 topic(s) Set(DKTestEvent)
INFO [main] (Logging.scala:67) - Fetching metadata from broker id:0,host:hnode01,port:9092 with correlation id 67 for 1 topic(s) Set(DKTestEvent)
.
.
.
INFO [main] (Logging.scala:67) - Fetching metadata from broker id:0,host:hnode01,port:9092 with correlation id 534 for 1 topic(s) Set(DKTestEvent)
ERROR [main] (Logging.scala:67) -
ERROR [main] (Logging.scala:67) -
ERROR [main] (Logging.scala:67) -
ERROR [main] (Logging.scala:67) -
ERROR [main] (Logging.scala:67) -
ERROR [main] (Logging.scala:67) -
java.lang.StackOverflowError
at java.lang.ClassLoader.defineClass1(Native Method)
at java.lang.ClassLoader.defineClass(ClassLoader.java:643)
at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
at java.net.URLClassLoader.defineClass(URLClassLoader.java:277)
at java.net.URLClassLoader.access$000(URLClassLoader.java:73)
at java.net.URLClassLoader$1.run(URLClassLoader.java:212)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:205)
at java.lang.ClassLoader.loadClass(ClassLoader.java:323)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:294)
at java.lang.ClassLoader.loadClass(ClassLoader.java:268)
at java.lang.ClassLoader.defineClass1(Native Method)
at java.lang.ClassLoader.defineClass(ClassLoader.java:643)
at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
at java.net.URLClassLoader.defineClass(URLClassLoader.java:277)
at java.net.URLClassLoader.access$000(URLClassLoader.java:73)
at java.net.URLClassLoader$1.run(URLClassLoader.java:212)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:205)
at java.lang.ClassLoader.loadClass(ClassLoader.java:323)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:294)
at java.lang.ClassLoader.loadClass(ClassLoader.java:268)
at org.apache.log4j.spi.ThrowableInformation.getThrowableStrRep(ThrowableInformation.java:87)
at org.apache.log4j.spi.LoggingEvent.getThrowableStrRep(LoggingEvent.java:413)
at org.apache.log4j.WriterAppender.subAppend(WriterAppender.java:313)
at org.apache.log4j.WriterAppender.append(WriterAppender.java:162)
at org.apache.log4j.AppenderSkeleton.doAppend(AppenderSkeleton.java:251)
at org.apache.log4j.helpers.AppenderAttachableImpl.appendLoopOnAppenders(AppenderAttachableImpl.java:66)
at org.apache.log4j.Category.callAppenders(Category.java:206)
at org.apache.log4j.Category.forcedLog(Category.java:391)
at org.apache.log4j.Category.error(Category.java:322)
at kafka.utils.Logging$$anonfun$swallowError$1.apply(Logging.scala:105)
at kafka.utils.Logging$$anonfun$swallowError$1.apply(Logging.scala:105)
at kafka.utils.Utils$.swallow(Utils.scala:189)
at kafka.utils.Logging$class.swallowError(Logging.scala:105)
at kafka.utils.Utils$.swallowError(Utils.scala:46)
at kafka.producer.async.DefaultEventHandler.handle(DefaultEventHandler.scala:67)
at kafka.producer.Producer.send(Producer.scala:76)
at kafka.producer.KafkaLog4jAppender.append(KafkaLog4jAppender.scala:96)
at org.apache.log4j.AppenderSkeleton.doAppend(AppenderSkeleton.java:251)
at org.apache.log4j.helpers.AppenderAttachableImpl.appendLoopOnAppenders(AppenderAttachableImpl.java:66)
at org.apache.log4j.Category.callAppenders(Category.java:206)
at org.apache.log4j.Category.forcedLog(Category.java:391)
at org.apache.log4j.Category.info(Category.java:666)
at kafka.utils.Logging$class.info(Logging.scala:67)
at kafka.client.ClientUtils$.info(ClientUtils.scala:31)
at kafka.client.ClientUtils$.fetchTopicMetadata(ClientUtils.scala:51)
at kafka.producer.BrokerPartitionInfo.updateInfo(BrokerPartitionInfo.scala:82)
at kafka.producer.async.DefaultEventHandler$$anonfun$handle$1.apply$mcV$sp(DefaultEventHandler.scala:67)
at kafka.utils.Utils$.swallow(Utils.scala:187)
at kafka.utils.Logging$class.swallowError(Logging.scala:105)
at kafka.utils.Utils$.swallowError(Utils.scala:46)
at kafka.producer.async.DefaultEventHandler.handle(DefaultEventHandler.scala:67)
at kafka.producer.Producer.send(Producer.scala:76)
at kafka.producer.KafkaLog4jAppender.append(KafkaLog4jAppender.scala:96)
at org.apache.log4j.AppenderSkeleton.doAppend(AppenderSkeleton.java:251)
at org.apache.log4j.helpers.AppenderAttachableImpl.appendLoopOnAppenders(AppenderAttachableImpl.java:66)
.
.
.
I keep getting the above error continuously unless I terminate the program.
If I am missing something, kindly let me know.

I think Jonas has identified the problem: the Kafka producer's own logging is also being logged to the Kafka appender, causing an infinite loop and an eventual stack overflow (no pun intended).
You can configure all Kafka logs to go to a different appender. The following shows sending the output to stdout:
log4j.logger.kafka=INFO, stdout
So you should end up with the following in your log4j.properties:
log4j.rootLogger=INFO, stdout, KAFKA
log4j.logger.kafka=INFO, stdout
log4j.logger.HelloWorld=INFO, KAFKA
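One caveat worth adding (my assumption based on standard log4j 1.x additivity rules, not something the answer above states): the kafka logger is still additive by default, so its events also propagate up to the root logger and from there back into the KAFKA appender. If the loop persists, switch additivity off for that logger:
log4j.logger.kafka=INFO, stdout
# stop kafka's own events from propagating to the root logger's KAFKA appender
log4j.additivity.kafka=false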

I have been able to generate events via log4j in Kafka 0.8.2.2. Here is my log4j configuration:
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd">
<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/">
    <appender name="console" class="org.apache.log4j.ConsoleAppender">
        <param name="Target" value="System.out" />
        <layout class="org.apache.log4j.PatternLayout">
            <param name="ConversionPattern" value="%-5p %c{1} - %m%n" />
        </layout>
    </appender>
    <appender name="fileAppender" class="org.apache.log4j.RollingFileAppender">
        <param name="Threshold" value="INFO" />
        <param name="MaxBackupIndex" value="100" />
        <param name="File" value="/tmp/agna-LogFile.log" />
        <layout class="org.apache.log4j.PatternLayout">
            <param name="ConversionPattern" value="%d %-5p [%c{1}] %m %n" />
        </layout>
    </appender>
    <appender name="kafkaAppender" class="kafka.producer.KafkaLog4jAppender">
        <param name="Topic" value="kafkatopic" />
        <param name="BrokerList" value="localhost:9092" />
        <param name="syncSend" value="true" />
        <layout class="org.apache.log4j.PatternLayout">
            <param name="ConversionPattern" value="%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L %% - %m%n" />
        </layout>
    </appender>
    <logger name="org.apache.kafka">
        <level value="error" />
        <appender-ref ref="console" />
    </logger>
    <logger name="com.example.kafkaLogger">
        <level value="debug" />
        <appender-ref ref="kafkaAppender" />
    </logger>
    <root>
        <priority value="debug" />
        <appender-ref ref="console" />
    </root>
</log4j:configuration>
Here is the source code:
package com.example;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import java.util.Properties;
import java.util.concurrent.ExecutionException;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

public class JsonProducer {

    static Logger defaultLogger = LoggerFactory.getLogger(JsonProducer.class);
    static Logger kafkaLogger = LoggerFactory.getLogger("com.example.kafkaLogger");

    public static void main(String args[]) {
        JsonProducer obj = new JsonProducer();
        String str = obj.getJsonObjAsString();
        // Use the logger
        kafkaLogger.info(str);
        try {
            // Construct and send message
            obj.constructAndSendMessage();
        } catch (InterruptedException e) {
            defaultLogger.error("Caught interrupted exception " + e);
        } catch (ExecutionException e) {
            defaultLogger.error("Caught execution exception " + e);
        }
    }

    private String getJsonObjAsString() {
        JSONObject obj = new JSONObject();
        obj.put("name", "John");
        obj.put("age", new Integer(55));
        obj.put("address", "123 MainSt, Palatine, IL");
        JSONArray list = new JSONArray();
        list.add("msg 1");
        list.add("msg 2");
        list.add("msg 3");
        obj.put("messages", list);
        return obj.toJSONString();
    }

    private void constructAndSendMessage() throws InterruptedException, ExecutionException {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        KafkaProducer<String, String> producer = new KafkaProducer<String, String>(props);
        boolean sync = false;
        String topic = "kafkatopic";
        String key = "mykey";
        String value = "myvalue1 mayvalue2 myvalue3";
        ProducerRecord<String, String> producerRecord = new ProducerRecord<String, String>(topic, key, value);
        if (sync) {
            producer.send(producerRecord).get();
        } else {
            producer.send(producerRecord);
        }
        producer.close();
    }
}
The whole project is available at the following link:
https://github.com/ypant/kafka-json-producer.git
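For reference, a sketch of the pom.xml dependencies such a setup needs. The exact coordinates are my assumption (the log4j appender moved into its own module around 0.8.2), so treat the linked repository's pom.xml as authoritative:
<!-- new-style producer used by JsonProducer (assumed coordinates) -->
<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka-clients</artifactId>
    <version>0.8.2.2</version>
</dependency>
<!-- provides kafka.producer.KafkaLog4jAppender referenced in the log4j XML -->
<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka-log4j-appender</artifactId>
    <version>0.8.2.2</version>
</dependency>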

Try to set the appender to async, like this:
log4j.appender.KAFKA.ProducerType=async
It seems reasonable that it goes into an infinite loop, because the Kafka producer does logging itself.
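Applied to the question's properties file, the appender section would then read (a sketch; untested):
log4j.appender.KAFKA=kafka.producer.KafkaLog4jAppender
log4j.appender.KAFKA.layout=org.apache.log4j.PatternLayout
log4j.appender.KAFKA.layout.ConversionPattern=%-5p: %c - %m%n
log4j.appender.KAFKA.BrokerList=hnode01:9092
log4j.appender.KAFKA.Topic=DKTestEvent
# send messages from a background thread instead of the logging thread
log4j.appender.KAFKA.ProducerType=async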

Related

Log4j2 - LogManager.getLogger("name") not finding custom loggers

All of the following calls return the same logger, namely the root logger for the class this code is in. I would have expected them all to be different, and to get my three custom loggers back from the first three calls.
Note the files specified by the appenders are created, but no logs are sent to them.
Logger _l = (Logger) LogManager.getLogger("Global");
_l = (Logger) LogManager.getLogger("fakeswitch");
_l = (Logger) LogManager.getLogger("fakeswitch_two");
_l = (Logger) LogManager.getLogger();
I create custom loggers using the following:
ComponentBuilder triggeringPolicy = configurationBuilder.newComponent("Policies")
        .addComponent(configurationBuilder.newComponent("TimeBasedTriggeringPolicy")
                .addAttribute("interval", "1"));

AppenderComponentBuilder log4jFileAppenderBuilder = configurationBuilder
        .newAppender(pName + "_SmdrDailyRollingFileAppender", "RollingFile");
log4jFileAppenderBuilder.addAttribute("filename", pLogFilename);
log4jFileAppenderBuilder.addAttribute("filePattern", pLogFilenamePattern);
log4jFileAppenderBuilder.addComponent(triggeringPolicy);

// Configure the PatternLayout
LayoutComponentBuilder layoutComponentBuilder = configurationBuilder.newLayout("PatternLayout")
        .addAttribute("pattern", DEBUG_PATTERN_LAYOUT_STRING);
log4jFileAppenderBuilder.add(layoutComponentBuilder);

// Add it back into the configuration
configurationBuilder.add(log4jFileAppenderBuilder);

// https://logging.apache.org/log4j/2.x/manual/customconfig.html
LoggerComponentBuilder logger = configurationBuilder.newLogger(pName, Level.DEBUG);
logger.add(configurationBuilder.newAppenderRef(pName + "_SmdrDailyRollingFileAppender"));
logger.addAttribute("additivity", false);
configurationBuilder.add(logger);

// Actually use it
LoggerContext _loggerContext = Configurator.initialize(configurationBuilder.build());
The equivalent XML from writeXmlConfiguration is:
<?xml version="1.0" ?>
<Configuration>
<Appenders>
<RollingFile name="Global_SmdrDailyRollingFileAppender" filename="ps/debug/SMDR_DEBUG.txt"
filePattern="ps/debug/SMDR_DEBUG_%d{yyyyMMdd}.txt.gz">
<Policies>
<TimeBasedTriggeringPolicy interval="1"/>
</Policies>
<PatternLayout pattern="%d{MM.DD.yy-HH:mm:ss} %m%n"/>
</RollingFile>
<RollingFile name="fakeswitch_SmdrDailyRollingFileAppender" filename="ps/debug/SMDR_DEBUG_fakeswitch.txt"
filePattern="ps/debug/SMDR_DEBUG_fakeswitch_%d{yyyyMMdd}.txt.gz">
<Policies>
<TimeBasedTriggeringPolicy interval="1"/>
</Policies>
<PatternLayout pattern="%d{MM.DD.yy-HH:mm:ss} %m%n"/>
</RollingFile>
<RollingFile name="fakeswitch_two_SmdrDailyRollingFileAppender"
filename="ps/debug/SMDR_DEBUG_fakeswitch_two.txt"
filePattern="ps/debug/SMDR_DEBUG_fakeswitch_two_%d{yyyyMMdd}.txt.gz">
<Policies>
<TimeBasedTriggeringPolicy interval="1"/>
</Policies>
<PatternLayout pattern="%d{MM.DD.yy-HH:mm:ss} %m%n"/>
</RollingFile>
</Appenders>
<Loggers>
<Logger name="Global" level="DEBUG" additivity="false">
<AppenderRef ref="Global_SmdrDailyRollingFileAppender"/>
</Logger>
<Logger name="fakeswitch" level="DEBUG" additivity="false">
<AppenderRef ref="fakeswitch_SmdrDailyRollingFileAppender"/>
</Logger>
<Logger name="fakeswitch_two" level="DEBUG" additivity="false">
<AppenderRef ref="fakeswitch_two_SmdrDailyRollingFileAppender"/>
</Logger>
</Loggers>
</Configuration>
This was answered for me by Piotr P. Karwasz:
This only works if the LoggerContext has not been initialized yet. Since every call to a LogManager method initializes a LoggerContext, it is almost certainly too late to use Configurator.initialize. Use Configurator.reconfigure instead, which works in all cases.
My code change was from:
LoggerContext _loggerContext = Configurator.initialize(configurationBuilder.build());
to:
Configurator.reconfigure(configurationBuilder.build());
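Putting that together, a minimal sketch of the fixed sequence (same builder code as above; the only change is reconfiguring the already-initialized context instead of calling initialize):
// Reconfigure the current LoggerContext with the built configuration.
Configurator.reconfigure(configurationBuilder.build());

// These lookups now resolve to the custom loggers from the configuration.
Logger _l = (Logger) LogManager.getLogger("Global");
_l = (Logger) LogManager.getLogger("fakeswitch");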

log4j2 RollingFileAppender old file gets removed after 7 rollovers

I use the following Log4j2 RollingFile appender in my webapp.
<Appenders>
<RollingFile name="logFile"
fileName="${env:SYSTEM_LOGS}/${env:LOG_FILE_NAME}.log" immediateFlush="true"
filePattern="${env:SYSTEM_LOGS}/${env:LOG_FILE_NAME}.log.%d{yyyy_MM_dd.HH_mm_ss}.%i">
<PatternLayout pattern="%d{yyyyMMdd-HHmmss.SSS}|%X{username}|%-5p|%t| %-100m (%c{1})%n"/>
<Policies>
<OnStartupTriggeringPolicy/>
</Policies>
</RollingFile>
</Appenders>
With filePattern="${env:SYSTEM_LOGS}/${env:LOG_FILE_NAME}.log.%d{yyyy_MM_dd.HH_mm_ss}.%i", when log is rolled over, old file gets renamed to a filename with an index number (specified with %i), so all old files should get renamed and should be preserved.
I rollover the log programmatically with following code.
org.apache.logging.log4j.Logger logManagerLogger = LogManager.getLogger();
Map<String, org.apache.logging.log4j.core.Appender> appenders =
        ((org.apache.logging.log4j.core.Logger) logManagerLogger).getAppenders();
appenders.forEach((appenderName, appender) -> {
    if (appender instanceof RollingFileAppender) {
        LOGGER.info("Switching log for appender " + appenderName);
        ((RollingFileAppender) appender).getManager().rollover();
    }
});
But after 7 rollovers, the existing file gets removed (not renamed according to the specified filePattern) and logging continues in a new file.
What could be the issue here?
Set DefaultRolloverStrategy (the default max is 7). Your config will be:
<Appenders>
<RollingFile name="logFile"
fileName="${env:SYSTEM_LOGS}/${env:LOG_FILE_NAME}.log" immediateFlush="true"
filePattern="${env:SYSTEM_LOGS}/${env:LOG_FILE_NAME}.log.%d{yyyy_MM_dd.HH_mm_ss}.%i">
<PatternLayout pattern="%d{yyyyMMdd-HHmmss.SSS}|%X{username}|%-5p|%t| %-100m (%c{1})%n"/>
<Policies>
<OnStartupTriggeringPolicy/>
</Policies>
<DefaultRolloverStrategy max="100"/>
</RollingFile>
</Appenders>
Now it will keep up to 100 rolled-over log files.
If you want unlimited rolling files: according to the Log4j2 documentation, from release 2.8 this can be done by setting the fileIndex attribute to nomax. For example:
<DefaultRolloverStrategy fileIndex="nomax" />
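Integrated into the question's appender, the unlimited variant would look like this (a sketch; only the strategy element is new relative to the original config):
<RollingFile name="logFile"
             fileName="${env:SYSTEM_LOGS}/${env:LOG_FILE_NAME}.log" immediateFlush="true"
             filePattern="${env:SYSTEM_LOGS}/${env:LOG_FILE_NAME}.log.%d{yyyy_MM_dd.HH_mm_ss}.%i">
    <PatternLayout pattern="%d{yyyyMMdd-HHmmss.SSS}|%X{username}|%-5p|%t| %-100m (%c{1})%n"/>
    <Policies>
        <OnStartupTriggeringPolicy/>
    </Policies>
    <!-- nomax: never prune old files; the %i index grows without bound -->
    <DefaultRolloverStrategy fileIndex="nomax"/>
</RollingFile>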

Mule - JMS (ActiveMQ) Reconnection

This is my Mule flow 1:
HTTP > Payload String > Logger > JMS /normalqueue
The first flow has error handling:
File (write a file per message handled)
Flow 2:
JMS /normalqueue > Logger
Recovery flow (invoked with a Groovy script):
File (read file) > File to String > Flow reference (to first flow again)
This is the XML from Mule:
<http:listener-config name="HTTP_Listener_Configuration" host="0.0.0.0" port="8081" doc:name="HTTP Listener Configuration"/>
<jms:activemq-connector name="Active_MQ" username="admin" password="admin" brokerURL="tcp://192.168.198.131:61616" validateConnections="true" doc:name="Active MQ" persistentDelivery="true">
<reconnect blocking="false" frequency="6000"/>
</jms:activemq-connector>
<file:connector name="File" writeToDirectory="C:\errors" autoDelete="true" streaming="true" validateConnections="true" doc:name="File"/>
<flow name="lab-file-catchFlow">
<http:listener config-ref="HTTP_Listener_Configuration" path="/" doc:name="HTTP"/>
<set-payload value="#[message.payloadAs(java.lang.String)]" doc:name="Set Payload"/>
<logger message="Started message: #[message.payloadAs(java.lang.String)]" level="INFO" doc:name="Logger"/>
<jms:outbound-endpoint queue="activemq" connector-ref="Active_MQ" doc:name="JMS">
<jms:transaction action="ALWAYS_BEGIN"/>
</jms:outbound-endpoint>
<catch-exception-strategy doc:name="Catch Exception Strategy">
<file:outbound-endpoint path="C:\errors" connector-ref="File" responseTimeout="10000" doc:name="File"/>
</catch-exception-strategy>
</flow>
<flow name="flow-recovery" initialState="stopped" processingStrategy="synchronous">
<file:inbound-endpoint path="C:\errors" connector-ref="File" responseTimeout="10000" doc:name="File"/>
<file:file-to-string-transformer doc:name="File to String"/>
<logger message=" Recovery message: #[message.payloadAs(java.lang.String)]" level="ERROR" doc:name="Logger"/>
<flow-ref name="lab-file-catchFlow" doc:name="Flow Reference"/>
</flow>
<flow name="lab-file-catchFlow2" processingStrategy="synchronous">
<jms:inbound-endpoint queue="activemq" connector-ref="Active_MQ" doc:name="JMS"/>
<logger message="#[message.payloadAs(java.lang.String)]" level="INFO" doc:name="Logger"/>
</flow>
<flow name="lab-file-catchFlow1" >
<http:listener config-ref="HTTP_Listener_Configuration" path="/modify" doc:name="HTTP"/>
<scripting:component doc:name="Groovy">
<scripting:script engine="Groovy"><![CDATA[ if(muleContext.registry.lookupFlowConstruct('flow-recovery').isStopped())
{
muleContext.registry.lookupFlowConstruct('flow-recovery').start();
return 'Started';
} else
{
muleContext.registry.lookupFlowConstruct('flow-recovery').stop();
return 'Stopped';
}]]></scripting:script>
</scripting:component>
<set-payload value="#[message.payloadAs(java.lang.String)]" doc:name="Set Payload"/>
<logger message="#[message.payloadAs(java.lang.String)]" level="INFO" doc:name="Logger"/>
</flow>
When I stop the ActiveMQ service, a file is stored with the messages from the error handling, and I receive the typical error:
Cannot process event as "Active_MQ" is stopped
Then I run the ActiveMQ service again and start the recovery flow with a Groovy script. That flow recovers all messages, converts them to strings, and returns them to the first flow to be requeued.
The problem is that Mule doesn't detect when the service is running again; I need to restart the Mule project for it to notice.
Is there any way to auto-detect, with Mule, when ActiveMQ is running again?
With <reconnect-forever/>, Mule will keep retrying to connect to ActiveMQ:
<jms:activemq-connector name="Active_MQ" username="admin" password="admin" brokerURL="tcp://192.168.198.131:61616" validateConnections="true" doc:name="Active MQ" persistentDelivery="true">
<reconnect-forever/>
</jms:activemq-connector>
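A sketch of this merged into the question's own connector; the frequency attribute (retry interval in milliseconds) is my assumption from the Mule 3 retry-policy schema, so verify it against your Mule version:
<jms:activemq-connector name="Active_MQ" username="admin" password="admin"
    brokerURL="tcp://192.168.198.131:61616" validateConnections="true"
    doc:name="Active MQ" persistentDelivery="true">
    <!-- keep retrying every 6 seconds until the broker is reachable again -->
    <reconnect-forever frequency="6000"/>
</jms:activemq-connector>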

spring integration-sftp inbound adapter with polling facility at server startup

I am trying to SFTP a file with Spring Integration in a Maven web project.
I need a polling facility. If I start SftpInbound.java manually, the polling works. I need the polling to start at server startup.
The content of the Java file and the configuration:
SftpInbound.java
package com.myproj.integration.bsy.sftp;
import java.io.File;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import org.springframework.integration.endpoint.SourcePollingChannelAdapter;
import org.springframework.integration.file.remote.RemoteFileTemplate;
import org.springframework.integration.file.remote.session.CachingSessionFactory;
import org.springframework.integration.file.remote.session.SessionFactory;
import org.springframework.messaging.Message;
import org.springframework.messaging.PollableChannel;
import org.springframework.scheduling.annotation.Scheduled;
import com.myproj.integration.bsy.sftp.*;
import com.jcraft.jsch.ChannelSftp.LsEntry;
public class SftpInboundReceive {

    @Scheduled(fixedRate = 5000)
    public void inboundSftpPoll() {
        ConfigurableApplicationContext context =
                new ClassPathXmlApplicationContext("/META-INF/spring/integration/sftp/SftpInboundReceive-context.xml", this.getClass());
        RemoteFileTemplate<LsEntry> template = null;
        String file1 = "a.txt";
        String file2 = "b.txt";
        String file3 = "c.bar";
        new File("local-dir", file1).delete();
        new File("local-dir", file2).delete();
        try {
            PollableChannel localFileChannel = context.getBean("receiveChannel", PollableChannel.class);
            @SuppressWarnings("unchecked")
            SessionFactory<LsEntry> sessionFactory = context.getBean(CachingSessionFactory.class);
            template = new RemoteFileTemplate<LsEntry>(sessionFactory);
            System.out.println("here 1" + template);
            SourcePollingChannelAdapter adapter = context.getBean("sftpInbondAdapter", SourcePollingChannelAdapter.class);
            adapter.start();
            Message<?> received = localFileChannel.receive();
            System.out.println("Received first file message 1: " + received);
            received = localFileChannel.receive();
            System.out.println("Received second file message: " + received);
            received = localFileChannel.receive(1000);
            System.out.println("Third file was received as expected" + received);
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            SftpUtils.cleanUp(template, file1, file2, file3);
            //context.close();
        }
    }

    public static void main(String args[]) {
        SftpInboundReceive oInboundReceiveSample = new SftpInboundReceive();
        oInboundReceiveSample.inboundSftpPoll();
    }
}
The XML file SftpInboundReceive-context.xml:
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:int="http://www.springframework.org/schema/integration"
xmlns:int-sftp="http://www.springframework.org/schema/integration/sftp"
xmlns:context="http://www.springframework.org/schema/context"
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-4.1.xsd
http://www.springframework.org/schema/integration http://www.springframework.org/schema/integration/spring-integration-4.1.xsd
http://www.springframework.org/schema/integration/sftp http://www.springframework.org/schema/integration/sftp/spring-integration-sftp-4.1.xsd
http://www.springframework.org/schema/context
http://www.springframework.org/schema/context/spring-context-4.1.xsd">
<!-- <import resource="SftpSampleCommon.xml"/> -->
<context:property-placeholder order="1"
location="classpath:/sftpuser.properties" ignore-unresolvable="true"/>
<bean id="sftpSessionFactory"
class="org.springframework.integration.file.remote.session.CachingSessionFactory">
<constructor-arg ref="defaultSftpSessionFactory" />
</bean>
<!-- host=xxx.xx.128.143 port=22 username=xxxuser passphrase= private.keyfile=classpath:META-INF/keys/sftp_rsa -->
<bean id="defaultSftpSessionFactory"
class="org.springframework.integration.sftp.session.DefaultSftpSessionFactory">
<property name="host" value="${sftp.host}" />
<property name="port" value="${sftp.port}" />
<property name="user" value="${sftp.username}" />
<property name="privateKey" value="${private.keyfile}" />
<property name="privateKeyPassphrase" value="${passphrase}" />
</bean>
<!-- username & password from property file... tested <bean id="defaultSftpSessionFactory"
class="org.springframework.integration.sftp.session.DefaultSftpSessionFactory">
<property name="host" value="${sftp.host}"/> <property name="port" value="${sftp.port}"/>
<property name="user" value="${sftp.username}"/> <property name="password"
value="${sftp.password}"/> </bean> -->
<!-- hardcoded, username & password... tested <bean id="defaultSftpSessionFactory"
class="org.springframework.integration.sftp.session.DefaultSftpSessionFactory">
<property name="host" value="xxx.xx.128.143"/> <property name="port" value="22"/>
<property name="user" value="xxxuser"/> <property name="password" value="xxxuser#123"/>
</bean> -->
<!-- Inbound channel adapter for SFTP call . with poll facility -->
<int-sftp:inbound-channel-adapter id="sftpInbondAdapter"
auto-startup="true" channel="receiveChannel" session-factory="sftpSessionFactory"
local-directory="file:/target/foo" remote-directory="${sftp.inboundremotedir}"
auto-create-local-directory="true" delete-remote-files="false"
filename-pattern="*.txt">
<int:poller fixed-rate="100000" max-messages-per-poll="1" />
</int-sftp:inbound-channel-adapter>
<int:channel id="receiveChannel">
<int:queue />
</int:channel>
</beans>
Stack trace:
p-bio-8080-exec-3][org.springframework.security.web.FilterChainProxy] / at position 5 of 12 in additional filter chain; firing Filter: 'DefaultLoginPageGeneratingFilter'
16:02:58.368 DEBUG [http-bio-8080-exec-3][org.springframework.security.web.FilterChainProxy] / at position 6 of 12 in additional filter chain; firing Filter: 'BasicAuthenticationFilter'
16:02:58.368 DEBUG [http-bio-8080-exec-3][org.springframework.security.web.FilterChainProxy] / at position 7 of 12 in additional filter chain; firing Filter: 'RequestCacheAwareFilter'
16:02:58.368 DEBUG [http-bio-8080-exec-3][org.springframework.security.web.FilterChainProxy] / at position 8 of 12 in additional filter chain; firing Filter: 'SecurityContextHolderAwareRequestFilter'
16:02:58.370 DEBUG [http-bio-8080-exec-3][org.springframework.security.web.FilterChainProxy] / at position 9 of 12 in additional filter chain; firing Filter: 'AnonymousAuthenticationFilter'
16:02:58.371 DEBUG [http-bio-8080-exec-3][org.springframework.security.web.authentication.AnonymousAuthenticationFilter] Populated SecurityContextHolder with anonymous token: 'org.springframework.security.authentication.AnonymousAuthenticationToken#9055e4a6: Principal: anonymousUser; Credentials: [PROTECTED]; Authenticated: true; Details: org.springframework.security.web.authentication.WebAuthenticationDetails#957e: RemoteIpAddress: 127.0.0.1; SessionId: null; Granted Authorities: ROLE_ANONYMOUS'
16:02:58.371 DEBUG [http-bio-8080-exec-3][org.springframework.security.web.FilterChainProxy] / at position 10 of 12 in additional filter chain; firing Filter: 'SessionManagementFilter'
16:02:58.371 DEBUG [http-bio-8080-exec-3][org.springframework.security.web.FilterChainProxy] / at position 11 of 12 in additional filter chain; firing Filter: 'ExceptionTranslationFilter'
16:02:58.371 DEBUG [http-bio-8080-exec-3][org.springframework.security.web.FilterChainProxy] / at position 12 of 12 in additional filter chain; firing Filter: 'FilterSecurityInterceptor'
16:02:58.371 DEBUG [http-bio-8080-exec-3][org.springframework.security.web.util.matcher.AntPathRequestMatcher] Checking match of request : '/'; against '/services/employee/*'
16:02:58.371 DEBUG [http-bio-8080-exec-3][org.springframework.security.web.access.intercept.FilterSecurityInterceptor] Public object - authentication not attempted
16:02:58.371 TRACE [http-bio-8080-exec-3][org.springframework.web.context.support.XmlWebApplicationContext] Publishing event in Root WebApplicationContext: org.springframework.security.access.event.PublicInvocationEvent[source=FilterInvocation: URL: /]
16:02:58.372 DEBUG [http-bio-8080-exec-3][org.springframework.beans.factory.support.DefaultListableBeanFactory] Returning cached instance of singleton bean 'org.springframework.integration.internalMessagingAnnotationPostProcessor'
16:02:58.372 DEBUG [http-bio-8080-exec-3][org.springframework.security.web.FilterChainProxy] / reached end of additional filter chain; proceeding with original chain
16:02:58.375 TRACE [http-bio-8080-exec-3][org.springframework.web.servlet.DispatcherServlet] Bound request context to thread: SecurityContextHolderAwareRequestWrapper[ org.springframework.security.web.context.HttpSessionSecurityContextRepository$Servlet3SaveToSessionRequestWrapper#1b0b34e]
16:02:58.376 DEBUG [http-bio-8080-exec-3][org.springframework.web.servlet.DispatcherServlet] DispatcherServlet with name 'Information Exchange Gateway Integration' processing GET request for [/DummyDataIntg/]
16:02:58.376 TRACE [http-bio-8080-exec-3][org.springframework.web.servlet.DispatcherServlet] Testing handler map [org.springframework.integration.http.inbound.IntegrationRequestMappingHandlerMapping#cdca7] in DispatcherServlet with name 'Information Exchange Gateway Integration'
16:02:58.378 WARN [http-bio-8080-exec-3][org.springframework.web.servlet.PageNotFound] No mapping found for HTTP request with URI [/DummyDataIntg/] in DispatcherServlet with name 'Information Exchange Gateway Integration'
16:02:58.378 DEBUG [http-bio-8080-exec-3][org.springframework.security.web.context.HttpSessionSecurityContextRepository] SecurityContext is empty or contents are anonymous - context will not be stored in HttpSession.
16:02:58.378 TRACE [http-bio-8080-exec-3][org.springframework.web.servlet.DispatcherServlet] Cleared thread-bound request context: SecurityContextHolderAwareRequestWrapper[ org.springframework.security.web.context.HttpSessionSecurityContextRepository$Servlet3SaveToSessionRequestWrapper#1b0b34e]
16:02:58.378 DEBUG [http-bio-8080-exec-3][org.springframework.web.servlet.DispatcherServlet] Successfully completed request
16:02:58.378 TRACE [http-bio-8080-exec-3][org.springframework.web.context.support.XmlWebApplicationContext] Publishing event in WebApplicationContext for namespace 'Information Exchange Gateway Integration-servlet': ServletRequestHandledEvent: url=[/DummyDataIntg/]; client=[127.0.0.1]; method=[GET]; servlet=[Information Exchange Gateway Integration]; session=[null]; user=[null]; time=[6ms]; status=[OK]
16:02:58.378 TRACE [http-bio-8080-exec-3][org.springframework.web.context.support.XmlWebApplicationContext] Publishing event in Root WebApplicationContext: ServletRequestHandledEvent: url=[/DummyDataIntg/]; client=[127.0.0.1]; method=[GET]; servlet=[Information Exchange Gateway Integration]; session=[null]; user=[null]; time=[6ms]; status=[OK]
16:02:58.378 DEBUG [http-bio-8080-exec-3][org.springframework.beans.factory.support.DefaultListableBeanFactory] Returning cached instance of singleton bean 'org.springframework.integration.internalMessagingAnnotationPostProcessor'
16:02:58.378 DEBUG [http-bio-8080-exec-3][org.springframework.security.web.access.ExceptionTranslationFilter] Chain processed normally
16:02:58.378 DEBUG [http-bio-8080-exec-3][org.springframework.security.web.context.SecurityContextPersistenceFilter] SecurityContextHolder now cleared, as request processing completed
16:04:37.403 INFO [task-scheduler-4][org.springframework.integration.file.FileReadingMessageSource] Created message: [GenericMessage [payload=\target\foo\b.txt, headers={timestamp=1420540477403, id=f8c32928-411b-99b7-f4a0-0dd1b119fc44}]]
16:04:37.403 ERROR [task-scheduler-4][org.springframework.integration.handler.LoggingHandler] \target\foo\b.txt
16:06:17.403 INFO [task-scheduler-9][org.springframework.integration.file.FileReadingMessageSource] Created message: [GenericMessage [payload=\target\foo\brjb.txt, headers={timestamp=1420540577403, id=061634bf-0562-962b-e583-53a302cdb0d4}]]
16:06:17.403 ERROR [task-scheduler-9][org.springframework.integration.handler.LoggingHandler] \target\foo\brjb.txt
16:07:57.403 INFO [task-scheduler-10][org.springframework.integration.file.FileReadingMessageSource] Created message: [GenericMessage [payload=\target\foo\d.txt, headers={timestamp=1420540677403, id=abea1188-fc5a-15b0-c40c-73ea686a88c0}]]
16:07:57.403 ERROR [task-scheduler-10][org.springframework.integration.handler.LoggingHandler] \target\foo\d.txt
16:09:37.403 INFO [task-scheduler-4][org.springframework.integration.file.FileReadingMessageSource] Created message: [GenericMessage [payload=\target\foo\g.txt, headers={timestamp=1420540777403, id=461a8e48-6ebf-5d1c-ab3f-ce28e54e00b8}]]
16:09:37.403 ERROR [task-scheduler-4][org.springframework.integration.handler.LoggingHandler] \target\foo\g.txt
16:11:17.403 INFO [task-scheduler-3][org.springframework.integration.file.FileReadingMessageSource] Created message: [GenericMessage [payload=\target\foo\h.txt, headers={timestamp=1420540877403, id=4cbeeceb-5949-0e1c-1492-45ec17172480}]]
16:11:17.403 ERROR [task-scheduler-3][org.springframework.integration.handler.LoggingHandler] \target\foo\h.txt
16:12:57.403 INFO [task-scheduler-9][org.springframework.integration.file.FileReadingMessageSource] Created message: [GenericMessage [payload=\target\foo\p.txt, headers={timestamp=1420540977403, id=3767b4bb-4de4-3178-f693-5ac0bd94a766}]]
16:12:57.403 ERROR [task-scheduler-9][org.springframework.integration.handler.LoggingHandler] \target\foo\p.txt
16:14:37.403 INFO [task-scheduler-6][org.springframework.integration.file.FileReadingMessageSource] Created message: [GenericMessage [payload=\target\foo\wiki.txt, headers={timestamp=1420541077403, id=bd3d0082-9788-fc31-1596-ab7a79186c17}]]
16:14:37.403 ERROR [task-scheduler-6][org.springframework.integration.handler.LoggingHandler] \target\foo\wiki.txt
Error log in the Java file:
20:25:52.807 ERROR [task-scheduler-1][org.springframework.integration.handler.LoggingHandler] org.springframework.messaging.MessagingException: Problem occurred while synchronizing remote to local directory; nested exception is org.springframework.messaging.MessagingException: Failed to execute on session; nested exception is org.springframework.core.NestedIOException: Failed to list files; nested exception is 2: No such file
at org.springframework.integration.file.remote.synchronizer.AbstractInboundFileSynchronizer.synchronizeToLocalDirectory(AbstractInboundFileSynchronizer.java:209)
at org.springframework.integration.file.remote.synchronizer.AbstractInboundFileSynchronizingMessageSource.doReceive(AbstractInboundFileSynchronizingMessageSource.java:167)
at org.springframework.integration.file.remote.synchronizer.AbstractInboundFileSynchronizingMessageSource.doReceive(AbstractInboundFileSynchronizingMessageSource.java:57)
at org.springframework.integration.endpoint.AbstractMessageSource.receive(AbstractMessageSource.java:64)
at org.springframework.integration.endpoint.SourcePollingChannelAdapter.receiveMessage(SourcePollingChannelAdapter.java:124)
at org.springframework.integration.endpoint.AbstractPollingEndpoint.doPoll(AbstractPollingEndpoint.java:192)
at org.springframework.integration.endpoint.AbstractPollingEndpoint.access$000(AbstractPollingEndpoint.java:55)
at org.springframework.integration.endpoint.AbstractPollingEndpoint$1.call(AbstractPollingEndpoint.java:149)
at org.springframework.integration.endpoint.AbstractPollingEndpoint$1.call(AbstractPollingEndpoint.java:146)
at org.springframework.integration.endpoint.AbstractPollingEndpoint$Poller$1.run(AbstractPollingEndpoint.java:298)
at org.springframework.integration.util.ErrorHandlingTaskExecutor$1.run(ErrorHandlingTaskExecutor.java:52)
at org.springframework.core.task.SyncTaskExecutor.execute(SyncTaskExecutor.java:50)
at org.springframework.integration.util.ErrorHandlingTaskExecutor.execute(ErrorHandlingTaskExecutor.java:49)
at org.springframework.integration.endpoint.AbstractPollingEndpoint$Poller.run(AbstractPollingEndpoint.java:292)
at org.springframework.scheduling.support.DelegatingErrorHandlingRunnable.run(DelegatingErrorHandlingRunnable.java:54)
at org.springframework.scheduling.concurrent.ReschedulingRunnable.run(ReschedulingRunnable.java:81)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
at java.util.concurrent.FutureTask.run(FutureTask.java:262)
at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$201(ScheduledThreadPoolExecutor.java:178)
at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:292)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:744)
Caused by: org.springframework.messaging.MessagingException: Failed to execute on session; nested exception is org.springframework.core.NestedIOException: Failed to list files; nested exception is 2: No such file
at org.springframework.integration.file.remote.RemoteFileTemplate.execute(RemoteFileTemplate.java:343)
at org.springframework.integration.file.remote.synchronizer.AbstractInboundFileSynchronizer.synchronizeToLocalDirectory(AbstractInboundFileSynchronizer.java:167)
... 22 more
Caused by: org.springframework.core.NestedIOException: Failed to list files; nested exception is 2: No such file
at org.springframework.integration.sftp.session.SftpSession.list(SftpSession.java:103)
at org.springframework.integration.sftp.session.SftpSession.list(SftpSession.java:50)
at org.springframework.integration.file.remote.session.CachingSessionFactory$CachedSession.list(CachingSessionFactory.java:205)
at org.springframework.integration.file.remote.synchronizer.AbstractInboundFileSynchronizer$1.doInSession(AbstractInboundFileSynchronizer.java:171)
at org.springframework.integration.file.remote.synchronizer.AbstractInboundFileSynchronizer$1.doInSession(AbstractInboundFileSynchronizer.java:167)
at org.springframework.integration.file.remote.RemoteFileTemplate.execute(RemoteFileTemplate.java:334)
... 23 more
Caused by: 2: No such file
at com.jcraft.jsch.ChannelSftp.throwStatusError(ChannelSftp.java:2846)
at com.jcraft.jsch.ChannelSftp._stat(ChannelSftp.java:2198)
at com.jcraft.jsch.ChannelSftp._stat(ChannelSftp.java:2215)
at com.jcraft.jsch.ChannelSftp.ls(ChannelSftp.java:1565)
at com.jcraft.jsch.ChannelSftp.ls(ChannelSftp.java:1526)
at org.springframework.integration.sftp.session.SftpSession.list(SftpSession.java:91)
... 28 more
You need to take a look at how to start a Spring context from a web application using web.xml, or WebApplicationInitializer in a Servlet 3 environment. In this case the SftpInboundReceive-context.xml can be part of the common ApplicationContext, and the polling facility (<int-sftp:inbound-channel-adapter>) will start automatically on application startup, which happens at server start, when the server loads the web context of your application.
Please read more in the Spring Framework docs: http://projects.spring.io/spring-framework/
Spring Integration is just an EIP extension and follows the same configuration and lifecycle rules.
I see that you are just using the SFTP sample from Spring Integration. You can find samples there for Tomcat and for Spring Boot as well.
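For the web.xml route, a minimal sketch of the bootstrap (standard Spring ContextLoaderListener wiring; the context file location matches the question, the rest you may need to adapt):
<!-- WEB-INF/web.xml -->
<context-param>
    <param-name>contextConfigLocation</param-name>
    <param-value>classpath:/META-INF/spring/integration/sftp/SftpInboundReceive-context.xml</param-value>
</context-param>
<listener>
    <listener-class>org.springframework.web.context.ContextLoaderListener</listener-class>
</listener>
With auto-startup="true" on the <int-sftp:inbound-channel-adapter> (as in the question's XML), polling begins as soon as the container starts the application; the @Scheduled method that builds a new ClassPathXmlApplicationContext on every tick is then unnecessary.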

How to configure log4j in a web application using JBoss 7.1.1?

The steps to configure log4j are:
Step 1.
Create the file: jboss-deployment-structure.xml
<jboss-deployment-structure>
<deployment>
<exclusions>
<module name="org.apache.log4j" slot="main"/>
<module name="org.apache.commons.logging"/>
</exclusions>
</deployment>
</jboss-deployment-structure>
Step 2.
Create the servlet: Log4JInitServlet.java
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import javax.servlet.ServletConfig;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.Log;
public class Log4JInitServlet extends HttpServlet {

    private static final long serialVersionUID = -3677208571865966932L;
    private static final Log log = LogFactory.getLog(Log4JInitServlet.class);

    public Log4JInitServlet() {
    }

    protected void doGet(HttpServletRequest request,
            HttpServletResponse response) throws ServletException, IOException {
        PrintWriter out = response.getWriter();
        out.write("<h1>LogTester Application Version Guide Erasmo Marciano 1.0</h1>");
        out.write("<p>Loading this page generates multiple log events for the it.deinformatica.marciano.logtest category.</p>");
        out.write("<p>Press F5 to reload this web page.</p>");
        out.write("<p>You will find these log levels: debug|fatal|error|trace|info|warn</p>");
        out.close();
        for (int i = 1; i <= 20; i++) {
            log.debug("This is DEBUG message. Event number " + i);
            log.fatal("This is FATAL message. Event number " + i);
            log.info("This is INFO message. Event number " + i);
            log.error("This is ERROR message. Event number " + i);
            log.trace("This is TRACE message. Event number " + i);
            log.warn("This is WARN message. Event number " + i);
        }
    }

    protected void doPost(HttpServletRequest request,
            HttpServletResponse response) throws ServletException, IOException {
        // TODO Auto-generated method stub
    }
}
Step 3.
Create the file log4j.properties:
### set log levels - for more verbose logging change 'info' to 'debug' ###
log4j.rootLogger=info, stdout
### direct log messages to stdout ###
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.Target=System.out
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%d{ABSOLUTE} %5p %c{1}:%L - %m%n
What happens is that it only shows INFO messages and no DEBUG messages. What am I doing wrong, or what should I do to display DEBUG messages with log4j?
Please answer if anyone has had a similar problem and solved it.
I have also faced this problem on JBoss EAP 6, and I have resolved it. My working configuration is as follows:
1. WEB-INF/jboss-deployment-structure.xml file
<?xml version="1.0" encoding="UTF-8"?>
<jboss-deployment-structure>
<deployment>
<exclusions>
<!-- first exclude -->
<module name="javaee.api" />
<module name="org.apache.log4j"/>
<module name="org.slf4j"/>
</exclusions>
<dependencies>
<!-- then include filtered -->
<module name="org.apache.log4j" />
</dependencies>
<exclude-subsystems> <subsystem name="jpa" /> </exclude-subsystems>
</deployment>
</jboss-deployment-structure>
2. resources/log4j.properties file
# Root logger option
log4j.rootLogger=INFO, stdout, INF, DBG, ERR
#---------------------------------------------
# Redirect log messages to a log file
#---------------------------------------------
# Output to the JBoss log directory
logs.dir=${jboss.home}/standalone/log/
logs.fmt.dly=.yyyy-MM-dd
logs.fmt.date=yyyy-MM-dd HH:mm:ss
# Direct log messages to stdout
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.Target=System.out
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
# DEBUG Logs
log4j.appender.DBG.Threshold=DEBUG
log4j.appender.DBG.filter=org.apache.log4j.varia.LevelRangeFilter
#log4j.appender.DBG.filter.LevelMin=DEBUG
log4j.appender.DBG.filter.LevelMax=DEBUG
log4j.appender.DBG.filter.AcceptOnMatch=True
log4j.appender.DBG=org.apache.log4j.DailyRollingFileAppender
log4j.appender.DBG.File=${jboss.server.log.dir}/app-debug-log.log
log4j.appender.DBG.DatePattern=${logs.fmt.dly}
log4j.appender.DBG.layout=org.apache.log4j.EnhancedPatternLayout
log4j.appender.DBG.layout.ConversionPattern=%d{${logs.fmt.date}} %-5p [%c{1}:%L] - %m%n
# INFO Logs
log4j.appender.INF=org.apache.log4j.DailyRollingFileAppender
log4j.appender.INF.File=${jboss.server.log.dir}/app-info-log.log
log4j.appender.INF.DatePattern=${logs.fmt.dly}
log4j.appender.INF.Threshold=INFO
#log4j.appender.INF.filter.LevelMin=INFO
#log4j.appender.INF.filter.LevelMax=INFO
log4j.appender.INF.layout=org.apache.log4j.EnhancedPatternLayout
log4j.appender.INF.layout.ConversionPattern=%d{${logs.fmt.date}} %-5p [%c{1}:%L] - %m%n
# ERROR Logs
log4j.appender.ERR=org.apache.log4j.DailyRollingFileAppender
log4j.appender.ERR.File=${jboss.server.log.dir}/app-err-log.log
log4j.appender.ERR.DatePattern=${logs.fmt.dly}
log4j.appender.ERR.Threshold=ERROR
#log4j.appender.DBG.filter.LevelMin=ERROR
#log4j.appender.DBG.filter.LevelMax=ERROR
log4j.appender.ERR.layout=org.apache.log4j.EnhancedPatternLayout
log4j.appender.ERR.layout.ConversionPattern=%d{${logs.fmt.date}} %-5p [%c{1}:%L] - %m%n
Try to exclude JBoss logging too, and slf4j if you use it.
Remember the xmlns in your XML, and put the file in the WEB-INF folder of your webapp:
<jboss-deployment-structure xmlns="urn:jboss:deployment-structure:1.1">
<deployment>
<exclusions>
<module name="org.apache.log4j" />
<module name="org.slf4j" />
<module name="org.apache.commons.logging"/>
<module name="org.log4j"/>
<module name="org.jboss.logging"/>
</exclusions>
</deployment>
</jboss-deployment-structure>
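Since these exclusions remove the server's own log4j module, the application must bundle its own copy; a minimal Maven dependency (standard coordinates):
<dependency>
    <groupId>log4j</groupId>
    <artifactId>log4j</artifactId>
    <version>1.2.17</version>
</dependency>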
