pubsub-emulator throws errors and the publisher throws "Retry total timeout exceeded before any response was received" when publishing 50k messages - Node.js

Environment details
OS: uname -a => Darwin US_C02WG0GXHV2V 17.7.0 Darwin Kernel Version 17.7.0: Thu Jan 23 07:05:23 PST 2020; root:xnu-4570.71.69~1/RELEASE_X86_64 x86_64
Node.js version: node -v => v10.16.2
npm version: npm -v => 6.14.4
@google-cloud/pubsub version: "@google-cloud/pubsub": "^1.7.3"
Description
When I try to publish 50k messages, the pubsub-emulator throws the error below over and over:
Error stack
[pubsub] May 06, 2020 2:47:20 PM io.grpc.netty.NettyServerHandler onStreamError
[pubsub] WARNING: Stream Error
[pubsub] io.netty.handler.codec.http2.Http2Exception$StreamException: Stream closed before write could take place
[pubsub] at io.netty.handler.codec.http2.Http2Exception.streamError(Http2Exception.java:149)
[pubsub] at io.netty.handler.codec.http2.DefaultHttp2RemoteFlowController$FlowState.cancel(DefaultHttp2RemoteFlowController.java:481)
[pubsub] at io.netty.handler.codec.http2.DefaultHttp2RemoteFlowController$1.onStreamClosed(DefaultHttp2RemoteFlowController.java:105)
[pubsub] at io.netty.handler.codec.http2.DefaultHttp2Connection.notifyClosed(DefaultHttp2Connection.java:356)
[pubsub] at io.netty.handler.codec.http2.DefaultHttp2Connection$ActiveStreams.removeFromActiveStreams(DefaultHttp2Connection.java:1000)
[pubsub] at io.netty.handler.codec.http2.DefaultHttp2Connection$ActiveStreams.deactivate(DefaultHttp2Connection.java:956)
[pubsub] at io.netty.handler.codec.http2.DefaultHttp2Connection$DefaultStream.close(DefaultHttp2Connection.java:512)
[pubsub] at io.netty.handler.codec.http2.DefaultHttp2Connection$DefaultStream.close(DefaultHttp2Connection.java:518)
[pubsub] at io.netty.handler.codec.http2.Http2ConnectionHandler.closeStream(Http2ConnectionHandler.java:599)
[pubsub] at io.netty.handler.codec.http2.Http2ConnectionHandler.processRstStreamWriteResult(Http2ConnectionHandler.java:872)
[pubsub] at io.netty.handler.codec.http2.Http2ConnectionHandler.access$1000(Http2ConnectionHandler.java:66)
[pubsub] at io.netty.handler.codec.http2.Http2ConnectionHandler$3.operationComplete(Http2ConnectionHandler.java:796)
[pubsub] at io.netty.handler.codec.http2.Http2ConnectionHandler$3.operationComplete(Http2ConnectionHandler.java:793)
[pubsub] at io.netty.util.concurrent.DefaultPromise.notifyListener0(DefaultPromise.java:502)
[pubsub] at io.netty.util.concurrent.DefaultPromise.notifyListenersNow(DefaultPromise.java:476)
[pubsub] at io.netty.util.concurrent.DefaultPromise.notifyListeners(DefaultPromise.java:415)
[pubsub] at io.netty.util.concurrent.DefaultPromise.setValue0(DefaultPromise.java:540)
[pubsub] at io.netty.util.concurrent.DefaultPromise.setSuccess0(DefaultPromise.java:529)
[pubsub] at io.netty.util.concurrent.DefaultPromise.trySuccess(DefaultPromise.java:101)
[pubsub] at io.netty.util.internal.PromiseNotificationUtil.trySuccess(PromiseNotificationUtil.java:48)
[pubsub] at io.netty.channel.ChannelOutboundBuffer.safeSuccess(ChannelOutboundBuffer.java:703)
[pubsub] at io.netty.channel.ChannelOutboundBuffer.remove(ChannelOutboundBuffer.java:258)
[pubsub] at io.netty.channel.ChannelOutboundBuffer.removeBytes(ChannelOutboundBuffer.java:338)
[pubsub] at io.netty.channel.socket.nio.NioSocketChannel.doWrite(NioSocketChannel.java:428)
[pubsub] at io.netty.channel.AbstractChannel$AbstractUnsafe.flush0(AbstractChannel.java:939)
[pubsub] at io.netty.channel.nio.AbstractNioChannel$AbstractNioUnsafe.flush0(AbstractNioChannel.java:360)
[pubsub] at io.netty.channel.AbstractChannel$AbstractUnsafe.flush(AbstractChannel.java:906)
[pubsub] at io.netty.channel.DefaultChannelPipeline$HeadContext.flush(DefaultChannelPipeline.java:1370)
[pubsub] at io.netty.channel.AbstractChannelHandlerContext.invokeFlush0(AbstractChannelHandlerContext.java:739)
[pubsub] at io.netty.channel.AbstractChannelHandlerContext.invokeFlush(AbstractChannelHandlerContext.java:731)
[pubsub] at io.netty.channel.AbstractChannelHandlerContext.flush(AbstractChannelHandlerContext.java:717)
[pubsub] at io.netty.handler.logging.LoggingHandler.flush(LoggingHandler.java:265)
[pubsub] at io.netty.channel.AbstractChannelHandlerContext.invokeFlush0(AbstractChannelHandlerContext.java:739)
[pubsub] at io.netty.channel.AbstractChannelHandlerContext.invokeFlush(AbstractChannelHandlerContext.java:731)
[pubsub] at io.netty.channel.AbstractChannelHandlerContext.flush(AbstractChannelHandlerContext.java:717)
[pubsub] at io.netty.channel.ChannelDuplexHandler.flush(ChannelDuplexHandler.java:117)
[pubsub] at io.netty.channel.AbstractChannelHandlerContext.invokeFlush0(AbstractChannelHandlerContext.java:739)
[pubsub] at io.netty.channel.AbstractChannelHandlerContext.invokeFlush(AbstractChannelHandlerContext.java:731)
[pubsub] at io.netty.channel.AbstractChannelHandlerContext.flush(AbstractChannelHandlerContext.java:717)
[pubsub] at io.netty.handler.codec.http2.Http2ConnectionHandler.flush(Http2ConnectionHandler.java:201)
[pubsub] at io.netty.channel.AbstractChannelHandlerContext.invokeFlush0(AbstractChannelHandlerContext.java:739)
[pubsub] at io.netty.channel.AbstractChannelHandlerContext.invokeFlush(AbstractChannelHandlerContext.java:731)
[pubsub] at io.netty.channel.AbstractChannelHandlerContext.flush(AbstractChannelHandlerContext.java:717)
[pubsub] at io.netty.channel.DefaultChannelPipeline.flush(DefaultChannelPipeline.java:978)
[pubsub] at io.netty.channel.AbstractChannel.flush(AbstractChannel.java:253)
[pubsub] at io.grpc.netty.WriteQueue.flush(WriteQueue.java:118)
[pubsub] at io.grpc.netty.WriteQueue.access$000(WriteQueue.java:32)
[pubsub] at io.grpc.netty.WriteQueue$1.run(WriteQueue.java:44)
[pubsub] at io.netty.util.concurrent.AbstractEventExecutor.safeExecute(AbstractEventExecutor.java:163)
[pubsub] at io.netty.util.concurrent.SingleThreadEventExecutor.runAllTasks(SingleThreadEventExecutor.java:404)
[pubsub] at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:495)
[pubsub] at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:905)
[pubsub] at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
[pubsub] at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
[pubsub] at java.lang.Thread.run(Thread.java:748)
[pubsub]
[pubsub] (the same Stream Error stack trace repeats continuously)
My publisher also throws many copies of the same error:
{ Error: Retry total timeout exceeded before any response was received
at repeat (/Users/ldu020/workspace/xxx/xxx-master/workflow/node_modules/google-gax/src/normalCalls/retries.ts:83:23)
at Timeout.setTimeout (/Users/ldu020/workspace/xxx/xxx-master/workflow/node_modules/google-gax/src/normalCalls/retries.ts:124:13)
at ontimeout (timers.js:436:11)
at tryOnTimeout (timers.js:300:5)
at listOnTimeout (timers.js:263:5)
at Timer.processTimers (timers.js:223:10) code: 4 }
My subscriber throws the errors below:
[2020-05-06T06:29:08.932Z]Received message 46689:
Data: message payload 3998
Attributes: {}
[2020-05-06T06:29:08.932Z]Received message 46690:
Data: message payload 3999
Attributes: {}
ERROR: Error: Failed to "acknowledge" for 500 message(s). Reason: 4 DEADLINE_EXCEEDED: Deadline exceeded
ERROR: Error: Failed to "acknowledge" for 100 message(s). Reason: 4 DEADLINE_EXCEEDED: Deadline exceeded
ERROR: Error: Failed to "acknowledge" for 200 message(s). Reason: 4 DEADLINE_EXCEEDED: Deadline exceeded
ERROR: Error: Failed to "modifyAckDeadline" for 400 message(s). Reason: 4 DEADLINE_EXCEEDED: Deadline exceeded
ERROR: Error: Failed to "modifyAckDeadline" for 500 message(s). Reason: 4 DEADLINE_EXCEEDED: Deadline exceeded
ERROR: Error: Failed to "modifyAckDeadline" for 100 message(s). Reason: 4 DEADLINE_EXCEEDED: Deadline exceeded
ERROR: Error: Failed to "modifyAckDeadline" for 200 message(s). Reason: 4 DEADLINE_EXCEEDED: Deadline exceeded
ERROR: Error: Failed to connect to channel. Reason: Failed to connect before the deadline
I don't know whether this is a bug or whether I am using Pub/Sub incorrectly. I found some related questions on Stack Overflow and issues on GitHub:
Retry total timeout exceeded before any response was received in GCP PubSub
https://github.com/googleapis/nodejs-pubsub/issues/770
https://github.com/googleapis/gax-nodejs/issues/741
But I didn't find a solution in any of them.
Steps to reproduce
I made a minimal code example to reproduce it. Can you tell me what's going on? Thanks!
Repo: https://github.com/mrdulin/nodejs-gcp/tree/master/src/pubsub/pubsub-emulator
Start pubsub-emulator
gcloud beta emulators pubsub start --project=$PROJECT_ID
Create topic:
npx ts-node ./publisher.ts create pubsub-emulator-t1
Create a subscription for the topic:
npx ts-node ./subscriber.ts create pubsub-emulator-t1 pubsub-emulator-t1-sub
Listen for the messages:
npx ts-node ./subscriber.ts receive pubsub-emulator-t1-sub
Publish 50k messages:
npx ts-node ./publisher.ts publish pubsub-emulator-t1
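For context, the publish step above boils down to a flat loop of publish calls. The actual code is in the repo linked above; the following is only a minimal sketch of that step, assuming PUBSUB_EMULATOR_HOST and PROJECT_ID are set so the client talks to the emulator, and using the topic name from the steps above:

import { PubSub } from '@google-cloud/pubsub';

// Hypothetical sketch of "publish 50k messages"; the real code lives in the repro repo.
async function publishMany(topicName: string, count: number): Promise<void> {
  const topic = new PubSub({ projectId: process.env.PROJECT_ID }).topic(topicName);
  const results = await Promise.all(
    Array.from({ length: count }, (_, i) =>
      topic
        .publish(Buffer.from(`message payload ${i}`))
        .then(() => true)
        // Rejections here are the "Retry total timeout exceeded before any
        // response was received" errors shown above.
        .catch((err: Error) => {
          console.error(err.message);
          return false;
        })
    )
  );
  const failed = results.filter((ok) => !ok).length;
  console.log(`published: ${count - failed}, failed: ${failed}`);
}

publishMany('pubsub-emulator-t1', 50000).catch(console.error);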
Additional information
I also got the error "Retry total timeout exceeded before any response was received" in production environments, so it may not be a problem with the pubsub-emulator itself.
@google-cloud/pubsub version: "@google-cloud/pubsub": "^1.6.0"
That's why I am trying to make an example to reproduce it, but I can't figure out what's going on.
The only way I can reproduce this error right now is to publish 50k messages.
Test cases: https://gist.github.com/mrdulin/79f1689a9baaafaef90fcad42646bf6d
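Not a confirmed fix, but one workaround worth trying is to avoid firing all 50k publish() calls at once, for example by publishing in chunks so each request can get a response before the client's total retry timeout expires. A hedged sketch (the chunk size of 500 is an arbitrary guess):

import { PubSub } from '@google-cloud/pubsub';

// Hypothetical mitigation sketch: publish in chunks instead of all at once.
async function publishInChunks(topicName: string, count: number, chunkSize = 500): Promise<void> {
  const topic = new PubSub({ projectId: process.env.PROJECT_ID }).topic(topicName);
  for (let start = 0; start < count; start += chunkSize) {
    const end = Math.min(start + chunkSize, count);
    const chunk: Array<Promise<string>> = [];
    for (let i = start; i < end; i++) {
      chunk.push(topic.publish(Buffer.from(`message payload ${i}`)));
    }
    // Wait for the current chunk before starting the next one, limiting the
    // number of publish RPCs that are in flight at any given time.
    await Promise.all(chunk);
  }
}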

Related

How to create a subscription using the pubsub emulator HTTP API?

After starting the pubsub emulator, I'm trying to create a topic and a subscription using the HTTP API. Creating a topic succeeds, but I cannot figure out why creating a subscription doesn't. Am I doing something wrong or is this a bug in the tool? You can see the logs below:
$ curl -s -X PUT http://localhost:8085/v1/projects/myproject/topics/mytopic
{
"name": "projects/myproject/topics/mytopic"
}
$ curl -s -X PUT http://localhost:8085/v1/projects/myproject/subscriptions/mysub \
--data '{"topic":"projects/myproject/topics/mytopic"}'
Not Found
On the emulator side, I see the following:
# create topic logs
[pubsub] Apr 29, 2020 10:37:19 AM io.gapi.emulators.grpc.GrpcServer$3 operationComplete
[pubsub] INFO: Adding handler(s) to newly registered Channel.
[pubsub] Apr 29, 2020 10:37:19 AM io.gapi.emulators.netty.HttpVersionRoutingHandler channelRead
[pubsub] INFO: Detected non-HTTP/2 connection.
[pubsub] Apr 29, 2020 10:37:19 AM io.gapi.emulators.grpc.GrpcServer$3 operationComplete
[pubsub] INFO: Adding handler(s) to newly registered Channel.
[pubsub] Apr 29, 2020 10:37:19 AM io.gapi.emulators.netty.HttpVersionRoutingHandler channelRead
[pubsub] INFO: Detected HTTP/2 connection.
# create subscription logs
[pubsub] Apr 29, 2020 10:37:27 AM io.gapi.emulators.grpc.GrpcServer$3 operationComplete
[pubsub] INFO: Adding handler(s) to newly registered Channel.
[pubsub] Apr 29, 2020 10:37:27 AM io.gapi.emulators.netty.HttpVersionRoutingHandler channelRead
[pubsub] INFO: Detected non-HTTP/2 connection.
[pubsub] Apr 29, 2020 10:37:27 AM io.gapi.emulators.netty.NotFoundHandler handleRequest
[pubsub] INFO: Unknown request URI: /v1/projects/myproject/subscriptions/mysub
Even though creating a topic works with the above command (no content type necessary), in order to send data with the --data '...' option, you also need to send the content type header. So the following command does work:
$ curl -s -X PUT http://localhost:8085/v1/projects/myproject/subscriptions/mysub \
-H 'content-type: application/json' \
--data '{"topic":"projects/myproject/topics/mytopic"}'

AWS EMR Spark job restarts [AsyncEventQueue: Dropping event from queue appStatus.]

My PySpark job (2 hours, processing 20 GB, writing 40 MB) restarts even after a successful run (per the logs) and after the data was written to S3. I tried PySpark 2.3.0 and 2.3.1, and emr-5.14.0 and emr-5.16.0.
The traceback:
18/08/22 17:45:13 ERROR AsyncEventQueue: Dropping event from queue appStatus. This likely means one of the listeners is too slow and cannot keep up with the rate at which tasks are being started by the scheduler.
18/08/22 17:45:13 WARN AsyncEventQueue: Dropped 1 events from appStatus since Thu Jan 01 00:00:00 UTC 1970.
18/08/22 17:46:28 WARN AsyncEventQueue: Dropped 25523 events from appStatus since Wed Aug 22 17:45:13 UTC 2018.
18/08/22 17:47:28 WARN AsyncEventQueue: Dropped 3417 events from appStatus since Wed Aug 22 17:46:28 UTC 2018.
18/08/22 17:48:28 WARN AsyncEventQueue: Dropped 3669 events from appStatus since Wed Aug 22 17:47:28 UTC 2018.
18/08/22 17:49:28 WARN AsyncEventQueue: Dropped 7725 events from appStatus since Wed Aug 22 17:48:28 UTC 2018.
18/08/22 17:50:28 WARN AsyncEventQueue: Dropped 6609 events from appStatus since Wed Aug 22 17:49:28 UTC 2018.
18/08/22 17:53:44 WARN AsyncEventQueue: Dropped 2272 events from appStatus since Wed Aug 22 17:50:28 UTC 2018.
18/08/22 17:54:39 WARN ShutdownHookManager: ShutdownHook '$anon$2' timeout, java.util.concurrent.TimeoutException
java.util.concurrent.TimeoutException
at java.util.concurrent.FutureTask.get(FutureTask.java:205)
at org.apache.hadoop.util.ShutdownHookManager$1.run(ShutdownHookManager.java:67)
18/08/22 17:54:39 ERROR Utils: Uncaught exception in thread pool-4-thread-1
java.lang.InterruptedException
at java.lang.Object.wait(Native Method)
at java.lang.Thread.join(Thread.java:1252)
at java.lang.Thread.join(Thread.java:1326)
at org.apache.spark.scheduler.AsyncEventQueue.stop(AsyncEventQueue.scala:135)
at org.apache.spark.scheduler.LiveListenerBus$$anonfun$stop$1.apply(LiveListenerBus.scala:219)
at org.apache.spark.scheduler.LiveListenerBus$$anonfun$stop$1.apply(LiveListenerBus.scala:219)
at scala.collection.Iterator$class.foreach(Iterator.scala:893)
at scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
at scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
at scala.collection.AbstractIterable.foreach(Iterable.scala:54)
at org.apache.spark.scheduler.LiveListenerBus.stop(LiveListenerBus.scala:219)
at org.apache.spark.SparkContext$$anonfun$stop$6.apply$mcV$sp(SparkContext.scala:1922)
at org.apache.spark.util.Utils$.tryLogNonFatalError(Utils.scala:1360)
at org.apache.spark.SparkContext.stop(SparkContext.scala:1921)
at org.apache.spark.SparkContext$$anonfun$2.apply$mcV$sp(SparkContext.scala:573)
at org.apache.spark.util.SparkShutdownHook.run(ShutdownHookManager.scala:216)
at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1$$anonfun$apply$mcV$sp$1.apply$mcV$sp(ShutdownHookManager.scala:188)
at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1$$anonfun$apply$mcV$sp$1.apply(ShutdownHookManager.scala:188)
at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1$$anonfun$apply$mcV$sp$1.apply(ShutdownHookManager.scala:188)
at org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1991)
at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1.apply$mcV$sp(ShutdownHookManager.scala:188)
at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1.apply(ShutdownHookManager.scala:188)
at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1.apply(ShutdownHookManager.scala:188)
at scala.util.Try$.apply(Try.scala:192)
at org.apache.spark.util.SparkShutdownHookManager.runAll(ShutdownHookManager.scala:188)
at org.apache.spark.util.SparkShutdownHookManager$$anon$2.run(ShutdownHookManager.scala:178)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
Found the answer here [1].
tl;dr: To resolve this issue, explicitly invoke sparkContext.stop() before exiting the application.
[1] https://community.hortonworks.com/content/supportkb/208452/warn-shutdownhookmanager-shutdownhook-anon2-timeou.html

Spring integration DSL unable to copy files with Sftp Inbound Adapter in java 1.7

After testing my SFTP inbound flow I found that when my local directory is empty, all files from the remote directory are successfully copied to my local directory, but if at least one file is present in the local directory then none of the files get copied from the remote directory. Can anyone please provide a pointer, as I am not able to fix it?
This is my Sftp inbound flow....
@Bean
public IntegrationFlow sftpInboundFlow() {
    return IntegrationFlows
            .from(Sftp.inboundAdapter(this.sftpSessionFactory)
                            .preserveTimestamp(true).remoteDirectory(remDir)
                            .regexFilter(".*\\.txt$")
                            .localFilenameExpression("#this.toUpperCase()")
                            .localDirectory(new File(localDir))
                            .remoteFileSeparator("/"),
                    new Consumer<SourcePollingChannelAdapterSpec>() {
                        @Override
                        public void accept(SourcePollingChannelAdapterSpec e) {
                            e.id("sftpInboundAdapter")
                                    .autoStartup(true)
                                    .poller(Pollers.fixedRate(1000)
                                            .maxMessagesPerPoll(1));
                        }
                    })
            .channel(MessageChannels.queue("sftpInboundResultChannel"))
            .get();
}
And this is my test method:
@Test
public void testSftpInboundFlow() {
    Message<?> message = ((PollableChannel) sftpInboundResultChannel).receive(60000);
    System.out.println("message====" + message);
}
Please find the log for the same below:
INFO: Authentication succeeded (password).
enter sftpSessionFactory.....org.springframework.integration.sftp.session.SftpSession#10cea2e
enter sftpInboundFlow.....org.springframework.integration.sftp.session.SftpSession#17ebea9
Jul 16, 2015 6:43:21 PM org.springframework.context.support.DefaultLifecycleProcessor start
INFO: Starting beans in phase -2147483648
Jul 16, 2015 6:43:21 PM org.springframework.context.support.DefaultLifecycleProcessor start
INFO: Starting beans in phase 0
Jul 16, 2015 6:43:21 PM org.springframework.integration.endpoint.EventDrivenConsumer logComponentSubscriptionEvent
INFO: Adding {logging-channel-adapter:_org.springframework.integration.errorLogger} as a subscriber to the 'errorChannel' channel
Jul 16, 2015 6:43:21 PM org.springframework.integration.channel.PublishSubscribeChannel adjustCounterIfNecessary
INFO: Channel 'org.springframework.context.support.GenericApplicationContext#13fb256.errorChannel' has 1 subscriber(s).
Jul 16, 2015 6:43:21 PM org.springframework.integration.endpoint.EventDrivenConsumer start
INFO: started _org.springframework.integration.errorLogger
Jul 16, 2015 6:43:21 PM org.springframework.context.support.DefaultLifecycleProcessor start
INFO: Starting beans in phase 1073741823
Jul 16, 2015 6:43:21 PM org.springframework.integration.endpoint.SourcePollingChannelAdapter start
INFO: started sftpInboundAdapter
Jul 16, 2015 6:43:21 PM org.springframework.integration.file.FileReadingMessageSource receive
INFO: Created message: [GenericMessage [payload=D:\local_copy\Test_sftp.txt, headers={timestamp=1437052401259, id=7e6054ea-9fbc-e9f0-b3e2-9c5dede1556a}]]
the channel is sftpInboundResultChannel
the sftpInboundAdapter is org.springframework.integration.dsl.sftp.SftpInboundChannelAdapterSpec#8df4ff
message====GenericMessage [payload=D:\local_copy\Test_sftp.txt, headers={timestamp=1437052401259, id=7e6054ea-9fbc-e9f0-b3e2-9c5dede1556a}]
Jul 16, 2015 6:43:21 PM org.springframework.context.support.GenericApplicationContext doClose
INFO: Closing org.springframework.context.support.GenericApplicationContext#13fb256: startup date [Thu Jul 16 18:43:10 IST 2015]; root of context hierarchy
Jul 16, 2015 6:43:21 PM org.springframework.context.support.DefaultLifecycleProcessor stop
INFO: Stopping beans in phase 1073741823
Jul 16, 2015 6:43:21 PM org.springframework.integration.endpoint.SourcePollingChannelAdapter stop
INFO: stopped sftpInboundAdapter
Jul 16, 2015 6:43:21 PM org.springframework.context.support.DefaultLifecycleProcessor stop
INFO: Stopping beans in phase 0
Jul 16, 2015 6:43:21 PM org.springframework.integration.endpoint.EventDrivenConsumer logComponentSubscriptionEvent
INFO: Removing {logging-channel-adapter:_org.springframework.integration.errorLogger} as a subscriber to the 'errorChannel' channel
Jul 16, 2015 6:43:21 PM org.springframework.integration.channel.PublishSubscribeChannel adjustCounterIfNecessary
INFO: Channel 'org.springframework.context.support.GenericApplicationContext#13fb256.errorChannel' has 0 subscriber(s).
Jul 16, 2015 6:43:21 PM org.springframework.integration.endpoint.EventDrivenConsumer stop
INFO: stopped _org.springframework.integration.errorLogger
Jul 16, 2015 6:43:21 PM org.springframework.context.support.DefaultLifecycleProcessor stop
INFO: Stopping beans in phase -2147483648
Jul 16, 2015 6:43:21 PM org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler shutdown
INFO: Shutting down ExecutorService 'taskScheduler'

Tomcat7 installed on ubuntu but not responding

I installed tomcat7 manually on my ubuntu 14.04 machine using this website: http://www.krizna.com/ubuntu/install-tomcat-7-ubuntu-14-04/#manual (the manual way).
It installed correctly, but when I try to open the default page at http://server-ip:8080 it does not open and gives me: This webpage is not available
My log file has no errors:
Jan 14, 2015 11:35:25 PM org.apache.coyote.AbstractProtocol pause
INFO: Pausing ProtocolHandler ["http-bio-8080"]
Jan 14, 2015 11:35:25 PM org.apache.coyote.AbstractProtocol pause
INFO: Pausing ProtocolHandler ["ajp-bio-8009"]
Jan 14, 2015 11:35:26 PM org.apache.catalina.core.StandardService stopInternal
INFO: Stopping service Catalina
Jan 14, 2015 11:35:26 PM org.apache.coyote.AbstractProtocol stop
INFO: Stopping ProtocolHandler ["http-bio-8080"]
Jan 14, 2015 11:35:26 PM org.apache.coyote.AbstractProtocol stop
INFO: Stopping ProtocolHandler ["ajp-bio-8009"]
Jan 14, 2015 11:35:26 PM org.apache.coyote.AbstractProtocol destroy
INFO: Destroying ProtocolHandler ["http-bio-8080"]
Jan 14, 2015 11:35:26 PM org.apache.coyote.AbstractProtocol destroy
INFO: Destroying ProtocolHandler ["ajp-bio-8009"]
Jan 14, 2015 11:42:31 PM org.apache.catalina.core.AprLifecycleListener init
INFO: The APR based Apache Tomcat Native library which allows optimal performance in production environments was not found on the java.library.path: /usr/java/packages$
Jan 14, 2015 11:42:32 PM org.apache.coyote.AbstractProtocol init
INFO: Initializing ProtocolHandler ["http-bio-8080"]
Jan 14, 2015 11:42:32 PM org.apache.coyote.AbstractProtocol init
INFO: Initializing ProtocolHandler ["ajp-bio-8009"]
Jan 14, 2015 11:42:32 PM org.apache.catalina.startup.Catalina load
INFO: Initialization processed in 713 ms
Jan 14, 2015 11:42:32 PM org.apache.catalina.core.StandardService startInternal
INFO: Starting service Catalina
Jan 14, 2015 11:42:32 PM org.apache.catalina.core.StandardEngine startInternal
INFO: Starting Servlet Engine: Apache Tomcat/7.0.54
Jan 14, 2015 11:42:32 PM org.apache.catalina.startup.HostConfig deployDirectory
INFO: Deploying web application directory /usr/local/apache-tomcat-7.0.54/webapps/docs
Jan 14, 2015 11:42:32 PM org.apache.catalina.startup.HostConfig deployDirectory
INFO: Deployment of web application directory /usr/local/apache-tomcat-7.0.54/webapps/docs has finished in 533 ms
Jan 14, 2015 11:42:32 PM org.apache.catalina.startup.HostConfig deployDirectory
INFO: Deploying web application directory /usr/local/apache-tomcat-7.0.54/webapps/host-manager
Jan 14, 2015 11:42:32 PM org.apache.catalina.startup.HostConfig deployDirectory
INFO: Deployment of web application directory /usr/local/apache-tomcat-7.0.54/webapps/host-manager has finished in 104 ms
Jan 14, 2015 11:42:32 PM org.apache.catalina.startup.HostConfig deployDirectory
INFO: Deploying web application directory /usr/local/apache-tomcat-7.0.54/webapps/manager
Jan 14, 2015 11:42:33 PM org.apache.catalina.startup.HostConfig deployDirectory
INFO: Deployment of web application directory /usr/local/apache-tomcat-7.0.54/webapps/manager has finished in 59 ms
Jan 14, 2015 11:42:33 PM org.apache.catalina.startup.HostConfig deployDirectory
INFO: Deploying web application directory /usr/local/apache-tomcat-7.0.54/webapps/ROOT
Jan 14, 2015 11:42:33 PM org.apache.catalina.startup.HostConfig deployDirectory
INFO: Deployment of web application directory /usr/local/apache-tomcat-7.0.54/webapps/ROOT has finished in 56 ms
Jan 14, 2015 11:42:33 PM org.apache.catalina.startup.HostConfig deployDirectory
INFO: Deploying web application directory /usr/local/apache-tomcat-7.0.54/webapps/examples
Jan 14, 2015 11:42:33 PM org.apache.catalina.startup.HostConfig deployDirectory
INFO: Deployment of web application directory /usr/local/apache-tomcat-7.0.54/webapps/examples has finished in 424 ms
Jan 14, 2015 11:42:33 PM org.apache.coyote.AbstractProtocol start
INFO: Starting ProtocolHandler ["http-bio-8080"]
Jan 14, 2015 11:42:33 PM org.apache.coyote.AbstractProtocol start
INFO: Starting ProtocolHandler ["ajp-bio-8009"]
Jan 14, 2015 11:42:33 PM org.apache.catalina.startup.Catalina start
INFO: Server startup in 1262 ms

file upload in JSF using myfaces component

I am creating a JSF application where file upload functionality is required. I have added all the required jar files in my /WEB-INF/lib folder.
jsf-api.jar
jsf-impl.jar
jstl.jar
standard.jar
myfaces-extensions.jar
commons-collections.jar
commons-digester.jar
commons-beanutils.jar
commons-logging.jar
commons-fileupload-1.0.jar
But still, when trying to deploy the application on Apache Tomcat 6.0.29, I am getting the following error.
org.apache.catalina.core.StandardContext addApplicationListener
INFO: The listener "com.sun.faces.config.ConfigureListener" is already configured for this context. The duplicate definition has been ignored.
org.apache.catalina.core.StandardContext start
SEVERE: Error listenerStart
org.apache.catalina.core.StandardContext start
SEVERE: Context [/jsfApplication] startup failed due to previous errors
org.apache.catalina.loader.WebappClassLoader clearReferencesJdbc
The web application [/jsfApplication] registered the JBDC driver [com.mysql.jdbc.Driver] but failed to unregister it when the web application was stopped. To prevent a memory leak, the JDBC Driver has been forcibly unregistered.
org.apache.catalina.loader.WebappClassLoader clearReferencesThreads
SEVERE: The web application [/jsfApplication] appears to have started a thread named [Timer-0] but has failed to stop it. This is very likely to create a memory leak.
org.apache.catalina.loader.WebappClassLoader clearReferencesThreads
SEVERE: The web application [/jsfApplication] appears to have started a thread named [MySQL Statement Cancellation Timer] but has failed to stop it. This is very likely to create a memory leak.
log4j:ERROR LogMananger.repositorySelector was null likely due to error in class reloading, using NOPLoggerRepository.
I am also using Hibernate and the Spring Framework for this application.
Please help.
Thanks.
Update:
This is the complete error message I get whenever I add the myfaces-extensions.jar file to my /WEB-INF/lib folder.
Using CATALINA_BASE: /home/prt/Desktop/apache-tomcat-6.0.29
Using CATALINA_HOME: /home/prt/Desktop/apache-tomcat-6.0.29
Using CATALINA_TMPDIR: /home/prt/Desktop/apache-tomcat-6.0.29/temp
Using JRE_HOME: /usr/jdk1.6.0_20
Using CLASSPATH: /home/prt/Desktop/apache-tomcat-6.0.29/bin/bootstrap.jar
8 Jan, 2011 7:08:54 PM org.apache.catalina.core.AprLifecycleListener init
INFO: The APR based Apache Tomcat Native library which allows optimal performance in production environments was not found on the java.library.path: /usr/jdk1.6.0_20/jre/lib/i386/client:/usr/jdk1.6.0_20/jre/lib/i386:/usr/jdk1.6.0_20/jre/../lib/i386:/usr/java/packages/lib/i386:/lib:/usr/lib
8 Jan, 2011 7:08:54 PM org.apache.coyote.http11.Http11Protocol init
INFO: Initializing Coyote HTTP/1.1 on http-8080
8 Jan, 2011 7:08:54 PM org.apache.catalina.startup.Catalina load
INFO: Initialization processed in 643 ms
8 Jan, 2011 7:08:54 PM org.apache.catalina.core.StandardService start
INFO: Starting service Catalina
8 Jan, 2011 7:08:54 PM org.apache.catalina.core.StandardEngine start
INFO: Starting Servlet Engine: Apache Tomcat/6.0.29
8 Jan, 2011 7:08:54 PM org.apache.catalina.startup.HostConfig deployDescriptor
INFO: Deploying configuration descriptor host-manager.xml
8 Jan, 2011 7:08:55 PM org.apache.catalina.startup.HostConfig deployDescriptor
INFO: Deploying configuration descriptor manager.xml
8 Jan, 2011 7:08:55 PM org.apache.catalina.startup.HostConfig deployWAR
INFO: Deploying web application archive jsfApplication.war
8 Jan, 2011 7:08:55 PM org.apache.catalina.loader.WebappClassLoader validateJarFile
INFO: validateJarFile(/home/prt/Desktop/apache-tomcat-6.0.29/webapps/jsfApplication/WEB-INF/lib/servlet-api.jar) - jar not loaded. See Servlet Spec 2.3, section 9.7.2. Offending class: javax/servlet/Servlet.class
8 Jan, 2011 7:08:55 PM org.apache.catalina.core.StandardContext addApplicationListener
INFO: The listener "com.sun.faces.config.ConfigureListener" is already configured for this context. The duplicate definition has been ignored.
8 Jan, 2011 7:08:58 PM org.apache.catalina.core.StandardContext start
SEVERE: Error listenerStart
8 Jan, 2011 7:08:58 PM org.apache.catalina.core.StandardContext start
SEVERE: Context [/jsfApplication] startup failed due to previous errors
8 Jan, 2011 7:08:58 PM org.apache.catalina.loader.WebappClassLoader clearReferencesJdbc
SEVERE: The web application [/jsfApplication] registered the JBDC driver [com.mysql.jdbc.Driver] but failed to unregister it when the web application was stopped. To prevent a memory leak, the JDBC Driver has been forcibly unregistered.
8 Jan, 2011 7:08:58 PM org.apache.catalina.loader.WebappClassLoader clearReferencesThreads
SEVERE: The web application [/jsfApplication] appears to have started a thread named [Timer-0] but has failed to stop it. This is very likely to create a memory leak.
8 Jan, 2011 7:08:58 PM org.apache.catalina.loader.WebappClassLoader clearReferencesThreads
SEVERE: The web application [/jsfApplication] appears to have started a thread named [MySQL Statement Cancellation Timer] but has failed to stop it. This is very likely to create a memory leak.
log4j:ERROR LogMananger.repositorySelector was null likely due to error in class reloading, using NOPLoggerRepository.
8 Jan, 2011 7:08:58 PM org.apache.catalina.startup.HostConfig deployDirectory
INFO: Deploying web application directory examples
8 Jan, 2011 7:08:58 PM org.apache.catalina.loader.WebappClassLoader loadClass
INFO: Illegal access: this web application instance has been stopped already. Could not load java.net.BindException. The eventual following stack trace is caused by an error thrown for debugging purposes as well as to attempt to terminate the thread which caused the illegal access, and has no functional impact.
java.lang.IllegalStateException
at org.apache.catalina.loader.WebappClassLoader.loadClass(WebappClassLoader.java:1531)
at org.apache.catalina.loader.WebappClassLoader.loadClass(WebappClassLoader.java:1491)
at com.mysql.jdbc.CommunicationsException.(CommunicationsException.java:161)
at com.mysql.jdbc.MysqlIO.send(MysqlIO.java:2759)
at com.mysql.jdbc.MysqlIO.quit(MysqlIO.java:1410)
at com.mysql.jdbc.Connection.realClose(Connection.java:4947)
at com.mysql.jdbc.Connection.cleanup(Connection.java:2063)
at com.mysql.jdbc.Connection.finalize(Connection.java:3403)
at java.lang.ref.Finalizer.invokeFinalizeMethod(Native Method)
at java.lang.ref.Finalizer.runFinalizer(Finalizer.java:83)
at java.lang.ref.Finalizer.access$100(Finalizer.java:14)
at java.lang.ref.Finalizer$FinalizerThread.run(Finalizer.java:160)
8 Jan, 2011 7:08:58 PM org.apache.catalina.startup.HostConfig deployDirectory
INFO: Deploying web application directory docs
8 Jan, 2011 7:08:58 PM org.apache.catalina.startup.HostConfig deployDirectory
INFO: Deploying web application directory ROOT
8 Jan, 2011 7:08:58 PM org.apache.coyote.http11.Http11Protocol start
INFO: Starting Coyote HTTP/1.1 on http-8080
8 Jan, 2011 7:08:58 PM org.apache.jk.common.ChannelSocket init
INFO: JK: ajp13 listening on /0.0.0.0:8009
8 Jan, 2011 7:08:58 PM org.apache.jk.server.JkMain start
INFO: Jk running ID=0 time=0/24 config=null
8 Jan, 2011 7:08:58 PM org.apache.catalina.startup.Catalina start
INFO: Server startup in 3905 ms
I also had problems with myfaces-extensions and its x: tags. The conflict comes from the fact that myfaces-extensions is a very old solution, and if your other libraries are up to date the versions won't match. As posted at "tomahawk inputfileupload uploaded file is null", in my answer I suggest using the Tomahawk t: tags and the libraries related to them. The actual solution is posted in the main answer of that same link.
I'm pretty sure this will do the trick. Accept my answer if the linked post answered your question :)

Resources