Error: This socket has been ended by the other party - node.js

I modified the Appium source code and added my own code. When I connect to the port that is forwarded to the device and send a command to it, I get:
Error: This socket has been ended by the other party
My code looks like this:
return await new Promise((resolve, reject) => {
  try {
    this.socketClient = net.connect(this.webSocket);
    // Windows: the socket errors out when ADB restarts. Let's catch it to avoid crashing.
    this.socketClient.on('error', (err) => {
      if (!this.ignoreUnexpectedShutdown) {
        //throw new Error(`Android bootstrap socket crashed: ${err}`);
        log.debug('//////////////////////////////////');
        log.debug(err);
        log.debug('//////////////////////////////////');
        throw new Error(`Android testbundle socket crashed: ${err}`);
      }
    });
    this.socketClient.once('connect', () => {
      log.info("Android bundle socket is now connected");
      resolve();
    });
  } catch (err) {
    reject(err);
  }
});
After that, I use this.socketClient to send commands like this:
async sendCommand(type, extra = {}) {
  if (!this.socketClient) {
    log.debug('==========socket closed========');
    throw new Error('Socket connection closed unexpectedly');
  }
  return await new B((resolve, reject) => {
    let cmd = Object.assign({cmd: type}, extra);
    let cmdJson = `${JSON.stringify(cmd)}\n`;
    log.debug(`Sending command to android testbundle: ${_.trunc(cmdJson, 1000).trim()}`);
    this.socketClient.write(cmdJson);
    this.socketClient.setEncoding('utf8');
    let streamData = '';
    this.socketClient.on('data', (data) => {
      try {
        streamData = JSON.parse(streamData + data);
        // we successfully parsed JSON so we've got all the data,
        // remove the socket listener and evaluate
        this.socketClient.removeAllListeners('data');
        if (streamData.status === 0) {
          resolve(streamData.value);
        }
        log.debug("Received command result from bundle:" + JSON.stringify(streamData));
        reject(errorFromCode(streamData.status));
      } catch (ign) {
        log.debug("Stream still not complete, waiting");
        streamData += data;
      }
    });
  });
}
But I always get this error:
[debug] [bundle] //////////////////////////////////
[debug] [bundle] Error: This socket has been ended by the other party
at Socket.writeAfterFIN [as write] (net.js:291:12)
at ..\../lib/bundle.js:160:31
Can anyone help me?
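For what it's worth, writeAfterFIN means the write in sendCommand happens after the remote end has already sent a FIN. A minimal defensive sketch (my addition, not Appium code) that reuses the existing null-check in sendCommand by clearing the reference once the peer hangs up:

this.socketClient.on('end', () => {
  // The peer sent FIN; any further write() raises
  // "This socket has been ended by the other party".
  log.debug('Android bundle socket ended by the other party');
});
this.socketClient.on('close', () => {
  // Drop the reference so sendCommand() rejects via its null-check
  // instead of writing into a dead socket.
  this.socketClient = null;
});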

Related

Why does this behaviour with nodejs and child process happen?

What am I trying to do?
I want to clone multiple git repositories from nodejs.
How do I achieve that?
First I have this method in a separate file:
const exec = require("child_process").exec;
const { printError } = require("./printers");

function execute(command) {
  console.log(`Running command: ${command}`);
  let result = "";
  let savedError = undefined;
  return new Promise((resolve, reject) => {
    const proc = exec(command, function (error, stdout, stderr) {
      savedError = savedError || stderr || error;
      result += stdout;
    });
    proc.on("close", (code) => {
      if (code !== 0) {
        console.error(
          [
            "================================================================",
            `Command ${command} failed.`,
            `Status code: ${code}`,
            `Error message: ${savedError}`,
            `Output: ${result}`,
            "================================================================",
            "",
          ].join("\n")
        );
        reject(result);
      } else {
        resolve(result);
      }
    });
  });
}
Then I use it in my main module (the code is abbreviated and a bit simplified just so you can get the point):
const repoUrls = ["url1", "url2"]; // imagine these are real urls

async function main() {
  const copyOfUrls = [...repoUrls];
  while (copyOfUrls.length) {
    try {
      await execute(
        `git clone ${copyOfUrls[0]} repositories/${repoUrlFileFriendly(
          copyOfUrls[0]
        )}`
      );
      console.log('fun fact - this console log never happens');
      copyOfUrls.shift();
    } catch (error) {
      console.error("Failed to clone, see error:", error);
    }
  }
}
What is the problem?
Well, the code works, BUT after cloning the first repo the process exits (both the child process and the main one) and the second repository is never even attempted. (Note the "console.log('fun fact - this console log never happens');".)
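For comparison, here is a minimal sketch of the same sequential-clone loop using Node's promisified exec (util.promisify(exec) is standard; repoUrlFileFriendly is assumed to exist as in the snippet above). It is only a reduced restatement of the pattern, not a diagnosis of the early exit:

const util = require("util");
const execAsync = util.promisify(require("child_process").exec);

async function cloneAll(urls) {
  for (const url of urls) {
    // Each clone finishes (or throws) before the next one starts.
    const { stdout, stderr } = await execAsync(
      `git clone ${url} repositories/${repoUrlFileFriendly(url)}`
    );
    console.log(`Cloned ${url}`, stdout, stderr);
  }
}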

Can't load protobuf message with protobuf.js

I'm trying to use proto messages with protobuf.js, encode them and send them to a RabbitMQ message broker. I have the following sub-folders inside my project:
- model
  - protos
    - transactions.proto
  - RabitMQ.js
- routes
  - rmq-api.js
I added a route (using Express) which does the following in the rmq-api.js file:
const RabbitMQ = require('../model/RabbitMQ');

router.post('/api/transactions', function (req, res, next) {
  RabbitMQ.PublishTransactionsMessage(DummyMessage).then(() => {
    res.status(200).send({message: "OK :)"});
  }).catch((e) => {
    res.status(500).send({error: e.message});
  });
});
In the RabitMQ.js file I have the following code:
module.exports = {
  PublishTransactionsMessage: function(message) {
    return new Promise((resolve, reject) => {
      amqp.connect(RabbitMQConfig.url, function (error, connection) {
        if (error) {
          console.error("Could not connect to the rabbit message broker on {0} - " +
            "Check connection please and try again".format(RabbitMQConfig.url));
          console.error("Error message - {0}".format(error));
          reject(error);
        }
        connection.createChannel(function(error, channel) {
          if (error) {
            console.error("Could Create channel - {0}".format(error.message));
            reject(error);
          }
          const queue = RabbitMQConfig.queue;
          channel.assertQueue(queue, {
            durable: true
          });
          // Convert Message to protobuff
          protobuf.load("./protos/transactions.proto").then((err, root) => {
            if (err) {
              reject(err);
            }
            let ScraperMessageResult = root.lookupType("transactions.ScraperMessageResult");
            const errMsg = ScraperMessageResult.verify(message);
            if (errMsg)
              reject(errMsg);
            let buffer = ScraperMessageResult.encode(message).finish();
            channel.sendToQueue(queue, buffer);
            console.log(`Sent ${message} to queue: ${queue}`);
            resolve();
          }).catch((err) => {
            reject(err);
          });
        });
      });
    });
  },
};
In the code shown above, on this line:
protobuf.load("./protos/transactions.proto").then((err, root) => {
I keep catching an error inside this catch block:
}).catch((err) => {
  reject(err);
});
This seems like a pretty simple problem, however I haven't found anything about it online, so I might be missing something really simple here.
P.S. I tried using __dirname + "/protos/transaction.proto" and still couldn't get this to work.
Please help me figure this out.
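As a side note on the protobuf.js API (separate from the path issue addressed in the answer below): protobuf.load returns a Promise that resolves with the Root only, so a .then((err, root) => ...) handler receives the Root as err and undefined as root; the (err, root) pair belongs to the callback form. A quick sketch of both forms:

const protobuf = require("protobufjs");

// Promise form: resolves with the Root object.
protobuf.load(__dirname + "/protos/transactions.proto")
  .then((root) => {
    const ScraperMessageResult = root.lookupType("transactions.ScraperMessageResult");
    // ... verify/encode here ...
  })
  .catch((err) => console.error(err));

// Callback form: uses the (err, root) signature instead.
protobuf.load(__dirname + "/protos/transactions.proto", (err, root) => {
  if (err) throw err;
  const ScraperMessageResult = root.lookupType("transactions.ScraperMessageResult");
});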
The problem is that your function doesn't look in the dist directory;
await protocolBuffer.load(__dirname + '/item.proto'); should look there.
You also need to copy the .proto file manually with a copy/cp command; you can add it as part of the package.json scripts like so:
"build": "nest build && COPY src\\reading_list\\protocol_buffer\\*.proto dist\\reading_list\\protocol_buffer\\"

How to fix my stream/writing data closing really slow with NodeJS and promise-ftp

I have to download a JSON file from an FTP server. Getting the file is really fast, but then the read/write stream takes really long to close. The file is on my desktop almost instantly, but the stream doesn't close.
I've tried promise-ftp and plain ftp. I updated my Node.js to the latest version, so I've tried multiple versions.
const getFile = () => {
  return new Promise((resolve, reject) => {
    console.log('getting file.');
    ftp.connect(config)
      .then(function (serverMessage) {
        return ftp.get(remoteFile + '.json');
      }).then(function (stream) {
        return new Promise(function (resolve, reject) {
          stream.once('close', resolve);
          stream.once('error', reject);
          stream.pipe(fs.createWriteStream(`${steamid}.json`));
        });
      }).then(async function () {
        console.log('done editing');
        await ftp.end();
        return resolve();
      }).catch(console.log);
  });
};
I expect this to download and close in about 1 second. It takes a solid 10 seconds to close the file, but I can already see it on my PC instantly and read it. It's about 700 kB for 1 file.
EDIT:
The FTP connection closes instantly, but the stream.pipe(fs.createWriteStream(`${steamid}.json`)); takes 10 seconds to emit the 'close' event, even though the file looks fully downloaded instantly.
EDIT 2:
There is a gap of 10 seconds after the 680 (that's the chunk.length) and then the write stream is finished.
It's like it doesn't know there isn't any more data coming.
EDIT 3:
So the files I download are around 700 bytes, so it's one chunk at most every time. I fixed it by adding a 'data' handler on the read stream, and after that one chunk I just destroy the stream. My whole download/edit/upload now runs in less than a second. This probably isn't an ideal solution.
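For reference, a minimal sketch of the workaround described in EDIT 3 (destroying the stream after the single expected chunk); ftp, config, remoteFile and steamid are the same assumed variables as in the question, and this only makes sense when the file is known to fit in one chunk:

ftp.connect(config)
  .then(() => ftp.get(remoteFile + '.json'))
  .then((stream) => new Promise((resolve, reject) => {
    const out = fs.createWriteStream(`${steamid}.json`);
    stream.once('error', reject);
    stream.on('data', (chunk) => {
      // The file is tiny, so the first chunk is the whole payload.
      out.end(chunk, resolve);
      stream.destroy(); // don't wait ~10s for the server to close the stream
    });
  }))
  .then(() => ftp.end());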
Based on the promise-ftp npm docs, if you want to immediately close the FTP connection you should use the following method:
destroy(): Closes the connection to the server immediately. Returns a boolean indicating whether the connection was connected prior to the call to destroy().
In your case that would be:
const getFile = () => {
  return new Promise((resolve, reject) => {
    console.log('getting file.');
    ftp.connect(config)
      .then(function (serverMessage) {
        return ftp.get(remoteFile + '.json');
      }).then(function (stream) {
        return new Promise(function (resolve, reject) {
          stream.once('close', resolve);
          stream.once('error', reject);
          stream.pipe(fs.createWriteStream(`${steamid}.json`));
        });
      }).then(async function () {
        console.log('done editing');
        await ftp.destroy();
        return resolve();
      }).catch(console.log);
  });
};
I know this post is old, but I had the same issue.
In the end I took up Marcel's idea, just improved it by checking the size of the downloaded file.
var ftp = require('ftp'); // FTP client
var fs = require('fs');   // File handling

// Initialization
var distantFile = 'example.dat';
var localFile = distantFile;
var downloadedSize = 0;
var fileSize = 0;

var clientFtp = new ftp();

clientFtp.on('ready', function () {
  clientFtp.list(distantFile, function (errotList, listeElementDistant) {
    if (!errotList) {
      fileSize = listeElementDistant[0].size;
      clientFtp.get(distantFile, function (errotGet, stream) {
        if (!errotGet) {
          stream.pipe(fs.createWriteStream(localFile));
          stream.on('data', function (buffer) {
            downloadedSize += buffer.length;
            console.log(downloadedSize + ' ' + fileSize + ' ' + (downloadedSize / fileSize * 100).toFixed(1) + '%');
            if (downloadedSize == fileSize) {
              clientFtp.end();
              stream.destroy();
              console.log('Download Complete');
            }
          });
          stream.on('close', function () {
            clientFtp.end();
          });
        } else {
          console.log('Error Get : ' + errotGet);
        }
      });
    } else {
      console.log('Error List : ' + errotList);
    }
  });
});

clientFtp.connect(/* You must define the connection options */);

Node net socket.end event fires twice?

This event is firing twice. I'm trying to figure out why.
On one client, I have:
import Net from 'net';
import Chalk from 'chalk';

const fallback = [2,5,10,25,50,100,250,500,1000,2000];

class LocalNetworkInterface {
  constructor({path}) {
    this._sock = new Net.Socket();
    this._pending = {};
    this._count = 0;
    this._retry = 0;

    const connect = () => {
      this._sock.connect({path});
    };

    this._sock.on('connect', () => {
      this._retry = 0;
      console.log(`Connected to ${Chalk.underline(path)}`);
    });
    this._sock.on('data', buffer => {
      let data = JSON.parse(buffer);
      this._pending[data.queryId].resolve(data);
      delete this._pending[data.queryId];
    });
    this._sock.on('end', () => {
      console.log(`Lost connection to ${Chalk.underline(path)}. Attempting to reconnect...`);
      connect();
    });
    this._sock.on('error', err => {
      if (err.code === 'ENOENT') {
        let ms = fallback[this._retry];
        if (this._retry < fallback.length - 1) ++this._retry;
        console.log(`Socket server unavailable. Trying again in ${ms}ms`);
        setTimeout(connect, ms);
      }
    });

    connect();
  }
  // ...
}
And the server:
const sockServer = Net.createServer(c => {
  c.on('data', buffer => {
    let data = JSON.parse(buffer);
    // log('Received',data);
    let ql = queryLogger();
    runQuery(Object.assign({}, data, {schema})).then(result => {
      ql(`${Chalk.magenta('socket')} ${print(data.query).trim()}`);
      let response = Object.assign({}, result, {queryId: data.queryId});
      c.write(JSON.stringify(response));
    });
  });
});

sockServer.on('error', serverError => {
  if (serverError.code === 'EADDRINUSE') {
    let clientSocket = new Net.Socket();
    clientSocket.on('error', clientError => {
      if (clientError.code === 'ECONNREFUSED') {
        FileSystem.unlink(SOCK_FILE, unlinkErr => {
          if (unlinkErr) throw unlinkErr;
          sockServer.listen(SOCK_FILE, () => {
            log(`Sock server improperly shut down. Listening on '${sockServer.address()}'`);
          });
        });
      }
    });
    clientSocket.connect({path: SOCK_FILE}, () => {
      throw new Error(`Server already running`);
    });
  }
});

['SIGTERM','SIGINT'].forEach(signal => process.on(signal, () => {
  console.log(`\rReceived ${Chalk.yellow(signal)}, shutting down ${Chalk.red('❤')}`);
  sockServer.close();
  process.exit();
}));

sockServer.listen(SOCK_FILE, () => {
  log(`Listening on ${Chalk.underline(sockServer.address())}`);
});
When I restart the server, I see "Lost connection" twice on the client. Why?
The documentation says:
Emitted when the other end of the socket sends a FIN packet.
The server isn't sending two "FIN" packets, is it? Is there any way I can verify?
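One way to check (a diagnostic sketch, not from the thread) is to count the 'end' and 'close' events on the same socket instance, shown here with the question's this._sock, before any reconnect logic runs; 'close' also reports whether an error was involved:

let endCount = 0;
this._sock.on('end', () => {
  console.log(`'end' fired ${++endCount} time(s)`);   // FIN received from the peer
});
this._sock.on('close', hadError => {
  console.log(`'close' fired, hadError=${hadError}`); // socket fully closed
});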
Seeing this in docs in regard to connect...
"...This function is asynchronous. When the 'connect' event is emitted the socket is established. If there is a problem connecting, the 'connect' event will not be emitted, the 'error' event will be emitted with the exception."
Could the connect event simply not be firing, making it look to you like the end event fired twice? Like #robertklep said, maybe expand that error check to cover more than that specific code.
I think it's because, on 'end', I immediately try to reconnect and the same event is then caught again. It seems kind of strange that it would do that, but delaying the reconnect to the next tick works:
this._sock.on('end', () => {
  console.log(`${Chalk.yellow('Lost connection')} to ${Chalk.underline(path)}. Attempting to reconnect...`);
  process.nextTick(connect);
});

Catching ECONNRESET error from spawned Node process

I am using the sftps NPM package to connect to an SFTP server with username/password authentication and upload a file. This works beautifully when successful; however, if provided invalid authentication information, after several minutes it emits an ECONNRESET error which crashes my entire application.
Looking at the source of the sftps module, it appears to use child_process.spawn to run the shell commands, and it looks like it should be capturing any errors gracefully:
var sftp = spawn(shellCmd, shellOpts);
var data = "";
var error = "";

sftp.stdout.on('data', function (res) {
  data += res;
});
sftp.stderr.on('data', function (res) {
  error += res;
});

function finished(err) {
  error = error.split('\n').filter(function (line) {
    if (/^Connected to /.test(line)) return false;
    return true;
  }).join('\n');
  data = data.split('\n').filter(function (line) {
    if (/^sftp> /.test(line)) return false;
    return true;
  }).join('\n');
  if (callback) {
    if (err) {
      callback(err, { error: error || null, data: data });
    } else if (error) {
      callback(null, { error: error, data: data });
    } else {
      callback(null, { error: null, data: data });
    }
    callback = null;
  }
}

sftp.on('error', finished);
sftp.on('exit', function (code) {
  if (code === 0) {
    finished();
  } else {
    finished('Nonzero exit code: ' + code);
  }
});

sftp.stdin.write(cmdString, 'utf8');
It looks like it has hooked into the stderr stream as well as capturing the error and exit events on the sub-process.
What else can I do to capture this ECONNRESET error and log it without killing the application?
I don't know if you're having the same problem I was, but I eventually solved it by attaching a handler to stdin's 'error' event. You could try this to see if that's the problem:
sftp.stdin.on("error", function (e) {
  console.log("STDIN ON ERROR");
  console.log(e);
});
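If the error surfaces from inside the sftps module where you can't attach that handler yourself, a last-resort option (my suggestion, not from the thread) is a process-level handler so the ECONNRESET gets logged instead of crashing the app; it's a blunt instrument, so keep it narrow:

process.on('uncaughtException', (err) => {
  if (err && err.code === 'ECONNRESET') {
    console.error('SFTP connection reset:', err);
    return; // log and keep the app alive for this specific case
  }
  console.error('Unexpected error:', err);
  process.exit(1); // anything else should still take the process down
});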
