I need to combine two streams so that they run in parallel. I want to use these streams to write some generated information into two files.
I have written the first stream (stream1). It looks like this:
'use strict';
var fs = require('fs');

var stream1 = fs.createWriteStream("random1.txt");
let ln = 1000000;
let j = 0;

function generateRandom()
{
    var result = true;
    do {
        ln--;
        j++;
        if (ln === 0) {
            stream1.write(Math.random() + '\n'); // write a random number
            stream1.end();
        } else {
            result = stream1.write(Math.random() + '\n'); // write a random number
        }
    } while (ln > 0 && result);
    if (ln > 0) {
        stream1.once('drain', generateRandom);
    }
}

stream1.on('error', (err) => { throw err; });

var stream1Promise = new Promise((resolve, reject) => {
    try {
        generateRandom();
    } catch (error) {
        reject(error);
    }
});

stream1Promise
    .then(function (callback) {
        if (typeof callback === 'function') {
            callback();
        }
    }, function (error) {
        console.log("Error occurred: ", error);
        stream1.end();
    });
But I don't understand how to add a second stream (stream2) so that it writes different random information to another file.
I've tried using process.nextTick in the callback of stream1.write to switch between my streams, but I keep getting a "FATAL ERROR: CALL_AND_RETRY_LAST Allocation failed - process out of memory" error.
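To make the goal clearer, here is a minimal sketch of the structure I think I'm after: the generator parameterized by stream, two writers started in parallel, and a promise per file (the helper name writeRandomNumbers is just something I made up for the sketch):

'use strict';
const fs = require('fs');

// Writes `count` random numbers to the given stream, respecting backpressure,
// and resolves once the stream has finished flushing.
function writeRandomNumbers(stream, count) {
    return new Promise((resolve, reject) => {
        stream.on('error', reject);
        stream.on('finish', resolve);
        (function write() {
            let ok = true;
            while (count > 0 && ok) {
                count--;
                if (count === 0) {
                    stream.write(Math.random() + '\n');
                    stream.end();
                } else {
                    ok = stream.write(Math.random() + '\n');
                }
            }
            if (count > 0) {
                stream.once('drain', write); // resume when the buffer empties
            }
        })();
    });
}

Promise.all([
    writeRandomNumbers(fs.createWriteStream('random1.txt'), 1000000),
    writeRandomNumbers(fs.createWriteStream('random2.txt'), 1000000)
]).then(() => console.log('both files written'))
  .catch((err) => console.error(err));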
Could anyone help me?
Thanks.
What am I trying to do?
I want to clone multiple git repositories from nodejs.
How do I achieve that?
First I have this method in a separate file:
const exec = require("child_process").exec;
const { printError } = require("./printers");

function execute(command) {
    console.log(`Running command: ${command}`);
    let result = "";
    let savedError = undefined;
    return new Promise((resolve, reject) => {
        const proc = exec(command, function (error, stdout, stderr) {
            savedError = savedError || stderr || error;
            result += stdout;
        });
        proc.on("close", (code) => {
            if (code !== 0) {
                console.error(
                    [
                        "================================================================",
                        `Command ${command} failed.`,
                        `Status code: ${code}`,
                        `Error message: ${savedError}`,
                        `Output: ${result}`,
                        "================================================================",
                        "",
                    ].join("\n")
                );
                reject(result);
            } else {
                resolve(result);
            }
        });
    });
}
Then I use it in my main module (the code is abbreviated and a bit simplified, just so you can get the point):
const repoUrls = ["url1", "url2"]; // imagine these are real urls

async function main() {
    const copyOfUrls = [...repoUrls];
    while (copyOfUrls.length) {
        try {
            await execute(
                `git clone ${copyOfUrls[0]} repositories/${repoUrlFileFriendly(
                    copyOfUrls[0]
                )}`
            );
            console.log('fun fact - this console log never happens');
            copyOfUrls.shift();
        } catch (error) {
            console.error("Failed to clone, see error:", error);
        }
    }
}
What is the problem?
Well, the code works, BUT after cloning the first repo the process exits (both the child process and the main one), and the second repository is never even attempted. (Note the console.log('fun fact - this console log never happens');.)
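For comparison, here is a minimal sketch of the flow I'm after, built on util.promisify(exec) instead of my own wrapper (repoUrlFileFriendly is the same helper as above):

const util = require("util");
const execPromise = util.promisify(require("child_process").exec);

async function cloneAll(repoUrls) {
    for (const url of repoUrls) {
        try {
            // Clone one repository and wait for it before starting the next.
            const { stdout, stderr } = await execPromise(
                `git clone ${url} repositories/${repoUrlFileFriendly(url)}`
            );
            console.log(`Cloned ${url}`, stdout, stderr);
        } catch (error) {
            console.error(`Failed to clone ${url}:`, error);
        }
    }
}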
When I read more than one file, it gets stuck and also hangs my PC, so I need to restart it.
With one file, or up to 5 files, it works perfectly, but not with more than 5 files.
If anyone knows this issue, please let me know.
const ffmpegPath = require('@ffmpeg-installer/ffmpeg').path
const ffmpeg = require('fluent-ffmpeg')
ffmpeg.setFfmpegPath(ffmpegPath)

const testFolder = './videos/';
const fs = require('fs');

fs.readdir(testFolder, async (err, files) => {
    try {
        for (let i = 0; i < 10; i++) {
            if (files[i] != '1 Surah Fatiha Dr Israr Ahmed Urdu - 81of81.mp4') {
                let converter = await ffmpeg(`./videos/${files[i]}`)
                await converter.setStartTime('00:00:00').setDuration('30').output(`./outputfolder/${files[i]}`).on('end', function (err) {
                    if (err) {
                        console.log(`err during conversion \n ${err}`)
                    } else {
                        console.log(`Done ${files[i]}`);
                    }
                }).on('error', function (err) {
                    console.log(`error: ${files[i]}`, err)
                }).run()
            }
        }
    } catch (error) {
        console.log(error)
    }
});
Your computer is crashing because fluent-ffmpeg does not return a Promise, so your await has no effect: the loop starts ffmpeg for every video at once, without waiting for the previous video to finish processing, and that consumes all your processing power.
I created a Promise-returning function called renderVideo and changed the index-based for loop to a for...of loop, so that each video can be awaited before the next one starts.
The code looked like this:
const ffmpegPath = require('@ffmpeg-installer/ffmpeg').path
const ffmpeg = require('fluent-ffmpeg')
ffmpeg.setFfmpegPath(ffmpegPath)

const fs = require('fs');
const testFolder = './videos/';

fs.readdir(testFolder, async (err, videos) => {
    try {
        for (let video of videos) {
            if (video != '1 Surah Fatiha Dr Israr Ahmed Urdu - 81of81.mp4') {
                await renderVideo(video)
            }
        }
    } catch (error) {
        console.log(error)
    }

    function renderVideo(video) {
        return new Promise((resolve, reject) => {
            let converter = ffmpeg(`./videos/${video}`)
            converter
                .setStartTime('00:00:00')
                .setDuration('30')
                .output(`./outputfolder/${video}`)
                .on('end', (done) => {
                    resolve(done)
                })
                .on('error', (err) => {
                    reject(`The video ${video} returned with error: ${err}`)
                })
                .run()
        })
    }
})
I also changed the names of some variables so that they make sense in the current code.
I updated the function to create the CSV file but now I'm getting an error:
In upload function
internal/streams/legacy.js:57
throw er; // Unhandled stream error in pipe.
^
Error: ENOENT: no such file or directory, open 'C:\Users\shiv\WebstormProjects\slackAPIProject\billingData\CSV\1548963844106output.csv'
var csvFilePath = '';
var JSONFilePath = '';

function sendBillingData(){
    var message = '';
    axios.get(url, {
        params: {
            token: myToken
        }
    }).then(function (response) {
        message = response.data;
        fields = billingDataFields;
        // saveFiles(message, fields, 'billingData/');
        saveFilesNew(message, fields, 'billingData/');
        var file = fs.createReadStream(__dirname + '/' + csvFilePath); // <--make sure this path is correct
        console.log(__dirname + '/' + csvFilePath);
        uploadFile(file);
    })
    .catch(function (error) {
        console.log(error);
    });
}
The saveFilesNew function is:
function saveFilesNew(message, options, folder){
    try {
        const passedData = message;
        var relevantData = '';
        if (folder == 'accessLogs/'){
            const loginsJSON = message.logins;
            relevantData = loginsJSON;
            console.log(loginsJSON);
        }
        if (folder == 'billingData/'){
            relevantData = passedData.members;
            const profile = passedData.members[0].profile;
        }

        //Save JSON to the output folder
        var date = Date.now();
        var directoryPath = folder + 'JSON/' + date + "output";
        JSONFilePath = directoryPath + '.json';
        fs.writeFileSync(JSONFilePath, JSON.stringify(message, null, 4), function(err) {
            if (err) {
                console.log(err);
            }
        });

        //parse JSON onto the CSV
        const json2csvParser = new Json2csvParser({ fields });
        const csv = json2csvParser.parse(relevantData);
        // console.log(csv);

        //function to process the CSV onto the file
        var directoryPath = folder + 'CSV/' + date + "output";
        csvFilePath = directoryPath + '.csv';
        let data = [];
        let columns = {
            real_name: 'real_name',
            display_name: 'display_name',
            email: 'email',
            account_type: 'account_type'
        };
        var id = passedData.members[0].real_name;
        console.log(id);
        console.log("messageLength is" + Object.keys(message.members).length);
        for (var i = 0; i < Object.keys(message.members).length; i++) {
            console.log("value of i is" + i);
            var display_name = passedData.members[i].profile.display_name;
            var real_name = passedData.members[i].profile.real_name_normalized;
            var email = passedData.members[i].profile.email;
            var account_type = 'undefined';
            console.log("name: " + real_name);
            if (passedData.members[i].is_owner){
                account_type = 'Org Owner';
            }
            else if (passedData.members[i].is_admin){
                account_type = 'Org Admin';
            }
            else if (passedData.members[i].is_bot){
                account_type = 'Bot'
            }
            else account_type = 'User';
            data.push([real_name, display_name, email, account_type]);
        }
        console.log(data);
        stringify(data, { header: true, columns: columns }, (err, output) => {
            if (err) throw err;
            fs.writeFileSync(csvFilePath, output, function(err) {
                console.log(output);
                if (err) {
                    console.log(err);
                }
                console.log('my.csv saved.');
            });
        });
    } catch (err) {
        console.error(err);
    }
}
The upload file function is:
function uploadFile(file){
    console.log("In upload function");
    const form = new FormData();
    form.append('token', botToken);
    form.append('channels', 'testing');
    form.append('file', file);
    axios.post('https://slack.com/api/files.upload', form, {
        headers: form.getHeaders()
    }).then(function (response) {
        var serverMessage = response.data;
        console.log(serverMessage);
    });
}
So I think the error is caused because Node is trying to upload the file before it has been created. I feel like this has something to do with the asynchronous nature of Node.js, but I can't work out how to fix the code. Please let me know how to correct this, and mention any improvements to the code structure/design too.
Thanks!
You don't wait for the callback provided to stringify to be executed, and that is where you create the file. (Assuming this stringify function really does accept a callback.)
Using callbacks (you can make this cleaner with promises and those neat async/await keywords, but let's just stick to callbacks here), it should look more like:
function sendBillingData() {
    ...
    // this callback we'll use to know when the file writing is done, and to get the file path
    saveFilesNew(message, fields, 'billingData/', function(err, csvFilePathArgument) {
        // this we will execute when saveFilesNew calls it, not when saveFilesNew returns, see below
        uploadFile(fs.createReadStream(__dirname + '/' + csvFilePathArgument))
    });
}

// let's name this callback... "callback".
function saveFilesNew(message, options, folder, callback) {
    ...
    var csvFilePath = ...; // local variable only instead of your global
    ...
    stringify(data, { header: true, columns: columns }, (err, output) => {
        if (err) throw err; // or return callback(err);
        fs.writeFile(csvFilePath, output, function(err) { // NOT writeFileSync, or no callback needed
            console.log(output);
            if (err) {
                console.log(err);
                // callback(err); may be a useful approach for error-handling at a higher level
            }
            console.log('my.csv saved.'); // yes, NOW the CSV is saved, not before this executes! Hence:
            callback(null, csvFilePath); // no error, clean process, pass the file path
        });
    });
    console.log("This line is executed before stringify's callback is called!");
    return; // implicitly, yes, yet still synchronous and that's why your version crashes
}
Using callbacks that are called only when the expected events happen (a file is done writing, a buffer/string is done transforming...) allows JS to keep executing code in the meantime. And it really does keep executing code, so when you need data from asynchronous code, you have to tell JS that it must be ready before your piece executes.
Also, since you can pass data when calling back (it's just a function), here I could avoid relying on a global csvFilePath. Relying on higher-level variables makes things monolithic; for example, you could not move saveFilesNew into a dedicated file where you keep your toolkit of file-related functions.
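For the record, here is a rough sketch of the same flow with promises and async/await, the "cleaner" option mentioned above. It assumes the same stringify, axios, url, myToken and uploadFile as in your code, plus a hypothetical buildRows helper standing in for the member loop from your saveFilesNew:

const fs = require('fs');
const { promisify } = require('util');
const stringifyAsync = promisify(stringify); // same callback-style stringify as above

// Builds the CSV and resolves with its path instead of storing it in a global.
async function writeCsv(data, columns, folder) {
    const csvFilePath = folder + 'CSV/' + Date.now() + 'output.csv';
    const output = await stringifyAsync(data, { header: true, columns: columns });
    await fs.promises.writeFile(csvFilePath, output);
    return csvFilePath;
}

async function sendBillingData() {
    const response = await axios.get(url, { params: { token: myToken } });
    const { data, columns } = buildRows(response.data); // hypothetical: the member loop from saveFilesNew
    const csvFilePath = await writeCsv(data, columns, 'billingData/');
    // The file is guaranteed to exist on disk by the time we get here.
    uploadFile(fs.createReadStream(__dirname + '/' + csvFilePath));
}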
Finally, if your global process is like:
function aDayAtTheOffice() {
    sendBillingData();
    getCoffee();
}
then you don't need to wait for the billing data to be processed before you start making coffee. However, if your boss told you that you could NOT get a coffee until the billing data was settled, then your process would look like:
function aDayAtTheOffice() {
    sendBillingData(function (err) {
        // if (err) let's do nothing here: you wanted a coffee anyway, right?
        getCoffee();
    });
}
(Note that callbacks having potential error as first arg and data as second arg is a convention, nothing mandatory.)
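A tiny illustration of that convention with a built-in API (fs.readFile, reading an arbitrary example path):

const fs = require('fs');

// Error-first callback: check err before touching data.
fs.readFile(__dirname + '/billingData/example.json', 'utf8', function (err, data) {
    if (err) {
        return console.error(err); // handle the failure first
    }
    console.log(data);             // only then use the result
});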
IMHO you should read about scope (the callback argument can still be accessed at a point where the call to saveFilesNew is already done and forgotten!), and about the asynchronous nature of No... JavaScript. ;) (Sorry, probably not the best links, but they contain the meaningful keywords, and then Google is your buddy, your friend, your Big Brother.)
I'm having problems handling a file list with async.map. When I pass a list larger than 2045 items, the code throws an error. There is no problem with the actual files, because manual runs and debugging with a few files work fine. Maybe it's not the best code example, but I'm still in the learning process.
var insertInDb = function (err, book_data) {
    count = function(book, cb){
        book_db.findOne({identifier: book['identifier']}, function (err, docs) {
            if (docs !== null) {
                cb('Book exists already', null)
            } else {
                book_db.insert(book, function(err){
                    cb(err, book)
                })
            }
        })
    }
    async.map(book_data, count)
};
var epubData = function (epub, nextEpub) {
    var book_data = {};
    epubParser.open(epub, function (err, epub_data) {
        console.log(epub);
        for (var i of epub_data.easy['simpleMeta']) { // <-- ERROR! TypeError: Cannot read property 'easy' of undefined
            for (var attrname in i) {
                if (attrname.indexOf('dc:') !== -1) {
                    book_data[attrname.split(':')[1]] = i[attrname];
                    book_data['file'] = epub;
                }
            }
        }
        nextEpub(err, book_data)
    })
};
async.map(full_files_path.slice(0, 2045), epubData, insertInDb);
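For reference, a variant that caps how many files are processed at once with async.mapLimit (arbitrary limit of 50, everything else unchanged) would look like this:

// Same pipeline, but at most 50 epubs are parsed concurrently.
async.mapLimit(full_files_path.slice(0, 2045), 50, epubData, insertInDb);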
I am using the sftps npm package to connect to an SFTP server using username/password authentication and upload a file. This works beautifully when it succeeds; however, if I provide invalid authentication information, after several minutes it emits an ECONNRESET error which crashes my entire application.
Looking at the source of the sftps module, it appears to use child_process.spawn to run the shell commands, and looks like it should be capturing any errors gracefully:
var sftp = spawn(shellCmd, shellOpts);
var data = "";
var error = "";

sftp.stdout.on('data', function (res) {
    data += res;
});

sftp.stderr.on('data', function (res) {
    error += res;
});

function finished(err) {
    error = error.split('\n').filter(function (line) {
        if (/^Connected to /.test(line)) return false;
        return true;
    }).join('\n');
    data = data.split('\n').filter(function (line) {
        if (/^sftp> /.test(line)) return false;
        return true;
    }).join('\n');
    if (callback) {
        if (err) {
            callback(err, { error: error || null, data: data });
        } else if (error) {
            callback(null, { error: error, data: data });
        } else {
            callback(null, { error: null, data: data });
        }
        callback = null;
    }
}

sftp.on('error', finished);
sftp.on('exit', function (code) {
    if (code === 0) {
        finished();
    } else {
        finished('Nonzero exit code: ' + code);
    }
});

sftp.stdin.write(cmdString, 'utf8');
It looks like it has hooked into the stderr stream as well as capturing the error and exit events on the sub-process.
What else can I do to capture this ECONNRESET error and log it without killing the application?
I don't know if you're having the same problem that I was having, but I eventually solved it by attaching a handler with stdin.on("error"). You could try this to see if that's the problem:
sftp.stdin.on("error", function (e)
{
console.log("STDIN ON ERROR");
console.log(e);
});
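For context, in the module code quoted in the question the handler would sit alongside the other listeners. A sketch (assuming you patch or fork sftps, reusing its finished helper) might look like:

var sftp = spawn(shellCmd, shellOpts);

// Keep an 'error' on stdin (e.g. EPIPE/ECONNRESET when the remote end drops
// the connection) from becoming an uncaught exception that kills the process.
sftp.stdin.on('error', function (err) {
    finished(err);
});

sftp.stdin.write(cmdString, 'utf8');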