How do I close fs.createWriteStream? - node.js

I am trying to use the node module vtt2srt to convert a VTT string to an SRT file and save the output. It works once, and my subtitles are saved correctly, but if I hit the endpoint a second time node crashes with this error:
Error: write after end
at writeAfterEnd
I have tried all combinations of .close() and .on('close').
I send a unique vid and the VTT data from the frontend:
router.post('/downloadsubs', function (req, res, next) {
  var vttObj = webvtt.compile(req.body.data);
  fs.unlink(__dirname + '/../static/videos/' + req.body.vid + '/subtitles.srt', function () {
    srtStream.write(vttObj);
    var writestream = fs.createWriteStream(__dirname + '/../static/videos/' + req.body.vid + '/subtitles.srt');
    srtStream.end();
    srtStream.pipe(writestream);
    res.send(req.body.vid);
  });
});

I worked out what my problem was; I hope it can be useful to someone else one day.
Previously I was requiring my module at the head of my router file:
const vtt2srt = require('node-vtt-to-srt');
const srtStream = vtt2srt();
router.post('/downloadsubs', function (req, res, next) {
  var vttObj = webvtt.compile(req.body.data);
  fs.unlink(__dirname + '/../static/videos/' + req.body.vid + '/subtitles.srt', function () {
    srtStream.write(vttObj);
    srtStream.end();
    var writestream = fs.createWriteStream(__dirname + '/../static/videos/' + req.body.vid + '/subtitles.srt');
    srtStream.pipe(writestream);
    writestream.on('finish', function () { res.send(req.body.vid); });
  });
});
Now, instead, I am creating a new srtStream in the router method:
const vtt2srt = require('node-vtt-to-srt');
router.post('/downloadsubs', function (req, res, next) {
  var srtStream = vtt2srt();
  var vttObj = webvtt.compile(req.body.data);
  fs.unlink(__dirname + '/../static/videos/' + req.body.vid + '/subtitles.srt', function () {
    srtStream.write(vttObj);
    srtStream.end();
    var writestream = fs.createWriteStream(__dirname + '/../static/videos/' + req.body.vid + '/subtitles.srt');
    srtStream.pipe(writestream);
    writestream.on('finish', function () { res.send(req.body.vid); });
  });
});
And it works: a writable stream can only be ended once, so the shared srtStream from the first version had already finished when the second request tried to write to it, which is exactly what the "write after end" error means.
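For anyone hitting the same error, here is a tiny standalone illustration (not from the original post) of why reusing one stream across requests fails: once end() has been called, any further write() raises "write after end".

const { PassThrough } = require('stream');

const srtStream = new PassThrough();
srtStream.end('first request');      // the stream is now finished
srtStream.write('second request');   // emits Error [ERR_STREAM_WRITE_AFTER_END]: write after end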

Related

Video compression with ffmpeg in Node.js for multiple users?

I have one query: I am using the ffmpeg package in Node.js to compress videos on the server side. My question is, when many users (say 100k) upload videos at the same time, will the compress-then-upload part still work? I have only tested with 2 or 3 users so far, and it works fine. Thanks in advance.
const ffmpeg = require('fluent-ffmpeg');
const ffmpegInstaller = require('@ffmpeg-installer/ffmpeg');
ffmpeg.setFfmpegPath(ffmpegInstaller.path);
const fs = require('fs');
// var filename = 'videos/big_buck_bunny_480p_10mb.mp4';

(function () {
  var ffmpeg = require('fluent-ffmpeg');

  function baseName(str) {
    var base = new String(str).substring(str.lastIndexOf('/') + 1);
    if (base.lastIndexOf(".") != -1) {
      base = base.substring(0, base.lastIndexOf("."));
    }
    return base;
  }

  var args = process.argv.slice(2);
  args.forEach(function (val, index, array) {
    var filename = val;
    var basename = baseName(filename);
    console.log(index + ': Input File ... ' + filename);
    ffmpeg(filename)
      // .output(basename + '-720x720_8.mp4')
      // .videoCodec('libx264')
      // .noAudio()
      // .aspect("1:1")
      // .size('720x720')
      // .videoFilters('crop=540:960:540:960')
      // .videoFilters('crop=540:960')
      // .aspectRatio("16:9")
      // .videoFilters('crop=720:720:580:1000')
      // .addOption('-vf scale=-2:720')
      // .videoFilters('crop=720:720:0:0.21')
      // .addOption('-vf setdar=1')
      // .videoFilters('crop=720:720:-540:960')
      // .preset('divx')
      .output(basename + '-720x720_22.mp4')
      .videoCodec('libx264')
      .addOption('-vf scale=720:-2') // landscape
      // .addOption('-vf scale=-2:720') // portrait
      // .addOption('-vf scale=480:480') // rectangle
      .on('error', function (err) {
        console.log('An error occurred: ' + err.message);
      })
      .on('progress', function (progress) {
        console.log('... frames: ' + progress.frames);
      })
      .on('end', function () {
        console.log('Finished processing');
      })
      .run();
  });
})();
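One common way to keep this working under heavy load is to queue the compression jobs and cap how many ffmpeg processes run at the same time, rather than spawning one per upload. A minimal sketch (MAX_CONCURRENT, compress and the queue are assumed names, not from the post):

const ffmpeg = require('fluent-ffmpeg');

const MAX_CONCURRENT = 4;   // hypothetical limit, tune for the server's CPU/RAM
let running = 0;
const queue = [];

// Enqueue a compression job; resolves when ffmpeg finishes this file.
function compress(input, output) {
  return new Promise((resolve, reject) => {
    queue.push({ input, output, resolve, reject });
    next();
  });
}

// Start queued jobs while we are under the concurrency cap.
function next() {
  if (running >= MAX_CONCURRENT || queue.length === 0) return;
  const job = queue.shift();
  running++;
  ffmpeg(job.input)
    .output(job.output)
    .videoCodec('libx264')
    .addOption('-vf scale=720:-2')
    .on('end', () => { running--; job.resolve(); next(); })
    .on('error', (err) => { running--; job.reject(err); next(); })
    .run();
}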

Stop nodejs child_process with browser api call

I have Vue (axios) making a GET call to an Express route which triggers a child_process of ffmpeg in an infinite loop. ffmpeg streams one file over UDP; on close it calls itself again and streams another file.
I'd like to be able to kill this process from a button on a web page, but can't seem to work it out.
This is my Express route code:
router.get('/test', function (req, res) {
  const childProcess = require('child_process');
  const fs = require('fs');
  const path = require('path');

  // Grabs a random index between 0 and length
  function randomIndex(length) {
    return Math.floor(Math.random() * (length));
  }

  function Stream() {
    const FILE_SRC = '/path/to/file';
    // Read the directory and get the files
    const dirs = fs.readdirSync(FILE_SRC)
      .map(file => {
        return path.join(FILE_SRC, file);
      });
    const srcs_dup = [];
    const hashCheck = {}; // used to check if the file was already added to srcs_dup
    var numberOfFiles = dirs.length - 1; // OR whatever # you want
    console.log(numberOfFiles);
    // While we haven't got the number of files we want. Loop.
    while (srcs_dup.length < numberOfFiles) {
      var fileIndex = randomIndex(dirs.length - 1);
      // Check if the file was already added to the array
      if (hashCheck[fileIndex] == true) {
        continue; // Already have that file. Skip it
      }
      // Add the file to the array and object
      srcs_dup.push(dirs[fileIndex]);
      hashCheck[fileIndex] = true;
    }
    var chosen = "'" + srcs_dup[0] + "'";
    var call = "ffmpeg -re -i " + chosen + " -content_type audio/mpeg -f mp3 udp://224.1.2.3:1234";
    const stop = childProcess.exec(call, { shell: true });
    stop.stdout.on('data', function (data) {
      console.log('stdout: ' + data.toString());
    });
    stop.stderr.on('data', (data) => {
      console.log(`stderr: ${data}`);
    });
    stop.on('close', (code) => {
      console.log('child exited with code ' + code);
      Stream();
    });
    stop.on('error', function (err) {
      console.log('sh error' + err);
    });
  }

  // (the snippet in the question ends here; presumably the route then starts the loop and responds)
  Stream();
  res.send('stream started');
});
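One way to stop this from the browser (a sketch, not from the thread: current, stopped and the /stop route are assumed names) is to keep the child-process reference outside the route handler, guard the restart with a flag, and expose a second route that kills it:

const childProcess = require('child_process');
let current = null;     // the running ffmpeg child
let stopped = false;

function Stream() {
  // ...pick a random file and build `call` exactly as in the question, then:
  current = childProcess.exec(call, { shell: true });
  current.on('close', (code) => {
    console.log('child exited with code ' + code);
    if (!stopped) Stream();               // only restart if nobody asked us to stop
  });
}

router.get('/test', function (req, res) {
  stopped = false;
  Stream();
  res.send('stream started');
});

router.get('/stop', function (req, res) {
  stopped = true;
  // ChildProcess#kill sends the signal; note that with exec the signal goes to the shell,
  // so spawning ffmpeg directly (child_process.spawn) makes the kill more reliable.
  if (current) current.kill('SIGTERM');
  res.send('stream stopped');
});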

Download a file from S3 to Local machine

I am trying to download an audio (mp3) file from AWS S3 to the local computer. It works fine when I execute it on localhost, but after deploying the same code to AWS it downloads the file to the server machine instead of the user's local machine.
I tried these two versions; both behave the same way.
Version 1:
const key = track.audio_transcode_filename.substring(20);
var s3Client = knox.createClient(envConfig.S3_BUCKET_TRACKS);
const os = require('os');
const downloadPath = os.homedir().toString();
const config = require('../../config/environment');
const fs = require('fs');

var filePath = downloadPath + "\\Downloads\\" + track.formatted_title + ".mp3";
if (fs.existsSync(filePath)) {
  var date = new Date();
  var timestamp = date.getTime();
  filePath = downloadPath + "\\Downloads\\" + track.formatted_title + "_" + timestamp + ".mp3";
}
const file = fs.createWriteStream(filePath);
s3Client.getFile(key, function (err, res) {
  res.on('data', function (data) { file.write(data); });
  res.on('end', function (chunk) { file.end(); });
});
Version 2:
var audioStream = '';
s3Client.getFile(key, function (err, res) {
  res.on('data', function (chunk) { audioStream += chunk; });
  res.on('end', function () { fs.writeFile(filePath + track.formatted_title + ".mp3", audioStream, 'binary'); });
});
Thanks,
Kanth
Instead of getting the file and sending to client again, how about getting the url of the file and redirecting the client?
Something like:
s3Client.getResourceUrl(key, function (err, resourceUrl) {
  res.redirect(resourceUrl);
});
You'll need to send it to the user. Since you have an Express server, the user can fetch the file through your API endpoint.
After everything you have done in your question, send the downloaded object back with:
res.sendFile('/path/to/downloaded/s3/object')
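A minimal sketch of what such an endpoint might look like (the route path and file names are placeholders, not from the answer); res.download sets the Content-Disposition header so the browser saves the file:

router.get('/tracks/:id/download', function (req, res) {
  // filePath: wherever the code above saved the S3 object on the server
  var filePath = '/path/to/downloaded/s3/object';
  res.download(filePath, 'track.mp3', function (err) {
    if (err) console.error(err);
  });
});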
Thank you both @Rashomon and @Martin do santos.
I had to add a client-side script to read the response stream and download the file in the following way:
downloadTrack(track).then((result) => {
  // var convertedBuffer = new Uint8Array(result.data);
  const url = window.URL.createObjectURL(new Blob([result.data], { type: 'audio/mpeg' }));
  const link = document.createElement('a');
  link.href = url;
  link.setAttribute('download', track.formatted_title + '.mp3');
  document.body.appendChild(link);
  link.click();
}, (error) => {
  console.error(error);
});

Downloading an audio file with Express API and ytdl

I'm trying to download the audio of a YouTube video using the ytdl-core module (https://github.com/fent/node-ytdl-core).
I wrote an API using Express which lets me download the audio by its URL:
app.get('/api/downloadYoutubeVideo', function (req, res) {
  res.set('Content-Type', 'audio/mpeg');
  var videoUrl = req.query.videoUrl;
  var videoName;
  ytdl.getInfo(videoUrl, function (err, info) {
    videoName = info.title.replace('|', '').toString('ascii');
    res.set('Content-Disposition', 'attachment; filename=' + videoName + '.mp3');
  });
  var videoWritableStream = fs.createWriteStream('C:\\test' + '\\' + videoName); // some path on my computer (exists!)
  var videoReadableStream = ytdl(videoUrl, { filter: 'audioonly' });
  var stream = videoReadableStream.pipe(videoWritableStream);
});
The problem is that when I call this API I get a 504 error from my server.
I want to be able to save this downloaded audio on my local disk.
Help would be appreciated. Thank you
It turned out videoName was undefined, which messed up my function: ytdl.getInfo is asynchronous, so fs.createWriteStream ran before the callback had set the name.
Here is the corrected code after a few changes, with the destination path added as a query variable.
app.get('/api/downloadYoutubeVideo', function (req, res) {
  var videoUrl = req.query.videoUrl;
  var destDir = req.query.destDir;
  var videoReadableStream = ytdl(videoUrl, { filter: 'audioonly' });
  ytdl.getInfo(videoUrl, function (err, info) {
    var videoName = info.title.replace('|', '').toString('ascii');
    var videoWritableStream = fs.createWriteStream(destDir + '\\' + videoName + '.mp3');
    var stream = videoReadableStream.pipe(videoWritableStream);
    stream.on('finish', function () {
      res.writeHead(204);
      res.end();
    });
  });
});
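If the goal is for the caller to receive the audio rather than have it saved on the server's disk, a sketch of the alternative (route name and filename are assumptions) is to pipe the ytdl stream straight into the HTTP response:

app.get('/api/streamYoutubeAudio', function (req, res) {
  var videoUrl = req.query.videoUrl;
  res.set('Content-Type', 'audio/mpeg');
  res.set('Content-Disposition', 'attachment; filename="audio.mp3"');
  // ytdl() returns a readable stream, so it can be piped directly to the response
  ytdl(videoUrl, { filter: 'audioonly' }).pipe(res);
});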

In which format should I send a file to save it in GridFS?

Hi everyone,
I'm working on a project using Node.js, and I would like to know in which format my Node client must send the file to the server (is it base64 format, or something else?).
My client is:
// client.js
$('#file').on('change', function (e) {
  encode64(this);
});

function encode64(input) {
  if (input.files) {
    chap.emit('test', { "test": input.files[0] });
    var FR = new FileReader();
    FR.readAsDataURL(input.files[0]);
    FR.onload = function (e) {
      chap.emit('test', { "test": e.target.result });
    };
  }
}
My server side is:
socket.on('test', function (e) {
  var gs = new gridStore(db, e.test, "w");
  gs.writeFile(new Buffer(e.test, "base64"), function (err, calb) {
    if (!err)
      console.log('bien passe');
    else
      console.log('erreur');
  });
});
But this doesn't work; I get this error:
TypeError: Bad argument
at Object.fs.fstat (fs.js:667:11)
Could anyone help me?
Normally this is how you store into GridFS. I have used it to store files; hope it works.
var fs = require('fs');
var multiparty = require('multiparty');
// `gfs` must be a gridfs-stream instance created from an open MongoDB connection
// (requiring 'gridfs-stream' alone only gives you the constructor; see the setup sketch below)

var form = new multiparty.Form();
form.parse(req, function (err, fields, files) {
  var file = files.file[0];
  var filename = file.originalFilename; // original filename of the upload
  var contentType = file.headers['content-type'];
  console.log(files);
  var tmpPath = file.path; // temporary path created by multiparty
  var writestream = gfs.createWriteStream({ filename: filename });
  // open a stream to the temporary file...
  fs.createReadStream(tmpPath)
    // ...and pipe it to GridFS
    .pipe(writestream);
  writestream.on('close', function (file) {
    // do something with `file`, then respond
    res.send(file.filename);
  });
});
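The snippet above assumes gfs is already connected. A minimal sketch of that setup with gridfs-stream and mongoose (the connection URI is a placeholder):

var mongoose = require('mongoose');
var Grid = require('gridfs-stream');
Grid.mongo = mongoose.mongo;

var conn = mongoose.createConnection('mongodb://localhost/mydb'); // placeholder URI
var gfs;
conn.once('open', function () {
  gfs = Grid(conn.db);   // gfs.createWriteStream(...) can be used from here on
});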
