Video compression with ffmpeg in Node.js for multiple users? - node.js

I have a query: I am using the ffmpeg package in Node.js to compress video on the server side. My question is, when many users (say 100k) upload videos at the same time, will the compress-then-upload flow still work? I have currently checked with 2 or 3 users and it works fine. Thanks in advance.
const ffmpeg = require('fluent-ffmpeg');
const ffmpegInstaller = require('@ffmpeg-installer/ffmpeg');
ffmpeg.setFfmpegPath(ffmpegInstaller.path);

// Strip the directory and extension from a path to get a base name.
function baseName(str) {
  var base = String(str).substring(str.lastIndexOf('/') + 1);
  if (base.lastIndexOf('.') !== -1) {
    base = base.substring(0, base.lastIndexOf('.'));
  }
  return base;
}

var args = process.argv.slice(2);
args.forEach(function (val, index) {
  var filename = val;
  var basename = baseName(filename);
  console.log(index + ': Input File ... ' + filename);
  ffmpeg(filename)
    .output(basename + '-720x720_22.mp4')
    .videoCodec('libx264')
    .addOption('-vf scale=720:-2') // landscape
    //.addOption('-vf scale=-2:720') // portrait
    //.addOption('-vf scale=480:480') // square
    .on('error', function (err) {
      console.log('An error occurred: ' + err.message);
    })
    .on('progress', function (progress) {
      console.log('... frames: ' + progress.frames);
    })
    .on('end', function () {
      console.log('Finished processing');
    })
    .run();
});
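A note on the scaling question: spawning one ffmpeg process per upload will not survive heavy concurrency, since CPU and memory are exhausted long before ffmpeg itself fails. A common approach is to cap how many transcodes run at once and queue the rest. Below is a minimal in-process sketch of such a cap; the transcode() helper and the limit of 4 are illustrative assumptions, not part of the original code.

const ffmpeg = require('fluent-ffmpeg');

// Minimal in-process job queue: at most MAX_CONCURRENT ffmpeg runs at a time.
const MAX_CONCURRENT = 4; // tune to the number of cores on the box
const pending = [];
let running = 0;

function enqueue(job) {
  pending.push(job);
  runNext();
}

function runNext() {
  if (running >= MAX_CONCURRENT || pending.length === 0) return;
  running++;
  const job = pending.shift();
  transcode(job.input, job.output)
    .then(() => console.log('done: ' + job.output))
    .catch((err) => console.error('failed: ' + err.message))
    .finally(() => {
      running--;
      runNext(); // pull the next job off the queue
    });
}

// Hypothetical wrapper around the fluent-ffmpeg call from the question.
function transcode(input, output) {
  return new Promise((resolve, reject) => {
    ffmpeg(input)
      .output(output)
      .videoCodec('libx264')
      .addOption('-vf scale=720:-2')
      .on('error', reject)
      .on('end', resolve)
      .run();
  });
}

For genuine 100k-user scale you would typically move transcoding out of the web process entirely, into a persistent job queue (for example BullMQ backed by Redis) consumed by a pool of worker machines.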

Related

How to zip a single file with Archiver

I am trying to zip a single file using the Archiver npm package located at https://www.npmjs.com/package/archiver
I have been able to use the following to zip a directory:
archive.directory(folderName, false);
But when I try either of these, nothing seems to happen (i.e. no zip is generated and the file never finishes zipping):
archive.file(folderName, { name: 'file4.txt' });
archive.file(fs.createReadStream(path.resolve(file)), {name: 'File' + singleFileCheck});
Has anyone run into this issue before? Please let me know what I am doing wrong. Thank you in advance!
edit:
module.exports = async function zipper(user, pass, orgid, s4url, apiToken, newOrgName, file) {
  const s4 = require('../testcli/s4');
  const fs = require('fs');
  const archiver = require('archiver');
  const path = require('path');
  var parentDirect;
  if (file == "./") {
    parentDirect = "..";
  } else {
    parentDirect = path.basename(path.dirname(file));
  }
  const newZipFile = parentDirect + '/s4.zip';
  var folderName = file;
  // Guard against zipping a parent folder into itself (infinite loop)
  if (path.resolve(parentDirect).length > path.resolve(folderName).length) {
    console.log(folderName.search(parentDirect));
    console.error('\x1b[36m%s\x1b[0m', 'ERROR!!!! : Please adjust where your console is pointed, this will result in an infinite loop. Exiting.');
    return;
  }
  var P = ['\\', '|', '/', '-']; // spinner frames
  var x = 0;
  var output = fs.createWriteStream(newZipFile);
  var archive = archiver('zip');
  var scansdisplayinterval = setInterval(function () {
    twrl();
  }, 250);
  // listen for all archive data to be written
  output.on('close', function () {
    console.log('\x1b[36m%s\x1b[0m', archive.pointer() + ' total bytes');
    console.log('\x1b[36m%s\x1b[0m', 'archiver has been finalized and the output file descriptor has closed.');
    try {
      process.stdout.write(newZipFile);
      clearInterval(scansdisplayinterval);
      s4(user, pass, newZipFile, orgid, s4url, apiToken, newOrgName);
    } catch (e) {
      console.log(e);
    }
  });
  // good practice to catch this error explicitly
  archive.on('error', function (err) {
    throw err;
  });
  // good practice to catch warnings (i.e. stat failures and other non-blocking errors)
  archive.on('warning', function (err) {
    throw err;
  });
  // This event is fired when the data source has been drained, no matter what the data source was.
  output.on('end', function () {
    console.log('\x1b[36m%s\x1b[0m', 'Data has been drained');
  });
  // pipe archive data to the file
  archive.pipe(output);
  // Check for a file extension
  let singleFileCheck = path.extname(file);
  if (singleFileCheck.length <= 4 && singleFileCheck != '') {
    // If it has an extension, append the single file
    console.log('singleFile', path.resolve(file));
    archive.file(path.resolve(file), { name: 'file4.txt' });
    // archive.append(fs.createReadStream(path.resolve(file)), { name: 'File' + singleFileCheck });
  } else {
    // Otherwise treat it as a folder: append its contents at the root of the archive
    archive.directory(folderName, false);
  }
  console.log('\x1b[36m%s\x1b[0m', "Zipping: " + folderName + " To: " + newZipFile);
  console.log('\x1b[36m%s\x1b[0m', "Zipping To: " + path.resolve(newZipFile));
  archive.finalize();
  function twrl() {
    process.stdout.write('\rZipping Folder ... ' + P[x++]);
    x &= 3; // wrap spinner index 0..3
  }
  return newZipFile;
};
The issue came from how I was defining the parentDirect var.
Solution:
let singleFileCheck = path.extname(file);
if (file == "./" || (singleFileCheck.length <= 4 && singleFileCheck != '')) {
  parentDirect = "..";
} else {
  parentDirect = path.basename(path.dirname(file));
}
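For reference, a minimal standalone sketch of zipping one file with Archiver; the file names here are illustrative:

const fs = require('fs');
const archiver = require('archiver');

// Zip a single file into out.zip (paths are illustrative).
const output = fs.createWriteStream('out.zip');
const archive = archiver('zip');

output.on('close', () => {
  console.log(archive.pointer() + ' total bytes written');
});
archive.on('error', (err) => { throw err; });

archive.pipe(output);
// Either form works: pass a path, or append a readable stream.
archive.file('notes.txt', { name: 'notes.txt' });
// archive.append(fs.createReadStream('notes.txt'), { name: 'notes.txt' });
archive.finalize();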

How do I close fs.createWriteStream?

I am trying to use the node module vtt2srt to convert a VTT string to SRT file and save the output. It works once, and my subtitles are saved correctly, but if I hit the endpoint a second time node crashes with this error:
Error: write after end
at writeAfterEnd
I have tried all combinations of .close and .on('close').
I send a unique vid and the VTT data from the frontend:
router.post('/downloadsubs', function (req, res, next) {
  var vttObj = webvtt.compile(req.body.data);
  fs.unlink(__dirname + '/../static/videos/' + req.body.vid + '/subtitles.srt', function () {
    srtStream.write(vttObj);
    var writestream = fs.createWriteStream(__dirname + '/../static/videos/' + req.body.vid + '/subtitles.srt');
    srtStream.end();
    srtStream.pipe(writestream);
    res.send(req.body.vid);
  });
});
I worked out what my problem was; I hope it can be useful to someone else one day.
Previously I was requiring my module at the head of my router file:
const vtt2srt = require('node-vtt-to-srt');
const srtStream = vtt2srt();
router.post('/downloadsubs', function (req, res, next) {
  var vttObj = webvtt.compile(req.body.data);
  fs.unlink(__dirname + '/../static/videos/' + req.body.vid + '/subtitles.srt', function () {
    srtStream.write(vttObj);
    srtStream.end();
    var writestream = fs.createWriteStream(__dirname + '/../static/videos/' + req.body.vid + '/subtitles.srt');
    srtStream.pipe(writestream);
    writestream.on('finish', function () { res.send(req.body.vid); });
  });
});
Now, instead, I am creating a new srtStream in the router method:
const vtt2srt = require('node-vtt-to-srt');
router.post('/downloadsubs', function (req, res, next) {
  var srtStream = vtt2srt(); // fresh stream per request
  var vttObj = webvtt.compile(req.body.data);
  fs.unlink(__dirname + '/../static/videos/' + req.body.vid + '/subtitles.srt', function () {
    srtStream.write(vttObj);
    srtStream.end();
    var writestream = fs.createWriteStream(__dirname + '/../static/videos/' + req.body.vid + '/subtitles.srt');
    srtStream.pipe(writestream);
    writestream.on('finish', function () { res.send(req.body.vid); });
  });
});
And it works: each request now gets its own srtStream, so the second request no longer writes to a stream that has already ended.

Stop Node.js child_process with a browser API call

I have Vue (axios) making a GET call to an Express route which triggers a child_process of ffmpeg in an infinite loop. ffmpeg streams one file over UDP; on close, it calls itself again and streams another file.
I'd like to be able to kill this process from a button on a web page, but can't seem to work it out.
This is my express route code
router.get('/test', function (req, res) {
  const childProcess = require('child_process');
  const fs = require('fs');
  const path = require('path');
  // Grabs a random index between 0 and length
  function randomIndex(length) {
    return Math.floor(Math.random() * length);
  }
  function Stream() {
    const FILE_SRC = '/path/to/file';
    // Read the directory and get the files
    const dirs = fs.readdirSync(FILE_SRC)
      .map(file => path.join(FILE_SRC, file));
    const srcs_dup = [];
    const hashCheck = {}; // used to check if the file was already added to srcs_dup
    var numberOfFiles = dirs.length - 1; // OR whatever # you want
    console.log(numberOfFiles);
    // While we haven't got the number of files we want, loop.
    while (srcs_dup.length < numberOfFiles) {
      var fileIndex = randomIndex(dirs.length - 1);
      // Check if the file was already added to the array
      if (hashCheck[fileIndex] == true) {
        continue; // Already have that file. Skip it
      }
      // Add the file to the array and object
      srcs_dup.push(dirs[fileIndex]);
      hashCheck[fileIndex] = true;
    }
    var chosen = "'" + srcs_dup[0] + "'";
    var call = "ffmpeg -re -i " + chosen + " -content_type audio/mpeg -f mp3 udp://224.1.2.3:1234";
    const stop = childProcess.exec(call, { shell: true });
    stop.stdout.on('data', function (data) {
      console.log('stdout: ' + data.toString());
    });
    stop.stderr.on('data', (data) => {
      console.log(`stderr: ${data}`);
    });
    stop.on('close', (code) => {
      console.log('child exited with code ' + code);
      Stream(); // loop: start streaming the next file
    });
    stop.on('error', function (err) {
      console.log('sh error' + err);
    });
  }
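One way to approach this (a sketch, not a solution from the original post): keep the ChildProcess handle and a "keep looping" flag in module scope, and add a second route that clears the flag and kills the process. The /stop route and the keepLooping flag are illustrative assumptions.

const childProcess = require('child_process');

let current = null;      // handle to the running ffmpeg process
let keepLooping = false; // controls whether 'close' respawns ffmpeg

function stream(call) {
  current = childProcess.exec(call, { shell: true });
  current.on('close', () => {
    // Only restart if we haven't been asked to stop
    if (keepLooping) stream(call);
  });
}

router.get('/test', (req, res) => {
  keepLooping = true;
  stream("ffmpeg -re -i 'somefile.mp3' -content_type audio/mpeg -f mp3 udp://224.1.2.3:1234");
  res.send('streaming started');
});

// Illustrative stop route: clear the flag first so 'close' doesn't respawn
router.get('/stop', (req, res) => {
  keepLooping = false;
  if (current) current.kill('SIGKILL');
  res.send('streaming stopped');
});

One caveat: because exec runs the command through a shell, killing the child may only kill the shell on some platforms, leaving ffmpeg running; in that case you may need to spawn ffmpeg directly or kill the whole process group.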

Saving blobs as a single webm file

I'm recording the user's screen via WebRTC, and then posting video blobs every x seconds using MediaStreamRecorder. On the server side I have an action set up in Sails which saves the blob as a webm file.
The problem is that I can't get it to append the data and create one large webm file. When it appends, the file size increases as expected, so the data is being written, but when I go to play the file it will either play only the first second, not play at all, or play but not show the video.
It would be possible to merge the files with ffmpeg, but I'd rather avoid this if at all possible.
Here's the code on the client:
'use strict';
// Polyfill in Firefox.
// See https://blog.mozilla.org/webrtc/getdisplaymedia-now-available-in-adapter-js/
if (typeof adapter != 'undefined' && adapter.browserDetails.browser == 'firefox') {
  adapter.browserShim.shimGetDisplayMedia(window, 'screen');
}
io.socket.post('/processvideo', function (resData) {
  console.log("Response: " + resData);
});
function handleSuccess(stream) {
  const video = document.querySelector('video');
  video.srcObject = stream;
  var mediaRecorder = new MediaStreamRecorder(stream);
  mediaRecorder.mimeType = 'video/webm';
  mediaRecorder.ondataavailable = function (blob) {
    console.log("Sending Data");
    io.socket.post('/processvideo', { "vidblob": blob }, function (resData) {
      console.log("Response: " + resData);
    });
  };
  mediaRecorder.start(3000); // emit a blob every 3 seconds
}
function handleError(error) {
  errorMsg(`getDisplayMedia error: ${error.name}`, error);
}
function errorMsg(msg, error) {
  const errorElement = document.querySelector('#errorMsg');
  errorElement.innerHTML += `<p>${msg}</p>`;
  if (typeof error !== 'undefined') {
    console.error(error);
  }
}
if ('getDisplayMedia' in navigator) {
  navigator.getDisplayMedia({ video: true })
    .then(handleSuccess)
    .catch(handleError);
} else {
  errorMsg('getDisplayMedia is not supported');
}
Code on the server:
module.exports = async function processVideo(req, res) {
  var fs = require('fs'),
      path = require('path'),
      upload_dir = './assets/media/uploads',
      output_dir = './assets/media/outputs',
      temp_dir = './assets/media/temp';
  var params = req.allParams();
  if (req.isSocket && req.method === 'POST') {
    _upload(params.vidblob, "test.webm");
    return res.send("Hi There");
  } else {
    return res.send("Unknown Error");
  }
  function _upload(file_content, file_name) {
    var fileRootName = file_name.split('.').shift(),
        fileExtension = file_name.split('.').pop(),
        filePathBase = upload_dir + '/',
        fileRootNameWithBase = filePathBase + fileRootName,
        filePath = fileRootNameWithBase + '.' + fileExtension,
        fileID = 2;
    /* Save all of the files as different files. */
    /*
    while (fs.existsSync(filePath)) {
      filePath = fileRootNameWithBase + fileID + '.' + fileExtension;
      fileID += 1;
    }
    fs.writeFileSync(filePath, file_content);
    */
    /* Appends the binary data like you'd expect, but it's not playable. */
    fs.appendFileSync(upload_dir + '/' + 'test.file', file_content);
  }
}
Any help would be greatly appreciated!
I decided this would be difficult to develop and wouldn't really fit the project's requirements, so I built an Electron app instead. Just posting this so I can resolve the question.
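For anyone who does want to try merging with ffmpeg (mentioned above as a fallback), here is a rough sketch assuming each blob has been saved as its own standalone webm file (chunk0.webm, chunk1.webm, ...; all names illustrative), using the concat demuxer:

const fs = require('fs');
const { execFile } = require('child_process');

// Assume each blob was saved as chunk0.webm, chunk1.webm, ... in uploadDir.
function mergeChunks(uploadDir, count, outFile, cb) {
  // Build the list file the concat demuxer expects: one "file 'path'" per line.
  const list = [];
  for (let i = 0; i < count; i++) {
    list.push("file '" + uploadDir + '/chunk' + i + ".webm'");
  }
  fs.writeFileSync(uploadDir + '/list.txt', list.join('\n'));
  // Re-encode rather than use -c copy, since each chunk's timestamps start at zero.
  execFile('ffmpeg', [
    '-f', 'concat', '-safe', '0',
    '-i', uploadDir + '/list.txt',
    outFile
  ], cb);
}

Whether this works depends on the recorder producing self-contained chunks; blobs sliced out of one continuous recording session are not independent webm files, which is also why appending them to a single file does not produce a playable video.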

Node.js: How to delete the first line in a file

I'm making a simple Node.js app and I need to delete the first line of a file. Is there any way to do it? I think it should be possible with fs.write, but how?
Here is a streamed version of removing the first line from a file.
Because it uses streams, you don't need to load the whole file into memory, so it is far more efficient and fast, and it can work on very large files without exhausting memory on your hardware.
var Transform = require('stream').Transform;
var util = require('util');

// Transform stream that removes the first line
function RemoveFirstLine(args) {
  if (!(this instanceof RemoveFirstLine)) {
    return new RemoveFirstLine(args);
  }
  Transform.call(this, args);
  this._buff = '';
  this._removed = false;
}
util.inherits(RemoveFirstLine, Transform);

RemoveFirstLine.prototype._transform = function (chunk, encoding, done) {
  if (this._removed) { // if already removed
    this.push(chunk); // just push the chunk through
  } else {
    // collect chunks into a string buffer
    this._buff += chunk.toString();
    // check if the buffer contains a newline yet
    if (this._buff.indexOf('\n') !== -1) {
      // push everything after the first newline, skipping the first line
      // (+1, not +2, so the first character of the second line is kept)
      this.push(this._buff.slice(this._buff.indexOf('\n') + 1));
      // clear the string buffer
      this._buff = null;
      // mark as removed
      this._removed = true;
    }
  }
  done();
};
And use it like so:
var fs = require('fs');
var input = fs.createReadStream('test.txt'); // read file
var output = fs.createWriteStream('test_.txt'); // write file

input                        // take input
  .pipe(RemoveFirstLine())   // pipe through the line remover
  .pipe(output);             // save to file
Another way, which is not recommended: if your files are not large and you don't mind loading them into memory, you can load the file, remove the line, and save it back. But this is slower and won't work well on large files.
var fs = require('fs');
var filePath = './test.txt'; // path to file

fs.readFile(filePath, function (err, data) { // read file into memory
  if (!err) {
    data = data.toString(); // stringify buffer
    var position = data.indexOf('\n'); // find position of first newline
    if (position != -1) { // if a newline was found
      data = data.substr(position + 1); // cut off everything up to and including it
      fs.writeFile(filePath, data, function (err) { // write file back
        if (err) { // if error, report
          console.log(err);
        }
      });
    } else {
      console.log('no lines found');
    }
  } else {
    console.log(err);
  }
});
Here is another way:
const fs = require('fs');
const filePath = './table.csv';
let csvContent = fs.readFileSync(filePath).toString().split('\n'); // read the file and split it into an array of lines
csvContent.shift(); // remove the first element from the array
csvContent = csvContent.join('\n'); // join the array back into a string
fs.writeFileSync(filePath, csvContent);
Thanks to @Lilleman's comment, I've made an amendment to the original solution; it requires the third-party module "line-by-line" and prevents memory overflow and race conditions while processing very large files.
const fs = require('fs');
const LineReader = require('line-by-line');

const removeLines = function (srcPath, destPath, count, cb) {
  if (count <= 0) {
    return cb();
  }
  var reader = new LineReader(srcPath);
  var output = fs.createWriteStream(destPath);
  var linesRemoved = 0;
  var isFirstLine = true;
  reader.on('line', (line) => {
    if (linesRemoved < count) {
      linesRemoved++;
      return; // skip the first `count` lines
    }
    reader.pause();
    var newLine;
    if (isFirstLine) {
      newLine = line;
      isFirstLine = false;
    } else {
      newLine = '\n' + line;
    }
    output.write(newLine, () => {
      reader.resume(); // resume once the line has been flushed
    });
  })
  .on('error', (err) => {
    reader.pause();
    return cb(err);
  })
  .on('close', () => {
    return cb();
  });
};
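A quick usage sketch (file names are illustrative):

// Remove the first 3 lines of input.txt, writing the rest to output.txt
removeLines('input.txt', 'output.txt', 3, function (err) {
  if (err) return console.error(err);
  console.log('done');
});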
---------------- original solution below ----------------
Inspired by another answer, here is a revised stream version:
const fs = require('fs');
const readline = require('readline');

const removeFirstLine = function (srcPath, destPath, done) {
  var rl = readline.createInterface({
    input: fs.createReadStream(srcPath)
  });
  var output = fs.createWriteStream(destPath);
  var firstRemoved = false;
  rl.on('line', (line) => {
    if (!firstRemoved) {
      firstRemoved = true; // swallow the first line
      return;
    }
    output.write(line + '\n');
  }).on('close', () => {
    return done();
  });
};
and it can easily be modified to remove a certain number of lines, by changing 'firstRemoved' into a counter:
var linesRemoved = 0;
...
if (linesRemoved < LINES_TO_BE_REMOVED) {
  linesRemoved++;
  return;
}
...
Here is a naive solution using the Promise-based file system APIs.
const fs = require('node:fs/promises');
const os = require('node:os');

async function removeLines(path, numLinesToRemove) {
  const data = await fs.readFile(path, { encoding: 'utf-8' });
  const newData = data
    .split(os.EOL)             // split data into an array of lines
    .slice(numLinesToRemove)   // drop the first N lines
    .join(os.EOL);             // join the array back into a single string
  // overwrite the original file with the new data
  return fs.writeFile(path, newData);
}
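A usage sketch (file name illustrative); note that splitting on os.EOL assumes the file uses the platform's native line endings, so a file with Unix endings processed on Windows would come out unchanged:

// Remove the first 2 lines of table.csv in place
removeLines('./table.csv', 2)
  .then(() => console.log('done'))
  .catch(console.error);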
