YouTube - MP4 to MP3 messes up - node.js

It's supposed to download all the videos in the playlist and convert them to mp3. But all this does is make the mp4's and 1 empty mp3 with a number higher than the max mp4. My newbie brain doesn't know how to fix this...
var ytpl = require('ytpl');
var fs = require('fs-extra');
var path = require('path');
var ffmpeg = require('fluent-ffmpeg');
var binaries = require('ffmpeg-static');
var ytdl = require('ytdl-core');
var output_dir = path.join(__dirname+"/dl");
ytpl("PL8n8S4mVUWvprlN2dCAMoIo6h47ZwR_gn", (err, pl) => {
if(err) throw err;
let c = 0;
pl.items.forEach((i) => {
ytdl(i.url_simple+"", { filter: 'audioonly' }).pipe(fs.createWriteStream(output_dir+"/"+c+".mp4")).on('finish', () => {
console.log("Finished MP4 DL, starting conversion...");
ffmpeg(output_dir+"/"+c+".mp4")
.setFfmpegPath(binaries.path)
.format('mp3')
.audioBitrate(320)
.output(fs.createWriteStream(output_dir+"/"+c+".mp3"))
.on('end', () => {
console.log("Finished MP3 Convert...");
})
.run();
});
c++;
});
});

Fixed using an example by ytdl-core Link
Fixed code:
var ytpl = require('ytpl');
var fs = require('fs-extra');
var path = require('path');
var ffmpeg = require('fluent-ffmpeg');
var binaries = require('ffmpeg-static');
var sanitize = require('sanitize-filename');
var ytdl = require('ytdl-core');
var output_dir = path.join(__dirname+"/dl");
ytpl("PL8n8S4mVUWvprlN2dCAMoIo6h47ZwR_gn", (err, pl) => {
if(err) throw err;
for (const i of pl.items) {
let stream = ytdl(i.id, {
filter: 'audioonly'
});
ffmpeg(stream)
.audioBitrate(320)
.save(`${output_dir}/${sanitize(i.title + " (by " + i.author.name + ")")}.mp3`)
.on('end', () => {
console.log("Done! Downloaded \""+i.title + " (by " + i.author.name + ")"+"\"");
});
}
});

Related

I have a problem with node js rtsp streaming server

I made rtsp cctv streaming server with nodjs.
But it is not stable.
Some cctvs works well but others are not.
First I thought rtsp url has its own problem, but it may not.
Because the url worked well in vlc player.
I don't know what I'm missing.
below is my whole code related cctv streaming.
var express = require('express');
var router = express.Router();
var kill = require('tree-kill');
var fs = require('fs');
var path = require('path');
var ffmpeg = require('fluent-ffmpeg');
var ffmpegInstaller = require('#ffmpeg-installer/ffmpeg');
ffmpeg.setFfmpegPath(ffmpegInstaller.path)
var streams = {};
//start cctv
router.post('/', (req, res) => {
var cname = req.body.cname;
var url = req.body.url;
//if there is same cctv name
if(streams[cname] != null) {
res.status(409).send("duplicate name");
return;
};
//create dir as given cctv name;
mkdir(cname);
stream = ffmpeg(url).addOptions([
'-hls_time 5',
'-hls_list_size 10',
'-hls_flags delete_segments',
'-f hls'
]).output('./public/video/' + cname + '/' + cname + '.m3u8'); //save path
console.log("Start cctv streaming");
stream.on('error', function(err, stdout, stderr) {
console.log("cctv has been stoped");
console.log(err);
});
stream.run();
streams[cname] = stream;
res.status(201).send("OK");
});
//bring cctv pid by cctv name
router.get('/:cname', (req, res) => {
var cname = req.params.cname;
if(streams[cname] == null) {
res.status(404).send("not found such a cctv");
return;
};
var pid = streams[cname].ffmpegProc.pid;
res.send({"pid": pid});
});
//stop cctv by pid
router.delete('/:cname', async (req, res) => {
var cname = req.params.cname;
//no cctv
if(streams[cname] == null) {
res.status(404).send("not found such a cctv");
return;
};
//del dir
var filePath = './public/video/' + cname;
fs.rmdir(filePath, { recursive: true }, (err) => {
if (err) {
console.log(err)
} else {
console.log('dir is deleted.');
}
});
//var pid = streams[cname].ffmpegProc.pid;
streams[cname].kill();
res.status(204).send("OK");
});
const mkdir = (name) => {
var root = './public/video/';
if(!fs.existsSync(root + name)){
fs.mkdirSync(root + name);
}
}
And this is ts file save folder.
cctv1 dosen't work well but cctv2 works well.
(cctv1 started first but created less ts file than cctv2.)

sent file using axios using passthrough stream module in nodejs

Imports
const ffmpegPath = require('#ffmpeg-installer/ffmpeg').path;
const FfmpegCommand = require('fluent-ffmpeg');
const fs = require('fs');
const path = require('path');
const streamNode = require('stream');
const FormData = require('form-data');
const axios = require('axios').default;
Code here
async function audios() {
let stream = fs.createReadStream(path.join(__dirname, '../videos/video.mp4'));
let writeStream = fs.createWriteStream(path.join(__dirname, '../response/audios/' + +new Date() + '.wav'));
let pass = new streamNode.PassThrough();
let outputFile = path.join(__dirname, '../response/audios/' + +new Date() + '.wav');
const ffmpeg = FfmpegCommand(file);
ffmpeg
.setFfmpegPath(ffmpegPath)
.format('mp4')
.toFormat('wav')
.on('end', function () {
console.log('file has been converted successfully');
})
.on('error', function (err, stdout, stderr) {
console.log('an error happened: ' + err.message);
console.log('ffmpeg stdout: ' + stdout);
console.log('ffmpeg stderr: ' + stderr);
})
.on('end', function() {
console.log('Processing finished !');
})
.stream(pass, { end: false })
var bodyFormData = new FormData();
bodyFormData.append('file', pass);
let headers = bodyFormData.getHeaders();
try {
const jdata = await axios.post('http://localhost:4080/video',bodyFormData, { maxContentLength: Infinity,
maxBodyLength: Infinity,validateStatus: (status) => true ,headers:headers });
console.log(jdata.data);
} catch (error) {
console.log("error" ,error.message);
}
}
I am getting errors to sent passthrough stream through formdata ;
issue is ffmpeg not creating readstrem so I am created passthrough from it and passed in formdata but not working right now

make pdf file from jsPDF and send pdf file to server node js

i have code to make pdf and succeeded in downloading and opening it, but i want to send pdf to my server on node js, and i have made app.post on server but i can't make pdf become base64 and save it on server
in frontend
<script type="text/javascript">
function genPDF() {
html2canvas(document.getElementById('testDiv')).then(function (canvas) {
var img = canvas.toDataURL('image/png');
var doc = new jsPDF('landscape');
doc.addImage(img, 'png', 10, 10);
var temp = doc.save('test.pdf');
var post = new XMLHttpRequest();
post.open("POST", "/receive");
post.send(temp);
}
</script>
Download PDF
in server
app.post('/receive', function (request, respond) {
var body = '';
var filePath = './static' + '/document/Document.pdf';
//
request.on('data', function (data) {
body += data;
});
request.on('end', function () {
var data = body.replace(/^data:image\/\w+;base64,/, "");
var buf = new Buffer(data, 'base64');
fs.writeFile(filePath, buf, function (err) {
if (err) throw err
respond.end();
});
});
});
how to send var temp = doc.save('test.pdf'); server and generate pdf to base64?
Use the below code this will help you.
IN FE
<script type = "text/javascript">
function genPDF() {
html2canvas(document.getElementById('testDiv')).then(function (canvas) {
var img = canvas.toDataURL('image/png');
var doc = new jsPDF('landscape');
doc.addImage(img, 'png', 10, 10);
var temp = doc.save('test.pdf');
var data = new FormData();
data.append("pdf_file", temp);
var post = new XMLHttpRequest();
post.open("POST", "/receive");
post.send(data);
}
</script>
<a href = "javascript:genPDF()" > Download PDF </a>
IN BE
const fs = require('fs');
const multipartMiddleware = require('connect-multiparty')();
const express = require('express');
const app = express();
const port = 8000;
const filePath = './static' + '/document/Document.pdf';
app.post('/', multipartMiddleware, (request, response) => {
fs.readFile(request.files.pdf_file.path, (err, data) => {
fs.writeFile(filePath, data, function (err) {
if (err) throw err;
response.send('Done')
});
})
})
app.listen(port, (err) => {
if (err) {
return console.log('something bad happened', err)
}
console.log(`server is listening on ${port}`)
});

node.js - Archiving folder using archiver generate an empty zip

I am trying to archive a folder using archiver, the path of the folder that i'd like to archive look like this :
Project
| app.js
| tmp
|
folderToArchive
│file1.txt
│file2.txt
│file3.txt
my server side code where the zip file will be generated look like this :
var archiver = require("archiver");
app.get("/download/:folder", (req, res) => {
var FolderName = req.params.folder;
var zipName = FolderName + ".zip";
var source = path.join(__dirname, "tmp", FolderName);
var out = path.join(__dirname, "tmp", zipName);
const archive = archiver('zip', { zlib: { level: 9 }});
const stream = fs.createWriteStream(out);
return new Promise((resolve, reject) => {
archive
.directory(source, false)
.on('error', err => reject(err))
.pipe(stream)
;
stream.on('close', () => resolve());
archive.finalize();
console.log("zip file created");
});
});
The issue is when i run the app it will create an empty zip file in the right destination.
This is totally new for me and I'd like to understand why the zip file is empty ?
Regards
It could likely be the same reason that I had an empty archive, which is the main process ending before the archive get to write into zip.
Hence it should be wrapped in a promise, something like this:
async function zipDirectory(source, dest) {
const stream = fs.createWriteStream(dest);
const archive = archiver('zip', { zlib: { level: 9 } });
archive.on('error', function(err) {
throw err;
});
await new Promise((resolve, reject) => {
archive.pipe(stream);
archive.directory(source, false);
archive.on('error', err => {throw err;});
archive.finalize();
stream
.on('close', function() {
console.log(`zipped ${archive.pointer()} total bytes.`);
resolve();
});
});
}
Tested the following code and it works:
const path = require('path');
const archiver = require('archiver');
const fs = require('fs');
const FolderName = "fol";
const zipName = FolderName + ".zip";
const source = path.join(__dirname, "tmp", FolderName);
const out = path.join(__dirname, "tmp", zipName);
const archive = archiver('zip', { zlib: { level: 9 }});
const stream = fs.createWriteStream(out);
archive
.directory(source, false)
.on('error', err => {throw err;})
.pipe(stream);
stream.on('close', () => console.log("closed"));
archive.finalize();
console.log("zip file created");
So I suggest:
const path = require('path');
const archiver = require('archiver');
const fs = require('fs');
const archive = (folderName) => {
const zipName = folderName + ".zip";
const source = path.join(__dirname, "tmp", folderName);
const out = path.join(__dirname, "tmp", zipName);
const archive = archiver('zip', { zlib: { level: 9 }});
const stream = fs.createWriteStream(out);
archive
.directory(source, false)
.on('error', err => {throw err;})
.pipe(stream);
stream.on('close', () => console.log("closed"));
archive.finalize();
console.log("zip file created");
};
app.get("/download/:folder", (req, res) => {
const folderName = req.params.folder;
archive(folderName);
});

How to merge two csv files rows in node js

I have 2 csv files which have different different data but having a same header
eg. FILE 1 data is
"CODE","NAME","SUB_USER","SCORE"
"01","TEST","1","5"
"01","TEST","2","6"
other file FILE2 have data like this
"CODE","NAME","SUB_USER","SCORE"
"02","TEST2","3","5"
"02","TEST2","4","6"
so i want to merge both file create FILE3 output like this
"CODE","NAME","SUB_USER","SCORE"
"01","TEST","1","5"
"01","TEST","2","6"
"02","TEST2","3","5"
"02","TEST2","4","6"
I have tried below code
var express = require('express');
var router = express.Router();
var fs = require('fs');
var parse = require('csv-parse');
var async = require('async');
var csv = require("fast-csv");
var file1 = appRoot + '\\csvFiles\\details1.csv';
var file2 = appRoot + '\\csvFiles\\details2.csv';
var stream = fs.createReadStream(file1);
var stream2 = fs.createReadStream(file2);
var fileData1 = [],
fileData2 = [];
csv
.fromStream(stream)
.on("data", function(data) {
fileData1.push(data);
})
.on("end", function() {
console.log("done");
});
csv
.fromStream(stream2)
.on("data", function(data) {
fileData2.push(data);
})
.on("end", function() {
console.log("done");
});
var fileData3 = fileData1.concat(fileData2);
csv.writeToPath("outputfile.csv", fileData3).on("finish", function() {
console.log("END");
});
But not working don't know why?? Please help me
///**********************************************************************//
Thax for help but i got new problem here
After some changes above code start working
var file1 = appRoot + '\\csvFiles\\details1.csv';
var file2 = appRoot + '\\csvFiles\\idetails2.csv';
var stream = fs.createReadStream(file1);
var stream2 = fs.createReadStream(file2);
var fileData1 = [],
fileData2 = [],
i = 0;
csv.fromStream(stream).on("data", function(data) {
fileData1.push(data);
}).on("end", function() {
csv.fromStream(stream2).on("data", function(data) {
if (i != 0) {
fileData2.push(data);
}
i++;
}).on("end", function() {
console.log("done");
var fileData3 = fileData1.concat(fileData2);
csv.writeToPath("outputfile.csv", fileData3).on("finish", function() {
res.send('Done merge');
});
});
});
But problem is that what if my number of file increase then how i will handle that thing
The biggest problem here is a quite common one. You do async tasks but you don't wait for them to finish before you are using their result.
You concat the file data before the "end" callback for each tasks was called.
The solution is to wait for every callback to be called and THEN working with the data.
I created a small example using Promises
const file1 = 'one.csv';
const file2 = 'two.csv';
const stream = fs.createReadStream(file1);
const stream2 = fs.createReadStream(file2);
const fileData1 = [];
const fileData2 = [];
const file1Promise = new Promise((resolve) => {
csv
.parseFile(file1, {headers: true})
.on('data', function(data) {
fileData1.push(data);
})
.on('end', function() {
console.log('done');
resolve();
});
});
const file2Promise = new Promise((resolve) => {
csv
.parseFile(file2, {headers: true})
.on('data', function(data) {
fileData2.push(data);
})
.on('end', function() {
console.log('done');
resolve();
});
});
Promise.all([
file1Promise,
file2Promise,
])
.then(() => {
const fileData3 = fileData1.concat(fileData2);
console.log(fileData3);
const csvStream = csv.format({headers: true});
const writableStream = fs.createWriteStream('outputfile.csv');
writableStream.on('finish', function() {
console.log('DONE!');
});
csvStream.pipe(writableStream);
fileData3.forEach((data) => {
csvStream.write(data);
});
csvStream.end();
});
I created a function with which you can easily merge multiple files:
function concatCSVAndOutput(csvFilePaths, outputFilePath) {
const promises = csvFilePaths.map((path) => {
return new Promise((resolve) => {
const dataArray = [];
return csv
.parseFile(path, {headers: true})
.on('data', function(data) {
dataArray.push(data);
})
.on('end', function() {
resolve(dataArray);
});
});
});
return Promise.all(promises)
.then((results) => {
const csvStream = csv.format({headers: true});
const writableStream = fs.createWriteStream(outputFilePath);
writableStream.on('finish', function() {
console.log('DONE!');
});
csvStream.pipe(writableStream);
results.forEach((result) => {
result.forEach((data) => {
csvStream.write(data);
});
});
csvStream.end();
});
}
example usage
concatCSVAndOutput(['one.csv', 'two.csv'], 'outputfile.csv')
.then(() => ...doStuff);

Resources