I am trying to program a converter that can take any video source and convert it to MP3. The MP3 should be saved on my hard drive, or in a buffer, so it can be sent via Telegram.
It works well so far; the only problem I am facing is that it can only handle one video at a time, and I don't know why.
// IMPORTS
var fs = require('fs');
var https = require('https');
var child_process = require('child_process');
// EVENTEMITTER (Not used so far)
var util = require('util');
var EventEmitter = require('events').EventEmitter;
// STREAMHANDLER
var StreamHandler = function(url, name){
    // VARIABLES
    self = this;
    this.url = url;
    this.name = name;
    // CREATE FFMPEG PROCESS
    var spawn = child_process.spawn;
    var args = ['-i', 'pipe:0', '-f', 'mp3', '-ac', '2', '-ab', '128k', '-acodec', 'libmp3lame', 'pipe:1'];
    this.ffmpeg = spawn('ffmpeg', args);
    // GRAB STREAM
    https.get(url, function(res) {
        res.pipe(self.ffmpeg.stdin);
    });
    // WRITE TO FILE
    this.ffmpeg.stdout.pipe(fs.createWriteStream(name));
    //DEBUG
    this.ffmpeg.stdout.on("data", function (data) {
        console.error(self.name);
    });
}
util.inherits(StreamHandler, EventEmitter);
// TESTING
var test1 = new StreamHandler(vidUrl, "test1.mp3");
test1.ffmpeg.on("exit", function (code, name, signal) {
    console.log("Finished: " + test1.name);
});
var test2 = new StreamHandler(vidUrl, "test2.mp3");
test2.ffmpeg.on("exit", function (code, name, signal) {
    console.log("Finished: " + test2.name);
});
It skips test1.mp3 and only converts test2.mp3, even though two ffmpeg processes were created.
After test2.mp3 is converted, the other ffmpeg process stays open but does nothing, and the node program gets stuck waiting (I guess) for it to send something.
I hope someone can help me :)
Using your code, I had the same problem. It would hang at the end and only output data for the test2.mp3 file. I'm not exactly sure what caused the problem, but I changed it a bit and this works for me:
// IMPORTS
var fs = require('fs');
//var https = require('https');
var http = require('http');
var child_process = require('child_process');
// EVENTEMITTER (Not used so far)
var util = require('util');
var EventEmitter = require('events').EventEmitter;
// These never change...
var spawn = child_process.spawn;
var args = ['-i', 'pipe:0', '-f', 'mp3', '-ac', '2', '-ab', '128k', '-acodec', 'libmp3lame', 'pipe:1'];
// STREAMHANDLER
var StreamHandler = function(url, name){
    // CREATE FFMPEG PROCESS
    var ffmpeg = spawn('ffmpeg', args);
    // GRAB STREAM
    http.get(url, function(res) {
        res.pipe(ffmpeg.stdin);
    });
    // WRITE TO FILE
    ffmpeg.stdout.pipe(fs.createWriteStream(name));
    ffmpeg.on("exit", function() {
        console.log("Finished:", name);
    });
    //DEBUG
    ffmpeg.stdout.on("data", function(data) {
        console.error(name, "received data");
    });
}
util.inherits(StreamHandler, EventEmitter);
// TESTING
var vidUrl = 'http://www.sample-videos.com/video/mp4/720/big_buck_bunny_720p_1mb.mp4';
var test1 = new StreamHandler(vidUrl, "test1.mp3");
var test2 = new StreamHandler(vidUrl, "test2.mp3");
I am using http instead of https, because I didn't have a sample video available at an https URL. It shouldn't make a difference.
I moved the spawn and args variables out of the object, because they don't change. I also do not use this to store the local variables. I just use a normal closure instead. Finally, I moved the exit event handling code inside the object. I just think it's better to group all that stuff together -- plus, it's only declared once rather than for each new process you create.
Running this gives me the following output (I saved the script as ffmpeg.js):
$ node ffmpeg.js
test2.mp3 received data
Finished: test2.mp3
test1.mp3 received data
Finished: test1.mp3
Also, just a tip. If you want to use the this object inside StreamHandler, I would recommend using arrow functions if your version of Node supports them. This code also works:
var StreamHandler = function(url, name){
    // CREATE FFMPEG PROCESS
    this.ffmpeg = spawn('ffmpeg', args);
    // GRAB STREAM
    http.get(url, (res) => {
        res.pipe(this.ffmpeg.stdin);
    });
    // WRITE TO FILE
    this.ffmpeg.stdout.pipe(fs.createWriteStream(name));
    this.ffmpeg.on("exit", () => {
        console.log("Finished:", name);
    });
    //DEBUG
    this.ffmpeg.stdout.on("data", (data) => {
        console.error(name, "received data");
    });
}
Notice that with arrow functions, I don't need var self = this; at all. Avoiding that is pretty much the reason arrow functions were added to JavaScript.
Hope this helps!
-- EDIT --
Ok, I figured it out. The problem in your code is this line:
self = this;
It should be:
var self = this;
Without the var specifier, you are creating a global variable. So the second time you call new StreamHandler, you overwrite the self variable. That's why the test1.mp3 file hangs and the test2.mp3 file is the only one finishing. By adding var, your original script now works for me.
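To make the clobbering concrete, here is a minimal sketch (an illustration, not code from the original script) of the same effect:

function Handler(name) {
    self = this; // no `var`: this assigns to a single global `self`
    this.name = name;
    setTimeout(function () {
        // runs after both constructors have finished, so it reads
        // whichever instance wrote the global `self` last
        console.log(self.name);
    }, 0);
}
new Handler("test1");
new Handler("test2");
// Prints "test2" twice: both callbacks share one global `self`

In the original script, the same thing happened inside the https.get callback: by the time the responses arrived, self pointed at the test2 instance, so both responses were piped into test2's ffmpeg, and test1's ffmpeg never received any input, which is why it sat there doing nothing.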
I'm trying to make an extension that uses Chrome native messaging to communicate with youtube-dl through a node.js host script. I've been able to successfully parse the stdin coming from the extension, and I can run a simple child process (e.g. touch file.dat), but when I try to exec/spawn youtube-dl, it hangs on the command. I've tried the host script independently of the Chrome native input and it works fine. I think the problem may have something to do with the 1 MB limitation on the buffer size of Chrome native messaging. Is there a way around reading the buffer?
#! /usr/bin/env node
"use strict";
const fs = require('fs');
// exec (async) rather than execSync, since a completion callback is passed below
const exec = require('child_process').exec;
const dlPath = '/home/toughluck/Music';

let first = true;
let buffers = [];

process.stdin.on('readable', () => {
    let chunk = process.stdin.read();
    if (chunk !== null) {
        if (first) {
            // strip the 4-byte message-length header chrome prepends
            chunk = chunk.slice(4);
            first = false;
        }
        buffers.push(chunk);
    }
});

process.stdin.on('end', () => {
    const res = Buffer.concat(buffers);
    const url = JSON.parse(res).url;
    const outTemplate = `${dlPath}/%(title)s.%(ext)s`;
    const cmdOptions = {
        shell: '/bin/bash'
    };
    const cmd = `youtube-dl --extract-audio --audio-format mp3 -o \"${outTemplate}\" ${url}`;
    // const args = ['--extract-audio', '--audio-format', 'mp3', '-o', outTemplate, url];
    // const cmd2 = 'youtube-dl';
    process.stderr.write('Suck it chrome');
    process.stderr.write('stderr doesnt stop host');
    exec(cmd, cmdOptions, (err, stdout, stderr) => {
        if (err) throw err;
        process.stderr.write(stdout);
        process.stderr.write(stderr);
    });
    process.stderr.write('\n Okay....');
});
The full codebase can be found at https://github.com/wrleskovec/chrome-youtube-mp3-dl
So I was right about what was causing the problem. It had to do with the 1 MB limitation on host-to-Chrome messages. You can avoid it by redirecting the stdout/stderr to a file.
const cmd = `youtube-dl --extract-audio --audio-format mp3 -o \"${outTemplate}\" ${url} &> d.txt`;
This worked for me. To be honest, I'm not entirely sure why the message is considered > 1 MB; if someone can give a better explanation, that would be great.
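One hedged guess about the "why" (an assumption, not something established in this thread): Chrome treats everything the host writes to stdout as framed messages, reading a 4-byte length header first, so any raw output leaking onto stdout can decode to an absurd length and trip the 1 MB cap. If you'd rather not leave a d.txt file behind, an alternative sketch is to spawn youtube-dl with its output discarded entirely, so nothing it prints can reach the messaging channel:

// Sketch: replaces the exec(cmd, ...) call; youtube-dl's output is
// discarded instead of being redirected to a file.
const spawn = require('child_process').spawn;

const args = ['--extract-audio', '--audio-format', 'mp3', '-o', outTemplate, url];
const child = spawn('youtube-dl', args, {
    stdio: ['ignore', 'ignore', 'ignore']
});
child.on('exit', (code) => {
    process.stderr.write(`youtube-dl exited with code ${code}\n`);
});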
I've been playing with RxJS for some time now, and I like how I can use its operators for logic rather than imperative programming.
However, I also like node's streams, which are also highly composable, so my obvious reaction was to use both. I haven't seen that mentioned much (actually, I haven't seen it mentioned at all) besides the bindings for it in the RxJS book.
So my question really is: how do I make use of all the transform streams that are on npm with RxJS? Or is it even possible?
Example:-
var fs = require('fs');
var csv = require('csv-parse')({delimiter:';'});
var src = fs.createReadStream('./myFile.csv');
src.pipe(csv).pipe(process.stdout);
Essentially, I would want to do this:-
var fs = require('fs');
var csv = require('csv-parse')({delimiter:';'});
var rx = require('rx-node');
var src = fs.createReadStream('./myFile.csv');
var obj = rx.fromReadableStream(src);
obj.pipe(csv).map(x => console.log(x));
I've been told to use highland in the past but I'm strictly looking for rxjs solution here.
You don't have to use rx-node, but you can! Remember: all streams are event emitters!
Prepare:
input.txt
Hello World!
Hello World!
Hello World!
Hello World!
Hello World!
Run:
npm install through2 split2 rx rx-node
And in the index.js:
var Rx = require('rx');
Rx.Node = require('rx-node');
var fs = require('fs');
var th2 = require('through2');
var split2 = require('split2');
var file = fs.createReadStream('./input.txt').on('error', console.log.bind(console, 'fs err'));
var transform = th2(function(ch, en, cb) {
    cb(null, ch.toString());
}).on('error', function(err) {
    console.log(err, err.toString());
});
// All streams are event emitters! (one way, without using rx-node)
// var subs = Rx.Observable.fromEvent(transform, 'data').share();
// subs
//     .map(value => 'Begin line: ' + value)
//     .subscribe(value => console.log(value));
// rx-node has convenience functions (another way)
var subs = Rx.Node.fromTransformStream(transform).share()
    .map(value => 'Begin line: ' + value)
    .subscribe(value => console.log(value));
file.pipe(split2()).pipe(transform);
Output:
Begin line: Hello World!
Begin line: Hello World!
Begin line: Hello World!
Begin line: Hello World!
Begin line: Hello World!
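A note on the two variants in that script: Rx.Observable.fromEvent(transform, 'data') treats the transform purely as an event emitter, so the resulting observable never completes on its own, while Rx.Node.fromTransformStream(transform) is meant to also observe the stream's end, so subscribers get a completion notification once the input is exhausted.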
EdinM gave a great general example of using RxJS with a node transform stream, but your original question remains unanswered. Since I had nearly the same question a few days ago, I wanted to make an effort at answering it for anyone who isn't familiar with using RxJS with Node. Instead of using the csv-parse module, I'm going with csv-streamify. Let's set up the basic structure:
test_data.csv:
thing,name,owner,loc
chair,sitty,billy,san fran
table,setty,bryan,new oak
Run:
$ npm install rx rx-node csv-streamify
index.js:
"use strict";
const Rx = require('rx');
Rx.Node = require('rx-node');
const fs = require('fs');
const csv = require('csv-streamify');
//Setting up the transform-stream CSV parser
let config = {
    delimiter: ',', // comma, semicolon, whatever
    newline: '\n',  // newline character (use \r\n for CRLF files)
    quote: '"',     // what's considered a quote
    empty: '',      // empty fields are replaced by this
    //objectMode: true, // parses the csv rows into an array of objects
    //columns: true     // uses the column headers for the object fields
};
let parseCsv = csv(config);
//Setting up the RxJS Observer
function onNext (x) {
    // do your side-effects here, after the data has
    // gone through the observable's operator chain
    console.log('Next: ' + x);
};
function onError (err) {
    console.log('Error: ' + err);
};
function onComplete () {
    console.log('Completed');
};
let readStream = fs.createReadStream('test_files/test_data.csv');
readStream.pipe(parseCsv);
let subscription = Rx.Node.fromTransformStream(parseCsv)
    // do something with the data with an operator such as:
    // .map()
    .subscribe(onNext, onError, onComplete);
Now let's run the code:
$ node index.js
And we'll get this output:
Next: ["thing","name","owner","loc\r"]
Next: ["chair","sitty","billy","san fran\r"]
Next: ["table","setty","bryan","new oak"]
Completed
Epilogue
If you set objectMode and columns to true in the csv config object, and then project this sideEffect function with the map operator like so:
function sideEffect (v){
    console.log(v);
    return v;
};
let subscription = Rx.Node.fromTransformStream(parseCsv)
    .map(sideEffect)
    .subscribe(onNext, onError, onComplete);
You'll get this output:
{ thing: 'chair',
name: 'sitty',
owner: 'billy',
'loc\r': 'san fran\r' }
Next: [object Object]
{ thing: 'table',
name: 'setty',
owner: 'bryan',
'loc\r': 'new oak' }
Next: [object Object]
Completed
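One detail worth noticing in that output: the stray \r in the 'loc\r' keys (and in 'san fran\r') means the test file was saved with CRLF line endings while the config declares newline: '\n'. A small tweak, assuming the same test file, cleans that up and also prints the parsed rows instead of [object Object]:

// Use CRLF newlines to match the file, and log the row itself:
let config = {
    delimiter: ',',
    newline: '\r\n', // matches the CRLF endings the \r artifacts reveal
    quote: '"',
    empty: '',
    objectMode: true,
    columns: true
};
function onNext (x) {
    console.log('Next:', x); // avoids stringifying to [object Object]
};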
I'm trying to spawn a PowerShell process on Windows from NodeJS, to which I can send commands and from which I can read the output. I've done that with the stdio configuration inherited from the parent node process, but I want to have it separated, and I can't get any of this to work correctly.
With the code below, I was hoping to get the output of the $PSVersionTable PowerShell variable in the psout.txt file, but it never gets written.
The simple code is:
var
    express = require('express'),
    fs = require('fs'),
    spawn = require("child_process").spawn;

var err = fs.openSync('pserr.txt', 'w');
var out = fs.openSync('psout.txt', 'w');
var writer = fs.createWriteStream(null, {
    fd: fs.openSync('psin.txt', 'w')
});

var child = spawn("powershell.exe", ["-Command", "-"], {
    detached: true,
    stdio: [writer, out, err]
});

writer.write('$PSVersionTable\n');
writer.end();
child.unref();

// just wait
var app = express();
app.listen(3000);
Update
I've tried with the code on this node github issue tracker: https://github.com/joyent/node/issues/8795
It seems that calling .NET code and calling a native executable are different things...
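For what it's worth, here is a minimal sketch of the wiring I would expect to work (an assumption on my part, not a verified fix for this exact setup): keep plain pipes on the child, write the command to child.stdin directly, and pipe the output streams to the files, instead of handing the child a write stream to psin.txt as its stdin:

var fs = require('fs');
var spawn = require('child_process').spawn;

var out = fs.createWriteStream('psout.txt');
var err = fs.createWriteStream('pserr.txt');

// default stdio is ['pipe', 'pipe', 'pipe'], so stdin/stdout/stderr
// are all available on the child object
var child = spawn('powershell.exe', ['-Command', '-']);
child.stdout.pipe(out);
child.stderr.pipe(err);

child.stdin.write('$PSVersionTable\n');
child.stdin.end(); // closing stdin lets powershell run the command and exit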
I would like to pipe some streaming data to feedgnuplot in order to graph it in real time.
The following works:
// index.js
readableStream.pipe(process.stdout)
// in bash
$ node index.js | feedgnuplot --stream
But the following doesn't work (and this is what I want to make work):
// index.js
var feedgnuplot = require('child_process').spawn('feedgnuplot', ['--stream'])
readableStream.pipe(feedgnuplot.stdin)
//in bash
$ node index.js
I get an ECONNRESET error.
EDIT: example of a readable stream as requested:
var util = require('util')
var Readable = require('stream').Readable

util.inherits(SomeReadableStream, Readable)
function SomeReadableStream () {
    Readable.call(this)
}
SomeReadableStream.prototype._read = function () {
    this.push(Math.random().toString() + '\n')
}

var someReadableStream = new SomeReadableStream()
someReadableStream.pipe(process.stdout)
For me, with node.js v0.10.26, your script works fine:
Script index.js:
var util = require('util')
var Readable = require('stream').Readable

util.inherits(SomeReadableStream, Readable)
function SomeReadableStream () {
    Readable.call(this)
}
SomeReadableStream.prototype._read = function () {
    this.push(Math.random().toString() + '\n')
}

var someReadableStream = new SomeReadableStream()
var feedgnuplot = require('child_process').spawn('feedgnuplot', ['--stream'])
someReadableStream.pipe(feedgnuplot.stdin)
And call it from a terminal as nodejs index.js.
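Since the script works elsewhere, one guess about the ECONNRESET (an assumption, not something established in this thread) is that feedgnuplot wasn't found or exited early on the asker's machine, which breaks the pipe. Two handlers added to the script above make that failure visible instead of crashing the process:

feedgnuplot.on('error', function (err) {
    console.error('failed to start feedgnuplot:', err.message)
})
feedgnuplot.stdin.on('error', function (err) {
    console.error('writing to feedgnuplot failed:', err.message)
})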
I'm experimenting with the knox module for node.js as a way of managing some small files in an Amazon S3 bucket. Everything works fine stand-alone: I can upload a file, download a file, etc. However, I want to be able to download a file on a recurring schedule. When I modify the code to run on an interval, the downloaded file is appended to the previous instance instead of overwriting it.
I'm not sure if I've made a mistake in the file write code or in the knox handling code. I've tried several different write approaches (writeFile, writeStream, etc.) and I've looked at the knox source code. Nothing obvious to me stands out as a problem. Here's the code I'm using:
var knox = require('knox');
var fs = require('fs');

var downFile = DOWNFILE;
var downTxt = '';
var timer = INTERVAL;
var path = S3PATH + downFile;

setInterval(function()
{
    var s3client = knox.createClient(
    {
        key: '********************',
        secret: '**********************************',
        bucket: '********'
    });
    s3client.get(path).on('response', function(response)
    {
        response.setEncoding('ascii');
        response.on('data', function(chunk)
        {
            downTxt += chunk;
        });
        response.on('end', function()
        {
            fs.writeFileSync(downFile, downTxt, 'ascii');
        });
    }).end();
},
timer);
The problem is with your placement of var downTxt = '';. That is the only place you set downTxt to blank, so every time you retrieve more data, you append it to the data from the previous request, because you never clear it. The simplest fix is to move that line to just before the setEncoding line.
However, the way you are processing the data is unnecessarily complicated. Try something like this instead. You don't need to recreate the client every time, and setting the encoding will just break things if you are downloading non-text files (it makes no difference for text files). Next, you shouldn't manually collect the data; you can start writing it to the file as soon as you receive it. Lastly, since the request is a standard stream, you don't need to monitor the 'data' event, because you can just use pipe.
var knox = require('knox'),
    fs = require('fs'),
    downFile = DOWNFILE,
    timer = INTERVAL,
    path = S3PATH + downFile,
    s3client = knox.createClient({
        key: '********************',
        secret: '**********************************',
        bucket: '********'
    });

(function downloadFile() {
    var str = fs.createWriteStream(downFile);
    s3client.get(path).pipe(str);
    str.on('close', function() {
        setTimeout(downloadFile, timer);
    });
})();
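A note on the scheduling there: the recursive setTimeout arms the next download only after the current write stream has closed, so a slow transfer can never overlap the next one, which was possible with the original setInterval approach.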