I have the following script. It works; however, I can't figure out the best solution for this requirement: when a metadata tag is triggered, the stream should stop/pause, an mp3 URL should play, and then the stream should be reconnected (as a new connection).
My first idea worked, but it paused the Icecast stream, inserted the mp3, and afterwards simply continued playing from the paused spot (which is not wanted). What I would like is: if the mp3 is 2 minutes long, then the Icecast stream should also have skipped ahead 2 minutes.
// Relay an Icecast radio stream to any number of connected HTTP clients.
var http = require('http'),
    request = require('request');
var url = 'http://stream.radiomedia.com.au:8003/stream'; // URL to a known Icecast stream
var icecast = require('icecast-stack');
var stream = icecast.createReadStream(url);
// var radio = require("radio-stream");
// var stream = radio.createReadStream(url);

// HTTP responses of currently-connected listeners; each incoming audio
// chunk is fanned out to every entry.
var clients = [];

stream.on("connect", function() {
  console.error("Radio Stream connected!");
  //console.error(stream.headers);
});

// Fired after the HTTP response headers have been received.
stream.on('response', function(res) {
  console.error("Radio Stream response!");
  console.error(res.headers);
});

// When a chunk of data is received on the stream, push it to all connected clients.
// BUG FIX: the original used `for (client in clients)`, which iterates string
// keys (and any inherited enumerable properties) rather than values; use a
// plain index loop over the array instead.
stream.on("data", function (chunk) {
  for (var i = 0; i < clients.length; i++) {
    clients[i].write(chunk);
  }
});

// When a 'metadata' event happens, usually a new song is starting.
stream.on('metadata', function(metadata) {
  var title = icecast.parseMetadata(metadata).StreamTitle;
  console.error(title);
});

// Listen on a web port and respond with a chunked response header.
var server = http.createServer(function(req, res){
  res.writeHead(200, {
    "Content-Type": "audio/mpeg",
    'Transfer-Encoding': 'chunked'
  });
  // Add the response to the clients array to receive streaming
  clients.push(res);
  console.log('Client connected; streaming');

  // BUG FIX: remove the response from `clients` when the listener
  // disconnects; otherwise the array grows forever and every chunk is
  // written to dead sockets.
  req.on('close', function() {
    var idx = clients.indexOf(res);
    if (idx !== -1) {
      clients.splice(idx, 1);
    }
  });
});

// BUG FIX: pass the port as a number rather than the string "9000".
server.listen(9000, "127.0.0.1");
console.log('Server running at http://127.0.0.1:9000');
You can't simply arbitrarily concatenate streams like this. With MP3, the bit reservoir will bite you. Generally, it will be a small stream glitch, but you can have more picky clients just outright drop the connection.
To do what you want to do, you actually need to decode everything to PCM, mix the audio as you see fit, and then re-encode a fresh stream.
As an added bonus, you won't be tied to a particular codec and bitrate, and can offer an appropriate array of choices to your listeners. You also won't have to worry about the timing of MPEG frames, as your final stream can be sample-accurate.
Related
I've been using the package node-icy to grab metadata from an icecast stream.
What it is doing is grabbing the metadata from the stream. Then the stream is decoded using lame and played on the Speaker.
// Start the HTTP server, then connect to the remote ICY stream so we can
// inspect its headers, watch for 'metadata' events (track changes), and
// play the audio locally.
server.listen(port, function () {
  icy.get(url, function (res) {
    // log HTTP responses headers
    console.error(res.headers);

    // log any "metadata" events that happen
    res.on('metadata', function (metadata) {
      const parsed = icy.parse(metadata);
      console.log('Metadata event');
      console.error(parsed);
    });

    // Let's play the music (assuming MP3 data).
    // lame decodes and Speaker sends to speakers!
    const decoded = res.pipe(new lame.Decoder());
    decoded.pipe(new Speaker());
  });

  console.log(`Server on port: ${port}`);
});
This will give me an output of the titles of the songs:
Metadata event
{ StreamTitle: 'ruby the hatchet - planetary space child - killer' }
If I remove
res.pipe(new lame.Decoder())
.pipe(new Speaker());
Then the metadata is grabbed only once. My guess is that the Speaker() function keeps running and when metadata changes, then icy.get will run res.on('metadata', ...).
I'm handling the streamer on the server and then send it to the Client on Angular 5. Is there a way to keep the icy.get(...) listening without using Speaker(). I'm fairly new to streams. Any help would be appreciated.
I was able to solve this problem by using
// Keep the ICY connection flowing (so 'metadata' keeps firing) without
// playing any audio: pipe the response into a /dev/null-style sink.
var icy = require('icy');
var devnull = require('dev-null');

icy.get(url, (res) => {
  // log any "metadata" events that happen
  res.on('metadata', (metadata) => {
    console.log('metadata', icy.parse(metadata));
  });

  // Discarding the audio bytes still consumes the stream, which is what
  // keeps data (and therefore metadata) arriving.
  res.pipe(devnull());
});
You can see it here:
https://github.com/TooTallNate/node-icy/issues/16
I am having some difficulty streaming a video file with socket.io and node. My video file is on my server, and I am using the fs module to read it into a readStream. I am them passing chunks of data to a mediasource on the client side, which feeds into an html 5 video tag.
Although the client is receiving the chunks (I'm logging them), and I am appending the chunks to the buffer of the media source, nothing shows up in the video tag.
Anyone know how to fix this?
Here's my code:
Client side:
// Client side: feed MP4 chunks received over socket.io into an MSE
// SourceBuffer backing a <video> element.
var mediaSource = new MediaSource();
var mimeCodec = 'video/mp4; codecs="avc1.42E01E, mp4a.40.2"';
document.getElementById('video').src = window.URL.createObjectURL(mediaSource);

mediaSource.addEventListener('sourceopen', function(event) {
  var sourceBuffer = mediaSource.addSourceBuffer(mimeCodec);
  console.log(sourceBuffer);

  // BUG FIX: appendBuffer() is asynchronous — while sourceBuffer.updating is
  // true further appends are not allowed. The original simply skipped chunks
  // that arrived during an update, so almost all of the video data was lost
  // and the <video> element never had enough to play. Queue every chunk and
  // drain the queue each time the previous append completes ('updateend').
  var pending = [];

  function appendNext() {
    if (!sourceBuffer.updating && pending.length > 0) {
      sourceBuffer.appendBuffer(pending.shift());
    }
  }

  sourceBuffer.addEventListener('updateend', appendNext);

  socket.on('chunk', function (data) {
    pending.push(data);
    console.log(data);
    appendNext();
  });

  socket.emit('go',{})
});
Server side:
// Server side: on the client's 'go' signal, stream the video file to it in
// chunks over socket.io.
// BUG FIX: `window` does not exist in Node.js — the original
// `fs.createReadStream(window.currentvidpath)` throws a ReferenceError on
// the server. Hold the path in a server-side variable instead.
var currentvidpath = 'sample_video.mp4'; // TODO: set to the real file path

socket.on('go', function(){
  console.log('WENT');
  // BUG FIX: create the read stream inside the handler so every 'go' gets a
  // fresh stream from the start of the file; a single shared stream would be
  // consumed once and would stack duplicate 'data' listeners on repeats.
  var stream = fs.createReadStream(currentvidpath);
  stream.addListener('data', function(data){
    console.log('VIDDATA', data);
    socket.emit('chunk', data);
  });
});
Thanks a lot.
The problem is that you only append to the source buffer if it is not currently updating:
if(!sourceBuffer.updating){
sourceBuffer.appendBuffer(data);
console.log(data);
}
Here's my console output after I added an else branch and logged the times it doesn't append:
SourceBuffer {mode: "segments", updating: false, buffered: TimeRanges, timestampOffset: 0, appendWindowStart: 0…}
site.html:24 connect
site.html:17 ArrayBuffer {}
30 site.html:20 not appending
So it appended one chunk of the video and ignored 30.
You should store the chunks that aren't appended in an array, then append them in a loop driven by setInterval (or by the SourceBuffer's 'updateend' event).
I have an internet audio stream that's constantly being broadcast (accessible via http url), and I want to somehow record that with NodeJS and write files that consist of one-minute segments.
Every module or article I find on the subject is all about streaming from NodeJS to the browser. I just want to open the stream and record it (time block by time block) to files.
Any ideas?
I think the project at https://github.com/TooTallNate/node-icy makes this easy, just do what you need to with the res object, in the example it is sent to the audio system:
const icy = require('icy');
const lame = require('lame');
const Speaker = require('speaker');

// URL to a known ICY stream
const url = 'http://firewall.pulsradio.com';

// Connect to the remote stream; `res` is a readable ICY response.
icy.get(url, (res) => {
  // log the HTTP response headers
  console.error(res.headers);

  // log any "metadata" events that happen
  res.on('metadata', (metadata) => {
    console.error(icy.parse(metadata));
  });

  // Let's play the music (assuming MP3 data).
  // lame decodes and Speaker sends to speakers!
  const pcm = res.pipe(new lame.Decoder());
  pcm.pipe(new Speaker());
});
I'm creating a relay server for my streaming app. Basically, it should work like this:
Client A streams microphone audio to server through sockets
Server gets the stream and maybe stores it somewhere temporarily? (not sure)
Client B gets a stream from server and plays it.
Basically, I have 1st part done(sending mic audio to server):
// Continuously read raw audio from the recorder and push each buffer to the
// server over the socket connection.
// NOTE(review): Arrays.toString(buffer) serializes the bytes as a text list
// ("[1, 2, ...]"), which is very inflated on the wire; a binary frame would
// be far smaller. Also the number of bytes actually read (minBufSize) is not
// used to trim the buffer before sending — TODO confirm intent.
while(isStreaming)
{
minBufSize = recorder.read(buffer, 0, buffer.length);
mSocket.emit("stream", Arrays.toString(buffer));
}
And 3rd part done, simply playing audio:
// Play the relayed stream on the client via Android MediaPlayer.
// NOTE(review): prepare() blocks while buffering from the network URL —
// prepareAsync() would avoid stalling the calling thread; confirm which is
// appropriate here.
mediaplayer.reset();
mediaplayer.setDataSource("http://192.168.1.2:1337/stream");
mediaplayer.prepare();
mediaplayer.start();
Now I'm not sure how to bridge incoming byte array and streaming. Here is my current server code:
var ms = require('mediaserver');
// from server to Client B
exports.letsStream = function(req, res, next) {
ms.pipe(req, res, "sample_song_music_file.mp3");
};
// from Client A to server
exports.handleSocketConnection = function(socket)
{
console.log("connected");
socket.on('stream', function(data)
{
var bytes = JSON.parse(data);
console.log("GETTING STREAM:" + bytes);
});
}
Any suggestions? How can I directly stream that byte array?
The mediaserver module only supports streaming an existing audio, rather than a "live" stream. This won't work.
One way to achieve the three tasks would be:
https://www.npmjs.com/package/microphone to read the microphone audio as a byte stream.
http://binaryjs.com/ to handle transmitting the byte stream over websockets to the server and then sending to the client. If you have two separate paths set up, one for sending the data, one for receiving. Send the data from one stream to the other.
Use https://github.com/TooTallNate/node-speaker to play the incoming PCM data stream on Client B
I'm trying to write a Meteor.JS app that uses peer to peer "radio" communication. When a user presses a button it broadcasts their microphone output to people.
I have some code that gets permission to record audio, and it successfully gets a MediaStream object, but I can't figure out how to get the data from the MediaStream object as it is being recorded.
I see there is an method defined somewhere for getting all of the tracks of the recorded audio. I'm sure I could find a way to write some kind of loop that notifies me when audio has been added, but it seems like there should be a native, event-driven way to retrieve the audio from getUserMedia. Am I missing something? Thanks
What you will want to do is to access the stream through the AudioAPI(for the recording part). This is after assigning a var to your stream that was grabbed through getUserMedia (I call it localStream). So, you can create as many MediaStreamsource nodes as you want from one stream, so you can record it WHILE sending it to numerous people through different rtcpeerconnections.
// Resolve whichever AudioContext constructor the browser exposes.
// BUG FIX: the original `new webkitAudioContext() || AudioContext()` throws a
// ReferenceError on browsers without the webkit prefix before the fallback is
// ever evaluated (and called AudioContext without `new`). Pick the
// constructor first, then call `new` once.
var AudioContextClass = window.AudioContext || window.webkitAudioContext;
var audioContext = new AudioContextClass();

// BUG FIX: the Web Audio method is createMediaStreamSource (capital S), not
// createMediastreamSource.
var source = audioContext.createMediaStreamSource(localStream);

// Taps audio from a MediaStream source node through a script processor so
// each buffer of samples can be captured in `onaudioprocess`.
var AudioRecorder = function (source) {
  var recording = false;
  var worker = new Worker(WORKER_PATH);
  var config = {};
  var bufferLen = 4096;
  this.context = source.context;
  // Prefer the standard createScriptProcessor; fall back to the legacy
  // createJavaScriptNode name on old browsers. 2 input / 2 output channels.
  this.node = (this.context.createScriptProcessor ||
               this.context.createJavaScriptNode).call(this.context,
               bufferLen, 2, 2);
  this.node.onaudioprocess = function (e) {
    var sample = e.inputBuffer.getChannelData(0);
    // do what you want with the audio sample, push to a blob or send over a WebSocket
  };
  source.connect(this.node);
  // The script processor must be wired to a destination, otherwise (in some
  // implementations) onaudioprocess never fires.
  this.node.connect(this.context.destination);
};
Here is a version I wrote/modified to send audio over websockets for recording on a server.
For sending the audio only when it is available, you COULD use websockets or a webrtc peerconnection.
You will grab the stream through the getUserMedia success callback (you should keep a global variable holding the stream for all your connections). When it becomes available, you can use a signalling server to forward the requesting SDPs to the audio supplier, and you can set the requesting SDPs to receive-only for their connections.
PeerConnection example 1
PeerConnection example 2
Try with code like this:
// Request microphone access (legacy WebKit-prefixed API) and play the
// captured stream back through a dynamically created <audio> element.
// NOTE(review): navigator.webkitGetUserMedia and URL.createObjectURL(stream)
// are deprecated; modern code uses navigator.mediaDevices.getUserMedia()
// and `audioElement.srcObject = stream` — confirm target browsers.
navigator.webkitGetUserMedia({audio: true, video: false},
function(stream) { // Success Callback
var audioElement = document.createElement("audio");
document.body.appendChild(audioElement);
audioElement.src = URL.createObjectURL(stream);
audioElement.play();
}, function () { // Error callback
console.log("error")
});
You may use the stream from success callback to create a object URL and pass it into an HTML5 audio element.
Fiddle around in http://jsfiddle.net/veritas/2B9Pq/