I am using hls.js for radio playback. This works well. My question is: is there a way to retrieve the currently playing song data (title, artist)? I tried using some callbacks like (MANIFEST_PARSED, MANIFEST_LOADED) but had no success finding this information.
// Attach hls.js to the <audio> element and log manifest-related events.
const audio = document.querySelector('#audio');
const url = 'https://stream.ram.radio/audio/ram.stream_aac/playlist.m3u8';

if (Hls.isSupported()) {
  const hls = new Hls();
  hls.attachMedia(audio);
  // Load the stream only once the media element has been attached.
  hls.on(Hls.Events.MEDIA_ATTACHED, () => {
    hls.loadSource(url);
  });
  hls.on(Hls.Events.MANIFEST_PARSED, (event, data) => {
    console.log(data);
  });
  hls.on(Hls.Events.MANIFEST_LOADED, (event, data) => {
    console.log(data);
  });
}
<audio preload="true" id="audio" controls crossorigin>
You could try something like this:
*edit: referencing the docs here: https://github.com/video-dev/hls.js/blob/master/docs/API.md -- This event should be fired when the id3 tags are parsed.
// ID3 timed metadata (where radio streams usually carry title/artist)
// is surfaced through this event as fragments are parsed.
hls.on(Hls.Events.FRAG_PARSING_METADATA, (event, data) => {
  if (data) {
    console.log("Data", data);
  }
});
Related
I am recording screen with mediaRecorder in 10 second chunks of video.
Then, with node.js, we put those videos together in a single webm.
The problem is that we can't see the duration of the video when we play it.
We tried the package webm-duration-fix but I can only apply it to the video fragments, and when joining them it is broken
I'm using Javascript on the front and node js on the back. In the back I get the video snippet and then apply fs.appendFileSync
// Appends an uploaded webm fragment to a per-tester file on disk.
app.post('/upload/', async (req, res) => {
  const dir = './videos';
  const testeador = req.body.testeador;
  const cliente = req.body.cliente;
  const id = req.body.id;
  const fragmento = req.body.fragmento;
  const idVideo = generateRandomCode();
  try {
    fs.mkdirSync(`${dir}/${cliente}/${testeador}`, {
      recursive: true
    });
    // BUG FIX: fs.appendFileSync is synchronous and takes no callback —
    // the callback previously passed here was silently ignored and the
    // error it was meant to catch would have been thrown instead.
    fs.appendFileSync(
      `${dir}/${cliente}/${testeador}/${testeador}.webm`,
      req.files.video.data
    );
    // Respond only after the write succeeded, so failures are reported.
    res.send('Archivo subido');
  } catch (err) {
    res.status(500).send('Error al guardar el archivo');
  }
})
I'm trying to record a webcam stream via my node server into a new file. To get the webm Blobs I use the mediaRecorder api and then send those via binaryJs to my node server. Then I don't know how to proceed. My current code in node looks like this, but I don't know how to write the chunks to a file:
// Binary websocket server that receives webm chunks from the browser.
const { BinaryServer } = require('binaryjs');
const serverBinaryVideo = BinaryServer({ port: 9002 });

serverBinaryVideo.on('connection', (client) => {
  client.on('stream', (stream, meta) => {
    // Each 'data' event delivers one raw chunk from the client stream.
    stream.on('data', (chunk) => {
      console.log(chunk);
    });
    stream.on('end', () => {
    });
  });
});
Does it work like this, or should I convert the blob to base64 on my client?
Client Js looks like this for now:
// Binary websocket client that ships MediaRecorder chunks to the server.
const clientVideo = new BinaryClient('ws://localhost:9002/');
clientVideo.on('open', () => {
  // for the sake of this example let's put the stream in the window
  window.StreamVideo = clientVideo.createStream();
});

const options = {
  audioBitsPerSecond: 128000,
  videoBitsPerSecond: 2500000,
  mimeType: 'video/webm;codecs=h264' //mp4 not supported in chrome. Original: 'video/webm'
};

const mediaRecorder = new MediaRecorder(audiovideostream, options);
mediaRecorder.start(500); //call every 500ms
mediaRecorder.ondataavailable = (e) => {
  reader.readAsDataURL(e.data);
  videoProcess(e.data);
};
mediaRecorder.onerror = (error) => {
  console.log(error);
};
If I query the box REST API and get back a readable stream, what is the best way to handle it? How do you send it to the browser?? (DISCLAIMER: I'm new to streams and buffers, so some of this code is pretty theoretical)
Can you pass the readStream in the response and let the browser handle it? Or do you have to stream the chunks into a buffer and then send the buffer??
/**
 * Streams a Box file directly to the HTTP response.
 *
 * @param {object} req - Express request; expects `params.fileId` and an
 *   attached `sdk` with `files.getReadStream(fileId, opts, cb)`.
 * @param {object} res - Express response the file bytes are piped into.
 */
export function getFileStream(req, res) {
  const fileId = req.params.fileId;
  console.log('fileId', fileId);
  req.sdk.files.getReadStream(fileId, null, (err, stream) => {
    if (err) {
      console.log('error', err);
      return res.status(500).send(err);
    }
    res.type('application/octet-stream');
    console.log('stream', stream);
    // BUG FIX: res.send(stream) serializes the stream object instead of
    // its contents. Piping forwards the bytes with backpressure handling
    // and no buffering in memory.
    stream.pipe(res);
  });
}
Will ^^ work, or do you need to do something like:
/**
 * Buffers a Box file fully in memory, then sends it in one response.
 * (Prefer getFileStream/pipe for large files — this holds everything in RAM.)
 *
 * @param {object} req - Express request; expects `params.fileId` and an
 *   attached `sdk` with `files.getReadStream(fileId, opts, cb)`.
 * @param {object} res - Express response that receives the whole buffer.
 */
export function downloadFile(req, res) {
  const fileId = req.params.fileId;
  console.log('fileId', fileId);
  req.sdk.files.getReadStream(fileId, null, (err, stream) => {
    if (err) {
      console.log('error', err);
      return res.status(500).send(err);
    }
    const buffers = [];
    console.log('stream', stream);
    stream
      .on('data', (chunk) => {
        // BUG FIX: the original pushed the undefined name `buffer`
        // instead of the received `chunk`. Also removed the invalid
        // zero-argument `new Buffer()` (deprecated, throws in modern Node).
        buffers.push(chunk);
      })
      .on('end', () => {
        const finalBuffer = Buffer.concat(buffers);
        return res.status(200).send(finalBuffer);
      });
  });
}
The first example would work if you changed your theoretical line to:
- return res.status(200).send(stream);
+ res.writeHead(200, {header: here})
+ stream.pipe(res);
That's the nicest thing about node stream. The other case would (in essence) work too, but it would accumulate lots of unnecessary memory.
If you'd like to check a working example, here's one I wrote based on scramjet, express and browserify:
https://github.com/MichalCz/scramjet/blob/master/samples/browser/browser.js
Where your streams go from the server to the browser. With minor mods it'll fit your problem.
I'm using node-webkit 0.7.5 (which is the only version so far that allows me to display video in a correct way).
I'm trying to download a big video file (200+ mb) and sometimes, not always, node webkit crashes when the file is done downloading. Writestream.end() is called, and that is when the crash happens.
My code:
const fs = require('fs');
const request = require('request');

// Start the download; the body is consumed through the 'response' event.
const req = request(self.requestOptions(), (err) => {
  if (err) {
    //handle error
  }
});
req.on('response', (response) => {
  writeFile(response);
});
// Streams the HTTP response body to disk.
// FIX: pipe() replaces the manual data/end write loop. It handles
// backpressure and ends/closes the write stream itself, which avoids
// the crash previously seen when file.end() was called by hand after
// large downloads. (Unused `var self = this;` removed.)
function writeFile(response) {
    var file = fs.createWriteStream(filename);
    response.pipe(file);
    response.on('end', function () {
        log.debug("response end");
    });
    response.on('error', function (err) {
        //handle error, not called in this situation
    });
    file.on('finish', function () {
        log.debug("file finish");
    });
}
In general this method works excellently, but with some files and in some situations node-webkit just decides to call it quits.
Am I doing something obviously wrong? I've searched the net for possible clues or solutions, but I haven't found anything.
I am trying to upload multiple images with the help of multiparty module. I want to upload only a particular kind of images, say whose names are 'image.jpg'. But it's not working when the image does not meet the criteria and I don't get any response. Here is my code.
req.form.on('part', function(part) {
    if (part.filename === 'image.jpg') {
        var out = fs.createWriteStream('image/' + part.filename);
        part.pipe(out);
    } else {
        // FIX: every part must be consumed or multiparty stalls and the
        // 'close' event never fires. resume() drains the stream, discarding
        // the data without touching the file system.
        part.resume();
    }
});
// Respond once every part of the form has been processed.
req.form.on('close', () => {
  res.send('uploaded!');
});
I think I'm not able to clear the readable stream which contains 'part'. I can write that stream and then delete, it works then. But, I don't want to write any image on my file system if it doesn't meet the criteria. How can I achieve that?
To complete robertklep's answer, here is another possibility using a custom Writable stream that will blackhole data. When you pipe() the part stream to the blackhole, it will exhaust its source. This just keeps your code consistent by using streams everywhere instead of using raw read() function calls.
req.form.on('part', function (part) {
    var destination;
    if (part.filename !== 'image.jpg') {
        // Blackhole sink: acknowledges every chunk without storing it,
        // which exhausts the part stream so parsing can continue.
        destination = new stream.Writable();
        destination._write = function (chunk, encoding, done) {
            done(); // Don't do anything with the data
        };
    } else {
        destination = fs.createWriteStream('image/' + part.filename);
    }
    part.pipe(destination);
});
Not entirely sure if this is going to work, but I think you can read the entire part stream until it's exhausted:
req.form.on('part', function (part) {
    if (part.filename !== 'image.jpg') {
        // Drain the part by reading until its internal buffer is empty.
        for (;;) {
            if (part.read() === null) break;
        }
        return;
    }
    var out = fs.createWriteStream('image/' + part.filename);
    part.pipe(out);
});