I built a website that records a webcam in real time on my server.
To do this, I use a WebSocket to send my webcam data to my Node.js server.
The recording works, but the resulting video has no duration. In other words, I can't navigate within the video: I can only play the file second by second, and I can't skip ahead or jump back in the timeline. (see the GIF below)
My server code that saves the video:
server.on("connection", (socket) => {
console.log("Client connected");
let writeStream = null;
let id = Date.now();
let filename = `${folderStream}/video-${id}.${extensionFile}`; // example : ./video/video-1620000000000.mp4
socket.on("message", (message) => {
if (!writeStream) {
console.log("Stream started");
writeStream = fs.createWriteStream(filename);
}
writeStream.write(message);
});
socket.on("close", () => {
console.log("Stream closed");
writeStream.end();
});
});
I tried using ffmpeg, but my video is already in a single file, not segmented (maybe I'm using it incorrectly).
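For reference, the chunks arriving over the WebSocket typically come from MediaRecorder on the client. A minimal sketch of that side (the socket URL and chunk interval are assumptions, not from the question):

// Assumed client-side sketch: capture the webcam and forward each recorded chunk
const socket = new WebSocket("ws://localhost:8080");

navigator.mediaDevices.getUserMedia({ video: true, audio: true }).then((stream) => {
  const recorder = new MediaRecorder(stream, { mimeType: "video/webm" });
  recorder.ondataavailable = (e) => {
    if (e.data.size > 0) socket.send(e.data); // each Blob chunk goes to the server
  };
  recorder.start(1000); // emit a chunk every second
});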
For future users who want to do the same thing, here is what worked for me. The chunks produced by MediaRecorder are written as a live stream, without the duration and seek metadata a player needs, so the file has to be rewritten by ffmpeg afterwards.
First, create a temporary file that receives all the raw data:
server.on("connection", (socket) => {
console.log("Client connected");
let tempFile = null;
let id = Date.now();
let filename = `video-${id}`; // exemple : video-1620000000000
socket.on("message", (message) => {
if (!tempFile) {
console.log("Stream started");
tempFile = fs.createWriteStream(`${folderStream}/${filename}.tmp`);
}
tempFile.write(message);
});
Then, with the "fluent-ffmpeg" library:
npm i fluent-ffmpeg
const ffmpeg = require("fluent-ffmpeg");
Don't forget to install ffmpeg on your machine as well.
Once the stream ends, you can transcode the temporary file into a proper video:
socket.on("close", () => {
console.log("Fin d'un stream");
tempFile.end();
ffmpeg()
.input(`${folderStream}/${filename}.tmp`)
.format(extensionFile) // const extensionFile = "mp4"
.output(`${folderTranscode}/${filename}.${extensionFile}`)
.on("end", () => {
console.log("Conversion complete");
})
.run();
});
});
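A possible refinement (an untested sketch, not part of the original fix): since the chunks are already encoded, a stream copy remuxes the file without re-encoding, which is much faster and still rewrites the duration metadata, provided the target container supports the source codecs:

ffmpeg()
  .input(`${folderStream}/${filename}.tmp`)
  .videoCodec("copy") // copy the existing streams instead of re-encoding
  .audioCodec("copy")
  .output(`${folderTranscode}/${filename}.${extensionFile}`)
  .on("end", () => console.log("Remux complete"))
  .run();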
Related
This is my client code. I'm trying to send a message over a named pipe to a dotnet application (which uses "Message" as a transmission mode; using raw bytes is not an option for me). The dotnet application receives a message, but it appears to be malformed, like below:
{琠楨獩愠猠牴湩Ⅷ}
var net = require('net');

var PIPE_NAME = "TestServicePipeSender";
var PIPE_PATH = "\\\\.\\pipe\\" + PIPE_NAME;

var client = net.connect(PIPE_PATH, function () {
    const str = "Hey. this is a string!";
    client.write(str, "utf-8", () => {
        console.log("Message Sent!")
    })
})

client.on('data', function (data) {
    console.log('Client: on data:', data.toString());
    client.end('Thanks!');
});

client.on('end', function () {
    console.log('Client: on end');
})
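A side note on the malformed output: the garbled characters are consistent with UTF-8 bytes being decoded as UTF-16LE on the receiving side. A minimal sketch of a possible fix, assuming the dotnet reader decodes with Encoding.Unicode (an assumption; check which encoding your reader actually uses):

// Assumed fix: encode the string as UTF-16LE so it matches the reader's decoding
client.write(Buffer.from("Hey. this is a string!", "utf16le"), () => {
    console.log("Message Sent!");
});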
I'm working on a project that requires reading a log file. The log file can get massive, so I only want to read data appended to the file, not the old data, to avoid potential performance issues.
I tried fs.createReadStream, but it didn't seem to work.
Code:
let stream = fs.createReadStream(`/path/`, { autoClose: true, encoding: "utf-8" });

stream.on('ready', () => {
    console.log("Loaded log file!");
})

stream.on("data", chunk => {
    // Data stream
});

stream.on("end", () => {
    stream.pause();
    console.log("ended");

    // Going thru the data
    setTimeout(() => {
        stream.resume();
        stream.read();
    }, 10000);
});
With that code, the "end" event only fires once, even though the stream is set not to close automatically, and no additional data comes through.
After messing around with the read stream, and thanks to @KyleRifqi, I was able to come up with this solution: store the number of bytes read so far and update it every time the stream ends.
let lastBytes = 0;

function readNewData() {
    let text = "";
    // Start reading where the previous pass stopped
    let stream = fs.createReadStream(`/path/`, { autoClose: true, encoding: "utf-8", start: lastBytes });

    stream.on('ready', () => {
        console.log("Loaded log file!");
    });

    stream.on("data", chunk => {
        text += chunk;
    });

    stream.on("end", () => {
        let read = Buffer.byteLength(text, "utf-8");
        lastBytes += read;
        //Code here
    });
}
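To actually re-run the read whenever the log grows, one option (a sketch, not part of the original answer) is to watch the file and call the function above on each change:

// Re-read appended data whenever the file size increases
fs.watchFile(`/path/`, (curr, prev) => {
    if (curr.size > prev.size) readNewData();
});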
I'm trying to end an IPC stream from my Node app. I can't figure out which character represents end-of-stream, and my stream never finishes.
const net = require("net");
const server = net.createServer((stream) => {
console.log("Server: on connection");
stream.on("data", (buffer) => {
console.log("Got data:", buffer.toString());
stream.write("This message ends here ->.");
});
stream.on("end", () => {
console.log("Stream ended");
server.close();
});
});
server.listen("\\\\.\\pipe\\myPipe", () => {
console.log("Listening...");
});
I have tried adding a null character (\u0000) to the end of my message and checking for that on the receiving end, but that feels a bit hacky. Is there a way to send something like an end-of-stream character?
Thanks!
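For what it's worth, a byte stream has no in-band end-of-stream character; the usual options are to half-close the socket when you're done, or to frame each message. A minimal length-prefix framing sketch (the function name is illustrative, not from the question):

// Sender: prefix each message with its byte length so the reader knows where it ends
function sendMessage(stream, text) {
    const body = Buffer.from(text, "utf-8");
    const header = Buffer.alloc(4);
    header.writeUInt32BE(body.length, 0);
    stream.write(Buffer.concat([header, body]));
}

// Alternatively, stream.end("last message") writes the data and then signals EOF,
// which fires the "end" event on the other side.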
I'm trying to record a webcam stream via my Node server into a new file. To get the WebM blobs I use the MediaRecorder API, then send them via BinaryJS to my Node server. After that I don't know how to proceed. My current Node code looks like this, but I don't know how to write the chunks to a file:
var binaryServer = require('binaryjs').BinaryServer;
var serverBinaryVideo = binaryServer({port: 9002});

serverBinaryVideo.on('connection', function(client) {
    client.on('stream', function(stream, meta) {
        stream.on('data', function(chunk) {
            console.log(chunk);
        });
        stream.on('end', function() {
        });
    });
});
Does it work like this, or should I convert the blob to base64 on my client?
My client JS looks like this for now:
var clientVideo = new BinaryClient('ws://localhost:9002/');

clientVideo.on('open', function() {
    // for the sake of this example let's put the stream in the window
    window.StreamVideo = clientVideo.createStream();
});

var options = {
    audioBitsPerSecond: 128000,
    videoBitsPerSecond: 2500000,
    mimeType: 'video/webm;codecs=h264' // mp4 not supported in Chrome. Original: 'video/webm'
}

var mediaRecorder = new MediaRecorder(audiovideostream, options);
mediaRecorder.start(500); // emit a chunk every 500ms

mediaRecorder.ondataavailable = function(e) {
    // console.log("mediarecorderData");
    // console.log(e);
    reader.readAsDataURL(e.data); // `reader` (a FileReader) is not defined in this snippet
    videoProcess(e.data)
    // chunks.push(e.data);
}

mediaRecorder.onerror = function(error) {
    console.log(error);
    // chunks.push(e.data);
}
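On the server side, assuming the `stream` BinaryJS hands you behaves like a standard Node readable stream (worth verifying in your version), the simplest approach is to pipe it straight into a file; no base64 conversion is needed, and sending raw binary avoids the roughly 33% size overhead of base64. A sketch:

var fs = require('fs');

serverBinaryVideo.on('connection', function(client) {
    client.on('stream', function(stream, meta) {
        // Write each incoming chunk to disk as it arrives
        var file = fs.createWriteStream('video-' + Date.now() + '.webm');
        stream.pipe(file);
        stream.on('end', function() {
            console.log('Recording saved');
        });
    });
});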
If I query the Box REST API and get back a readable stream, what is the best way to handle it? How do you send it to the browser? (DISCLAIMER: I'm new to streams and buffers, so some of this code is pretty theoretical.)
Can you pass the read stream in the response and let the browser handle it, or do you have to collect the chunks into a buffer and then send the buffer?
export function getFileStream(req, res) {
    const fileId = req.params.fileId;
    console.log('fileId', fileId);

    req.sdk.files.getReadStream(fileId, null, (err, stream) => {
        if (err) {
            console.log('error', err);
            return res.status(500).send(err);
        }
        res.type('application/octet-stream');
        console.log('stream', stream);
        return res.status(200).send(stream);
    });
}
Will ^^ work, or do you need to do something like:
export function downloadFile(req, res) {
    const fileId = req.params.fileId;
    console.log('fileId', fileId);

    req.sdk.files.getReadStream(fileId, null, (err, stream) => {
        if (err) {
            console.log('error', err);
            return res.status(500).send(err);
        }
        const buffers = [];
        console.log('stream', stream);
        stream.on('data', (chunk) => {
            buffers.push(chunk);
        })
        .on('end', function() {
            const finalBuffer = Buffer.concat(buffers);
            return res.status(200).send(finalBuffer);
        });
    });
}
The first example will work if you change your theoretical line:
- return res.status(200).send(stream);
+ res.writeHead(200, {header: here})
+ stream.pipe(res);
That's the nicest thing about Node streams. The other approach would (in essence) work too, but it would accumulate lots of unnecessary memory.
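Applied to the first snippet, that would look roughly like this (the Content-Type value is a placeholder; set whatever matches your file):

export function getFileStream(req, res) {
    const fileId = req.params.fileId;

    req.sdk.files.getReadStream(fileId, null, (err, stream) => {
        if (err) {
            return res.status(500).send(err);
        }
        res.writeHead(200, { 'Content-Type': 'application/octet-stream' });
        stream.pipe(res); // send chunks to the browser as they arrive
    });
}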
If you'd like to see a working example, here's one I wrote based on Scramjet, Express, and Browserify, where streams go from the server to the browser:
https://github.com/MichalCz/scramjet/blob/master/samples/browser/browser.js
With minor modifications it'll fit your problem.