I want to make videos with AC3 audio playable in browsers. To do this, I decided to use fluent-ffmpeg to convert the videos in real time and stream them. Piping the conversion as a live stream works pretty well, but you can't seek backwards in the video at all.
app.get('/video', function (req, res) {
  var path = 'show.mkv';
  ffmpeg(path)
    .outputOptions(arr) // arr holds the ffmpeg output options
    .on('end', function () {
      console.log('file has been converted successfully');
    })
    .on('error', function (err) {
      console.log('an error happened: ' + err.message);
    })
    .pipe(res);
});
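For context, arr above holds the ffmpeg output options, which the question doesn't show; a plausible set (purely an assumption on my part) for making an AC3 MKV browser-playable over a pipe might be:

// Hypothetical contents of `arr` -- the original options aren't shown.
// Keeps the video track as-is (assuming it is already browser-compatible,
// e.g. H.264), transcodes the AC3 audio to AAC, and emits fragmented MP4
// so the result can be piped to a non-seekable destination like `res`.
var arr = [
  '-c:v copy',
  '-c:a aac',
  '-movflags frag_keyframe+empty_moov',
  '-f mp4'
];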
So I need to build a sort of buffer for the conversion, that is, something that lets the user seek back and forth in the video. I've found some code that does exactly what I need, although it doesn't convert:
app.get('/video', function (req, res) {
  const path = 'assets/sample.mp4'
  const stat = fs.statSync(path)
  const fileSize = stat.size
  const range = req.headers.range
  if (range) {
    const parts = range.replace(/bytes=/, "").split("-")
    const start = parseInt(parts[0], 10)
    const end = parts[1]
      ? parseInt(parts[1], 10)
      : fileSize - 1
    const chunksize = (end - start) + 1
    const file = fs.createReadStream(path, { start, end })
    const head = {
      'Content-Range': `bytes ${start}-${end}/${fileSize}`,
      'Accept-Ranges': 'bytes',
      'Content-Length': chunksize,
      'Content-Type': 'video/mp4',
    }
    res.writeHead(206, head)
    file.pipe(res)
  } else {
    const head = {
      'Content-Length': fileSize,
      'Content-Type': 'video/mp4',
    }
    res.writeHead(200, head)
    fs.createReadStream(path).pipe(res)
  }
})
(from https://github.com/daspinola/video-stream-sample)
I've been trying to make fluent-ffmpeg work with buffering, with no success, and I have pretty much no clue what to do. If buffering isn't possible, is there a similar alternative other than pre-converting the videos? Thanks in advance.
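One pattern that sidesteps pre-converting entirely is to give up on byte ranges and seek by time instead: let the player request a start offset and restart the transcode from that point with ffmpeg's input seeking. A minimal sketch, assuming a hypothetical t query parameter and the same option list as above; the client then manages the timeline itself rather than relying on native Range-based seeking:

// Sketch: time-based seeking into a live transcode.
// GET /video?t=120 restarts the conversion two minutes in.
app.get('/video', function (req, res) {
  var t = parseInt(req.query.t, 10) || 0; // hypothetical start-time parameter
  res.writeHead(200, { 'Content-Type': 'video/mp4' });
  ffmpeg('show.mkv')
    .seekInput(t) // -ss before the input: jump to the offset before decoding
    .outputOptions([
      '-c:v copy',
      '-c:a aac',
      '-movflags frag_keyframe+empty_moov',
      '-f mp4'
    ])
    .on('error', function (err) {
      console.log('an error happened: ' + err.message);
    })
    .pipe(res, { end: true });
});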
Related
I've been messing around with ytdl and Next.js lately. I want to play audio from a video, but I'm having issues with forwarding/rewinding the audio. I found some posts that helped a bit, but it still doesn't seem to be working properly, so I thought I'd open a new post and see if anyone could help me out a bit :)
import ytdl from "ytdl-core";
const toArray = require("stream-to-array");

export default async function handler(req, res) {
  try {
    const stream = ytdl("mzB1VGEGcSU", {
      filter: "audioonly",
      quality: "highestaudio",
    });
    // toArray drains the stream completely before the response starts.
    const parts = await toArray(stream);
    const buffers = parts.map((part) =>
      Buffer.isBuffer(part) ? part : Buffer.from(part)
    );
    const compressedBuffer = Buffer.concat(buffers);
    const total = compressedBuffer.length;
    // Note: this tests `stream.headers` rather than `req.headers`; on a
    // ytdl stream that is normally undefined, so the 206 branch is
    // unlikely to ever run.
    if (stream.headers && stream.headers.range) {
      const range = req.headers.range;
      const parts = range.replace(/bytes=/, "").split("-");
      const partialstart = parts[0];
      const partialend = parts[1];
      const start = parseInt(partialstart, 10);
      const end = partialend ? parseInt(partialend, 10) : total - 1;
      const chunksize = end - start + 1;
      res.writeHead(206, {
        "Content-Range": "bytes " + start + "-" + end + "/" + total,
        "Accept-Ranges": "bytes",
        "Content-Length": chunksize,
        "Content-Type": "audio/mpeg",
      });
      // The stream was already consumed by toArray above, so this pipe
      // has nothing left to deliver.
      stream.pipe(res);
    } else {
      res.writeHead(200, {
        "Content-Length": total,
        "Content-Type": "audio/mpeg",
      });
      stream.pipe(res);
    }
  } catch (err) {
    console.log(err);
  }
}
Alternative way (doesn't work):
import ytdl from "ytdl-core";
const toArray = require("stream-to-array");

export default async function handler(req, res) {
  try {
    const stream = ytdl("mzB1VGEGcSU", {
      filter: "audioonly",
      quality: "highestaudio",
    });
    const parts = await toArray(stream);
    const buffers = parts.map((part) =>
      Buffer.isBuffer(part) ? part : Buffer.from(part)
    );
    const compressedBuffer = Buffer.concat(buffers);
    const total = compressedBuffer.length;
    res.writeHead(200, {
      "accept-ranges": "bytes",
      "Content-Length": Math.ceil(total / 8),
      "Content-Type": "audio/mpeg",
    });
    stream.pipe(res);
  } catch (err) {
    console.log(err);
  }
}
Temporary solution (very slow data read):
import ytdl from "ytdl-core";
const ffmpegPath = require("@ffmpeg-installer/ffmpeg").path;
const ffmpeg = require("fluent-ffmpeg");
ffmpeg.setFfmpegPath(ffmpegPath);

export default async function handler(req, res) {
  try {
    const stream = ytdl("mzB1VGEGcSU", {
      filter: "audioonly",
      quality: "highestaudio",
    });
    const newStream = ffmpeg(stream).audioBitrate("128").toFormat("mp3");
    res.writeHead(200, {
      Connection: "keep-alive",
      "Content-Type": "audio/mpeg",
    });
    newStream.pipe(res, { end: true });
  } catch (err) {
    console.log(err);
  }
}
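For what it's worth, in the first snippet toArray has already drained the ytdl stream into compressedBuffer by the time the response starts, so stream.pipe(res) delivers nothing; the buffered data itself has to be the response body. A minimal sketch of answering Range requests from that buffer instead (whether audio/mpeg matches what ytdl actually delivers depends on the chosen format):

// Sketch: answer Range requests from the fully buffered audio
// instead of piping the already-consumed stream.
const range = req.headers.range;
if (range) {
  const parts = range.replace(/bytes=/, "").split("-");
  const start = parseInt(parts[0], 10);
  const end = parts[1] ? parseInt(parts[1], 10) : total - 1;
  res.writeHead(206, {
    "Content-Range": "bytes " + start + "-" + end + "/" + total,
    "Accept-Ranges": "bytes",
    "Content-Length": end - start + 1,
    "Content-Type": "audio/mpeg",
  });
  res.end(compressedBuffer.slice(start, end + 1)); // send just the slice
} else {
  res.writeHead(200, {
    "Accept-Ranges": "bytes",
    "Content-Length": total,
    "Content-Type": "audio/mpeg",
  });
  res.end(compressedBuffer);
}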
I'm trying to stream video in chunks; the video is stored in MongoDB using GridFS. Here is the API:
videoRoute.get('/:id', (req, res) => {
  GFS.findById(req.params.id, (err: any, file: any) => {
    // Check if file exists
    if (!file || file.length === 0) {
      return res.status(404).json({
        err: 'No file exists',
      });
    }
    // Read output to browser
    const range = req.headers.range;
    if (!range) {
      res.status(400).send('Requires Range header');
    } else {
      // Create response headers
      const start = Number(range.replace(/\D/g, ''));
      const end = file.length - 1;
      const contentLength = end - start + 1;
      const headers = {
        'Content-Range': `bytes ${start}-${end}/${file.length}`,
        'Accept-Ranges': 'bytes',
        'Content-Length': contentLength,
      };
      // HTTP status 206 for partial content
      res.writeHead(206, headers);
      const bucket = new mongoose.mongo.GridFSBucket(mongoose.connection.db, {
        bucketName: 'uploads',
      });
      const readstream = bucket.openDownloadStream(file._id, {
        start,
        end,
      });
      readstream.pipe(res);
    }
  });
});
When I use only the final part (create the read stream and pipe it), the video works fine, but it only starts playing once it has fully loaded. To implement loading by chunks I added the rest of the logic, but with it the video doesn't work. Here's what I get: [screenshot]
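One thing worth checking in the code above: in the MongoDB driver, openDownloadStream's end option is exclusive (streaming stops before that offset), while the end in an HTTP Content-Range header is inclusive, so the stream delivers one byte less than the Content-Length promises, which can be enough to stall a player. A sketch of the corrected call (worth verifying against your driver version):

// Content-Range's `end` is inclusive, the driver's `end` is exclusive,
// so stream up to end + 1 to deliver exactly `contentLength` bytes.
const readstream = bucket.openDownloadStream(file._id, {
  start,
  end: end + 1,
});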
I am trying to upload chunks of base64 to a Node.js server and save those chunks into one file:
let chunks = [];

app.post('/api', (req, res) => {
  let { blob } = req.body;
  // converting chunks of base64 to buffer
  chunks.push(Buffer.from(blob, 'base64'));
  res.json({ gotit: true });
});

app.post('/finish', (req, res) => {
  let buf = Buffer.concat(chunks);
  fs.writeFile('finalvideo.webm', buf, (err) => {
    console.log('Ahh....', err);
  });
  console.log('SAVED');
  res.json({ save: true });
});
The problem with the above code is that the resulting video is not playable, and I don't know why. Am I really doing something wrong? I've also tried writable streams, and that isn't working either.
UPDATE - I
Instead of sending blobs, I've switched to sending binary, but even so I'm running into an error: TypeError: First argument must be a string, Buffer, ArrayBuffer, Array, or array-like object.
client.js
postBlob = async blob => {
  let arrayBuffer = await new Response(blob).arrayBuffer();
  let binary = new Uint8Array(arrayBuffer);
  console.log(binary); // logging typed Uint8Array
  axios.post('/api', { binary })
    .then(res => {
      console.log(res);
    });
};
server.js
let chunks = [];

app.post('/api', (req, res) => {
  let { binary } = req.body;
  let chunkBuff = Buffer.from(binary); // This line throws the error
  chunks.push(chunkBuff);
  console.log(chunkBuff);
  res.json({ gotit: true });
});

// Somehow combine those chunks into one file
app.post('/finish', (req, res) => {
  console.log('Combining the files', chunks.length);
  let buf = Buffer.concat(chunks);
  console.log(buf); // empty buffer
  fs.writeFile('save.webm', buf, (err) => {
    console.log('Ahh....', err);
  });
  res.json({ save: true });
});
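An aside on the TypeError: when axios posts { binary } as JSON, the Uint8Array is stringified into a plain object of index keys, e.g. {"0": 26, "1": 69, ...}, and Buffer.from rejects plain objects. One way to recover the bytes on the server, as a stopgap sketch; sending raw binary bodies, as in the answers further down, is cleaner:

// `binary` arrives as {"0": n, "1": n, ...} after the JSON round trip,
// so rebuild the byte array before handing it to Buffer.from.
let chunkBuff = Buffer.from(Object.values(binary));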
UPDATE - II
I am able to receive the binary chunks and append them to a stream, but in the final video only the first chunk plays; I don't know what happened to the other chunks, and the video just ends.
Code:
const writeMyStream = fs.createWriteStream(__dirname + '/APPENDED.webm', { flags: 'a', encoding: null });

app.post('/api', (req, res) => {
  let { binary } = req.body;
  let chunkBuff = Buffer.from(new Uint8Array(binary));
  writeMyStream.write(chunkBuff);
  res.json({ gotit: true });
});
UPDATE - III
My client code. Note: the other ways I've tried to upload blobs are commented out.
customRecordStream = stream => {
  let recorder = new MediaStreamRecorder(stream);
  recorder.mimeType = 'video/webm;codecs=vp9';
  recorder.ondataavailable = this.postBlob;
  recorder.start(INT_REC);
};

postBlob = async blob => {
  let arrayBuffer = await new Response(blob).arrayBuffer();
  let binary = new Uint8Array(arrayBuffer);
  axios.post('/api', { binary })
    .then(res => {
      console.log(res);
    });
  // let binaryUi8 = new Uint8Array(arrayBuffer);
  // let binArr = Array.from(binaryUi8);
  // console.log(new Uint8Array(arrayBuffer))
  // console.log(blob);
  // console.log(binArr)
  // let formData = new FormData();
  // formData.append('fname', 'test.webm')
  // formData.append("file", blob);
  // console.log(formData, 'Check me', blob)
  // axios({
  //   method: 'post',
  //   url: '/api',
  //   data: formData,
  //   config: { headers: { 'Content-Type': 'multipart/form-data' } }
  // }).then(res => {
  //   console.log(res, 'FROM SERVER')
  // })
  // this.blobToDataURL(blob, (blobURL) => {
  //   axios.post('/api', { blob: blobURL })
  //     .then(res => {
  //       console.log(res)
  //     })
  // })
};
I was able to get this working by converting to base64 encoding on the front-end with the FileReader API. On the backend, create a new Buffer from the data chunk sent and write it to a file stream. Some key things with my code sample:
I'm using fetch because I didn't want to pull in axios.
When using fetch, you have to make sure you use bodyParser on the backend
I'm not sure how much data you're collecting in your chunks (i.e. the duration value passed to the start method on the MediaRecorder object), but you'll want to make sure your backend can handle the size of the data chunk coming in. I set mine really high to 50MB, but this may not be necessary.
I never close the write stream explicitly... you could potentially do this in your /finish route. Otherwise, createWriteStream defaults to autoClose: true, so the node process will close it automatically.
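If you do want to close it explicitly, a /finish route could end the stream; a sketch, assuming a single shared write stream (see the refinement after the back-end sample below; the example itself creates one per request):

// Hypothetical /finish route that closes a shared append-mode stream.
app.post('/finish', (req, res) => {
  fileStream.end(); // flush remaining writes and release the file descriptor
  return res.json({ done: true });
});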
Full working example below:
Front End:
const mediaSource = new MediaSource();
mediaSource.addEventListener('sourceopen', handleSourceOpen, false);
let mediaRecorder;
let sourceBuffer;

function customRecordStream(stream) {
  // should actually check to see if the given mimeType is supported on the browser here.
  let options = { mimeType: 'video/webm;codecs=vp9' };
  mediaRecorder = new MediaRecorder(stream, options);
  mediaRecorder.ondataavailable = postBlob;
  mediaRecorder.start(INT_REC); // INT_REC: caller-supplied timeslice in ms
}

function postBlob(event) {
  if (event.data && event.data.size > 0) {
    sendBlobAsBase64(event.data);
  }
}

function handleSourceOpen(event) {
  sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vp8"');
}

function sendBlobAsBase64(blob) {
  const reader = new FileReader();
  reader.addEventListener('load', () => {
    const dataUrl = reader.result;
    const base64EncodedData = dataUrl.split(',')[1];
    console.log(base64EncodedData);
    sendDataToBackend(base64EncodedData);
  });
  reader.readAsDataURL(blob);
}

function sendDataToBackend(base64EncodedData) {
  const body = JSON.stringify({
    data: base64EncodedData
  });
  fetch('/api', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
    },
    body
  }).then(res => {
    return res.json();
  }).then(json => console.log(json));
}
Back End:
const fs = require('fs');
const path = require('path');
const express = require('express');
const bodyParser = require('body-parser');
const app = express();
const server = require('http').createServer(app);

app.use(bodyParser.urlencoded({ extended: true }));
app.use(bodyParser.json({ limit: "50MB", type: 'application/json' }));

app.post('/api', (req, res) => {
  try {
    const { data } = req.body;
    const dataBuffer = Buffer.from(data, 'base64'); // new Buffer() is deprecated
    const fileStream = fs.createWriteStream('finalvideo.webm', { flags: 'a' });
    fileStream.write(dataBuffer);
    console.log(dataBuffer);
    return res.json({ gotit: true });
  } catch (error) {
    console.log(error);
    return res.json({ gotit: false });
  }
});
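One refinement worth considering: as written, the sample opens a new write stream for every chunk and never ends any of them, so file descriptors accumulate over a long recording. Hoisting a single append-mode stream out of the handler (which the /finish sketch above assumes) avoids that:

// A single shared append-mode stream instead of one per request.
const fileStream = fs.createWriteStream('finalvideo.webm', { flags: 'a' });

app.post('/api', (req, res) => {
  const { data } = req.body;
  fileStream.write(Buffer.from(data, 'base64')); // queue the chunk for append
  return res.json({ gotit: true });
});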
Inspired by @willascend's answer:
Backend-side:
app.use(express.raw());

app.post('/video-chunck', (req, res) => {
  fs.createWriteStream('myvideo.webm', { flags: 'a' }).write(req.body);
  res.sendStatus(200);
});
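One caveat with this approach: express.raw() defaults to a 100kb body limit (and to parsing only application/octet-stream bodies), so larger recorder chunks need the limit raised. A sketch; the 50mb figure is arbitrary:

// Raise the raw-body limit so bigger MediaRecorder chunks aren't rejected.
app.use(express.raw({ type: 'application/octet-stream', limit: '50mb' }));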
Frontend-side:
mediaRecorder.ondataavailable = event => {
  if (event.data && event.data.size > 0) {
    fetch(this.serverUrl + '/video-chunck', {
      method: 'POST',
      headers: { 'Content-Type': 'application/octet-stream' },
      body: event.data
    });
  }
};
My Express version is 4.17.1.
I faced the same problem today. As a solution, on the back-end I used fs.appendFile:
fs.appendFile(Path, rawData, function (err) {
  if (err) throw err;
  console.log('Chunk saved!');
});
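For completeness, a minimal route built around that call; the express.raw middleware, route path, and file name here are assumptions to make the fragment self-contained:

const fs = require('fs');

// Accept raw binary bodies; the 50mb limit is arbitrary.
app.use(express.raw({ type: 'application/octet-stream', limit: '50mb' }));

app.post('/api', (req, res) => {
  // Append each chunk to the same file; appendFile creates it if missing.
  fs.appendFile('finalvideo.webm', req.body, (err) => {
    if (err) return res.status(500).json({ saved: false });
    console.log('Chunk saved!');
    res.json({ saved: true });
  });
});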
We have an application that sometimes serves an MP4 file stored on S3. Since only specific people should be able to see each file, the file is private, and inside our service we only show it to authorised people.
The movie starts playing correctly (in the browser's built-in video tag). However, if we seek to a point in the movie that hasn't been buffered yet, the player buffers for a bit and then stops playing; afterwards, clicking Play causes the movie to start from the beginning. If I make the file public and access it directly from S3, seeking to an unbuffered point works correctly.
I created a standalone Node program that reproduces this problem. I tried to make the response headers identical to those that S3 sends, but the problem remains.
const http = require("http");
const AWS = require("aws-sdk");
const proxy = require("proxy-agent");

Object.assign(process.env, {
  AWS_ACCESS_KEY_ID: "REDACTED",
  AWS_SECRET_ACCESS_KEY: "REDACTED",
  AWS_EC2_REGION: "us-west-2"
});

const s3 = new AWS.S3({
  s3ForcePathStyle: 'true',
  signatureVersion: 'v4',
  httpOptions: { timeout: 300000 },
  endpoint: 'https://s3.us-west-2.amazonaws.com',
  region: 'us-west-2'
});

const objectParams = {
  Bucket: 'REDACTED',
  Key: 'some-movie.mp4'
};

let request = 0;

function serve(req, res) {
  console.log("Handling request", ++request, req.url);
  s3.headObject(objectParams, (err, data) => {
    if (err)
      throw err;
    const { ContentType: type, ContentLength: length } = data;
    console.log("Got", data);
    if (data.ETag)
      res.setHeader("ETag", data.ETag);
    const range = req.headers.range;
    if (range) {
      console.log("Serving range", range);
      const parts = range.replace("bytes=", "").split("-");
      const start = parseInt(parts[0], 10);
      const end = parts[1] ? parseInt(parts[1], 10) : length - 1;
      let headers = {
        "Content-Range": `bytes ${start}-${end}/${length}`,
        "Accept-Ranges": "bytes",
        "Content-Type": type,
        "Content-Length": end - start + 1,
        "Last-Modified": data.LastModified,
      };
      if (req.headers["if-range"]) {
        console.log("Setting if-range to", req.headers["if-range"]);
        headers["If-Range"] = req.headers["if-range"];
      }
      res.writeHead(206, headers);
    }
    else {
      console.log("Whole file");
      res.setHeader("Accept-Ranges", "bytes");
      res.setHeader("Content-Type", type);
      res.setHeader("Content-Length", length);
      res.setHeader("Last-Modified", data.LastModified);
    }
    const stream = s3.getObject(objectParams).createReadStream();
    stream.on("error", err => console.error("stream error:", err));
    stream.pipe(res).on("finish", data => {
      console.log("Finished streaming");
    });
  });
}

http.createServer(serve).listen(1234);
What am I missing?
Here is code with the seek bar working just fine. You can test it by integrating the code below and opening the API URL in the browser.
import mime from 'mime-types';

const key = 'S3_BUCKET KEY';
const params = { Key: key, Bucket: AWS_BUCKET };

// s3 here refers to an AWS.S3 object.
s3.headObject(params, function (err, data) {
  if (err) {
    console.error(err);
    return next(err);
  }
  if (req.headers.range) {
    const range = req.headers.range;
    const bytes = range.replace(/bytes=/, '').split('-');
    const start = parseInt(bytes[0], 10);
    const total = data.ContentLength;
    const end = bytes[1] ? parseInt(bytes[1], 10) : total - 1;
    const chunkSize = end - start + 1;
    res.set('Content-Range', 'bytes ' + start + '-' + end + '/' + total);
    res.set('Accept-Ranges', 'bytes');
    res.set('Content-Length', chunkSize.toString());
    // Forward the range to S3 so only the requested bytes are fetched.
    params['Range'] = range;
    res.status(206); // partial content
    console.log('video buffering - range, total, start, end, params', range, total, start, end, params);
  } else {
    res.set('Content-Length', data.ContentLength.toString());
    res.status(200); // no Range header: send the whole object
    console.log('video buffering - params', params);
  }
  res.set('Content-Type', mime.lookup(key));
  res.set('Last-Modified', data.LastModified.toString());
  res.set('ETag', data.ETag);
  const stream = s3.getObject(params).createReadStream();
  stream.on('error', function error(err) {
    return next(err);
  });
  stream.on('end', () => {
    console.log('Served by Amazon S3: ' + key);
  });
  stream.pipe(res);
});
I'm trying to implement an app where the user can select and play any video from a list. On the first run the streaming works very smoothly, but as soon as I click logout or back and play another video, I just get a black video player.
Below is the code to play the video:
app.get('/video', function (req, res) {
  var getVideoID = req.headers["referer"];
  getVideoID = getVideoID.substring(getVideoID.indexOf("=") + 1, getVideoID.length);
  const path = video_path[getVideoID]; // 'assets/electronic_effects.flv'
  const stat = fs.statSync(path);
  const fileSize = stat.size;
  const range = req.headers.range;
  if (range) {
    const parts = range.replace(/bytes=/, "").split("-");
    const start = parseInt(parts[0], 10);
    var end = parts[1]
      ? parseInt(parts[1], 10)
      : fileSize - 1;
    var chunksize = (end - start) + 1;
    var maxChunk = 1024 * 1024; // 1MB at a time
    if (chunksize > maxChunk) {
      end = start + maxChunk - 1;
      chunksize = (end - start) + 1;
    }
    file = fs.createReadStream(path, { start, end, autoClose: true })
      .on("open", function () {
        file.pipe(res);
      })
      .on('end', function () {
        console.log('stream end');
      })
      .on('close', function () {
        console.log('stream close');
      });
    const head = {
      'Content-Range': `bytes ${start}-${end}/${fileSize}`,
      'Accept-Ranges': 'bytes',
      'Content-Length': chunksize,
      'Content-Type': 'video/mp4',
    };
    res.writeHead(206, head);
  } else {
    const head = {
      'Content-Length': fileSize,
      'Content-Type': 'video/mp4',
    };
    res.writeHead(200, head);
    fs.createReadStream(path).pipe(res);
  }
});
Black screen of horror on the second video access: [screenshot of the black video player]
I strongly suspect the stream is not getting closed. Any help will be appreciated. I'm using Node for the first time here, so pardon any silly mistakes ;)
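If that suspicion is right, one cheap thing to try is tearing the read stream down whenever the response goes away, so an abandoned request can't keep the file open; a sketch, added inside the range branch above:

// Destroy the read stream if the client disconnects mid-stream,
// so abandoned requests don't hold file handles open.
res.on('close', function () {
  file.destroy();
});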
Thanks in advance.