Next.js ytdl: stream music with the ability to fast-forward/rewind - node.js

I've been messing around with ytdl and Next.js lately. I want to play the audio from a video, but I'm having trouble fast-forwarding/rewinding it. I found some posts that helped a bit, but it still doesn't seem to work properly. So I thought I'd open a new post and see if anyone could help me out a bit :)
import ytdl from "ytdl-core";
const toArray = require("stream-to-array");

export default async function handler(req, res) {
  try {
    const stream = ytdl("mzB1VGEGcSU", {
      filter: "audioonly",
      quality: "highestaudio",
    });
    // Buffer the whole track so the total size is known up front.
    const parts = await toArray(stream);
    const buffers = parts.map((part) =>
      Buffer.isBuffer(part) ? part : Buffer.from(part)
    );
    const compressedBuffer = Buffer.concat(buffers);
    const total = compressedBuffer.length;
    // The range header arrives on the incoming request, not on the ytdl
    // stream (a `stream.headers.range` check never matches).
    if (req.headers.range) {
      const range = req.headers.range;
      const rangeParts = range.replace(/bytes=/, "").split("-");
      const start = parseInt(rangeParts[0], 10);
      const end = rangeParts[1] ? parseInt(rangeParts[1], 10) : total - 1;
      const chunksize = end - start + 1;
      res.writeHead(206, {
        "Content-Range": "bytes " + start + "-" + end + "/" + total,
        "Accept-Ranges": "bytes",
        "Content-Length": chunksize,
        "Content-Type": "audio/mpeg",
      });
      // toArray() already drained the stream, so piping it again sends
      // nothing; answer with the requested slice of the buffer instead.
      res.end(compressedBuffer.slice(start, end + 1));
    } else {
      res.writeHead(200, {
        "Content-Length": total,
        "Content-Type": "audio/mpeg",
      });
      res.end(compressedBuffer);
    }
  } catch (err) {
    console.log(err);
  }
}
Alternative way (doesn't work):
import ytdl from "ytdl-core";
const toArray = require("stream-to-array");

export default async function handler(req, res) {
  try {
    const stream = ytdl("mzB1VGEGcSU", {
      filter: "audioonly",
      quality: "highestaudio",
    });
    const parts = await toArray(stream);
    const buffers = parts.map((part) =>
      Buffer.isBuffer(part) ? part : Buffer.from(part)
    );
    const compressedBuffer = Buffer.concat(buffers);
    const total = compressedBuffer.length;
    res.writeHead(200, {
      "accept-ranges": "bytes",
      // Advertising an eighth of the real size means the response gets
      // cut off long before the track ends.
      "Content-Length": Math.ceil(total / 8),
      "Content-Type": "audio/mpeg",
    });
    // The stream was already consumed by toArray, so nothing is piped here.
    stream.pipe(res);
  } catch (err) {
    console.log(err);
  }
}
Temporary solution (Very slow data read):
import ytdl from "ytdl-core";
const ffmpegPath = require("@ffmpeg-installer/ffmpeg").path;
const ffmpeg = require("fluent-ffmpeg");

ffmpeg.setFfmpegPath(ffmpegPath);

export default async function handler(req, res) {
  try {
    const stream = ytdl("mzB1VGEGcSU", {
      filter: "audioonly",
      quality: "highestaudio",
    });
    // Re-encode to MP3 on the fly; the transcoding is what makes this slow.
    const newStream = ffmpeg(stream).audioBitrate("128").toFormat("mp3");
    res.writeHead(200, {
      Connection: "keep-alive",
      "Content-Type": "audio/mpeg",
    });
    newStream.pipe(res, { end: true });
  } catch (err) {
    console.log(err);
  }
}
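For what it's worth, the Range handling is what makes seeking possible at all: the browser's audio element sends Range: bytes=... whenever you drag the seek bar, and the server has to answer 206 with just that byte slice. Since every seek arrives as a fresh request, re-downloading the track from YouTube each time is what makes it feel slow. Below is a minimal sketch (not production code) that caches the buffered audio per video ID in module scope so repeat requests and seeks are served from memory; the cache and the videoId query parameter are my own additions.

import ytdl from "ytdl-core";
const toArray = require("stream-to-array");

// Hypothetical module-level cache: videoId -> buffered audio.
// It lives as long as the server process does.
const cache = new Map();

async function getAudioBuffer(videoId) {
  if (!cache.has(videoId)) {
    const parts = await toArray(
      ytdl(videoId, { filter: "audioonly", quality: "highestaudio" })
    );
    cache.set(
      videoId,
      Buffer.concat(parts.map((p) => (Buffer.isBuffer(p) ? p : Buffer.from(p))))
    );
  }
  return cache.get(videoId);
}

export default async function handler(req, res) {
  try {
    const audio = await getAudioBuffer(req.query.videoId || "mzB1VGEGcSU");
    const total = audio.length;
    const range = req.headers.range;
    if (range) {
      // Each seek in the <audio> element arrives as a new Range request;
      // serving it from the cached buffer makes seeking near-instant.
      const [startStr, endStr] = range.replace(/bytes=/, "").split("-");
      const start = parseInt(startStr, 10);
      const end = endStr ? parseInt(endStr, 10) : total - 1;
      res.writeHead(206, {
        "Content-Range": `bytes ${start}-${end}/${total}`,
        "Accept-Ranges": "bytes",
        "Content-Length": end - start + 1,
        "Content-Type": "audio/mpeg",
      });
      res.end(audio.slice(start, end + 1));
    } else {
      res.writeHead(200, {
        "Accept-Ranges": "bytes",
        "Content-Length": total,
        "Content-Type": "audio/mpeg",
      });
      res.end(audio);
    }
  } catch (err) {
    console.error(err);
    res.status(500).end();
  }
}

A real app would want an eviction policy, since an unbounded Map grows with every distinct video.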

Related

How to long-poll in Node.js and JavaScript without making a lot of network requests

I made a long-polling setup for this little project, but a lot of network requests are being sent and eventually the browser freezes. Is there anything I am doing wrong here?
On the server side, server.js:
//...
const PORT = 3000;
let subscribers = Object.create(null);
const server = createServer((req, res) => {
const parsedUrl = parse(req.url, true);
const urlPath = parsedUrl.pathname;
const queryStringObject = parsedUrl.query;
const method = req.method.toLowerCase();
if (urlPath === "/" && method === "get") {
res.writeHead(200, {
"Content-Type": "text/html",
"Access-Control-Allow-Origin": "*",
});
res.end(`<h1>Home Page of the API</h1>`);
return;
}
if (urlPath === "/datas" && method === "get") {
res.writeHead(200, {
"Content-Type": "application/json",
"Access-Control-Allow-Origin": "*",
"Cache-Control": "no-cache, must-revalidate",
});
let id = Math.random();
subscribers[id] = res;
fs.readFile(`${process.cwd()}/data/datas.json`, "utf-8", (err, data) => {
res.end(data);
});
req.on("close", function () {
delete subscribers[id];
// console.log(`${id} deleted`);
});
return;
}
});
server.listen(PORT, (err) => {
console.log(`Server up and running,\nYou can now visit http://localhost:3000`);
});
`
And on the client side, app.js:
const prices = document.querySelector(".prices");

function createEl(resultDatas) {
  prices.innerHTML = "";
  resultDatas.forEach((resultData) => {
    const div = document.createElement("div");
    div.classList.add("element-container");
    const p = document.createElement("p");
    p.innerHTML = resultData.title;
    if (resultData.title.toLowerCase() === "silver") {
      p.classList.add(`silver`);
    }
    p.classList.add(`title`);
    div.appendChild(p);
    const h3 = document.createElement("h3");
    h3.innerHTML = resultData.price;
    h3.classList.add("price");
    div.appendChild(h3);
    prices.append(div);
  });
}

let tickerData = null;
const fetchTickerValue = async () => {
  try {
    const response = await fetch("http://localhost:3000/datas");
    const data = await response.json();
    createEl(data);
  } catch (error) {
    console.error(error?.message ?? "ticker call failed");
  } finally {
    // Re-subscribes immediately; because the server answers right away,
    // this loop fires requests back to back and floods the network tab.
    fetchTickerValue();
  }
};
fetchTickerValue();
if (tickerData) console.log(tickerData);
I want only one request to be sent at a time while waiting for the response from the server.
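For comparison, real long polling means the server parks each response and only ends it when there is new data (or a timeout expires), and the client re-subscribes only after the previous request completes. A rough sketch of that shape, assuming the data lives in data/datas.json and using fs.watch as the change signal (both of those are my assumptions):

const { createServer } = require("http");
const fs = require("fs");

const DATA_FILE = `${process.cwd()}/data/datas.json`;
let subscribers = Object.create(null);

// Wake every parked response when the data file changes.
fs.watch(DATA_FILE, () => {
  fs.readFile(DATA_FILE, "utf-8", (err, data) => {
    if (err) return;
    for (const id of Object.keys(subscribers)) {
      subscribers[id].end(data);
      delete subscribers[id];
    }
  });
});

createServer((req, res) => {
  if (req.url.startsWith("/datas") && req.method === "GET") {
    res.writeHead(200, {
      "Content-Type": "application/json",
      "Access-Control-Allow-Origin": "*",
      "Cache-Control": "no-cache, must-revalidate",
    });
    // Park the response instead of ending it; it is completed later,
    // either by the file watcher above or by the timeout below.
    const id = Math.random();
    subscribers[id] = res;
    const timer = setTimeout(() => {
      if (subscribers[id]) {
        subscribers[id].end("[]"); // nothing new; the client re-subscribes
        delete subscribers[id];
      }
    }, 25000);
    req.on("close", () => {
      clearTimeout(timer);
      delete subscribers[id];
    });
  }
}).listen(3000);

With the server holding connections open like this, the client's recursive fetch loop stops flooding the network tab, because each request now lasts until something actually changes or the ~25s timeout fires.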

Get progress of firebase admin file upload

I'm trying to get the progress of a 1-minute video upload to a Firebase storage bucket using the Admin SDK. I've seen a lot about using firebase.storage().ref.child..... but I'm unable to do that with the Admin SDK since it doesn't have the same functions. This is my file upload:
exports.uploadMedia = (req, res) => {
  const BusBoy = require('busboy');
  const path = require('path');
  const os = require('os');
  const fs = require('fs');
  const busboy = new BusBoy({ headers: req.headers, limits: { files: 1, fileSize: 200000000 } });
  let mediaFileName;
  let mediaToBeUploaded = {};
  busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
    if (mimetype !== 'image/jpeg' && mimetype !== 'image/png' && mimetype !== 'video/quicktime' && mimetype !== 'video/mp4') {
      console.log(mimetype);
      return res.status(400).json({ error: 'Wrong file type submitted, only .png, .jpeg, .mov, and .mp4 files allowed' });
    }
    // my.image.png -> png
    const imageExtension = filename.split('.')[filename.split('.').length - 1];
    // 43523451452345231234.png
    mediaFileName = `${Math.round(Math.random() * 100000000000)}.${imageExtension}`;
    const filepath = path.join(os.tmpdir(), mediaFileName);
    mediaToBeUploaded = { filepath, mimetype };
    // Spool the incoming file to a temp path before uploading it to the bucket.
    file.pipe(fs.createWriteStream(filepath));
    file.on('limit', function () {
      fs.unlink(filepath, function () {
        return res.json({ 'Error': 'Max file size is 200 Mb, file size too large' });
      });
    });
  });
  busboy.on('finish', () => {
    admin
      .storage()
      .bucket()
      .upload(mediaToBeUploaded.filepath, {
        resumable: false,
        metadata: {
          metadata: {
            contentType: mediaToBeUploaded.mimetype
          }
        }
      })
      .then(() => {
        const mediaUrl = `https://firebasestorage.googleapis.com/v0/b/${config.storageBucket}/o/${mediaFileName}?alt=media`;
        return res.json({ mediaUrl });
      })
      .catch((err) => {
        console.error(err);
        return res.json({ 'Error': 'Error uploading media' });
      });
  });
  req.pipe(busboy);
}
This works okay right now, but the only problem is that the user can't see how far along their 1- or 2-minute video upload is. Currently it's just an activity indicator, and the user sits there waiting without any notice. I'm using React Native on the frontend if that helps with anything. Would appreciate any help!
I was able to implement it a lot more easily on the client side, and it works perfectly for image and video upload progress. On the backend I was using the Admin SDK, but on the frontend I was originally using the Firebase client SDK.
this.uploadingMedia = true;
const imageExtension = this.mediaFile.split('.')[this.mediaFile.split('.').length - 1];
const mediaFileName = `${Math.round(Math.random() * 100000000000)}.${imageExtension}`;
const response = await fetch(this.mediaFile);
const blob = await response.blob();
const storageRef = storage.ref(`${mediaFileName}`).put(blob);
storageRef.on(`state_changed`, snapshot => {
  // Fraction complete, straight from the upload task snapshot.
  this.uploadProgress = (snapshot.bytesTransferred / snapshot.totalBytes);
}, error => {
  this.error = error.message;
  this.submitting = false;
  this.uploadingMedia = false;
  return;
},
async () => {
  storageRef.snapshot.ref.getDownloadURL().then(async (url) => {
    const imageUrl = [];
    const videoUrl = [url];
    this.uploadingMedia = false;
    this.submitPost(imageUrl, videoUrl);
  });
});
export const uploadFile = (
  folderPath,
  fileName,
  file,
  generateDownloadURL = true,
  updateInformationUploadProgress
) => {
  return new Promise((resolve, reject) => {
    try {
      const storageRef = firebaseApp.storage().ref(`${folderPath}/${fileName}`)
      const uploadTask = storageRef.put(file)
      uploadTask.on(
        'state_changed',
        snapshot => {
          if (updateInformationUploadProgress) {
            const progress =
              (snapshot.bytesTransferred / snapshot.totalBytes) * 100
            updateInformationUploadProgress({
              name: fileName,
              progress: progress,
            })
          }
        },
        error => {
          console.log('upload error: ', error)
          reject(error)
        },
        () => {
          if (generateDownloadURL) {
            uploadTask.snapshot.ref
              .getDownloadURL()
              .then(url => {
                resolve(url)
              })
              .catch(error => {
                console.log('url error: ', error.message)
                reject(error)
              })
          } else {
            resolve(uploadTask.snapshot.metadata.fullPath)
          }
        }
      )
    } catch (error) {
      reject(error)
    }
  })
}
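If the upload has to stay server-side with the Admin SDK, there is no built-in state_changed equivalent, but you can approximate progress by streaming the temp file into the bucket yourself and counting the bytes as they go. A rough sketch of that idea; reportProgress is a placeholder for however you push updates to the client (websocket, SSE, a polling endpoint):

const fs = require('fs');
const admin = require('firebase-admin');

// Sketch: upload a local file with the Admin SDK while tracking progress.
// reportProgress is hypothetical -- wire it to whatever reaches the client.
function uploadWithProgress(filepath, destination, contentType, reportProgress) {
  return new Promise((resolve, reject) => {
    const totalBytes = fs.statSync(filepath).size;
    let sentBytes = 0;
    const writeStream = admin
      .storage()
      .bucket()
      .file(destination)
      .createWriteStream({ resumable: false, metadata: { contentType } });
    fs.createReadStream(filepath)
      .on('data', (chunk) => {
        // Bytes handed to the GCS stream; close enough for a progress bar.
        sentBytes += chunk.length;
        reportProgress(sentBytes / totalBytes);
      })
      .on('error', reject)
      .pipe(writeStream)
      .on('error', reject)
      .on('finish', resolve);
  });
}

This counts bytes read from disk into the write stream rather than bytes acknowledged by Google, but for a user-facing progress bar that distinction rarely matters.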

How to send selected files to a Node server using react-native-fs?

I am using https://github.com/itinance/react-native-fs to upload files from a React Native client, but they aren't being received by my Node.js server. By the way, I used react-native-document-picker https://github.com/Elyx0/react-native-document-picker to select the files from the Android file system. Here is my client app's code:
async uploadToNode() {
  let testUrl = this.state.multipleFile[0].uri; //content://com.android.providers.media.documents/document/image%3A7380
  const split = testUrl.split('/');
  const name = split.pop();
  const setFileName = "Img";
  const inbox = split.pop();
  const realPath = `${RNFS.TemporaryDirectoryPath}${inbox}/${name}`;
  const uploadUrl = "http://localhost:8082/uploadToIpfs";
  var uploadBegin = (response) => {
    const jobId = response.jobId;
    console.log('UPLOAD HAS BEGUN! JobId: ' + jobId);
  };
  var uploadProgress = (response) => {
    const percentage = Math.floor((response.totalBytesSent / response.totalBytesExpectedToSend) * 100);
    console.log('UPLOAD IS ' + percentage + '% DONE!');
  };
  RNFS.uploadFiles({
    toUrl: uploadUrl,
    files: [{
      name: setFileName,
      filename: name,
      filepath: realPath,
    }],
    method: 'POST',
    headers: {
      'Accept': 'application/json',
    },
    begin: uploadBegin,
    beginCallback: uploadBegin, // Don't ask me, only way I made it work as of 1.5.1
    progressCallback: uploadProgress,
    progress: uploadProgress
  })
    .then((response) => {
      console.log(response, "<<< Response");
      if (response.statusCode == 200) { //You might not be getting a statusCode at all. Check
        console.log('FILES UPLOADED!');
      } else {
        console.log('SERVER ERROR');
      }
    })
    .catch((err) => {
      if (err.description) {
        switch (err.description) {
          case "cancelled":
            console.log("Upload cancelled");
            break;
          case "empty":
            console.log("Empty file");
            break; // was missing, so this fell through to the default case
          default:
            // Unknown
        }
      } else {
        // Weird
      }
      console.log(err);
    });
}
I'm not sure the Node.js code is correct for receiving the files from the client app. Here is my server code:
app.post('/uploadToIpfs', (req, res) => {
  // network.changeCarOwner(req.body.key, req.body.newOwner)
  //   .then((response) => {
  //     res.send(response);
  //   });
  // var fileName = "Img";
  // if (req.name == fileName) {
  //   console.log(req.filename);
  //   res.send("Passed")
  // } else {
  //   res.send("failed")
  // }

  // req.files is undefined unless a multipart parser populates it.
  console.log(req.files[0].filename);
  res.send("Passed");
});

app.listen(process.env.PORT || 8082);
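Plain Express never parses multipart/form-data, so req.files will be undefined in the handler above. A minimal sketch using multer (my choice of parser; any multipart middleware would do) that should be able to receive the react-native-fs upload; note the field name must match the name ("Img") the client sends:

const express = require('express');
const multer = require('multer');
const app = express();

// Store uploads on disk; the destination folder is an illustrative choice.
const upload = multer({ dest: 'uploads/' });

// 'Img' must match the `name` field used in RNFS.uploadFiles on the client.
app.post('/uploadToIpfs', upload.array('Img'), (req, res) => {
  if (!req.files || req.files.length === 0) {
    return res.status(400).send('No files received');
  }
  // multer exposes originalname, size, and the temp path for each file.
  req.files.forEach((f) => console.log(f.originalname, f.size, f.path));
  res.send('Passed');
});

app.listen(process.env.PORT || 8082);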

Using axios to get an external image and then saving it to the file system?

I have the following function that is called on every request.
async function checkForNewData() {
  var now = moment();
  var lastUpdateUnix = fs.readFileSync('.data/last-update.txt').toString();
  var lastUpdate = moment.duration(lastUpdateUnix, 'seconds');
  now.subtract(lastUpdate);
  if (now.hour() >= 1) {
    console.log("Last update is over 1 hour old. Getting new data.");
    // Schedule
    console.log("Getting new schedule.");
    let res = await axios.get('https://splatoon2.ink/data/schedules.json', { headers: { "User-Agent": "Splatoon2.ink caching server at glitch.com/~splatoon2-ink-cache" } });
    fs.writeFileSync('.data/rotations.json', JSON.stringify(res.data));
    // Image
    console.log("Getting new image.");
    let resImage = await axios.get('https://splatoon2.ink/twitter-images/schedule.png', { headers: { "User-Agent": "Splatoon2.ink caching server at glitch.com/~splatoon2-ink-cache" }, responseType: 'stream' });
    const path = Path.resolve(__dirname, '.data', 'image.png');
    const writer = fs.createWriteStream(path);
    // pipe() returns immediately; this function resolves before the image
    // has finished writing to disk.
    resImage.data.pipe(writer);
    fs.writeFileSync('.data/last-update.txt', moment().unix());
    console.log("Data is now up to date.");
    return;
  }
}
On every Express route I have something similar to this.
app.get('/', function(request, response) {
  console.log("Request for schedule.");
  checkForNewData().then(function() {
    console.log("Sending schedule.");
    response.sendFile(__dirname + '/.data/rotations.json');
  });
});
My goal is to run a function that checks whether the current data is outdated (using the moment library) and, if it is, fetches new data. But for some reason, the image fetched with axios won't be written to the file system. I've tried different approaches to saving it, but nothing has worked.
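One likely culprit: pipe() is fire-and-forget, so checkForNewData() resolves before the write stream has flushed image.png to disk, and the route can respond while the file is still empty or half-written. Wrapping the pipe in a Promise that settles on the writer's finish/error events should fix it; a minimal sketch:

// Await the disk write instead of racing past it.
function saveStreamToFile(stream, filePath) {
  return new Promise((resolve, reject) => {
    const writer = fs.createWriteStream(filePath);
    stream.pipe(writer);
    writer.on('finish', resolve); // all bytes flushed to disk
    writer.on('error', reject);
  });
}

// Inside checkForNewData():
// await saveStreamToFile(resImage.data, Path.resolve(__dirname, '.data', 'image.png'));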
Here's my full file.
// server.js
// where your node app starts

// init project
const express = require('express');
const app = express();
const moment = require('moment');
const fs = require("fs");
const Path = require('path');
const axios = require("axios");

fs.writeFileSync('.data/last-update.txt', '0');
fs.writeFileSync('.data/rotations.json', '{}');

// we've started you off with Express,
// but feel free to use whatever libs or frameworks you'd like through `package.json`.

// http://expressjs.com/en/starter/static-files.html
app.use(express.static('public'));

async function checkForNewData() {
  var now = moment();
  var lastUpdateUnix = fs.readFileSync('.data/last-update.txt').toString();
  var lastUpdate = moment.duration(lastUpdateUnix, 'seconds');
  now.subtract(lastUpdate);
  if (now.hour() >= 1) {
    console.log("Last update is over 1 hour old. Getting new data.");
    // Schedule
    console.log("Getting new schedule.");
    let res = await axios.get('https://splatoon2.ink/data/schedules.json', { headers: { "User-Agent": "Splatoon2.ink caching server at glitch.com/~splatoon2-ink-cache" } });
    fs.writeFileSync('.data/rotations.json', JSON.stringify(res.data));
    // Image
    console.log("Getting new image.");
    let resImage = await axios.get('https://splatoon2.ink/twitter-images/schedule.png', { headers: { "User-Agent": "Splatoon2.ink caching server at glitch.com/~splatoon2-ink-cache" }, responseType: 'stream' });
    const path = Path.resolve(__dirname, '.data', 'image.png');
    const writer = fs.createWriteStream(path);
    resImage.data.pipe(writer);
    fs.writeFileSync('.data/last-update.txt', moment().unix());
    console.log("Data is now up to date.");
    return;
  }
}

// http://expressjs.com/en/starter/basic-routing.html
app.get('/', function(request, response) {
  console.log("Request for schedule.");
  checkForNewData().then(function() {
    console.log("Sending schedule.");
    response.sendFile(__dirname + '/.data/rotations.json');
  });
});

app.get('/image', function(request, response) {
  console.log("Request for image.");
  checkForNewData().then(function() {
    console.log("Sending image.");
    response.type('png');
    response.sendFile(__dirname + '/.data/image.png');
  });
});

// listen for requests :)
const listener = app.listen(process.env.PORT, function() {
  console.log('Your app is listening on port ' + listener.address().port);
});

When piping a movie from S3 the file isn't seekable

We have an application that sometimes serves an MP4 file stored on S3. Since only specific people should be able to see each file, the file is private, and our service only shows it to authorised people.
The movie starts playing correctly (in the browser's built-in video tag), but if we seek to a point that hasn't been buffered yet, the player buffers for a bit and then stops playing; clicking Play afterwards makes the movie start over from the beginning. If I make the file public and access it directly from S3, seeking to an unbuffered point works correctly.
I created a standalone Node program that reproduces the problem. I tried to make the response headers identical to those that S3 sends, but the problem remains.
const http = require("http");
const AWS = require("aws-sdk");
const proxy = require("proxy-agent");

Object.assign(process.env, {
  AWS_ACCESS_KEY_ID: "REDACTED",
  AWS_SECRET_ACCESS_KEY: "REDACTED",
  AWS_EC2_REGION: "us-west-2"
});

const s3 = new AWS.S3({
  s3ForcePathStyle: 'true',
  signatureVersion: 'v4',
  httpOptions: { timeout: 300000 },
  endpoint: 'https://s3.us-west-2.amazonaws.com',
  region: 'us-west-2'
});

const objectParams = {
  Bucket: 'REDACTED',
  Key: 'some-movie.mp4'
};

let request = 0;

function serve(req, res) {
  console.log("Handling request", ++request, req.url);
  s3.headObject(objectParams, (err, data) => {
    if (err)
      throw err;
    const { ContentType: type, ContentLength: length } = data;
    console.log("Got", data);
    if (data.ETag)
      res.setHeader("ETag", data.ETag);
    const range = req.headers.range;
    if (range) {
      console.log("Serving range", range);
      const parts = range.replace("bytes=", "").split("-");
      const start = parseInt(parts[0], 10);
      const end = parts[1] ? parseInt(parts[1], 10) : length - 1;
      let headers = {
        "Content-Range": `bytes ${start}-${end}/${length}`,
        "Accept-Ranges": "bytes",
        "Content-Type": type,
        "Content-Length": end - start + 1,
        "Last-Modified": data.LastModified,
      };
      if (req.headers["if-range"]) {
        // Note: If-Range is a request header; echoing it back in the
        // response means nothing to the browser.
        console.log("Setting if-range to", req.headers["if-range"]);
        headers["If-Range"] = req.headers["if-range"];
      }
      res.writeHead(206, headers);
    }
    else {
      console.log("Whole file");
      res.setHeader("Accept-Ranges", "bytes");
      res.setHeader("Content-Type", type);
      res.setHeader("Content-Length", length);
      res.setHeader("Last-Modified", data.LastModified);
    }
    // The Range header is never forwarded to S3, so this always streams
    // the object from byte 0, whatever Content-Range was promised above.
    const stream = s3.getObject(objectParams).createReadStream();
    stream.on("error", err => console.error("stream error:", err));
    stream.pipe(res).on("finish", data => {
      console.log("Finished streaming");
    });
  });
}

http.createServer(serve).listen(1234);
What am I missing?
Here is code with the seek bar working just fine. You can test it by integrating the code below and opening the API URL in the browser.
import mime from 'mime-types';

const key = 'S3_BUCKET KEY';
const params = { Key: key, Bucket: AWS_BUCKET };

// s3 here refers to an AWS.S3 object.
s3.headObject(params, function (err, data) {
  if (err) {
    console.error(err);
    return next(err);
  }
  if (req.headers.range) {
    const range = req.headers.range;
    const bytes = range.replace(/bytes=/, '').split('-');
    const start = parseInt(bytes[0], 10);
    const total = data.ContentLength;
    const end = bytes[1] ? parseInt(bytes[1], 10) : total - 1;
    const chunkSize = end - start + 1;
    res.set('Content-Range', 'bytes ' + start + '-' + end + '/' + total);
    res.set('Accept-Ranges', 'bytes');
    res.set('Content-Length', chunkSize.toString());
    // The crucial difference: forward the byte range to S3 itself,
    // so only the requested slice is read from the bucket.
    params['Range'] = range;
    res.status(206); // Partial Content -- only for range responses
    console.log('video buffering - range, total, start, end ,params', range, total, start, end, params);
  } else {
    res.set('Content-Length', data.ContentLength.toString());
    console.log('video buffering - ,params', params);
  }
  res.set('Content-Type', mime.lookup(key));
  res.set('Last-Modified', data.LastModified.toString());
  res.set('ETag', data.ETag);
  const stream = s3.getObject(params).createReadStream();
  stream.on('error', function error(err) {
    return next(err);
  });
  stream.on('end', () => {
    console.log('Served by Amazon S3: ' + key);
  });
  stream.pipe(res);
});
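Applied to the standalone reproduction in the question, the decisive change is the same one: forward the incoming Range header to getObject so S3 itself starts the stream at the requested byte instead of always streaming from byte 0. A minimal sketch of that change:

// Inside serve(), after computing the response headers:
const getParams = { ...objectParams };
if (req.headers.range) {
  // Forward the browser's byte range to S3 itself.
  getParams.Range = req.headers.range;
}
const stream = s3.getObject(getParams).createReadStream();
stream.pipe(res);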
