I'm using Node.js and torrent-stream to create a readable stream of a downloading audio file. I've piped this into an audio file in my directory.
My question is: how would I stream this audio file in Node.js? Is there some standard way to play a file as it downloads?
I don't know of a way to play an audio file as it is downloaded, but here is some code that might work for what you are trying to do:
var http = require('http'),
fs = require('fs'),
filePath = 'myfile.mp3',
stat = fs.statSync(filePath);
http.createServer(function(request, response) {
response.writeHead(200, {
'Content-Type': 'audio/mpeg',
'Content-Length': stat.size
});
fs.createReadStream(filePath).pipe(response);
})
.listen(2000);
Hope this helps!
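For the torrent case specifically, the same pattern can be fed straight from torrent-stream instead of a file on disk, so the audio is served while it is still downloading. A minimal sketch, with a placeholder magnet link, assuming the torrent's first file is the audio you want and skipping range support:
var http = require('http'),
    torrentStream = require('torrent-stream');

// Placeholder magnet link; substitute the torrent you are actually downloading.
var engine = torrentStream('magnet:?xt=urn:btih:...');

engine.on('ready', function () {
  // Assume the first file in the torrent is the audio file we want.
  var file = engine.files[0];
  http.createServer(function (request, response) {
    response.writeHead(200, {
      'Content-Type': 'audio/mpeg',
      'Content-Length': file.length
    });
    // torrent-stream fetches pieces on demand as this stream is read,
    // so the response starts before the download has finished.
    file.createReadStream().pipe(response);
  }).listen(2000);
});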
I tried this out and it seems to work:
const http = require('http')
const util = require('util')
const mime = require('mime')
const PORT = process.env.PORT || 8080
const onFinished = require('on-finished')
const parseRange = require('range-parser')
const torrentStream = require('torrent-stream')
const engine = torrentStream('magnet:?xt=urn:btih:b42f85d4d976f31d9edde30a101c79928e1353f6')
engine.on('ready', () => {
const files = engine.files
http.createServer(function (req, res) {
const index = parseInt(req.url.slice(1), 10)
if (!files.hasOwnProperty(index)) return res.end()
const file = files[index || 0]
console.log(`${file.name} ${req.headers.range ? req.headers.range : ''}`)
send(req, res, file)
}).listen(PORT, function () { console.log('Server listening on: http://localhost:%s', PORT) })
})
function send (req, res, file) {
let stream
let len = file.length
const type = mime.lookup(file.path)
if (type) {
const charset = mime.charsets.lookup(type)
res.setHeader('Content-Type', type + (charset ? '; charset=' + charset : ''))
}
res.setHeader('Content-Length', len)
res.setHeader('Accept-Ranges', 'bytes')
if (req.headers.range) {
const ranges = parseRange(len, req.headers.range, { combine: true })
if (ranges === -1) {
res.setHeader('Content-Length', 0)
res.setHeader('Content-Range', contentRange('bytes', len))
res.statusCode = 416
return res.end()
}
if (ranges !== -2 && ranges.length === 1) {
res.statusCode = 206
res.setHeader('Content-Range', contentRange('bytes', len, ranges[0]))
len = ranges[0].end - ranges[0].start + 1
res.setHeader('Content-Length', len)
if (req.method === 'HEAD') return res.end()
stream = file.createReadStream(ranges[0])
}
}
// If no valid single range was produced (no Range header, a malformed one,
// or a multi-range request), fall back to streaming the whole file so that
// stream is never undefined below.
if (!stream) {
if (req.method === 'HEAD') return res.end()
stream = file.createReadStream()
}
onFinished(res, () => stream && stream.destroy())
stream.pipe(res)
return stream
}
function contentRange (type, size, range) {
return type + ' ' + (range ? range.start + '-' + range.end : '*') + '/' + size
}
I want to store a PDF using PostgreSQL bytea in Node.js.
How can I convert a PDF to the bytea datatype?
My code is:
request.get('https://www.cbconnexus.com/upload/settings/black/data.pdf', function (error, response, body) {
if (!error && response.statusCode == 200) {
data = "data:" + response.headers["content-type"] + ";base64," + Buffer.from(body).toString('base64');
console.log(data);
}
});
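A note on the snippet above: request decodes the body as a UTF-8 string by default, which corrupts binary data such as a PDF. If the goal is a bytea column, one option is to fetch the body as a Buffer with encoding: null and pass it straight to a parameterized insert. A minimal sketch, assuming a connected pg Client named dbc and a hypothetical documents table with a bytea column named data:
var request = require('request');

// encoding: null makes request hand back the body as a Buffer instead of a string.
request.get({ url: 'https://www.cbconnexus.com/upload/settings/black/data.pdf', encoding: null },
  function (error, response, body) {
    if (!error && response.statusCode == 200) {
      // node-postgres maps a Buffer parameter to bytea automatically.
      dbc.query('insert into documents(data) values ($1)', [body])
        .then(function () { console.log('PDF stored'); })
        .catch(function (err) { console.error(err); });
    }
  });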
A Node HTTP server that accepts a POST of a PDF and stores it in a PostgreSQL table. It listens on the fixed IP address 192.168.1.3; the port is 8888 by default, but you can pass a different port as the first command-line argument.
It uses pg and formidable.
var myArgs = process.argv.slice(2);
var http = require('http');
var fs = require('fs');
var port = 8888;
const { IncomingForm } = require('formidable');
const { Client } = require('pg');
const dbc = new Client({
host: '127.0.0.1',
user: 'ghp',
password: 'zevrepdm',
database: 'ghp',
port: 5434,
});
dbc.connect();
if (myArgs[0]) {
port = myArgs[0];
}
http.createServer(function (req, res) {
const chunks = [];
if (req.url === '/' && req.method.toLowerCase() === 'post') {
const form = new IncomingForm({ multiples: true });
form.parse(req, (err, fields, files) => {
if (files.userfile.type == "application/pdf") {
var rawData = fs.readFileSync(files.userfile.path) ;
dbc
.query(
'insert into files(name,type,size,data) values ($1,$2,$3,$4)'
+ ' returning name',
[ files.userfile.name, files.userfile.type, files.userfile.size,
rawData ]
).then(result => {
var insMessage = 'file ' + result.rows[0].name + ' inserted';
console.log( insMessage );
}).catch(err => {
var insMessage = 'Error inserting file ' + files.userfile.name ;
console.error( insMessage );
console.error( err.stack );
});
}
});
}
req.on('data', chunk => chunks.push(chunk));
req.on('end', () => {
res.writeHead(200, [['Content-Type', 'text/html'],
['Cache-Control', 'maxage=120, s-maxage=60, public'],
['X-Accel-Expires', '86400'],
['X-BEVar', req.url] ]);
res.write('Hello ' + req.headers['user-agent'] + '!<br>');
res.write('<br>I\'m the one listening on port ' + port + '!<br>');
res.write(" <FORM ACTION='");
res.write(req.url);
res.write("' METHOD='post' enctype='multipart/form-data'> \
<input type='hidden' name='MAX_FILE_SIZE' value='128000000'> \
<input name='userfile' type='file' size='50'> \
<input type='submit' value='Send File'> \
</FORM>");
res.end('Do you know it\'s already ' + Date() + '?');
})
}).listen(port,"192.168.1.3");
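To read a stored PDF back out of the bytea column, the same dbc client can be queried and the Buffer written straight to the response. A minimal sketch, assuming the files table and dbc client from above, served from a hypothetical second listener on port 8889:
var http = require('http');

// Hypothetical read-back route: GET /files/<name> returns the stored file.
http.createServer(function (req, res) {
  var name = decodeURIComponent(req.url.replace(/^\/files\//, ''));
  dbc.query('select type, data from files where name = $1 limit 1', [name])
    .then(function (result) {
      if (result.rows.length === 0) {
        res.writeHead(404);
        return res.end('not found');
      }
      var row = result.rows[0];
      // pg returns bytea columns as a Node Buffer, so it can be written as-is.
      res.writeHead(200, { 'Content-Type': row.type, 'Content-Length': row.data.length });
      res.end(row.data);
    })
    .catch(function (err) {
      res.writeHead(500);
      res.end(err.message);
    });
}).listen(8889, '192.168.1.3');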
Every time a user visits our URL, they get the previous user's response plus their own new response.
I'm new to Node.js and I think I don't understand how this server works :)
Anyway, here you can see the app:
https://s1-nodejs.herokuapp.com (you will need to reload the page to see the duplicated response each time you reload).
And this is the code I deployed to Heroku:
const start = Date.now();
// using the http module
var http = require('http');
var https = require('https');
var request = require('request');
var domain;
var apiKey = 'example';
var domainResultJson = [];
var promises = [];
// look for PORT environment variable,
// else look for CLI argument,
// else use hard coded value for port 8080
port = process.env.PORT || process.argv[2] || 8080;
// create a simple server
let server = http.createServer(function (req, res) {
for (var i = 0 ; i < 10 ; i++ ) {
var domain = 'example-'+i+'.com';
promises.push(CheckIfDomainAvailable(https,domain,apiKey,domainResultJson));
}
Promise.all(promises).then(function(values) {
//console.log(values);
res.statusCode = 200;
res.setHeader('Content-Type', 'text/plain');
for (var i in values) {
val = values[i];
res.write(val);
}
res.end();
}).catch(
console.error
);
function CheckIfDomainAvailable(https,domain,apiKey,domainResultJson) {
return new Promise(function (success, failure) {
request('https://example.com/api/v1?'
+ 'apiKey=' + apiKey + '&d=' + domain + '', function (error, response, body) {
if (!error && response.statusCode == 200) {
domainResultJson.push('1');
success(body);
} else {
failure(error);
}
});
});
}
});
// listen on the port
server.listen(port, function () {
console.log('app up on port: ' + port);
});
Try declaring let promises = []; inside function (req, res) { ... }. Because promises currently lives outside the request handler, it is shared between requests and keeps growing, so each new request resolves all previously accumulated promises again and writes their results on top of its own. Like this:
// ...
let server = http.createServer(function (req, res) {
let promises = []; // <= Define promises container here.
for (var i = 0 ; i < 10 ; i++ ) {
var domain = 'example-'+i+'.com';
promises.push(CheckIfDomainAvailable(https,domain,apiKey,domainResultJson));
}
// ...
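The same reasoning applies to domainResultJson, which is also declared at module level and keeps growing across requests. A slightly fuller sketch of the handler with all per-request state kept local (everything else, including CheckIfDomainAvailable, stays as in the question):
let server = http.createServer(function (req, res) {
  let promises = [];
  let domainResultJson = []; // per-request as well, so it no longer accumulates
  for (let i = 0; i < 10; i++) {
    let domain = 'example-' + i + '.com';
    promises.push(CheckIfDomainAvailable(https, domain, apiKey, domainResultJson));
  }
  Promise.all(promises).then(function (values) {
    res.statusCode = 200;
    res.setHeader('Content-Type', 'text/plain');
    for (let val of values) {
      res.write(val);
    }
    res.end();
  }).catch(function (err) {
    res.statusCode = 500;
    res.end(String(err));
  });
  // CheckIfDomainAvailable is defined inside the handler, as in the question.
});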
I'm using the node module form-data to send multipart/form-data requests to a server, but I'm not sure how to calculate the Content-Length for the request correctly. The form-data GitHub documentation seems a bit hand-wavy about it, and the server I'm sending to requires the Content-Length header to be included.
const FormData = require('form-data');
const fs = require('fs');
const https = require('https');
class Platform {
_postKey(key, serviceName) {
const options = this._getOptions();
const keyName = (key.search(/(PUBLIC)/) !== -1) ? 'publicKey' : 'privateKey';
const extension = (keyName == 'publicKey') ? 'pub' : '';
const filePath = (extension.length > 0) ? __dirname + '/data/'+serviceName+'.'+extension : __dirname + '/data/'+serviceName;
const file = fs.createWriteStream(filePath, {flags: 'wx'});
file.write(key);
file.end();
const form = new FormData();
form.append('file', fs.createReadStream(filePath));
form.append('Name', keyName);
form.append('MimeMajor', 'application');
form.append('MimeMinor', 'x-pem-file');
form.append('Extension', extension);
form.append('FileClass', 'MFS::File');
options.headers = form.getHeaders();
options.headers.Authorization = 'Basic ' + Buffer.from(this.FILESTORE_USERNAME + ':' + this.FILESTORE_PASSWORD).toString('base64');
// options.headers['Content-Length'] = key.length;
console.log(options);
return new Promise((resolve, reject) => {
let post = https.request(options, (res) => {
let data = '';
if (res.statusCode < 200 || res.statusCode > 299) {
reject(new Error('File Storage API returned a status code outside of acceptable range: ' + res.statusCode));
} else {
res.setEncoding('utf8');
res.on('data', (chunk) => {
data += chunk;
});
res.on('error', (err) => {
reject(err);
});
res.on('end', () => {
console.log(data);
if (data) {
resolve(JSON.parse(data));
} else {
resolve();
}
});
}
});
post.on('error', reject);
form.pipe(post);
});
}
}
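For the Content-Length itself, form-data can compute the multipart body length via its asynchronous getLength() method, so one option (a sketch only, slotted into the Promise executor above in place of the direct https.request call) is to defer the request until the length is known:
// Sketch: ask form-data for the total multipart length, set the header,
// then make the request and pipe the form exactly as before.
form.getLength((err, length) => {
  if (err) return reject(err);
  options.headers['Content-Length'] = length;
  const post = https.request(options, (res) => {
    // ...handle the response exactly as in the code above...
  });
  post.on('error', reject);
  form.pipe(post);
});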
We have a requirement to write a Node application that reads image URLs from a database (more than a million of them), uses the image-size npm package to retrieve image metadata such as height and width, and exposes an API that lists the results.
I am able to console.log the data, but when I turn this into an API I need to send the data in chunks so it starts appearing in the browser, and I'm unable to do that and need help. Here is my code:
var express = require('express');
var url = require('url');
var http = require('http');
var sizeOf = require('image-size');
const sql = require('mssql');
var app = express();
var port = process.env.PORT || 3000;
const hostname = 'localhost';
var config1 = {
user: '*********',
password: '*********',
server: '*********',
database: '*******',
port: 1433,
debug: true,
options: {
encrypt: false // Use this if you're on Windows Azure
}
};
app.get('/', function(req, res){
//res.writeHead(200, { 'Content-Type': 'application/json' });
var finalResult = [];
sql.close();
sql.connect(config1, function (err) {
if (err) console.log(err);
const request = new sql.Request()
var myQuery = `select imagename from media`;
request.stream = true;
request.query(myQuery);
request.on('row', row => {
//console.log('Image : ' + row.ImageUrl);
if (row.ImageUrl != ''){
if (row.ImageUrl.indexOf('http') < 0)
row.ImageUrl = "http:" + row.ImageUrl;
var options = url.parse(row.ImageUrl);
http.get(options, function (response) {
if (response.statusCode == 200)
{
var chunks = [];
response.on('data', function (chunk) {
chunks.push(chunk);
}).on('end', function() {
var buffer = Buffer.concat(chunks);
//console.log(options.href);
//console.log(sizeOf(buffer).height);
var result = {};
result.MaskUrl = row.MaskUrl;
result.ImageUrl = options.href;
result.Height = sizeOf(buffer).height;
result.Width = sizeOf(buffer).width;
result.statusCode = 200;
finalResult.push(result);
//console.log(result);
console.log(finalResult);
res.write(result, function(){
res.end();
});
});
}
else
{
var result = {};
result.MaskUrl = row.MaskUrl;
result.ImageUrl = options.href;
result.Height = 0;
result.Width = 0;
result.statusCode = response.statusCode;
finalResult.push(result);
console.log(result);
res.write(result, function(){
res.end();
});
}
});
}
})
request.on('error', err => {
console.log ('Error for ' + row.ImageUrl );
})
request.on('done', err => {
console.log('Last Time' + finalResult.length);
})
// request.query(myQuery,(err,result) =>{
// console.log(result);
// });
});
console.log('Last Time' + finalResult.length);
res.send(finalResult);
});
app.listen(port, hostname, function(){
console.log('ImageSize running on PORT: ' + port);
});
I tried res.write and res.end without any success.
The probable reason for your problem is here:
res.write(result, function(){
res.end();
});
You end and close the response just after the first image is read.
I would rewrite the code a little and use a functional framework like scramjet to stream the data straight from the DB. As Nicholas pointed out, it's not super easy to run your code, so I'm writing blindly, but if you fix any obvious errors of mine this should just work:
First:
npm install scramjet JSONStream node-fetch
Next, try this code:
var express = require('express');
var sizeOf = require('image-size');
const sql = require('mssql');
var app = express();
var port = process.env.PORT || 3000;
const hostname = 'localhost';
const {DataStream} = require('scramjet');
const fetch = require('node-fetch');
const JSONStream = require('JSONStream');
var config1 = {
user: '*********',
password: '*********',
server: '*********',
database: '*******',
port: 1433,
debug: true,
options: {
encrypt: false // Use this if you're on Windows Azure
}
};
app.get('/', function(req, res, next){
// you should consider not doing these two lines on each request,
// but I don't want to mess you code...
sql.close();
sql.connect(config1, function (err) {
if (err) next(err);
res.writeHead(200, { 'Content-Type': 'application/json' });
const request = new sql.Request();
var myQuery = `select imagename from media`;
request.stream = true;
request.query(myQuery);
const stream = new DataStream();
request.on('row', row => stream.write(row));
stream.filter(
row => row.ImageUrl !== ''
)
.map(
async row => {
if (row.ImageUrl.indexOf('http') !== 0) // url must start with http.
row.ImageUrl = "http:" + row.ImageUrl;
const response = await fetch(row.ImageUrl);
let size = {width:0, height:0};
if (response.status === 200) {
const buffer = await response.buffer();
size = sizeOf(buffer);
}
return {
MaskUrl: row.MaskUrl,
ImageUrl: row.ImageUrl,
Height: size.height,
Width: size.width,
statusCode: response.status
};
}
)
.pipe(
JSONStream.stringify()
).pipe(
res
);
request.on('error', () => {
res.writeHead(500, { 'Content-Type': 'application/json' });
stream.end("{error:true}");
});
request.on('done', () => stream.end());
});
});
app.listen(port, hostname, function(){
console.log('ImageSize running on PORT: ' + port);
});
I'm experimenting with ways to stream a video file to a browser client with Node.js. I've got working code that does the job, and everything works great.
var express = require('express');
var app = express();
var path = require('path');
var fs = require('fs');
app.get('/',function(req, res){
res.sendFile(__dirname+'/index.html');
});
app.get('/clips/:name',function(req,res){
var file = path.resolve(__dirname,req.params.name);
fs.stat(file, function(err, stats) {
if (err) {
if (err.code === 'ENOENT') {
// 404 Error if file not found
return res.sendStatus(404);
}
res.end(err);
}
var range = req.headers.range;
if (!range) {
// 416 Wrong range
return res.sendStatus(416);
}
var positions = range.replace(/bytes=/, "").split("-");
var start = parseInt(positions[0], 10);
var total = stats.size;
var end = positions[1] ? parseInt(positions[1], 10) : total - 1;
var chunksize = (end - start) + 1;
res.writeHead(206, {
"Content-Range": "bytes " + start + "-" + end + "/" + total,
"Accept-Ranges": "bytes",
"Content-Length": chunksize,
"Content-Type": "video/mp4"
});
var stream = fs.createReadStream(file, { start: start, end: end })
.on("open", function() {
stream.pipe(res);
}).on("error", function(err) {
res.end(err);
});
});
});
app.listen(80);
The only problem with this code is that when multiple client browser tabs are connected to the server, it blocks the next requests until a stream from a previous request has ended.