Get audio file duration and size in node js - node.js

I am using musicmetadata module of node js to read duration and size but it is not working for all mp3 files.getting below error:-
Error: Could not find metadata header
at /var/www/html/Live/node_modules/musicmetadata/lib/id3v1.js:13:19
at Stream.done (/var/www/html/Live/node_modules/musicmetadata/lib/common.js:31:5)
at emitNone (events.js:91:20)
at Stream.emit (events.js:185:7)
at drain
(/var/www/html/Live/node_modules/musicmetadata/node_modules/through/index.js:34:23)
at Stream.stream.queue.stream.push (/var/www/html/Live/node_modules/musicmetadata/node_modules/through/index.js:45:5)
at Stream.end (/var/www/html/Live/node_modules/musicmetadata/node_modules/through/index.js:15:35)
at _end (/var/www/html/Live/node_modules/musicmetadata/node_modules/through/index.js:65:9)
at Stream.stream.end (/var/www/html/Live/node_modules/musicmetadata/node_modules/through/index.js:74:5)
at ReadStream.onend (_stream_readable.js:512:10)
at ReadStream.g (events.js:286:16)
at emitNone (events.js:91:20)
at ReadStream.emit (events.js:185:7)
at endReadableNT (_stream_readable.js:975:12)
at _combinedTickCallback (internal/process/next_tick.js:74:11)
at process._tickDomainCallback (internal/process/next_tick.js:122:9)
+ Error: NODE_ENV is not defined! Using default development environment
code example:it contain required code to understand issue.
var fs = require('fs');
var musicData = require('musicmetadata');
//working for "01 .Brown Rang -Mp3 Songs -[320kbps]-[Exclusive]~~~[CooL GuY] {{a2zRG}}.mp3"
var parser = musicData(fs.createReadStream('03 RANG DE CHUNRIYA.mp3'),{ duration: true }, function (err, metadata) {
var talkItem = {};
if (err)
{
console.log('err:',err);
}
else{
if(metadata.title === undefined || metadata.title === "")
{
talkItem.title ='';
}
else{
talkItem.title = metadata.title;
}
talkItem.duration = metadata.duration;
console.log('talkItem:',talkItem);
}
});
you can see code mp3 file here

Related

Howo to get IPP endpoint

I'm using the ipp npm module to send a print job from a google cloud function. I believe I have set up the printer correctly but I don't know how I'm supposed to know the exact uri for sending the print job.
The printer model is Brother MFC-L3770CDW
Here is how my settings look in the web view for the printer configuration.
And here is the function code.:
var ipp = require('ipp');
var PDFDocument = require('pdfkit');
var doc = new PDFDocument;
doc.text("Hello World");
var buffers = [];
doc.on('data', buffers.push.bind(buffers));
doc.on('end', function () {
var printer = ipp.Printer("https://10.0.0.55:443");
var file = {
"operation-attributes-tag":{
"requesting-user-name": "User",
"job-name": "Print Job",
"document-format": "application/pdf"
},
data: Buffer.concat(buffers)
};
printer.execute("Print-Job", file, function (err, res) {
if(err) {
console.log(err);
}
else{
console.log("Printed: "+res.statusCode);
}
});
console.log('executing');
});
doc.end();
console.log('finished executing');
I have tried various uris such as
https://10.0.0.55:631
https://10.0.0.55:443
https://10.0.0.55:631/ipp
https://10.0.0.55:631/ipp/printer
Sometimes I get an error like:
"Error: socket hang up
at TLSSocket.onHangUp (_tls_wrap.js:1148:19)
at Object.onceWrapper (events.js:313:30)
at emitNone (events.js:111:20)
at TLSSocket.emit (events.js:208:7)
at endReadableNT (_stream_readable.js:1064:12)
at _combinedTickCallback (internal/process/next_tick.js:139:11)
at process._tickCallback (internal/process/next_tick.js:181:9)
code: 'ECONNRESET',
path: null,
host: '10.0.0.55',
port: '631',
localAddress: undefined }"

Why "queue closed error" occurs at when using archiver.file module to compress files

I am trying to download multiple files as zip in nodejs using archiver. This is my code:
exports.downloadAllFiles = function(req,res){
var archive = archiver('zip', {
gzip: true,
zlib: { level: 9 } // Sets the compression level.
});
var output = fs.createWriteStream( "/home/files/Downloads/demo.zip");
archive.pipe(output);
demoDb.findOne({ caseguid: req.params.id }, function(err, data) {
if (err) {
res.json(HttpStatus.INTERNAL_SERVER_ERROR, {});
} else {
if(data){
data.Files.forEach(function(singleDoc){
archive.append(fs.createReadStream(singleDoc.filePath), { name: singleDoc.fileName })
})
}
}
})
archive.finalize();
};
This is the error stack:
{ Error: queue closed
at Archiver.append (/home/node_modules/archiver/lib/core.js:552:24)
at Promise.<anonymous> (/home/server/controllers/caseController.js:1722:25)
at Promise.<anonymous> (/home/node_modules/mpromise/lib/promise.js:177:8)
at emitOne (events.js:96:13)
at Promise.emit (events.js:188:7)
at Promise.emit (/home/node_modules/mpromise/lib/promise.js:84:38)
at Promise.fulfill (/home/node_modules/mpromise/lib/promise.js:97:20)
at /home/node_modules/mongoose/lib/query.js:1406:13
at model.Document.init (/home/node_modules/mongoose/lib/document.js:254:11)
at completeOne (/home/node_modules/mongoose/lib/query.js:1404:10)
at Immediate.cb (/home/node_modules/mongoose/lib/query.js:1158:11)
at Immediate.<anonymous> (/homenode_modules/mquery/lib/utils.js:137:16)
at runCallback (timers.js:672:20)
at tryOnImmediate (timers.js:645:5)
at processImmediate [as _immediateCallback] (timers.js:617:5) message: 'queue closed', code: 'QUEUECLOSED', data: undefined }
Probably the line archive.finalize() is being executed before your callback be completed. Try to move your archive.finalize to inside your callback.

How to search via elasticsearch in Node.js?

I have indexed data from firebase via elasticSearch .
And Its Working Properly .
Now I am Searching data via firebase below cloud function :
exports.searchIt = functions.database.ref('/search/{key}')
.onUpdate(event => {
let key=event.params.key;
let elasticSearchConfig = functions.config().elasticsearch;
const esClient = new elastic.Client({
host: 'http://35.198.221.164',
log: 'error'
});
console.log('client Created');
let searchBody = {
size: 20,
from: 0,
query: {
match_all: {}
}
};
esClient.search({index: 'offers', body: searchBody})
.then(results => {
console.log('Successfully Entered');
results.hits.hits.forEach(
(hit, index) => console.log(hit)
)
})
.catch(console.error);
});
But this gives error below :
textPayload: "{ Error: Not Found at respond
(/user_code/node_modules/elasticsearch/src/lib/transport.js:307:15) at
checkRespForFailure
(/user_code/node_modules/elasticsearch/src/lib/transport.js:266:7) at
HttpConnector.
(/user_code/node_modules/elasticsearch/src/lib/connectors/http.js:159:7)
at IncomingMessage.bound
(/user_code/node_modules/elasticsearch/node_modules/lodash/dist/lodash.js:729:21)
at emitNone (events.js:91:20) at IncomingMessage.emit
(events.js:185:7) at endReadableNT (_stream_readable.js:974:12) at
_combinedTickCallback (internal/process/next_tick.js:80:11) at process._tickDomainCallback (internal/process/next_tick.js:128:9)
And on changing host to any other it still create client.
Why is this happening?

Node server is getting down with Error: write after end

We are streaming files from s3, its running well but sometime (randomly in few days) its gives error and node instance is getting down. our code is
s3Utils.headObject(assetId, function (err, metadata) {
if (err && err.code === 'NotFound') {
console.log("file not found on s3: ", assetId) ;
res.send({"success":false, "message":"file not found on s3: "+ assetId});
} else {
var download_file_name = assetId;
if(metadata.Metadata && metadata.Metadata.filename) {
download_file_name = metadata.Metadata.filename;
}
if(metadata.ContentLength) {
res.setHeader('Content-Length',metadata.ContentLength);
}
res.setHeader('Content-disposition', 'attachment; filename=' +download_file_name);
res.attachment(download_file_name);
var fileStream = s3Utils.getObjectAsReadStream(assetId);
fileStream.pipe(res);
}
});
we are getting below error in logs
events.js:154
throw er; // Unhandled 'error' event
^
Error: write after end
at writeAfterEnd (_stream_writable.js:166:12)
at PassThrough.Writable.write (_stream_writable.js:211:5)
at IncomingMessage.ondata (_stream_readable.js:536:20)
at emitOne (events.js:90:13)
at IncomingMessage.emit (events.js:182:7)
at IncomingMessage.Readable.read (_stream_readable.js:368:10)
at flow (_stream_readable.js:751:26)
at resume_ (_stream_readable.js:731:3)
at _combinedTickCallback (internal/process/next_tick.js:74:11)
at process._tickDomainCallback (internal/process/next_tick.js:122:9)
Server is getting down after this error.
Please suggest.

Node.js Error: Max redirects exceeded

how I can ignore pages with cycle redirects?
I use this code to fetching pages:
var libxml = require("libxmljs"),
http = require('follow-redirects').http,
url = require("url");
var request = http.request( { "host": host, "path": URL, "port": 80 }, function( response ) {
var str = '';
response.on( 'data', function( chunk ) {
str += chunk;
});
response.on( 'end', function() {
callback( str, response.statusCode );
}).on( 'error', function ( err ) {
console.log( err );
});
}).end();
It will not go to 'error' block, and I've got an exception:
events.js:85
throw er; // Unhandled 'error' event
^
Error: Max redirects exceeded.
at ClientRequest.cb (/var/parsing/node_modules/follow-redirects/create.js:55:19)
at ClientRequest.g (events.js:199:16)
at ClientRequest.emit (events.js:107:17)
at HTTPParser.parserOnIncomingClient [as onIncoming] (_http_client.js:426:21)
at HTTPParser.parserOnHeadersComplete (_http_common.js:111:23)
at Socket.socketOnData (_http_client.js:317:20)
at Socket.emit (events.js:107:17)
at readableAddChunk (_stream_readable.js:163:16)
at Socket.Readable.push (_stream_readable.js:126:10)
at TCP.onread (net.js:538:20)
The error is being thrown by the request object, not the response object, so you need to add an (additional) error listener to request;
var request = http.request(...).on('error', function(err) {
...
}).end();
Looking at the docs for the package you are using (https://www.npmjs.com/package/follow-redirects), it looks like it just has a maxRedirects option. Directly from the linked page:
require('follow-redirects').maxRedirects = 10; // Has global affect (be careful!)
https.request({
host: 'bitly.com',
path: '/UHfDGO',
maxRedirects: 3 // per request setting
}, function (res) {/* ... */});

Resources