Read files from folders inside a folder - node.js

So basically I have a folder which holds other folders, and each of those folders has its own set of images that I would like to display in a 'ul'. The problem is that since I'm using readdir, which is async, how can I write the response without getting a "Write after .end()" error? Here is what my code looks like.
var fs = require('fs'),
    url = require('url');

module.exports = function(req, res) {
    req.pathName = req.pathName || url.parse(req.url).pathname;
    if (req.pathName === '/gallery') {
        fs.readdir('./content/images/public', function(err, filenames) {
            if (err) {
                console.log(err);
                return;
            }
            if (filenames.length) {
                var list_content = '';
                for (var i = 0; i < filenames.length; i++) {
                    fs.readdir('./content/images/public/' + filenames[i], function(err, images) {
                        if (err) {
                            console.log(err);
                            return;
                        }
                        for (var i = 0; i < images.length; i++) {
                            list_content += '<li>' + images[i] + '</li>';
                        }
                        var list = '<ul>' + list_content + '</ul>Go back to homepage';
                        res.writeHead(200, {
                            'Content-Type': 'text/html'
                        });
                        res.send(list);
                    });
                }
            } else {
                res.writeHead(200, {
                    'Content-Type': 'text/html'
                });
                res.write('<p>There are no images in the gallery</p>Go back to homepage');
                res.end();
            }
        });
    } else {
        return true;
    }
}
res.end();
}
});

Instead of using res.write() and res.end(), try using res.send(), which does both for you. UPDATE: also, make sure this code is inside a request handler where the req and res objects are passed in correctly.
for (var i = 0; i < images.length; i++) {
    list_content += '<li>' + images[i] + '</li>';
}
var list = '<ul>' + list_content + '</ul>Go back to homepage';
res.send(list);
Then, on the front end, receive it in the AJAX response callback, like:
$.post('xxxx', function(response) {
    alert(response); //list
});
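Note that res.send() is an Express method; with the plain http module used in the question there is only res.write() and res.end(). A minimal sketch of the gallery route as an Express handler that gathers every nested readdir result first and responds exactly once (the folder layout is taken from the question; everything else, including the port, is illustrative):

// Minimal sketch, assuming Express is installed and Node's fs.promises is available.
var express = require('express');
var fsp = require('fs').promises;
var app = express();

app.get('/gallery', async function(req, res) {
    try {
        var folders = await fsp.readdir('./content/images/public');
        var list_content = '';
        for (var folder of folders) {
            var images = await fsp.readdir('./content/images/public/' + folder);
            for (var image of images) {
                list_content += '<li>' + image + '</li>';
            }
        }
        // One response, sent only after every readdir has finished
        res.send('<ul>' + list_content + '</ul>Go back to homepage');
    } catch (err) {
        console.log(err);
        res.status(500).send('Could not read the gallery');
    }
});

app.listen(3000);

Because everything is awaited before the single res.send(), there is no window in which a write can happen after the response has ended.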
UPDATE: after seeing your updated code,
module.exports = function(req, res) {
doesn't make sense; it needs to be something like
exports.functionName = function(req,res){
Read this: https://www.sitepoint.com/understanding-module-exports-exports-node-js/
module.exports is an object of functions, not a function. It looks like this:
module.exports = {
    funcExample: function() {
        return 2;
    },
    funcOtherExample: function() {
        return 1;
    }
};
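For completeness, a quick sketch of how such an export would be consumed from another file (the path './myModule' is a placeholder for wherever the object above is defined):

// usage sketch; './myModule' is a hypothetical path to the file holding the export above
var myModule = require('./myModule');

console.log(myModule.funcExample());      // 2
console.log(myModule.funcOtherExample()); // 1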


How to get code to execute in order in node.js

I am trying to finish my script, but for some reason I don't know, it refuses to execute in the order I put it in.
I've tried placing a 'wait' function between the JoinRequest update function and the following code, but when run, it acts as if the function call and the wait function were the other way round, defeating the point of the wait().
const Roblox = require('noblox.js')
var fs = require('fs');
var joinRequests = []
...
function wait(ms) {
    var d = new Date();
    var d2 = null;
    do { d2 = new Date(); }
    while (d2 - d < ms * 1000);
};
...
function updateJReqs() {
    Roblox.getJoinRequests(4745601).then((array) => {
        var i;
        var final = [];
        for (i = 0; i < array.length; i++) {
            final.push(array[i].username);
        };
        if (final === '') {
            final = '-None';
        };
        joinRequests = final
        console.log('Updated join requests.')
    });
}

function check() {
    setTimeout(() => {
        fs.readFile('Request.txt', encoding = 'utf-8', function(err, data) {
            if (err) {
                check();
            } else {
                updateJReqs(); //for some reason this function is executed alongside the below, not before it.
                // Tried putting wait(x) in here.
                console.log('Request received: ' + data)
                var solution = joinRequests
                console.log('Fuffiling request with ' + solution)
                fufillRequest(solution)
                fs.unlink('Request.txt', function(err) {
                    if (err) throw err;
                });
                check();
            }
        });
    }, 400)
}
check();
The script is supposed to wait until a file is created (accomplished), update the list of join requests (accomplished) and then create a new file with the list of join requests in it (not accomplished).
If I understand your code correctly, you are working with async code: you need to return a promise from updateJReqs and add an exit condition to check(), because at the moment you have infinite recursion.
function updateJReqs() {
    return new Promise(resolve => {
        Roblox.getJoinRequests(4745601).then((array) => {
            var final = [];
            for (var i = 0; i < array.length; i++) {
                final.push(array[i].username);
            }
            if (final.length === 0) {
                final = '-None';
            }
            joinRequests = final;
            console.log('Updated join requests.');
            resolve();
        });
    });
}
async function check() {
    setTimeout(() => {
        fs.readFile('Request.txt', 'utf-8', async function(err, data) {
            if (err) {
                await check();
            } else {
                await updateJReqs();
                // Tried putting wait(x) in here.
                console.log('Request received: ' + data);
                var solution = joinRequests;
                console.log('Fuffiling request with ' + solution);
                fufillRequest(solution);
                fs.unlink('Request.txt', function(err) {
                    if (err) throw err;
                });
                // you don't have an exit from your function check();
                return 'Success';
            }
        });
    }, 400);
}
check().then(res => console.log(res));
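Even with the awaits added, check() itself still resolves before the timeout fires, so the 'Success' value is returned to the readFile callback rather than to the final .then. A minimal sketch of one way to restructure it so the chain really does wait (assuming updateJReqs returns a promise as above, and fufillRequest and joinRequests behave as in the question):

// Minimal sketch; updateJReqs, fufillRequest and joinRequests come from the code above.
const fsp = require('fs').promises;

const delay = ms => new Promise(resolve => setTimeout(resolve, ms));

async function check() {
    // poll until Request.txt exists
    while (true) {
        try {
            const data = await fsp.readFile('Request.txt', 'utf-8');
            await updateJReqs();                 // finishes before anything below runs
            console.log('Request received: ' + data);
            fufillRequest(joinRequests);
            await fsp.unlink('Request.txt');
            return 'Success';                    // exits the loop and resolves check()
        } catch (err) {
            await delay(400);                    // file not there yet, try again shortly
        }
    }
}

check().then(res => console.log(res));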

multiple http get calls nodejs

Thanks for looking into the code.
Here I am fetching some data using a feed parser, collecting the IDs into the navCodes array variable, and I want to use these IDs to make HTTP calls. Please find the code below.
function processNavCode() {
    var mfId = [53];
    var preTitle = '';
    var navCodes = [];
    mfId.forEach(function(id) {
        var query = "http://portal.xyz.com/Rss.aspx?mf=" + id;
        feed(query, function(err, feeds) {
            if (err) {
                throw err;
            }
            feeds.forEach(function(feed) {
                var link = feed.link;
                var title = feed.title;
                var navCode = link.substr(link.length - 6);
                if (title.split('-')[0].trim() != preTitle) {
                    preTitle = title;
                    counter++;
                }
                if (parseInt(navCode) != '')
                    navCodes.push = parseInt(navCode);
            });
        });
        async.eachSeries(navCodes, insertbulkMFValues, function() {
            console.log('I am done');
        });
        // insertbulkMFValues(navCode);
        // Directly call insertbulkMFValues function
    });
}
I have also tried to call insertbulkMFValues directly (as commented out above), but due to the async nature of Node.js I get either a 'Socket hang up' or a 'read ECONNRESET' error. I checked and used async as well, but was not able to get it working that way either.
var insertbulkMFValues = function(navCode, callback) {
    var options = {
        host: 'www.quandl.com',
        path: '/api/v3/datasets/AMFI/' + navCode + '.json?api_key=123456789&start_date=2013-08-30',
        method: 'GET',
        headers: {
            'Content-Type': 'application/json'
        }
    }
    var req1 = https.request(options, function(response) {
        var body = '';
        response.setEncoding('utf8');
        response.on('data', function(chunk) {
            body += chunk;
        });
        response.on('end', function() {
            if (typeof body === "string") {
                var json = JSON.parse(body);
            }
            var mfData = json.dataset.data;
            var schemeId = json.dataset.dataset_code;
            var schemeName = json.dataset.name;
            var isinCode = json.dataset.description;
            var valueData = [];
            for (var k = 0; k < mfData.length; k++) {
                var myDate = new Date(mfData[k][0]);
                valueData.push({
                    date: myDate,
                    NAV: parseFloat(mfData[k][1]).toFixed(2)
                });
            }
            var query = { "navCode": schemeId };
            var newData = {
                createdDate: Date.now(),
                navCode: schemeId,
                schemeCode: count,
                schemeName: schemeName,
                ISINCode: isinCode,
                values: valueData
            };
            MHistory.findOneAndUpdate(query, newData, { upsert: true }, function(err, doc) {
                if (err)
                    console.log('Errorr');
                else
                    console.log('Success');
            });
        });
    });
    req1.on('error', function(e) {
        console.log('problem with request: ' + e.message);
        callback(true);
    });
    req1.end();
}
Thanks in advance..
J
You can directly call insertbulkMFValues for each navCode like:
if (parseInt(navCode) != '') {
    insertbulkMFValues(navCode, function() {
        console.log('something');
    });
}
Anything that you intend to do with the results must be done within the callback of the asynchronous function.
One option for you is to use the waterfall or parallel method of the async library to retrieve all the feeds for each id and then invoke
    async.eachSeries(navCodesAccumulated, insertbulkMFValues, function() {
        console.log('I am done');
    });
within the final result callback, using the codes obtained, as sketched below.
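A minimal sketch of that idea, using async.each in place of parallel to fan out over the ids (feed(), mfId and insertbulkMFValues are assumed to be the ones defined in the question):

// Minimal sketch; feed(), mfId and insertbulkMFValues are taken from the question.
var async = require('async');

function processNavCode() {
    var navCodes = [];
    // Gather the nav codes from every feed first...
    async.each(mfId, function(id, done) {
        feed('http://portal.xyz.com/Rss.aspx?mf=' + id, function(err, feeds) {
            if (err) return done(err);
            feeds.forEach(function(item) {
                var navCode = parseInt(item.link.substr(item.link.length - 6), 10);
                if (!isNaN(navCode)) navCodes.push(navCode);
            });
            done();
        });
    }, function(err) {
        if (err) return console.error(err);
        // ...then hit the API one code at a time.
        async.eachSeries(navCodes, insertbulkMFValues, function() {
            console.log('I am done');
        });
    });
}

Note that for eachSeries to advance past the first code, insertbulkMFValues has to invoke its callback on success as well, not only in the error handler.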

Nodejs/Async: How does callback work in iteratee function for async.map as mentioned in code snippet

Being new to Node.js and async, the following is the code that I came across.
app = express();
/*
 other express use calls like - app.use(bodyParser.json());
*/
var async = require("async");
var server;

app.post('/callType/call', function(req, res) {
    var startTime = Date.now();
    server = req.body.server;
    //async.map asynchronously calls enrollStep1 for every element in the req.body.nodes array
    //HOW DOES THIS WORK??!! - WHERE IS THE CALLBACK DEFINED OR SOURCED FROM???
    //******************************************************
    async.map(req.body.nodes, function(node, callback) {
        someFunc(node.property1, node.property2, callback)
    },
    //This function is called when every task triggered by async.map has called its callback.
    function(err, results) {
        var response = {};
        if (err) {
            response.success = false;
            response.error = err;
            console.log("ERROR returned: " + JSON.stringify(response));
            res.json(response);
        } else {
            var returnResults = [];
            //Results is an array of arrays - flatten it
            var flattenedResults = [].concat.apply([], results);
            //then remove duplicates
            for (var i = 0; i < flattenedResults.length; i++) {
                var obj = flattenedResults[i];
                var isDup = returnResults.some(function(element) {
                    return element.tid === obj.tid;
                });
                if (!isDup) {
                    returnResults.push(obj);
                }
            }
            response.success = true;
            response.results = returnResults;
            res.json(response);
        }
    });
});

function someFunc(property1, property2, callback) {
    var url = '/' + callTypes + '/' + call + '/' + property1 + '/' + property2
    urClient
        .get(server + url)
        .header('Content-Type', 'application/json')
        .end(
            function(response) {
                if (response.code !== 200) {
                    callback("Error " + ". Code: " + response.code + " Response: " + JSON.stringify(response));
                } else {
                    callback("Success " + ". Code: " + response.code + " Response: " + JSON.stringify(response));
                }
            }
        );
}
The iteratee function for async.map has a definition starting with function(node, callback) {, but the callback function is never assigned. How does the callback work here?
Isn't it supposed to be assigned somewhere, like callback = myCallbackFunction;?
async.map takes three arguments: the array/object, the iteratee function that maps over the data, and the final callback, so your code should be:
async.map(req.body.nodes, someFunc, function(err, results) {
    if (err) return console.log(err);
    console.log(results);
});
And your someFunc should be:
function someFunc(item, callback) {
    // do something with item
    // it's each item in the original array/object
    callback(null, 'The results'); // first argument is the error (null on success)
}
This is a basic example: http://code.runnable.com/UyR-6c2DZZ4SmfSh/async-map-example-for-node-js
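For reference, here is a self-contained sketch of the same pattern (the doubling logic is purely illustrative, not from the original code): async.map builds the callback internally and hands it to the iteratee for each element, which is why it never has to be assigned by hand.

// Self-contained sketch; requires the async library (npm install async).
var async = require('async');

function double(item, callback) {
    // async.map supplies this callback; calling it reports this item's result
    setTimeout(function() {
        callback(null, item * 2); // (error, result)
    }, 10);
}

async.map([1, 2, 3], double, function(err, results) {
    if (err) return console.error(err);
    console.log(results); // [2, 4, 6], in the original order
});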

Video from Mongo Grid fs is not playing on Safari browser (also on Cordova app)

I am using MongoDB to store video files in GridFS. It surprised me today when I came to know that the video is not playing on the Safari browser, even though video read from GridFS plays fine on Chrome & Firefox. Following are two approaches to read video files back from GridFS; both approaches have the same problem. I did check that the correct MIME type is being set.
Approach 1:
exports.previewFile = function (req, res) {
    var contentId = new DBModule.BSON.ObjectID(req.params.fileid);
    log.debug('Calling previewFile inside FileUploadService for content id ' + contentId);
    //Read metadata details from fs.files
    var query = {_id: contentId};
    documentOperationModule.getDocumentByQuery(query, constants.FS_FILES_COLLECTION, function (err, files) {
        if (!Utilities.isEmptyList(files)) {
            var fileObj = files[0];
            var gridStore = DBModule.db.gridStore(contentId, 'r');
            gridStore.open(function (err, gridStore) {
                var stream = gridStore.stream(true);
                if (!Utilities.isEmptyObject(fileObj.metadata)) {
                    res.setHeader('Content-Type', fileObj.metadata.contentType);
                }
                stream.on("data", function (chunk) {
                    log.debug("Chunk of file data");
                    res.write(chunk);
                });
                stream.on("end", function () {
                    log.debug("EOF of file");
                    res.end();
                });
                stream.on("close", function () {
                    log.debug("Finished reading the file");
                });
            });
        } else {
            log.error({err: err}, 'Failed to read the content for id ' + contentId);
            res.status(constants.HTTP_CODE_INTERNAL_SERVER_ERROR);
            res.json({error: contentId + " not found"});
        }
    });
};
Approach 2:
exports.previewFile = function (req, res) {
    var contentId = new DBModule.BSON.ObjectID(req.params.fileid);
    log.debug('Calling previewFile inside FileUploadService for content id ' + contentId);
    //Read metadata details from fs.files
    var query = {_id: contentId};
    documentOperationModule.getDocumentByQuery(query, constants.FS_FILES_COLLECTION, function (err, files) {
        if (!Utilities.isEmptyList(files)) {
            var fileObj = files[0];
            var gridStore = DBModule.db.gridStore(contentId, 'r');
            gridStore.read(function (err, data) {
                if (!err) {
                    if (!Utilities.isEmptyObject(fileObj.metadata)) {
                        res.setHeader('Content-Type', fileObj.metadata.contentType);
                    }
                    res.end(data);
                } else {
                    log.error({err: err}, 'Failed to read the content for id ' + contentId);
                    res.status(constants.HTTP_CODE_INTERNAL_SERVER_ERROR);
                    res.json({error: err});
                }
            });
        } else {
            log.error({err: err}, 'Failed to read the content for id ' + contentId);
            res.status(constants.HTTP_CODE_INTERNAL_SERVER_ERROR);
            res.json({error: contentId + " not found"});
        }
    });
};
Following is a screenshot of Safari for reference.
Please help.
Try this gist (by https://gist.github.com/psi-4ward). It makes use of the byte range header:
https://gist.github.com/psi-4ward/7099001
Although it does not work for me with Safari, it makes sure that the correct headers are set and the correct content is delivered, so it could at least narrow down your problem.
EDIT
I've updated the gist. It now works fine with Safari for me:
https://gist.github.com/derMani/218bd18cc926d85a57a1
This should solve your problem:
function StreamGridFile(req, res, GridFile) {
    if (req.headers['range']) {
        // Range request, partially stream the file
        console.log('Range Request');
        var parts = req.headers['range'].replace(/bytes=/, "").split("-");
        var partialstart = parts[0];
        var partialend = parts[1];
        var start = parseInt(partialstart, 10);
        var end = partialend ? parseInt(partialend, 10) : GridFile.length - 1;
        var chunksize = (end - start) + 1;
        res.writeHead(206, {
            'Content-disposition': 'filename=xyz',
            'Accept-Ranges': 'bytes',
            'Content-Type': GridFile.contentType,
            'Content-Range': 'bytes ' + start + '-' + end + '/' + GridFile.length,
            'Content-Length': chunksize
        });
        // Set filepointer
        GridFile.seek(start, function() {
            // get GridFile stream
            var stream = GridFile.stream(true);
            // write to response
            stream.on('data', function(buff) {
                // count data to abort streaming if range-end is reached
                // perhaps there's a better way?
                if (start >= end) {
                    // enough data sent, abort
                    GridFile.close();
                    res.end();
                } else {
                    res.write(buff);
                    start += buff.length; // advance the position by the bytes just written
                }
            });
        });
    } else {
        // stream back whole file
        console.log('No Range Request');
        res.header('Content-Type', GridFile.contentType);
        res.header('Content-Length', GridFile.length);
        var stream = GridFile.stream(true);
        stream.pipe(res);
    }
}
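For completeness, a sketch of how the helper above might be wired into the question's previewFile handler. The gridStore object handed back by gridStore.open() in the old driver exposes length and contentType, so it can stand in for GridFile here; this wiring is an assumption based on the question's code, not part of the gist:

// Sketch only; assumes DBModule.db.gridStore() returns a GridStore as in the question.
exports.previewFile = function (req, res) {
    var contentId = new DBModule.BSON.ObjectID(req.params.fileid);
    var gridStore = DBModule.db.gridStore(contentId, 'r');
    gridStore.open(function (err, GridFile) {
        if (err) {
            res.status(500).json({error: 'could not open ' + contentId});
            return;
        }
        // Hand the open GridStore to the range-aware streamer above
        StreamGridFile(req, res, GridFile);
    });
};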
Regards
Rolf

Nodejs MongoDB updating documents with asynchronous function

I'm new to Node.js and MongoDB. Is there a way to update a document using asynchronous functions in Node.js? I have a collection of websites whose documents have _id, name and registrant. I want to insert a value into registrant using an asynchronous function. The function is a system call, and I just parse out the output that I want. Running the code, the async function logs the information I want but does not store it in the database. Any suggestions on how I can solve this? Thanks.
/*
 * { _id: 53448014b15c693931000002,
 *   name: 'google.com',
 *   registrant: 'defaultval' } */
var MongoClient = require('mongodb').MongoClient;
var ObjectID = require('mongodb').ObjectID;
var id = '53448014b15c693931000002';
var domain = 'google.com';

MongoClient.connect('mongodb://127.0.0.1:27017/mydb', function(err, db) {
    if (err) throw err;
    var collection = db.collection('websites');
    collection.findAndModify({_id: new ObjectID(id)},
        {},
        {$set: {registrant: test(domain, function (output) {
            var t = output.split("\n");
            for (var i = 0; i < t.length; ++i) {
                if (t[i].indexOf("Registrant Organization:") != -1) {
                    console.log(t[i].substring(t[i].indexOf(":") + 2, t[i].length)); //prints correct value, need this to store in registrant doc
                    return t[i].substring(t[i].indexOf(":") + 2, t[i].length);
                }
            }
        })}},
        {},
        function(err, object) {
            if (err) console.log(err.message);
            else {
                console.log(object);
            }
            db.close();
        });
});

var test = function(domain, cb) {
    var sys = require('sys');
    var exec = require('child_process').exec;
    var child = exec('whois ' + domain, function(error, stdout, stderr) {
        cb(stdout);
    });
}
Start one process per core:
var cluster = require('cluster')
  , numCPUs = require('os').cpus().length
  , windows = require('os').platform() == 'win32';

if (cluster.isMaster) {
    // Fork workers.
    for (var i = 0; i < numCPUs; i++) {
        cluster.fork();
    }
    cluster.on('exit', function(worker, code, signal) {
        console.log('worker ' + worker.pid + ' died');
    });
    return;
}
Create the query function to be async (callback-style):
function mongodbDriverQuery(callback) {
    collection.findOne({ id: getRandomNumber() }, function(err, world) {
        callback(err, world);
    });
}

http.createServer(function (req, res) {
    // JSON response object
    var hello = {message: "Hello, World!"};
    var helloStr = "Hello, World!";
    var path = url.parse(req.url).pathname;
    // mysql on windows is not supported
    if (windows && (path.substr(0, 3) == '/my' || path == '/update')) {
        path = '/doesntexist';
    }
    switch (path) {
    case '/mongodbdriver':
        // Database Test
        var values = url.parse(req.url, true);
        var queries = values.query.queries || 1;
        var queryFunctions = new Array(queries);
        for (var i = 0; i < queries; i += 1) {
            queryFunctions[i] = mongodbDriverQuery;
        }
        res.writeHead(200, {'Content-Type': 'application/json; charset=UTF-8'});
        // and run it
        async.parallel(queryFunctions, function(err, results) {
            if (queries == 1) {
                results = results[0];
            }
            res.end(JSON.stringify(results));
        });
        break;
    // ... other cases omitted from the original snippet
    }
}).listen(8080); // port is illustrative
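The same callback discipline also answers the original question: the whois parsing has to finish before the update runs, so call test() first and run findAndModify inside its callback instead of embedding test() in the $set. A minimal sketch reusing test(), id, domain, MongoClient and ObjectID exactly as defined in the question:

// Minimal sketch; test(), id, domain, MongoClient and ObjectID come from the question.
MongoClient.connect('mongodb://127.0.0.1:27017/mydb', function(err, db) {
    if (err) throw err;
    var collection = db.collection('websites');
    test(domain, function(output) {
        // Parse the whois output first...
        var registrant = 'defaultval';
        output.split('\n').forEach(function(line) {
            if (line.indexOf('Registrant Organization:') !== -1) {
                registrant = line.substring(line.indexOf(':') + 2);
            }
        });
        // ...then update the document once the value actually exists.
        collection.findAndModify(
            {_id: new ObjectID(id)},
            {},
            {$set: {registrant: registrant}},
            {},
            function(err, object) {
                if (err) console.log(err.message);
                else console.log(object);
                db.close();
            }
        );
    });
});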
