For a school project I'm creating a portal for KVM using Node.js and Express.
I need to adjust an XML file and then use that XML file to create a VM.
So I created two functions.
CreateXML:
function createXML(req, res, next) {
var parser = new xml2js.Parser();
fs.readFile('Debian7.xml', function(err, data){
parser.parseString(data, function (err, result){
result.domain.name = req.body.name;
result.domain.memory[0]['$'].unit = "GB";
result.domain.memory[0]['_'] = req.body.ram;
result.domain.currentMemory[0]['$'].unit = "GB";
result.domain.currentMemory[0]['_'] = req.body.ram;
result.domain.vcpu = req.body.cpus;
var builder = new xml2js.Builder({headless: true});
var xml = builder.buildObject(result);
fs.writeFile('./xmlfiles/' + req.body.name + '.xml', xml, function(err, data){
if(err) console.log(err);
});
});
});
};
CreateDomain:
function createDomain(req, res){
var domainXML = fs.readFileSync('./xmlfiles/' + req.body.name + '.xml', 'utf8');
hypervisor.connect(function(){
hypervisor.createDomainAsync(domainXML).then(function (domain){
console.log('Domain Created');
res.json({success: true, msg: 'successfully created domain'});
});
});
}
then I call these functions as middleware in my post request
apiRoutes.post('/domainCreate', createXML, createDomain);
But when I use Postman on the API route I get the following error:
Error: ENOENT: no such file or directory, open './xmlfiles/rickyderouter23.xml'
After the error it still creates the XML file, and if I create the XML file before using Postman it works fine. It's as if it needs to execute both functions before the XML file is created. How do I create the XML file in the first function and then use it in the second one?
The answer is "it's asynchronous" (just like many, many problems in node.js/javascript).
The fs.readFile function is asynchronous: when you call it, you give it a callback function which it will call when it finishes loading the file.
The parser.parseString is asynchronous - it will call your callback function when it finishes parsing the XML.
The fs.writeFile is the same - it will call your callback function when it finishes writing the file.
The hypervisor.connect function is the same - it will call your callback function when it finishes connecting.
The middleware functions are called in order, but they both contain code that may not have completed before they return. So when your code calls createDomain and tries to read the XML file created in createXML, the XML file probably doesn't exist yet. The fs.readFile might not be finished yet; even if it is, the parser.parseString function might not be finished yet; even if that one is finished, the fs.writeFile might not be finished yet.
One way to solve this would be to put the functionality of the createXML and createDomain functions together into one middleware function. That would allow you to rewrite it so that all the function calls that depend on previous asynchronous function calls could actually wait for those calls to complete before executing. A simple way to do it would be this:
function createXML(req, res, next) {
var parser = new xml2js.Parser();
fs.readFile('Debian7.xml', function(err, data){
parser.parseString(data, function (err, result){
result.domain.name = req.body.name;
result.domain.memory[0]['$'].unit = "GB";
result.domain.memory[0]['_'] = req.body.ram;
result.domain.currentMemory[0]['$'].unit = "GB";
result.domain.currentMemory[0]['_'] = req.body.ram;
result.domain.vcpu = req.body.cpus;
var builder = new xml2js.Builder({headless: true});
var xml = builder.buildObject(result);
fs.writeFile('./xmlfiles/' + req.body.name + '.xml', xml, function(err, data){
if(err) return console.log(err); // bail out if the write failed
// notice the call to createDomain here - this ensures
// that the connection to the hypervisor is not started
// until the file is written
createDomain(req, res);
});
});
});
};
And change your route to:
apiRoutes.post('/domainCreate', createXML);
Now, that's pretty ugly. I don't like the idea of lumping those two middleware functions into one, and I'd prefer to rewrite it to use a promise-based approach, but that's the basic idea.
router.post("/application_action", function(req,res){
var Employee = req.body.Employee;
var conn = new jsforce.Connection({
oauth2 : salesforce_credential.oauth2
});
var username = salesforce_credential.username;
var password = salesforce_credential.password;
conn.login(username, password, function(err, userInfo, next) {
if (err) { return console.error(err); res.json(false);}
// I want this conn.query to execute first and then conn.sobject
conn.query("SELECT id FROM SFDC_Employee__c WHERE Auth0_Id__c = '" + req.user.id + "'" , function(err, result) {
if (err) { return console.error(err); }
Employee["Id"] = result.records[0].Id;
});
//I want this to execute after the execution of above query i.e. conn.query
conn.sobject("SFDC_Emp__c").update(Employee, function(err, ret) {
if (err || !ret.success) { return console.error(err, ret);}
console.log('Updated Successfully : ' + ret.id);
});
});
});
I have provided my code above. I need to modify Employee in the conn.query callback and use it in conn.sobject. I need to make sure the first query executes before the second, because I'm taking a value from the first and using it in the second. Please let me know how to accomplish this.
New Answer Based on Edit to Question
To execute one query based on the results of the other, you put the second query inside the completion callback of the first like this:
router.post("/application_action", function (req, res) {
var Employee = req.body.Employee;
var conn = new jsforce.Connection({
oauth2: salesforce_credential.oauth2
});
var username = salesforce_credential.username;
var password = salesforce_credential.password;
conn.login(username, password, function (err, userInfo, next) {
if (err) {
return console.error(err);
res.json(false);
}
// I want this conn.query to execute first and then conn.sobject
conn.query("SELECT id FROM SFDC_Employee__c WHERE Auth0_Id__c = '" + req.user.id + "'", function (err, result) {
if (err) {
return console.error(err);
}
Employee["Id"] = result.records[0].Id;
//I want this to execute after the execution of above query i.e. conn.query
conn.sobject("SFDC_Emp__c").update(Employee, function (err, ret) {
if (err || !ret.success) {
return console.error(err, ret);
}
console.log('Updated Successfully : ' + ret.id);
});
});
});
});
The only place the first query's results are valid is inside that callback, because otherwise you have no way of knowing when those asynchronous results are actually available and valid.
Please note that your error handling is unfinished: you don't finish the response in any of the error conditions, and even in the success case you never actually send a response to complete the request.
Original Answer
First off, your code shows a route handler, not middleware. So, if you really intend to ask about middleware, you will have to show your actual middleware. Middleware that does not end the request needs to declare next as an argument and then call it when it is done with its processing. That's how processing continues after the middleware.
Secondly, your console.log() statements are all going to show undefined because they execute BEFORE the conn.query() callback that contains the code that sets those variables.
conn.query() is an asynchronous operation. It calls its callback sometime IN THE FUTURE. Meanwhile, your console.log() statements execute immediately.
You can see the results of the console.log() by putting the statements inside the conn.query() callback, but that is probably only part of your problem. If you explain what you're really trying to accomplish, we could probably help with a complete solution. Right now you're asking questions about flawed code without explaining the higher-level problem you're trying to solve, which makes it hard to give you the best answer to your actual problem.
FYI:
app.locals - properties scoped to your app, available to all request handlers.
res.locals - properties scoped to a specific request, available only to middleware or request handlers involved in processing this specific request/response.
req.locals - I can't find any documentation on this in Express or the HTTP module. There is some discussion of it serving basically the same purpose as res.locals, but it is not documented.
Other relevant answers:
req.locals vs. res.locals vs. res.data vs. req.data vs. app.locals in Express middleware
Express.js: app.locals vs req.locals vs req.session
You're missing the basics of asynchronous flow in JavaScript. Callbacks are queued and run on a later turn of the event loop, so the callback of conn.query executes after the console.logs outside it. Here is a good article where the basic concepts of asynchronous programming in JavaScript are explained.
var userLat = db.collection('users', function (err, document){
document.findOne({_id: loggedUserID}, function(err, docs) {
console.log(docs.currentUserLat);
})
});
This is my code; I'm trying to get the value that's console-logged into the variable. I just can't find the correct syntax to do this. The console.log does show the correct value; I just need to drop it into the variable. Grateful for some help.
What do you want to do with 'docs.currentUserLat'?
You can do what you need to do without saving docs.currentUserLat to a variable that has scope outside of your db.collection call. Some examples:
If you simply want to change the document in your database, take advantage of the many methods specified in the Collections API: http://mongodb.github.io/node-mongodb-native/2.0/api/Collection.html. For example, to update the document and simultaneously resave it in the database:
db.collection('users', function (err, document){
document.findOneAndUpdate({_id: loggedUserID},
{currentUserLat: [updated value]},
function(err, docs) {
if(err) console.log(err);
}
)
});
If you just wanted to use docs.currentUserLat inside some node function, you'll need to properly nest the document.findOne function inside a callback (or vice versa). For example, to write currentUserLat to a file using the fs module:
var fs = require('fs');
db.collection('users', function (err, document){
document.findOne({_id: loggedUserID}, function(err, docs) {
fs.writeFile("pathToYourFile", docs.currentUserLat, function(err) {
if(err) {return console.log(err);}
});
});
});
Or, if you want to send it in response to a simple http request:
var http = require('http');
http.createServer(function(request,response){
db.collection('users', function (err, document){
document.findOne({_id: loggedUserID}, function(err, docs) {
response.writeHead(200,{'Content-Type':'text/html'});
response.end(docs.currentUserLat);
});
});
});
The key thing to remember is what JohnnyHK said in their comment: docs.currentUserLat is only available inside the anonymous function passed to findOne. So, whatever it is that you need to do, do it inside this function.
(Reading the link JohnnyHK provided is a great way to get started with understanding asynchronous functions in Node. Another is https://github.com/rvagg/learnyounode)
First of all, you have to understand how JavaScript callbacks work. Then you will see that nothing assigns docs.currentUserLat to your userLat variable; docs.currentUserLat is available only inside the callback. Think about it in the following way:
Your program starts executing and encounters the line var userLat = .... This line says: start an asynchronous job and hand it a callback (which basically asks someone else to do the work). While that job runs, the program continues: userLat gets assigned whatever db.collection returns (not the document you want), and execution moves on. Then, at some later point, the callback finishes and console.logs your docs.currentUserLat.
One way to get the desired behaviour is to make userLat global and, instead of console.log(docs.currentUserLat);, do userLat = docs.currentUserLat. The problem is that if you do this, your userLat will eventually have the desired value (if the callback doesn't fail), but you cannot predict when. So if you do
var userLat = db.collection('users', function (err, document){ ... });
.. some other code
console.log(userLat);
you cannot be sure you will get the output. Another way is to put everything in another callback.
I have built a method in Express that exports a document as an HTML page:
html: function (req, res) {
Project.findOne( { id: req.params.project },
function (err, project) {
res.contentType('text/html');
res.render('exporting/html', { name: project.name });
}
);
},
Additionally, I'd like to create a method that includes that generated HTML page, together with some static assets, in a ZIP archive.
Here's my current code:
zip: function (req, res) {
Project.findOne( { id: req.params.project },
function (err, project) {
res.contentType('application/zip');
res.setHeader('content-disposition', 'attachment; filename=' + project.name + '.zip');
var zip = new AdmZip();
zip.addFile("readme.txt", new Buffer("This was inside the ZIP!"));
//------ Here I'd like to use zip.addFile to include the HTML output from the html method above ------
res.send(zip.toBuffer());
}
);
}
How can I make the zip method include the output from the html method?
You have two options: one is relatively simple and the other is a bit more complicated. You'll have to decide which you believe is which. ;)
First method
Since you are relying on express' Response.render to create your HTML from a view, you'll need to call that route on your server to retrieve the content of the page so you can include it in your zip response.
Assuming you have var http=require('http'); somewhere in this file, you can:
zip: function (req, res) {
var projectId=req.params.project||'';
if(!projectId){ // make sure we have what we need!
return res.status(404).send('requires a projectId');
}
// Ok, we start by requesting our project...
Project.findOne({id:projectId},function(err, project) {
if(err) { // ALWAYS handle errors!
return res.status(500).send(err);
}
if('object'!==typeof project){ // did we get what we expected?
return res.status(404).send('failed to find project for id: '+projectId);
}
var projectName=project.name || ''; // Make no assumptions!
if(!projectName){
return res.status(500).send('whoops! project has no name!');
}
// For clarity, let's write a function to create our
// zip archive which will take:
// 1. a name for the zip file as it is sent to the requester
// 2. an array of {name:'foo.txt',content:''} objs, and
// 3. a callback which will send the result back to our
// original requester.
var createZipArchive=function(name, files, cb){
// create our zip...
var zip=new AdmZip();
// add the files to the zip
if(Array.isArray(files)){
files.forEach(function(file){
zip.addFile(file.name,new Buffer(file.content));
});
}
// pass the filename and the new zip to the callback
return cb(name, zip);
};
// And the callback that will send our response...
//
// Note that `res` as used here is the original response
// object that was handed to our `zip` route handler.
var sendResult=function(name, zip){
res.contentType('application/zip');
res.setHeader('content-disposition','attachment; filename=' + name);
return res.send(zip.toBuffer());
};
// Ok, before we formulate our response, we'll need to get the
// html content from ourselves which we can do by making
// a get request with the proper url.
//
// Assuming this server is running on localhost:80, we can
// use this url. If this is not the case, modify it as needed.
var url='http://localhost:80/html';
var httpGetRequest = http.get(url,function(getRes){
var body=''; // we'll build up the result from our request here.
// The 'data' event is fired each time the "remote" server
// returns a part of its response. Remember that these data
// can come in multiple chunks, and you do not know how many,
// so let's collect them all into our body var.
getRes.on('data',function(chunk){
body+=chunk.toString(); // make sure it's not a Buffer!
});
// The 'end' event will be fired when there are no more data
// to be read from the response so it's here we can respond
// to our original request.
getRes.on('end',function(){
var filename=projectName+'.zip',
files=[
{
name:'readme.txt',
content:'This was inside the ZIP!'
},{
name:'result.html',
content:body
}
];
// Finally, call our zip creator passing our result sender...
//
// Note that we could have both built the zip and sent the
// result here, but using the handlers we defined above
// makes the code a little cleaner and easier to understand.
//
// It might have been nicer to create handlers for all the
// functions herein defined in-line...
return createZipArchive(filename,files,sendResult);
});
}).on('error',function(err){
// This handler will be called if the http.get request fails
// in which case, we simply respond with a server error.
return res.status(500).send('could not retrieve html: '+err);
});
});
}
This is really the best way to solve your problem, even though it might seem complex. Some of the complexity can be reduced by using a better HTTP client library like superagent, which reduces all the event-handling rigmarole to a simple:
var request = require('superagent');
request.get(url, function(err, res){
...
var zip=new AdmZip();
zip.addFile('filename',new Buffer(res.text));
...
});
Second method
The second method utilizes the render() method of express' app object, which is exactly what res.render() uses to convert views into HTML.
See Express app.render() for how this function operates.
Note that this solution is the same except for the portion annotated starting at // - NEW CODE HERE -.
zip: function (req, res) {
var projectId=req.params.project||'';
if(!projectId){ // make sure we have what we need!
return res.status(404).send('requires a projectId');
}
// Ok, we start by requesting our project...
Project.findOne({id:projectId},function(err, project) {
if(err) { // ALWAYS handle errors!
return res.status(500).send(err);
}
if('object'!==typeof project){ // did we get what we expected?
return res.status(404).send('failed to find project for id: '+projectId);
}
var projectName=project.name || ''; // Make no assumptions!
if(!projectName){
return res.status(500).send('whoops! project has no name!');
}
// For clarity, let's write a function to create our
// zip archive which will take:
// 1. a name for the zip file as it is sent to the requester
// 2. an array of {name:'foo.txt',content:''} objs, and
// 3. a callback which will send the result back to our
// original requester.
var createZipArchive=function(name, files, cb){
// create our zip...
var zip=new AdmZip();
// add the files to the zip
if(Array.isArray(files)){
files.forEach(function(file){
zip.addFile(file.name,new Buffer(file.content));
});
}
// pass the filename and the new zip to the callback
return cb(name, zip);
};
// And the callback that will send our response...
//
// Note that `res` as used here is the original response
// object that was handed to our `zip` route handler.
var sendResult=function(name, zip){
res.contentType('application/zip');
res.setHeader('content-disposition','attachment; filename=' + name);
return res.send(zip.toBuffer());
};
// - NEW CODE HERE -
// Render our view, build our zip and send our response...
app.render('exporting/html', { name:projectName }, function(err,html){
if(err){
return res.status(500).send('failed to render view: '+err);
}
var filename=projectName+'.zip',
files=[
{
name:'readme.txt',
content:'This was inside the ZIP!'
},{
name:'result.html',
content:html
}
];
// Finally, call our zip creator passing our result sender...
//
// Note that we could have both built the zip and sent the
// result here, but using the handlers we defined above
// makes the code a little cleaner and easier to understand.
//
// It might have been nicer to create handlers for all the
// functions herein defined in-line...
return createZipArchive(filename,files,sendResult);
});
});
}
While this method is somewhat shorter, by utilizing the underlying mechanism that Express uses to render views, it "couples" your zip route to the Express engine in such a way that, should the Express API change in the future, you'll need to make two changes to your server code (to properly handle the html route and the zip route), rather than only one with the previous solution.
Personally, I favor the first solution as it is cleaner (in my mind) and more independent of unexpected change. But as they say YMMV ;).
I'm using the node-soap client from milewise (great API, by the way), but I'm having some difficulty getting the results of the callback into the right scope.
Here is the code I have for now:
function generateSoapRequest(req, res, next)
{
soap.createClient('http://127.0.0.1:' + cfg.service_port + cfg.service_url_path,
function(err, client) {
client.CXIf.CXIf.CXProcessXML(
{"XMLRequestData": {"CXLogon": {"UserID":"1901007", "Password":"2580", "LogonType":11 } } },
function(err, result, body) {
if (err) {
console.log(err);
return;
}
console.log(result);
var cxresponse = result.XMLResponse[0].CXResponse;
console.log('SessionID:' + cxresponse.SessionID + ' SessionInstanceID:' + cxresponse.SessionInstanceID);
});
});
}
function getVoiceMailInformation(req, res, next) {
var cxresponse = generateSoapRequest(req, res, next);
var something = doSomethingNext(cxresponse);
}
function doSomethingNext(cxresponse){....; return something;}
Basically, when I launch getVoiceMailInformation(), it creates a SOAP client and requests some information through generateSoapRequest().
The next step would be to get the result of that function (not implemented in the code above, because I don't know how) and do something else.
My problem is that soap.createClient is asynchronous, so the callback fires well after generateSoapRequest has already returned.
What would be the best approach ?
(Maybe it's something trivial, but the scope of an anonymous function in javascript is something that is killing me.)
Any help will be very appreciated.
Basically you can't do something like:
var cxresponse = generateSoapRequest(req, res, next);
because the function you're calling invokes asynchronous code and therefore can't return a value that's determined by that code. The normal way around this is to give the function an extra callback parameter: a function that will be called with the result once the result becomes available. It doesn't have to be an anonymous function; it can be a named function. In your case (assuming you've modified generateSoapRequest to take a callback as its fourth argument and call it when the results are ready), you could write
generateSoapRequest(req, res, next, doSomethingNext);
and then doSomethingNext will be called with cxresponse as an argument. Of course, since doSomethingNext also gets called asynchronously, it can't return a value either, so you'll have to apply the same technique to it.
The async module can make this sort of thing easier: in particular, its "waterfall" pattern is useful when you have a bunch of functions that have to run in sequence, each being called back from the previous one.
So I'm trying to use Node's fs module in my Express app to iterate over a directory, store each filename in an array which I can pass to my Express view and iterate through, but I'm struggling to do so. When I do a console.log within the files.forEach loop, it prints the filename just fine, but as soon as I try to do anything such as:
var myfiles = [];
var fs = require('fs');
fs.readdir('./myfiles/', function (err, files) { if (err) throw err;
files.forEach( function (file) {
myfiles.push(file);
});
});
console.log(myfiles);
it fails and just logs an empty array. I'm not sure exactly what's going on; I think it has to do with callback functions, but if someone could walk me through what I'm doing wrong, why it's not working, and how to make it work, it would be much appreciated.
The myfiles array is empty because the callback hasn't been called yet when you call console.log().
You'll need to do something like:
var fs = require('fs');
fs.readdir('./myfiles/',function(err,files){
if(err) throw err;
files.forEach(function(file){
// do something with each file HERE!
});
});
// because trying to do something with files here won't work because
// the callback hasn't fired yet.
Remember, in Node the code after an asynchronous call keeps running right away; unless you do your processing inside the callbacks, you cannot guarantee the asynchronous functions have completed yet.
One way around this problem for you would be to use an EventEmitter:
var fs=require('fs'),
EventEmitter=require('events').EventEmitter,
filesEE=new EventEmitter(),
myfiles=[];
// this event will be called when all files have been added to myfiles
filesEE.on('files_ready',function(){
console.dir(myfiles);
});
// read all files from current directory
fs.readdir('.',function(err,files){
if(err) throw err;
files.forEach(function(file){
myfiles.push(file);
});
filesEE.emit('files_ready'); // trigger files_ready event
});
As several have mentioned, you are using an async method, so you have a nondeterministic execution path.
However, there is an easy way around this. Simply use the Sync version of the method:
var myfiles = [];
var fs = require('fs');
var arrayOfFiles = fs.readdirSync('./myfiles/');
//Yes, the following is not super-smart, but you might want to process the files. This is how:
arrayOfFiles.forEach( function (file) {
myfiles.push(file);
});
console.log(myfiles);
That should work as you want. However, synchronous calls block the event loop, so you should avoid them unless it is vitally important for the operation to be synchronous.
Read more here: fs.readdirSync
fs.readdir is asynchronous (as with many operations in node.js). This means that the console.log line is going to run before readdir has a chance to call the function passed to it.
You need to either:
Put the console.log line within the callback function given to readdir, i.e:
fs.readdir('./myfiles/', function (err, files) { if (err) throw err;
files.forEach( function (file) {
myfiles.push(file);
});
console.log(myfiles);
});
Or simply perform some action with each file inside the forEach.
I think it has to do with callback functions,
Exactly.
fs.readdir makes an asynchronous request to the file system for that information, and calls the callback at some later time with the results.
So function (err, files) { ... } doesn't run immediately, but console.log(myfiles) does.
At some later point in time, myfiles will contain the desired information.
You should note BTW that files is already an Array, so there is really no point in manually appending each element to some other blank array. If the idea is to put together the results from several calls, then use .concat; if you just want to get the data once, then you can just assign myfiles = files directly.
Overall, you really ought to read up on "Continuation-passing style".
I faced the same problem and, based on the answers given in this post, I've solved it with Promises, which seem to be a perfect fit for this situation:
router.get('/', (req, res) => {
var viewBag = {}; // It's just my little habit from .NET MVC ;)
var readFiles = new Promise((resolve, reject) => {
fs.readdir('./myfiles/',(err,files) => {
if(err) {
reject(err);
} else {
resolve(files);
}
});
});
// just a showcase, in case you need to run more async operations before the route responds
var anotherPromise = new Promise((resolve, reject) => {
doAsyncStuff((err, anotherResult) => {
if(err) {
reject(err);
} else {
resolve(anotherResult);
}
});
});
Promise.all([readFiles, anotherPromise]).then((values) => {
viewBag.files = values[0];
viewBag.otherStuff = values[1];
console.log(viewBag.files); // logs e.g. [ 'file.txt' ]
res.render('your_view', viewBag);
}).catch((errors) => {
res.render('your_view',{errors:errors}); // you can use 'errors' property to render errors in view or implement different error handling schema
});
});
Note: you don't have to push the found files into a new array because you already get an array from fs.readdir()'s callback. According to the Node docs:
The callback gets two arguments (err, files) where files is an array
of the names of the files in the directory excluding '.' and '..'.
I believe this is a very elegant and handy solution and, most of all, it doesn't require you to bring in and manage new modules in your script.