Creating promises in loop - node.js

I have to create promises in a loop according to a given config file and return a response when all of them are resolved. Here is the code:
for (let type in spotlight) {
    switch (type) {
        case "outliers": {
            let ops = spotlight[type];
            for (let i = 0; i < ops.length; i++) {
                (function(op) {
                    let p = new Promise(function(resolve, reject) {
                        let reqUrl = urlCreator(op.uri, op.query);
                        apiService.get(reqUrl, function(isSuccess, data) {
                            if (!isSuccess) {
                                return reject(data);
                            }
                            // build the data points and compute the outliers
                            // let temp = [];
                            // let overallScore = data.overall.score;
                            // for (let day in overallScore) {
                            //     temp.push({ "key": day, "value": parseFloat(overallScore[day]) });
                            // }
                            // let outliers = stats.outliers(temp, "key", "value");
                            resolve({ "type": type, "name": op.name, "data": outliers });
                        });
                    });
                    promiseArray.push(p);
                }(ops[i]));
            }
            break;
        }
        case "filters": {
            let ops = spotlight[type];
            for (let i = 0; i < ops.length; i++) {
                (function(op) {
                    let p = new Promise(function(resolve, reject) {
                        let reqUrl = urlCreator(op.uri, op.query);
                        apiService.get(reqUrl, function(isSuccess, data) {
                            if (!isSuccess) {
                                return reject(data);
                            }
                            resolve({ "type": type, "name": op.name, "data": data });
                        });
                    });
                    promiseArray.push(p);
                }(ops[i]));
            }
            break;
        }
    }
}

Promise.all(promiseArray).then(values => {
    return res.json(values);
}, reason => {
    return res.json(reason);
}).catch(reason => {
    return res.json(reason);
});
The problem is that the promises never settle, neither resolved nor rejected. According to the config file, it has to hit two URLs, say U1 and U2. I tried logging the output to see which requests return. When the server is started and the very first request is made, U1 returns and the request hangs. On refresh I get responses from U2, U2 and the request hangs; on refresh again U1, U1, and this continues. It seems that for some reason only one request returns and the other sits in a buffer somewhere and comes back when the next request is made. Both requests are being made to the local server only; I am routing them externally just to make use of the cache, since the URL is being used as the cache key.
I tried using dummy URLs like facebook.com and google.com, and it works perfectly fine. Using one local URL and another like facebook.com also works, but when both URLs point at the local server, it gets stuck.
Does it have anything to do with the single-threaded nature of Node, or with using the same socket for making both requests?
PS: I am using npm-request to make the URL calls.

Perhaps hesitating before making the second request would solve your problem.
I've made some tools that could help with that. See the MacroQTools.js file at
https://github.com/J-Adrian-Zimmer/JavascriptPromisesClarified.git

You're defining the request callback as function(success, data), while request consumes error-first callbacks, defined as function(error, response).
You're calling request like:
apiService.get(reqUrl, function(isSuccess, data) {
    if (!isSuccess) {
        return reject(data);
    }
    resolve({ "type": type, "name": op.name, "data": data });
});
pretending that if the first parameter is missing you have to reject with the second parameter, data. Really, it should be something like:
apiService.get(reqUrl, function(err, data) {
    if (err) {
        reject(err);
    } else {
        resolve({ "type": type, "name": op.name, "data": data });
    }
});
request expects error-first callbacks, like almost everything in Node that takes a callback. So when the requests actually work as expected, your code is rejecting the promises with the real response value: when a request succeeds, the first argument is null and data holds the real response.
This is certainly breaking something, though fixing it alone may not solve your issue completely: I believe your requests are acting weird because of some configuration problem in your API, not just because you're rejecting promises when requests succeed (that by itself would just send the data as the rejection reason).
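One configuration suspect worth ruling out (an assumption on my part, not something your code shows): Node's HTTP agent used to cap concurrent sockets per host (maxSockets defaulted to 5 in older versions), and two pending keep-alive requests against the same local host can wedge in the pool. As an experiment you could opt out of pooling in request; a minimal sketch, assuming apiService wraps npm-request:

var request = require('request');

// diagnostic sketch: opt out of the shared agent pool so each call gets a
// fresh socket; the (isSuccess, data) callback shape matches the question's code
apiService.get = function (reqUrl, cb) {
    request({ url: reqUrl, agent: false }, function (err, response, body) {
        cb(!err, err || body);
    });
};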
Also, you're handling the rejection of Promise.all() twice: passing a second handler to then and calling catch as well. Only one is needed, and .catch(handler) is usually the better choice.
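A small illustration of the difference (somePromise is a stand-in for any promise):

// with .then(onSuccess, onError), onError does NOT see errors thrown by onSuccess
somePromise.then(
    value => { throw new Error('boom'); },  // this error escapes unhandled here
    reason => console.log('handles the rejection only')
);

// with .then(onSuccess).catch(onError), onError sees both cases
somePromise
    .then(value => { throw new Error('boom'); })
    .catch(reason => console.log('catches rejections AND the thrown boom'));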
I made a small working example of how you can use Promise.all to collect async requests. I used imdb as the apiService, but any async HTTP service would work. I didn't reproduce your code exactly, but I'm sure you can adapt this to make your code work, at least the part that consumes HTTP services.
var express = require('express');
var app = express();
var Promise = require('bluebird');
var imdb = require('imdb-api');

app.get('/', controllerHandler);

app.listen(3000, function () {
    console.log('Example app listening on port 3000!');
});

var apiService = {};
apiService.get = imdb.getReq;

function controllerHandler(request, response) {
    // like iterating through spotlight.type and returning an array of promises from it.
    // in this case the array is from films, and Airbag is obviously the best of them
    var promises = [{ name: 'The Matrix' }, { name: 'Avatar' }, { name: 'Airbag' }].map(createPromise);
    // use either .catch(errorHandler) or .then(successHandler, errorHandler). The former is better:
    Promise.all(promises).then(successHandler).catch(errorHandler);

    function successHandler(result) {
        return response.json(result);
    }

    function errorHandler(reason) {
        console.log('There was an error calling the service:');
        console.log(reason);
        return response.send('there was an error');
    }
}

function createPromise(film) {
    return new Promise(function (resolve, reject) {
        apiService.get(film, function (err, data) {
            if (err)
                reject(new Error(err));
            else
                resolve({ title: data.title, year: data.year });
        });
    });
}

Related

Recommended pattern to page through API response until exhausted?

I'm new to Node and the async programming model. I'm having problems dealing with a simple requirement that seems pretty basic in synchronous environments: paging through an API response until the response is empty.
More specifically, the API, on a successful call, will return data and a status of 200 or 206 (partial content). If I see the 206 response, I need to keep making calls to the API (also sending a page query param that I increment each time) until I see the 200 response.
In a synchronous language, the task would be a piece of cake:
// pseudocode
data = []
page = 1
do {
    response = api.call(page)
    data.append(response.data)
    page++
} while (response.status != 200)
return data
Now, in Node, for a single api call, code like this will work:
// fire when '/' has a GET request
app.get('/', (req, res) => {
    axios.get('https://api.com/v1/cats')
        .then(response => {
            // now what??
        });
});
See the //now what?? comment? That's the point where I'm wondering how to proceed. I came across this somewhat-relevant post but am not able to convert this to a format that will work for me in Node and Axios.
Is it enough to just wrap the axios code in a separate function? I don't think so, because if I do this:
function getData(pageNum) {
    axios.get('https://api.com/v1/cats')
        .then(response => {
            // now what??
        });
}
I can't rely on a return value because as soon as axios.get() gets executed, the function will be over. I can call getData() again after I get the first response, but then, suppose I want to return all the data from these multiple calls as the HTTP response from my Express server... how do I do that?
I hope I will not get downvoted for laziness or something. I've really looked around but not found anything relevant.
First, a counter-question: Is the data set so big that you need to worry about using up all the memory? Because if so then it will take more work to structure your code in a way that streams the data all the way through. (In fact I'm not even sure whether express allows streaming... you are using express aren't you?)
From the axios documentation, it looks like response is a readable stream which provides the response body. So reading it is also an asynchronous task. So you should write a function that does that. See the "Stream" page of the nodejs docs for more details. Or I could be persuaded to help with that too, time permitting. But for now, I'll assume you have a function readResponse, which takes an axios response object as an argument and returns a promise, and the promise resolves to an object such as { statusCode: 206, result: ['thing1', 'thing2'] }. I'll also assume that your goal is to get all the result arrays and concatenate them together to get e.g. ['thing1', 'thing2', 'thing3', 'thing4', 'thing5', 'thing6'].
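For concreteness, here is one possible shape of readResponse. Note that in axios's default configuration the body actually arrives already buffered and JSON-parsed on response.data, so this can be a thin wrapper; treat the field names as assumptions about your API:

// sketch: adapt an axios response into { statusCode, result }
// (response.status is the HTTP status, response.data the parsed body,
// which we assume here is an array)
function readResponse(response) {
    return Promise.resolve({
        statusCode: response.status,
        result: response.data
    });
}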
You could write a self-calling version of your getData function. This will retrieve all data from a given page onwards (not just the page itself):
function getData(pageNum) {
    return axios.get('https://api.com/v1/cats' + (pageNum ? '?page=' + pageNum : ''))
        .then(readResponse)
        .then(function(parsedResponse) {
            if (parsedResponse.statusCode == 200) {
                return parsedResponse.result;
            } else if (parsedResponse.statusCode == 206) {
                return getData(pageNum + 1).then(function(laterData) {
                    return parsedResponse.result.concat(laterData);
                });
            } else {
                // error handling here: throw an exception or return a failing promise.
            }
        });
}
Then, to get all data, just call this function with pageNum = 0:
// fire when '/' has a GET request
app.get('/', (req, res) => {
    getData(0)
        .then(function(results) {
            // results is now the array you want.
            var response = JSON.stringify(results); // or however you serialise your data
            res.send(response);
        });
});
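If your Node version supports it, the same recursion also flattens nicely into a loop with async/await. A sketch under the same readResponse assumption as above:

async function getData(pageNum) {
    let data = [];
    let page = pageNum;
    let statusCode = 206; // sentinel: keep fetching while we see partial content
    while (statusCode === 206) {
        const response = await axios.get('https://api.com/v1/cats' + (page ? '?page=' + page : ''));
        const parsed = await readResponse(response);
        statusCode = parsed.statusCode;
        if (statusCode !== 200 && statusCode !== 206) {
            throw new Error('unexpected status ' + statusCode);
        }
        data = data.concat(parsed.result);
        page++;
    }
    return data;
}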

Node.js DNS Lookup scope error? (POST request)

I'm making a DNS lookup API using Node.js and the Express.js framework: when it receives a POST request, it should return the addresses for the requested record types.
app.post('/', (req, res) => {
    // Request format:
    // const l = {
    //     lookup: 'twitter.com',
    //     recordTypes: ['A', 'TXT']
    // };

    // Using destructuring to fetch the properties
    const { lookup, recordTypes } = req.body;
    console.log(lookup, recordTypes);

    // For each record type
    recordTypes.forEach(function(type) {
        // setTimeout to get something async
        setTimeout(function() {
            dns.resolve(lookup.toLowerCase(), type, (err, addresses) => {
                console.log(type);
                if (err) {
                    return console.log(`\nType(${type}):\n`, err);
                }
                result = result + JSON.stringify({ type: `${type}`, response: { addresses } });
                console.log(result);
            });
        }, 2000);
    });
    res.send(result);
});
It logs the correct stuff in the console but when it comes to the response, it returns an empty string. I used setTimeout to mimic the asynchronous nature of the request but it just does not work.
Please assume that I have declared stuff like result etc., because it is working. Also, please don't redirect me to the Node.js documentation because I have already read that stuff and that's not the problem here. The problem is that I need to get every record type in an array and send that back as a response.
Here's what I have tried:
Tried to push response for each record type in the result array,
Tried to use a for of loop instead of forEach
Please help!
The way I'm reading your code is that for each item in the array you correctly use callbacks to do each individual bit of processing.
However, remember that forEach itself is not asynchronous: you are setting up a bunch of tasks that will complete sometime, then returning undefined... and only then do your results start to trickle in.
There are a couple of ways to do this correctly. As you are using callbacks here, I will stick with that style. You want a callback when all items in an array have been completely processed. The async module does this very well, providing many high-quality methods that act on arrays and give you a callback when they are all done.
Your function will look something like:
// assumes: const async = require('async'), plus dns, lookup and res in scope
let result = '';
async.each(recordTypes,
    (type, done) => {
        dns.resolve(lookup.toLowerCase(), type, (err, addresses) => {
            result = result + JSON.stringify({ type: `${type}`, response: { addresses } });
            done(err);
        });
    },
    (allOverError) => {
        res.send(result);
    }
);
Notice there are two function parameters here: the first one is called for every item in the list, and the last is called when every item in the list has been completely processed.
There are other ways too, promises or the async/await keywords (confusing because of the name of the async module), but callbacks are good.
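For comparison, a sketch of the promise-based route, using util.promisify over dns.resolve and Promise.all (assuming the same req/res handler context as the question):

const util = require('util');
const dns = require('dns');
const resolveAsync = util.promisify(dns.resolve);

app.post('/', (req, res) => {
    const { lookup, recordTypes } = req.body;
    // one promise per record type; Promise.all preserves the input order
    Promise.all(recordTypes.map(type =>
        resolveAsync(lookup.toLowerCase(), type)
            .then(addresses => ({ type, response: { addresses } }))
    ))
        .then(results => res.json(results))
        .catch(err => res.status(500).json({ error: err.code }));
});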

How do I achieve a synchronous requirement using asynchronous NodeJS

I am adding user validation and a data-modification page to a Node.js application.
In a synchronous universe, in a single function I would:
Lookup the original record in the database
Lookup the user in LDAP to see if they are the owner or admin
Do the logic and write the record.
In an asynchronous universe that won't work. To solve it I've built a series of hand-off functions:
router.post('/writeRecord', jsonParser, function(req, res) {
    var post = req.post;
    var smdb = new AWS.DynamoDB.DocumentClient();
    var params = { ... };
    smdb.query(params, function(err, data) {
        if (err == null) writeRecordStep2(post, data);
    });
});

function writeRecordStep2(ru, post, data) {
    var conn = new LDAP();
    conn.search(
        'ou=groups,o=amazon.com',
        { ... },
        function(err, resp) {
            if (err == null) {
                writeRecordStep3(ru, post, data, ldap1);
            }
        }
    );
}

function writeRecordStep3(ru, post, data) {
    var conn = new LDAP();
    conn.search(
        'ou=groups,o=amazon.com',
        { ... },
        function(err, resp) {
            if (err == null) {
                writeRecordStep4(ru, post, data, ldap1, ldap2);
            }
        }
    );
}

function writeRecordStep4(ru, post, data, ldap1, ldap2) {
    // Do stuff with collected data
}
Additionally, because the LDAP and Dynamo logic are in their own source documents, these functions are scattered tragically around the code.
This strikes me as inefficient, as well as inelegant. I'm eager to find a more natural asynchronous pattern to achieve the same result.
Any promise library should sort your issue out. My preferred choice is bluebird. In summary, promise libraries help you compose asynchronous operations.
If you haven't heard about bluebird then just use it. It converts all the functions of a module so that they return then-able promises. Simply put, it promisifies all functions.
Here is the mechanism:
Module1.someFunction()  // do your job and finally pass the result to the next call
    .then()             // use the object returned by the previous call, do your job, return the updated value
    .then()             // and so on
    .catch()            // do your job when any error occurs
Hope you understand. Here is an example:
var Promise = require("bluebird");
var readFile = Promise.promisify(require("fs").readFile);

readFile("myfile.js", "utf8").then(function(contents) {
    return eval(contents);
}).then(function(result) {
    console.log("The result of evaluating myfile.js", result);
}).catch(SyntaxError, function(e) {
    console.log("File had syntax error", e);
}).catch(function(e) {
    // catch any other error
    console.log("Error reading file", e);
});
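As an aside, the "promisifies all functions" behaviour described above is bluebird's promisifyAll, which adds an Async-suffixed variant of every function on a module. A minimal sketch:

var Promise = require("bluebird");
// promisifyAll adds readFileAsync, writeFileAsync, ... alongside the originals
var fs = Promise.promisifyAll(require("fs"));

fs.readFileAsync("myfile.js", "utf8")
    .then(function (contents) {
        console.log("read", contents.length, "characters");
    })
    .catch(function (e) {
        console.log("Error reading file", e);
    });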
I could not tell from your pseudo-code exactly which async operations depend upon results from other ones, and knowing that is key to the most efficient way to code a series of asynchronous operations. If two operations do not depend upon one another, they can run in parallel, which generally gets to an end result faster. I also can't tell exactly what data needs to be passed on to later parts of the async requests (too much pseudo-code and not enough real code to show us what you're really attempting to do).
So, without that level of detail, I'll show you two ways to approach this. The first runs each operation sequentially: run the first async operation; when it's done, run the next one, accumulating all the results into an object that is passed along to the next link in the chain. This is general purpose since all async operations have access to all the prior results.
This makes use of the promises built into the AWS.DynamoDB interface and makes our own promise for conn.search() (though if I knew more about that interface, it may already have a promise interface).
Here's the sequential version:
// promisify the search method
const util = require('util');
LDAP.prototype.searchAsync = util.promisify(LDAP.prototype.search);

// utility function that does a search and adds the result to the object passed in;
// returns a promise that resolves to the object
function ldapSearch(data, key) {
    var conn = new LDAP();
    return conn.searchAsync('ou=groups,o=amazon.com', { ... }).then(results => {
        // put our results onto the passed-in object
        data[key] = results;
        // resolve with the original object (so we can collect data here in a promise chain)
        return data;
    });
}
router.post('/writeRecord', jsonParser, function(req, res) {
    let post = req.post;
    let smdb = new AWS.DynamoDB.DocumentClient();
    let params = { ... };
    // The latest AWS interface gives you a promise via the .promise() method
    smdb.query(params).promise().then(dbresult => {
        return ldapSearch({ post, dbresult }, "ldap1");
    }).then(result => {
        // result.dbresult
        // result.ldap1
        return ldapSearch(result, "ldap2");
    }).then(result => {
        // result.dbresult
        // result.ldap1
        // result.ldap2
        // do something with all the collected data here
    }).catch(err => {
        console.log(err);
        res.status(500).send("Internal Error");
    });
});
And here's a parallel version that runs all three async operations at once, waits for all three of them to be done, and then has all the results at once:
// if the three async operations you show can be done in parallel
// first, promisify things
const util = require('util');
LDAP.prototype.searchAsync = util.promisify(LDAP.prototype.search);

function ldapSearch(params) {
    var conn = new LDAP();
    return conn.searchAsync('ou=groups,o=amazon.com', { ... });
}

router.post('/writeRecord', jsonParser, function(req, res) {
    let post = req.post;
    let smdb = new AWS.DynamoDB.DocumentClient();
    let params = { ... };
    Promise.all([
        ldapSearch(...),
        ldapSearch(...),
        smdb.query(params).promise()
    ]).then(([ldap1Result, ldap2Result, queryResult]) => {
        // process ldap1Result, ldap2Result and queryResult here
    }).catch(err => {
        console.log(err);
        res.status(500).send("Internal Error");
    });
});
Keep in mind that due to the pseudo-code nature of the code in your question, this is also pseudo-code where implementation details (exactly what parameters you're searching for, what response you're sending, etc...) have to be filled in. This should be illustrative of promise chaining to serialize operations and the use of Promise.all() for parallelizing operations and promisifying a method that didn't have promises built in.
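For completeness, the same parallel flow reads naturally with async/await (still a sketch, under the same promisified helpers and elided parameters as above):

router.post('/writeRecord', jsonParser, async function(req, res) {
    try {
        let smdb = new AWS.DynamoDB.DocumentClient();
        let params = { ... };
        // await unwraps the Promise.all result into the three values
        const [ldap1Result, ldap2Result, queryResult] = await Promise.all([
            ldapSearch(...),
            ldapSearch(...),
            smdb.query(params).promise()
        ]);
        // process ldap1Result, ldap2Result and queryResult here
    } catch (err) {
        console.log(err);
        res.status(500).send("Internal Error");
    }
});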

res.send() is not sending current response, instead keeps last one

This is some of my code from my index.js. It's waiting for a person to visit url.com/proxy, and then it loads up my proxy page, which is really just a form that sends back an email and a code. From my MongoDB database, I grab the user's order using the code, which contains some information I need (like the product and the message they're trying to get). For some reason, it seems like it's responding before it gets this information, and then holds onto it for the next time the form is submitted.
The newline in my res.send(product + '\n' + message) isn't working either, but that's not a big deal right now.
But, for example, the first time I fill out the form I'll get a blank response. The second time, I'll get the response to whatever I filled in for the first form, and then the third time I'll get the second response. I'm fairly new to web development and feel like I'm doing something obviously wrong but can't seem to figure it out. Any help would be appreciated, thank you.
app.get('/proxy', function(req, res) {
    res.sendFile(__dirname + "/views/proxy.html");
});

var message = "";
var product = "";

app.post('/getMessage', function(req, res) {
    returnMsg(req.body.user.code, req.body.user.email);
    //res.setHeader('Content-Type', 'text/plain');
    res.send(product + "\n" + message);
});

function returnMsg(code, email) {
    MongoClient.connect(url, function(err, db) {
        var cursor = db.collection('Orders').find({ "order_id": Number(code) });
        cursor.each(function(err, doc) {
            assert.equal(err, null);
            if (doc != null) {
                message = doc["message"];
                product = doc["product"];
            } else {
                console.log("wtf");
                // error code here
            }
        });
        console.log(email + " + " + message);
        var document = {
            "Email": email,
            "Message": message
        };
        db.collection("Users").insertOne(document);
        db.close();
    });
}
You need to do lots of reading about how asynchronous programming works in node.js. There are significant design problems with this code:
You are using module level variables instead of request-level variables.
You are not correctly handling asynchronous responses.
All of this makes a server that simply does not work correctly. You've found one of the problems already. Your async response finishes AFTER you send your response so you end up sending the previously saved response not the current one. In addition, if multiple users are using your server, their responses will tromp on each other.
The core design principle here is first that you need to learn how to program with asynchronous operations. Any function that uses an asynchronous response and wants to return that value back to the caller needs to either accept a callback and deliver the async value via the callback, or return a promise and deliver the value via a resolved promise. The caller then needs to use that callback or promise to fetch the async value when it is available, and only send the response then.
In addition, all data associated with a request needs to stay "inside" the request handle or the request object - not in any module level or global variables. That keeps the request from one user from interfering with the requests from another user.
To understand how to return a value from a function with an asynchronous operation in it, see How do I return the response from an asynchronous call?.
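A minimal sketch of the two shapes that principle takes (getValueWithCallback and getValueWithPromise are hypothetical names, with setTimeout standing in for any async operation):

// callback style: the function accepts a callback and delivers the value through it
function getValueWithCallback(callback) {
    setTimeout(() => callback(null, 42), 100); // error-first convention
}

// promise style: the function returns a promise that resolves with the value
function getValueWithPromise() {
    return new Promise(resolve => setTimeout(() => resolve(42), 100));
}

// the caller fetches the value only when it is available:
getValueWithCallback((err, value) => { if (!err) console.log(value); });
getValueWithPromise().then(value => console.log(value));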
What ends up happening in your code is this sequence of events:
1. An incoming request arrives for /getMessage.
2. You call returnMsg().
3. returnMsg() initiates a connection to the database and then returns.
4. Your request handler calls res.send() with whatever was previously in the message and product variables.
5. Some time later, the database connection finishes and you call db.collection().find() and then iterate the cursor.
6. Some time later still, the cursor iteration produces its first result, which you put into your message and product variables (where those values sit until the next request comes in).
In working out how your code should actually work, there are some things about your logic that are unclear. You are assigning message and product inside of cursor.each(). Since cursor.each() is a loop that can run many iterations, which value of message and product do you actually want to use in the res.send()?
Assuming you want the last message and product value from your cursor.each() loop, you could do this:
app.post('/getMessage', function(req, res) {
    returnMsg(req.body.user.code, req.body.user.email, function(err, message, product) {
        if (err) {
            // send some meaningful error response
            res.status(500).end();
        } else {
            res.send(product + "\n" + message);
        }
    });
});
function returnMsg(code, email, callback) {
    let callbackCalled = false;
    MongoClient.connect(url, function(err, db) {
        if (err) {
            return callback(err);
        }
        var cursor = db.collection('Orders').find({ "order_id": Number(code) });
        var message = "";
        var product = "";
        cursor.each(function(err, doc) {
            if (err) {
                if (!callbackCalled) {
                    callback(err);
                    callbackCalled = true;
                }
            } else {
                if (doc != null) {
                    message = doc["message"];
                    product = doc["product"];
                } else {
                    console.log("wtf");
                    // error code here
                }
            }
        });
        if (message) {
            console.log(email + " + " + message);
            var document = {
                "Email": email,
                "Message": message
            };
            db.collection("Users").insertOne(document);
        }
        db.close();
        if (!callbackCalled) {
            callback(null, message, product);
        }
    });
}
Personally, I would use promises and use the promise interface in your database rather than callbacks.
This code is still just conceptual because it has other issues you need to deal with such as:
Proper error handling is still largely unfinished.
You aren't actually waiting for things like the insertOne() to finish before proceeding.
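A sketch of that promise-based direction, assuming a MongoDB driver version where connect() resolves to the db handle when called without a callback (as the question's callback code implies):

function returnMsg(code, email) {
    let db;
    return MongoClient.connect(url).then(conn => {
        db = conn;
        // findOne avoids the ambiguity of iterating cursor.each for one record
        return db.collection('Orders').findOne({ order_id: Number(code) });
    }).then(doc => {
        if (!doc) throw new Error('order not found');
        const message = doc.message;
        const product = doc.product;
        // wait for the insert to finish before resolving
        return db.collection('Users').insertOne({ Email: email, Message: message })
            .then(() => ({ message, product }));
    }).then(result => {
        db.close();
        return result;
    }, err => {
        if (db) db.close();
        throw err;
    });
}

app.post('/getMessage', function(req, res) {
    returnMsg(req.body.user.code, req.body.user.email)
        .then(result => res.send(result.product + "\n" + result.message))
        .catch(() => res.status(500).end());
});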

How to use filesystem's createReadStream with Meteor router(NodeJS)

I need to allow the user of my app to download a file with Meteor. Currently, when the user requests to download a file, I insert into a "fileRequests" collection in Mongo a document with the file location and a timestamp of the request, and return the ID of the newly created request. When the client gets the new ID it immediately goes to mydomain.com/uploads/:id. I then use something like this to intercept the request before Meteor does:
var connect = Npm.require("connect");
var Fiber = Npm.require("fibers");
var path = Npm.require('path');
var fs = Npm.require("fs");
var mime = Npm.require("mime");
__meteor_bootstrap__.app
.use(connect.query())
.use(connect.bodyParser()) //I add this for file-uploading
.use(function (req, res, next) {
Fiber(function() {
if(req.method == "GET") {
// get the id here, and stream the file using fs.createReadStream();
}
next();
}).run();
});
I check to make sure the file request was made less than 5 seconds ago, and I immediately delete the request document after I've queried it.
This works, and is secure (enough), I think. No one can make a request without being logged in, and 5 seconds is a pretty small window for someone to hijack the created request URL, but I just don't feel right about my solution. It feels dirty!
So I attempted to use Meteor-Router to accomplish the same thing. That way I can check if they're logged in correctly without doing the 5 second open to the world trickery.
So here's the code I wrote for that:
Meteor.Router.add('/uploads/:id', function(id) {
    var path = Npm.require('path');
    var fs = Npm.require("fs");
    var mime = Npm.require("mime");
    var res = this.response;
    var file = FileSystem.findOne({ _id: id });
    if (typeof file !== "undefined") {
        var filename = path.basename(file.filePath);
        var filePath = '/var/MeteorDMS/uploads/' + filename;
        var stat = fs.statSync(filePath);
        res.setHeader('Content-Disposition', 'attachment; filename=' + filename);
        res.setHeader('Content-Type', mime.lookup(filePath));
        res.setHeader('Content-Length', stat.size);
        var filestream = fs.createReadStream(filePath);
        filestream.pipe(res);
        return;
    }
});
This looks great, fits right in with the rest of the code and is easy to read, no hacking involved, BUT! It doesn't work! The browser spins and spins and never quite knows what to do. I have ZERO error messages coming up. I can keep using the app on other tabs. I don't know what it's doing, it never stops "loading". If I restart the server, I get a 0 byte file with all the correct headers, but I don't get the data.
Any help is greatly appreciated!!
EDIT:
After digging around a bit more, I noticed that trying to turn the response object into a JSON object results in a circular structure error.
Now the interesting thing about this is that when I listen to the filestream for the "data" event, and attempt to stringify the response object I don't get that error. But if I attempt to do the same thing in my first solution(listen to "data" and stringify the response) I get the error again.
So using the Meteor-Router solution something is happening to the response object. I also noticed that on the "data" event response.finished is flagged as true.
filestream.on('data', function(data) {
    fs.writeFile('/var/MeteorDMS/afterData', JSON.stringify(res));
});
The Meteor router installs a middleware to do the routing. All Connect middleware either MUST call next() (exactly once) to indicate that the response is not yet settled or MUST settle the response by calling res.end() or by piping to the response. It is not allowed to do both.
I studied the source code of the middleware (see below). We see that we can return false to tell the middleware to call next(). This means we declare that this route did not settle the response and we would like to let other middleware do their work.
Or we can return a template name, a text, an array [status, text] or an array [status, headers, text], and the middleware will settle the response on our behalf by calling res.end() using the data we returned.
However, by piping to the response, we already settled the response. The Meteor router should not call next() nor res.end().
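In other words, a Connect middleware obeys a simple contract; a schematic sketch in generic Connect/Express style (isMine and fileFor are hypothetical helpers):

var fs = require('fs');

app.use(function (req, res, next) {
    if (!isMine(req)) {
        return next();                           // (a) pass along: call next() exactly once
    }
    if (req.method !== 'GET') {
        return res.end('method not supported');  // (b) settle by ending the response
    }
    fs.createReadStream(fileFor(req)).pipe(res); // (c) settle by piping
    // after (b) or (c), next() must NOT be called as well
});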
We solved the problem by forking the Meteor router and making a small change. We replaced the else in line 87 (after if (output === false)) by:
else if (typeof(output)!="undefined") {
See the commit with sha 8d8fc23d9c in my fork.
This way return; in the route method will tell the router to do nothing. Of course you already settled the response by piping to it.
Source code of the middleware as in the commit with sha f910a090ae:
// hook up the serving
__meteor_bootstrap__.app
    .use(connect.query()) // <- XXX: we can probably assume accounts did this
    .use(this._config.requestParser(this._config.bodyParser))
    .use(function(req, res, next) {
        // need to wrap in a fiber in case they do something async
        // (e.g. in the database)
        if (typeof(Fiber) == "undefined") Fiber = Npm.require('fibers');
        Fiber(function() {
            var output = Meteor.Router.match(req, res);
            if (output === false) {
                return next();
            } else {
                // parse out the various types of response we can have
                // array can be:
                // [content], [status, content], [status, headers, content]
                if (_.isArray(output)) {
                    // copy the array so we aren't actually modifying it!
                    output = output.slice(0);
                    if (output.length === 3) {
                        var headers = output.splice(1, 1)[0];
                        _.each(headers, function(value, key) {
                            res.setHeader(key, value);
                        });
                    }
                    if (output.length === 2) {
                        res.statusCode = output.shift();
                    }
                    output = output[0];
                }
                if (_.isNumber(output)) {
                    res.statusCode = output;
                    output = '';
                }
                return res.end(output);
            }
        }).run();
    });
