I'm just starting out with some Google Cloud services, and I'm trying to get an entity from Datastore.
If the client has an internet connection, everything goes well.
But I want to add a try/catch statement for the cases where the client has no access to Datastore for any reason (like no internet).
Here's my code:
try{
let search = datastore.key(['Client', Client_id])
datastore.get(search, /*{timeout: 1000},*/ function (err, entity) {
console.log('limit >>>', entity.limit)
evt.emit('comparedate', res, entity.limit)
});
}
catch(error){
console.log('Error >>>', error)
}
My problem is that there is no time limit on the connection attempt. When the client has no internet access, the request stays "pending" forever and never reaches the catch block.
I tried some parameters, like Global#CallOptions, but with no success.
Thanks for any help!
EDIT >>>> I know this isn't the most reliable approach, but for now I've worked around it with this code:
evt.on('isonline', (res) => {
try{
require('dns').lookup('google.com',function(err) {
if (err && err.code == "ENOTFOUND") {
console.log('NO INTERNET')
evt.emit('readofflinedata', res)
} else {
console.log('WITH INTERNET')
evt.emit('readonlinedata', res)
}
})
}
catch(error){
res.status(200).send({ error: true, message: error.message })
}
})
The Datastore client uses a library internally called google-gax. You can configure timeouts/etc. by passing in gax options.
datastore.get(key, {
gaxOptions: {timeout: 1000}
}, (err, entity) => {
// ...
});
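If you prefer promises over callbacks, the same option should work there too. This is only a sketch, assuming the gaxOptions support described above and the key built in the question:
// Sketch only: same hypothetical 1 s timeout, using the promise form of get().
const key = datastore.key(['Client', Client_id])
datastore.get(key, { gaxOptions: { timeout: 1000 } })
    .then(([entity]) => console.log('limit >>>', entity.limit))
    .catch((err) => console.log('Error >>>', err))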
I didn't find any parameter to add a timeout to the get function of Datastore. However, you can wrap the call in a Promise and set a timer: if the function takes too long to execute, the promise is rejected with a timeout error.
var Promise = require("bluebird");
var elt = new Promise((resolve, reject) => {
    fun(param, (err) => {
        if (err) reject(err);
        doSomething(); // <- datastore.get() function
        resolve();
    });
});
elt.timeout(1000).then(() => console.log('done'))
    .catch(Promise.TimeoutError, (e) => console.log("timed out"));
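Applied to the datastore.get() call from the question, that could look roughly like this (a sketch, reusing the bluebird Promise, datastore and Client_id from the snippets above):
var lookup = new Promise((resolve, reject) => {
    const search = datastore.key(['Client', Client_id])
    datastore.get(search, (err, entity) => {
        if (err) return reject(err) // reject on API errors
        resolve(entity)             // otherwise resolve with the entity
    })
})
// Give up if Datastore doesn't answer within 1 second.
lookup.timeout(1000)
    .then((entity) => console.log('limit >>>', entity.limit))
    .catch(Promise.TimeoutError, (e) => console.log('timed out'))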
I have a set of functions in Node.js that I would like to run in a certain order. I will provide some mock-up code, abstracted and simplified:
function updateMyApp() {
loadDataToServer()
.then(() => useData())
.then(() => saveData())
.then(() => { console.log("updateMyApp done") })
}
function loadDataToServer() {
    return new Promise( (resolve, reject) => {
        // ...preparing data and saving file to cloud...
        resolve()
    })
}
function handleDataItem(item) {
// Function that fetches data item from database and updates each data item
console.log("Name", item.name)
}
function saveData() {
// Saves the altered data to some place
}
useData is a bit more complex. In it I would like to, in order:
console.log('Starting useData()')
Load data, as json, from the cloud data source
Iterate through every item in the json file and do handleDataItem(item) on it.
When #2 is done -> console.log('useData() done')
Return a resolved promise back to updateMyApp
Go on with saveData() with all data altered.
I want the logs to show:
Starting useData()
Name: Adam
Name: Ben
Name: Casey
useData() done
My take on this is the following:
function useData() {
console.log('Starting useData()')
return new Promise( function(resolve, reject) {
readFromCloudFileserver()
.then((jsonListFromCloud) => {
jsonListFromCloud.forEach((item) => {
handleDataItem(item)
})
})
.then(() => {
resolve() // I put resolve here because it is not until everything is finished above that this function is finished
console.log('useData() done')
}).catch((error) => { console.error(error.message) })
})
}
This seems to work, but as far as I understand it, this is not how one is supposed to do it. Also, it seems to run handleDataItem() outside of this chain, so the logs look like this:
Starting useData()
useData() done
Name: Adam
Name: Ben
Name: Casey
In other words, the handleDataItem() calls don't seem to be finished when the chain moves on to the next .then() step, so I can't be sure all items have been updated by the time it goes on to saveData().
If this is not a good way to handle it, then how should these functions be written? How do I chain the functions properly to make sure everything is done in the right order (as well as making the log events appear in order)?
Edit: As requested, here is handleDataItem() less abstracted.
function handleDataItem(data) {
return new Promise( async function (resolve) {
data['member'] = true
if (data['twitter']) {
const cleanedUsername = twitterApi.cleanUsername(data['twitter']).toLowerCase()
if (!data['twitter_numeric']) {
var twitterId = await twitterApi.getTwitterIdFromUsername(cleanedUsername)
if (twitterId) {
data['twitter_numeric'] = twitterId
}
}
if (data['twitter_numeric']) {
if (data['twitter_protected'] != undefined) {
var twitterInfo = await twitterApi.getTwitterGeneralInfoToDb(data['twitter_numeric'])
data['twitter_description'] = twitterInfo.description
data['twitter_protected'] = twitterInfo.protected
data['twitter_profile_pic'] = twitterInfo.profile_image_url.replace("_normal", '_bigger')
data['twitter_status'] = 2
console.log("Tweeter: ", data)
}
} else {
data['twitter_status'] = 1
}
}
resolve(data)
}).then( (data) => {
db.collection('people').doc(data.marker).set(data)
db.collection('people').doc(data.marker).collection('positions').doc(data['report_at']).set(
{
"lat":data['lat'],
"lon":data['lon'],
}
)
}).catch( (error) => { console.log(error) })
}
The twitterAPI functions called:
cleanUsername: function (givenUsername) {
return givenUsername.split('/').pop().replace('#', '').replace('#', '').split(" ").join("").split("?")[0].trim().toLowerCase()
},
getTwitterGeneralInfoToDb: async function (twitter_id) {
var endpointURL = "https://api.twitter.com/2/users/" + twitter_id
var params = {
"user.fields": "name,description,profile_image_url,protected"
}
// this is the HTTP header that adds bearer token authentication
return new Promise( (resolve,reject) => {
needle('get', endpointURL, params, {
headers: {
"User-Agent": "v2UserLookupJS",
"authorization": `Bearer ${TWITTER_TOKEN}`
}
}).then( (res) => {
console.log("result.body", res.body);
if (res.body['errors']) {
if (res.body['errors'][0]['title'] == undefined) {
reject("Twitter API returns undefined error for :'", cleanUsername, "'")
} else {
reject("Twitter API returns error:", res.body['errors'][0]['title'], res.body['errors'][0]['detail'])
}
} else {
resolve(res.body.data)
}
}).catch( (error) => { console.error(error.message) })
})
},
// Get unique id from Twitter user
// Twitter API
getTwitterIdFromUsername: async function (cleanUsername) {
const endpointURL = "https://api.twitter.com/2/users/by?usernames="
const params = {
usernames: cleanUsername, // Edit usernames to look up
}
// this is the HTTP header that adds bearer token authentication
const res = await needle('get', endpointURL, params, {
headers: {
"User-Agent": "v2UserLookupJS",
"authorization": `Bearer ${TWITTER_TOKEN}`
}
})
if (res.body['errors']) {
if (res.body['errors'][0]) {
if (res.body['errors'][0]['title'] == undefined) {
console.error("Twitter API returns undefined error for :'", cleanUsername, "'")
} else {
console.error("Twitter API returns error:", res.body['errors'][0]['title'], res.body['errors'][0]['detail'])
}
} else {
console.error("Twitter API special error:", res.body)
}
} else {
if (res.body['data']) {
return res.body['data'][0].id
} else {
//console.log("??? Could not return ID, despite no error. See: ", res.body)
}
}
},
You have 3 options to deal with your main issue of async methods in a loop.
Instead of forEach, use map and return promises, then use Promise.all on the returned promises to wait for them all to complete (a sketch of this option follows the list).
Use a for/of loop in combination with async/await.
Use a for await loop.
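For option 1, a minimal sketch, assuming handleDataItem() returns a promise and using the names from the question, could look like this. Note that unlike the sequential rewrite shown further down, this runs the items in parallel:
async function useData() {
    console.log('Starting useData()')
    const jsonListFromCloud = await readFromCloudFileserver()
    // Map every item to its promise and wait for all of them before moving on.
    await Promise.all(jsonListFromCloud.map((item) => handleDataItem(item)))
    console.log('useData() done')
}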
It sounds like there's a problem in the implementation of handleDataItem() and the promise that it returns. To help you with that, we need to see the code for that function.
You also need to clean up useData() so that it properly returns a promise that propagates both completion and errors.
And, if handleDataItem() returns a promise that accurately reflects when its work is done, you also need to change how you call it in a loop here.
Change from this:
function useData() {
console.log('Starting useData()')
return new Promise( function(resolve, reject) {
readFromCloudFileserver()
.then((jsonListFromCloud) => {
jsonListFromCloud.forEach((item) => {
handleDataItem(item)
})
})
.then(() => {
resolve() // I put resolve here because it is not until everything is finished above that this function is finished
console.log('useData() done')
}).catch((error) => { console.error(error.message) })
})
}
to this:
async function useData() {
try {
console.log('Starting useData()')
const jsonListFromCloud = await readFromCloudFileserver();
for (let item of jsonListFromCloud) {
await handleDataItem(item);
}
console.log('useData() done');
} catch (error) {
// log error and rethrow so caller gets the error
console.error(error.message)
throw error;
}
}
The structural changes here are:
Switch to use async/await to more easily handle the asynchronous items in a loop
Remove the promise anti-pattern that wraps new Promise() around an existing promise. There's no need for that, and you weren't capturing or propagating rejections from readFromCloudFileserver(), which is a common mistake when using that anti-pattern.
Rethrow the error inside your catch after logging it, so the error gets propagated back to the caller (a sketch of the calling side follows this list).
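For completeness, here is a sketch of the calling side under the same assumptions, so the rethrown error actually surfaces in updateMyApp():
async function updateMyApp() {
    try {
        await loadDataToServer()
        await useData()
        await saveData()
        console.log("updateMyApp done")
    } catch (error) {
        // any error rethrown by useData() (or the other steps) ends up here
        console.error("updateMyApp failed:", error.message)
    }
}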
I have the following code, and I cannot figure out what I need to do to make this entire process finish before sending a response back to the client. Basically, I want the client to receive a response once everything in the method completes or errors out, not before.
I have tried several things, moving code around and adding some .then() and .catch() blocks that may not even be needed. I'm still fairly new to Node.js, so while I roughly understand how things work, I haven't gotten used to asynchronous coding yet.
let markStepsComplete = async function() {
let stepsToProcess = new Multimap();//multimap JS npm
let results = await dbUtils.getCompletedSteps();
results.forEach(result => {
stepsToProcess.set(result.ticket_id, result.step_id);
});
return new Promise((resolve,reject)=>{
stepsToProcess.forEachEntry(async function(entry, key) {
let payload = {
ticketId: key,
stepIds: entry
}
let response = await updateStep(payload)//returns promise
if (response.statusCode === 200) {
await Promise.all(payload.stepIds.map(async (stepId) => {
try {
await dbUtils.markStepsCompleted(stepId, payload.ticketId);
} catch(err) {
reject(err);
}
}));
}
else {
await Promise.all(payload.stepIds.map(async (stepId) => {
try {
await dbUtils.markStepsProcessed(stepId, payload.ticketId, response.statusCode);
} catch(err) {
console.log(err);
reject(err);
}
}));
}
});
resolve('Steps Marked Complete');
});//promise end
}
This is the route that I am testing right now that makes the call to the above method.
The updateStep() method is the one that actually makes an HTTP request to an outside REST API. That part seems to be working and returns a promise.
app.use('/api/posts',async (req,res,next)=>{
let data = await markStepsComplete.markStepsComplete()
.then((data)=>{
res.status(200).json({message: data})
})
.catch((e)=>{
console.log('error from catch:',e);
})
})
The code above runs and does run some stuff in our database as part of this process, but I get back the "Steps Marked Complete" resolve message before the process finishes.
Since resolve is hit before the process ends, I can never get any of the reject() errors to come back to the client, because resolve has already been called.
Thanks in advance.
I noticed this snippet and wanted to call it out because it won't work:
payload.stepIds.forEach(async (stepId) => {
try {
await dbUtils.markStepsProcessed(stepId, payload.ticketId, response.statusCode);
} catch(err) {
console.log(err);
reject(err);
}
});
When looping through an array of items that each require an async call, you can do something like:
await Promise.all(payload.stepIds.map(async (stepId) => {
try {
await dbUtils.markStepsProcessed(stepId, payload.ticketId, response.statusCode);
} catch(err) {
console.log(err);
reject(err);
}
}));
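Along the same lines, here is a rough sketch of how the whole markStepsComplete() function could be restructured so it only finishes once every entry has been processed. It keeps the names from the question (Multimap, dbUtils, updateStep) and is only a sketch under those assumptions, not a drop-in replacement:
let markStepsComplete = async function () {
    let stepsToProcess = new Multimap(); // multimap JS npm
    let results = await dbUtils.getCompletedSteps();
    results.forEach(result => {
        stepsToProcess.set(result.ticket_id, result.step_id);
    });

    // Collect the async work for every entry instead of firing it inside
    // forEachEntry and resolving before it has finished.
    let work = [];
    stepsToProcess.forEachEntry((entry, key) => {
        work.push((async () => {
            let payload = { ticketId: key, stepIds: entry };
            let response = await updateStep(payload); // returns a promise
            if (response.statusCode === 200) {
                await Promise.all(payload.stepIds.map(stepId =>
                    dbUtils.markStepsCompleted(stepId, payload.ticketId)));
            } else {
                await Promise.all(payload.stepIds.map(stepId =>
                    dbUtils.markStepsProcessed(stepId, payload.ticketId, response.statusCode)));
            }
        })());
    });

    // Rejects if any entry fails, so the caller's .catch() fires; otherwise
    // the resolved value reaches the route handler only after all work is done.
    await Promise.all(work);
    return 'Steps Marked Complete';
};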
I'm new to the Express framework and still learning, and I'm having a problem using .then. The problem is that I have 2 functions and I want the first one to complete before the second starts executing. I'm exporting them as a module.
var ubm = require('./userBasic');
There are 2 functions, setUserData and showUserId; showUserId must execute only after setUserData has performed its operation.
var userId = ubm.setUserData(userName,userEmail,userDOB,moment);
userId.then(ubm.showUserId(userId));
Below are the 2 functions:
module.exports = {
setUserData: function (userName,userEmail,userDOB,moment){
//Performing database activities
return userId;
},
showUserId: function (userId){
console.log(userId);
}
}
When I run it, it says TypeError: Cannot read property 'then' of undefined.
Like I said, I'm very new and still learning, and I was unable to figure out a solution. I did some Google searching and got a brief idea of promises, but I don't know how to implement one here.
Try using promises
module.exports = {
    setUserData: function(userName, userEmail, userDOB, moment) {
        return new Promise(function(resolve, reject) {
            // db stuff
            // call reject(error) if the database work fails
            // call resolve(userId) once the userId is available
        });
    },
    showUserId: function(userId) {
        console.log(userId);
    }
};
So in your execution you would write
ubm.setUserData(username, usereEmail, userDOB, moment)
.then((data) => {
ubm.showUserId(data);
})
.catch((err) => {
console.log(err);
});
One thing to note is that in this instance you could just log data directly, without the need for another function:
ubm.setUserData(username, usereEmail, userDOB, moment)
.then((data) => {
console.log(data);
})
.catch((err) => {
console.log(err);
});
Whatever value you pass into resolve() becomes the value your .then() receives, just as whatever you pass into reject() becomes the error your .catch() receives.
I'm trying to build an API for a single-page web app using AWS Lambda and the Serverless Framework. I want to use Redis Cloud for storage, mostly for its combination of speed and data persistence. I may use more Redis Cloud features in the future, so I'd prefer to avoid using ElastiCache for this. My Redis Cloud instance is running in the same AWS region as my function.
I have a function called related that takes a hashtag from a GET request to an API endpoint, and checks to see if there's an entry for it in the database. If it's there, it should return the results immediately. If not, it should query RiteTag, write the results to Redis, and then return the results to the user.
I'm pretty new to this, so I'm probably doing something adorably naive. Here's the event handler:
'use strict'
const lib = require('../lib/related')
module.exports.handler = function (event, context) {
lib.respond(event, (err, res) => {
if (err) {
return context.fail(err)
} else {
return context.succeed(res)
}
})
}
Here's the ../lib/related.js file:
var redis = require('redis')
var jsonify = require('redis-jsonify')
var rt = require('./ritetag')
var redisOptions = {
host: process.env.REDIS_URL,
port: process.env.REDIS_PORT,
password: process.env.REDIS_PASS
}
var client = jsonify(redis.createClient(redisOptions))
module.exports.respond = function (event, callback) {
var tag = event.hashtag.replace(/^#/, '')
var key = 'related:' + tag
client.on('connect', () => {
console.log('Connected:', client.connected)
})
client.on('end', () => {
console.log('Connection closed.')
})
client.on('ready', function () {
client.get(key, (err, res) => {
if (err) {
client.quit()
callback(err)
} else {
if (res) {
// Tag is found in Redis, so send results directly.
client.quit()
callback(null, res)
} else {
// Tag is not yet in Redis, so query Ritetag.
rt.hashtagDirectory(tag, (err, res) => {
if (err) {
client.quit()
callback(err)
} else {
client.set(key, res, (err) => {
if (err) {
callback(err)
} else {
client.quit()
callback(null, res)
}
})
}
})
}
}
})
})
}
All of this works as expected, to a point. If I run the function locally (using sls function run related), I have no problems whatsoever—tags are read from and written to the Redis database as they should be. However, when I deploy it (using sls dash deploy), it works the first time it's run after deployment, and then stops working. All subsequent attempts to run it simply return null to the browser (or Postman, or curl, or the web app). This is true regardless of whether the tag I use for testing is already in the database or not. If I then re-deploy, making no changes to the function itself, it works again—once.
On my local machine, the function first logs Connected: true to the console, then the results of the query, then Connection closed. On AWS, it logs Connected: true, then the results of the query, and that's it. On the second run, it logs Connection closed. and nothing else. On the third and all subsequent runs, it logs nothing at all. Neither environment ever reports any errors.
It seems pretty clear that the problem is with the connection to Redis. If I don't close it in the callbacks, then subsequent attempts to call the function just time out. I've also tried using redis.unref instead of redis.quit, but that didn't seem to make any difference.
Any help would be greatly appreciated.
I've now solved my own problem, and I hope I can be of help to someone experiencing this problem in the future.
There are two major considerations when connecting to a database like I did in the code above from a Lambda function:
Once context.succeed(), context.fail(), or context.done() is called, AWS may freeze any processes that haven't finished yet. This is what was causing AWS to log Connection closed on the second call to my API endpoint—the process was frozen just before Redis finished closing, then thawed on the next call, at which point it continued right where it left off, reporting that the connection was closed. Takeaway: if you want to close your database connection, make sure it's fully closed before you call one of those methods. You can do this by putting a callback in an event handler that's triggered by a connection close (.on('end'), in my case).
If you split your code into separate files and require them at the top of each file, like I did, Amazon will cache as many of those modules as possible in memory. If that's causing problems, try moving the require() calls inside a function instead of at the top of the file, then exporting that function. Those modules will then be re-imported whenever the function is run.
Here's my updated code. Note that I've also put my Redis configuration into a separate file, so I can import it into other Lambda functions without duplicating code.
The Event Handler
'use strict'
const lib = require('../lib/related')
module.exports.handler = function (event, context) {
lib.respond(event, (err, res) => {
if (err) {
return context.fail(err)
} else {
return context.succeed(res)
}
})
}
Redis Configuration
module.exports = () => {
const redis = require('redis')
const jsonify = require('redis-jsonify')
const redisOptions = {
host: process.env.REDIS_URL,
port: process.env.REDIS_PORT,
password: process.env.REDIS_PASS
}
return jsonify(redis.createClient(redisOptions))
}
The Function
'use strict'
const rt = require('./ritetag')
module.exports.respond = function (event, callback) {
const redis = require('./redis')()
const tag = event.hashtag.replace(/^#/, '')
const key = 'related:' + tag
let error, response
redis.on('end', () => {
callback(error, response)
})
redis.on('ready', function () {
redis.get(key, (err, res) => {
if (err) {
redis.quit(() => {
error = err
})
} else {
if (res) {
// Tag is found in Redis, so send results directly.
redis.quit(() => {
response = res
})
} else {
// Tag is not yet in Redis, so query Ritetag.
rt.hashtagDirectory(tag, (err, res) => {
if (err) {
redis.quit(() => {
error = err
})
} else {
redis.set(key, res, (err) => {
if (err) {
redis.quit(() => {
error = err
})
} else {
redis.quit(() => {
response = res
})
}
})
}
})
}
}
})
})
}
This works exactly as it should—and it's blazing fast, too.
The Node MongoDB driver docs recommend using next/each for a large number of documents, so that everything isn't loaded into memory as it would be with toArray.
So I thought my sample code should work as is, but it just returns one document.
What is the correct way to deal with this problem?
This is my code sample :
var findAsync = function (collection,query) {
return mongodb.MongoClient.connectAsync(mongodbServerString)
.then(function (db) {
return [db.collection(collection).find(query), db];
});
};
findAsync("UserProfile",{})
.spread(function (cursor,db) {
return [cursor.project({Email:true}),db];
})
.spread(function (cursor, db) {
return cursor.eachAsync().then(function (doc) {
console.log(doc);
}).catch(function () {
db.close();
});
});
Promises represent singular values. A promise is basically like a function's return value, and since a function cannot return multiple values, it wouldn't make sense to convert each to a promise-returning function.
What you can do is either convert it to an Observable-returning function and use .forEach on that to get a promise back for the completion of the sequence, or implement something similar manually:
function each(cursor, fn) {
    return new Promise((resolve, reject) => {
        cursor.forEach(doc => {
            try {
                fn(doc);
            } catch (e) {
                cursor.close();
                reject(e);
            }
        }, err => { // finished callback
            if (err) reject(err);
            else resolve();
        });
    });
}
Which would let you write:
each(cursor, doc => console.log(doc)).then(...).catch(...)
Also note that Mongo connections are meant to be persistent: you're supposed to connect once when the server starts and keep the connection open for as long as the server runs.
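As a rough illustration of that last point, reusing the promisified mongodb handle and mongodbServerString from the question, you could connect once and share the connection across queries (a sketch only):
// Connect once at startup; every query reuses the same pending/established connection.
var dbPromise = mongodb.MongoClient.connectAsync(mongodbServerString);

function findEmailsAsync(query) {
    return dbPromise.then(function (db) {
        var cursor = db.collection("UserProfile").find(query).project({ Email: true });
        return each(cursor, function (doc) { console.log(doc); });
    });
}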