Node.js Google Datastore hangs with no error message

I have a weird problem using @google-cloud/datastore in my Node.js app:
const googleDatastoreFactory = () => {
    console.log('inside the factory');
    const Datastore = require('@google-cloud/datastore');
    console.log('imported datastore stuff');
    return new Datastore();
};
export default googleDatastoreFactory;
The above factory method hangs at this line
const Datastore = require('@google-cloud/datastore');
It literally just sits there (I know this from my console.log calls). There is no logging or error message from @google-cloud/datastore.
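One way to narrow this down (a minimal sketch, not from the original post) is to time the bare require in isolation, outside of any factory or framework code, to confirm it is the module load itself that stalls:
// repro.js - run with: node repro.js
console.time('require @google-cloud/datastore');
const Datastore = require('@google-cloud/datastore');
console.timeEnd('require @google-cloud/datastore');
console.log('module loaded:', typeof Datastore);
If the timer never prints, the hang really is inside the module load itself, which in this library often points at a heavy native dependency such as grpc rather than your factory code.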

Related

Getting multiple 404 and 405 errors in Teams Bot

I'm doing some investigating around a Teams Bot that I currently have in development. I'm seeing a lot of 404, and in some other cases, 405 errors when I look in Application Insights - I'm trying to understand if I've missed anything.
I have the App Service set to 'Always On', so my assumption is that it's polling the service every 5 minutes to keep it from idling out. However, I'm seeing a lot of 404 failures, specifically pointing to the GET / endpoint, and in other cases a 405 error as well, which points to the /api/messages endpoint.
I have the App ID and App Password set in the environment variables and I've set the storage using a Cosmos DB too as shown in the index.js file below. I have also checked the Teams manifest to ensure it's pointing to the Bot ID and recently added the bot domain as well to see if that makes a difference.
const restify = require('restify');
const path = require('path');
// Import required bot services.
// See https://aka.ms/bot-services to learn more about the different parts of a bot.
const { BotFrameworkAdapter, ConversationState, UserState } = require('botbuilder');
// Import required services for bot telemetry
const { ApplicationInsightsTelemetryClient, TelemetryInitializerMiddleware } = require('botbuilder-applicationinsights');
const { TelemetryLoggerMiddleware, NullTelemetryClient } = require('botbuilder-core');
// Import our custom bot class that provides a turn handling function.
const { DialogBot } = require('./bots/dialogBot');
const { ProvisioningProfileDialog } = require('./dialogs/provisioningProfileDialog');
// Read environment variables from .env file
const ENV_FILE = path.join(__dirname, '.env');
require('dotenv').config({ path: ENV_FILE });
// Create the adapter. See https://aka.ms/about-bot-adapter to learn more about using information from
// the .bot file when configuring your adapter.
const adapter = new BotFrameworkAdapter({
    appId: process.env.MicrosoftAppId,
    appPassword: process.env.MicrosoftAppPassword
});
// Define the state store for your bot.
const { CosmosDbPartitionedStorage } = require('botbuilder-azure');
const cosmosStorage = new CosmosDbPartitionedStorage({
    cosmosDbEndpoint: process.env.CosmosDbEndpoint,
    authKey: process.env.CosmosDbAuthKey,
    databaseId: process.env.CosmosDbDatabaseId,
    containerId: process.env.CosmosDbContainerId,
    compatibilityMode: false
});
// Create conversation state with storage provider.
const conversationState = new ConversationState(cosmosStorage);
const userState = new UserState(cosmosStorage);
// Create the main dialog.
const dialog = new ProvisioningProfileDialog(userState);
const bot = new DialogBot(conversationState, userState, dialog);
// Catch-all for errors.
const onTurnErrorHandler = async (context, error) => {
    // This check writes out errors to console log vs. app insights.
    // NOTE: In a production environment, you should consider logging this to Azure
    // Application Insights.
    console.error(`\n [onTurnError] unhandled error: ${ error }`);
    // Send a trace activity, which will be displayed in Bot Framework Emulator
    await context.sendTraceActivity(
        'OnTurnError Trace',
        `${ error }`,
        'https://www.botframework.com/schemas/error',
        'TurnError'
    );
    // Send a message to the user
    await context.sendActivity('The bot encountered an error or bug.');
    await context.sendActivity('To continue to run this bot, please fix the bot source code.');
    // Clear out state
    await conversationState.delete(context);
};
// Set the onTurnError for the singleton BotFrameworkAdapter.
adapter.onTurnError = onTurnErrorHandler;
// Add telemetry middleware to the adapter middleware pipeline
var telemetryClient = getTelemetryClient(process.env.InstrumentationKey);
// Assign the telemetry client to the dialog only after it has been created;
// assigning it earlier would leave dialog.telemetryClient undefined.
dialog.telemetryClient = telemetryClient;
var telemetryLoggerMiddleware = new TelemetryLoggerMiddleware(telemetryClient);
var initializerMiddleware = new TelemetryInitializerMiddleware(telemetryLoggerMiddleware);
adapter.use(initializerMiddleware);
// Creates a new TelemetryClient based on an instrumentation key
function getTelemetryClient(instrumentationKey) {
    if (instrumentationKey) {
        return new ApplicationInsightsTelemetryClient(instrumentationKey);
    }
    return new NullTelemetryClient();
}
// Create HTTP server.
const server = restify.createServer();
server.listen(process.env.port || process.env.PORT || 3978, function() {
    console.log(`\n${ server.name } listening to ${ server.url }.`);
    console.log('\nGet Bot Framework Emulator: https://aka.ms/botframework-emulator');
    console.log('\nTo talk to your bot, open the emulator and select "Open Bot"');
});
// Listen for incoming requests.
server.post('/api/messages', (req, res) => {
    adapter.processActivity(req, res, async (context) => {
        // Route the message to the bot's main handler.
        await bot.run(context);
    });
});
Whilst the bot appears to run okay for the most part, am I missing something with these errors, or is this expected behaviour since it's polling for a response?
Thanks in advance
Does your bot contain a web page as well? The Node samples do not, but the .NET samples do. If not, it would make sense, of course, to receive a 404 on GET /. I tend to agree with you that the Always On polling might be the cause.
Bots typically (especially when created from a template or sample) do not handle GET requests to /api/messages; everything is handled using POST.
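If the 404s on GET / bother you, one option (a sketch, not from the original answers; the route and payload are assumptions) is to give the restify server a trivial health-check route for the Always On pings to hit:
// Hypothetical health-check endpoint to answer the Always On GET / pings.
server.get('/', (req, res, next) => {
    res.send(200, { status: 'ok' });
    return next();
});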

Node.js not waiting for async function to execute first in app.js

I want to retrieve my JWT key from AWS Secrets Manager and then run passport.js, which will load the key.
// app.js
// Retrieve secrets from AWS
(async () => {
    console.log("Retrieving AWS Secrets");
    await awsSecretManager.getAllSecrets();
})();
require('./components/auth/passport.js');
However, before the getAllSecrets function completes, passport.js has already executed. I can verify this from a log line I wrote in that file.
const getAllSecrets = async () => {
    try {
        // DB Prod/Dev Secret
        const dbProdSecrets = await getSecretValue("db");
        process.env.RDS_HOST = dbProdSecrets.SecretString.host;
        process.env.RDS_USER = dbProdSecrets.SecretString.username;
        process.env.RDS_PASSWORD = dbProdSecrets.SecretString.password;
        process.env.RDS_DATABASE = "database";
        // JWT Secret
        const jwtSecret = await getSecretValue("jwt");
        process.env.JWT_SECRET = jwtSecret.SecretString.JWT_SECRET;
    } catch (err) {
        console.log(err);
    }
};
As soon as you run app.js, your Immediately Invoked Function Expression runs and fetches the secrets in the background, while your passport file is also run. You need to wait for it, so place the require after the secrets have been fetched:
// app.js
// Retrieve secrets from AWS
(async () => {
    console.log("Retrieving AWS Secrets");
    await awsSecretManager.getAllSecrets();
    require('./components/auth/passport.js');
})();
I would rather wait for everything to finish before starting the server:
const app = require('express')();
const server = async () => {
    // fetching secrets from aws secrets
    await awsSecretManager.getAllSecrets();
    // on fetching success
    // ... doing other imports
    require('./components/auth/passport.js');
    // configuring my server
    app.get('/', (req, res) => {
        return res.send("hello");
    });
    // start the server finally
    app.listen(5000, () => console.log("okk"));
};
server();

How to Configure AWS X-Ray for node.js app not using Express middleware?

I am trying to integrate AWS X-Ray with my Node.js API hosted on AWS Lambda (serverless).
X-Ray works as intended for the APIs that use the Express middleware, and I'm able to see traces in the AWS Console.
For async functions that don't use the Express framework, I'm facing issues with the integration.
I tried enabling manual mode, but I get an error saying Lambda does not support manual mode.
I also referred to the "Developing custom solutions for automatic mode" section of the docs, but no luck.
Can someone help me out with this?
'use strict';
const AWSXRay = require('aws-xray-sdk-core');
const Aws = AWSXRay.captureAWS(require('aws-sdk'))
const capturePostgres = require('aws-xray-sdk-postgres');
const { Client } = capturePostgres(require('pg'));
module.exports.test = async (event, context) => {
    var ns = AWSXRay.getNamespace();
    const segment = new AWSXRay.Segment('Notifications_push');
    ns.enter(ns.createContext());
    AWSXRay.setSegment(segment);
    // ...
};
So, when in Lambda, the SDK creates a placeholder (facade) segment automatically. There is a more in-depth explanation here: https://github.com/aws/aws-xray-sdk-node/issues/148
All you need is:
const AWSXRay = require('aws-xray-sdk-core');
// let's patch the AWS SDK
const AWS = AWSXRay.captureAWS(require('aws-sdk'));
module.exports.test = async (event, context) => {
    // All capturing will work out of the box
    var sqs = new AWS.SQS({ apiVersion: '2012-11-05' });
    var params = {...}
    // no need to add code, just a regular SQS call
    sqs.sendMessage(params, function(err, data) {
        if (err) {
            console.log("Error", err);
        } else {
            console.log("Success", data.MessageId);
        }
    });
    // if you want to create subsegments manually, simply do
    const seg = AWSXRay.getSegment();
    const subseg = seg.addSubsegment('mynewsubsegment');
    subseg.close();
    // no need to close the Lambda segment
};
Additional documentation here: https://docs.aws.amazon.com/lambda/latest/dg/nodejs-tracing.html

What is a simple command I can run to test if I can connect to PostgreSQL using node-postgres?

My file db/index.js
const { Pool } = require('pg');
const pool = new Pool();
module.exports = {
    query: (text, params, callback) => {
        return pool.query(text, params, callback);
    }
};
In my main file main.js I do:
const db = require('./db/index');
What command can I run on db to figure out if node-postgres is able to connect to my Postgres setup correctly?
To simply test whether you can connect from Node.js to a PostgreSQL database, you can use the following snippet:
const { Pool } = require('pg')
const pool = new Pool()
pool.query('SELECT NOW()', (err, res) => {
    console.log(err, res)
    pool.end()
})
// or you can use async/await instead of callbacks
// (wrap this in an async function; top-level await only works in ES modules)
const res = await pool.query('SELECT NOW()')
console.log(res)
await pool.end()
This should return the response in the form of a pg.Result object containing the current datetime.
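For example (a small sketch building on the async/await variant above), the datetime lives in the first row of res.rows, under a column named now:
const res = await pool.query('SELECT NOW()')
console.log(res.rows[0].now) // a JavaScript Date, e.g. 2021-01-01T12:00:00.000Z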
node-postgres uses the same environment variables as libpq to connect to a PostgreSQL server, so to run the above code you can invoke it like so:
PGUSER=postgres PGHOST=127.0.0.1 PGPASSWORD=mysecretpassword PGDATABASE=postgres PGPORT=5432 node script.js
But you have to provide the connection details for your database instance.
The default values for the environment variables used are:
PGHOST='localhost'
PGUSER=process.env.USER
PGDATABASE=process.env.USER
PGPASSWORD=null
PGPORT=5432
You can also provide the connection details programmatically, directly to either the Pool or Client instances. You can also use the connection string URI.
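For example (a sketch; the credentials shown are placeholders), the two styles look like this:
const { Pool } = require('pg')
// explicit configuration object
const pool = new Pool({
    host: 'localhost',
    user: 'postgres',
    password: 'mysecretpassword',
    database: 'postgres',
    port: 5432
})
// or a connection string URI
const poolFromUri = new Pool({
    connectionString: 'postgresql://postgres:mysecretpassword@localhost:5432/postgres'
})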
You can read more in the node-postgres documentation in the "connecting" section.

Google Cloud Functions: Could not authenticate request

I am using a Node.js function on Google Cloud Functions to save Pub/Sub messages to GCS (Storage), but it randomly gives me the following error for some messages (most of the messages are successfully written):
"Error: Could not authenticate request. Could not load the default credentials. Browse to https://developers.google.com/accounts/docs/application-default-credentials for more information."
It doesn't make sense, since everything running there uses the same service account for all messages, which has the proper permissions, and all messages come from the same source and go to the same destination. Can someone enlighten me on what I could do?
I'm using @google-cloud/storage version 0.8.0.
/**
 * Triggered from a message on a Cloud Pub/Sub topic.
 *
 * @param {!Object} event The Cloud Functions event.
 * @param {!Function} callback The callback function.
 */
const bucketName = 'backup-queue-bucket';
const util = require('util');
const gcs = require('@google-cloud/storage')();
const crypto = require('crypto');
exports.backupQueue = function backupQueue(event, callback) {
    // The Cloud Pub/Sub Message object.
    const timestamp = event.timestamp;
    const resources = event.resource.split('/');
    const pubsubMessage = event.data;
    const messageContent = Buffer.from(pubsubMessage.data, 'base64').toString();
    // We're just going to log the message to prove that it worked.
    var queueName = resources[resources.length - 1];
    console.log(`Message received: ${messageContent} in queue ${queueName}`);
    const filename = timestamp + '_' + crypto.createHash('md5').update(messageContent).digest('hex');
    const bucket = gcs.bucket(bucketName);
    const file = bucket.file(queueName + '/' + filename);
    const fs = file.createWriteStream({});
    fs.on('finish', function () {
        console.log(`Message ${filename} successfully written to file.`);
    });
    fs.on('error', function (error) {
        console.warn(`Message ${filename} could not be written to file. Retry will be called. Error: ${error.message}`);
        // Pass a function reference; calling backupQueue(...) directly would run it immediately.
        setTimeout(() => backupQueue(event, callback), 1000);
    });
    fs.write(Buffer.from(pubsubMessage.data, 'base64').toString());
    fs.end();
    callback();
};
EDIT:
I opened an issue on google-cloud-node and they confirmed it as a bug. It should be fixed in the next release.
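Until that release lands, one possible workaround (a sketch, not from the issue thread; the project ID and key file path are placeholders) is to pass the service account credentials to the client explicitly instead of relying on the application-default credential lookup:
// Hypothetical explicit-credentials setup; projectId and keyFilename are placeholders.
const gcs = require('@google-cloud/storage')({
    projectId: 'my-project-id',
    keyFilename: '/path/to/service-account.json'
});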