No json_payload for DialogFlow logs in Stackdriver in GCP - dialogflow-es

Recently, I tried to retrieve Dialogflow logs (i.e. containing everything regarding session information/interaction) from Stackdriver to perform custom analysis for a dashboard, by enabling the Google Cloud logging option in the agent settings.
However, after checking the format of the Dialogflow log entries, it seems only an unstructured text_payload is available (see the examples below), not a structured json_payload.
The pain is that I cannot easily parse the text_payload to get the data I want from it. Therefore, my questions are:
Can json_payload be made available by enabling some setting that I missed?
If for some reason json_payload is not available, is there any alternative format that is easier to use for data retrieval, or any parser I can apply to the text_payload?
Thanks in advance.
A log entry format of a Dialogflow request:
{
  "textPayload": "Dialogflow Request : {"session":"44885105","query_input":"{\n \"event\": {\n \"name\": \"WELCOME\",\n \"parameters\": {\n }\n }\n}","timezone":"Australia/Sydney"}",
  "insertId": "gnp3xsg27351ns",
  "resource": {
    "type": "global",
    "labels": {
      "project_id": "ai-hackathon-2020-lrwc"
    }
  },
  "timestamp": "2020-08-07T04:11:29.737Z",
  "severity": "INFO",
  "labels": {
    "request_id": "05f6f343-a646-42e0-8181-48c2e853e21b-0820055c",
    "protocol": "V2",
    "type": "dialogflow_request"
  },
  "logName": "projects/ai-hackathon-2020-lrwc/logs/dialogflow_agent",
  "trace": "44885105",
  "receiveTimestamp": "2020-08-07T04:11:29.800030654Z"
}
A log entry format of a Dialogflow response:
{
  "textPayload": "Dialogflow Response : id: "05f6f343-a646-42e0-8181-48c2e853e21b-0820055c"\nlang: "en"\nsession_id: "44885105"\ntimestamp: "2020-08-07T04:11:29.747Z"\nresult {\n source: "agent"\n resolved_query: "WELCOME"\n action: "input.welcome"\n score: 1.0\n parameters {\n }\n contexts {\n name: "defaultwelcomeintent-followup"\n lifespan: 2\n parameters {\n }\n }\n metadata {\n intent_id: "22498e9a-efcf-43e0-a945-36a7ef4c702d"\n intent_name: "Default Welcome Intent"\n webhook_used: "false"\n webhook_for_slot_filling_used: "false"\n is_fallback_intent: "false"\n }\n fulfillment {\n speech: "Hey Good Day! what kind of issue do you want to report?"\n messages {\n lang: "en"\n type {\n number_value: 0.0\n }\n speech {\n string_value: "Hey Good Day! what kind of issue do you want to report?"\n }\n }\n }\n}\nstatus {\n code: 200\n error_type: "success"\n}\n",
  "insertId": "gnp3xsg27351nt",
  "resource": {
    "type": "global",
    "labels": {
      "project_id": "ai-hackathon-2020-lrwc"
    }
  },
  "timestamp": "2020-08-07T04:11:29.789Z",
  "severity": "INFO",
  "labels": {
    "type": "dialogflow_response",
    "protocol": "V2",
    "request_id": "05f6f343-a646-42e0-8181-48c2e853e21b-0820055c"
  },
  "logName": "projects/ai-hackathon-2020-lrwc/logs/dialogflow_agent",
  "trace": "44885105",
  "receiveTimestamp": "2020-08-07T04:11:29.800030654Z"
}

I had a look at this document on Processing payloads and found that when the log line is a serialized JSON object and the detect_json option is enabled, the output plugin transforms the log entry into a structured (JSON) payload. By default this option is enabled for VM instances running in the App Engine flexible environment and on Google Kubernetes Engine, whereas it is not enabled by default for VM instances running in the App Engine standard environment.
However, it is possible to customize the agent's configuration to support ingesting structured logs from additional resources. The documentation on Streaming structured (JSON) log records to Cloud Logging might be helpful.
The payload of log records streamed by a custom-configured Logging agent can be either a single unstructured text message (textPayload) or a structured JSON message (jsonPayload).
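For reference, here is a minimal sketch of what enabling detect_json in a custom Logging agent configuration could look like; the file path and match pattern reflect a default google-fluentd install and are assumptions, not something taken from the question:
# /etc/google-fluentd/google-fluentd.conf (assumed default location)
<match **>
  @type google_cloud
  # When a log line is serialized JSON, emit it as jsonPayload
  # instead of textPayload
  detect_json true
</match>
After editing the file, the agent needs a restart (for example, sudo service google-fluentd restart) before the setting takes effect.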

You can use the parse-dialogflow-log module like so:
First remove the prefixes
Dialogflow fulfillment response :
Dialogflow fulfillment request :
Dialogflow Request :
and Dialogflow Response :
For the first three, just parse the remaining string with JSON.parse().
Parse the last one with the parse-dialogflow-log module, as shown in this simple function I made (it can be made more efficient).
const parser = require('parse-dialogflow-log');

async function parsePayload(entries) {
  // entries is the array of raw log entries; each entry's textPayload is
  // replaced in place with its parsed form where possible.
  const result = await entries.map((e) => {
    const text = e.textPayload;
    let body;
    let parsed;

    // The slice lengths below (21, 31, 32, 34) correspond to the lengths of
    // the "Dialogflow ... : " prefixes listed above.
    const head = text.slice(0, 31);
    if (head.includes(':')) {
      const prefix = text.slice(0, 21);
      body = text.replace(prefix, '{') + '}';
      try {
        parsed = parser.parse(body);
        e.textPayload = parsed;
      } catch (error) {
        console.log('ERROR AT: ' + prefix);
        try {
          body = text.replace(prefix, '');
          e.textPayload = JSON.parse(body);
        } catch (error) {
          console.log('cannot parse req');
        }
      }
    } else {
      const head32 = text.slice(0, 32);
      if (head32.includes(':')) {
        body = text.replace(head32, '');
        try {
          parsed = parser.parse(body);
          e.textPayload = parsed;
        } catch (error) {
          console.log('ERROR AT: ' + head32);
          try {
            e.textPayload = JSON.parse(body);
          } catch (error) {
            console.log('cannot parse');
          }
        }
      } else {
        const head34 = text.slice(0, 34);
        body = text.replace(head34, '');
        try {
          parsed = parser.parse(body);
          e.textPayload = parsed;
        } catch (error) {
          console.log('ERROR AT: ' + head34);
          try {
            e.textPayload = JSON.parse(body);
          } catch (error) {
            console.log('cannot parse');
          }
        }
      }
    }
    return e;
  });
  return result;
}
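A quick usage sketch, assuming the log entries have already been exported from Cloud Logging into a local JSON file (the file name here is hypothetical):
// Hypothetical input: an array of exported log entries like the ones above
const entries = require('./dialogflow-log-entries.json');

parsePayload(entries).then((parsed) => {
  // textPayload is now an object rather than a raw string
  console.log(JSON.stringify(parsed[0].textPayload, null, 2));
});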

Related

google.protobuf.Struct is coming back as just fields in NodeJS

I have the following response message in my proto file, used from Node.js:
message WorkSpaceResponse {
  google.protobuf.Struct workspace = 1;
}
And below is the service function that sends back the response:
let sample = {
  "name": "Test Workspace",
  "id": "sampleId"
};
callback(null, { workspace: sample });
However, when the response gets back to the client, all I'm getting is the following:
{
  "fields": {}
}
Why is that? I was expecting the result to be:
{
  "name": "Test Workspace",
  "id": "sampleId"
}
What am I doing wrong?

How can I fetch CouchDB server version via PouchDB

All I want to get is this object:
{
  "couchdb": "Welcome",
  "version": "3.1.1",
  "git_sha": "ce596c65d",
  "uuid": "ff0e85a5e76efdf116e1394e1a94a70f",
  "features": [
    "access-ready",
    "partitioned",
    "pluggable-storage-engines",
    "reshard",
    "scheduler"
  ],
  "vendor": { "name": "The Apache Software Foundation" }
}
But I can't figure out how I can fetch the root server URL.
Maybe there is another option in PouchDB for getting the CouchDB server version.
EDIT:
I have found this function in the source code, but it doesn't return the info described in the comment above the function. GitHub link
// Calls GET on the host, which gets back a JSON string containing
// couchdb: A welcome string
// version: The version of CouchDB it is running
api._info = function (callback) {
  setup().then(function () {
    return ourFetch(genDBUrl(host, ''));
  }).then(function (response) {
    return response.json();
  }).then(function (info) {
    info.host = genDBUrl(host, '');
    callback(null, info);
  }).catch(callback);
};
After digging a bit into the source code, I have found the solution.
infoDb
  .fetch('/')
  .then((res) => {
    return res.json();
  })
  .then((res) => {
    console.log('FETCH', res);
  });
Result:
{
  "couchdb": "Welcome",
  "version": "3.1.1",
  "git_sha": "ce596c65d",
  "uuid": "ff0e85a5e76efdf116e1394e1a94a70f",
  "features": [
    "access-ready",
    "partitioned",
    "pluggable-storage-engines",
    "reshard",
    "scheduler"
  ],
  "vendor": {
    "name": "The Apache Software Foundation"
  }
}
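For context, a minimal sketch of how the infoDb handle used above might be created; the URL and database name are placeholders, not part of the original answer:
const PouchDB = require('pouchdb');

// Hypothetical remote CouchDB database; fetch('/') then resolves against
// the server root and returns the welcome object shown above
const infoDb = new PouchDB('http://localhost:5984/mydb');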

express-graphql returns 500 status on errors, which the iOS Apollo client can't read. Solution?

I have created an express-graphql server where, if a resolver encounters an error, an Error object is thrown. This results in an errors array with the error and a 500 HTTP status.
I've created a SwiftUI app using Apollo as the GraphQL client. Responses with a 500 status go into the failure handler, from which I can't read the errors sent by the server.
I have looked into changing the HTTP status from 500, to no avail. Has anyone else had this issue? How do I solve it?
In Postman I get:
{
  "errors": [
    {
      "message": "Email invalid",
      "locations": [
        {
          "line": 2,
          "column": 5
        }
      ],
      "path": [
        "recover"
      ]
    }
  ],
  "data": null,
  "extensions": {
    "runTime": 198
  }
}
And in the resolver I throw:
if (!email) {
  throw new HTTP404Error('Email invalid');
}
The error class:
class HTTP404Error extends HTTPClientError {
  constructor(message = 'Not found') {
    super(message);
    this.statusCode = 404;
  }
}
My catch block is as follows:
.catch((e) => {
  if (e instanceof PrismaClientValidationError) {
    throw new HTTP409Error('Validation error');
  } else {
    if (e.code === 'P2002') {
      throw new HTTP409Error('Constraint violation');
    }
    throw new HTTP400Error(e.message);
  }
})
As the app is a hybrid REST and GraphQL server, all errors thrown in the REST endpoints like 'metrics' and 'healthcheck' are handled as expected. It's only the GraphQL endpoint where express-graphql turns every error into a 500 with a result body.
The only time express-graphql will throw 500 errors is if the schema itself is invalid. It will not happen for errors that are thrown inside a resolver -- the status will still be 200 if those are the only errors you encounter.
If the schema is invalid, your server shouldn't start to begin with. You can manually call validateSchema on your schema before starting your server and exit your process with an error if any errors are returned.
const { validateSchema } = require("graphql");

const schemaValidationErrors = validateSchema(schema);
if (schemaValidationErrors.length) {
  console.log('Schema is not valid', schemaValidationErrors);
  process.exit(1);
}
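To illustrate the point about resolver errors, here is a minimal sketch (the schema, resolver name, and port are made up for the example, and a recent express-graphql version with the graphqlHTTP named export is assumed): a resolver that throws still yields an HTTP 200 response whose body contains an errors array.
const express = require('express');
const { graphqlHTTP } = require('express-graphql');
const { buildSchema } = require('graphql');

// Toy schema whose only resolver always throws
const schema = buildSchema('type Query { recover(email: String): String }');
const root = {
  recover: () => { throw new Error('Email invalid'); },
};

const app = express();
app.use('/graphql', graphqlHTTP({ schema, rootValue: root }));
app.listen(4000);
// Querying { recover(email: "x") } returns HTTP 200 with an "errors"
// array in the JSON body, not a 500.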

How to Handle Custom Lambda Errors in API Gateway Using Python

In my app I want to handle custom errors like 500, 403, 201 and 400. I have tried this in Node.js and it works fine, but I am working with Python and I couldn't find any solution in Python for handling the HTTP status codes. Is there any way to handle them? That would help me a lot.
var myErrorObj = {
  status: 500,
  errorType: "InternalServerError",
  httpStatus: 233,
  requestId: context.awsRequestId,
  trace: {
    "function": "abc()",
    "line": 123,
    "file": "abc.js"
  }
};

const responseInvoke = (eventData) => {
  return lambda.invoke({
    FunctionName: 'test777',
    Payload: JSON.stringify({ "eventData": event }) // pass param
  }).promise();
};

responseInvoke(event)
  .then(res => {
    console.log('*** response *** ', res);
    const parsedRes = JSON.parse(res.Payload);
    if (parsedRes.status === 'success') {
      callback(null, res.Payload);
    } else {
      context.fail(JSON.stringify(myErrorObj));
    }
  })
  .catch(err => {
    console.log(err);
    context.fail(JSON.stringify(myErrorObj));
  });
Here is the code where the Node.js Lambda invokes the Python Lambda. In the Python Lambda I return success or fail messages to the Node.js Lambda, and based on that message I send the response to API Gateway.

AWS lambda function- 'An error has occurred: Received error response from Lambda: Handled'

I am working on AWS Lex to create a chatbot, using Node.js in AWS Lambda.
Error: An error has occurred: Received error response from Lambda:
Handled
Lambda function:
var aws = require('aws-sdk');
var ses = new aws.SES({ region: 'us-east-1' });

exports.handler = function (event, context, callback) {
  var eParams = {
    Destination: {
      ToAddresses: [event.currentIntent.slots.Email]
    },
    Message: {
      Body: {
        Text: {
          Data: "Hi, How are you?"
        }
      },
      Subject: {
        Data: "Title"
      }
    },
    Source: "abc#gmail.com"
  };
  var email = ses.sendEmail(eParams, function (err, data) {
    if (err) {
      console.log(err);
    } else {
      context.succeed(event);
    }
  });
};
How do I get a proper response from Lambda to Lex after successful execution (the email service works properly)? I have tried context.done(); but it did not work.
Edit 1:
I tried adding the response below from the AWS documentation for Lex, but I am still getting the same error response.
exports.handler = (event, context, callback) => {
  callback(null, {
    "dialogAction": {
      "type": "ConfirmIntent",
      "message": {
        "contentType": "PlainText or SSML",
        "content": "message to convey to the user, i.e. Are you sure you want a large pizza?"
      }
    }
  });
};
As mentioned in the lambda-input-response-format docs here, the fulfillmentState property is required in the response.
The other thing is that you have to pass either PlainText or SSML for the contentType in the response. In your case it should be just PlainText.
exports.handler = (event, context, callback) => {
  callback(null, {
    "dialogAction": {
      "type": "ConfirmIntent",
      "fulfillmentState": "Fulfilled", // <-- Required
      "message": {
        "contentType": "PlainText",
        "content": "message to convey to the user, i.e. Are you sure you want a large pizza?"
      }
    }
  });
};
The above code should solve your problem.
However, if you look at the request/response in the network tab you may see HTTP error 424, DependencyFailedException, which says "Amazon Lex does not have sufficient permissions to call a Lambda function". That message is very misleading.
