How do I query this auto-generated AWS Amplify API? - node.js

I am trying to query an auto-generated Amplify API using Postman. I'm banging my head against the wall on something that should be simple. Can someone explain why this query URL doesn't return a JSON object? The data exists in DynamoDB, but the request returns an empty array in Postman (and a 200 status):
Postman (this is what I expected to work):
https://xxxxx.execute-api.us-east-1.amazonaws.com/staging/api/getShipContainer?location=fl
UPDATE: after staring at the code for longer, I see that req.params[partitionKeyName] is somehow evaluating to getShipContainer, which would explain my issue, but how do I fix this? And why did it happen?
condition[partitionKeyName]['AttributeValueList'] = [ convertUrlType(req.params[partitionKeyName], partitionKeyType) ];
This syntax works (it returns the DynamoDB object) but is very clearly wrong (location is a DynamoDB column, and fl is the filter value): https://xxxxxx.execute-api.us-east-1.amazonaws.com/staging/api/fl?location
Query method:
const userIdPresent = false; // TODO: update in case is required to use that definition
const partitionKeyName = "location";
const partitionKeyType = "S";
const sortKeyName = "containerId";
const sortKeyType = "S";
const hasSortKey = sortKeyName !== "";
const path = "/api";
const UNAUTH = 'UNAUTH';
const hashKeyPath = '/:' + partitionKeyName;
const sortKeyPath = hasSortKey ? '/:' + sortKeyName : '';

// declare a new express app
var app = express()
app.use(bodyParser.json())
app.use(awsServerlessExpressMiddleware.eventContext())

// Enable CORS for all methods
app.use(function(req, res, next) {
  res.header("Access-Control-Allow-Origin", "*")
  res.header("Access-Control-Allow-Headers", "*")
  next()
});

// convert url string param to expected Type
const convertUrlType = (param, type) => {
  switch(type) {
    case "N":
      return Number.parseInt(param);
    default:
      return param;
  }
}

/********************************
 * HTTP Get method for list objects *
 ********************************/

// api/:location
app.get(path + hashKeyPath, function(req, res) {
  var condition = {}
  condition[partitionKeyName] = {
    ComparisonOperator: 'EQ'
  }

  if (userIdPresent && req.apiGateway) {
    condition[partitionKeyName]['AttributeValueList'] = [req.apiGateway.event.requestContext.identity.cognitoIdentityId || UNAUTH ];
  } else {
    try {
      condition[partitionKeyName]['AttributeValueList'] = [ convertUrlType(req.params[partitionKeyName], partitionKeyType) ];
    } catch(err) {
      res.statusCode = 500;
      res.json({error: 'Wrong column type ' + err});
    }
  }

  let queryParams = {
    TableName: tableName,
    KeyConditions: condition
  }

  console.log(`req gg cond::`, JSON.stringify(condition), `params`, Object.entries(req.params).map(([i,k]) => i + ' ' + k).join(','))

  dynamodb.query(queryParams, (err, data) => {
    if (err) {
      res.statusCode = 500;
      res.json({error: 'Could not load items: ' + err});
    } else {
      res.json(data.Items);
    }
  });
});
results of the console.log I put in to debug:
req gg cond::
{
  "location": {
    "ComparisonOperator": "EQ",
    "AttributeValueList": [
      "getShipContainer"
    ]
  }
}
params location getShipContainer
Shouldn't the query be using location and ignoring "getShipContainer" completely? I'm very confused because the code was auto-generated. getShipContainer is the name of the Lambda function that is being called.
I also tested this in the API Gateway test console with the same result.
Here is a quick screenshot of my DynamoDB table as well:

The issue is mentioned in this GitHub issue.
Change your handler function to this, and https://xxxxx.execute-api.us-east-1.amazonaws.com/staging/api/location will return the list of items.
app.get(path + hashKeyPath, function (req, res) {
  let scanParams = {
    TableName: tableName,
  };
  dynamodb.scan(scanParams, (err, data) => {
    if (err) {
      res.statusCode = 500;
      res.json({ error: "Could not load items: " + err });
    } else {
      res.json(data.Items);
    }
  });
});
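To see why the original URL misbehaves, here is a minimal standalone sketch (separate from the generated handler): the generated route is path + hashKeyPath, i.e. /api/:location, so Express binds whatever path segment follows /api to req.params.location, while ?location=fl only appears in req.query.
// Minimal standalone sketch of the routing behaviour (hypothetical demo app,
// not the generated handler): /api/:location captures the path segment.
const express = require('express');
const demo = express();

demo.get('/api/:location', (req, res) => {
  // GET /api/getShipContainer?location=fl
  //   req.params.location === 'getShipContainer'   (path segment)
  //   req.query.location  === 'fl'                  (query string)
  res.json({ params: req.params, query: req.query });
});

demo.listen(3000);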
And here's the code for filtering when the parameter is specified in the URL.
location is one of the DynamoDB reserved words, so I've mapped it with an expression attribute name (#loc).
https://xxxxx.execute-api.us-east-1.amazonaws.com/staging/api/location?location=fl
app.get(path + hashKeyPath, function (req, res) {
  var filterParams = {};
  const location = req.query[partitionKeyName] || "";
  if (location) {
    filterParams = {
      FilterExpression: "#loc = :loc",
      ExpressionAttributeNames: {
        "#loc": "location",
      },
      ExpressionAttributeValues: {
        ":loc": location,
      },
    };
  }
  let scanParams = {
    TableName: tableName,
    ...filterParams,
  };
  dynamodb.scan(scanParams, (err, data) => {
    if (err) {
      res.statusCode = 500;
      res.json({ error: "Could not load items: " + err });
    } else {
      res.json(data.Items);
    }
  });
});
Usually I use a DynamoDB Global Secondary Index (GSI) to query items, but it depends on the use case. For example, I've used one to query a list of items by site ID.
The table would be something like this, with columns itemID, itemName, and siteID:
router.get("/", function (req, res) {
const sortKeyName = "siteID";
let queryParams = {
TableName: tableName,
IndexName: "siteIDGSI",
KeyConditionExpression: "siteID = :site_id",
ExpressionAttributeValues: {
":site_id": req.params[sortKeyName],
},
};
dynamodb.query(queryParams, (err, data) => {
if (err) {
res.statusCode = 500;
res.json({ error: "Could not load items: " + err });
} else {
res.json({
statusCode: 200,
message: "List of Items in " + req.params[sortKeyName],
items: data.Items,
});
}
});
});
So you can create a GSI for containerId.
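A hedged sketch of what querying such an index could look like from the same Express app; the index name containerIdGSI and the extra route are assumptions, not part of the generated code:
// Hedged sketch: assumes a GSI whose partition key is containerId has already
// been added to the table; the index name "containerIdGSI" and the route are
// hypothetical, not part of the Amplify-generated handler.
app.get(path + '/container/:containerId', function (req, res) {
  const queryParams = {
    TableName: tableName,
    IndexName: 'containerIdGSI',
    KeyConditionExpression: 'containerId = :cid',
    ExpressionAttributeValues: { ':cid': req.params.containerId },
  };
  dynamodb.query(queryParams, (err, data) => {
    if (err) {
      res.statusCode = 500;
      res.json({ error: 'Could not load items: ' + err });
    } else {
      res.json(data.Items);
    }
  });
});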

Related

pass mysql value from node js to angular

Hi, I have this POST function that calls a create function which inserts into the database:
router.post('/', async (req, res, next) =>
{
  const body = req.body;
  console.log("tu sam todo_task", req.body);
  try
  {
    const todo_task = await TodoTaskService.create(body);
    console.log(todo_task, "function")
    // created the todo_task!
    return res.status(201).json({ todo_task: todo_task });
  }
  catch(err)
  {
  }
});
Then in the second snippet I have this create function in the service class, where I want to return the result of the MySQL query:
class TodoTaskService
{
  static create(data)
  {
    console.log("tu sam service todo_task create");
    var vres = todoTaskValidator.validate(data, todo_taskVSchema);
    /* validation failed */
    if(!(vres === true))
    {
      let errors = {}, item;
      for(const index in vres)
      {
        item = vres[index];
        errors[item.field] = item.message;
      }
      throw {
        name: "ValidationError",
        message: errors
      };
    }
    console.log("tu sam service todo_task validation passed");
    let todo_task = new TodoTaskModel(data.todo_task_name, data.todo_task_complete_f);
    let connection = mysql.createConnection(dev);
    // insert statement
    let sql = `INSERT INTO todo_task (todo_task_name,todo_task_complete_f) VALUES (?);`
    let values = [data.todo_task_name, data.todo_task_complete_f];
    // execute the insert statement
    connection.query(sql, [values], (err, results, fields) => {
      if (err) {
        global.query = "Database error";
        console.log(err.message + "zasto");
        return err
      }
      else {
        // get inserted rows
        global.query = "OK";
        todo_task.todo_task_id = results.insertId
        console.log("rows inserted", todo_task);
        return todo_task;
      }
    });
  }
}
How can I pass the returned todo_task from connection.query back to the first POST function and send it to Angular?
I've found a solution on my own; I don't know if it's best practice, but here it is. I'm using a callback to pass the value:
router.post('/', async (req, res, next) =>
{
  const body = req.body;
  try
  {
    return await TodoTaskService.create(body, function (result) {
      console.log(result, "function result")
      todo_task = result;
      return res.status(201).json({ todo_task: todo_task });
    });
    // created the todo_task!
    //return res.status(201).json({ todo_task: todo_task });
  }
  catch(err)
  {
  }
});
Returning the callback with the result:
class TodoTaskService
{
  static create(data, callback)
  {
    var vres = todoTaskValidator.validate(data, todo_taskVSchema);
    /* validation failed */
    if(!(vres === true))
    {
      let errors = {}, item;
      for(const index in vres)
      {
        item = vres[index];
        errors[item.field] = item.message;
      }
      throw {
        name: "ValidationError",
        message: errors
      };
    }
    console.log("tu sam service todo_task validation passed");
    let todo_task = new TodoTaskModel(data.todo_task_name, data.todo_task_complete_f);
    /* todo_task.uid = 'c' + counter++;
       todo_tasks[todo_task.uid] = todo_task; */
    let connection = mysql.createConnection(dev);
    // insert statement
    let sql = `INSERT INTO todo_task (todo_task_name,todo_task_complete_f) VALUES (?);`
    let values = [data.todo_task_name, data.todo_task_complete_f];
    // execute the insert statement
    connection.query(sql, [values], (err, results, fields) => {
      if (err) {
        global.query = "Database error";
        console.log(err.message + "zasto");
        return err
      }
      else {
        // get inserted rows
        global.query = "OK";
        todo_task.todo_task_id = results.insertId
        console.log("rows inserted", todo_task);
        return callback(todo_task);
      }
    });
  }
}
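As a follow-up on the design choice: instead of passing a callback into the service, the query can be wrapped in a Promise so the route simply awaits it. A minimal sketch, reusing the mysql, dev, and TodoTaskModel names from the snippets above:
// Hedged sketch (alternative to the callback approach): wrap the INSERT in a
// Promise so the route handler can await it. `mysql`, `dev`, and TodoTaskModel
// are the same names used in the snippets above.
function createTodoTask(data) {
  const todo_task = new TodoTaskModel(data.todo_task_name, data.todo_task_complete_f);
  const connection = mysql.createConnection(dev);
  const sql = `INSERT INTO todo_task (todo_task_name, todo_task_complete_f) VALUES (?);`;
  const values = [data.todo_task_name, data.todo_task_complete_f];
  return new Promise((resolve, reject) => {
    connection.query(sql, [values], (err, results) => {
      connection.end(); // close the connection either way
      if (err) return reject(err);
      todo_task.todo_task_id = results.insertId;
      resolve(todo_task);
    });
  });
}

// Usage in the route:
//   const todo_task = await createTodoTask(body);
//   return res.status(201).json({ todo_task: todo_task });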

How to consume a RESTful API in Node.js

I'm new to Node.js and I'm creating a simple pagination page. The REST API works fine, but consuming it has left me in limbo.
Here is the REST API (other parts have been taken out for brevity):
const data = req.query.pageNo;
const pageNo =
  (typeof data === 'undefined' || data < 1) ? 1 : parseInt(req.query.pageNo);
let query = {};
const total = 10;
query.skip = (total * pageNo) - total;
query.limit = total;
try {
  const totalCount = await Users.countDocuments();
  const pageTotal = Math.ceil(totalCount / total);
  const users = await Users.find({}, {}, query);
  return res.status(200).json(users);
} catch (error) {
  console.log('Error ', error);
  return res.status(400).send(error)
};
};
When I return the JSON with just the users object, like so: return res.status(200).json(users);, the page renders correctly, but when I pass in other objects like the ones computed in the code, it fails. This is how I'm consuming the API:
const renderHomepage = (req, res, responseBody) => {
  let message = null;
  if (!(responseBody instanceof Array)) {
    message = 'API lookup error';
    responseBody = [];
  } else {
    if (!responseBody.length) {
      message = 'No users found nearby';
    }
  }
  res.render('users-list', {
    title: 'Home Page',
    users: responseBody,
    message: message
  });
}
const homelist = (req, res) => {
  const path = '/api/users';
  const requestOptions = {
    url: `${apiOptions.server}${path}`,
    method: 'GET',
    json: true,
  };
  request(
    requestOptions,
    (err, {statusCode}, body) => {
      if (err) {
        console.log('There was an error ', err);
      } else if (statusCode === 200 && body.length) {
        renderHomepage(req, res, body);
      } else if (statusCode !== 200 && !body.length) {
        console.log('error ', statusCode);
      }
    }
  );
}
I've searched extensively both here and on other resources, but none of the solutions quite answers my question. I hope someone can help.
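For reference, a minimal sketch of how the consumer would change if the API returned an object instead of a bare array; the field names users and pageTotal are assumptions based on the values computed in the API code above, not something the post confirms:
// Hedged sketch: if the API returns { users, pageTotal } instead of a bare
// array, body is no longer an Array and body.length is undefined, so the
// checks in renderHomepage / homelist above would need to read body.users.
// The field names `users` and `pageTotal` are assumptions.
const renderHomepage = (req, res, responseBody) => {
  const users = (responseBody && Array.isArray(responseBody.users))
    ? responseBody.users
    : [];
  const message = users.length ? null : 'No users found nearby';
  res.render('users-list', {
    title: 'Home Page',
    users: users,
    message: message,
    pageTotal: responseBody ? responseBody.pageTotal : 1,
  });
};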

Error: NJS-012: encountered invalid bind data type in parameter 2

I have searched for a solution to this error and found some answers, but none of them helped me fix it: Error: NJS-012: encountered invalid bind data type in parameter 2. Maybe the same error can occur in different scenarios.
Stored procedure definition
create or replace PROCEDURE SP_MEAL_GETMEALTYPES
(
  p_DataSource OUT Sys_RefCursor
)
AS
BEGIN
  OPEN p_DataSource FOR
    select mealtypeid, description from mealtypes;
END;
File name: menusStoredProc.js
"use strict";
var dbParams = require('../../oracle/dbParams');
function storedProcs() {
this.SP_USER_GETMENUS = {
name: 'sp_meal_getmealtypes',
params: {
dataSource: {val: null, type: dbParams.CURSOR, dir: dbParams.BIND_OUT}
},
resultSetColumns: ['mealTypeId','description']
}
}
module.exports = storedProcs;
File name: menus.js
var express = require('express');
var MenusStoreProc = require('../storedProcedures/menusStoredProc');
var oraDbAssist = require('../../oracle/oracleDbAssist');
var router = express.Router();

router.get('/getmenus', (req, res, next) => {
  var sp = new MenusStoreProc().SP_USER_GETMENUS;
  oraDbAssist.getConnection(function (err, conn) {
    if (err)
      return console.log('Connecting to db failed - ' + err);
    oraDbAssist.executeSqlWithConn(sp, false, conn, function (err, menus) {
      if (err)
        return console.log('Executing ' + sp.name + ' failed - ' + err);
      res.status(200).json(JSON.stringify(menus));
    });
  });
});
module.exports = router;
Function definition added - executeSqlWithConn
function executeSqlWithConn(sp, autoCommit, connection, next) {
  var sql = createProcedureSqlString(sp.name, sp.params);
  var params = buildParams(sp.params);
  connection.execute(sql, params, { autoCommit: autoCommit }, function (err, result) {
    if (err) {
      next(err, null);
      return;
    }
    var allRows = [];
    var numRows = 50; // number of rows to return from each call to getRows()
    for (var attributeName in result.outBinds) {
      if (result.outBinds[attributeName] && result.outBinds[attributeName].metaData) { // db response is a result set
        function fetchRowsFromResultSet(pResultSet, pNumRows) {
          pResultSet.getRows(pNumRows, function (readErr, rows) {
            if (readErr) { // check the getRows error, not the outer err
              pResultSet.close(function (err) { // always close the result set
                next(readErr);
              });
              return;
            }
            allRows.push(rows);
            if (rows.length === pNumRows) {
              fetchRowsFromResultSet(result.outBinds[attributeName], numRows);
              return;
            }
            var allRowsResult = Array.prototype.concat.apply([], allRows);
            generateJsonFromDbResultSet(pResultSet.metaData, allRowsResult, sp, function (resultSet) {
              pResultSet.close(function (err) { // always close the result set
                next(null, resultSet);
              });
            });
          });
        }
        fetchRowsFromResultSet(result.outBinds[attributeName], numRows);
        return;
      }
    }
    next(null, result.outBinds);
  });
}
Function definition added - buildParams
function buildParams(params) {
  for (var attributeName in params) {
    params[attributeName].val = typeof params[attributeName].val === 'undefined' ? null : params[attributeName].val;
    if (params[attributeName].type.is(dbParams.DATE))
      params[attributeName].val = params[attributeName].val ? new Date(params[attributeName].val) : null;
    params[attributeName].type = params[attributeName].type.value;
    params[attributeName].dir = params[attributeName].dir.value;
  }
  return params;
}
Any help, dear members?
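In case it helps with debugging, here is a hedged sketch of binding the ref cursor directly with node-oracledb's own constants, bypassing the dbParams wrapper above; the connection handling is assumed to be done elsewhere:
// Hedged sketch: call the procedure with node-oracledb's own bind constants,
// independent of the dbParams wrapper above. `connection` is assumed to be an
// open node-oracledb connection obtained elsewhere.
const oracledb = require('oracledb');

async function getMealTypes(connection) {
  const result = await connection.execute(
    'BEGIN SP_MEAL_GETMEALTYPES(:dataSource); END;',
    { dataSource: { type: oracledb.CURSOR, dir: oracledb.BIND_OUT } }
  );
  const resultSet = result.outBinds.dataSource;
  const rows = await resultSet.getRows(50); // fetch up to 50 rows
  await resultSet.close();                  // always close the result set
  return rows;
}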

access values after authentication in node js

I have a program that does the following:
Look into a DynamoDB table.
Get the data from the table.
Save the variables in session
After the process, print the values in console.
My code is as below.
intentHandlers['GetMYBusinessInfo'] = function (request, session, response, slots) {
  console.log('entered ext bloxk');
  if (!session.attributes.userName) {
    console.log('eneterd the user entered the block');
    var userName = 'jim';
    isUserRegistered(userName.toLowerCase(), function (res, err) {
      if (err) {
        response.fail(err);
        console.log(err);
      }
      else if (!res) {
        response.shouldEndSession = true;
      }
      else {
        console.log(res);
        var countRes = JSON.stringify(res.Count);
        var unpUserRegion = JSON.stringify(res.Items[0].Region);
        var unpUserCity = JSON.stringify(res.Items[0].State);
        var userRegion = JSON.parse(unpUserRegion);
        var userCity = JSON.parse(unpUserCity);
        session.attributes.city = userCity;
        session.attributes.region = userRegion;
        console.log("parsed " + countRes + "\t region is " + userRegion);
        session.attributes.userName = true;
      }
    });
  }
  console.log(`session values after authentication are user city is ${session.attributes.city}`);
}
The method that checks whether the value is in DynamoDB:
function isUserRegistered(userName, callback) {
  var params = {
    TableName: "usersTable",
    FilterExpression: "#nme = :nme",
    ExpressionAttributeNames: {
      "#nme": "Name",
    },
    ExpressionAttributeValues: {
      ":nme": userName
    }
  };
  var count = 0;
  docClient.scan(params, function (err, data) {
    if (err) {
      console.error("Unable to scan the table. Error JSON:", JSON.stringify(err, null, 2));
      callback(false, err);
    } else {
      console.log("Scan succeeded." + data.Items.length);
      if (data.Items.length === 0) {
        callback(false);
      }
      else {
        data.Items.forEach(function (itemData) {
          console.log("Item :", ++count, JSON.stringify(itemData));
        });
        callback(data);
      }
    }
  });
}
when I run this, the output that I get is:
session values after authentication are user city is undefined
Scan succeeded.1
Item : 1
{
  "ID": "3",
  "State": "wisconsin",
  "Region": "midwest",
  "Name": "jim"
}
{ Items: [ { ID: '3', State: 'wisconsin', Region: 'midwest', Name: 'jim' } ],
  Count: 1,
  ScannedCount: 1 }
parsed 1 region is midwest
I know that Node.js is non-blocking, so the above output is expected, but I want the city value to be printed as session values after authentication are user city is {HereTheCityComes} instead of session values after authentication are user city is undefined.
I'm sure that placing the console.log(`session values after authentication are user city is ${session.attributes.city}`); in the last else block (the place where the data is returned) would work.
But I need this type of functionality (getting the data as shown in my current scenario), as there are other things to be done after checking whether the user exists in the database.
Please let me know where I am going wrong and how I can fix this.
You can't synchronously expect an async result.
What you can do here is solve your problem with promises.
Here is a solution:
intentHandlers['GetMYBusinessInfo'] = function (request, session, response, slots) {
  console.log('entered ext bloxk');
  var userPromise = Promise.resolve();
  if (!session.attributes.userName) {
    console.log('eneterd the user entered the block');
    var userName = 'jim';
    userPromise = new Promise(function (resolve, reject) {
      isUserRegistered(userName.toLowerCase(), function (res, err) {
        if (err) {
          response.fail(err);
          return reject(err); // return so we don't fall through on error
        }
        var countRes = JSON.stringify(res.Count);
        var unpUserRegion = JSON.stringify(res.Items[0].Region);
        var unpUserCity = JSON.stringify(res.Items[0].State);
        var userRegion = JSON.parse(unpUserRegion);
        var userCity = JSON.parse(unpUserCity);
        session.attributes.city = userCity;
        session.attributes.region = userRegion;
        console.log("parsed " + countRes + "\t region is " + userRegion);
        resolve(res);
      });
    });
  }
  userPromise.then(function () {
    console.log(`session values after authentication are user city is ${session.attributes.city}`);
  });
}
If you are not using ES6, then just install bluebird and use var Promise = require('bluebird')
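If async/await is available, the same flow can also be written without the explicit .then() chain. A minimal sketch, assuming the framework simply invokes the handler and does not rely on a synchronous return value:
// Hedged sketch: same idea with async/await instead of .then().
// Assumes the framework only calls the handler and ignores its return value.
intentHandlers['GetMYBusinessInfo'] = async function (request, session, response, slots) {
  try {
    if (!session.attributes.userName) {
      var userName = 'jim';
      var res = await new Promise(function (resolve, reject) {
        isUserRegistered(userName.toLowerCase(), function (res, err) {
          if (err) return reject(err);
          resolve(res);
        });
      });
      session.attributes.city = res.Items[0].State;
      session.attributes.region = res.Items[0].Region;
      session.attributes.userName = true;
    }
    console.log(`session values after authentication are user city is ${session.attributes.city}`);
  } catch (err) {
    response.fail(err);
  }
};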

How to save nested array in MongoDB using Mongoose and NodeJS

Can anyone explain to me how to save the nested array items into MongoDB with Mongoose and Node.js?
Here is the schema I am using:
var demoSchema = ({
  "r_id": Number,
  "r_label": String,
  "entity": [{
    "d_label": String,
    "d_type": String
  }]
})
And here is the Node.js function I am using to save the data into the DB:
app.route("/mypages/rooms")
  .post(function(req, res) {
    var db = mongoOp.demo();
    var response = {};
    req.checkBody("r_id", "Enter a valid r_id address.").notEmpty();
    req.checkBody("r_label", "Enter a valid label address.").notEmpty();
    var errors = req.validationErrors();
    if (errors) {
      console.log(errors);
      console.log(req.body);
      res.status(500);
      res.end('500 Server Error');
      //res.render('addrooms',{flag:1});
      return;
    } else {
      db.r_id = req.body.r_id;
      db.r_label = req.body.r_label;
      db.entity = req.body.entity;
      db.save(function(err) {
        if (err) {
          findfromdb(req, res, 2); //own function for implementation purpose
        } else {
          findfromdb(req, res, 1);
        }
      });
      //var middleVar = req.body.resources;
      // console.log(middleVar[0].d_rgb);
    }
  });
Set entity with an array:
db.entity = [{}];
app.route("/mypages/rooms")
  .post(function(req, res) {
    var db = mongoOp.demo();
    var response = {};
    req.checkBody("r_id", "Enter a valid r_id address.").notEmpty();
    req.checkBody("r_label", "Enter a valid label address.").notEmpty();
    var errors = req.validationErrors();
    if (errors) {
      console.log(errors);
      console.log(req.body);
      res.status(500);
      res.end('500 Server Error');
      //res.render('addrooms',{flag:1});
      return;
    } else {
      db.r_id = req.body.r_id;
      db.r_label = req.body.r_label;
      db.entity = [{
        "d_label": req.body.label_type,
        "d_type": req.body.d_type
      }];
      db.save(function(err) {
        if (err) {
          findfromdb(req, res, 2); //own function for implementation purpose
        } else {
          findfromdb(req, res, 1);
        }
      });
      //var middleVar = req.body.resources;
      // console.log(middleVar[0].d_rgb);
    }
  });
The operation below adds an element with d_label and d_type to the entity array if it does not already exist there; if it already exists, it won't be added.
https://docs.mongodb.com/manual/reference/operator/update/addToSet/
Model.update(
  query, // { _id: 1 }
  {
    $addToSet: {
      "entity": {
        "d_label": req.body.label_type,
        "d_type": req.body.d_type
      }
    }
  }
)
Have a look at this answer:
Pushing item to Mongodb collection array
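For completeness, a hedged usage sketch of that $addToSet update through a concrete Mongoose model; the model name Demo, the r_id filter, and callback-style updateOne (Mongoose 6 and earlier) are assumptions:
// Hedged sketch: applying the $addToSet update through a concrete Mongoose model.
// "Demo" and the { r_id } filter are assumptions; adjust to your own model/query.
Demo.updateOne(
  { r_id: req.body.r_id },
  {
    $addToSet: {
      entity: {
        d_label: req.body.label_type,
        d_type: req.body.d_type
      }
    }
  },
  function (err, result) {
    if (err) {
      res.status(500);
      return res.end('500 Server Error');
    }
    res.json(result);
  }
);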
