Create an API in Node.js to get data with multiple queries with MySQL - node.js

I am new to Node.js. I am trying to develop an API that returns the item list for each category. For that I have created a function to fetch all active featured tags from the featured_tags table. After the featured tag list is fetched, I want to get the items from the items table that belong to each particular tag. Can anyone help me with how to do that in Node.js? I am using a MySQL database.
I have done the following to fetch the categories from the table.
route.js file
this.app.get('/list_menu', async (request, response) => {
    var itemlist = '';
    const featuretags = helper.getFeatureTag().then(function (featuredtags) {
        //console.log('test');
        itemlist = helper.getitems(featuredtags);
    });
    response.status(200).json({
        status: 200,
        message: "success",
        data: itemlist
    });
});
Function to get active featured tags in the helper.js file:
async getFeatureTag() {
    return this.db.query("select * from featured_tags where status = 1 order by id desc");
    //const featuredtags = await this.db.query("select * from featured_tags where status = 1 order by id desc");
}
Function that gets the item list for the featured tags in the helper.js file:
async getitems(featuredtags) {
    var itemdata = [];
    var featured_tagdataset = [];
    if (featuredtags.length > 0) {
        for (var i = 0; i < featuredtags.length; i++) {
            var row = featuredtags[i];
            var featurtag = {};
            featurtag.id = row.id;
            featurtag.featured_tag = row.featured_tag;
            var itemresult = await this.db.query("SELECT * FROM `items` WHERE status = 1 and FIND_IN_SET('" + row.id + "', featured_tags) > 0");
            if (itemresult.length > 0) {
                for (var l = 0; l < itemresult.length; l++) {
                    var itemrow = itemresult[l];
                    var item = {};
                    item.id = itemrow.id;
                    item.category_id = row.id;
                    item.name = itemrow.item_name;
                    itemdata.push(JSON.stringify(item));
                }
            }
            featurtag.tag_items = itemdata;
            featured_tagdataset.push(featurtag);
        }
        //console.log(featured_tagdataset);
        return featured_tagdataset;
    } else {
        return null;
    }
}
When I console.log the featured_tagdataset array inside getitems() in the helper.js file, it shows exactly the response I want to pass in the API response. But in route.js the data parameter comes back blank.
Can anyone help me with how to develop this type of API in Node.js?

This is because the helper.getitems(featuredtags) method is called successfully, but the response is sent without waiting for the method to return a result, since Node.js is asynchronous.
You need to write the code in such a way that it runs in series. I have created a sample example you can try:
this.app.get('/list_menu', async (request, response) => {
    helper.getFeatureTag().then(function (featuredtags) {
        helper.getitems(featuredtags).then(function (itemlist) {
            response.status(200).json({
                status: 200,
                message: "success",
                data: itemlist
            });
        });
    });
});

You forgot to use await in your router.js when calling the asynchronous functions. Just update your router to this:
this.app.get('/list_menu', async (request, response) => {
    const featuredtags = await helper.getFeatureTag(),
        itemlist = await helper.getitems(featuredtags);
    response.status(200).json({
        status: 200,
        message: "success",
        data: itemlist
    });
});

You can use nested callbacks, async/await, or chained promises using then.
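As a side note, the helper's getitems() builds SQL by concatenating row.id into the query string, and it reuses one itemdata array across all tags, so every tag accumulates the previous tags' items. Below is a minimal reworked sketch, assuming a mysql/mysql2-style query(sql, values) wrapper that resolves to the result rows; the '?' placeholder is escaped by the driver, which avoids SQL injection:
async getitems(featuredtags) {
    const featured_tagdataset = [];
    for (const row of featuredtags) {
        // '?' is substituted and escaped by the driver.
        const itemresult = await this.db.query(
            "SELECT id, item_name FROM items WHERE status = 1 AND FIND_IN_SET(?, featured_tags) > 0",
            [row.id]
        );
        featured_tagdataset.push({
            id: row.id,
            featured_tag: row.featured_tag,
            // A fresh array per tag, holding plain objects rather than JSON strings.
            tag_items: itemresult.map((itemrow) => ({
                id: itemrow.id,
                category_id: row.id,
                name: itemrow.item_name
            }))
        });
    }
    return featured_tagdataset;
}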

Related

Single Http Request to get multiple file data Parse.com

I'm using Back4app.
My Profile class schema has 4 File columns containing pictures.
So when I retrieve an object, I have to make an HTTP request for each file URL and get the byte data, like this:
const data = await Parse.Cloud.httpRequest({url:profilePhoto.url()});
return data.buffer.toString('base64');
But for all four files I have to make 4 HTTP requests to the server.
Is there any way to do a batch HTTP request so that with just 1 request I can get the data for all 4 files?
My main aim is to make as few requests to the server as possible.
There is no out-of-the-box way to retrieve multiple files with one request in Parse Server.
You could implement your own Parse Cloud Code function to retrieve multiple files, but you would have to manually combine them server side and separate them client side.
As a starting point you could look at packages like multistream that allow you to combine multiple file streams into one to get some inspiration.
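Even without batching, the four requests can at least be issued concurrently, which cuts the wall-clock latency to roughly that of the slowest single request. A sketch, assuming the profile object holds four Parse.File columns (the column names below are hypothetical):
// Fire all four httpRequest calls at once and wait for them together.
// 'photo1'..'photo4' are hypothetical column names on the Profile object.
const photoKeys = ['photo1', 'photo2', 'photo3', 'photo4'];
const encoded = await Promise.all(
    photoKeys.map(async (key) => {
        const file = profile.get(key);
        const data = await Parse.Cloud.httpRequest({ url: file.url() });
        return data.buffer.toString('base64');
    })
);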
You might be able to do something similar to what I've done in cloud code.
I had to load up a bunch of information at the start of my application, requiring many round trips to the server.
So I wrote a function called getUserData().
This does many unrelated queries, and jams all of the results into one big object. I then return the object from the function.
Here is the entire function:
console.log("startig getUserData");
var callCount = 0;
var lastLoadTime=0;
// Given a user, load all friends. Save the objects to ret.objects,
// and save the objectIds to ret.friends
//
// Note: we always load the exhaustive friend list, because
// otherwise, we would have no way of recognizing
// removed friendships.
//
async function loadFriends(user, ret) {
    const friendQuery = user.relation("friends").query();
    const friends = await findFully(friendQuery);
    for (var i = 0; i < friends.length; i++) {
        ret.friends[friends[i].id] = 1;
        ret.objects[friends[i].id] = friends[i];
    }
}
// Given a user, load all owned cells. Save the objects to ret.owned,
// and save their objectIds to ret.ownedCells.
//
// Also, save the ids of members, which we will use to flesh out ret.objects with
// the objects who are not friends, but share a cell with the current user.
async function loadPublicCells(user, ret, memberIds) {
    const ownedCellQ = new Parse.Query('PublicCell');
    ownedCellQ.equalTo('owner', user);
    const joinedCellQ = new Parse.Query('PublicCell');
    joinedCellQ.equalTo('members', user);
    const publicCellQ = Parse.Query.or(ownedCellQ, joinedCellQ);
    publicCellQ.greaterThan("updatedAt", new Date(lastLoadTime));
    const publicCells = await findFully(publicCellQ);
    for (var i = 0; i < publicCells.length; i++) {
        const cell = publicCells[i];
        ret.ownedCells[cell.id] = cell;
        const owner = cell.get("owner");
        if (owner == null)
            continue;
        ret.objects[cell.id] = cell;
        if (owner.id === user.id) {
            ret.ownedCells[cell.id] = 1;
        } else {
            ret.joinedCells[cell.id] = 1;
        }
        const memberQ = cell.relation("members").query();
        const members = await findFully(memberQ);
        if (ret.memberMap[cell.id] == null)
            ret.memberMap[cell.id] = {};
        const map = ret.memberMap[cell.id];
        for (var j = 0; j < members.length; j++) {
            const member = members[j];
            map[member.id] = 1;
            ret.objects[member.id] = member;
        }
    }
}
// given a list of all members of all cells, load those objects and store
// them in ret.objects. We do not have to record which cells they belong
// to, because that information is in ret.memberMap
async function loadMembers(memberIds, ret) {
    const memberQ = new Parse.Query(Parse.User);
    var partIds;
    while (memberIds.length) {
        partIds = memberIds.splice(0, 100);
        memberQ.containedIn('objectId', partIds);
        const part = await findFully(memberQ);
        for (var i = 0; i < part.length; i++) {
            ret.objects[part[i].id] = part[i];
        }
    }
}
// given a user, save all of the objectIds of people who have annoyed him with
// spam. We save only the ids, they don't go on ret.objects, because we only
// need to filter them out of things. The objectIds are sufficient.
//
// We always send all spam objects, otherwise we would not recognize deletions
async function loadUserSpams(user, ret) {
    const userSpamsQ = new Parse.Query("_User");
    userSpamsQ.equalTo("spamUsers", user);
    userSpamsQ.greaterThan("updatedAt", new Date(lastLoadTime));
    const userSpams = await findFully(userSpamsQ);
    for (var i = 0; i < userSpams.length; i++) {
        ret.userSpams[userSpams[i].id] = 1;
    }
}
// given a user, save all of the objectIds of people who have been annoyed *BY*
// him with spam. We save only the ids, they don't go on ret.objects, because we
// only need to filter them out of things. The objectIds are sufficient.
//
// We always send all spam objects, otherwise we would not recognize deletions
async function loadSpamUsers(user, ret) {
    const spamUserR = user.relation('spamUsers');
    const spamUserQ = spamUserR.query();
    spamUserQ.greaterThan("updatedAt", new Date(lastLoadTime));
    const spamUsers = await findFully(spamUserQ);
    for (var i = 0; i < spamUsers.length; i++) {
        ret.spamUsers[spamUsers[i].id] = 1;
    }
}
// given a user, save all of the objectIds of people to whom he has sent a
// friend request which is still pending. We save only the ids, they don't go
// on ret.objects, because we only need to filter them out of things. The
// objectIds are sufficient.
async function loadPendingFriends(user, ret) {
    const request1Q = new Parse.Query('Request');
    request1Q.equalTo("owner", user);
    const request2Q = new Parse.Query('Request');
    request2Q.equalTo("sentTo", user);
    const requestQ = Parse.Query.or(request1Q, request2Q);
    requestQ.equalTo("status", 'PENDING');
    const requests = await findFully(requestQ);
    for (var i = 0; i < requests.length; i++) {
        const request = requests[i];
        const sentBy = request.get("owner");
        if (sentBy == null) {
            console.warn("sentBy==null");
            continue;
        }
        const sentTo = request.get("sentTo");
        if (sentTo == null) {
            console.warn("sentTo==null");
            continue;
        }
        console.dump({sentTo, sentBy});
        if (sentBy.id == user.id) {
            ret["pendingFriends"][sentTo.id] = sentTo;
        } else if (sentTo.id == user.id) {
            ret["friendingPends"][sentBy.id] = sentBy;
        }
    }
}
// given a user, load all of his private cells. We do not store
// the user objects, because only friends will be in your private cells.
async function loadPrivateCells(user, ret) {
    const privateCellQ = new Parse.Query('PrivateCell');
    privateCellQ.equalTo("owner", user);
    privateCellQ.greaterThan("updatedAt", new Date(lastLoadTime));
    const privateCells = await findFully(privateCellQ);
    for (var i = 0; i < privateCells.length; i++) {
        const cell = privateCells[i];
        ret.objects[cell.id] = cell;
        ret.privateCells[cell.id] = cell;
        if (ret.memberMap[cell.id] == null)
            ret.memberMap[cell.id] = {};
        const map = ret.memberMap[cell.id];
        const memberQ = cell.relation("members").query();
        const members = await findFully(memberQ);
        for (var j = 0; j < members.length; j++) {
            const member = members[j];
            map[member.id] = 1;
            ret.objects[member.id] = member;
        }
    }
}
// we use objects as maps to weed out duplicate objects and cells.
// when we are done, we use this function to replace the object
// with an array of objects. we don't need to send the keys, since
// they already exist within the objects.
function objToValueList(k, ret) {
    const objs = [];
    for (var id in ret[k])
        objs.push(ret[k][id]);
    ret[k] = objs;
    ret.counts[k] = objs.length;
}
// convert the objects which have been used to accumulate key lists
// to arrays of objectIds. k is the name of the list we are working
// on. ret[k] is the list itself.
function objToKeyList(k, ret) {
    const objs = [];
    for (var id in ret[k]) {
        objs.push(id);
    }
    ret[k] = objs;
    ret.counts[k] = objs.length;
}
async function checkUserConsent(user) {
    const query = new Parse.Query("PrivacyPolicy");
    query.descending("createdAt");
    query.limit(1);
    const res = await query.find();
    if (res.length == 0) {
        return true;
    }
    const policy = res[0];
    console.dump(policy);
    console.log(policy);
    const userConsent = user.get("lastConsent");
    return userConsent != null && userConsent.id == policy.id;
}
async function loadAlerts(user, ret) {
    const q1 = new Parse.Query("Alert");
    q1.equalTo("owner", user);
    const q2 = new Parse.Query("Response");
    q2.equalTo("owner", user);
    const q3 = new Parse.Query("Alert");
    q3.matchesKeyInQuery("objectId", "alert", q2);
    const q = Parse.Query.or(q1, q3);
    // Constrain by time window before running the query.
    var time = new Date().getTime();
    time -= 1000 * 86400;
    time = Math.max(lastLoadTime, time);
    q.greaterThan("updatedAt", new Date(time));
    const list = await q.find();
    for (var i = 0; i < list.length; i++) {
        const item = list[i];
        ret.alerts[item.id] = 1;
        ret.objects[item.id] = item;
    }
}
async function doGetUserData(user) {
    if (!user)
        return {fatal: 'not logged in!'};
    const ret = {
        owner: {},
        privateCells: {},
        friends: {},
        alerts: {},
        objects: {},
        ownedCells: {},
        joinedCells: {},
        spamUsers: {},
        userSpams: {},
        pendingFriends: {},
        friendingPends: {},
        memberMap: {},
        loadTime: lastLoadTime,
        counts: {callCount: callCount++},
    };
    {
        await user.fetch();
        ret.owner = user.id;
        const memberIds = {};
        ret.objects[user.id] = user;
        console.log("loadFriends");
        await loadFriends(user, ret);
        console.log("loadPrivateCells");
        await loadPrivateCells(user, ret, memberIds);
        console.log("loadPublicCells");
        await loadPublicCells(user, ret, memberIds);
        console.log("loadPendingFriends");
        await loadPendingFriends(user, ret);
        console.log("loadUserSpams");
        await loadUserSpams(user, ret);
        console.log("loadSpamUsers");
        await loadSpamUsers(user, ret);
        console.log("loadAlerts");
        await loadAlerts(user, ret);
        const memberList = [];
        for (var id in memberIds) {
            console.log(ret.objects[id]);
            memberList.push(id);
        }
        console.log("loadMembers");
        await loadMembers(memberList, ret);
    }
    for (var cell in ret.memberMap) {
        var map = ret.memberMap[cell];
        var list = [];
        ret.memberMap[cell] = list;
        for (var member in map) {
            list.push(member);
        }
    }
    delete ret.objects[user.id];
    [
        'friends', 'friendingPends', 'pendingFriends',
        'privateCells', 'ownedCells', 'joinedCells',
        'userSpams', 'spamUsers', 'alerts'
    ].forEach((k) => {
        objToKeyList(k, ret);
    });
    objToValueList('objects', ret);
    delete ret.counts;
    return ret;
}
async function getUserData(req) {
    try {
        var nextLoadTime = new Date().getTime();
        const user = req.user;
        console.log(user);
        lastLoadTime = req.params.lastLoadTime;
        if (lastLoadTime == null)
            lastLoadTime = 0;
        lastLoadTime = new Date(lastLoadTime);
        const ret = await doGetUserData(user);
        ret.loadTime = nextLoadTime;
        return ret;
    } catch (err) {
        console.log(err);
        console.log(err.stack);
        throw (`error getting data: ${err}`);
    }
}
Parse.Cloud.define("getUserData", getUserData);
Something like this could easily be done to get your data for you. Like this solution, it is unlikely to be entirely pretty, but it would probably work.
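One gap in the snippet: it calls a findFully() helper that is not shown, presumably a wrapper that pages through query results past Parse's per-query limit. A purely hypothetical implementation, as an assumption about what it does:
// Hypothetical helper: page through a query in fixed-size chunks so the
// results are not capped by Parse's per-find limit. The real findFully()
// is not shown in the answer; this is only a guess at its behavior.
async function findFully(query) {
    const all = [];
    const pageSize = 100;
    let batch;
    do {
        query.limit(pageSize);
        query.skip(all.length);
        batch = await query.find();
        all.push(...batch);
    } while (batch.length === pageSize);
    return all;
}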

How to convert the auth response into array of objects?

I am trying to get the list of users via the Auth function, and I have to create an Excel sheet using the xlsx-populate library. I can convert each response into an array of objects, but since the limit is 1000, I end up with multiple arrays of objects, and I cannot figure out how to merge them into one. I simply fetch results using Auth and try to collect them into a single array of objects. I also tried passing the objects to the Excel sheet, but that produces an Excel sheet containing only the last 1000 responses.
const admin = require("firebase-admin");
const momentTz = require("moment-timezone");
const XlsxPopulate = require("xlsx-populate");
momentTz.suppressDeprecationWarnings = true;
const {
    alphabetsArray
} = require("./constant");
var start = momentTz().subtract(4, "days").startOf("day").format();
var start = momentTz(start).valueOf();
const end = momentTz().subtract(1, "days").endOf("day").format();
const listAllUsers = async (nextPageToken) => {
    const [workbook] = await Promise.all([
        XlsxPopulate.fromBlankAsync()
    ]);
    const reportSheet = workbook.addSheet("Signup Report");
    workbook.deleteSheet("Sheet1");
    reportSheet.row(1).style("bold", true);
    [
        "Date",
        "TIME",
        "Phone Number"
    ].forEach((field, index) => {
        reportSheet.cell(`${alphabetsArray[index]}1`).value(field);
    });
    let count = 0;
    // List batch of users, 1000 at a time.
    const data = [];
    admin
        .auth()
        .listUsers(1000, nextPageToken)
        .then(async (listUsersResult) => {
            listUsersResult.users.forEach((userRecord) => {
                const time = userRecord.metadata.creationTime;
                const timestamp = momentTz(time).valueOf();
                // console.log(timestamp)
                if (timestamp >= 1585704530967) {
                    console.log(time);
                    let column = count + 2;
                    count++;
                    data.push(userRecord.toJSON());
                    reportSheet.cell(`A${column}`).value(time);
                    reportSheet.cell(`C${column}`).value(userRecord.phoneNumber);
                }
            });
            console.log(JSON.stringify(data)); // this is the array of objects I am getting after each batch of 1000
            if (listUsersResult.pageToken) {
                // List next batch of users.
                listAllUsers(listUsersResult.pageToken);
                await workbook.toFileAsync("./SignUp.xlsx");
            }
        });
    // .catch(function (error) {
    //     console.log("Error listing users:", error);
    // });
    // const datas = []
    // datas.push(data)
    // console.log(datas)
    return;
};
// Start listing users from the beginning, 1000 at a time.
listAllUsers();
and the output I am getting is like this:
[]
[]
[]
[]
[]
I want to convert this into a single array of responses.
You have a race condition. When you perform your console.log(JSON.stringify(data)), your listUsers query is still in progress (it runs asynchronously), so you don't yet have the answer when you print the array. Thus the array is empty.
Try this (I'm not sure it is the optimal solution, I'm not a Node.js dev):
admin
    .auth()
    .listUsers(1000, nextPageToken)
    .then(async (listUsersResult) => {
        listUsersResult.users.forEach((userRecord) => {
            const time = userRecord.metadata.creationTime;
            const timestamp = momentTz(time).valueOf();
            // console.log(timestamp)
            if (timestamp >= 1585704530967) {
                console.log(time);
                let column = count + 2;
                count++;
                data.push(userRecord.toJSON());
                reportSheet.cell(`A${column}`).value(time);
                reportSheet.cell(`C${column}`).value(userRecord.phoneNumber);
            }
        });
        console.log(JSON.stringify(data)); // the array of objects after this batch
        if (listUsersResult.pageToken) {
            // List next batch of users.
            listAllUsers(listUsersResult.pageToken);
            await workbook.toFileAsync("./SignUp.xlsx");
        }
    });
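To end up with one flat array across every batch, another option (a sketch, untested; it assumes the same firebase-admin listUsers API as above) is to await each page before requesting the next, accumulate everything into one array, and only then fill and save the workbook:
// Collect every user across all pages into a single array.
const collectAllUsers = async () => {
    const data = [];
    let pageToken;
    do {
        // Await each page so the next request starts only after this one resolves.
        const result = await admin.auth().listUsers(1000, pageToken);
        result.users.forEach((userRecord) => data.push(userRecord.toJSON()));
        pageToken = result.pageToken;
    } while (pageToken);
    return data; // one flat array covering every batch of 1000
};
// Usage: fill the sheet and save the file once, after all pages are in.
// collectAllUsers().then((users) => { /* write rows, then workbook.toFileAsync("./SignUp.xlsx") */ });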

Historian for a particular participant

Is there any way I can get the historian for a particular participant in hyperledger-composer using the Node API?
I am developing an application based on hyperledger-composer using the Node APIs. I want to show the transaction history of a particular participant in his/her profile. I have created the permissions.acl for that, and it works fine in the playground. But when I access the historian from the Node API it gives the complete historian of the network. I don't know how to filter it for one participant.
Since v0.20 you can return results from REST API calls to the calling client application, so something like the following would work (not tested, but you get the idea). NOTE: rather than the 'READ-ONLY' transaction processor endpoint described below, you could just call a REST endpoint such as GET /Trader directly with your parameter (or whatever endpoints you create for your own business network - the example below is trade-network) for returning larger result sets to your client application. See more on this in the docs.
NODE JS Client using APIs:
const BusinessNetworkConnection = require('composer-client').BusinessNetworkConnection;
const rp = require('request-promise');

this.bizNetworkConnection = new BusinessNetworkConnection();
this.cardName = 'admin@mynet';
this.businessNetworkIdentifier = 'mynet';
this.bizNetworkConnection.connect(this.cardName)
    .then((result) => {
        // You can do ANYTHING HERE eg.
    })
    .catch((error) => {
        throw error;
    });

// set up my read-only transaction object - find the history of a particular
// Participant - note it could equally be an Asset instead!
var obj = {
    "$class": "org.example.trading.MyPartHistory",
    "tradeId": "P1"
};

async function callPartHistory() {
    var options = {
        method: 'POST',
        uri: 'http://localhost:3000/api/MyPartHistory',
        body: obj,
        json: true
    };
    let results = await rp(options);
    // console.log("Return value from REST API is " + results);
    console.log(" ");
    console.log(`PARTICIPANT HISTORY for Asset ID: ${results[0].tradeId} is: `);
    console.log("=============================================");
    for (const part of results) {
        console.log(`${part.tradeId} ${part.name}`);
    }
}

// Main
callPartHistory();
MODEL FILE
@commit(false)
@returns(Trader[])
transaction MyPartHistory {
  o String tradeId
}
READ-ONLY TRANSACTION PROCESSOR CODE (in 'logic.js'):
/**
 * Sample read-only transaction
 * @param {org.example.trading.MyPartHistory} tx
 * @returns {org.example.trading.Trader[]} All trxns
 * @transaction
 */
async function participantHistory(tx) {
    const partId = tx.tradeId;
    const nativeSupport = tx.nativeSupport;
    // const partRegistry = await getParticipantRegistry('org.example.trading.Trader')
    const nativeKey = getNativeAPI().createCompositeKey('Asset:org.example.trading.Trader', [partId]);
    const iterator = await getNativeAPI().getHistoryForKey(nativeKey);
    let results = [];
    let res = { done: false };
    while (!res.done) {
        res = await iterator.next();
        if (res && res.value && res.value.value) {
            let val = res.value.value.toString('utf8');
            if (val.length > 0) {
                console.log("#debug val is " + val);
                results.push(JSON.parse(val));
            }
        }
        if (res && res.done) {
            try {
                iterator.close();
            } catch (err) {
            }
        }
    }
    var newArray = [];
    for (const item of results) {
        newArray.push(getSerializer().fromJSON(item));
    }
    console.log("#debug the results to be returned are as follows: ");
    return newArray; // returns something to my NodeJS client (called via REST API)
}

Cloud Functions for Firebase BigQuery sync error

We're working on a Cloud Function that lets us keep our BigQuery and Firebase databases in sync. The function triggers when a place is created/updated/deleted.
Based on the trigger action (create/update/delete) we add a property called big_query_active to signal if the object exists or not. Same goes for the date.
Our current problem is that the call to big query sometimes returns an error. So that would mean that the data is not in sync anymore. How can this be prevented?
'use strict';
// Default imports.
const functions = require('firebase-functions');
const bigQuery = require('@google-cloud/bigquery');
// If you want to change the nodes to listen to REMEMBER TO change the constants below.
// The 'id' field is AUTOMATICALLY added to the values, so you CANNOT add it.
const ROOT_NODE = 'places';
const VALUES = [
    'country_id',
    'category_id',
    'name',
    'active',
    'archived'
];
// This function listens to the supplied root node, but on child added/removed/changed.
// When an object is inserted/deleted/updated the appropriate action will be taken.
exports.children = functions.database.ref(ROOT_NODE + '/{id}').onWrite(event => {
    const query = bigQuery();
    const dataset = query.dataset('stampwallet');
    const table = dataset.table(ROOT_NODE);
    if (!event.data.exists() && !event.data.previous.exists()) {
        return;
    }
    const item = event.data.exists() ? event.data.val() : event.data.previous.val();
    const data = {};
    data['id'] = event.params.id;
    for (let index = 0; index < VALUES.length; index++) {
        const key = VALUES[index];
        data[key] = item[key] !== undefined ? item[key] : null;
    }
    data['big_query_date'] = new Date().getTime() / 1000;
    data['big_query_active'] = event.data.exists();
    return table.insert(data).then(() => {
        return true;
    }).catch((error) => {
        if (error.name === 'PartialFailureError') {
            console.log('A PartialFailureError happened while uploading to BigQuery...');
        } else {
            console.log(JSON.stringify(error));
            console.log('Random error happened while uploading to BigQuery...');
        }
    });
});
This is the error that we (sometimes) receive
{"code":"ECONNRESET","errno":"ECONNRESET","syscall":"read"}
How can we prevent the data from going out of sync? Or is there a way to retry so that it always succeeds?
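ECONNRESET is typically a transient network error, so one common mitigation is to retry the insert with exponential backoff before giving up; a minimal sketch, assuming the same promise-returning table.insert API used above:
// Retry a BigQuery insert on transient errors such as ECONNRESET,
// doubling the delay between attempts; rethrow anything else.
function insertWithRetry(table, data, attempts = 3, delayMs = 500) {
    return table.insert(data).catch((error) => {
        const transient = error.code === 'ECONNRESET';
        if (!transient || attempts <= 1) {
            throw error; // give up: surface the error so it can be logged/handled
        }
        return new Promise((resolve) => setTimeout(resolve, delayMs))
            .then(() => insertWithRetry(table, data, attempts - 1, delayMs * 2));
    });
}
If an insert still fails after all attempts, writing the failed payload to a dead-letter node in the database leaves a record that can be replayed later, so the two stores can still converge.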

MongoDB Query takes too long

Problem: when creating the first document for a user, the query takes too long.
I'm creating a report, using the Report schema. I also have a UserSchema. I create a document in my UI and pass that data to a POST request, which is this:
exports.addSubReport = function (req, res) {
    var id = req.body.masterform;
    var subform = new Report();
    var myDate = Date();
    subform.title = req.body.title;
    subform.date = req.body.date;
    subform.date = myDate;
    subform.owner = req.user;
    subform.body = req.body.body;
    subform.save();
    Report.findById(id, function (err, report) {
        if (err) {
            res.redirect('/404NotFound');
        } else {
            report.subreport.push(subform);
            subform.parentReport = report;
            report.save();
        }
    });
    User.findById(req.body.id, function (err, user) {
        user.forms_created.push(subform);
        subform.owner = req.user;
        subform.authors[0] = user.profile.firstName + " " + user.profile.lastName;
        subform.author = user;
        subform.save();
    });
    res.json(req.body);
};
This works fine and creates the object the way I want it to. However, after creating the document, I set the state in my UI to 'Wait' until I receive the JSON with the new Report I just created. This is the GET request code:
exports.allMyReports = function (req, res) {
    var id = req.user._id;
    var totalproc = 0;
    var dupe = [];
    Report.find({"author": id}, function (err, form) {
        dupe = form;
        dupe.forEach(function (person) {
            User.findById(person.author, function (err, user) {
                if (!err) {
                    person.authors[0] = user.profile.firstName + " " + user.profile.lastName;
                    person.save();
                    totalproc = totalproc + 1;
                }
                if (totalproc == dupe.length) {
                    res.json(dupe);
                }
            });
        });
    });
};
However, the problem is that the first report I create for any user takes an extremely long time. It's most likely the query searching by author, but then I thought: if the user already has 15 documents, how does it find all of those documents instantaneously? I have no idea why it takes so long in this case, and I haven't been able to come up with a solution yet, but I think it has to do with how I'm querying.
Here is a sample of how I do it in the UI:
_onCreateReport = () => {
    const title = React.findDOMNode(this.refs.title).value;
    const date = React.findDOMNode(this.refs.date).value;
    const body = React.findDOMNode(this.refs.body).value;
    ReportsActions.addNewReport({
        title: title,
        date: date,
        body: body
    });
    ReportsActions.getMyReports();
}
I perform the action of adding a new report (a POST request to the API), and then getMyReports, a GET request to the API for all reports belonging to me. Once that returns, it renders three buttons: one to view that document, one to view all my documents, and one to create another report.
All I did was request all the documents and sort them out in the front-end. It reduced the time of the AJAX call, and the filtering in the front-end performs quickly and doesn't hold the server up.
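A complementary option (an assumption on my part, not something from the thread) is to index the field being queried, so Report.find({ author: id }) can use an index scan instead of scanning the whole collection. In Mongoose that is a one-line schema change:
// Hypothetical Mongoose schema for Report; the index on 'author' lets
// Report.find({ author: id }) use an index instead of a full scan.
const mongoose = require('mongoose');

const ReportSchema = new mongoose.Schema({
    title: String,
    date: Date,
    body: String,
    authors: [String],
    author: { type: mongoose.Schema.Types.ObjectId, ref: 'User', index: true }
});

module.exports = mongoose.model('Report', ReportSchema);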
