I get data on the local machine, but not on the server - NestJS

The objects coming back from the server are empty, and I know they should be filled. On the local machine the same code runs normally against the local MySQL database. The backend is NestJS.
async montryReport(userId: number, data?: string) {
  const d = new Date();
  d.setDate(d.getDate() - 30);
  if (!data) data = new Date().toISOString().substring(0, 10);
  const allClient = await this.clientsService.find({
    where: {
      timeAt:
        MoreThanOrEqual(d.toISOString().substring(0, 10) + " 00:00:00") &&
        LessThanOrEqual(data + " 23:59:00"),
      userId: userId,
    },
  });
  const clientsLastMonth = ClientsReport(allClient);
  const allReceptions = await this.carpetReceptionService.find({
    where: {
      timeAt:
        MoreThanOrEqual(d.toISOString().substring(0, 10) + " 00:00:00") &&
        LessThanOrEqual(data + " 23:59:00"),
      userId: userId,
    },
  });
  const carpetLastMonth = NumberOfCarpetReport(allReceptions);
  const allCarpet = await this.carpetService.find({
    where: {
      timeAt:
        MoreThanOrEqual(d.toISOString().substring(0, 10) + " 00:00:00") &&
        LessThanOrEqual(data + " 23:59:00"),
      userId: userId,
    },
  });
  const surfaceAndPayLastMonth = SurfacePayReport(allCarpet);
  return new AnalysisReportInfo(
    clientsLastMonth,
    carpetLastMonth,
    surfaceAndPayLastMonth
  );
}

async yearReport(userId: number) {
  console.log(userId);
  // concatenate all clients, carpet, surface and forPay and return them for all months
  const d = new Date();
  const m = d.getMonth();
  const y = d.getFullYear();
  const months = [];
  for (let i = 0; i < 12; i++) {
    const month = new Date(y, m - i, +1).toISOString().split("T")[0];
    const ad = await this.montryReport(userId, month);
    months.push([month, ad]);
  }
  return months;
}
This is what I get back from the server. On the local machine the same objects are filled, and the server's database has data for at least two months.
[
[
"2022-04-30",
{
"clients": {},
"numberOfCarpet": {},
"surfaceAndForPayment": {}
}
],
[
"2022-03-31",
{
"clients": {},
"numberOfCarpet": {},
"surfaceAndForPayment": {}
}
],
[
"2022-02-28",
{
"clients": {},
"numberOfCarpet": {},
"surfaceAndForPayment": {}
}
],
[
"2022-01-31",
{
"clients": {},
"numberOfCarpet": {},
"surfaceAndForPayment": {}
}
],
[
"2021-12-31",
{
"clients": {},
"numberOfCarpet": {},
"surfaceAndForPayment": {}
}
],
[
"2021-11-30",
{
"clients": {},
"numberOfCarpet": {},
"surfaceAndForPayment": {}
}
],
[
"2021-10-31",
{
"clients": {},
"numberOfCarpet": {},
"surfaceAndForPayment": {}
}
],
[
"2021-09-30",
{
"clients": {},
"numberOfCarpet": {},
"surfaceAndForPayment": {}
}
],
[
"2021-08-31",
{
"clients": {},
"numberOfCarpet": {},
"surfaceAndForPayment": {}
}
],
[
"2021-07-31",
{
"clients": {},
"numberOfCarpet": {},
"surfaceAndForPayment": {}
}
],
[
"2021-06-30",
{
"clients": {},
"numberOfCarpet": {},
"surfaceAndForPayment": {}
}
],
[
"2021-05-31",
{
"clients": {},
"numberOfCarpet": {},
"surfaceAndForPayment": {}
}
]
]
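One thing worth double-checking, although it may or may not explain the local-vs-server difference: MoreThanOrEqual(...) && LessThanOrEqual(...) is evaluated by plain JavaScript &&, so only the right-hand operator reaches the query and the lower bound is silently dropped. Also, toISOString() works in UTC, so new Date(y, m - i, 1) can serialize as the last day of the previous month depending on the machine's timezone, which is exactly the kind of thing that differs between a local machine and a server. A minimal sketch of the date filter written with TypeORM's Between operator (assuming the operators above come from TypeORM, as their names suggest):
import { Between } from "typeorm";

// Both bounds in one operator; chaining the operators with && would
// keep only the right-hand operand.
const from = d.toISOString().substring(0, 10) + " 00:00:00";
const to = data + " 23:59:00";

const allClient = await this.clientsService.find({
  where: {
    timeAt: Between(from, to),
    userId: userId,
  },
});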

Related

Update an array element in MongoDB with NodeJS

I want to update the "paymentStatus" field inside the "ballpool" or "valorant" entry of the games array. I am using Node.js. How can I update the payment status by passing "ballpool" or "valorant" as a parameter?
{
"uniqueCode": "n5Eue",
"games": [
{
"ballpool": {
"email": "aniketshaw#gmail.com",
"name": "Aniket Shaw",
"phone": "31231231",
"ballpoolUID": "4232",
"paymentStatus": false,
"_id": "63de237567fa64e9711bb2b7",
"__v": 0
}
},
{
"valorant": {
"_id": "63de237567fa64e9711bb2b7",
"email": "aniketshaw#gmail.com",
"name": "Aniket Shaw",
"phone": "31231231",
"valorantUID": "4232",
"paymentStatus": true,
"__v": 0
}
}
],
}
Here's an example
let param = 'ballpool';
let setOperator;
let filterOperator;
if (param === 'ballpool') {
setOperator = {
'games.$[element].ballpool.paymentStatus': true,
};
filterOperator = {
"element.ballpool": { $ne: null }
};
} else if (param === 'valorant') {
setOperator = {
'games.$[element].valorant.paymentStatus': true,
};
filterOperator = {
"element.valorant": { $ne: null }
};
}
await Model.updateOne({
uniqueCode: 'foo',
}, {
$set: setOperator,
}, {
arrayFilters: [filterOperator]
});
Learn more about the $[identifier] filtered positional operator and arrayFilters in the MongoDB documentation.
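If you want to avoid the if/else chain, the same update can be built from the parameter with computed property names. A sketch, assuming the same Model as above; the setPaymentStatus helper name is mine:
async function setPaymentStatus(uniqueCode, game) {
  // game is e.g. 'ballpool' or 'valorant'; the update paths are built from it
  const setOperator = { [`games.$[element].${game}.paymentStatus`]: true };
  const filterOperator = { [`element.${game}`]: { $ne: null } };

  await Model.updateOne(
    { uniqueCode },
    { $set: setOperator },
    { arrayFilters: [filterOperator] }
  );
}

// usage: await setPaymentStatus('n5Eue', 'valorant');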

Pagination with aggregate search in mongoose

const page = 1;
const limit = 10;
const searchResultsArray = await mongoose.connection.db
.collection('game_activity')
.aggregate([
{
$match: {
game_id
}
},
{
$addFields: {
activities: {
$filter: {
input: '$activity',
as: 'activity',
cond: {
$and: condition
}
}
}
}
},
{
$project: {
activities: 1,
activity_number: { // <--- "can't read property" error points here
$size: '$activities'
}
}
},
{ $limit: 50 },
{
$facet: {
paginatedResult: [ { $sort: searchFilter }, { $skip: (page - 1) * limit } ]
}
}
])
.toArray();
const searchResultsObject = searchResultsArray[0];
if (
searchResultsObject.paginatedResult[0].activity_number === undefined ||
searchResultsObject.paginatedResult[0].activity_number == 0
) {
const result = {
no_activities: true
};
res.json(result);
} else {
const result = {
search_result: {
total_activities: searchResultsObject.paginatedResult[0].activity_number,
current_page: page,
last_page: Math.ceil(searchResultsObject.paginatedResult[0].activity_number / 10),
searched_activities: searchResultsObject.paginatedResult[0].activities
},
no_activities: false
};
res.json(result);
}
I have this .aggregate() search function and I'm trying to paginate its result. This format worked on another search, but with this .aggregate() call I have two problems.
console.log(searchResultsObject.paginatedResult[0]);
-------- result --------
{
search_result: {
total_activities: 16,
current_page: 1,
last_page: 2,
searched_activities: [
[Object], [Object],
[Object], [Object],
[Object], [Object],
[Object], [Object],
[Object], [Object],
[Object], [Object],
[Object], [Object],
[Object], [Object]
]
},
no_activities: false
}
const limit = 10 is not applied here; it returns all of the activities.
When I set const page = 2;, I get TypeError: Cannot read property 'activity_number' of undefined, and console.log(searchResultsObject.paginatedResult[0]); shows undefined.
I want to return 10 gaming activities per page, so in this case, with const page = 2; it should return the remaining 6 activities. I couldn't figure out how to fix this, since the same approach works in another .aggregate() search that I have.
------------ MongoDB Document ---------
{
"_id": {
"$oid": "601942d93aca6ee8cb300327"
},
"location_id": "ddff23323443",
"activity": [{
"activity_id": "VVINxmhRHsnMwvfT",
"created_by": {
"id": "aa332",
},
"activity_type": "league_of_legends",
"activity_info": {
"location_id": "ddff23323443",
"activity_time": "2021-02-02 05:45:00.000Z",
"game_code": "6"
},
"attendee": ["aa332"]
}, {
"activity_id": "EBZNKmsFKDgdeDz0",
"created_by": {
"id": "aa332",
},
"activity_type": "league_of_legends",
"activity_info": {
"id": "ddff23323443",
"activity_time": "2021-02-02 05:45:00.000Z",
"game_code": "6"
},
"attendee": ["aa332"]
}, {
"activity_id": "j8J1Jlk8MtWPi1HT",
"created_by": {
"id": "aa332",
},
"activity_type": "league_of_legends",
"activity_info": {
"location_id": "bvvsd33",
"activity_time": "2021-02-02 05:45:00.000Z",
"game_code": "6"
},
"attendee": ["aa332"]
}]
}
----- expectation -----
const page = 1
{
search_result: {
total_activities: 16,
current_page: 1,
last_page: 2,
searched_activities: [
[Object], [Object],
[Object], [Object],
[Object], [Object],
[Object], [Object],
[Object], [Object],
]
},
no_activities: false
}
const page = 2
{
search_result: {
total_activities: 16,
current_page: 2,
last_page: 2,
searched_activities: [
[Object], [Object],
[Object], [Object],
[Object], [Object],
]
},
no_activities: false
}
Your expectation is not entirely clear from your attempt, so I'll explain the basic pagination process:
pagination configs
const page = 1;
const limit = 10;
// If your page numbers start from 1, subtract 1
const skip = (page - 1) * limit;
$unwind deconstructs the activities array
$replaceRoot promotes each activities object to the root
$sort sorts the documents
$facet separates the result into the paginated data and the count; $skip must come before $limit
const searchResultsArray = await mongoose.connection.db.collection('game_activity').aggregate([
{ $match: { _id: game_id } },
{
$project: {
activities: {
$filter: {
input: '$activity',
as: 'activity',
cond: { $and: condition }
}
}
}
},
{ $unwind: "$activities" },
{ $replaceRoot: { newRoot: "$activities" } },
{ $sort: searchFilter },
{
$facet: {
searched_activities: [
{ $skip: skip },
{ $limit: limit }
],
total_activities: [
{ $count: "count" }
]
}
}
]).toArray();
No data validation response
// No data found
if (!searchResultsArray[0].total_activities.length) {
res.json({ no_activities: true });
return;
}
Success response
res.json({
search_result: {
total_activities: searchResultsArray[0].total_activities[0].count,
current_page: page,
last_page: Math.ceil(searchResultsArray[0].total_activities[0].count / limit),
searched_activities: searchResultsArray[0].searched_activities
},
no_activities: false
});
Combine the code above in sequence and try it.
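For reference, a sketch of the pieces combined into one handler. It assumes game_id, condition, searchFilter, and res are in scope as in the question, and it keeps the question's match on the game_id field rather than _id:
const page = 1;
const limit = 10;
const skip = (page - 1) * limit;

const searchResultsArray = await mongoose.connection.db
  .collection('game_activity')
  .aggregate([
    { $match: { game_id } },
    {
      $project: {
        activities: {
          $filter: {
            input: '$activity',
            as: 'activity',
            cond: { $and: condition }
          }
        }
      }
    },
    { $unwind: '$activities' },
    { $replaceRoot: { newRoot: '$activities' } },
    { $sort: searchFilter },
    {
      $facet: {
        searched_activities: [{ $skip: skip }, { $limit: limit }],
        total_activities: [{ $count: 'count' }]
      }
    }
  ])
  .toArray();

// No data found
if (!searchResultsArray[0].total_activities.length) {
  return res.json({ no_activities: true });
}

const total = searchResultsArray[0].total_activities[0].count;
res.json({
  search_result: {
    total_activities: total,
    current_page: page,
    last_page: Math.ceil(total / limit),
    searched_activities: searchResultsArray[0].searched_activities
  },
  no_activities: false
});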

How to add data to a List of multiple records in DynamoDB using Node.js?

I need a solution to add data to a List attribute of multiple records in DynamoDB using Node.js.
Appreciated in advance.
Scenario: I have 2 users.
user1 = {
id: '',
email: '',
sendRequest: [
],
request: [
],
friendsList: [
],
};
user2 = {
id: '',
email: '',
sendRequest: [
],
request: [
],
friendsList: [
],
};
If user1 sends a request to user2, I need to add the requested user's details to the "user1.sendRequest" list and the sender's details to the "user2.request" list.
I have tried the code below, but it only updates the 2nd item in the TransactWriteItems object.
TransactItems: [
{
Update: {
TableName: usersTabel,
Key: {
id: { S: reqBody.reqId }
},
UpdateExpression: 'SET #sendRequest = list_append(if_not_exists(#sendRequest, :empty_list), :updateValue)',
ExpressionAttributeNames: {
'#sendRequest': 'sendRequest'
},
ExpressionAttributeValues: {
':updateValue': { L: [initiatorPost] },
':empty_list': { L: [] },
},
},
Update: {
TableName: usersTabel,
Key: {
id: { S: reqBody.sub }
},
UpdateExpression: 'SET #request = list_append(if_not_exists(#request, :empty_list), :updateValue)',
ExpressionAttributeNames: {
'#request': 'request'
},
ExpressionAttributeValues: {
':updateValue': { L: [recieverPost] },
':empty_list': { L: [] },
},
}
}
]
}
return db
.transactWriteItems(TransactItems)
.promise()
.then(() => {
const callBackResponse = reqBody;
callBackResponse.status ="success"
callback(null, response(201, callBackResponse));
})
.catch((err) => response(null, response(err.statusCode, err)));
With the newly suggested approach, the code below works. Please suggest a better solution if there is one.
'use strict';
const AWS = require('aws-sdk');
const db = new AWS.DynamoDB({ apiVersion: '2019.11.21' });
const usersTabel = process.env.USERS_TABLE;
function response(statusCode, message) {
return {
statusCode: statusCode,
body: JSON.stringify(message),
"headers": {"Access-Control-Allow-Origin":"*"}
};
}
module.exports.sendRequest = (event, context, callback) => {
// const reqBody = event;
console.log("check event::::::::"+event);
console.log("reqBody.reqId"+event.reqId);
console.log("reqBody.sub"+event.sub)
const reqBody = event;
// const reqBody = JSON.parse(event.body);
if (
!reqBody.reqId ||
reqBody.reqId.trim() === '' ||
!reqBody.sub ||
reqBody.sub.trim() === '' ||
!reqBody.email ||
reqBody.email.trim() === ''
) {
return callback(
null,
response(400, {
error: 'Post must have a reqId, sub and email and they must not be empty'
})
);
}
const initiatorPost = {
M: {
createdAt: { S: new Date().toISOString() },
sub: { S: reqBody.sub },
email: { S: reqBody.email }
}
};
const recieverPost = {
M: {
createdAt: { S: new Date().toISOString() },
sub: { S: reqBody.reqId },
email: { S: reqBody.reqEmail },
friendshipStatus: { S: "pending" }
}
};
const TransactItems1 = {
TransactItems: [
{
Update: {
TableName: usersTabel,
Key: {
id: { S: reqBody.reqId }
},
UpdateExpression: 'SET #sendRequests = list_append(#sendRequests, :updateValue)',
ExpressionAttributeNames: {
'#sendRequests': 'sendRequests'
},
ExpressionAttributeValues: {
':updateValue': { L: [initiatorPost] }
},
}
}
]
};
const TransactItems2 = {
TransactItems: [
{
Update: {
TableName: usersTabel,
Key: {
id: { S: reqBody.sub }
},
UpdateExpression: 'set #myRequests = list_append(#myRequests, :updateValue)',
ExpressionAttributeNames: {
'#myRequests': 'myRequests'
},
ExpressionAttributeValues: {
':updateValue': { L: [recieverPost] }
},
}
}
]
}
return db
.transactWriteItems(TransactItems1)
.promise()
.then(() => {
return db
.transactWriteItems(TransactItems2)
.promise()
.then(() => {
const callBackResponse = reqBody;
callBackResponse.status ="success"
callback(null, response(201, callBackResponse));
})
.catch((err) => response(null, response(err.statusCode, err)));
})
.catch((err) => response(null, response(err.statusCode, err)));
};
New approach
I tested this approach and it works as expected:
var db = new AWS.DynamoDB.DocumentClient();
let updateItems = {
  TransactItems: [
    // each Update is its own element of the TransactItems array
    {
      Update: {
        TableName: usersTabel,
        Key: {
          id: reqBody.reqId
        },
        UpdateExpression: 'SET sendRequest = list_append(if_not_exists(sendRequest, :emptyValue), :updateValue)',
        ExpressionAttributeValues: {
          ':emptyValue': [],
          ':updateValue': [initiatorPost]
        }
      }
    },
    {
      Update: {
        TableName: usersTabel,
        Key: {
          id: reqBody.sub
        },
        UpdateExpression: 'SET myRequest = list_append(if_not_exists(myRequest, :emptyValue), :updateValue)',
        ExpressionAttributeValues: {
          ':emptyValue': [],
          ':updateValue': [recieverPost]
        }
      }
    }
  ]
}
return db.transactWrite(updateItems).promise().then(res => {
console.log(res)
}).catch(err => {
console.log(err, " ERROR")
});
Let me know if you run into any other issues.
Fixed the syntax and it works as expected. In a JavaScript object literal, duplicate keys overwrite one another, so only the second Update in my original TransactItems element survived; each Update has to be its own element of the TransactItems array. Below is the before (where I had the issue) and the after (the fix).
Before.
TransactItems: [
{
Update: {},
Update: {},
}
]
After
TransactItems: [
{
Update: {},
},
{
Update: {},
}
]
whole code.
const params = {
TransactItems: [
{
Update: {
TableName: usersTabel,
Key: {
id: { S: reqBody.reqId }
},
UpdateExpression: 'SET #sendRequests = list_append(#sendRequests, :updateValue)',
ExpressionAttributeNames: {
'#sendRequests': 'sendRequests'
},
ExpressionAttributeValues: {
':updateValue': { L: [initiatorPost] }
},
}
},
{
Update: {
TableName: usersTabel,
Key: {
id: { S: reqBody.sub }
},
UpdateExpression: 'SET #myRequests = list_append(#myRequests, :updateValue)',
ExpressionAttributeNames: {
'#myRequests': 'myRequests'
},
ExpressionAttributeValues: {
':updateValue': { L: [recieverPost] }
},
}
}
]
};

How to remove null, {}, and [] from a JSON object in Node.js?

The JSON format is like this:
[
[
{},
{
"Country": "Japan",
"cityName": "tokyo",
"onto": [
{
"level1": "one",
"articles": [
null,
{
"id": "114506604",
"name": "bunya3",
"abc": [
{
"filename": "attachmentsfilename3",
"size": 3
}
],
"image": {}
}
]
}
]
}
],
[
{}
]
]
You can see a few null, {} and [{}] values. How can I remove them? By the way, I am using Node.js. I have tried nnjson:
nnjson.removeNull(obj_summary);
but it does not work for objects without keys (empty {}).
If we assume that your data is always going to be an array, we can map over it and remove empty arrays and objects from the first level:
const data = [
[
{},
{
Country: 'Japan',
cityName: 'tokyo',
onto: [
{
level1: 'one',
articles: [
null,
{
id: '114506604',
name: 'bunya3',
abc: [
{
filename: 'attachmentsfilename3',
size: 3
}
],
image: {}
}
]
}
]
}
],
[{}]
]
function clean(input) {
return input
.map(item => {
// remove empty arrays
if (Array.isArray(item) && item.length === 0) {
return null
}
// Remove empty objects
if (item instanceof Object && Object.keys(item).length === 0) {
return null
}
return item
})
.filter(item => item)
}
console.log(clean(data))
I found the solution.
To remove null I used:
let retSummary = JSON.parse(stringifySummary, (k, v) => Array.isArray(v) ?
v.filter(e => e !== null) : v);
To remove {} I used
var newArray = parObj.filter(value => Object.keys(value).length !== 0);
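Both of those only clean a single level. For completeness, a recursive sketch that drops null, empty objects, and empty arrays at any depth (assuming the data is plain JSON, with no Dates or class instances):
function deepClean(value) {
  if (Array.isArray(value)) {
    // clean each element, then drop the ones that collapsed to null/undefined
    const cleaned = value.map(deepClean).filter((item) => item != null);
    return cleaned.length === 0 ? null : cleaned;
  }
  if (value !== null && typeof value === 'object') {
    const cleaned = {};
    for (const [key, val] of Object.entries(value)) {
      const cleanedVal = deepClean(val);
      if (cleanedVal != null) cleaned[key] = cleanedVal;
    }
    return Object.keys(cleaned).length === 0 ? null : cleaned;
  }
  return value;
}

// usage: a structure that is empty all the way down comes back as null
const cleaned = deepClean(data) ?? [];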

Aggregate + $project + $group + $sum + $count + $group again in MongoDB

Consider the Schema :
const mongoose = require("mongoose");
const Schema = mongoose.Schema;
const EightWeekGamePlanSchema = new Schema({
Week: {
type: Number,
required: true
},
LeadId: {
type: String,
required: true
},
SupplierGeneralId: {
type: String,
required: true
},
// ... More properties
TotalClaimsLeftToBeClaimedByClientType: {
// incresed by 1 every time it's claimed
type: Number,
required: true
},
InsertDate: {
type: Date
// default: Date.now
}
});
module.exports = EightWeekGamePlan = mongoose.model(
"eightweekgameplan",
EightWeekGamePlanSchema
);
And consider the Mongo query :
EightWeekGamePlan.aggregate(
[
// 1.Group by supplier
// 2.Within each supplier group by month & year
// Use count & sum
{
$project: {
month: { $month: "$InsertDate" },
year: { $year: "$InsertDate" }
}
},
{
$group: {
_id: {
SupplierGeneralId: "$SupplierGeneralId",
month: "$month",
year: "$year"
},
ClaimsSummary : { $sum: "$TotalClaimsLeftToBeClaimedByClientType" } ,
// TotalLeadsPerSupplierAndDate : ...
// Here I want to group again , by LeadID and count all the
// unique LeadID's
}
}
]
I want to group by SupplierGeneralId and by month + year of InsertDate,
summarize TotalClaimsLeftToBeClaimedByClientType for each month,
and then group again, this time by LeadId, counting all the unique LeadIds for each supplier (previously grouped by SupplierGeneralId, month, year).
However I'm getting
[ { _id: { month: 1, year: 2020 }, ClaimsSummary: 0 } ]
...even though there is data.
What's wrong with the pipeline, and how can I group again to get the unique LeadIds for each supplier?
Thanks
EDIT:
I've added more fields to the $project, but now I'm getting empty objects in the $push:
EightWeekGamePlan.aggregate(
[
// 1.Group by supplier
// 2.Within each supplier group by month & year
// Use count & sum
{ $sort: { SupplierGeneralId: 1 } },
{
$project: {
month: { $month: "$InsertDate" },
year: { $year: "$InsertDate" },
SupplierGeneralId: "$SupplierGeneralId",
TotalClaimsLeftToBeClaimedByClientType:
"$TotalClaimsLeftToBeClaimedByClientType"
}
},
{
$group: {
_id: {
SupplierGeneralId: "$SupplierGeneralId",
month: "$month",
year: "$year"
},
LeadsCollection: {
$push: {
LeadId: "$LeadId"
}
},
ClaimsSummary: { $sum: "$TotalClaimsLeftToBeClaimedByClientType" }
}
}
]
Output:
[
  {
    _id: {
      SupplierGeneralId: 'qCwHWFD1cBvrfPp5hdBL6M',
      month: 1,
      year: 2020
    },
    LeadsCollection: [
      {}, {}, {}, {}, {}, {}, {}, {},
      {}, {}, {}, {}, {}, {}, {}, {},
      {}, {}, {}, {}, {}, {}, {}, {},
      {}, {}, {}, {}, {}, {}, {}, {},
      {}, {}, {}, {}, {}, {}, {}, {},
      {}, {}, {}, {}, {}, {}, {}, {},
      {}, {}, {}, {}, {}, {}, {}, {},
      {}, {}, {}, {}, {}, {}, {}, {},
      {}, {}, {}, {}, {}, {}, {}, {}
    ],
    ClaimsSummary: 4
  }
]
SECOND EDIT :
EightWeekGamePlan.aggregate(
[
{ $sort: { SupplierGeneralId: 1 } },
{
$group: {
_id: {
SupplierGeneralId: "$SupplierGeneralId",
month: { $month: "$InsertDate" },
year: { $year: "$InsertDate" }
},
LeadsUploaded: {
$push: {
LeadId: "$LeadId"
}
},
Sells: { $sum: "$TotalClaimsLeftToBeClaimedByClientType" }
}
},
{
$project: {
Sells: "$Sells",
LeadsUploaded: {
$reduce: {
input: { $setUnion: "$LeadsUploaded.LeadId" },
initialValue: [],
in: {
$concatArrays: [
"$$value",
[
{
Lead_ID: "$$this"
}
]
]
}
}
}
}
}
]
You should just drop the $project stage; you're grouping right after it, so there's no real point in keeping it, and it only makes the pipeline less efficient.
Rewrite your pipeline as:
EightWeekGamePlan.aggregate(
[
// 1.Group by supplier
// 2.Within each supplier group by month & year
// Use count & sum
{ $sort: { SupplierGeneralId: 1 } },
{
$group: {
_id: {
SupplierGeneralId: "$SupplierGeneralId",
month: {"$month" : "$InsertDate"},
year: { $year: "$InsertDate" },
},
LeadsCollection: {
$push: {
LeadId: "$LeadId"
}
},
ClaimsSummary: { $sum: "$TotalClaimsLeftToBeClaimedByClientType" }
}
}
]);
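If you also need the count of unique LeadIds per supplier and month (the second part of the question), one option, sketched here and not tested against your data, is to collect the ids with $addToSet and take their $size in a follow-up $project:
EightWeekGamePlan.aggregate([
  { $sort: { SupplierGeneralId: 1 } },
  {
    $group: {
      _id: {
        SupplierGeneralId: "$SupplierGeneralId",
        month: { $month: "$InsertDate" },
        year: { $year: "$InsertDate" }
      },
      // $addToSet keeps each LeadId only once per group
      UniqueLeadIds: { $addToSet: "$LeadId" },
      ClaimsSummary: { $sum: "$TotalClaimsLeftToBeClaimedByClientType" }
    }
  },
  {
    $project: {
      ClaimsSummary: 1,
      // number of distinct LeadIds collected above
      UniqueLeadCount: { $size: "$UniqueLeadIds" }
    }
  }
]);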
