How do I get data from mongodb, manipulate it and print it - node.js

I want to connect to MongoDB and query a collection based on the filter 'category'. From the results, I want to randomly select one of the entries, concatenate it with another string, and print it to the console. I am a novice at this and can't figure out how to get the results of my query and randomly select one of the entries from the query results.
// Connecting to DB
const mongoose = require('mongoose');
//const { makeCard } = require('./utils/card');
const db = mongoose.connection;
const host = process.env.host;
console.log(host);
const dbupdate = {
    useNewUrlParser: true,
    useUnifiedTopology: true
};
mongoose.connect(host, dbupdate);
db.on('error', (err) => console.log('Error, DB not connected'));
db.on('connected', () => console.log('connected to mongo'));
db.on('disconnected', () => console.log('Mongo is disconnected'));
db.on('open', () => console.log('Connection Made!'));

const Schema = mongoose.Schema;
const cardSchema = new Schema({
    word: String,
    visual: String,
    category: String
});
const Card = mongoose.model('expressions', cardSchema);

// I want this function to return the results of the query, but it doesn't.
function getWords(ctgry) {
    const result = Card.find({ "category": ctgry }, (error, results) => {
        return results;
    });
};

function getRandomInt(min, max) {
    min = Math.ceil(min);
    max = Math.floor(max);
    return Math.floor(Math.random() * (max - min) + min);
};

function getWord(ctgry) {
    const result = getWords(ctgry);
    const num = getRandomInt(0, result.length); // this doesn't work
    return result[num];
};

console.log("Sample text: " + getWord());

What you seem to be doing here is fetching all the documents from the Card model for a particular category and picking a random one from them, which I wouldn't recommend. Not sure what data you're storing here, but what if your app grows to a point where there are a million documents in your cards collection? You wouldn't want to fetch them all into memory just to pick a random one.
A good practice while using any database is to fetch only as much data as is required. There's no way to fetch a random document from a query in MongoDB that I know of, but there is a way to do this (inspired by this answer, which I recommend you read): count the matching documents, pick a random offset, and fetch just the one document at that offset.
async function getRandomWord(ctgry) {
    // Count how many documents match the category.
    const count = await Card.countDocuments({
        "category": ctgry,
    }).exec();
    // Pick a random offset and fetch just the one document at that offset.
    const random = Math.floor(Math.random() * count);
    return Card.findOne({
        "category": ctgry,
    }).skip(random).exec();
}
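For completeness, MongoDB also ships a $sample aggregation stage that picks random documents server-side, which avoids the separate count query. A sketch against the Card model above (the category value below is just an illustration, and note that both helpers return a Promise, so they must be awaited):

async function getRandomWordViaSample(ctgry) {
    // $match narrows to the category, $sample picks 1 matching document at random.
    const [doc] = await Card.aggregate([
        { $match: { category: ctgry } },
        { $sample: { size: 1 } },
    ]);
    return doc; // undefined if the category has no documents
}

getRandomWord('greetings').then(card => console.log("Sample text: " + card.word));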


How to convert the auth response into array of objects?

I am trying to get the users via the auth function, and I have to create an Excel sheet from the response using the xlsx-populate library. I am able to convert the response into an array of objects, but since the limit is 1000 there are multiple arrays of objects, and I am not able to figure out how to combine them. In this problem I am simply fetching results using auth and trying to get the results into one array of objects. I also tried passing the objects into the Excel sheet, but that gives me a sheet with only the last 1000 responses.
const admin = require("firebase-admin");
const momentTz = require("moment-timezone");
const XlsxPopulate = require("xlsx-populate");
momentTz.suppressDeprecationWarnings = true;
const { alphabetsArray } = require("./constant");

var start = momentTz().subtract(4, "days").startOf("day").format();
var start = momentTz(start).valueOf();
const end = momentTz().subtract(1, "days").endOf("day").format();

const listAllUsers = async (nextPageToken) => {
    const [workbook] = await Promise.all([
        XlsxPopulate.fromBlankAsync()
    ]);
    const reportSheet = workbook.addSheet("Signup Report");
    workbook.deleteSheet("Sheet1");
    reportSheet.row(1).style("bold", true);
    [
        "Date",
        "TIME",
        "Phone Number"
    ].forEach((field, index) => {
        reportSheet.cell(`${alphabetsArray[index]}1`).value(field);
    });
    let count = 0;
    // List batch of users, 1000 at a time.
    const data = [];
    admin
        .auth()
        .listUsers(1000, nextPageToken)
        .then(async (listUsersResult) => {
            listUsersResult.users.forEach((userRecord) => {
                const time = userRecord.metadata.creationTime;
                const timestamp = momentTz(time).valueOf();
                // console.log(timestamp)
                if (timestamp >= 1585704530967) {
                    console.log(time);
                    let column = count + 2;
                    count++;
                    data.push(userRecord.toJSON());
                    reportSheet.cell(`A${column}`).value(time);
                    reportSheet.cell(`C${column}`).value(userRecord.phoneNumber);
                }
            });
            console.log(JSON.stringify(data)); // this is the array of objects, and I only get it after the 1000 responses
            if (listUsersResult.pageToken) {
                // List next batch of users.
                listAllUsers(listUsersResult.pageToken);
                await workbook.toFileAsync("./SignUp.xlsx");
            }
        })
        // .catch(function (error) {
        //     console.log("Error listing users:", error);
        // });
    // const datas = []
    // datas.push(data)
    // console.log(datas)
    return;
}

// Start listing users from the beginning, 1000 at a time.
listAllUsers();
and the output I am getting looks like this:
[]
[]
[]
[]
[]
I want to combine these into a single array of responses.
You have a race condition. When you perform your console.log(JSON.stringify(data)), your listUsers query is still in progress (it runs asynchronously), so you don't yet have the answer when you print the array. Thus the array is empty.
Try this (I'm not sure it's the optimal solution, as I'm not a Node.js dev):
admin
    .auth()
    .listUsers(1000, nextPageToken)
    .then(async (listUsersResult) => {
        listUsersResult.users.forEach((userRecord) => {
            const time = userRecord.metadata.creationTime;
            const timestamp = momentTz(time).valueOf();
            // console.log(timestamp)
            if (timestamp >= 1585704530967) {
                console.log(time);
                let column = count + 2;
                count++;
                data.push(userRecord.toJSON());
                reportSheet.cell(`A${column}`).value(time);
                reportSheet.cell(`C${column}`).value(userRecord.phoneNumber);
            }
        });
        // data is now fully populated for this batch, so logging it here is safe
        console.log(JSON.stringify(data));
        if (listUsersResult.pageToken) {
            // List next batch of users.
            listAllUsers(listUsersResult.pageToken);
            await workbook.toFileAsync("./SignUp.xlsx");
        }
    });
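Note that this still recurses once per batch. If the real goal is one combined array across all pages, another option is to await each page before requesting the next and accumulate as you go. A minimal sketch, assuming firebase-admin is initialized as above (the helper name is made up):

async function collectAllUsers() {
    const allUsers = [];
    let pageToken;
    do {
        // Await each page of up to 1000 users before asking for the next one.
        const result = await admin.auth().listUsers(1000, pageToken);
        allUsers.push(...result.users.map((u) => u.toJSON()));
        pageToken = result.pageToken; // undefined once the last page is reached
    } while (pageToken);
    return allUsers; // one array, however many batches it took
}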

ES6 Async/Await, ExpressJS and Postgres transactions

REVISED QUESTION
I've revised the question, in the hope of getting a clearer answer.
I'm trying to process data in ExpressJS, based on the incoming req.body and the existing data in the table.
I'm receiving a req.body that contains a JSON list of updated fields. Some of those fields are stored as JSONB in Postgres. If an incoming field is JSONB, then the form (external code) that is making the request has already run a jsonpatch.compare() to generate the list of patches, and it is these patches and not the full values that are being passed in. For any non-JSONB values, incoming values just need to be passed through to the UPDATE query.
I have a working version, as below, that pretends that the existing JSONB values in the table ARE NULL. Clearly, this is NOT what is needed; I need to pull the values from the db. The non-querying-of-current-values version, plus a bare-minimum router, looks like this:
const express = require('express')
const bodyParser = require('body-parser')
const SQL = require('sql-template-strings')
const { Client } = require('pg')
const dbConfig = require('../db')
const jsonpatch = require('fast-json-patch')
const FormRouter = express.Router()
I have some update code:
const patchFormsRoute = (req, res) => {
    const client = new Client(dbConfig)
    const { id } = req.body
    const parts = []
    const params = [id]
    // list of JSONB fields in the 'forms' table
    const jsonFields = [
        'sections',
        'editors',
        'descriptions',
    ]
    // list of all fields, including JSONB fields, in the 'forms' table
    const possibleFields = [
        'status',
        'version',
        'detail',
        'materials',
        ...jsonFields,
    ]
    // this is a DUMMY RECORD instead of the result of a client.query
    let currentRecord = { 'sections': [], 'editors': [], 'descriptions': [] }
    possibleFields.forEach(myProp => {
        if (req.body[myProp] != undefined) {
            parts.push(`${myProp} = $${params.length + 1}`)
            if (jsonFields.indexOf(myProp) > -1) {
                // JSONB field: apply the incoming patches to the current value
                let val = currentRecord[myProp]
                jsonpatch.applyPatch(val, req.body[myProp])
                params.push(JSON.stringify(val))
            } else {
                // plain field: pass the incoming value straight through
                params.push(req.body[myProp])
            }
        }
    })
    const updateQuery = 'UPDATE forms SET ' + parts.join(', ') + ' WHERE id = $1'
    client.connect()
    return client
        .query(updateQuery, params)
        .then(result => res.status(200).json(result.rowCount))
        .catch(err => res.status(400).json(err.severity))
        .then(() => client.end())
}

FormRouter.route('/')
    .patch(bodyParser.json({ limit: '50mb' }), patchFormsRoute)

exports.FormRouter = FormRouter
I promise that this is working code, which does almost what I need. However, I want to replace the dummy record with the data already in the table, fetched contemporaneously. My issue is that, because multiple clients could be updating a row at the same time (but looking at orthogonal elements of the JSONB values), I need the fetch, calc, and update to happen as a SINGLE TRANSACTION. My plan is to:
1. BEGIN a transaction
2. Query Postgres for the current row value, based on the incoming id
3. For any JSONB fields, apply the patch to generate the correct value for that field in the UPDATE statement
4. Run the UPDATE statement with the appropriate param values (either from the req.body or the patched row, depending on whether the field is JSONB or not)
5. COMMIT the transaction, or ROLLBACK on error
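One caveat worth flagging: under Postgres's default READ COMMITTED isolation, BEGIN/COMMIT alone will not stop two concurrent handlers from both reading the same row and overwriting each other's patches. Locking the row during the fetch is one way to serialize them; a hedged sketch of just that step (table and id column as above):

// Sketch only: FOR UPDATE makes any concurrent transaction that selects
// the same row wait until this transaction commits or rolls back.
await client.query('BEGIN')
const fetchResult = await client.query(
    'SELECT * FROM forms WHERE id = $1 FOR UPDATE', [id]
)
// ...apply the patches to fetchResult.rows[0] and run the UPDATE as below...
await client.query('COMMIT')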
I've tried implementing the answer from @midrizi; maybe it's just me, but the combination of awaits and plain testing of res sends the server off into hyperspace... and ends in a timeout.
In case anyone is still awake, here's a working solution to my issue.
TLDR; RTFM: A pooled client with async/await minus the pooling (for now).
const patchFormsRoute = (req, res) => {
    const { id } = req.body
    // list of JSONB fields in the 'forms' table
    const jsonFields = [
        'sections',
        'editors',
        'descriptions',
    ]
    // list of all fields, including JSONB fields, in the 'forms' table
    const possibleFields = [
        'status',
        'version',
        'detail',
        'materials',
        ...jsonFields,
    ]
    const parts = []
    const params = [id]
    ;(async () => {
        const client = new Client(dbConfig)
        await client.connect()
        try {
            // begin a transaction
            await client.query('BEGIN')
            // get the current form data from the DB
            const fetchResult = await client.query(
                SQL`SELECT * FROM forms WHERE id = ${id}`,
            )
            if (fetchResult.rowCount === 0) {
                res.status(400).json(0)
                await client.query('ROLLBACK')
            } else {
                const currentRecord = fetchResult.rows[0]
                // patch JSONB values or update non-JSONB values
                let val = []
                possibleFields.forEach(myProp => {
                    if (req.body[myProp] != undefined) {
                        parts.push(`${myProp} = $${params.length + 1}`)
                        if (jsonFields.indexOf(myProp) > -1) {
                            val = currentRecord[myProp]
                            jsonpatch.applyPatch(val, req.body[myProp])
                            params.push(JSON.stringify(val))
                        } else {
                            params.push(req.body[myProp])
                        }
                    }
                })
                const updateQuery =
                    'UPDATE forms SET ' + parts.join(', ') + ' WHERE id = $1'
                // update the record in the DB
                const result = await client.query(updateQuery, params)
                // commit the transaction
                await client.query('COMMIT')
                res.status(200).json(result.rowCount)
            }
        } catch (err) {
            await client.query('ROLLBACK')
            res.status(400).json(err.severity)
            throw err
        } finally {
            client.end()
        }
    })().catch(err => console.error(err.stack))
}

Mongoose, how to empty a collection

I have the following hapi.js server:
const Hapi = require('hapi')
const Mongoose = require('mongoose')
const Wreck = require('wreck');

const server = new Hapi.Server({
    "host": "localhost",
    "port": 3000
})

Mongoose.connect('mongodb://localhost/myDB', { useNewUrlParser: true })

const BlockModel = Mongoose.model('block', {
    height: Number,
    size: Number,
    time: Number
})

server.route({
    method: "GET",
    path: "/",
    handler: async (request, h) => {
        Mongoose.model.blocks.remove({}); // <------ This is the part of the code I intend to use to delete the collection
        const { res, payload } = await Wreck.get('https://api.url');
        let myJson = JSON.parse(payload.toString()).blocks
        console.log(myJson)
        for (let i = 0; i < myJson.length; i++) {
            var block = new BlockModel({ height: myJson[i].height, size: myJson[i].size, time: myJson[i].time });
            block.save();
        }
        console.log(myJson)
        return "test"
    }
})

server.start();
Point is, it works fine and saves the desired data to my collection, but of course the database will keep growing if I don't delete the data on each execution. So I intend to implement something similar to
db.blocks.remove({}) //where blocks is my collection
This works fine in the mongo console, but I can't find how to implement it in the code.
You can use the deleteMany operator with an empty filter.
db.collection.deleteMany({})
or with your model:
await BlockModel.deleteMany({})
To empty all collections in a db:

const collections = await mongoose.connection.db.collections();
for (let collection of collections) {
    await collection.deleteMany({})
}
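Applied to the route above, a minimal sketch of the handler might look like this (also awaiting the delete and each save, so no writes are left in flight when the handler returns):

server.route({
    method: "GET",
    path: "/",
    handler: async (request, h) => {
        await BlockModel.deleteMany({}); // empty the collection first
        const { payload } = await Wreck.get('https://api.url');
        const blocks = JSON.parse(payload.toString()).blocks;
        for (const b of blocks) {
            // await each save so nothing is left pending when we return
            await new BlockModel({ height: b.height, size: b.size, time: b.time }).save();
        }
        return "test";
    }
});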

Node.js: Query sqlite with `sqlite`

I'm trying to get the hang of Node (I mainly use Python), so I'm working on a small project to read and write data to a SQLite database.
Luckily I am having no issue writing to the database, but I cannot seem to get queries to work at all. I've tested the queries in the sql terminal and they are successful.
So far, I have something like
const fs = require("fs");
const util = require("util");
const sqlite = require("sqlite");
const Promise = require("bluebird")

// const DATABASE = ":memory:";
const DATABASE = "./database.sqlite";

function insertDataIntoDatabase(transactions, db) {
    // Write each transaction into the database.
    let sqlStatement = "INSERT INTO Trx \
        (name, address, amount, category) \
        VALUES ";
    for (var i = 0; i < transactions.length; ++i) {
        let trx = transactions[i];
        sqlStatement += util.format(
            "('%s', '%s', %d, '%s'), ",
            trx.name,
            trx.address,
            trx.amount,
            trx.category,
        );
    }
    sqlStatement = sqlStatement.substring(0, sqlStatement.length - 2);
    db.then(db => db.run(sqlStatement))
        .catch((err) => console.log(err));
}

function getTransactions(db, category) {
    // Return an array of valid transactions of a given category.
    let where = "";
    if (category) {
        where = util.format("WHERE category='%s'", category);
    }
    let sqlStatement = util.format("SELECT * from Trx %s", where);
    sqlStatement = "SELECT * from Trx"; // Trying to figure out what's happening
    console.log(sqlStatement);
    db.then(db => {
        db.all(sqlStatement)
            .then((err, rows) => {
                console.log(rows); // undefined
                console.log(err); // []
            })
    })
}

// Set up the db connection
const db = sqlite.open(DATABASE, { cached: true })
    .then(db => db.migrate({ force: 'last' }));

// Read transactions and write them to the database
fs.readFile("transactions.json", "utf8", (err, data) => {
    let transactions = JSON.parse(data).transactions;
    insertDataIntoDatabase(transactions, db);
})

// Get transaction data
getTransactions(db, 'credit');

// Close connection to DB
db.then(db => db.close());
Looking at this again, I think the issue is the async nature of Node: the query itself was successful, but at the point in time it ran, I had not yet inserted the data from the JSON file into the database, hence the empty result.
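For reference, here is a sketch of the same flow with async/await (assuming the promise-based `sqlite` v3 API used above), so each step completes before the next begins:

const fs = require("fs").promises;
const sqlite = require("sqlite");

async function main() {
    const db = await sqlite.open("./database.sqlite", { cached: true });
    await db.migrate({ force: 'last' });

    // The inserts finish before the query below ever runs.
    const data = await fs.readFile("transactions.json", "utf8");
    const transactions = JSON.parse(data).transactions;
    for (const trx of transactions) {
        // Parameterized values also avoid the quoting pitfalls of util.format.
        await db.run(
            "INSERT INTO Trx (name, address, amount, category) VALUES (?, ?, ?, ?)",
            trx.name, trx.address, trx.amount, trx.category
        );
    }

    // db.all resolves with the rows themselves, not an (err, rows) pair.
    const rows = await db.all("SELECT * FROM Trx WHERE category = ?", 'credit');
    console.log(rows);

    await db.close();
}

main().catch(console.error);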

What data type should I use to store an image with MongoDB?

I have an image encoded in base64, e.g.
data:image/jpeg;base64,/9j/4AAQSkZJRgABAgAAAQABAAD/7QCcUGhvdG9zaG9w....
How do I save this in the database? What should the type of the field be in the schema? Buffer?
The short answer is: store it as "Binary", which in a mongoose schema you can do with Buffer.
The longer answer is to demonstrate a round trip of conversion, starting with the original binary and ending back with it again. Base64 encoding/decoding is not a necessary step in most real world cases and is just here for demonstration:
1. Read an image (or any binary data) from a file
2. Base64 encode that data (just to show it can be done) - optional
3. Turn it back into binary data from Base64 (just to show it can be done) - optional
4. Store the binary data in the database
5. Read the binary data back from the database
6. Output the binary data to a new file
So the Schema Part is simple, just use Buffer:
var albumSchema = new Schema({
    name: String,
    image: Buffer
})
Then all we are going to do is follow the process and put the binary data into the property and read it back out again.
Note though that if you are coming directly from a string with a MIME type on it like:
data:image/png;base64,long-String
just use a JavaScript .split() and take the second array element for the base64 string itself:
var string = "data:image/png;base64,long-String"
var bindata = Buffer.from(string.split(",")[1], "base64"); // Buffer.from replaces the deprecated new Buffer()
Here's a listing with a complete demo in and out:
const async = require('async'),
      mongoose = require('mongoose'),
      Schema = mongoose.Schema,
      fs = require('fs');

mongoose.Promise = global.Promise;
mongoose.set('debug', true);
mongoose.connect('mongodb://localhost/test');

var albumSchema = new Schema({
    name: String,
    image: Buffer
})

const Album = mongoose.model('Album', albumSchema);

async.series(
    [
        (callback) =>
            async.each(mongoose.models, (model, callback) =>
                model.remove({}, callback), callback),
        (callback) =>
            async.waterfall(
                [
                    (callback) => fs.readFile('./burger.png', callback),
                    (data, callback) => {
                        // Convert to Base64 and print out a bit to show it's a string
                        let base64 = data.toString('base64');
                        console.log(base64.substr(0, 200));
                        // Feed our string to a buffer and then put it in the database
                        let burger = new Buffer(base64, 'base64');
                        Album.create({
                            "name": "burger",
                            "image": burger
                        }, callback)
                    },
                    // Get from the database
                    (album, callback) => Album.findOne().exec(callback),
                    // Show the data record and write out to a new file.
                    (album, callback) => {
                        console.log(album);
                        fs.writeFile('./output.png', album.image, callback)
                    }
                ],
                callback
            )
    ],
    (err) => {
        if (err) throw err;
        mongoose.disconnect();
    }
)
NOTE: The example above was originally given with asyncJS and an older mongoose API, which notably has different connection options from those shown in the more modern and current API examples. Refer to these instead for testing on current NodeJS LTS releases.
Here is a bit more modern syntax and API usage for comparison:
const fs = require('mz/fs');
const { Schema } = mongoose = require('mongoose');

const uri = 'mongodb://localhost:27017/test';
const opts = { useNewUrlParser: true };

mongoose.Promise = global.Promise;
mongoose.set('debug', true);
mongoose.set('useFindAndModify', false);
mongoose.set('useCreateIndex', true);

const albumSchema = new Schema({
    name: String,
    image: Buffer
});

const Album = mongoose.model('Album', albumSchema);

(async function() {
    try {
        const conn = await mongoose.connect(uri, opts);
        await Promise.all(
            Object.entries(conn.models).map(([k, m]) => m.deleteMany())
        )

        let data = await fs.readFile('./burger.png');
        // Convert to Base64 and print out a bit to show it's a string
        let base64 = data.toString('base64');
        console.log(base64.substr(0, 200));

        // Feed our string to a buffer and then put it in the database
        let burger = Buffer.from(base64, 'base64');
        await Album.create({ "name": "burger", "image": burger });

        // Get from the database
        // - for demo, we could have just used the return from the create() instead
        let album = await Album.findOne(); // note the await, or album would be a Query

        // Show the data record and write out to a new file.
        console.log(album);
        await fs.writeFile('./output.png', album.image)
    } catch (e) {
        console.error(e);
    } finally {
        mongoose.disconnect()
    }
})()
And the same with "plain promises", where that is either preferred or you are still using a NodeJS release without async/await support (though you really should not be, considering v6.x reached end of life in April 2019):
// comments stripped - refer above
const fs = require('mz/fs');
const { Schema } = mongoose = require('mongoose');

const uri = 'mongodb://localhost:27017/test';
const opts = { useNewUrlParser: true };

mongoose.Promise = global.Promise;
mongoose.set('debug', true);
mongoose.set('useFindAndModify', false);
mongoose.set('useCreateIndex', true);

const albumSchema = new Schema({
    name: String,
    image: Buffer
});

// the model definition was omitted in the original listing, but Album is used below
const Album = mongoose.model('Album', albumSchema);

mongoose.connect(uri, opts)
    .then(conn =>
        Promise.all(
            Object.entries(conn.models).map(([k, m]) => m.deleteMany())
        )
    )
    .then(() => fs.readFile('./burger.png'))
    .then(data => {
        let base64 = data.toString('base64');
        console.log(base64.substr(0, 200));
        let burger = Buffer.from(base64, 'base64');
        return Album.create({ "name": "burger", "image": burger });
    })
    .then(() => Album.findOne())
    .then(album => {
        console.log(album);
        return fs.writeFile('./output.png', album.image)
    })
    .catch(console.error)
    .then(() => mongoose.disconnect());
