GridFSBucket Errors - Node.js

When I try to use GridFSBucket from mongodb, chunks are created in the fs.chunks collection, but no fs.files documents are created. Is there a reason why this may be happening, or is this a bug?
This works on my local machine, and in Docker on my local machine, but not in Docker on an AWS EC2 instance.
const { MongoClient, GridFSBucket } = require('mongodb');
const crypto = require('crypto');
const dotenv = require('dotenv');
dotenv.config();

const url = process.env.MONGO_URI;
const dbName = 'tree_club';
const opts = { useUnifiedTopology: true };

const getPosts = async () => {
  const client = await MongoClient.connect(url, opts);
  const db = client.db(dbName);
  const posts = await db.collection('post').find({}).sort({ created: -1 }).toArray();
  client.close();
  return posts;
};
const createPost = async (parent, { markdown, file }) => {
  const client = await MongoClient.connect(url, opts);
  const db = client.db(dbName);
  const bucket = new GridFSBucket(db);
  const fileId = file ? crypto.randomBytes(64).toString('hex') : undefined;
  const { ops: [post] } = await db.collection('post').insertOne({
    markdown,
    fileId,
    created: Date.now()
  });
  if (file) {
    await new Promise(async (resolve, reject) => {
      const { createReadStream } = await file;
      const readStream = createReadStream();
      const writeStream = bucket.openUploadStreamWithId(fileId, fileId);
      readStream.pipe(writeStream);
      readStream.on('error', () => {
        console.log('error');
      });
      readStream.on('finish', () => {
        resolve();
        client.close();
      });
    });
  } else {
    client.close();
  }
  return post;
};
const pipeImage = async (req, res) => {
  const client = await MongoClient.connect(url, opts);
  const db = client.db(dbName);
  const bucket = new GridFSBucket(db);
  try {
    const readStream = bucket.openDownloadStream(req.params.fileId);
    readStream.pipe(res);
    readStream.on('finish', () => {
      client.close();
    });
  } catch (err) {
    res.status(400).json('Image was not found');
  }
};

module.exports = { getPosts, createPost, pipeImage };

I searched for an answer to this question, and it turns out there is a known bug with streams in Node.js version 13.
I downgraded my Node.js version to 12 and the code above worked perfectly.
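A side note on the code above, independent of the Node version: the promise is resolved on readStream.on('finish'), but readable streams emit 'end', not 'finish'. It is the GridFS upload stream (a writable) that emits 'finish' once the fs.files document has been written. A minimal sketch of the upload step that waits on the write stream instead (reusing the bucket, fileId, and readStream variables from the question; this is an illustration, not the accepted fix):
// Sketch: resolve once the GridFS write stream finishes, i.e. once the
// fs.files document has actually been created.
await new Promise((resolve, reject) => {
  const writeStream = bucket.openUploadStreamWithId(fileId, fileId);
  readStream.on('error', reject);    // reading the uploaded file failed
  writeStream.on('error', reject);   // writing to GridFS failed
  writeStream.on('finish', resolve); // fs.files document exists at this point
  readStream.pipe(writeStream);
});
client.close();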

Related

How to perform an HTTP post request using express on Cloud Functions for Firebase using busboy

Hi, I am trying to insert data into the database using a POST request, but the data is not being inserted.
On further investigation I found that busboy needs to be used to upload form-data (including images) in Firebase Functions, but I have not been able to find a working way to combine busboy with a POST route.
I would appreciate any help resolving this issue.
Below is the code for reference.
app.js
const express = require('express');
//const router = true;
const router = new express.Router();
const userInfo = require('../models/userProfile');
const multer = require('multer');
var fs = require('fs');
var path = require('path');
var JSONStream = require('JSONStream');
const planCheck = require('../models/planDetails');
const login = require('../models/login_creditionals');
const { Storage } = require('@google-cloud/storage');
const { format } = require('util');
const busboy = require('busboy');

const storage = new Storage({
  projectId: "biz-1",
  keyFilename: "/Users/akm/pixNodes/functions/pixbiz-65a402.json"
});
const bucket = storage.bucket("gs://biz-1");

const upload = multer({
  storage: multer.memoryStorage(),
  limits: {
    fileSize: 10 * 1024 * 1024 // 10 MB upload limit
  }
});
const uploadImageToStorage = (file) => {
  return new Promise((resolve, reject) => {
    if (!file) {
      reject('No image file');
    }
    let newFileName = `${Date.now() + path.extname(file.originalname)}`;
    let fileUpload = bucket.file(newFileName);
    const blobStream = fileUpload.createWriteStream({
      metadata: {
        contentType: file.mimetype
      }
    });
    blobStream.on('error', (error) => {
      reject('Something is wrong! Unable to upload at the moment.');
    });
    blobStream.on('finish', () => {
      // The public URL can be used to directly access the file via HTTP.
      const url = format(`https://storage.googleapis.com/${bucket.name}/${fileUpload.name}`);
      resolve(url);
    });
    blobStream.end(file.buffer);
  });
};
router.post('/userprofile/check', upload.single('profile_pic'), async (req, res) => {
  var reqFiles;
  var reqFilesUrl;
  reqFiles = req.file;
  if (reqFiles) {
    // const imagerUrl = await uploadImageToStorage(reqFiles)
    reqFilesUrl = imagerUrl;
    console.log(reqFilesUrl);
    const notify = new userInfo({
      userId: req.body.userId,
      mobile_number: req.body.mobileNumber,
      profile_pic: reqFilesUrl
    });
    try {
      console.log('success insert data');
      await notify.save((err, post) => {
        if (err) {
          console.log(err);
        }
        //console.log('data saved', post);
        res.status(201).send(post);
      });
      // });
      // res.status(201).send();
      console.log('201');
    } catch (e) {
      //res.status(401);
      return res.send(e);
    }
  }
});
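For reference, the usual pattern inside a Firebase HTTPS function is to feed req.rawBody to busboy directly, since Cloud Functions consumes the request stream before Express middleware such as multer ever sees the file. A rough sketch, assuming the busboy 1.x API and the bucket defined above (handleUpload is a hypothetical handler name):
const handleUpload = (req, res) => {
  // busboy 1.x exports a factory function rather than a class
  const bb = busboy({ headers: req.headers });
  const writes = [];

  bb.on('file', (name, fileStream, info) => {
    // stream each uploaded file straight into the storage bucket
    const gcsFile = bucket.file(`${Date.now()}${path.extname(info.filename)}`);
    const out = gcsFile.createWriteStream({ metadata: { contentType: info.mimeType } });
    fileStream.pipe(out);
    writes.push(new Promise((resolve, reject) => {
      out.on('finish', resolve).on('error', reject);
    }));
  });

  bb.on('close', async () => {
    await Promise.all(writes);
    res.status(201).send('uploaded');
  });

  // Cloud Functions has already read the body; replay the raw bytes for busboy
  bb.end(req.rawBody);
};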

Connecting to mongodb in nodejs

I am using MongoClient for the first time to connect to MongoDB in Node.js. In each JS file I use it like the following:
'use strict'
// part 1
const { MongoClient } = require('mongodb');
const dbconfig = require('../config/index');

const Mongodb = {
  client: new MongoClient(dbconfig.product.dbUrl, {
    useNewUrlParser: true,
    useUnifiedTopology: true,
  }),
  oper: null,
  db: null,
  dbName: '',
};

const dbConnect = async (dbName = dbconfig.product.dbName) => {
  if (Mongodb.oper) {
    if (dbName !== Mongodb.dbName) {
      Mongodb.db = Mongodb.client.db(dbName);
      Mongodb.dbName = dbName;
    }
    return Mongodb.db;
  }
  Mongodb.oper = await Mongodb.client.connect();
  return await dbConnect(dbName);
};
// part 2
const db = await dbConnect();
let info = await db.collection('someCollection').find({}).toArray();
// more code
The situation is that there is a lot of duplicated code, such as part 1, and I want to move part 1 into its own file and import it where needed. I have no idea how to do this; please give me some ideas, thank you.
You only need to connect to the db once. My advice would be to google and try Mongoose; I find it easier. Look at some examples and read the docs.
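For illustration, a minimal sketch of the Mongoose approach (the URI and the Post model are placeholders, not part of the question):
const mongoose = require('mongoose');

(async () => {
  // placeholder URI; in practice read it from config or env
  await mongoose.connect('mongodb://localhost:27017/mydb');

  // a hypothetical model, just for illustration
  const Post = mongoose.model('Post', new mongoose.Schema({ markdown: String }));
  const posts = await Post.find({});
  console.log(posts);
})();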
Create a JS module that exports the connect function using module.exports, then require it where necessary. Something like this:
// mongo.js
const { MongoClient } = require('mongodb');
const dbconfig = require('../config/index');

const client = new MongoClient(dbconfig.product.dbUrl, {
  useNewUrlParser: true,
  useUnifiedTopology: true
});

let databasePromise;

async function _connect() {
  try {
    await client.connect();
  } catch (e) {
    console.error(e);
    await close();
    throw e;
  }
  return client.db();
}

function connect() {
  // memoize the promise so concurrent callers share one connection attempt
  if (!databasePromise) {
    databasePromise = _connect();
  }
  return databasePromise;
}

async function close() {
  await client.close();
  databasePromise = undefined;
}

function isConnected() {
  return client.isConnected();
}

module.exports = {
  connect,
  close,
  isConnected,
};
// index.js
const { connect } = require('./mongo');

(async () => {
  const db = await connect();
  const results = await db.collection('collection1').find({}).toArray();
  console.log(results);
})();

// file2.js
const { connect } = require('./mongo');

(async () => {
  const db = await connect();
  const results = await db.collection('collection2').find({}).toArray();
  console.log(results);
})();
Note, this code is not tested so it might need adjustments.
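If you also want to release the connection when the process shuts down, a small sketch using the close() export above (the signal handling is an assumption; adjust it to your runtime):
// shutdown: close the shared client when the process is interrupted
const { close } = require('./mongo');

process.on('SIGINT', async () => {
  await close();
  process.exit(0);
});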

Redis not cache all data in ec2 server and data from mongo atlas

I have used Redis, but it does not work as expected. The first time I execute the find query, the results come from Mongoose (about 19 MB of data), but after caching, Redis returns only one document. I don't know where I am going wrong; the configuration file is shared below.
const mongoose = require('mongoose');
const { createClient } = require('redis');
const util = require('util');

let client;
(async () => {
  client = createClient();
  await client.connect({
    legacyMode: true
  });
  await client.ping();
})();

const exec = mongoose.Query.prototype.exec;

mongoose.Query.prototype.cache = function (options = { time: 36000 }) {
  this.useCache = true;
  this.time = options.time;
  this.hashKey = JSON.stringify(options.key || this.mongooseCollection.name);
  return this;
};

mongoose.Query.prototype.exec = async function () {
  if (!this.useCache) {
    return await exec.apply(this, arguments);
  }
  const key = JSON.stringify({
    ...this.getQuery(),
  });
  const cacheValue = await client.get(this.hashKey, key);
  console.log(cacheValue, "Cache Value");
  if (cacheValue) {
    const doc = JSON.parse(cacheValue);
    console.log('Response from Redis', doc, this.model);
    return Array.isArray(doc) ? doc.map((d) => new this.model(d)) : new this.model(doc);
  }
  const result = await exec.apply(this, arguments);
  client.set(this.hashKey, key, JSON.stringify(result));
  client.expire(this.hashKey, this.time);
  return result;
};

module.exports = {
  clearKey(hashKey) {
    client.del(JSON.stringify(hashKey));
  },
};
Here is the query:
Products.find({ is_deleted: false })
  .select(select)
  .populate('type', 'type')
  .populate('shape', 'type')
  .populate('category', 'type is_pain_scale name')
  .sort({ updated_at: 1 })
  .lean()
  .cache({
    time: 36000
  });
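One likely culprit in the snippet above: client.get and client.set are plain string commands in node-redis v4, so every query against the same collection overwrites the single hashKey entry, leaving only the last result. The usual fix is to store one field per query inside a Redis hash. A sketch of the cache read/write path using the node-redis v4 hash commands (same hashKey and key variables as above; this is an assumption about the intent, not tested against this code):
// read: one hash per collection (this.hashKey), one field per query (key)
const cacheValue = await client.hGet(this.hashKey, key);
if (cacheValue) {
  const doc = JSON.parse(cacheValue);
  return Array.isArray(doc) ? doc.map((d) => new this.model(d)) : new this.model(doc);
}

// miss: run the query, then store the result under its own field so
// other queries on the collection are not overwritten
const result = await exec.apply(this, arguments);
await client.hSet(this.hashKey, key, JSON.stringify(result));
await client.expire(this.hashKey, this.time);
return result;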

Nextjs MongoDB template: clientPromise is not a function

I am using Next.js's MongoDB example template and it comes with a MongoDB util function:
import { MongoClient } from 'mongodb';

const uri = process.env.MONGODB_URI;
const options = {};

let client;
let clientPromise;

if (!process.env.MONGODB_URI) {
  throw new Error('Please add your Mongo URI to .env.local');
}

if (process.env.NODE_ENV === 'development') {
  // In development, reuse the promise across module reloads
  if (!global._mongoClientPromise) {
    client = new MongoClient(uri, options);
    global._mongoClientPromise = client.connect();
  }
  clientPromise = global._mongoClientPromise;
} else {
  client = new MongoClient(uri, options);
  clientPromise = client.connect();
}

export default clientPromise;
Here is how I am using it, and I am certain that I am using it incorrectly.
import { clientPromise } from '../../lib/mongodb';

export default async (req, res) => {
  try {
    const db = await clientPromise();
    const users = await db.collection('users').find({}).limit(20).toArray();
    res.json(users);
  } catch (e) {
    console.error(e);
    res.json({ error: 'Not connected!' });
  }
};
The error is "TypeError: (0 , lib_mongodb__WEBPACK_IMPORTED_MODULE_0_.clientPromise) is not a function"
You're exporting clientPromise, which is the result of client.connect(). At this point you have already "triggered" the connection function; now all you have to do is wait on that promise.
So instead of:
const db = await clientPromise();
you should do:
const connection = await clientPromise; // this is a client connection, not a db
const db = connection.db();
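Note also that the util file uses export default clientPromise, while the handler does a named import ({ clientPromise }), which resolves to undefined and is what actually produces the "is not a function" TypeError. A sketch of the corrected handler, assuming the util file shown above:
import clientPromise from '../../lib/mongodb'; // default import, no braces

export default async (req, res) => {
  try {
    const client = await clientPromise;  // wait for the connected client
    const db = client.db();              // db named in the connection string
    const users = await db.collection('users').find({}).limit(20).toArray();
    res.json(users);
  } catch (e) {
    console.error(e);
    res.json({ error: 'Not connected!' });
  }
};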

Delete .files and .chunk with GridFSBucket

I'm trying to delete both the .files and .chunks data, but all the posts I have found are either outdated or do not apply to my issue.
This is my backend route:
const mongoose = require("mongoose");
const config = require("config");
const db = config.get("mongoURI");

let gfs;
const conn = mongoose.createConnection(db);
conn.once("open", () => {
  gfs = new mongoose.mongo.GridFSBucket(conn.db, {
    bucketName: "photos"
  });
});

router.delete('/:imageID', async (req, res) => {
  gfs.delete({ _id: req.params.imageID, root: "photos" }, function (error) {
    test.equal(error, null);
  });
});
Any ideas?
Solved! To successfully delete both the GridFS .files and .chunks documents, just find the ObjectId and call gfs.delete(obj_id).
Code:
router.delete("/:imageID", auth, async (req, res) => {
  try {
    const post = await Post.findOne({ image: req.params.imageID });
    console.log(post);
    if (post.user != req.user.id) {
      res.status(401).send("Invalid credentials");
    }
    // Here:
    const obj_id = new mongoose.Types.ObjectId(req.params.imageID);
    gfs.delete(obj_id);
    await post.remove();
    res.json("successfully deleted image!");
  } catch (err) {
    console.error(err.message);
    res.status(500).send("Server Error");
  }
});
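For what it's worth, GridFSBucket.delete removes the photos.files document and all of its photos.chunks in one call, and the driver returns a promise when no callback is passed, so it may be safer to await it so that a failed delete lands in the catch block. A small sketch of that line (same variables as above):
// await the delete so errors (e.g. file not found) hit the catch block
const obj_id = new mongoose.Types.ObjectId(req.params.imageID);
await gfs.delete(obj_id); // removes the photos.files doc and its photos.chunks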
