File not found error when using GridFSBucket openDownloadStream - node.js

I am able to upload a file using openDownloadStream of GridFSBucket and see that the file is uploaded and visible under songs.files chunks. But for some reason, get the following error while trying to download it -
Caught exception: Error: FileNotFound: file def1.txt was not found
My code is -
var express = require('express');
var gridModule = express.Router();
var mongoose = require('mongoose');
var fs = require('fs');
gridModule.post('/', (req, res) => {
console.log("::::grid");
//const gridfs = new mongoose.mongo.GridFSBucket(mongoose.connection.db);
//const writeStream = gridfs.openUploadStream('test.dat');
var gridfs = new mongoose.mongo.GridFSBucket(mongoose.connection.db, {
chunkSizeBytes: 1024,
bucketName: 'songs'
});
fs.createReadStream('./def.txt').
pipe(gridfs.openUploadStream('def1.txt')).
on('error', function (error) {
assert.ifError(error);
}).
on('finish', function () {
console.log('done!');
process.exit(0);
});
});
gridModule.get('/', (req, res) => {
var gridfs = new mongoose.mongo.GridFSBucket(mongoose.connection.db, {
chunkSizeBytes: 1024,
bucketName: 'songs'
});
/* var bucket = new mongodb.GridFSBucket(db, {
chunkSizeBytes: 1024,
bucketName: 'songs'
}); */
gridfs.openDownloadStream('def1.txt').
pipe(fs.createWriteStream('./def1.txt')).
on('error', function(error) {
console.log(":::error");
assert.ifError(error);
}).
on('finish', function() {
console.log('done!');
process.exit(0);
});
});
module.exports = gridModule;
I tried using ObjectId id as well but same error. Anyone any guesses what I may be doing wrong here?
Note - Code may not seem optimized here like declaring bucket twice, kindly ignore it for now as I will correct it once it works.

According to the API doc here, in order to use filename as argument you should use
openDownloadStreamByName(filename, options)
not openDownloadStream. openDownloadStream takes id
of the file.

Another possible explanation for this, if you're already calling openDownloadStream and still experiencing the FileNotFound error, and you are 100% the id is correct, is that you didn't pass an ObjectId type.
In my case, I was passing an id string instead of an id as an ObjectId.
bucket.openDownloadStream(mongoose.Types.ObjectId(id));
vs
bucket.openDownloadStream(id);

Related

AWS DAX Client promise() Error: Request object already used

Thank you! for taking time to read this post.
I an new to JS and stuck in a callback issue related to promise() when querying AWS DAX. There is a post (using the .promise() call on a dax-services dynamodb document client returns error: Error: ValidationException: Request object already used) related to this issue but am not able to follow the answer.
I also visited below page but got stuck as the page does not provide a sample for client.send(command)
https://www.npmjs.com/package/#aws-sdk/client-dax
I keep getting below error:
DaxClientError: ValidationException: Request object already used.
Can you please help correcting below code and with explanation to the solution?
I will truly appreciate your help. I spent an entire day on this but did not make any progress.
const express = require('express');
const AWS = require('aws-sdk');
const AmazonDaxClient = require('amazon-dax-client');
var region = "us-west-2";
AWS.config.update({
region: region
});
const myendpoint = "daxs://mydax.xxxx.dax-clusters.us-west-2.amazonaws.com";
const dax = new AmazonDaxClient({endpoints: [myendpoint], region: region});
// If using AWS.DynamoDB.DocumentClient ...
const doc = new AWS.DynamoDB.DocumentClient({service: dax});
// Constants
const PORT = 8080;
const HOST = '0.0.0.0';
// App
const app = express();
app.get('/', (req, res) => {
res.send('Hello World');
});
app.get('/readreview', async (req, res) => {
try{
var myresult='empty';
var params = {
TableName: 'test',
Key:{
"id": 'p12',
"key": 'description'
}
};
await doc.get(params, function(err, data) {
if (err) {
console.error("Unable to read item. Error JSON:", JSON.stringify(err, null, 2));
} else {
myresult=data;
console.log("#########"+data);
}
}).promise();
res.send(myresult);
}
catch (e) {
console.log(e);
}
});
app.listen(PORT, HOST);
console.log(`Running on http://${HOST}:${PORT}`);
Thank you!

Piping/streaming file from memory to bucket in Firebase/GCS

I'm trying to generate multiple documents and pack them together to a zip and upload it to Google Cloud Storage via Firebase (using default bucket).
So far:
generating multiple documents works
packing them together doesn't work (I get empty zip and I'm using ArchiverJS)
uploading to the bucket (it's just empty) works
Seems like to upload a file I need to save it to temporary folder within Firebase Function, but I can't find any solution how to empty it after using. So I wanted to use streams, but then I was warned that it's a bad idea because checksum and such.
On the other hand .save() function let's save arbitrary data. It was specifically requested here, but really doesn't seem to work (at least for me).
Also, ArchiverJS seems to also let use of streams.
So, theoretically, it should all work nicely. But it's not so I hope someone else knows better.
const express = require('express')
var router = express.Router()
var archiver = require('archiver')
var admin = require("firebase-admin");
var serviceAccount = require("../servicekey.json")
admin.initializeApp({
credential: admin.credential.cert(serviceAccount),
databaseURL: "https://myName.firebaseio.com",
storageBucket: "myName.appspot.com"
})
var bucket = admin.storage().bucket()
const {
convertCSVtoJSON,
generateDocuments,
generateDocx,
isCorrectTemplateFileType
} = require('./generateServices')
router.post('/', async (req, res) => {
try {
if(!isCorrectTemplateFileType(req.files.template))
return res.status(403).send({
message: 'Wrong file type. Please provide .docx file.'
})
const template = req.files.template.data
const data = await convertCSVtoJSON(req.files.data1)
let zip = archiver('zip')
zip.on('warning', function(err) {
console.log(err)
});
zip.on('error', function(err) {
res.status(500).send({error: err.message})
});
zip.on('entry', function(ars) {
// console.log(ars)
});
zip.on('end', function() {
console.log('Archive wrote %d bytes', zip.pointer())
});
// res.attachment('archive-name.zip')
// zip.pipe(output)
// zip.pipe(res)
data.forEach((docData, index) => {
let buff = generateDocx(template, docData, 'title')
zip.append(buff, { name: `${index}.docx` })
})
zip.finalize()
console.log(zip)
const file = bucket.file("pliki.zip") // nazwa do zmiany
file.save(zip, (err) => {
if (!err) {
console.log("cool");
} else {
console.log("error " + err);
}
});
res.sendStatus(201)
} catch (error) {
console.log(error)
res.send(error)
}
})
module.exports = router

NodeJS Stream badly returned via async/await

I am trying to check if an image exists in a folder.
If it exists I want to pipe its stream to res (I'm using Express)
If it does not exist I want to do another thing.
I created an async function that is supposed to either return the image's stream if it exists or false if it doesn't.
I get a stream when I do it but I get an infinite load on the browser, as if there was an issue with the stream.
Here is the minimal reproduction I could have :
Link to runnable code
const express = require('express');
const path = require('path');
const fs = require('fs');
const app = express();
app.get('/', async (req, res) => {
// Check if the image is already converted by returning a stream or false
const ext = 'jpg';
const imageConvertedStream = await imageAlreadyConverted(
'./foo',
1,
'100x100',
80,
ext
);
// Image already converted, we send it back
if (imageConvertedStream) {
console.log('image exists');
res.type(`image/${ext}`);
imageConvertedStream.pipe(res);
return;
} else {
console.log('Image not found');
}
});
app.listen(3000, () => {
console.log('Server started on port 3000');
});
async function imageAlreadyConverted(
basePath,
id,
size,
quality,
extWanted
) {
return new Promise(resolve => {
// If we know the wanted extension, we check if it exists
let imagePath;
if (extWanted) {
imagePath = path.join(
basePath,
size,
`img_${id}_${quality}.${extWanted}`
);
} else {
imagePath = path.join(basePath, size, `img_${id}_${quality}.jpg`);
}
console.log(imagePath);
const readStream = fs.createReadStream(imagePath);
readStream.on('error', () => {
console.log('error');
resolve(false);
});
readStream.on('readable', () => {
console.log('readable');
resolve(readStream);
});
});
}
95% of my images will be available and I need performance, I suppose checking with fs.stats and then creating the stream is taking longer than trying to create the stream and handling the error.
The issue was with the "readable" event. Once I switched to the "open" event, everything is fine.

lwip.open doesn't work after mongoose findOne

So I have mongoose, multer and lwip (they are required from the top part).
var express = require('express');
var router = express.Router();
var mongoose = require('mongoose');
var jwt = require('jsonwebtoken');
var Users = require('../models/users.js');
var multer = require('multer');
var mime = require('mime');
var lwip = require('lwip');
If I comment the Users.findOne part, the image is cropped as I want it to be cropped. But if I uncomment it the lwip part stops working, though no errors are thrown. It just doesn't enter lwip.open().
router.post('/image', upload.single('file'), function (req, res) {
//This part works always.
Users.findOne({userid: req.body.userid}, function (err, user) {
var imgpath = req.file.path.split("public\\")[1];
user.user_photos.push(imgpath);
user.save(function (err) {
if (err)
console.log('error');
else
console.log('success');
});
});
//If I remove the top part, this part will work too.
lwip.open(req.file.path, function(err, image){
image.batch()
.crop(200,200)
.writeFile(req.file.path, function(err) {
if (err)
console.log('error');
else
console.log('success');
});
});
});
You might need my multer config too, so here it is:
var storage = multer.diskStorage({
destination: function (req, file, cb) {
cb(null, './public/uploads/')
},
filename: function (req, file, cb) {
var extension;
if (mime.extension(file.mimetype) == "jpeg") {
extension = "jpg";
}
else {
extension = mime.extension(file.mimetype);
}
cb(null, file.fieldname + '-' + Date.now() + '.' + extension);
}
});
Can you help me to figure out what the problem is here?
They're both async functions. How can you guarantee both are done before the function exits? I recommend using a promise library like Bluebird. With it you can run multiple async functions at the same time and specify what happens when they both return.
Don't forget to 'promisify' any libraries that are used that you want to treat as promises. You're code will look something like:
my route('blah', function (){
return Promise.all([ myfunct1, myfunct2], (retval) => { return {f1val: retval[1], f2val: retval[2]}})
I know some asshat is going to come along and take my answer and write out the code for you so that all you have to do is copy paste it, but I really do hope that you take the time to learn WHY and HOW it works if you do not already know.

Why gridfs get isn't working on file id (ObjectId) only by filename

I'm using nodejs mongodb mongoose and gridfs.
when I try to get a file by it's filname everthing is working great by if i want to get it by id i get
Error: The file you wish to read does not exist.
I the following code the console.log("res.pic_id : " + res.pic_id) i get the correct ObjectId.
Here's the code :
var GridFS = require('GridFS').GridFS;
var myFS = new GridFS('db');
var fs = require('fs')
var Profile = db.model('Profile');
Profile.findOne({'_id' : clientID},['_id', 'username','pic_id','pic_filename'],function(err, res){
if (err) {
console.log("ERROR serching user info: " + err);
callback(JSON.stringify(JSONRes(false, err)));
}
else {
if (res) {
console.log("res.pic_id : " + res.pic_id);
myFS.get(res.pic_id,function(err,data){
if (err)
console.log("ERROR "+err)
else {
callback(data);
}})
};
}
else {
callback(JSON.stringify(JSONRes(false, err)));
}
}
})
Thank you!
I had a similar problem. The issue turned out to be that I was using the string representation of an ObjectID instead of the real ObjectID. Instead of this:
var gridStore = new GridStore(db, '51299e0881b8e10011000001', 'r');
I needed to do this:
var gridStore = new GridStore(db, new ObjectID('51299e0881b8e10011000001'), 'r');
You have to either store it as a file name or object.id as primary key. The best way is to store it with ObjectID as an identifier and then add the filename to the metadata and query using that.
Look at the third example from the documentation (this is in the case of the native driver with lies under mongoose)
http://mongodb.github.com/node-mongodb-native/api-generated/gridstore.html#open
You can create a Mongodb Object using mongoose:
const mongoose = require('mongoose');
const fileId = new mongoose.mongo.ObjectId(req.params.id);`
Now you can get files and do whatever other stuff using gridfs and fileID, ex:
let gfs = Grid(mongoose.createConnection(mongoURI), mongoose.mongo);
app.get('URI', function(req, res){ //...
gfs.files.findOne({_id: fileId}, //callback...
)
})
That worked just fine for me.

Resources