I'm trying to generate multiple documents, pack them together into a zip, and upload it to Google Cloud Storage via Firebase (using the default bucket).
So far:
generating multiple documents works
packing them together doesn't work (I get an empty zip; I'm using ArchiverJS)
uploading to the bucket works (though the uploaded zip is just empty)
It seems that to upload a file I need to save it to a temporary folder within the Firebase Function, but I can't find any solution for emptying that folder afterwards. So I wanted to use streams instead, but then I was warned that it's a bad idea because of checksums and such.
On the other hand, the .save() function lets you save arbitrary data. It was specifically requested here, but it really doesn't seem to work (at least for me).
Also, ArchiverJS seems to support streams as well.
So, theoretically, it should all work nicely. But it doesn't, so I hope someone else knows better.
const express = require('express')
var router = express.Router()
var archiver = require('archiver')
var admin = require("firebase-admin");
var serviceAccount = require("../servicekey.json")

admin.initializeApp({
  credential: admin.credential.cert(serviceAccount),
  databaseURL: "https://myName.firebaseio.com",
  storageBucket: "myName.appspot.com"
})

var bucket = admin.storage().bucket()

const {
  convertCSVtoJSON,
  generateDocuments,
  generateDocx,
  isCorrectTemplateFileType
} = require('./generateServices')

router.post('/', async (req, res) => {
  try {
    if (!isCorrectTemplateFileType(req.files.template))
      return res.status(403).send({
        message: 'Wrong file type. Please provide .docx file.'
      })

    const template = req.files.template.data
    const data = await convertCSVtoJSON(req.files.data1)

    let zip = archiver('zip')
    zip.on('warning', function(err) {
      console.log(err)
    });
    zip.on('error', function(err) {
      res.status(500).send({ error: err.message })
    });
    zip.on('entry', function(ars) {
      // console.log(ars)
    });
    zip.on('end', function() {
      console.log('Archive wrote %d bytes', zip.pointer())
    });

    // res.attachment('archive-name.zip')
    // zip.pipe(output)
    // zip.pipe(res)

    data.forEach((docData, index) => {
      let buff = generateDocx(template, docData, 'title')
      zip.append(buff, { name: `${index}.docx` })
    })

    zip.finalize()
    console.log(zip)

    const file = bucket.file("pliki.zip") // name to be changed
    file.save(zip, (err) => {
      if (!err) {
        console.log("cool");
      } else {
        console.log("error " + err);
      }
    });

    res.sendStatus(201)
  } catch (error) {
    console.log(error)
    res.send(error)
  }
})

module.exports = router
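For reference, a sketch of the streaming approach (untested, reusing the same bucket, template, and data as above): .save() expects a string or Buffer, so passing the archiver object to it produces an empty file. Piping the archive into the bucket file's write stream avoids both the problem and the temp folder:

// Sketch: pipe the archive into the bucket instead of passing the
// archiver object to .save()
const file = bucket.file('pliki.zip')
const output = file.createWriteStream({ metadata: { contentType: 'application/zip' } })

output.on('error', (err) => console.log(err))
output.on('finish', () => console.log('Upload finished'))

const zip = archiver('zip')
zip.on('error', (err) => console.log(err))
zip.pipe(output)

data.forEach((docData, index) => {
  zip.append(generateDocx(template, docData, 'title'), { name: `${index}.docx` })
})

// finalize() flushes the archive; the upload is complete once the
// write stream emits 'finish'
zip.finalize()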
Related
I built a Node.js server to act as an adapter server, which, upon receiving a POST request containing some data, extracts the data from the request body and then forwards it to a few other external servers. Finally, my server sends back a response consisting of the responses from each of the external servers (success/fail).
If there's only one endpoint to forward to, it seems fairly straightforward. However, when I have to forward to more than one server, I have to rely on things like Promise.all(), which has fail-fast behaviour. That means if one promise is rejected (an external server is down), the whole Promise.all() rejects immediately and I can't collect the results from the rest of the servers.
Maybe this isn't the exact solution, but what I'm posting could be a workaround for your problem.
A few days back I had the same problem when I wanted to implement API versioning. Here is the solution I implemented; please have a look.
Architecture Diagram
Let me explain this diagram
The diagram shows the initial server configuration: every API request that comes in is passed to the "index.js" file inside the release directory.
index.js (in release directory)
const express = require('express');
const fid = require('./core/file.helper');
const router = express.Router();

fid.getFiles(__dirname, './release').then(releases => {
  releases.forEach(release => {
    // release = release.replace(/.js/g,'');
    router.use(`/${release}`, require(`./release/${release}/index`))
  })
})
module.exports = router
Code snippet for helper.js:
// requiring path and fs modules
const path = require('path');
const fs = require('fs');

module.exports = {
  getFiles: (presentDirectory, directoryName) => {
    return new Promise((resolve, reject) => {
      // joining path of directory
      const directoryPath = path.join(presentDirectory, directoryName);
      // passing directoryPath and callback function
      fs.readdir(directoryPath, function (err, files) {
        // console.log(files);
        // handling error; return so we don't also call resolve below
        if (err) {
          console.log('Unable to scan directory: ' + err);
          return reject(err)
        }
        // listing all files using forEach
        // files.forEach(function (file) {
        //   // Do whatever you want to do with the file
        //   console.log(file);
        // });
        resolve(files)
      });
    })
  }
}
Now, from this index file, the index.js inside each version folder is mapped.
Here is the code for the "index.js" inside v1, v2, etc.:
const express = require('express');
const mongoose = require('mongoose');
const fid = require('../../core/file.helper');
const dbconf = require('./config/datastore');
const router = express.Router();

// const connection_string = `mongodb+srv://${dbconf.atlas.username}:${dbconf.atlas.password}@${dbconf.atlas.host}/${dbconf.atlas.database}`;
const connection_string = `mongodb://${dbconf.default.username}:${dbconf.default.password}@${dbconf.default.host}:${dbconf.default.port}/${dbconf.default.database}`;

mongoose.connect(connection_string, {
  useCreateIndex: true,
  useNewUrlParser: true
}).then(status => {
  console.log(`Database connected to mongodb://${dbconf.default.username}@${dbconf.default.host}/${dbconf.default.database}`);
  fid.getFiles(__dirname, './endpoints').then(files => {
    files.forEach(file => {
      file = file.replace(/\.js$/, '');
      router.use(`/${file}`, require(`./endpoints/${file}`))
    });
  })
}).catch(err => {
  console.log(`Error connecting database ${err}`);
})

module.exports = router
Each index.js inside a version folder is, in turn, mapped to the endpoints inside its endpoints folder.
The code for one of the endpoints is given below:
const express = require('express');
const router = express.Router();
const userCtrl = require('../controllers/users');
router.post('/signup', userCtrl.signup);
router.post('/login', userCtrl.login);
module.exports = router;
In this file we are connecting the endpoints to their controllers.
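For completeness, a minimal sketch of what such a controller module (here a hypothetical ../controllers/users.js) might look like:

// controllers/users.js -- hypothetical controller stubs for the routes above
module.exports = {
  signup: (req, res) => {
    // create the user here (validation, hashing, persistence)
    res.status(201).json({ message: 'signed up' });
  },
  login: (req, res) => {
    // authenticate the user here and issue a session/token
    res.json({ message: 'logged in' });
  }
};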
var config = {
  'targets': [
    'https://abc.api.xxx',
    'https://xyz.abc',
    'https://stackoverflow.net'
  ]
};

relay(req, resp, config);

function relay(req, resp, config) {
  doRelay(req, resp, config['targets'], relayOne);
}

function doRelay(req, resp, servers, relayOne) {
  var finalresponses = [];
  if (servers.length > 0) {
    var loop = function(servers, index, relayOne, done) {
      relayOne(req, servers[index], function(response) {
        finalresponses.push(response); // was push[response], which never added anything
        if (++index < servers.length) {
          setTimeout(function() {
            loop(servers, index, relayOne, done);
          }, 0);
        } else {
          done(resp, finalresponses);
        }
      });
    };
    loop(servers, 0, relayOne, done);
  } else {
    done(resp, finalresponses);
  }
}

function relayOne(req, targetserver, relaydone) {
  // call the targetserver and return the response data
  /* return relaydone(response data); */
}

function done(resp, finalresponses) {
  console.log('ended');
  resp.writeHead(200, 'OK', {
    'Content-Type': 'text/plain'
  });
  resp.end(finalresponses.join('\n')); // join the collected responses into one plain-text body
}
It sounds like you are trying to design a reverse proxy. If you are struggling to get custom code to work, there is a free npm library which is very robust.
I would recommend node-http-proxy
I have posted a link below, which will lead you directly to the "modify response" section, since you mentioned modifying the API format in your question. Be sure to read the entire page, though.
https://github.com/http-party/node-http-proxy#modify-a-response-from-a-proxied-server
Note: this library is also very good because it can support SSL, and proxies to both localhost (servers on the same machine) and servers on other machines (remote).
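For illustration, a minimal setup with node-http-proxy looks something like this (the target URL is a placeholder):

var http = require('http');
var httpProxy = require('http-proxy');

// one proxy instance, reused for every request
var proxy = httpProxy.createProxyServer({});

http.createServer(function (req, res) {
  // forward the incoming request to the placeholder target
  proxy.web(req, res, { target: 'http://localhost:9000' });
}).listen(8000);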
From the MDN documentation on Promise.all():
It rejects with the reason of the first promise that rejects.
To overcome the problem, you'll need to catch() each request you've made.
e.g.
Promise.all([
  request('<url 1>').catch(err => ({ err })), // swallow the rejection, return the error instead
  request('<url 2>').catch(err => ({ err })),
  request('<url 3>').catch(err => ({ err }))
])
.then(([result1, result2, result3]) => {
  if (result1.err) { /* <url 1> failed */ }
  if (result2.err) { /* <url 2> failed */ }
  if (result3.err) { /* <url 3> failed */ }
})
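As an aside, on Node 12.9+ you can get the same effect without wrapping each request, using Promise.allSettled(); a sketch:

Promise.allSettled([
  request('<url 1>'),
  request('<url 2>'),
  request('<url 3>')
]).then(results => {
  results.forEach((result, i) => {
    if (result.status === 'fulfilled') {
      // result.value holds the response for request i
    } else {
      // result.reason holds the rejection error for request i
    }
  });
});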
My issue is this:
I have made a call to someone's web service. I get back the file name, the extension, and the "bytes". The bytes actually come in as an array, and at position 0 (Bytes[0]) is the following string:
JVBERi0xLjYKJeLjz9MKMSAwIG9iago8PC9EZWNvZGVQYXJtczw8L0sgLTEvQ29sdW1ucyAyNTUwL1Jvd3MgMzMwMD4+L1R5cGUvWE9iamVjdC9CaXRzUGVyQ29tcG9uZW50IDEvU3VidHlwZS9JbWFnZS9XaWR0aCAyNTUwL0NvbG9yU3BhY2UvRGV2aWNlR3JheS9GaWx0ZXIvQ0NJVFRGYXhEZWNvZGUvTGVuZ3RoIDI4Mzc0L0hlaWdodCAzMzAwPj5zdHJlYW0K////////y2IZ+M8+zOPM/HzLhzkT1NAjCCoEY0CMJNAjCR4c8HigRhBAi1iZ0eGth61tHhraTFbraRaYgQ8zMFyGyGM8ZQZDI8MjMI8M6enp9W6enp+sadIMIIEYwy/ggU0wwgwjWzSBUmwWOt/rY63fraTVNu6C7R7pN6+v///20v6I70vdBaPjptK8HUQfX9/17D/TMet+l06T//0v3/S9v+r98V0nH///7Ff+Ed3/v16X9XX/S/KP0vSb//W88ksdW18lzBEJVpPXT0k9b71///...
The string example above has been cut off for readability.
How do I take that string and save it as a readable file?
In this case it's a PDF.
let pdfBytes = '{String shown above in example}'
You can use the Node.js File System Module to save the received buffer.
Assuming the encoding of your data is base64:
const fs = require('fs');

let pdfBytes = 'JVBERi0xLjYKJeLjz9...'
let writeStream = fs.createWriteStream('filename.pdf');
writeStream.write(pdfBytes, 'base64');
writeStream.on('finish', () => {
  console.log('saved');
});
writeStream.end();
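Alternatively, if you don't need a stream, a one-shot write with a decoded Buffer does the same thing; a minimal sketch:

const fs = require('fs');

// decode the base64 string into a Buffer and write it out in one call
fs.writeFile('filename.pdf', Buffer.from(pdfBytes, 'base64'), (err) => {
  if (err) return console.error(err);
  console.log('saved');
});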
I am using the fs module here to create and save the file, with a lot of try/catch in case anything goes wrong. This example shows how you could pass the data to a function that then creates the file for you.
const util = require('util');
const fs = require('fs');

const fsOpen = util.promisify(fs.open);
const fsWriteFile = util.promisify(fs.writeFile);
const fsClose = util.promisify(fs.close);

function saveNewFile(path, data) {
  return new Promise(async (resolve, reject) => {
    let fileToCreate;
    // Open the file for writing ('wx' fails if the file already exists)
    try {
      fileToCreate = await fsOpen(path, 'wx');
    } catch (err) {
      reject('Could not create new file, it may already exist');
      return;
    }
    // Write the new data to the file
    try {
      await fsWriteFile(fileToCreate, data);
    } catch (err) {
      reject('Error writing to new file');
      return;
    }
    // Close the file
    try {
      await fsClose(fileToCreate);
    } catch (err) {
      reject('Error closing new file');
      return;
    }
    resolve('File created');
  });
}
// Data we want to use to create the file.
let pdfBytes = 'JVBERi0xLjYKJeLj...'

// saveNewFile returns a promise, so handle both outcomes
saveNewFile('./filename.pdf', pdfBytes)
  .then(message => console.log(message))
  .catch(err => console.error(err));
I am able to upload a file using openUploadStream of GridFSBucket and can see that the file is uploaded and visible under songs.files and songs.chunks. But for some reason, I get the following error while trying to download it:
Caught exception: Error: FileNotFound: file def1.txt was not found
My code is:
var express = require('express');
var gridModule = express.Router();
var mongoose = require('mongoose');
var fs = require('fs');
var assert = require('assert'); // used in the stream error handlers below

gridModule.post('/', (req, res) => {
  console.log("::::grid");
  //const gridfs = new mongoose.mongo.GridFSBucket(mongoose.connection.db);
  //const writeStream = gridfs.openUploadStream('test.dat');
  var gridfs = new mongoose.mongo.GridFSBucket(mongoose.connection.db, {
    chunkSizeBytes: 1024,
    bucketName: 'songs'
  });
  fs.createReadStream('./def.txt')
    .pipe(gridfs.openUploadStream('def1.txt'))
    .on('error', function (error) {
      assert.ifError(error);
    })
    .on('finish', function () {
      console.log('done!');
      process.exit(0);
    });
});

gridModule.get('/', (req, res) => {
  var gridfs = new mongoose.mongo.GridFSBucket(mongoose.connection.db, {
    chunkSizeBytes: 1024,
    bucketName: 'songs'
  });
  /* var bucket = new mongodb.GridFSBucket(db, {
    chunkSizeBytes: 1024,
    bucketName: 'songs'
  }); */
  gridfs.openDownloadStream('def1.txt')
    .pipe(fs.createWriteStream('./def1.txt'))
    .on('error', function (error) {
      console.log(":::error");
      assert.ifError(error);
    })
    .on('finish', function () {
      console.log('done!');
      process.exit(0);
    });
});

module.exports = gridModule;
I tried using an ObjectId as well, but got the same error. Does anyone have any guesses as to what I may be doing wrong here?
Note: the code may not look optimized (e.g. the bucket is declared twice); kindly ignore that for now, I will clean it up once it works.
According to the API doc here, in order to use a filename as the argument you should use
openDownloadStreamByName(filename, options)
not openDownloadStream, which takes the id of the file.
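Applied to the code above, the download route would become (a sketch, keeping the same bucket options as in the question):

// download by filename instead of by id
gridfs.openDownloadStreamByName('def1.txt')
  .pipe(fs.createWriteStream('./def1.txt'))
  .on('error', function (error) {
    assert.ifError(error);
  })
  .on('finish', function () {
    console.log('done!');
  });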
Another possible explanation for this, if you're already calling openDownloadStream and still experiencing the FileNotFound error, and you are 100% sure the id is correct, is that you didn't pass an ObjectId type.
In my case, I was passing the id as a string instead of as an ObjectId.
bucket.openDownloadStream(mongoose.Types.ObjectId(id));
vs
bucket.openDownloadStream(id);
I am trying to check if an image exists in a folder.
If it exists I want to pipe its stream to res (I'm using Express)
If it does not exist I want to do another thing.
I created an async function that is supposed to either return the image's stream if it exists or false if it doesn't.
I do get a stream when I try it, but the browser loads forever, as if there were an issue with the stream.
Here is the most minimal reproduction I could put together:
Link to runnable code
const express = require('express');
const path = require('path');
const fs = require('fs');
const app = express();

app.get('/', async (req, res) => {
  // Check if the image is already converted by returning a stream or false
  const ext = 'jpg';
  const imageConvertedStream = await imageAlreadyConverted(
    './foo',
    1,
    '100x100',
    80,
    ext
  );

  // Image already converted, we send it back
  if (imageConvertedStream) {
    console.log('image exists');
    res.type(`image/${ext}`);
    imageConvertedStream.pipe(res);
    return;
  } else {
    console.log('Image not found');
  }
});

app.listen(3000, () => {
  console.log('Server started on port 3000');
});
async function imageAlreadyConverted(
  basePath,
  id,
  size,
  quality,
  extWanted
) {
  return new Promise(resolve => {
    // If we know the wanted extension, we check if it exists
    let imagePath;
    if (extWanted) {
      imagePath = path.join(
        basePath,
        size,
        `img_${id}_${quality}.${extWanted}`
      );
    } else {
      imagePath = path.join(basePath, size, `img_${id}_${quality}.jpg`);
    }

    console.log(imagePath);

    const readStream = fs.createReadStream(imagePath);
    readStream.on('error', () => {
      console.log('error');
      resolve(false);
    });
    readStream.on('readable', () => {
      console.log('readable');
      resolve(readStream);
    });
  });
}
95% of my images will be available and I need performance; I suppose checking with fs.stat and then creating the stream takes longer than just trying to create the stream and handling the error.
The issue was with the "readable" event. Once I switched to the "open" event, everything worked fine.
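In other words, the resolving listener moves from "readable" to "open", which fires as soon as the file descriptor is ready and before any data has been consumed:

const readStream = fs.createReadStream(imagePath);
readStream.on('error', () => {
  resolve(false);
});
// 'open' fires once the file descriptor is obtained, before any data
// is read, so the stream can still be piped to the response untouched
readStream.on('open', () => {
  resolve(readStream);
});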
I'm having some trouble trying to figure out how to prevent Multer from uploading a file if an entry has already been added to the database.
In my scenario, I have a multipart form where you can add beers by specifying an id. It has two inputs: a text input (id) and a file input (beerImage).
What I'd like to do is, before uploading a file, check if it already exists in the database and, if not, upload the file. Currently, my code uploads the file first and then checks if it exists, and that's a bad thing!
That's my code:
var express = require('express');
var router = express.Router();
var multer = require('multer');
var database = require('../services/database');

var upload = multer({ dest: 'uploads/' });
var cpUpload = upload.fields([{ name: 'beerImage', maxCount: 1 }]);

router.route('/beers')
  .post(function (req, res, next) {
    // I'd like to use req.body.id in the query here, but it doesn't exist yet!
    cpUpload(req, res, function (err) {
      if (err) {
        return next(new Error('Error uploading file: ' + err.message + ' (' + err.field + ')'));
      } else {
        database.getConnection().done(function (conn) {
          conn.query('SELECT COUNT(*) AS beersCount FROM beers WHERE id=?', req.body.id, function (err, rows, fields) {
            if (err) {
              conn.release();
              return next(err);
            }
            if (rows[0].beersCount > 0) {
              conn.release();
              return next(new Error('Beer "' + req.body.id + '" already exists!'));
            } else {
              delete req.body.beerImage;
              conn.query('INSERT INTO beers SET ?', req.body, function (err, rows, fields) {
                conn.release();
                if (err) {
                  return next(err);
                }
                res.json({ message: 'Beer "' + req.body.id + '" added!' });
              });
            }
          });
        });
      }
    });
  });

module.exports = router;
I can't find a way to first "parse" the multipart form data (to be able to do the query and check if it exists using req.body.id) and then decide if I want to upload the file or not. It seems that "cpUpload" does both things at the same time!
Any idea of how to get "req.body.id" first (to do the query) and then decide if I want to upload the file?
I realized there is an option called "fileFilter" in Multer that lets you control which files are accepted. There I've been able to run the desired query to check whether the entry exists.
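A sketch of that idea (untested, reusing the database service from the question; note that req.body.id is only populated inside fileFilter if the client sends the text field before the file in the multipart body):

var upload = multer({
  dest: 'uploads/',
  fileFilter: function (req, file, cb) {
    // req.body.id is available here only if the id field precedes
    // the beerImage field in the multipart stream
    database.getConnection().done(function (conn) {
      conn.query('SELECT COUNT(*) AS beersCount FROM beers WHERE id=?', req.body.id, function (err, rows) {
        conn.release();
        if (err) return cb(err);
        if (rows[0].beersCount > 0) {
          return cb(null, false); // reject the file, beer already exists
        }
        cb(null, true); // accept the file
      });
    });
  }
});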
You have to upload the file before testing whether the entry exists, and then either keep it or drop it.
This is how I achieved validation of both the file fields and the text fields of multipart/form-data before uploading to the cloud. My filterFile function inside multer looks like this:
Note: any data sent via Postman's form-data is treated as multipart/form-data. You have to use multer or a similar library to parse form data.
x-www-form-urlencoded and raw data from Postman do not require multer for parsing. You can parse them either with body-parser or with Express's built-in middlewares express.json() and express.urlencoded({ extended: false }).
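For instance, a typical setup for those non-multipart cases might look like this (a minimal sketch):

const express = require('express');
const app = express();

// parses application/json request bodies into req.body
app.use(express.json());
// parses application/x-www-form-urlencoded request bodies into req.body
app.use(express.urlencoded({ extended: false }));

With that out of the way, here is the filterFile function itself: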
const filterFile = async (req, file, cb) => {
  // if the mimetype is one of the expected mimetypes
  const allowedMimeTypes = ["image/jpeg", "image/jpg", "image/png"];
  if (allowedMimeTypes.includes(file.mimetype)) {
    // then validate the formdata fields,
    // rejecting the request with an error if any validation fails
    try {
      let { highlights, specifications, ...rest } = req.body;
      // deserializing strings into their respective JavaScript objects
      // (here into arrays) to facilitate validation
      highlights = JSON.parse(req.body.highlights);
      specifications = JSON.parse(req.body.specifications);
      // constructing the deserialized formdata
      const formdata = { highlights, specifications, ...rest };
      // using Joi for validation (skuAndProductSchema is a Joi schema defined elsewhere)
      let skuAndProductResult = await skuAndProductSchema.validateAsync(
        formdata
      );
      if (skuAndProductResult) {
        // accept the file and proceed to the next middleware
        cb(null, true);
        return;
      }
      // reject the request with an error
      cb(new Error('Validation failed'), false);
      return;
    } catch (err) {
      // reject the request with an error
      cb(err, false);
    }
  } else {
    // to reject this file pass an error and `false`
    cb(new Error(`${file.mimetype} is not supported`), false);
  }
};