How to make express download link with GridFsBucket? - node.js

As the title says, how do you create a direct download link for a file stored in MongoDB (GridFSBucket) using Express?
The file should be served from memory, as I don't want to save it temporarily on the server.
I have this method:
async function downloadFileFromDB(fileId) {
    var gridfsbucket = new mongoose.mongo.GridFSBucket(mongoose.connection.db, {
        chunkSizeBytes: 1024,
        bucketName: 'filesBucket'
    });
    try {
        const stream = gridfsbucket.openDownloadStream(fileId)
        const fileBuffer = Buffer.from(stream)
        return fileBuffer
    } catch (err) {
        stream.on('error', () => {
            console.log("Some error occurred in download:" + error);
        })
        console.log(err);
    }
}
And this route:
router.get('/download-file', async (req, res) => {
    const fileId = req.query.fileId
    const ObjectFileId = new ObjectId(fileId)
    const fileBuffer = await fileFacade.downloadFileFromDB(ObjectFileId)
    res.download(fileBuffer)
})
But res.download wants a path, not a buffer. I'm also not sure I can create a buffer directly from the openDownloadStream method.
Can anyone help?

I believe you need to write the data to your res object. I accomplished it like this:
const readStream = gridfs.openDownloadStreamByName(filename);
readStream.on("data", (chunk) => {
    res.write(chunk);
});
readStream.on("end", () => {
    res.status(200).end();
    mongoClient.close();
});
readStream.on("error", (err) => {
    console.log(err);
    res.status(500).send(err);
});
So, you may just have to do:
res.write(fileBuffer);
res.end();
// instead of doing:
// res.download(fileBuffer);
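If you want to avoid holding the whole file in memory, you can also pipe the GridFS download stream straight into the response instead of building a buffer first. A minimal sketch, assuming the same router, ObjectId import, mongoose connection and filesBucket bucket as in the question:

router.get('/download-file', (req, res) => {
    const fileId = new ObjectId(req.query.fileId);
    const gridfsbucket = new mongoose.mongo.GridFSBucket(mongoose.connection.db, {
        bucketName: 'filesBucket'
    });
    // suggest a filename so the browser treats the response as a download
    res.set('Content-Disposition', `attachment; filename="${fileId.toString()}"`);
    gridfsbucket.openDownloadStream(fileId)
        .on('error', (err) => {
            console.log(err);
            res.status(404).end();
        })
        .pipe(res);
});

This way nothing is buffered beyond the chunks currently in flight, and Express sends the data as it arrives from GridFS.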

Related

node.js save http response as pdf file

I have an AWS Lambda function that returns a PDF file as an array (arr).
I want to call the function and save the PDF file, but after saving I can't open it; it's broken. I can't understand why. I tried different ways to create the PDF. I can get the array and turn it into a file with an online converter, and that works, but whenever I create the PDF file with Node code it's always broken. I tried built-in Node modules like fs and external ones like pdfkit.
const https = require('https');
const fs = require('fs');
const PDFDocument = require('pdfkit');

const options = {
    host: 'uek9w0hztc.execute-api.eu-north-1.amazonaws.com',
    path: '/pdfmaker',
    method: 'POST',
    headers: {
        url: 'https://www.linkedin.com'
    }
}

const req = https.request(options, res => {
    let data = [];
    const headerDate = res.headers && res.headers.date ? res.headers.date : 'no response date';
    console.log('Status Code:', res.statusCode);
    console.log('Date in Response header:', headerDate);
    res.on('data', chunk => {
        data.push(chunk);
    });
    res.on('end', () => {
        console.log('Response ended: ');
        // fs.writeFileSync('index.pdf', Buffer.from(data));
        // fs.writeFileSync("index_v2.pdf", Buffer.from(data), 'binary', (err) => {
        //     if (err) {
        //         return console.log(err);
        //     }
        //     console.log("The file was saved!");
        // });
        // const doc = new PDFDocument();
        // doc.pipe(fs.createWriteStream('output.pdf'));
        let writeStream = fs.createWriteStream('pdf123.pdf')
        writeStream.once('open', (fd) => {
            writeStream.write(new Buffer.from(data, 'binary'))
            writeStream.on('finish', () => {
                console.log('wrote all data to file');
            });
            writeStream.end()
        })
    });
}).on('error', err => {
    console.log('Error: ', err.message);
});
req.end();
I expect someone can give me a hint about where the problem is.
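For what it's worth, one common cause of a broken file with this pattern is that data is an array of Buffer chunks, and Buffer.from(array) treats the array as a list of byte values rather than joining the chunks; Buffer.concat is the usual way to combine them. A minimal sketch of the end handler under that assumption (untested, same data array as above):

res.on('end', () => {
    // data is an array of Buffer chunks; concatenate them instead of
    // passing the array to Buffer.from
    const pdfBuffer = Buffer.concat(data);
    fs.writeFileSync('pdf123.pdf', pdfBuffer);
    console.log('wrote all data to file');
});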

Write excel file to file stream with nodejs and download it with api call in browser

I am stuck on how to write an Excel file to a file stream and download it in the browser. I can only create a new file on the server, but that is not what I want. I don't want to create it on the server (or, if it must be created, I also want to delete it once the user has downloaded it in the browser).
But I can't get the download to work.
The general idea is that I read the CSV file and parse the data.
I also read a template Excel file, overwrite it, and write it to the file stream. When I call the GET API, the download should start (I will integrate it into an Angular app later).
I am using the ExcelJS npm package.
I don't get any errors, but the code is not working as I want.
I uploaded the whole code to GitHub so you can easily see it and reproduce my setup:
https://github.com/zigax1/mean-generate-download-excel/tree/master
My excel-builder script:
export const generateExcel = async (req: Request, res: Response) => {
    try {
        await csvParse();
        res.setHeader("Content-disposition", `attachment;`);
        res.contentType(
            "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
        );
        return res.status(200).json("Success");
    } catch (err) {
        return res.status(500).json("False");
    }
};
const csvParse = async () => {
    fs.createReadStream("./content/TestCsv.csv")
        .pipe(csv.parse())
        .on("error", (error: any) => console.log("Error"))
        .on("data", (row: any) => {
            let line: any = String(row);
            line = line.split(";");
            //let parsedData = line[0];
            let parsedData = line;
            allParsedData.push(parsedData);
        })
        .on("end", (rowCount: any) => {
            let test = allParsedData.toString();
            generateFile(test);
        });
};
const generateFile = (data: any) => {
    return new Promise<fs.ReadStream>((resolve, reject) => {
        const workbook = new Excel.Workbook();
        workbook.xlsx.readFile("./utilities/template.xlsx").then(() => {
            workbook.xlsx.writeFile("./content/Test.xlsx").then(
                () => {
                    let stream = fs.createReadStream("./content/Test.xlsx");
                    stream.on("close", () => {
                        fs.unlink("./content/Test.xlsx", (error) => {
                            if (error) {
                                throw error;
                            }
                        });
                    });
                    resolve(stream);
                },
                (err) => {
                    throw err;
                }
            );
        });
    });
};
Thanks to everyone!
const csv = require('fast-csv');
const fs = require('fs');

function exportCSVFile(res, path, data) {
    const ws = fs.createWriteStream(path + ".csv");
    ws.on("finish", function () {
        res.download(path + ".csv", () => {
            fs.unlinkSync(path + ".csv");
        });
    });
    csv.write(data, { headers: true }).pipe(ws);
}
You can use this exportCSVFile function for your response.
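If you would rather not touch the filesystem at all, ExcelJS can also write the workbook straight into the HTTP response, because workbook.xlsx.write() accepts any writable stream. A rough sketch under the same setup as the question (template path and content type taken from the code above; the CSV-driven cell edits are left out):

export const generateExcel = async (req: Request, res: Response) => {
    try {
        const workbook = new Excel.Workbook();
        await workbook.xlsx.readFile("./utilities/template.xlsx");
        res.setHeader("Content-disposition", `attachment; filename=Test.xlsx`);
        res.contentType(
            "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
        );
        // stream the workbook directly into the response instead of a temp file
        await workbook.xlsx.write(res);
        res.end();
    } catch (err) {
        res.status(500).json("False");
    }
};

Since nothing is written to disk, there is also nothing to clean up after the download.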

createReadStream directly from url

How can I read a file from https://storage.googleapis.com without downloading it first?
let url = "https://storage.googleapis.com/shopify-tiers-assets-prod-us-east1/c84niyyf31t4bxhj7ulb2prf2r9p?GoogleAccessId=assets-us-prod%40shopify-tiers.iam.gserviceaccount.com&Expires=1611140532&Signature=bEiC5Ftxr8rCtiIfm3hyKKP%2B4EUf4TzPUAM3NPuO1jh7DkG1lr7DVQTMYd1rwC4DLMJAZfcQwca7X2Ab%2FWKNwbpfw%2FEjAIh%2B5qhsy77%2FP3BZFrAJjcpSHU6Mj2d3elv1cqTnVErPbig0TvB3caZ1P1apAVMXQP5WRHYGPcnXhV8g9%2B%2FRvQaO4myDS7qfhA89IicVI5e2pPwEMtUgeE6XQavY8ZdpzwLsO0XeAxsLBcH%2FezjIHPOShlRWN09OoqGwBWYcHOvULzA4Rt1fgtiejNI2vZo3FE806YWGW%2BbH%2BXzvFuNq7xMEDgvPIgkM9RPUNmcWSEzTo%2BHXAJ2Ph7%2FADw%3D%3D&response-content-disposition=attachment%3B+filename%3D%22bulk-102030409886.jsonl%22%3B+filename%2A%3DUTF-8%27%27bulk-102030409886.jsonl&response-content-type=application%2Fjsonl"
async function processLineByLine(url) {
    try {
        const rl = createInterface({
            input: createReadStream(url),
            crlfDelay: Infinity
        });
        rl.on("line", (line) => {
            console.log(line);
        });
        await once(rl, "close");
        console.log("File processed.");
    } catch (err) {
        console.error(err);
    }
}
You could stream the file using got and then parse it with ndjson, since the file has one JSON object per line.
const ndjson = require('ndjson')
const etl = require("etl");
const got = require("got");
const url = "https://storage.googleapis.com/shopify-tiers-assets-prod-us-east1/c84niyyf31t4bxhj7ulb2prf2r9p?GoogleAccessId=assets-us-prod%40shopify-tiers.iam.gserviceaccount.com&Expires=1611140532&Signature=bEiC5Ftxr8rCtiIfm3hyKKP%2B4EUf4TzPUAM3NPuO1jh7DkG1lr7DVQTMYd1rwC4DLMJAZfcQwca7X2Ab%2FWKNwbpfw%2FEjAIh%2B5qhsy77%2FP3BZFrAJjcpSHU6Mj2d3elv1cqTnVErPbig0TvB3caZ1P1apAVMXQP5WRHYGPcnXhV8g9%2B%2FRvQaO4myDS7qfhA89IicVI5e2pPwEMtUgeE6XQavY8ZdpzwLsO0XeAxsLBcH%2FezjIHPOShlRWN09OoqGwBWYcHOvULzA4Rt1fgtiejNI2vZo3FE806YWGW%2BbH%2BXzvFuNq7xMEDgvPIgkM9RPUNmcWSEzTo%2BHXAJ2Ph7%2FADw%3D%3D&response-content-disposition=attachment%3B+filename%3D%22bulk-102030409886.jsonl%22%3B+filename%2A%3DUTF-8%27%27bulk-102030409886.jsonl&response-content-type=application%2Fjsonl"
function getStream() {
    return got.stream(url);
}

getStream()
    .pipe(ndjson.parse())
    .pipe(etl.map(data => {
        // do something with data
        console.log(data);
        return data;
    }))
    .promise()
    .then(() => console.log("done"))
    .catch(err => console.error(err));
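If you prefer to keep the original readline approach instead of ndjson, the same got stream can be fed to readline.createInterface, since it accepts any readable stream as input. A minimal sketch under that assumption (each line of the .jsonl file is parsed individually):

const { createInterface } = require("readline");
const got = require("got");

async function processLineByLine(url) {
    const source = got.stream(url);
    // handle network errors on the underlying stream separately
    source.on("error", (err) => console.error("stream error:", err));
    const rl = createInterface({
        input: source,
        crlfDelay: Infinity
    });
    // readline interfaces are async iterable, so each line arrives here in order
    for await (const line of rl) {
        console.log(JSON.parse(line));
    }
    console.log("File processed.");
}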

cloud function reads file but it's not returning the content on the client side

I am using the code below to read a JSON file in Google Firebase Storage and then return the content of the file. The code works, but all I am getting on the client side is null.
exports.updateProductCatalogue = functions.https.onCall(async (data, context) => {
    const filepath = data.filepath
    const bucketname = data.bucket
    const remoteFile = bucket.file("storeid.json");
    let buffer = '';
    remoteFile.createReadStream()
        .on('error', function (err) { console.log(err) })
        .on('data', function (response) {
            buffer += response
            console.log(buffer)
        })
        .on('end', function () {
            //console.log(buffer);
            console.log("FINISHED!!")
        })
    return buffer
})
This is my client-side JS call:
function getUpdatedCatalogue() {
    var getStorageData = firebase.functions().httpsCallable('updateProductCatalogue');
    var callData = {
        "bucket": "test"
    }
    getStorageData(callData).then(function (result) {
        console.log(result)
    }).catch(function (error) {
        console.log(error)
    })
}
The cloud console.log shows that the content is read and appears in the log, but the client-side console.log returns null. Here is the file I am reading.
Why am I not getting the file content returned and displayed on the client side? How can I fix this?
The problem is that you're returning the buffer before the stream finishes reading the file.
Try this (not tested):
exports.updateProductCatalogue = functions.https.onCall(async (data, context) => {
    const filepath = data.filepath;
    const bucketname = data.bucket;
    const remoteFile = bucket.file("storeid.json");
    return new Promise((resolve, reject) => {
        let buffer = '';
        remoteFile.createReadStream()
            .on('error', function (err) {
                console.log(err);
                reject(err);
            })
            .on('data', function (response) {
                buffer += response;
                console.log(buffer);
            })
            .on('end', function () {
                console.log("FINISHED!!");
                resolve(buffer);
            });
    });
});
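As an alternative to wrapping the stream yourself, the Cloud Storage client also exposes file.download(), which already returns a promise with the file contents. A rough sketch (not tested), assuming the same bucket object and JSON file as in the question:

exports.updateProductCatalogue = functions.https.onCall(async (data, context) => {
    const remoteFile = bucket.file("storeid.json");
    // download() resolves to an array whose first element is a Buffer with the file contents
    const [contents] = await remoteFile.download();
    // return the parsed JSON so the callable sends the object back to the client
    return JSON.parse(contents.toString());
});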

Wait for response from node.js request using await

Yes, I have seen many other questions and answers. I know I need to use a callback or a promise. However, I still don't see how to do it in this particular example. Most examples involve a callback that just logs something, or the post has hundreds of different answers.
How do I return the request response from getPageData?
var url = "myurl";
var name = await getPageData(url);
// wait until I get name and then do stuff with name

function getPageData(url) {
    const https = require('https');
    https.get(url, (resp) => {
        let data = '';
        resp.on('data', (chunk) => {
            data += chunk;
        });
        resp.on('end', () => {
            var name = JSON.parse(data);
            // what do I do here to get name out?
        });
    }).on("error", (err) => {
        console.log("Error: " + err.message);
    });
}
await can only be used inside async functions. You can, however, return a promise from getPageData and "await" it using a chained then.
Use the Promise object:
const https = require('https');

var url = "myurl";
var name;
getPageData(url)
    .then(data => { name = data; /* This is the scope in which you would use name */ })
    .catch(err => { console.log('Error occured', err); });

async function getPageData(url) {
    return new Promise((resolve, reject) => {
        https
            .get(url, resp => {
                let data = '';
                resp.on('data', chunk => {
                    data += chunk;
                });
                resp.on('end', () => {
                    const name = JSON.parse(data);
                    // what do I do here to get name out?
                    resolve(name);
                });
            })
            .on('error', err => {
                console.log(`Error: ${err.message}`);
                reject(err);
            });
    });
}
The higher-level solution here is to use a module for making HTTP requests that already supports promises. You can see a list of many of them here. My favorite from that list is got() and you can use it to solve your problem like this:
function getPageData(url) {
    return got(url);
}

// can only use await inside a function declared as async
async function someFunction() {
    try {
        let name = await getPageData(url);
        console.log(name);
    } catch (e) {
        console.log(e);
    }
}
The got() library does a whole bunch of things for you.
It is entirely based on promises so you can directly use await on the promise it returns.
It collects the whole response for you (you don't have to write your own code to do that).
If the response is JSON, it automatically parses that for you and resolves to the parsed Javascript object.
It automatically detects http or https URL and uses the right low level module.
And, it has dozens of other useful features (not needed in this example).
Or, if you want the lower level solution where you make your own promisified function for doing an https request, you can do this:
const https = require('https');

// can only use await inside a function declared as async
async function someFunction() {
    const url = "myurl";
    try {
        let name = await getPageData(url);
        console.log(name);
    } catch (e) {
        console.log(e);
    }
}
function getPageData(url) {
    return new Promise((resolve, reject) => {
        https.get(url, (resp) => {
            let data = '';
            resp.on('data', (chunk) => {
                data += chunk;
            });
            resp.on('end', () => {
                try {
                    const name = JSON.parse(data);
                    resolve(name);
                } catch (e) {
                    // JSON parsing error
                    reject(e);
                }
            });
        }).on("error", (err) => {
            console.log("Error: " + err.message);
            reject(err);
        });
    });
}
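On current Node versions (18 and later) the built-in global fetch gives you the same thing without any extra module. A minimal sketch, assuming the URL returns JSON as in the question:

// Node 18+ ships a global fetch, so no require is needed
async function getPageData(url) {
    const resp = await fetch(url);
    if (!resp.ok) {
        throw new Error("Request failed with status " + resp.status);
    }
    return resp.json(); // parses the JSON body and resolves to the object
}

async function someFunction() {
    try {
        const name = await getPageData("myurl");
        console.log(name);
    } catch (e) {
        console.log(e);
    }
}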
