Node download and save gif - node.js

I want to download a gif from the net and save it in local drive.
So far I have:
1) getFile function that returns the gif as Buffer:
import fs from 'fs'
import https from 'https'
let url = '//some gif url'
/**
 * Download a resource over HTTPS and resolve with its raw bytes.
 * @param {string} url - Absolute https:// URL of the file to fetch.
 * @returns {Promise<Buffer>} The full response body as a binary Buffer.
 *   Rejects on network errors or non-2xx status codes.
 */
const getFile = function (url) {
  return new Promise((resolve, reject) => {
    https.get(url, (resp) => {
      // Without this check a 404/redirect page would be resolved and
      // saved as if it were the image, producing a file that image
      // viewers cannot open ("Could not load image...").
      if (resp.statusCode < 200 || resp.statusCode >= 300) {
        resp.resume(); // drain the response so the socket is released
        reject(new Error(`Request failed with status code ${resp.statusCode}`));
        return;
      }
      const chunks = [];
      resp.on('data', (chunk) => {
        chunks.push(chunk);
      });
      resp.on('end', () => {
        // Concatenate the binary chunks directly; converting to a string
        // encoding such as utf8 would corrupt the image data.
        resolve(Buffer.concat(chunks));
      });
    }).on("error", (err) => {
      reject(err);
    });
  });
};
2) Saving function:
/**
 * Fetch the gif and write it to disk as binary data.
 * Note: the Buffer is written as-is -- passing an encoding such as
 * 'utf8' would re-encode and corrupt the binary image data.
 */
async function save() {
  try {
    const data = await getFile(url);
    // fs.writeFile's callback only receives an error argument; the
    // original `(e, r)` logged `r`, which is always undefined. Using the
    // promise API keeps everything in one try/catch.
    await fs.promises.writeFile('./test.gif', data);
    console.log('Saved ./test.gif');
  } catch (err) {
    // Previously a rejected getFile() became an unhandled rejection.
    console.error('Download or write failed:', err);
  }
}
save()
The file is saved and has contents, but when I try to open it, it says: Could not load image....
I tried converting the output to utf8 encoding, but it gives me the same result.
NOTE: I want to make it work with raw nodejs without 3-party tools or "pipe" to understand the working parts.
I guess the problem is with encoding, but I couldn't figure out what exactly it is.
Thanks

Related

Using fs.read inside promise does not work

I am trying to do a fs.read after the promise job is done by using the .then()
Here is how my code looks like
// NOTE(review): this is the question's broken code, annotated in place.
(async () => {
const feed = await parser.parseURL('https://www.nasa.gov/rss/dyn/breaking_news.rss');
console.log(feed.title);
const items = [];
// NOTE(review): `data` pushed below is never defined in this callback
// (only the placeholder comment is shown) -- presumably created by the
// omitted code; confirm against the full source.
await Promise.all(feed.items.map(async (currentItem) => {
// some code here to create data
items.push(data);
// BUG: `.then()` must be given a *function*; here `items.forEach(...)`
// is executed immediately and its return value (undefined) is passed
// to `.then()` instead.
})).then(
items.forEach((element) => {
const file = downloadFile(element.url);
let checksumValue;
try {
// BUG: fs.readFileSync is synchronous and takes (path[, options]);
// it ignores this callback entirely, so none of this code ever runs.
fs.readFileSync(file, (_err, data) => {
checksumValue = generateChecksum(data);
console.log(`The checksum is: ${checksumValue}`);
// Delete the downloaded file
deleteFile(file);
});
} catch (error) {
console.error(error);
// expected output: ReferenceError: nonExistentFunction is not defined
// Note - error messages will vary depending on browse
}
// BUG: forEach returns undefined, so the trailing `()` below throws
// "TypeError: undefined is not a function" at runtime.
})(),
);
})();
But it doesn't operate this piece of code :
// BUG: fs.readFileSync is synchronous and does not accept a callback;
// the function passed below is silently ignored, which is why this
// block never executes.
fs.readFileSync(file, (_err, data) => {
checksumValue = generateChecksum(data);
console.log(`The checksum is: ${checksumValue}`);
// Delete the downloaded file
deleteFile(file);
});
How should I read the file?
fs.readFileSync is sync, so it doesn't take a callback.
Either use the non-sync version:
// Async variant: fs.readFile *does* take an (err, data) callback.
// NOTE(review): `_err` should be checked before using `data` -- on
// failure `data` is undefined.
fs.readFile(file, (_err, data) => {
checksumValue = generateChecksum(data);
console.log(`The checksum is: ${checksumValue}`);
// Delete the downloaded file
deleteFile(file);
});
or use it as intended:
// Sync variant: fs.readFileSync returns the file contents directly
// (and throws on failure) -- no callback involved.
const data = fs.readFileSync(file);
checksumValue = generateChecksum(data);
console.log(`The checksum is: ${checksumValue}`);
// Delete the downloaded file
deleteFile(file);

Converting HTML to PDF buffer in Nodejs

I am trying to convert an HTML code that is returned by the "returnDefaultOfferLetter" function here into PDF buffer(that I will use for sending attachments in a mail) using html-pdf package. So, the problem is it works on localhost but on AWS elastic beanstalk server it throws me ASSERTION ERROR. So after some research, I got to know I need to specify phantomPath. I tried everything I could, but I haven't got any solution.
BTW one week before it was working on AWS, so don't know what's wrong now. Help me in finding some solution or suggest me any method or package to convert HTML into pdf BUFFER. (Please, don't ignore buffer)
/**
 * Render the default offer letter for the request body as a PDF and
 * respond with the PDF encoded as a base64 string (later used as a
 * mail attachment).
 * @param {object} req - Express request; `req.body` feeds the letter template.
 * @param {object} res - Express response; receives the base64 PDF,
 *   or a 500 with the error message on failure.
 */
const htmlToBase64Pdf = (req, res) => {
  const promise = new Promise((resolve, reject) => {
    const offerLetterHTML = returnDefaultOfferLetter(req.body).toString(
      "utf8"
    );
    const pdfOptions = {
      format: "A3",
      // html-pdf needs an explicit phantomjs binary path on environments
      // (e.g. AWS Elastic Beanstalk) where it cannot auto-detect one.
      phantomPath: "../../node_modules/phantomjs-prebuilt/bin/phantomjs",
    };
    pdf.create(offerLetterHTML, pdfOptions).toBuffer(function (err, buffer) {
      if (err) {
        reject(err);
      } else {
        resolve(buffer.toString("base64"));
      }
    });
  });
  promise
    .then((resp) => res.send(resp))
    .catch((e) => {
      // BUGFIX: the original replied with an implicit 200 and the raw
      // error object. Signal failure explicitly and send only the
      // message to avoid leaking internals.
      res.status(500).send({ error: e && e.message ? e.message : String(e) });
    });
};

How to write unit tests for PDF Kit for node js?

I'm writing a pdf generator using https://github.com/foliojs/pdfkit inside a hapi server route, I have successfully implemented it but I'm struggling on writing the unit tests for it.
I've googled this but I have not had found any answer yet and package's documentation doesn't mention anything about testing.
Has anyone written unit tests for this library?
Thanks for your help y'all!
Edit: The focus of the test is how 'doc', an instance of pdfkit, works here is the function i want to test, the functions that are not added are just separation of some repetitive pdf code
/**
 * Generate a property PDF (title page, "others" pages, amenities pages,
 * page numbers) with pdfkit and resolve with its bytes.
 *
 * Resolves {code: 200, result: Buffer} once the pdfkit stream ends;
 * rejects {code: 500, error} on stream or data errors.
 *
 * NOTE(review): the async promise executor is an anti-pattern, kept here
 * because the surrounding try/catch routes its failures to reject().
 * NOTE(review): `locale` is not defined in this function -- presumably it
 * comes from an enclosing scope; confirm, otherwise the amenities title
 * throws a ReferenceError.
 */
const generatePdf = async (request, h) => {
  return new Promise(async (resolve, reject) => {
    try {
      const doc = initPdf();
      const {curated, address, photo} = await dataHandler(request, h);
      // Title page
      doc.font('Lato-Semibold')
        .fontSize(16)
        .text(
          `${address.address1}, ${address.city} ${address.stateProvince} ${address.postalCode}`,
          {align: 'center'}
        );
      doc.fontSize(14).text(' ');
      if (photo.base64Photo) {
        handleImage(doc, photo.base64Photo);
      }
      // Others page(s)
      doc.addPage();
      renderSections(doc, curated.others);
      // Amenities page(s)
      doc.addPage();
      doc.font('Lato-Semibold')
        .fontSize(26)
        .text(
          await getTranslatedString(
            locale,
            `px-hospitality.amenity.category.AMENITIES`
          ),
          {align: 'center'}
        );
      // Hack to add a space between text and next header
      doc.fontSize(14).text(' ');
      renderSections(doc, curated.amenities);
      generatePageNumbers(doc);
      // Collect the PDF stream into a single Buffer.
      const chunks = [];
      doc.on('data', (chunk) => {
        chunks.push(chunk);
      });
      doc.on('end', () => {
        // the stream is at its end, so push the resulting buffer to the response
        resolve({code: 200, result: Buffer.concat(chunks)});
      });
      doc.on('error', (err) => {
        // BUGFIX: the original rejected with `error`, which is not in
        // scope here (the handler's parameter is `err`), causing a
        // ReferenceError instead of a clean rejection.
        reject({code: 500, error: err});
      });
      doc.end(); // will trigger the stream to end
    } catch (error) {
      reject({code: 500, error});
    }
  });
};

How do I decode a base64 file when reading from GridFS via Node?

I'm trying to read a file encoded in base64 from a MongoDB GridFS collection using Node. I have been able to get the file saved from MongoDB to my local machine, but it's in base64 format and I want to save it unencoded.
Ideally I would like to decode the file "on-the-fly" without having to save once, to then read > decode > write it back to the filesystem.
My code currently looks like this...
// Stream a GridFS file straight to disk. Resolves with the local path
// once the download stream ends; rejects on stream errors.
// NOTE(review): `db`, `fileID` and `downloadPath` come from the
// enclosing scope (not shown here).
return new Promise(async (resolve, reject) => {
let bucket = new mongodb.GridFSBucket(db, {bucketName: 'Binaries'});
let objectID = new mongodb.ObjectID(fileID);
// create the download stream
bucket.openDownloadStream(objectID)
.once('error', async (error) => {
reject(error);
})
.once('end', async () => {
resolve(downloadPath);
})
// pipe the file to the stream
.pipe(fs.createWriteStream(downloadPath));
});
Any ideas?
Just in case anyone else is looking at this, here's where I landed...
// Download the GridFS file into memory, base64-decode it, and write the
// decoded bytes to disk in one pass (no intermediate encoded file).
// NOTE(review): `db`, `fileInformation` and `fileName` come from the
// enclosing scope (not shown here).
return new Promise(async (resolve, reject) => {
let bucket = new mongodb.GridFSBucket(db, {
bucketName: 'Binaries'
});
let objectID = new mongodb.ObjectID(fileInformation._id);
// temporary variable to hold image
var data = [];
// create the download stream
let downloadStream = bucket.openDownloadStream(objectID);
downloadStream.on('data', (chunk) => {
data.push(chunk);
});
downloadStream.on('error', async (error) => {
reject(error);
});
downloadStream.on('end', async () => {
// convert from base64 and write to file system
// The stored bytes are a base64 *string*, so stringify the collected
// buffer first, then decode that string back into raw binary.
let bufferBase64 = Buffer.concat(data)
let bufferDecoded = Buffer.from(bufferBase64.toString(), 'base64');
// NOTE(review): the 'binary' encoding argument is ignored when the
// data is already a Buffer -- harmless but redundant.
fs.writeFileSync(fileName, bufferDecoded, 'binary');
resolve(fileName);
});
});
Node has a built-in buffer parser Buffer.from(string[, encoding]) that you can pass the base64 encoded string to it, and get a stream of bytes from the other side, which you can convert it .toString() easily afterwards.
Ex.
let whatYouNeed = Buffer.from(gridFsData, 'base64').toString();
More about Buffer.from() function here.

Nodejs download binary octet-stream

I am trying to download (meaning create an instance of the file on the server) a .pdf file from a server that returns it to me in binary format, with:
Content-Type = application / octet-stream.
After a bit of online research I came to write:
// Download a binary (application/octet-stream) PDF and write it to disk.
// NOTE(review): per the author's own follow-up, the server returned a
// gzip-compressed body; the collected bytes must be gunzipped before
// writing, otherwise the saved PDF is "corrupt".
// NOTE(review): `pdfURL` and `path` come from the enclosing scope.
http.get(url.parse(pdfURL), res => {
let data = [];
console.log(res.statusCode);
res.on('data', chunk => {
data.push(chunk);
}).on('end', () => {
// Reassemble the body as raw bytes -- no string conversion.
let buffer = Buffer.concat(data);
console.log(buffer.toString('base64'));
fs.open(path, 'w', (e, fd) => {
if (e) throw e;
fs.write(fd, buffer, 0, buffer.length, null, e => {
if (e) throw e;
fs.close(fd, () => console.log('Wrote successfully'));
});
});
});
});
Everything works properly, but when I try to open the generated pdf, it tells me that the file is corrupt and not readable.
Any idea what might have been wrong?
Thanks
Edit:
I noticed that with postman everything works as it should, so I think the way I treat the binary is wrong
Ok, i got it,
I wasn't de-gzipping the response, now works properly
This didn't work for me, tried so many different ways until I found got, an npm library that handles http requests, here's what worked for me:
const stream = require('stream');
const { promisify } = require('util');
const fs = require('fs');
const got = require('got');
const pipeline = promisify(stream.pipeline);
/**
 * Stream a remote resource to a local file.
 * @param {string} url - Location to download from.
 * @param {string} name - Destination file path.
 * @returns {Promise<void>} Resolves once the file is fully written.
 */
async function downloadImage(url, name) {
  const source = got.stream(url);
  const sink = fs.createWriteStream(name);
  await pipeline(source, sink);
}
More info here: https://bleext.com/post/downloading-images-with-nodejs

Resources