Firebase + Google cloud storage: Handling files of 1MB+ without stalling - node.js

I've run into a curious problem:
Basically I'm making a Firebase Cloud function where when someone uploads a file then that file is sent via email or API to somewhere else.
Everything works for tiny files (100K-ish), if slow, but anything above 1MB (haven't tested the exact size) stalls. Doesn't give any errors in the Firebase log, the function just never completes.
Here's the relevant code:
// Google Cloud Storage client, authenticated with a local service-account key.
// Bug fix: the module specifier was garbled — npm scopes use '@', not '#'.
// NOTE(review): this call style (`require(...)({config})`) only works on
// @google-cloud/storage v1.x; v2+ uses `const { Storage } = require(...)`.
const Storage = require('@google-cloud/storage')({
  projectId: 'bilagskortet',
  keyFilename: './service-account-key.json'
});
/**
 * Downloads a file from Cloud Storage and returns its contents as base64.
 *
 * @param {string} fileFullPathAndName - Object path within `bucketName`.
 * @returns {Promise<{file: string}>} Resolves with the base64-encoded file.
 *   Rejects with the download error (previously the error was swallowed and
 *   the promise resolved with `undefined`, making callers that read
 *   `data.file` blow up later).
 */
const returnBase64 = (fileFullPathAndName) => {
  console.log("Fetching file...")
  // Downloads the file
  return Storage
    .bucket(bucketName)
    .file(fileFullPathAndName)
    .download()
    .then((data) => {
      // data[0] is already a Buffer; `new Buffer(...)` is deprecated, so
      // encode it directly.
      return { file: data[0].toString('base64') };
    })
    .catch((error) => {
      console.error("Didn't get file:", error);
      // Re-throw so Promise.all / callers actually observe the failure.
      throw error;
    });
}
This is used together with two other Promises to get everything about the file needed for the email:
// Resolve the attachment's link, metadata and base64 content in parallel;
// each task logs and absorbs its own failure, so Promise.all always settles.
const linkTask = StorageFile.returnDownloadURL(attachementRef)
  .then((url) => {
    console.log("AttachmenetLink is: ")
    console.log(typeof url);
    console.log(url);
    email.attachementLink = url
  })
  .catch((error) => {
    console.error("Error, didn't get link: ")
    console.error(error)
  });

const metadataTask = StorageFile.returnMetaData(attachementRef)
  .then((metadata) => {
    console.log("Content type:")
    console.log(metadata.contentType);
    file.contentType = metadata.contentType;
  })
  .catch((error) => {
    console.error("Didn't get the metadata")
    console.error(error);
  });

const base64Task = StorageFile.returnBase64(attachementRef)
  .then((data) => {
    console.log("File is: ")
    console.log(typeof data);
    console.log(data);
    file.data = data.file;
  })
  .catch((error) => {
    console.error("Error, didn't get file: ")
    console.error(error)
  });

Promise
  .all([linkTask, metadataTask, base64Task])
  .then((allData) => {
    // Define and send email with attachement (cut for brevity)
  })
  .catch((error) => console.error(error));
As I've said, the code works well if the file is tiny. Times out and dies if the file is for example a 1.7MB image (.png)
Anyone know what might be going on?
Last thing that's logged is the "AttachmentLink" and "Content Type" ones, and last thing in the StorageFile.returnBase64 function is "Fetching file..."

Related

Node JS - createWriteStream

I am going crazy trying to fix this bug so please help :-)
I am using https://pdfkit.org/
This creates a stream that when finished is piped to fs.createWriteStream
My issue is the first time the code runs this works and the PDF is generated.
The next time the Code runs a file with Zero Bytes is created.
I am calling the function from an API running on express.
The issue appears to be the async nature of fs.createWriteStream.
The stream finishes after the API has returned. I cannot seem to find a way to block while confirming the file has been created.
What is odd is that the first time the code runs it works, but when run again it fails:
Here is the Pipe Function;
/**
 * Pipes the shared PDFKit `doc` stream into the file named by `fileObj`.
 *
 * Bug fix: `pipeline(...)` with a callback runs asynchronously and was never
 * awaited, so the caller's `doc.end()` / `return` raced the file write — on
 * the second run that race produced a zero-byte file. Wrapping the callback
 * in a Promise lets `await _writeFile(...)` genuinely wait for completion.
 *
 * @param {{fileName: string}} fileObj - Target file metadata.
 * @returns {Promise<void>} Resolves when the write stream has finished;
 *   rejects with the pipeline error on failure.
 */
function _writeFile(fileObj) {
  return new Promise((resolve, reject) => {
    const fileStream = fs.createWriteStream(fileObj.fileName);
    pipeline(doc, fileStream, (err) => {
      if (err) {
        console.error('PDF failed', err);
        reject(err);
      } else {
        console.log('PDF succeeded');
        resolve();
      }
    });
  });
}
This is called from:
/**
 * Builds a PDF report for `payload` and writes it to ./controllers/tmp/.
 *
 * @param {{type: string, siteId: *}} payload - Report parameters; `type`
 *   becomes part of the output file name.
 * @returns {Promise<object|string>} The fileObj metadata on success, or the
 *   error message string on failure (note: errors are returned, not thrown).
 */
exports.drawReport = async (payload) => {
  // ISO timestamp doubles as a unique-ish component of the file name.
  var date = new Date();
  const timeStamp = date.toJSON();
  let path = './controllers/tmp/'
  var fileName = path + timeStamp + '.' + payload.type + '.pdf'
  try {
    // Start Report
    await _startReport(payload)
    // Check Starting position on page & add status box header
    // NOTE(review): `device_card_reference` is module-level state set
    // elsewhere — presumably the current page position; confirm.
    if (device_card_reference == 260) {
      await _deviceTitle(payload);
    }
    // Add Devices
    await _reportDevice(payload);
    // Call Footer for final page
    await _reportFooter()
    console.log("PDF Done - Writing File")
    // File Meta Data
    let fileObj = {
      type: payload.type,
      siteId: payload.siteId,
      fileName: fileName,
      timeStamp: timeStamp
    }
    // Create file to store PDF
    // NOTE(review): `doc.end()` below is what flushes PDFKit's stream. If
    // `_writeFile` is changed to await stream completion, `doc.end()` must
    // run before (or inside) that wait, otherwise this awaits a stream that
    // never ends — confirm the ordering.
    await _writeFile(fileObj)
    doc.end()
    console.log("PDF MADE?")
    return (fileObj)
  } catch (err) {
    console.error('MakePDF ERROR: ' + err.message);
    return (err.message)
  }
}
pipeline runs asynchronously, so it's not awaited, which is why doc.end() runs before the file is done
try wrapping pipeline in a promise, and then resolve when the stream is done:
// function that returns a promise
function _writeFile(fileObj) {
return new Promise((resolve, reject) => {
const fileStream = fs.createWriteStream(fileObj.fileName);
pipeline(
doc,
fileStream,
async(err) => {
if (err) {
console.error('PDF failed', err);
// err, handle in `.catch`
reject({res:'Pipeline failed', err});
} else {
console.log('PDF succeeded');
// done, resolve, to move to doc.end
resolve('PDF succeeded');
}
}
)
});
}
add .catch() to handle error:
// Create file to store PDF
await _writeFile(fileObj).catch(err => console.log(err));
or even better, use stream promises API
const {pipeline } = require('stream/promises');
// Promise-based variant: stream/promises' pipeline resolves only when the
// whole chain has flushed, so awaiting it is all the synchronisation needed.
async function _writeFile(fileObj) {
  const destination = fs.createWriteStream(fileObj.fileName);
  await pipeline(doc, destination);
  console.log('PDF succeeded');
}

Node.js ''Skips'' a line of code?? [screenshot]

I am making a script that will take a picture of my desktop (for server disk stuff), but it doesn't take the picture. However, it will when I create another script and run that.
const screenshot = require('screenshot-desktop')
const Discord = require('discord.js');
// Discord client that listens for the "!Photo" chat command.
const client = new Discord.Client();
client.on('message', msg => {
  if (msg.content === '!Photo') {
    // NOTE(review): per the screenshot-desktop docs cited below,
    // screenshot() returns a Promise; nothing here waits for it, so the
    // sendFile call races the file write — consistent with the picture
    // being missing only when run from this handler. Confirm.
    screenshot({ filename: 'C:/Users/MyName/Desktop/TestPictureFile.png' })
    // NOTE(review): `Bot` is not defined in this snippet (the client above
    // is named `client`); verify which object actually provides sendFile.
    Bot.sendFile(`C:/Users/MyName/Desktop/TestPictureFile.png`);
  }
});
If I only put screenshot({ filename: 'C:/Users/MyName/Desktop/TestPictureFile.png' }) in another script, it works fine. But it doesn't when I try the code above this one.
Whenever a function writes/reads a file (or query database) it is most likely asynchronous which means you need to wait for it to write/read the file then continue.
As you can see in the docs https://www.npmjs.com/package/screenshot-desktop#usage
screenshot() returns a Promise so you have 2 options.
First, call Bot.sendFile in the then function like so
// Send the file only after the screenshot promise resolves, i.e. once the
// PNG actually exists on disk.
client.on('message', (msg) => {
  if (msg.content !== '!Photo') {
    return;
  }
  screenshot({ filename: 'C:/Users/MyName/Desktop/TestPictureFile.png' })
    .then(() => {
      Bot.sendFile(`C:/Users/MyName/Desktop/TestPictureFile.png`);
    });
});
or use await like so
// Bug fix: `await` is only legal inside an `async` function — the original
// callback was not async, making this snippet a syntax error. Marking the
// handler `async` makes the await (and the ordering it guarantees) work.
client.on('message', async msg => {
  if (msg.content === '!Photo') {
    await screenshot({ filename: 'C:/Users/MyName/Desktop/TestPictureFile.png' })
    Bot.sendFile(`C:/Users/MyName/Desktop/TestPictureFile.png`);
  }
});

Converting HTML to PDF buffer in Nodejs

I am trying to convert an HTML code that is returned by the "returnDefaultOfferLetter" function here into PDF buffer(that I will use for sending attachments in a mail) using html-pdf package. So, the problem is it works on localhost but on AWS elastic beanstalk server it throws me ASSERTION ERROR. So after some research, I got to know I need to specify phantomPath. I tried everything I could, but I haven't got any solution.
BTW one week before it was working on AWS, so don't know what's wrong now. Help me in finding some solution or suggest me any method or package to convert HTML into pdf BUFFER. (Please, don't ignore buffer)
// Renders the default offer letter HTML as an A3 PDF and responds with the
// PDF encoded as a base64 string (suitable for use as a mail attachment).
const htmlToBase64Pdf = (req, res) => {
  const pdfPromise = new Promise((resolve, reject) => {
    const offerLetterHTML = returnDefaultOfferLetter(req.body).toString("utf8");
    const pdfOptions = {
      format: "A3",
      // Explicit phantomjs binary location (needed on some deployments).
      phantomPath: "../../node_modules/phantomjs-prebuilt/bin/phantomjs",
    };
    pdf.create(offerLetterHTML, pdfOptions).toBuffer((err, buffer) => {
      if (err) {
        reject(err);
        return;
      }
      resolve(buffer.toString("base64"));
    });
  });

  pdfPromise
    .then((resp) => res.send(resp))
    .catch((e) => {
      res.send(e);
    });
};

Copy file to NFS NAS using nodejs with read/write stream wrapped in promise

I am trying to copy a file to a NFS mounted directory using nodejs (basic working code below). While the file is successfully copied to the NFS mount, the promise chain breaks without throwing any error (nothing else in the promise chain executes). However, the web server shows a 502 bad gateway indicating that some error has occurred.
As basic precautions, I have ensured that the UID and GID of the user on the NAS are the same for the client.
const fs = require('fs');
/**
 * Copies `fileToCopy` into /mnt under a freshly generated UUID-based name.
 *
 * Bug fix: the 'finish' handler called `resolve(fullFilename)`, but no
 * `fullFilename` variable exists anywhere — the resulting ReferenceError was
 * thrown inside the stream callback, so the promise never settled and the
 * rest of the chain (including its .catch) never ran, with nothing logged.
 * It now resolves with `newFileName`. The redundant
 * `Promise.resolve().then(...)` wrapper is also removed.
 *
 * @param {string} fileToCopy - Path of the source file.
 * @returns {Promise<string>} Resolves with the generated file name.
 */
function myFunction(fileToCopy) {
  return new Promise((resolve, reject) => {
    const newFileName = uuid() + '.txt';
    const fileReadStream = fs.createReadStream(fileToCopy);
    const fileWriteStream = fs.createWriteStream('/mnt/' + newFileName, { flags: 'w+', mode: 0o664 });
    // end:false plus the explicit fileWriteStream.end() below keeps the
    // write side open until the read side's 'end' event fires
    // (deliberate, per the question's edit 2).
    fileReadStream.pipe(fileWriteStream, { end: false });
    fileReadStream
      .on('end', () => {
        console.log('end event copying file (logged)');
        fileWriteStream.end();
      })
      .on('error', (error) => {
        console.error('error reading image stream (not logged)');
        fileWriteStream.end();
        reject(error);
      });
    fileWriteStream
      .on('finish', () => {
        console.log('finished copying file (logged)');
        resolve(newFileName);
      })
      .on('error', (error) => {
        console.log('error writing file (not logged)');
        reject(error);
      });
  });
}
// Kick off all three copies in parallel; log overall success or failure.
Promise.resolve()
  .then(() => {
    const filesToCopy = ['file1', 'file2', 'file3'];
    const copyJobs = filesToCopy.map((file) => myFunction(file));
    return Promise.all(copyJobs);
  })
  .then(() => console.log('never gets logged'))
  .catch((error) => console.error('error never gets logged'));
Edit 1: As a bit of additional information, if the NFS NAS directory is unmounted, leaving /mnt on the local system, the above code works (the promise chain finishes, logging never gets logged).
Edit 2: modified code to include forced 'end' event and added some additional code to the minimum working example to show more of the context.

Upload synthesized speech from firebase function node.js server's tmp directory

I am trying to upload the audio returned by Google's Text-to-Speech API in a Firebase Function and having trouble writing the audio file to the Node.js server's temp directory. I receive the following error in my functions log:
Write ERROR: { Error: ENOENT: no such file or directory, open '/tmp/synthesized/output.mp3' at Error (native) errno: -2, code: 'ENOENT', syscall: 'open', path: '/tmp/synthesized/output.mp3' }
Here's my imports:
// Cloud Storage
// Cloud Storage
// Bug fix: the module specifiers were garbled — npm scopes use '@', not '#'.
import * as Storage from '@google-cloud/storage';
const gcs = new Storage();
// NOTE(review): `new Storage()` on a namespace import only works on older
// library versions; v2+ expects `import { Storage } from '@google-cloud/storage'`.
import { tmpdir } from 'os';
import { join, dirname } from 'path';
import * as fs from 'fs';
import * as fse from 'fs-extra';
// Cloud Text to Speech
import * as textToSpeech from '@google-cloud/text-to-speech';
const client = new textToSpeech.TextToSpeechClient();
...and the part of my function I'm having trouble with:
// Construct the text-to-speech request
// Construct the text-to-speech request
const request = {
  input: { text: text },
  voice: { languageCode: 'en-US', ssmlGender: 'NEUTRAL' },
  audioConfig: { audioEncoding: 'MP3' },
};
// Create the temp directory path under the OS temp dir
const workingDir = join(tmpdir(), 'synthesized');
const tmpFilePath = join(workingDir, 'output.mp3');
// Ensure temp directory exists
await fse.ensureDir(workingDir);
// Performs the Text-to-Speech request
// NOTE(review): this chain is neither `await`ed nor `return`ed, so the
// enclosing function can resolve before the write/upload finish — in a
// Cloud Function the instance may then be frozen mid-work; confirm by
// returning this promise chain to the caller.
client.synthesizeSpeech(request)
  .then(responses => {
    const response = responses[0];
    // Write the binary audio content to a local file in temp directory
    fs.writeFile(tmpFilePath, response.audioContent, 'binary', writeErr => {
      if (writeErr) {
        // Error is only logged; callers cannot observe the failure.
        console.error('Write ERROR:', writeErr);
        return;
      }
      // Upload audio to Firebase Storage
      // NOTE(review): `fileBucket`, `bucketDir`, `pageName` come from the
      // surrounding function (not shown) — verify they are in scope.
      gcs.bucket(fileBucket).upload(tmpFilePath, {
        destination: join(bucketDir, pageName)
      })
        .then(() => { console.log('audio uploaded successfully') })
        .catch((error) => { console.log(error) });
    });
  })
  .catch(err => {
    console.error('Synthesize ERROR:', err);
  });
What is wrong with my temp directory creation or fs.writeFile() function?
(Answer edited in response to question edit...)
In your original question, you invoked
client.synthesizeSpeech(request, (err, response) => {...})
following Node's http callback pattern, in which the callback function may initiate before the response is complete. Your subsequent code calls methods that assume response content; if the response is still empty, fs.writeFile() writes nothing initially, and subsequent methods cannot find the non-existent file. (Because fs.writeFile() follows the same callback pattern, you might even discover that output.mp3 file after the program exits, because fs will stream the input. But I bet your Firebase methods aren't waiting.)
The solution is to use Promises or async/await. Looking at the Google TextToSpeechClient class docs, it looks like the synthesizeSpeech method supports this:
Returns: Promise -> Array. The first element of the array is an object representing SynthesizeSpeechResponse.
Example:
// Promise form of synthesizeSpeech: the resolved value is an array whose
// first element is the SynthesizeSpeechResponse.
const synthesis = client.synthesizeSpeech(request);
synthesis
  .then((responses) => {
    const [response] = responses;
    // doThingsWith(response)
  })
  .catch((err) => {
    console.error(err);
  });
That should solve the problem with client.synthesizeSpeech, but unfortunately fs.writeFile is still synchronous. If you were using Node >10 you could use a native fsPromise.writeFile method, and if you were using Node >8 you could use util.promisify() to convert fs.writeFile to promises. But you've indicated in comments that you are using Node 6, so we'll have to do things manually. Thieving from this reference:
/**
 * Promise wrapper around fs.writeFile, for Node versions that lack
 * util.promisify / fs.promises.
 *
 * Bug fix: on failure the original called `reject(error)` and then fell
 * through to `resolve(...)` as well — a silent no-op (first settle wins),
 * but it masked the intent; the error branch now returns early.
 *
 * @param {string|Buffer|URL} file - Target path.
 * @param {string|Buffer} data - Content to write.
 * @param {object|string} option - Options/encoding forwarded to fs.writeFile.
 * @returns {Promise<string>} Resolves with a success message; rejects with
 *   the fs error.
 */
const writeFilePromise = (file, data, option) => {
  return new Promise((resolve, reject) => {
    fs.writeFile(file, data, option, error => {
      if (error) {
        reject(error);
        return;
      }
      resolve("File created! Time for the next step!");
    });
  });
};
// Synthesize → write to tmp → upload. Each step returns its promise so the
// next .then waits for it, and the single .catch handles any step's failure.
client.synthesizeSpeech(request)
  .then((responses) => {
    const [response] = responses;
    return writeFilePromise(tmpFilePath, response.audioContent, 'binary');
  })
  .then(() =>
    gcs.bucket(fileBucket).upload(tmpFilePath, {
      destination: join(bucketDir, pageName)
    })
  )
  .then(() => {
    console.log('audio uploaded successfully');
    return null;
  })
  .catch((error) => { console.log(error); });
I've written all of this using .then constructs, but naturally, you could also use async/await if you would rather do that. I hope this fixes things--it will force your Firebase code to wait until fs.writeFile has completed its job. I have also, unfortunately, smooshed all of the error checking into one final .catch block. And made things a bit verbose for clarity. I'm sure you can do better.

Resources