Concat two PDFs in Firebase Cloud Functions with pdf-lib - node.js

I'm trying to merge 2 PDF files using pdf-lib (I took the example code from the official pdf-lib site). The goal is to trigger the Cloud Function when a new file is uploaded to the bucket. The function then collects the URLs of the files that should be merged, which sit in the same bucket as the new one. I am able to get the URLs, but I get an error from pdf-lib. Maybe I'm importing it the wrong way, because the example uses ES6 syntax (import) but Node.js needs require. I'm new to backend and Node.js, so any help is highly appreciated.
const functions = require('firebase-functions');
const { Storage } = require('@google-cloud/storage');
const storage = new Storage();
const admin = require('firebase-admin');
admin.initializeApp();
const { PDFDocument } = require('pdf-lib');
const fetch = require('node-fetch');
exports.testCloudFunc = functions.storage.object().onFinalize(async object => {
const filePath = object.name;
const { Logging } = require('@google-cloud/logging');
console.log(`Logged: FILEPATH: ${filePath}`);
const id = filePath.split('/');
console.log(`Logged: ID: ${id[0]}/${id[1]}`);
const bucket = object.bucket;
console.log(`Logged: BUCKET: ${object.bucket}`);
async function listFilesByPrefix() {
const options = {
prefix: id[0] + '/' + id[1]
};
const [files] = await storage.bucket(bucket).getFiles(options);
const endFiles = files.filter(el => {
return (
el.name === id[0] + '/' + id[1] + '/' + 'invoiceReport.pdf' ||
el.name === id[0] + '/' + id[1] + '/' + 'POD.pdf' ||
el.name === id[0] + '/' + id[1] + '/' + 'rateConfirmation.pdf'
);
});
endFiles.forEach(el => console.log(el.name));
const promises = [];
for (let i = 0; i < endFiles.length; i++) {
console.log(endFiles[i].name);
promises.push(
endFiles[i].getSignedUrl({
action: 'read',
expires: '03-17-2025'
})
);
}
const urlsArray = await Promise.all(promises);
return urlsArray;
}
listFilesByPrefix()
.then(results => {
results.forEach(el => console.log(el));
copyPages(results[0], results[1]);
return results;
})
.catch(console.error);
});
async function copyPages(url1, url2) {
const firstDonorPdfBytes = await fetch(url1).then(res => res.arrayBuffer());
const secondDonorPdfBytes = await fetch(url2).then(res => res.arrayBuffer());
const firstDonorPdfDoc = await PDFDocument.load(firstDonorPdfBytes);
const secondDonorPdfDoc = await PDFDocument.load(secondDonorPdfBytes);
const pdfDoc = await PDFDocument.create();
const [firstDonorPage] = await pdfDoc.copyPages(firstDonorPdfDoc, [0]);
const [secondDonorPage] = await pdfDoc.copyPages(secondDonorPdfDoc, [742]);
pdfDoc.addPage(firstDonorPage);
pdfDoc.insertPage(0, secondDonorPage);
const pdfBytes = await pdfDoc.save();
}
But in the Firebase Cloud Functions console logs I'm getting this:
TypeError: Cannot read property 'node' of undefined
at PDFDocument.<anonymous> (/srv/node_modules/pdf-lib/cjs/api/PDFDocument.js:459:62)
at step (/srv/node_modules/tslib/tslib.js:136:27)
at Object.next (/srv/node_modules/tslib/tslib.js:117:57)
at fulfilled (/srv/node_modules/tslib/tslib.js:107:62)
at <anonymous>
at process._tickDomainCallback (internal/process/next_tick.js:229:7)
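The require('pdf-lib') form is not the problem; the stack trace shows the CommonJS build (pdf-lib/cjs/...) loading fine. Two things in the snippet above are worth a second look (my reading of the code, not a confirmed diagnosis): getSignedUrl() resolves to a one-element array, so each entry of urlsArray is [url] rather than a plain string and should be unwrapped before it reaches fetch; and pdfDoc.copyPages(secondDonorPdfDoc, [742]) asks for page index 742, which almost certainly does not exist in the donor document, and an out-of-range page index is exactly the kind of access that surfaces as "Cannot read property 'node' of undefined" inside pdf-lib. A minimal sketch of the adjusted calls:

// each getSignedUrl() result is [url], so unwrap before fetching
copyPages(results[0][0], results[1][0]);

// inside copyPages: copy page indices that actually exist, e.g. the first page of each donor
const [firstDonorPage] = await pdfDoc.copyPages(firstDonorPdfDoc, [0]);
const [secondDonorPage] = await pdfDoc.copyPages(secondDonorPdfDoc, [0]);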

I was facing the same problem.
Make sure your files are public or generate a signed URL. An example follows:
const bucketName = 'your-bucket-name'; // the bucket that holds the PDFs
const options = {
prefix: 'notas', //folder name
};
const optionsBucket = {
version: 'v2',
action: 'read',
expires: Date.now() + 1000 * 60 * 9, // 9 minutes
};
const [files] = await storage.bucket(bucketName).getFiles(options);
const mergedPdf = await PDFDocument.create();
for (let nota of files) {
let fileName = nota.name;
if (fileName.endsWith('.pdf')) {
const [url] = await storage
.bucket(bucketName)
.file(fileName)
.getSignedUrl(optionsBucket); //generate signed url
const arrayBuffer = await fetch(url).then(res => res.arrayBuffer());
const pdf = await PDFDocument.load(arrayBuffer);
const copiedPages = await mergedPdf.copyPages(pdf, pdf.getPageIndices());
copiedPages.forEach((page) => {
mergedPdf.addPage(page);
});
}
}
// save and upload the merged document once, after the loop
const mergedPdfFile = await mergedPdf.save();
const file = storage.bucket(bucketName).file(`folder/filename.pdf`);
await file.save(Buffer.from(mergedPdfFile)); // save() returns a Uint8Array; wrap it in a Buffer for upload
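If you would rather not make the files public or mint signed URLs at all, the same merge can be done by downloading the bytes directly with the Storage client. A sketch under that assumption, reusing storage, bucketName, files, and mergedPdf from above:

for (const nota of files) {
  if (nota.name.endsWith('.pdf')) {
    // file.download() resolves to [Buffer]; pdf-lib accepts the Buffer directly
    const [buffer] = await storage.bucket(bucketName).file(nota.name).download();
    const pdf = await PDFDocument.load(buffer);
    const copiedPages = await mergedPdf.copyPages(pdf, pdf.getPageIndices());
    copiedPages.forEach(page => mergedPdf.addPage(page));
  }
}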

Related

Google Cloud Function not uploading to bucket but no error in function

I have a NodeJS function that writes several small SVG files locally and then attempts to upload those files to a Cloud Storage bucket.
In the function log, I only see the message that the file was written to local disk and will now be uploaded. But there is no file in the bucket and no error logged anywhere. I have made sure the timeout is set to 9 min (the max), so I'm sure it's not timing out. What else should I check?
Any pointers will be appreciated.
exports.createQRCode = functions.storage.object().onFinalize(async (object) =>{
const qrcodeMonkeyKey = functions.config().qrcodemonkey.key;
//console.log(`key for qrcode monkey is ${qrcodeMonkeyKey}`);
const fileBucket = object.bucket; // The Storage bucket that contains the file.
const filePath = object.name; // File path in the bucket.
const contentType = object.contentType; // File content type.
const metageneration = object.metageneration; // Number of times metadata has been generated. New objects have a value of 1.
console.log(fileBucket);
console.log(filePath);
if(!filePath.toLowerCase().endsWith('.csv'))
return console.log('not a csv so no need to anything fancy');
const bucket = admin.storage().bucket(fileBucket);
const filePathComps = filePath.split('/');
const folderName = filePathComps[filePathComps.length-3];
if(folderName !== "qrcode")
return console.log('not a qr code csv so no need to anything fancy');
const fileName = filePathComps[filePathComps.length-1];
console.log(fileName);
const path = require('path');
const os = require('os');
const fs = require('fs');
const tempFilePath = path.join(os.tmpdir(), fileName);
const metadata = {
contentType: contentType,
};
await bucket.file(filePath).download({destination: tempFilePath});
const csv = require('csv-parser')
const results = [];
fs.createReadStream(tempFilePath)
.pipe(csv({headers:
['uri','filename','foldername']
,skipLines:1
}))
.on('data', async (data) => {
const x = data;
results.push(data);
//results.push({id:x.id,phoneNumber:x.phoneNumber,isInternational:x.isInternational,message:x.messageText,respStatus:resp.status,responsedata:resp.data});
})
.on('end',async () => {
pArray = [];
results.forEach(x =>{
pArray.push(createQRCodeAndUpload(qrcodeMonkeyKey,x.filename,x.uri,x.foldername));
});
const finaloutput = await Promise.all(pArray);
console.log(JSON.stringify(finaloutput));
return;
});
});
const createQRCodeAndUpload = async (qrcodeMonkeyKey,fileName, url,foldername) =>{
const bucket = admin.storage().bucket('vmallapp.appspot.com');
const path = require('path');
const os = require('os');
const fs = require('fs');
var axios = require("axios").default;
console.log('processing ' + url);
if(url !==""){
const dataToSend = {
data : url,
config :{
body:'circle',
eye:'frame14',
eyeBall:'ball16',
bodyColor:"#032b5c",
bgColor:"#84d4e2",
"logo":"ae600e1267b9e477f0b635b60ffaec1d1c18d93b.png"
},
size:1200,
download:false,
file:'svg',
gradientOnEyes:true
}
var options = {
method: 'POST',
url: 'https://qrcode-monkey.p.rapidapi.com/qr/custom',
headers: {
'content-type': 'application/json',
'x-rapidapi-host': 'qrcode-monkey.p.rapidapi.com',
'x-rapidapi-key': qrcodeMonkeyKey
},
data: dataToSend
};
var response = await axios.request(options);
console.log('qrcode monkey returned status' + response.status);
const outputFilePath = path.join(os.tmpdir(), `${fileName}.svg`);
fs.writeFileSync(outputFilePath, response.data);
console.log(`${fileName}.svg written to local disk. now will upload`);
try{
await bucket.upload(outputFilePath, {
destination: `qrcode/output/${fileName}.svg`
});
}catch(error){
console.log('error in uploding ' + error);
}
console.log('lets delete the file now and clean up local storage');
fs.unlinkSync(outputFilePath);
return 'all done';
}
}
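One thing worth checking (a guess from the code above, not a confirmed diagnosis): the uploads are kicked off from the stream's 'end' handler, but createQRCode itself resolves as soon as the fs.createReadStream(...).pipe(csv(...)) chain has been set up. Cloud Functions does not guarantee that work running after the handler's returned promise settles will be allowed to finish, so the uploads can be cut off silently even though the local write and its log line already happened. Wrapping the stream in a promise keeps the handler alive until every upload has completed; a minimal sketch using the same names:

// inside the onFinalize handler, after downloading the CSV to tempFilePath
const results = [];
await new Promise((resolve, reject) => {
  fs.createReadStream(tempFilePath)
    .pipe(csv({ headers: ['uri', 'filename', 'foldername'], skipLines: 1 }))
    .on('data', (data) => results.push(data))
    .on('error', reject)
    .on('end', resolve); // only now does the handler's promise move on
});
const finaloutput = await Promise.all(
  results.map(x => createQRCodeAndUpload(qrcodeMonkeyKey, x.filename, x.uri, x.foldername))
);
console.log(JSON.stringify(finaloutput));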

Azure Function NodeJS: blobService.createAppendBlobFromLocalFile Promise does not resolve

I am new to Azure Functions and I am currently just trying to save an image as a blob to my storage. However, my Promise never resolves, but the function ends successfully. The image exists in my directory.
Here is my code so far:
module.exports = async function (context, req) {
const path = require('path');
const storage = require('azure-storage');
const STORAGE_ACCOUNT_NAME = 'something';
const ACCOUNT_ACCESS_KEY = 'also something';
const blobService = storage.createBlobService(STORAGE_ACCOUNT_NAME, ACCOUNT_ACCESS_KEY);
const filePath = './product-example.jpg';
function uploadLocalFile (filePath) {
return new Promise((resolve, reject) => {
const fullPath = path.resolve(filePath);
context.log('before call');
blobService.createAppendBlobFromLocalFile('productimageupload', 'image-upload-post', fullPath, function(err) {
if(err) {
context.log('err');
reject(err);
} else {
context.log('resolve');
resolve({message: 'resolved successfully'});
}
});
});
};
const output = uploadLocalFile(filePath);
context.log(output);
};
Here is the log output when executing the function:
2019-04-23T11:03:46 Welcome, you are now connected to log-streaming service.
2019-04-23T11:03:59.185 [Information] Executing 'Functions.branding-tool-app' (Reason='This function was programmatically called via the host APIs.', Id=xyz)
2019-04-23T11:04:00.793 [Information] before call
2019-04-23T11:04:00.794 [Information] Promise { <pending> }
2019-04-23T11:04:00.926 [Information] Executed 'Functions.xyz' (Succeeded, Id=xzy)
As you can see, the Promise still has the status 'pending' and is not resolved in the createBlob function. What am I doing wrong here?
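A likely reason the log shows Promise { <pending> } (an observation about the snippet above, separate from the answer below): uploadLocalFile(filePath) is called without await, so context.log(output) runs immediately and the async function returns before the blob callback ever fires, at which point the host stops the execution. A minimal sketch of the awaited call:

// await the upload so the function only finishes after the blob callback has run
const output = await uploadLocalFile(filePath);
context.log(output); // logs { message: 'resolved successfully' } instead of a pending Promise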
Here is the complete function for uploading a local file to a blob:
await uploadLocalFile(aborter, containerURL, localFilePath);
console.log(`Local file "${localFilePath}" is uploaded`);
async function uploadLocalFile(aborter, containerURL, filePath) {
filePath = path.resolve(filePath);
const fileName = path.basename(filePath);
const blockBlobURL = BlockBlobURL.fromContainerURL(containerURL, fileName);
return await uploadFileToBlockBlob(aborter, filePath, blockBlobURL);
}
For reference, I am pasting the complete quick-start functionality.
const {
Aborter,
BlockBlobURL,
ContainerURL,
ServiceURL,
SharedKeyCredential,
StorageURL,
uploadStreamToBlockBlob,
uploadFileToBlockBlob
} = require('@azure/storage-blob');
const fs = require("fs");
const path = require("path");
if (process.env.NODE_ENV !== "production") {
require("dotenv").config();
}
const STORAGE_ACCOUNT_NAME = process.env.AZURE_STORAGE_ACCOUNT_NAME;
const ACCOUNT_ACCESS_KEY = process.env.AZURE_STORAGE_ACCOUNT_ACCESS_KEY;
const ONE_MEGABYTE = 1024 * 1024;
const FOUR_MEGABYTES = 4 * ONE_MEGABYTE;
const ONE_MINUTE = 60 * 1000;
async function showContainerNames(aborter, serviceURL) {
let response;
let marker;
do {
response = await serviceURL.listContainersSegment(aborter, marker);
marker = response.marker;
for(let container of response.containerItems) {
console.log(` - ${ container.name }`);
}
} while (marker);
}
async function uploadLocalFile(aborter, containerURL, filePath) {
filePath = path.resolve(filePath);
const fileName = path.basename(filePath);
const blockBlobURL = BlockBlobURL.fromContainerURL(containerURL, fileName);
return await uploadFileToBlockBlob(aborter, filePath, blockBlobURL);
}
async function uploadStream(aborter, containerURL, filePath) {
filePath = path.resolve(filePath);
const fileName = path.basename(filePath).replace('.md', '-stream.md');
const blockBlobURL = BlockBlobURL.fromContainerURL(containerURL, fileName);
const stream = fs.createReadStream(filePath, {
highWaterMark: FOUR_MEGABYTES,
});
const uploadOptions = {
bufferSize: FOUR_MEGABYTES,
maxBuffers: 5,
};
return await uploadStreamToBlockBlob(
aborter,
stream,
blockBlobURL,
uploadOptions.bufferSize,
uploadOptions.maxBuffers);
}
async function showBlobNames(aborter, containerURL) {
let response;
let marker;
do {
response = await containerURL.listBlobFlatSegment(aborter);
marker = response.marker;
for(let blob of response.segment.blobItems) {
console.log(` - ${ blob.name }`);
}
} while (marker);
}
async function execute() {
const containerName = "demo";
const blobName = "quickstart.txt";
const content = "hello!";
const localFilePath = "./readme.md";
const credentials = new SharedKeyCredential(STORAGE_ACCOUNT_NAME, ACCOUNT_ACCESS_KEY);
const pipeline = StorageURL.newPipeline(credentials);
const serviceURL = new ServiceURL(`https://${STORAGE_ACCOUNT_NAME}.blob.core.windows.net`, pipeline);
const containerURL = ContainerURL.fromServiceURL(serviceURL, containerName);
const blockBlobURL = BlockBlobURL.fromContainerURL(containerURL, blobName);
const aborter = Aborter.timeout(30 * ONE_MINUTE);
console.log("Containers:");
await showContainerNames(aborter, serviceURL);
await containerURL.create(aborter);
console.log(`Container: "${containerName}" is created`);
await blockBlobURL.upload(aborter, content, content.length);
console.log(`Blob "${blobName}" is uploaded`);
await uploadLocalFile(aborter, containerURL, localFilePath);
console.log(`Local file "${localFilePath}" is uploaded`);
await uploadStream(aborter, containerURL, localFilePath);
console.log(`Local file "${localFilePath}" is uploaded as a stream`);
console.log(`Blobs in "${containerName}" container:`);
await showBlobNames(aborter, containerURL);
const downloadResponse = await blockBlobURL.download(aborter, 0);
const downloadedContent = downloadResponse.readableStreamBody.read(content.length).toString();
console.log(`Downloaded blob content: "${downloadedContent}"`);
await blockBlobURL.delete(aborter)
console.log(`Block blob "${blobName}" is deleted`);
await containerURL.delete(aborter);
console.log(`Container "${containerName}" is deleted`);
}
execute().then(() => console.log("Done")).catch((e) => console.log(e));
Reference
https://github.com/Azure-Samples/azure-storage-js-v10-quickstart

How do you pipe to multiple streams? [duplicate]

This question already has answers here:
How to pipe one readable stream into two writable streams at once in Node.js?
(3 answers)
Closed 4 years ago.
I'm new to Node and I'm trying to download a file, hash it, then save it to disk using the hash as the file name. I'm getting correct hashes (I think) but the files are blank.
const fs = require("fs-extra")
const fetch = require("node-fetch")
const crypto = require('crypto')
const response = await fetch(url)
const sha256 = crypto.createHash("sha256")
sha256.setEncoding('hex')
response.body.pipe(sha256)
await new Promise(resolve => response.body.on("end", resolve))
sha256.end()
const hash = sha256.read()
const file = fs.createWriteStream(hash + ".jpg")
response.body.pipe(file)
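The files come out blank because response.body.pipe(file) is only attached after the 'end' event has already fired, so there is nothing left to pipe. Attaching both destinations before the stream starts flowing avoids this, which is essentially what the answer below does with a timestamp-based temporary name; a minimal sketch (the temporary name "download.tmp" is just a placeholder for this sketch):

const sha256 = crypto.createHash("sha256")
sha256.setEncoding('hex')
const file = fs.createWriteStream("download.tmp") // placeholder name; renamed once the hash is known
response.body.pipe(sha256) // hash consumer
response.body.pipe(file)   // file consumer, attached before the stream ends
await new Promise(resolve => file.on("finish", resolve)) // by now the response has ended and the file is flushed
const hash = sha256.read() // pipe() already ended the hash stream, so the hex digest is ready
await fs.rename("download.tmp", hash + ".jpg") // fs-extra's rename returns a Promise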
My trick for resolving your problem is to store the file with a unique name (using the current timestamp as the name), then hash the stream (from the response) and rename the file.
I've tested this code and it works well:
const fs = require("fs-extra")
const path = require('path');
const fetch = require("node-fetch")
const crypto = require('crypto')
const downloadImage = async (url) => {
try {
const response = await fetch(url);
/** Store file with name current timestamp */
const filename = "IMG_" + Date.now() + ".jpg";
const dest = path.join(__dirname, filename);
response.body.pipe(fs.createWriteStream(dest))
/** Hash file */
const sha256 = crypto.createHash("sha256")
sha256.setEncoding('hex')
response.body.pipe(sha256)
await new Promise(resolve => response.body.on("end", resolve))
sha256.end()
const hash = sha256.read()
/** Rename file with hash value */
await fs.rename(dest, path.join(__dirname, hash + ".jpg"))
} catch (err) {
console.log(err);
}
}
const url = "https://i0.wp.com/wptavern.com/wp-content/uploads/2016/07/stack-overflow.png?ssl=1";
downloadImage(url);
But you can also create a utility function for hashing a stream (the response you received); this is my recommendation for your code:
const fs = require("fs-extra")
const path = require('path');
const fetch = require("node-fetch")
const crypto = require('crypto')
function streamHash(stream, algorithm = 'md5') {
return new Promise((resolve, reject) => {
const shasum = crypto.createHash(algorithm);
stream.on('data', function (data) {
shasum.update(data)
})
// reject on stream errors; a try/catch around listener registration cannot catch them
stream.on('error', function (error) {
return reject(error);
})
stream.on('end', function () {
const hash = shasum.digest('hex')
return resolve(hash);
})
});
}
const downloadImage = async (url) => {
try {
const response = await fetch(url);
/** Store file with name current timestamp */
const filename = "IMG_" + Date.now() + ".jpg";
const dest = path.join(__dirname, filename);
response.body.pipe(fs.createWriteStream(dest))
/** Hash file */
const hash = await streamHash(response.body, 'sha256');
/** Rename file with hash value */
await fs.rename(dest, path.join(__dirname, hash + ".jpg"))
} catch (err) {
console.log(err);
}
}
const url = "https://i0.wp.com/wptavern.com/wp-content/uploads/2016/07/stack-overflow.png?ssl=1";
downloadImage(url);

Cloud functions. The request signature we calculated does not match the signature you provided

I had a task: after users register in the application (they register through Facebook), keep the Facebook avatar in Firebase Storage, since Facebook links only work for a limited period. I implemented the function below, but I get the following error
The request signature we calculated does not match the signature you provided. Check your Google secret key and signing method
when I try to use a link to an image. Please tell me how it can be fixed?
const functions = require('firebase-functions');
const admin = require('firebase-admin');
const gcs = require('@google-cloud/storage')({keyFilename: "service-account-credentials.json"});
const uuid = require('uuid');
const imageDownloader = require('../lib/Images/image-manager.js');
const path = require('path');
const timestamp = require('unix-timestamp');
module.exports = functions.https.onRequest((req, res) => {
const token = req.header('X-Auth-MyApp-Token');
const imageURL = req.body.imagePath;
const bucketName = functions.config().googlecloud.defaultbacketname;
const bucket = gcs.bucket(bucketName);
var userID = '';
const shortID = uuid.v1();
const filename = shortID + '.jpg';
var profileImagePath = '';
return admin.auth().verifyIdToken(token).then(decodedToken => {
userID = decodedToken.uid;
return imageDownloader.downloadImageToLocalDirectory(imageURL, filename)
}).then(localImagePath => {
profileImagePath = path.normalize(path.join('userImages', userID, 'profileImages', filename));
const uploadProm = bucket.upload(localImagePath, {
destination: profileImagePath,
uploadType: "media",
metadata: {
contentType: 'image/jpeg'
}
});
return uploadProm;
}).then(() => {
console.log('success uploaded');
const config = {
action: 'read',
expires: '03-01-2400',
contentType: 'image/jpeg'
};
const userRefPromise = admin.database().ref()
.child('users')
.child(userID)
.once('value');
const profileImageFile = bucket.file(profileImagePath);
return Promise.all([profileImageFile.getSignedUrl(config), userRefPromise])
}).then(results => {
const url = results[0][0];
const userModel = results[1].val();
const userCheckID = userModel['id'];
console.log("get url", url);
// save to database
const userImagesRef = admin.database().ref().child('userImages')
.child(userID)
.child('userProfileImages')
.push();
const timeStamp = timestamp.now();
console.log('timeStamp', timeStamp);
const imageModelID = userImagesRef.key;
const userImagesRefPromise = userImagesRef.update({
'path': url,
'id': imageModelID,
'fileName': filename,
'timeStamp': timeStamp
});
const userRef = admin.database().ref()
.child('users')
.child(userID)
.child('currentProfileImage');
const userRefPromise = userRef.update({
'path': url,
'id': imageModelID,
'fileName': filename,
'timeStamp': timeStamp
});
return Promise.all([userImagesRefPromise, userRefPromise]);
}).then(() => {
const successJSON = {};
successJSON["message"] = "Success operation";
return res.status(200).send(successJSON);
}).catch(error => {
console.log(error);
const errorJSON = {};
errorJSON["error"] = error;
return res.status(error.code).send(errorJSON);
});
});
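One detail that commonly produces this exact error (a guess based on how signed URLs are validated, not a confirmed fix): the signed-URL config includes contentType: 'image/jpeg', which means the signature covers a Content-Type header that the eventual request must also send. A plain GET of the link from a browser or image view sends no Content-Type header, so the computed signature no longer matches. Dropping contentType from the read config is worth trying:

const config = {
  action: 'read',
  expires: '03-01-2400'
  // no contentType here: a plain GET does not send a Content-Type header,
  // and including it in the signed-URL config makes the signature check fail
};
return Promise.all([profileImageFile.getSignedUrl(config), userRefPromise])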

Error: read ECONNRESET when working with large data in Firebase Cloud functions

I have the following task: for the first few months after launch we did not save users' images to Firebase Cloud Storage during registration and just kept the link received from Facebook. Now I'm facing the problem that some of those image links have expired. Because of this, I decided to write a Cloud Function and run it once as a script: it goes through the users who have only one image link (meaning it is the first link received from Facebook), takes the Facebook user id, and requests the current profile image. I exported a JSON file with these users from Firebase, then I get a link for each user separately; if the user has been deleted, I handle that error in a separate catch so it does not stop the other promises. But after running this Cloud Function I ran into the error below, and because of it the operation failed for almost all users. I even increased the Cloud Function's memory to 2 gigabytes. Please tell me how it can be fixed?
{ Error: read ECONNRESET
at exports._errnoException (util.js:1018:11)
at TLSWrap.onread (net.js:568:26) code: 'ECONNRESET', errno: 'ECONNRESET', syscall: 'read' }
My function
const functions = require('firebase-functions');
const admin = require('firebase-admin');
const account_file = require('../account_file.json');
var FB = require('fb');
const path = require('path');
const imageDownloader = require('image-downloader');
const os = require('os');
const shortid = require('shortid');
const imageManager = require('../../lib/Images/image-manager.js');
module.exports = functions.https.onRequest((req, res) => {
const token = req.header('X-Auth-Token');
var errorsCount = 0;
return admin.auth().verifyIdToken(token)
.then(function(decodedToken) {
const adminID = decodedToken.uid;
console.log('adminID is', adminID);
const users = account_file['users'];
var fixPhotoPromises = [];
users.forEach(function(user) {
const userID = user['localId'];
const fixPhotoPromise = fixPhoto(userID).catch(error => {
console.log(error);
errorsCount += 1;
});
fixPhotoPromises.push(fixPhotoPromise);
});
return Promise.all(fixPhotoPromises);
}).then(results => {
console.log('results.length', results.length, 'errorsCount', errorsCount);
console.log('success all operations');
const successJSON = {};
successJSON["message"] = "Success operation";
return res.status(200).send(successJSON);
}).catch(error => {
console.log(error);
const errorJSON = {};
errorJSON["error"] = error;
return res.status(error.code).send(errorJSON);
});
});
function fixPhoto(userID) {
var authUser = {};
var filename = '';
return new Promise((resolve, reject) => {
return admin.auth().getUser(userID)
.then(userModel => {
const user = userModel.toJSON();
const facebookID = user['providerData'][0]['uid'];
const userID = user['uid'];
authUser = {'userID' : userID, 'facebookID' : facebookID};
const userImagesPromise = admin.database().ref()
.child('userImages')
.child(userID)
.once('value');
return Promise.all([userImagesPromise])
}).then(results => {
const userImagesSnap = results[0];
if (userImagesSnap.val() !== null && userImagesSnap.val() !== undefined) {
const userProfileImagesDict = userImagesSnap.val()['userProfileImages'];
const keys = Object.keys(userProfileImagesDict);
var userProfileImages = [];
keys.forEach(function(key){
const userProfileImage = userProfileImagesDict[key];
userProfileImages.push(userProfileImage);
});
if (userProfileImages.length > 1) {
const status = 'user has more than one image';
return resolve(status);
}
}
const facebookAppID = functions.config().facebook.appid;
const facebookAppSecret = functions.config().facebook.appsecret;
const facebookAccessPromise = FB.api('oauth/access_token', {
client_id: facebookAppID,
client_secret: facebookAppSecret,
grant_type: 'client_credentials'
});
return Promise.all([facebookAccessPromise]);
}).then(results => {
const facebookResult = results[0];
const facebookAccessToken = facebookResult['access_token'];
const profileImageURL = 'https://graph.facebook.com/' + authUser.facebookID + '/picture?width=9999&access_token=' + facebookAccessToken;
const shortID = shortid.generate() + shortid.generate() + shortid.generate();
filename = shortID + ".jpg";
const tempLocalFile = path.join(os.tmpdir(), filename);
const options = {
url: profileImageURL,
dest: tempLocalFile // Save to /path/to/dest/image.jpg
};
const imageDownloaderPromise = imageDownloader.image(options);
return Promise.all([imageDownloaderPromise])
}).then(results => {
const imageDownloaderResult = results[0];
const userID = authUser.userID;
const localImagePath = imageDownloaderResult['filename'];
const imageManagerPromise = imageManager.saveUserImageToCloudStorage(localImagePath, filename, userID);
return Promise.all([imageManagerPromise]);
}).then(results => {
const result = results[0];
return resolve(result);
}).catch(function(error) {
reject(error)
})
});
}
exports.saveUserImageToCloudStorage = function saveUserImageToCloudStorage(localImagePath, filename, userID) {
const bucketName = functions.config().googlecloud.defaultbacketname;
const bucket = gcs.bucket(bucketName);
const profileImagePath = path.normalize(path.join('userImages', userID, 'profileImages', filename));
const profileImageFile = bucket.file(profileImagePath);
return new Promise((resolve, reject) => {
bucket.upload(localImagePath, {destination: profileImagePath})
.then(() => {
const config = {
action: 'read',
expires: '03-01-2500'
};
const userRefPromise = admin.database().ref()
.child('users')
.child(userID)
.once('value');
return Promise.all([profileImageFile.getSignedUrl(config), userRefPromise])
}).then(function(results) {
const url = results[0][0];
const userSnap = results[1];
if (userSnap.val() === null || userSnap.val() === undefined) {
return resolve('user was deleted from database');
}
const userModel = userSnap.val();
const userCheckID = userModel['id'];
if (userCheckID !== userID) {
return reject("WARNING userCheckID !== userID");
}
// save to database
const userImagesRef = admin.database().ref().child('userImages')
.child(userID)
.child('userProfileImages')
.push();
const timeStamp = timestamp.now();
const imageModelID = userImagesRef.key;
const userImagesRefPromise = userImagesRef.update({
'path': url,
'id': imageModelID,
'fileName': filename,
'timeStamp': timeStamp
});
const userRef = admin.database().ref()
.child('users')
.child(userID)
.child('currentProfileImage');
const userRefPromise = userRef.update({
'path': url,
'id': imageModelID,
'fileName': filename,
'timeStamp': timeStamp
});
return Promise.all([userImagesRefPromise, userRefPromise]);
}).then(() => {
const successJSON = {};
successJSON["message"] = "Success operation";
return resolve(successJSON);
}).catch(function(error) {
return reject(error);
});
});
};
I added this code when initializing Google Cloud Storage and I did not get this error anymore.
var gcs = require('@google-cloud/storage')({keyFilename: "service-account-credentials.json"});
gcs.interceptors.push({
request: function(reqOpts) {
reqOpts.forever = false
return reqOpts
}
});
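A note on why this helps (my assumption, not stated above): forever: false disables HTTP keep-alive on the underlying request agent, so every call opens a fresh connection instead of reusing a socket the remote side may already have closed, which is a classic source of ECONNRESET. If the error still shows up, another mitigation worth trying (again an assumption, not from the answer) is to stop firing one fixPhoto call per user all at once and process the users in fixed-size batches instead; a small sketch:

// process users in batches so only a limited number of connections are open at a time
// (the batch size of 20 is an arbitrary choice for this sketch)
async function fixPhotosInBatches(users, batchSize = 20) {
  let errorsCount = 0;
  for (let i = 0; i < users.length; i += batchSize) {
    const batch = users.slice(i, i + batchSize).map(user =>
      fixPhoto(user['localId']).catch(error => {
        console.log(error);
        errorsCount += 1;
      })
    );
    await Promise.all(batch); // wait for this batch before starting the next one
  }
  return errorsCount;
}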
