I am new to Azure Functions and I am currently just trying to save an image as a blob to my storage account. However, my Promise never resolves, although the function finishes successfully. The image exists in my directory.
Here is my code so far:
module.exports = async function (context, req) {
const path = require('path');
const storage = require('azure-storage');
const STORAGE_ACCOUNT_NAME = 'something';
const ACCOUNT_ACCESS_KEY = 'also something';
const blobService = storage.createBlobService(STORAGE_ACCOUNT_NAME, ACCOUNT_ACCESS_KEY);
const filePath = './product-example.jpg';
function uploadLocalFile (filePath) {
return new Promise((resolve, reject) => {
const fullPath = path.resolve(filePath);
context.log('before call');
blobService.createAppendBlobFromLocalFile('productimageupload', 'image-upload-post', fullPath, function(err) {
if(err) {
context.log('err');
reject(err);
} else {
context.log('resolve');
resolve({message: 'resolved successfully'});
}
});
});
};
const output = uploadLocalFile(filePath);
context.log(output);
};
Here is the log output when executing the function:
2019-04-23T11:03:46 Welcome, you are now connected to log-streaming service.
2019-04-23T11:03:59.185 [Information] Executing 'Functions.branding-tool-app' (Reason='This function was programmatically called via the host APIs.', Id=xyz)
2019-04-23T11:04:00.793 [Information] before call
2019-04-23T11:04:00.794 [Information] Promise { <pending> }
2019-04-23T11:04:00.926 [Information] Executed 'Functions.xyz' (Succeeded, Id=xzy)
As you can see, the Promise still has the status 'pending' and is never resolved inside the createAppendBlobFromLocalFile callback. What am I doing wrong here?
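For context, logging an un-awaited promise always prints Promise { &lt;pending&gt; }, and the async function can complete before the upload callback ever runs. A minimal sketch of the same call with await (my reading of the intent, not part of the original post):
const output = await uploadLocalFile(filePath); // wait for the upload to finish
context.log(output); // logs { message: 'resolved successfully' } instead of a pending Promise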
Here is the complete function for uploading a local file to a blob:
await uploadLocalFile(aborter, containerURL, localFilePath);
console.log(`Local file "${localFilePath}" is uploaded`);
async function uploadLocalFile(aborter, containerURL, filePath) {
filePath = path.resolve(filePath);
const fileName = path.basename(filePath);
const blockBlobURL = BlockBlobURL.fromContainerURL(containerURL, fileName);
return await uploadFileToBlockBlob(aborter, filePath, blockBlobURL);
}
For reference, I am pasting the complete quick-start functionality below.
const {
Aborter,
BlockBlobURL,
ContainerURL,
ServiceURL,
SharedKeyCredential,
StorageURL,
uploadStreamToBlockBlob,
uploadFileToBlockBlob
} = require('@azure/storage-blob');
const fs = require("fs");
const path = require("path");
if (process.env.NODE_ENV !== "production") {
require("dotenv").config();
}
const STORAGE_ACCOUNT_NAME = process.env.AZURE_STORAGE_ACCOUNT_NAME;
const ACCOUNT_ACCESS_KEY = process.env.AZURE_STORAGE_ACCOUNT_ACCESS_KEY;
const ONE_MEGABYTE = 1024 * 1024;
const FOUR_MEGABYTES = 4 * ONE_MEGABYTE;
const ONE_MINUTE = 60 * 1000;
async function showContainerNames(aborter, serviceURL) {
let response;
let marker;
do {
response = await serviceURL.listContainersSegment(aborter, marker);
marker = response.marker;
for(let container of response.containerItems) {
console.log(` - ${ container.name }`);
}
} while (marker);
}
async function uploadLocalFile(aborter, containerURL, filePath) {
filePath = path.resolve(filePath);
const fileName = path.basename(filePath);
const blockBlobURL = BlockBlobURL.fromContainerURL(containerURL, fileName);
return await uploadFileToBlockBlob(aborter, filePath, blockBlobURL);
}
async function uploadStream(aborter, containerURL, filePath) {
filePath = path.resolve(filePath);
const fileName = path.basename(filePath).replace('.md', '-stream.md');
const blockBlobURL = BlockBlobURL.fromContainerURL(containerURL, fileName);
const stream = fs.createReadStream(filePath, {
highWaterMark: FOUR_MEGABYTES,
});
const uploadOptions = {
bufferSize: FOUR_MEGABYTES,
maxBuffers: 5,
};
return await uploadStreamToBlockBlob(
aborter,
stream,
blockBlobURL,
uploadOptions.bufferSize,
uploadOptions.maxBuffers);
}
async function showBlobNames(aborter, containerURL) {
let response;
let marker;
do {
response = await containerURL.listBlobFlatSegment(aborter, marker);
marker = response.marker;
for(let blob of response.segment.blobItems) {
console.log(` - ${ blob.name }`);
}
} while (marker);
}
async function execute() {
const containerName = "demo";
const blobName = "quickstart.txt";
const content = "hello!";
const localFilePath = "./readme.md";
const credentials = new SharedKeyCredential(STORAGE_ACCOUNT_NAME, ACCOUNT_ACCESS_KEY);
const pipeline = StorageURL.newPipeline(credentials);
const serviceURL = new ServiceURL(`https://${STORAGE_ACCOUNT_NAME}.blob.core.windows.net`, pipeline);
const containerURL = ContainerURL.fromServiceURL(serviceURL, containerName);
const blockBlobURL = BlockBlobURL.fromContainerURL(containerURL, blobName);
const aborter = Aborter.timeout(30 * ONE_MINUTE);
console.log("Containers:");
await showContainerNames(aborter, serviceURL);
await containerURL.create(aborter);
console.log(`Container: "${containerName}" is created`);
await blockBlobURL.upload(aborter, content, content.length);
console.log(`Blob "${blobName}" is uploaded`);
await uploadLocalFile(aborter, containerURL, localFilePath);
console.log(`Local file "${localFilePath}" is uploaded`);
await uploadStream(aborter, containerURL, localFilePath);
console.log(`Local file "${localFilePath}" is uploaded as a stream`);
console.log(`Blobs in "${containerName}" container:`);
await showBlobNames(aborter, containerURL);
const downloadResponse = await blockBlobURL.download(aborter, 0);
const downloadedContent = downloadResponse.readableStreamBody.read(content.length).toString();
console.log(`Downloaded blob content: "${downloadedContent}"`);
await blockBlobURL.delete(aborter)
console.log(`Block blob "${blobName}" is deleted`);
await containerURL.delete(aborter);
console.log(`Container "${containerName}" is deleted`);
}
execute().then(() => console.log("Done")).catch((e) => console.log(e));
Reference
https://github.com/Azure-Samples/azure-storage-js-v10-quickstart
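As a side note, the code above uses the v10 URL-based client. A roughly equivalent local-file upload with the newer @azure/storage-blob v12 package might look like the sketch below (the connection string parameter and names are placeholders of mine, not from the quick start):
const { BlobServiceClient } = require('@azure/storage-blob');
const path = require('path');

async function uploadLocalFileV12(connectionString, containerName, filePath) {
  const blobServiceClient = BlobServiceClient.fromConnectionString(connectionString);
  const containerClient = blobServiceClient.getContainerClient(containerName);
  await containerClient.createIfNotExists(); // no-op if the container already exists
  // Name the blob after the local file.
  const blockBlobClient = containerClient.getBlockBlobClient(path.basename(filePath));
  return blockBlobClient.uploadFile(path.resolve(filePath));
}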
Related
Does anyone know the reason for this problem? What is the issue?
In this code, I am trying to convert a blob into video format, but sometimes it works and sometimes it does not. Can anyone help me with this?
const recordVideo = require('../model/recordVideo_model')
const { join, dirname } = require('path');
const { fileURLToPath } = require('url')
const { mkdir, open, unlink, writeFile } = require('fs/promises');
const {Blob, Buffer} = require('buffer');
const { path } = require('@ffmpeg-installer/ffmpeg');
const ffmpeg = require('fluent-ffmpeg');
ffmpeg.setFfmpegPath(path)
// const __dirname = dirname(fileURLToPath(import.meta.url));
const saveData = async(data, roomid, course_id)=>{
//console.log(data);
const videoPath = join(__dirname, '../video');
//final folder name
//const dirName = new Date().toLocaleDateString().replace(/\./g, '_');
const dirName = roomid;
// console.log(`dirName: ${dirName}`);
//const dirPath = `${videoPath}/${dirName}`;
const dirPath = `./public/video/canvideo/${dirName}`;
const fileName= `${Date.now()}-${roomid}.webm`;
//const fileName= `${roomid}.webm`;
const tempFilePath = `${dirPath}/temp-${fileName}`;
const finalFilePath = `${dirPath}/${fileName}`;
let fileHandle;
try {
fileHandle = await open(dirPath);
console.log(`open the file${fileHandle}`)
}
catch {
await mkdir(dirPath);
console.log(`making directory${fileHandle}`)
}
finally {
if (fileHandle) {
fileHandle.close();
console.log(`closing ${fileHandle}`)
}
}
try {
const videoBlob = new Blob(data, {
type: 'video/webm'
})
const videoBuffer = Buffer.from(await videoBlob.arrayBuffer())
const res = await recordVideo.findOne({roomid:roomid, recordType:'canvas'})
if(!res){
await writeFile(tempFilePath, videoBuffer)
await ffmpeg(tempFilePath)
.outputOptions([
'-c:v libvpx-vp9',
'-c:a copy',
'-crf 35',
'-b:v 0',
'-vf scale=1280:720','-max_muxing_queue_size 1024'
])
.on('end', async () => {
await unlink(tempFilePath)
console.log(`*** File ${fileName} created`)
//insert database entry (recorded video entry with same details roomid, finename, finalfilepath, created at)
await recordVideo.create({roomid:roomid,filename:fileName,filePath:finalFilePath,recordType:'canvas', courseid:course_id});
}).on('error', function(err) {
console.log('An error occurred: ' + err.message);
})
.save(finalFilePath, dirPath);
}
}
catch (e) {
console.log('*** Erro in code ', e)
}
}
module.exports = {saveData,};
If anyone can help me with this, it would be great.
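One possible cause of the intermittent behaviour (an assumption on my part, not confirmed in the question) is that saveData resolves right after kicking off the conversion, because ffmpeg(...).save() only registers callbacks; the 'end' handler, the unlink of the temp file and the database insert all happen later. A minimal sketch of wrapping the conversion in a Promise so saveData only continues once ffmpeg has finished:
// Sketch: wait for fluent-ffmpeg to finish before continuing.
function convertWebm(tempFilePath, finalFilePath) {
  return new Promise((resolve, reject) => {
    ffmpeg(tempFilePath)
      .outputOptions([
        '-c:v libvpx-vp9',
        '-c:a copy',
        '-crf 35',
        '-b:v 0',
        '-vf scale=1280:720',
        '-max_muxing_queue_size 1024'
      ])
      .on('end', resolve)   // conversion finished
      .on('error', reject)  // conversion failed
      .save(finalFilePath);
  });
}

// Inside saveData this could then be awaited (sketch):
// await writeFile(tempFilePath, videoBuffer);
// await convertWebm(tempFilePath, finalFilePath);
// await unlink(tempFilePath);
// await recordVideo.create({ roomid, filename: fileName, filePath: finalFilePath, recordType: 'canvas', courseid: course_id });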
I can't set the Azure content type from Node using the code below. It always stores the content type as application/octet-stream.
const { BlobServiceClient } = require('@azure/storage-blob');
const { AZURE_STORAGE_CONNECTION_STRING } = process.env;
let blobServiceClient;
async function getBlobServiceClient() {
if (!blobServiceClient) {
blobServiceClient = await BlobServiceClient.fromConnectionString(AZURE_STORAGE_CONNECTION_STRING);
}
return blobServiceClient;
}
async function uploadFile(filePath, containerName) {
const bsClient = await getBlobServiceClient();
const containerClient = bsClient.getContainerClient(containerName);
const blockBlobClient = containerClient.getBlockBlobClient('myImag6.png', { blobHTTPHeaders: { blobContentType: 'image/png' } });
try {
const res = await blockBlobClient.uploadFile(filePath);
console.log(res);
} catch (error) {
console.log(error);
}
}
The following issue seems related to this but I am not sure.
https://github.com/Azure/azure-sdk-for-js/issues/6192
Please give me more info on this and how to solve this issue.
I suppose it's because you don't set the BlockBlobUploadOptions in the uploadFile method; you only use it in the getBlockBlobClient method. In my test code below, it sets the content type correctly.
const { BlobServiceClient, StorageSharedKeyCredential } = require("@azure/storage-blob");
// Enter your storage account name and shared key
const account = "account name";
const accountKey = "account key";
const sharedKeyCredential = new StorageSharedKeyCredential(account, accountKey);
const blobServiceClient = new BlobServiceClient(
`https://${account}.blob.core.windows.net`,
sharedKeyCredential
);
const containerName = "test";
async function main() {
const containerClient = blobServiceClient.getContainerClient(containerName);
const blockBlobClient = containerClient.getBlockBlobClient('test.txt');
const blobOptions = { blobHTTPHeaders: { blobContentType: 'text/plain' } };
const uploadBlobResponse = await blockBlobClient.uploadFile('E:\\project\\jsstorage\\test.txt',blobOptions);
console.log(`Upload block blob test.txt successfully`, uploadBlobResponse.requestId);
}
main();
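As a quick check (a sketch of mine, not part of the original answer), the stored content type can be verified by reading the blob's properties back inside main() after the upload:
const props = await blockBlobClient.getProperties();
console.log(props.contentType); // expected: 'text/plain'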
Did you try setting blobHTTPHeaders and passing them to the uploadFile method?
const blobOptions = { blobHTTPHeaders: { blobContentType: 'image/png' } };
const res = await blockBlobClient.uploadFile(filePath, blobOptions);
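If the data is a string or Buffer in memory rather than a file on disk, the same options object can be passed to upload() as well (a sketch; data is a placeholder of mine):
const data = Buffer.from('...'); // placeholder content
await blockBlobClient.upload(data, data.length, blobOptions);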
I have a Node.js application with a frontend in Angular.
I need to upload files and images to Azure Blob Storage.
I have created a container and set up the environment according to the MS documentation (https://learn.microsoft.com/en-us/azure/storage/blobs/storage-quickstart-blobs-nodejs), the v12 version.
My function works for creating a file and uploading it to the Azure blob, but I could not figure out how to upload a file posted from the client to Azure Blob Storage. Below is my code in Node.js/TypeScript:
import * as formidable from 'formidable';
import * as fs from 'fs';
const { BlobServiceClient } = require('@azure/storage-blob');
const uuidv1 = require('uuid/v1');
const dotenv = require('dotenv');
dotenv.config();
class BlobController {
private AZURE_STORAGE_CONNECTION_STRING = process.env.CONSTRINGBlob;
constructor(router) {
router.post('/file', this.uploadFile.bind(this));
}
//----Get Lookup tables dynamically-----------//
async uploadFile(req, res) {
const blobServiceClient = await BlobServiceClient.fromConnectionString(this.AZURE_STORAGE_CONNECTION_STRING);
// Create a unique name for the container
//const containerName = 'quickstart' + uuidv1();
const containerName = blobServiceClient.getContainerClient('mycontainer');
console.log('\t', containerName.containerName);
// Get a reference to a container
const containerClient = await blobServiceClient.getContainerClient(containerName.containerName);
let form = new formidable.IncomingForm();
form.parse(req, async function (err, fields, files) {
const blobName = 'test' + uuidv1() + files.file;
// Get a block blob client
const blockBlobClient = containerClient.getBlockBlobClient(blobName);
console.log('\nUploading to Azure storage as blob:\n\t', blobName);
// Upload data to the blob
const data = 'Hello test';
const uploadBlobResponse = await blockBlobClient.upload(data, data.length);
console.log("Blob was uploaded successfully. requestId: ", uploadBlobResponse.requestId);
});
}
}
module.exports = BlobController
Could anyone help me with how to upload posted files to an Azure blob using Node.js?
You were almost there :).
Please change the following code:
form.parse(req, async function (err, fields, files) {
const blobName = 'test' + uuidv1() + files.file;
// Get a block blob client
const blockBlobClient = containerClient.getBlockBlobClient(blobName);
console.log('\nUploading to Azure storage as blob:\n\t', blobName);
// Upload data to the blob
const data = 'Hello test';
const uploadBlobResponse = await blockBlobClient.upload(data, data.length);
console.log("Blob was uploaded successfully. requestId: ", uploadBlobResponse.requestId);
});
to:
form.parse(req, async function (err, fields, files) {
const file = files.file;
const blobName = 'test' + uuidv1() + files.file;
const contentType = file.type;
const filePath = file.path;//This is where you get the file path.
const blockBlobClient = containerClient.getBlockBlobClient(blobName);
const uploadBlobResponse = await blockBlobClient.uploadFile(filePath);
});
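Note that contentType is captured above but never used; if the goal is also to avoid the default application/octet-stream content type (as in the earlier question), it could presumably be passed to uploadFile as well. A sketch of that variation (my addition, not from the original answer):
const uploadBlobResponse = await blockBlobClient.uploadFile(filePath, {
  blobHTTPHeaders: { blobContentType: contentType }
});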
//Here's my form.parse code I used to upload pictures.
form.parse(req, async (err: any, fields: any, files: any) => {
const file = files.file;
const filePath = file.path;//This is where you get the file path. (this is the file itself)
const blobName: string = slugify(file.name);
const blockBlobClient = containerClient.getBlockBlobClient(blobName);
const uploadBlobResponse = await blockBlobClient.uploadFile(filePath)
console.log("Blob was uploaded successfully. requestId: ", uploadBlobResponse)
if (err) return reject(err)
//write to DB
//end write to DB
resolve(fields)
})
For anyone trying to make use of streams, this worked for me:
import formidable from 'formidable';
import { PassThrough } from 'stream';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
if (req.method == 'POST') {
const stream = new PassThrough();
const form = new formidable.IncomingForm({
fileWriteStreamHandler: () => {
return stream;
}
});
form.parse(req, (err, fields, files) => {
if (files) {
if (files['<form-file-input-name>']) {
const file = files['<form-file-input-name>'] as formidable.File;
const mimeType = file.mimetype;
const extension = file.originalFilename ? file.originalFilename.substring(file.originalFilename.lastIndexOf('.')) : '.csv';
const newFileName = `<form-file-input-name>-${new Date().toISOString()}${extension}`;
getFilesBlobContainer().getBlockBlobClient(newFileName).uploadStream(stream, undefined, undefined, {
blobHTTPHeaders: {
blobContentType: mimeType,
},
});
}
}
});
return res.status(200).end();
}
}
export const config = {
api: {
bodyParser: false, //Disable NextJS body parsing so formidable can do that itself (fails silently otherwise)
},
};
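One caveat with the handler above (my observation, not from the original answer): uploadStream returns a promise, and res.status(200) is sent before the parse or the upload has finished. If the response should only go out once the blob is stored, one variation is to wrap form.parse in a Promise and await the upload inside it. A sketch reusing the same placeholder names:
// Sketch: respond only after the upload to blob storage has completed.
await new Promise((resolve, reject) => {
  form.parse(req, (err, fields, files) => {
    if (err) return reject(err);
    const file = files['<form-file-input-name>'];
    getFilesBlobContainer()
      .getBlockBlobClient(`upload-${Date.now()}.csv`) // hypothetical blob name
      .uploadStream(stream, undefined, undefined, {
        blobHTTPHeaders: { blobContentType: file.mimetype },
      })
      .then(resolve, reject);
  });
});
return res.status(200).end();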
I'm trying to merge 2 PDF files using pdf-lib (I got the example code from the official pdf-lib site). The goal is to trigger the cloud function when a new file is uploaded to a bucket. The function then collects the URLs of the files to be merged, which sit in the same bucket as the new one. I am able to get the URLs, but I get an error from pdf-lib. Maybe I'm importing it the wrong way, because the example uses ES6 syntax (import) while Node.js needs require. I'm new to backend development and Node.js, so any help is highly appreciated.
const functions = require('firebase-functions');
const { Storage } = require('@google-cloud/storage');
const storage = new Storage();
const admin = require('firebase-admin');
admin.initializeApp();
const { PDFDocument } = require('pdf-lib');
const fetch = require('node-fetch');
exports.testCloudFunc = functions.storage.object().onFinalize(async object => {
const filePath = object.name;
const { Logging } = require('@google-cloud/logging');
console.log(`Logged: FILEPATH: ${filePath}`);
const id = filePath.split('/');
console.log(`Logged: ID: ${id[0]}/${id[1]}`);
const bucket = object.bucket;
console.log(`Logged: BUCKET: ${object.bucket}`);
async function listFilesByPrefix() {
const options = {
prefix: id[0] + '/' + id[1]
};
const [files] = await storage.bucket(bucket).getFiles(options);
const endFiles = files.filter(el => {
return (
el.name === id[0] + '/' + id[1] + '/' + 'invoiceReport.pdf' ||
el.name === id[0] + '/' + id[1] + '/' + 'POD.pdf' ||
el.name === id[0] + '/' + id[1] + '/' + 'rateConfirmation.pdf'
);
});
endFiles.forEach(el => console.log(el.name));
const promises = [];
for (let i = 0; i < endFiles.length; i++) {
console.log(endFiles[i].name);
promises.push(
endFiles[i].getSignedUrl({
action: 'read',
expires: '03-17-2025'
})
);
}
const urlsArray = await Promise.all(promises);
return urlsArray;
}
listFilesByPrefix()
.then(results => {
results.forEach(el => console.log(el));
copyPages(results[0], results[1]);
return results;
})
.catch(console.error);
});
async function copyPages(url1, url2) {
const firstDonorPdfBytes = await fetch(url1).then(res => res.arrayBuffer());
const secondDonorPdfBytes = await fetch(url2).then(res => res.arrayBuffer());
const firstDonorPdfDoc = await PDFDocument.load(firstDonorPdfBytes);
const secondDonorPdfDoc = await PDFDocument.load(secondDonorPdfBytes);
const pdfDoc = await PDFDocument.create();
const [firstDonorPage] = await pdfDoc.copyPages(firstDonorPdfDoc, [0]);
const [secondDonorPage] = await pdfDoc.copyPages(secondDonorPdfDoc, [742]);
pdfDoc.addPage(firstDonorPage);
pdfDoc.insertPage(0, secondDonorPage);
const pdfBytes = await pdfDoc.save();
}
But in the Firebase Cloud console logs I'm getting this:
TypeError: Cannot read property 'node' of undefined
at PDFDocument.<anonymous> (/srv/node_modules/pdf-lib/cjs/api/PDFDocument.js:459:62)
at step (/srv/node_modules/tslib/tslib.js:136:27)
at Object.next (/srv/node_modules/tslib/tslib.js:117:57)
at fulfilled (/srv/node_modules/tslib/tslib.js:107:62)
at <anonymous>
at process._tickDomainCallback (internal/process/next_tick.js:229:7)
I was facing the same problem.
Make sure your files are public or generate a signed URL.
An example follows:
const options = {
prefix: 'notas', //folder name
};
const optionsBucket = {
version: 'v2',
action: 'read',
expires: Date.now() + 1000 * 60 * 9, // 9 minutes
};
const bucketName = 'your-bucket-name';
const [files] = await storage.bucket(bucketName).getFiles(options);
const mergedPdf = await PDFDocument.create();
for (let nota of files) {
let fileName = nota.name;
if (fileName.endsWith('.pdf')) {
const [url] = await storage
.bucket(bucketName)
.file(fileName)
.getSignedUrl(optionsBucket); //generate signed url
const arrayBuffer = await fetch(url).then(res => res.arrayBuffer());
const pdf = await PDFDocument.load(arrayBuffer);
const copiedPages = await mergedPdf.copyPages(pdf, pdf.getPageIndices());
copiedPages.forEach((page) => {
mergedPdf.addPage(page);
});
}
const mergedPdfFile = await mergedPdf.save();
const file = storage.bucket(bucketName).file(`folder/filename.pdf`);
await file.save(
mergedPdfFile
);
}
My task is the following: during user registration for the first few months we did not save users' images to Firebase Cloud Storage and only stored the link received from Facebook. Now we are faced with the problem that some of these image links have expired. Because of this, I decided to write a cloud function and run it once as a script: it goes through the users who have only one image link (which means it is the first link received from Facebook), takes the Facebook user id and requests the current profile image. I exported a JSON file with these users from Firebase, then I fetch a new link for each user separately; if a user has been deleted, I handle that error in a separate catch so that it does not stop the other promises.

But after running this cloud function I ran into the error below, and because of it the operation failed for almost all users. I even increased the cloud function's memory to 2 gigabytes. Please tell me how this can be fixed.
{ Error: read ECONNRESET
at exports._errnoException (util.js:1018:11)
at TLSWrap.onread (net.js:568:26) code: 'ECONNRESET', errno: 'ECONNRESET', syscall: 'read' }
My function
const functions = require('firebase-functions');
const admin = require('firebase-admin');
const account_file = require('../account_file.json');
var FB = require('fb');
const path = require('path');
const imageDownloader = require('image-downloader');
const os = require('os');
const shortid = require('shortid');
const imageManager = require('../../lib/Images/image-manager.js');
module.exports = functions.https.onRequest((req, res) => {
const token = req.header('X-Auth-Token');
var errorsCount = 0;
return admin.auth().verifyIdToken(token)
.then(function(decodedToken) {
const adminID = decodedToken.uid;
console.log('adminID is', adminID);
const users = account_file['users'];
var fixPhotoPromises = [];
users.forEach(function(user) {
const userID = user['localId'];
const fixPhotoPromise = fixPhoto(userID).catch(error => {
console.log(error);
errorsCount += 1;
});
fixPhotoPromises.push(fixPhotoPromise);
});
return Promise.all(fixPhotoPromises);
}).then(results => {
console.log('results.length', results.length, 'errorsCount', errorsCount);
console.log('success all operations');
const successJSON = {};
successJSON["message"] = "Success operation";
return res.status(200).send(successJSON);
}).catch(error => {
console.log(error);
const errorJSON = {};
errorJSON["error"] = error;
return res.status(error.code).send(errorJSON);
});
});
function fixPhoto(userID) {
var authUser = {};
var filename = '';
return new Promise((resolve, reject) => {
return admin.auth().getUser(userID)
.then(userModel => {
const user = userModel.toJSON();
const facebookID = user['providerData'][0]['uid'];
const userID = user['uid'];
authUser = {'userID' : userID, 'facebookID' : facebookID};
const userImagesPromise = admin.database().ref()
.child('userImages')
.child(userID)
.once('value');
return Promise.all([userImagesPromise])
}).then(results => {
const userImagesSnap = results[0];
if (userImagesSnap.val() !== null && userImagesSnap.val() !== undefined) {
const userProfileImagesDict = userImagesSnap.val()['userProfileImages'];
const keys = Object.keys(userProfileImagesDict);
var userProfileImages = [];
keys.forEach(function(key){
const userProfileImage = userProfileImagesDict[key];
userProfileImages.push(userProfileImage);
});
if (userProfileImages.length > 1) {
const status = 'user has more than one image';
return resolve(status);
}
}
const facebookAppID = functions.config().facebook.appid;
const facebookAppSecret = functions.config().facebook.appsecret;
const facebookAccessPromise = FB.api('oauth/access_token', {
client_id: facebookAppID,
client_secret: facebookAppSecret,
grant_type: 'client_credentials'
});
return Promise.all([facebookAccessPromise]);
}).then(results => {
const facebookResult = results[0];
const facebookAccessToken = facebookResult['access_token'];
const profileImageURL = 'https://graph.facebook.com/' + authUser.facebookID + '/picture?width=9999&access_token=' + facebookAccessToken;
const shortID = shortid.generate() + shortid.generate() + shortid.generate();
filename = shortID + ".jpg";
const tempLocalFile = path.join(os.tmpdir(), filename);
const options = {
url: profileImageURL,
dest: tempLocalFile // Save to /path/to/dest/image.jpg
};
const imageDownloaderPromise = imageDownloader.image(options);
return Promise.all([imageDownloaderPromise])
}).then(results => {
const imageDownloaderResult = results[0];
const userID = authUser.userID;
const localImagePath = imageDownloaderResult['filename'];
const imageManagerPromise = imageManager.saveUserImageToCloudStorage(localImagePath, filename, userID);
return Promise.all([imageManagerPromise]);
}).then(results => {
const result = results[0];
return resolve(result);
}).catch(function(error) {
reject(error)
})
});
}
exports.saveUserImageToCloudStorage = function saveUserImageToCloudStorage(localImagePath, filename, userID) {
const bucketName = functions.config().googlecloud.defaultbacketname;
const bucket = gcs.bucket(bucketName);
const profileImagePath = path.normalize(path.join('userImages', userID, 'profileImages', filename));
const profileImageFile = bucket.file(profileImagePath);
return new Promise((resolve, reject) => {
bucket.upload(localImagePath, {destination: profileImagePath})
.then(() => {
const config = {
action: 'read',
expires: '03-01-2500'
};
const userRefPromise = admin.database().ref()
.child('users')
.child(userID)
.once('value');
return Promise.all([profileImageFile.getSignedUrl(config), userRefPromise])
}).then(function(results) {
const url = results[0][0];
const userSnap = results[1];
if (userSnap.val() === null || userSnap.val() === undefined) {
return resolve('user was deleted from database');
}
const userModel = userSnap.val();
const userCheckID = userModel['id'];
if (userCheckID !== userID) {
return reject("WARNING userCheckID !== userID");
}
// save to database
const userImagesRef = admin.database().ref().child('userImages')
.child(userID)
.child('userProfileImages')
.push();
const timeStamp = timestamp.now();
const imageModelID = userImagesRef.key;
const userImagesRefPromise = userImagesRef.update({
'path': url,
'id': imageModelID,
'fileName': filename,
'timeStamp': timeStamp
});
const userRef = admin.database().ref()
.child('users')
.child(userID)
.child('currentProfileImage');
const userRefPromise = userRef.update({
'path': url,
'id': imageModelID,
'fileName': filename,
'timeStamp': timeStamp
});
return Promise.all([userImagesRefPromise, userRefPromise]);
}).then(() => {
const successJSON = {};
successJSON["message"] = "Success operation";
return resolve(successJSON);
}).catch(function(error) {
return reject(error);
});
});
};
I added this code when initializing Google Cloud Storage, and I did not get this error anymore.
var gcs = require('@google-cloud/storage')({keyFilename: "service-account-credentials.json"});
gcs.interceptors.push({
request: function(reqOpts) {
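// Disabling keep-alive ('forever' agents) makes each request open a fresh socket instead of
// reusing one the remote side may already have closed — a common source of ECONNRESET.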
reqOpts.forever = false
return reqOpts
}
});
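Independently of the interceptor above, firing one fixPhoto call per user all at once can also exhaust connections. A complementary mitigation (my suggestion, not part of the original answer) is to process the users in small batches instead of one Promise.all over everyone:
// Sketch: run fixPhoto over users in batches of 20 instead of all at once.
async function fixPhotosInBatches(users, batchSize = 20) {
  let errorsCount = 0;
  for (let i = 0; i < users.length; i += batchSize) {
    const batch = users.slice(i, i + batchSize);
    await Promise.all(batch.map(user =>
      fixPhoto(user['localId']).catch(error => {
        console.log(error);
        errorsCount += 1;
      })
    ));
  }
  return errorsCount;
}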