How to upload images and files to Azure Blob Storage with Node.js

I have a Node.js application with an Angular frontend, and I need to upload files and images to Azure Blob Storage.
I have created a container and set up the environment according to the MS documentation (https://learn.microsoft.com/en-us/azure/storage/blobs/storage-quickstart-blobs-nodejs), using the v12 version of the SDK.
My function works for creating a file and uploading it to Azure Blob Storage, but I could not figure out how to upload a file posted from the client. Below is my code in Node.js TypeScript:
import * as formidable from 'formidable';
import * as fs from 'fs';
const { BlobServiceClient } = require('@azure/storage-blob');
const uuidv1 = require('uuid/v1');
const dotenv = require('dotenv');
dotenv.config();

class BlobController {
    private AZURE_STORAGE_CONNECTION_STRING = process.env.CONSTRINGBlob;

    constructor(router) {
        router.post('/file', this.uploadFile.bind(this));
    }

    //----Get Lookup tables dynamically-----------//
    async uploadFile(req, res) {
        const blobServiceClient = await BlobServiceClient.fromConnectionString(this.AZURE_STORAGE_CONNECTION_STRING);
        // Create a unique name for the container
        //const containerName = 'quickstart' + uuidv1();
        const containerName = blobServiceClient.getContainerClient('mycontainer');
        console.log('\t', containerName.containerName);
        // Get a reference to a container
        const containerClient = await blobServiceClient.getContainerClient(containerName.containerName);
        let form = new formidable.IncomingForm();
        form.parse(req, async function (err, fields, files) {
            const blobName = 'test' + uuidv1() + files.file;
            // Get a block blob client
            const blockBlobClient = containerClient.getBlockBlobClient(blobName);
            console.log('\nUploading to Azure storage as blob:\n\t', blobName);
            // Upload data to the blob
            const data = 'Hello test';
            const uploadBlobResponse = await blockBlobClient.upload(data, data.length);
            console.log("Blob was uploaded successfully. requestId: ", uploadBlobResponse.requestId);
        });
    }
}
module.exports = BlobController
Could anyone help me with how to upload posted files to Azure Blob Storage using Node.js?

You were almost there :).
Please change the following code:
form.parse(req, async function (err, fields, files) {
    const blobName = 'test' + uuidv1() + files.file;
    // Get a block blob client
    const blockBlobClient = containerClient.getBlockBlobClient(blobName);
    console.log('\nUploading to Azure storage as blob:\n\t', blobName);
    // Upload data to the blob
    const data = 'Hello test';
    const uploadBlobResponse = await blockBlobClient.upload(data, data.length);
    console.log("Blob was uploaded successfully. requestId: ", uploadBlobResponse.requestId);
});
to:
form.parse(req, async function (err, fields, files) {
    const file = files.file;
    const blobName = 'test' + uuidv1() + file.name; // use the file's name, not the file object itself
    const contentType = file.type;
    const filePath = file.path; // This is where you get the file path.
    const blockBlobClient = containerClient.getBlockBlobClient(blobName);
    const uploadBlobResponse = await blockBlobClient.uploadFile(filePath);
});
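A side note for anyone on a newer formidable: from v2 onward the file properties were renamed (file.path is now file.filepath, file.type is now file.mimetype, file.name is now file.originalFilename), and v3 wraps files in arrays. A sketch of the same upload against the newer API, assuming the same containerClient as above:

form.parse(req, async function (err, fields, files) {
    // formidable v3 returns an array per field; v1 returned a single File object
    const file = Array.isArray(files.file) ? files.file[0] : files.file;
    const blobName = 'test' + uuidv1() + file.originalFilename;
    const blockBlobClient = containerClient.getBlockBlobClient(blobName);
    // uploadFile accepts options, so the content type can be preserved as well
    await blockBlobClient.uploadFile(file.filepath, {
        blobHTTPHeaders: { blobContentType: file.mimetype },
    });
});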

// Here's my form.parse code I used to upload pictures.
// (resolve and reject come from an enclosing Promise that is not shown;
// see the sketch after this snippet.)
form.parse(req, async (err: any, fields: any, files: any) => {
    const file = files.file;
    const filePath = file.path; // This is where you get the file path (this is the file itself).
    const blobName: string = slugify(file.name);
    const blockBlobClient = containerClient.getBlockBlobClient(blobName);
    const uploadBlobResponse = await blockBlobClient.uploadFile(filePath);
    console.log("Blob was uploaded successfully. requestId: ", uploadBlobResponse);
    if (err) return reject(err);
    // write to DB
    // end write to DB
    resolve(fields);
});
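For completeness, a minimal sketch of the enclosing Promise wrapper the snippet above assumes (slugify here is the npm package of the same name):

const parseAndUpload = (req) =>
    new Promise((resolve, reject) => {
        const form = new formidable.IncomingForm();
        form.parse(req, async (err, fields, files) => {
            if (err) return reject(err);
            const file = files.file;
            const blockBlobClient = containerClient.getBlockBlobClient(slugify(file.name));
            await blockBlobClient.uploadFile(file.path);
            resolve(fields);
        });
    });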

For anyone trying to make use of streams, this worked for me:
import formidable from 'formidable';
import { PassThrough } from 'stream';
import type { NextApiRequest, NextApiResponse } from 'next'; // missing from the original snippet

export default async function handler(req: NextApiRequest, res: NextApiResponse) {
    if (req.method == 'POST') {
        const stream = new PassThrough();
        const form = new formidable.IncomingForm({
            fileWriteStreamHandler: () => {
                return stream;
            }
        });
        form.parse(req, (err, fields, files) => {
            if (files) {
                if (files['<form-file-input-name>']) {
                    const file = files['<form-file-input-name>'] as formidable.File;
                    const mimeType = file.mimetype;
                    const extension = file.originalFilename ? file.originalFilename.substring(file.originalFilename.lastIndexOf('.')) : '.csv';
                    const newFileName = `<form-file-input-name>-${new Date().toISOString()}${extension}`;
                    // getFilesBlobContainer() is a helper returning a ContainerClient
                    getFilesBlobContainer().getBlockBlobClient(newFileName).uploadStream(stream, undefined, undefined, {
                        blobHTTPHeaders: {
                            blobContentType: mimeType,
                        },
                    });
                }
            }
        });
        return res.status(200).end();
    }
}

export const config = {
    api: {
        bodyParser: false, // Disable NextJS body parsing so formidable can do that itself (fails silently otherwise)
    },
};
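One caveat with the snippet above: the promise returned by uploadStream is never awaited, so the 200 response can be sent before the upload finishes, and upload errors are silently dropped. A variant that waits, kept structurally close to the original (same placeholder field name and assumed getFilesBlobContainer helper):

export default async function handler(req: NextApiRequest, res: NextApiResponse) {
    if (req.method !== 'POST') return res.status(405).end();
    const stream = new PassThrough();
    const form = new formidable.IncomingForm({ fileWriteStreamHandler: () => stream });
    await new Promise<void>((resolve, reject) => {
        form.parse(req, async (err, fields, files) => {
            if (err) return reject(err);
            const file = files['<form-file-input-name>'] as formidable.File;
            try {
                await getFilesBlobContainer()
                    .getBlockBlobClient(file.originalFilename ?? 'upload')
                    .uploadStream(stream, undefined, undefined, {
                        blobHTTPHeaders: { blobContentType: file.mimetype ?? undefined },
                    });
                resolve();
            } catch (e) {
                reject(e);
            }
        });
    });
    return res.status(200).end();
}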

Related

How to perform an HTTP post request using express on Cloud Functions for Firebase using busboy

Hi, I am trying to insert data into the database using a POST request, but the data is not being inserted.
On further investigation I found that to upload form-data in Firebase Functions, busboy needs to be used for image uploads, but I am not able to find a solution for using busboy with the POST method.
Could someone please help me resolve this issue?
Below is the code for reference.
app.js
const express = require('express')
//const router = true;
const router = new express.Router()
const userInfo = require('../models/userProfile')
const multer = require('multer');
var fs = require('fs');
var path = require('path');
var JSONStream = require('JSONStream');
const planCheck = require('../models/planDetails');
const login = require('../models/login_creditionals');
const {Storage} = require('@google-cloud/storage');
const {format} = require('util');
const busboy = require('busboy');

const storage = new Storage({
    projectId: "biz-1",
    keyFilename: "/Users/akm/pixNodes/functions/pixbiz-65a402.json"
});
const bucket = storage.bucket("gs://biz-1");

const upload = multer({
    storage: multer.memoryStorage(),
    limits: {
        fileSize: 10 * 1024 * 1024
    }
})

const uploadImageToStorage = (file) => {
    return new Promise((resolve, reject) => {
        if (!file) {
            reject('No image file');
        }
        let newFileName = `${Date.now() + path.extname(file.originalname)}`;
        let fileUpload = bucket.file(newFileName);
        const blobStream = fileUpload.createWriteStream({
            metadata: {
                contentType: file.mimetype
            }
        });
        blobStream.on('error', (error) => {
            reject('Something is wrong! Unable to upload at the moment.');
        });
        blobStream.on('finish', () => {
            // The public URL can be used to directly access the file via HTTP.
            const url = format(`https://storage.googleapis.com/${bucket.name}/${fileUpload.name}`);
            resolve(url);
        });
        blobStream.end(file.buffer);
    });
}

router.post('/userprofile/check', upload.single('profile_pic'), async (req, res) => {
    var reqFiles;
    var reqFilesUrl;
    reqFiles = req.file;
    if (reqFiles) {
        // const imagerUrl = await uploadImageToStorage(reqFiles)
        reqFilesUrl = imagerUrl;
        console.log(reqFilesUrl);
        const notify = new userInfo({
            userId: req.body.userId,
            mobile_number: req.body.mobileNumber,
            profile_pic: reqFilesUrl
        })
        try {
            console.log('success insert data');
            await notify.save((err, post) => {
                if (err) {
                    console.log(err);
                }
                //console.log('data saved', post);
                res.status(201).send(post);
            });
            // });
            // res.status(201).send();
            console.log('201');
        } catch (e) {
            //res.status(401);
            return res.send(e);
        }
    }
})
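Since the question is specifically about wiring busboy into a Cloud Functions POST handler, here is a minimal sketch of the usual pattern (busboy 1.x API; the export name and the response body are illustrative, not from the post above):

const functions = require('firebase-functions');
const busboy = require('busboy');

exports.uploadProfilePic = functions.https.onRequest((req, res) => {
    if (req.method !== 'POST') return res.status(405).end();
    const bb = busboy({ headers: req.headers });
    const chunks = [];
    let fileInfo = null;
    bb.on('file', (name, file, info) => {
        fileInfo = info; // { filename, encoding, mimeType }
        file.on('data', (chunk) => chunks.push(chunk));
    });
    bb.on('close', () => {
        const buffer = Buffer.concat(chunks);
        // buffer and fileInfo can now be handed to an upload helper such as
        // uploadImageToStorage({ buffer, mimetype: fileInfo.mimeType, originalname: fileInfo.filename })
        res.status(201).send({ size: buffer.length });
    });
    // Cloud Functions buffers the request body, so feed rawBody to busboy
    // instead of piping req.
    bb.end(req.rawBody);
});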

Google Cloud Function not uploading to bucket but no error in function

I have a Node.js function that writes several small SVG files locally and then attempts to upload those files to a Cloud Storage bucket.
In the function log, I am only seeing the message that the file was written to local disk and will now be uploaded. But there is no file in the bucket and no error is logged anywhere. I have made sure the timeout is set to 9 min (the maximum), so I am sure it is not timing out. What else should I check?
Any pointers will be appreciated.
exports.createQRCode = functions.storage.object().onFinalize(async (object) => {
    const qrcodeMonkeyKey = functions.config().qrcodemonkey.key;
    //console.log(`key for qrcode monkey is ${qrcodeMonkeyKey}`);
    const fileBucket = object.bucket; // The Storage bucket that contains the file.
    const filePath = object.name; // File path in the bucket.
    const contentType = object.contentType; // File content type.
    const metageneration = object.metageneration; // Number of times metadata has been generated. New objects have a value of 1.
    console.log(fileBucket);
    console.log(filePath);
    if (!filePath.toLowerCase().endsWith('.csv'))
        return console.log('not a csv so no need to do anything fancy');
    const bucket = admin.storage().bucket(fileBucket);
    const filePathComps = filePath.split('/');
    const folderName = filePathComps[filePathComps.length - 3];
    if (folderName !== "qrcode")
        return console.log('not a qr code csv so no need to do anything fancy');
    const fileName = filePathComps[filePathComps.length - 1];
    console.log(fileName);
    const path = require('path');
    const os = require('os');
    const fs = require('fs');
    const tempFilePath = path.join(os.tmpdir(), fileName);
    const metadata = {
        contentType: contentType,
    };
    await bucket.file(filePath).download({ destination: tempFilePath });
    const csv = require('csv-parser');
    const results = [];
    fs.createReadStream(tempFilePath)
        .pipe(csv({
            headers: ['uri', 'filename', 'foldername'],
            skipLines: 1
        }))
        .on('data', async (data) => {
            const x = data;
            results.push(data);
            //results.push({id:x.id,phoneNumber:x.phoneNumber,isInternational:x.isInternational,message:x.messageText,respStatus:resp.status,responsedata:resp.data});
        })
        .on('end', async () => {
            const pArray = [];
            results.forEach(x => {
                pArray.push(createQRCodeAndUpload(qrcodeMonkeyKey, x.filename, x.uri, x.foldername));
            });
            const finaloutput = await Promise.all(pArray);
            console.log(JSON.stringify(finaloutput));
            return;
        });
});

const createQRCodeAndUpload = async (qrcodeMonkeyKey, fileName, url, foldername) => {
    const bucket = admin.storage().bucket('vmallapp.appspot.com');
    const path = require('path');
    const os = require('os');
    const fs = require('fs');
    var axios = require("axios").default;
    console.log('processing ' + url);
    if (url !== "") {
        const dataToSend = {
            data: url,
            config: {
                body: 'circle',
                eye: 'frame14',
                eyeBall: 'ball16',
                bodyColor: "#032b5c",
                bgColor: "#84d4e2",
                "logo": "ae600e1267b9e477f0b635b60ffaec1d1c18d93b.png"
            },
            size: 1200,
            download: false,
            file: 'svg',
            gradientOnEyes: true
        }
        var options = {
            method: 'POST',
            url: 'https://qrcode-monkey.p.rapidapi.com/qr/custom',
            headers: {
                'content-type': 'application/json',
                'x-rapidapi-host': 'qrcode-monkey.p.rapidapi.com',
                'x-rapidapi-key': qrcodeMonkeyKey
            },
            data: dataToSend
        };
        var response = await axios.request(options);
        console.log('qrcode monkey returned status ' + response.status);
        const outputFilePath = path.join(os.tmpdir(), `${fileName}.svg`);
        fs.writeFileSync(outputFilePath, response.data);
        console.log(`${fileName}.svg written to local disk. now will upload`);
        try {
            await bucket.upload(outputFilePath, {
                destination: `qrcode/output/${fileName}.svg`
            });
        } catch (error) {
            console.log('error in uploading ' + error);
        }
        console.log('lets delete the file now and clean up local storage');
        fs.unlinkSync(outputFilePath);
        return 'all done';
    }
}
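One thing worth checking with this pattern: the work in the stream's end callback is not part of the promise returned by the onFinalize handler, so Cloud Functions can consider the invocation finished (and terminate background work) before the uploads ever run, with nothing logged. Wrapping the stream in a promise makes the handler wait; a sketch using the same variables as above:

await new Promise((resolve, reject) => {
    fs.createReadStream(tempFilePath)
        .pipe(csv({ headers: ['uri', 'filename', 'foldername'], skipLines: 1 }))
        .on('data', (data) => results.push(data))
        .on('error', reject)
        .on('end', resolve);
});
// Now the handler's returned promise includes the uploads.
const finaloutput = await Promise.all(
    results.map(x => createQRCodeAndUpload(qrcodeMonkeyKey, x.filename, x.uri, x.foldername))
);
console.log(JSON.stringify(finaloutput));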

Blank Image on bucket.upload

I am attempting to upload an image to my Firebase Storage with Node.js. I've used the following code:
const bucketName = 'mybucket.appspot.com';
// The path to your file to upload
const filePath = 'C:/Users/username/OneDrive/Desktop/testfunctions/dogs.png';
// The new ID for your GCS file
const destFileName = 'your-new-file-name.png';

//function upload() {
const {Storage} = require('@google-cloud/storage');
// Creates a client
const storage = new Storage();

async function uploadFile() {
    await storage.bucket(bucketName).upload(filePath, {
        destination: destFileName,
    });
    console.log(`${filePath} uploaded to ${bucketName}`);
}
uploadFile().catch(console.error);
However, after uploading, the image fails to load in the Firebase console: there is either an infinite loading spinner, or nothing but the file name.
Any help is appreciated.
I made this code a while ago; when searching I saw that nobody had solved your problem.
This method will save the image in Storage and, in addition, create a link for it in the Firebase Realtime Database.
// Important imports
var admin = require("firebase-admin");
var serviceAccount = require("./serviceAccountKey.json");
const { uuid } = require("uuidv4");
const fs = require("fs");

// This is the important part: tell Firebase the database URL + storage URL and connect the service account admin
admin.initializeApp({
    credential: admin.credential.cert(serviceAccount),
    databaseURL: "https://yourfirebase.europe-west1.firebasedatabase.app",
    storageBucket: "gs://yourfirebasestorage.com",
});

// Get a database reference
const db = admin.database();
const ref = db.ref("Parent");
const usersRef = ref.child("Child");

uploadImageToFirebase("imageName", ".png");

// Upload image to Firebase function
function uploadImageToFirebase(imageName, imageFormat) {
    var myBucketLink = "yourfirebasestoragewithoutgs.appspot.com";
    // CHANGE: to where you want the root to be
    //const usersRef = ref.child("anotherPath");
    var bucket = admin.storage().bucket();
    var filename = "./" + imageName + imageFormat;
    try {
        if (fs.existsSync(filename)) {
            var UUID = uuid();
            async function uploadFile() {
                const metadata = {
                    metadata: {
                        // This line is very important: it creates a download token.
                        firebaseStorageDownloadTokens: UUID,
                    },
                    contentType: "image/png",
                    cacheControl: "public, max-age=31536000",
                };
                // Uploads a local file to the bucket
                await bucket.upload(filename, {
                    // Support for HTTP requests made with `Accept-Encoding: gzip`
                    gzip: true,
                    metadata: metadata,
                });
                console.log(`${filename} uploaded.`);
                var HTTP_ImageLink =
                    "https://firebasestorage.googleapis.com/v0/b/" +
                    myBucketLink +
                    "/o/" +
                    imageName +
                    imageFormat +
                    "?alt=media&token=" +
                    UUID;
                console.log(HTTP_ImageLink);
                usersRef.child("images").set({
                    link: HTTP_ImageLink,
                });
            }
            uploadFile().catch(console.error);
        } else {
            console.log("Can't Upload");
        }
    } catch (err) {
        console.log(err);
    }
}
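As an aside, if you would rather not manage download tokens yourself, the Admin SDK can also produce a time-limited signed URL. A small sketch (the function name and expiry date are illustrative):

async function getUploadedImageUrl(filename) {
    const bucket = admin.storage().bucket();
    // Returns a URL that grants read access until the given expiry.
    const [url] = await bucket.file(filename).getSignedUrl({
        action: 'read',
        expires: '2030-01-01', // illustrative expiry
    });
    return url;
}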

How to process image data from post request in node without express/multer?

I'm trying to submit a POST request with an image to an Azure Functions endpoint. The endpoint is going to upload the image to Azure Blob Storage. Everything is hooked up, but I'm not sure how to process the form-encoded image data so it can be uploaded to the blob storage account. I'd like to avoid adding Express and am looking for alternatives to multer. I've consulted the documentation, but it also uses Express/multer.
Below is what I have so far for the Azure Function. It's currently able to upload to the Azure storage account, but the data is not correct, since the image cannot be displayed when I download or view it.
export const httpTrigger: AzureFunction = async function (context: Context, req: HttpRequest): Promise<void> {
    const storageConnectionString = config.storageConnectionString;
    // Create the BlobServiceClient object which will be used to create a container client
    const blobServiceClient = BlobServiceClient.fromConnectionString(storageConnectionString);
    // Create a unique name for the container
    const containerName = config.storageContainerName;
    // Get a reference to a container
    const containerClient = blobServiceClient.getContainerClient(containerName);
    const blobName = context.bindingData.imageName;
    // Get a block blob client
    const blockBlobClient = containerClient.getBlockBlobClient(blobName);
    if (req.method === "POST") {
        const content = req.body;
        const blobHeaders: BlobHTTPHeaders = {
            blobContentType: "image/jpeg",
        }
        const options: BlockBlobUploadOptions = { blobHTTPHeaders: blobHeaders };
        const uploadBlobResponse = await blockBlobClient.upload(content, content.length, options);
    }
};
Here is my request from Postman, with the token and the path to the image removed:
curl --location --request POST 'http://localhost:7071/api/images/jaimeLannister.jpg' \
--header 'Cookie: BL_SiteLanguageID=1; __RequestVerificationToken=TOKEN' \
--form 'image=@/C:/jaimeLannister.jpg'
Any help would be appreciated!
As evilSnobu said, you could use parse-multipart from npm, which seems to be a more lightweight alternative to formidable.
Refer to the code as below:
module.exports = async function (context, req) {
    context.log('JavaScript HTTP trigger function processed a request.');
    const streamifier = require('streamifier');
    const multipart = require('parse-multipart');
    const azureStorage = require('azure-storage');

    var bodyBuffer = Buffer.from(req.body);
    var boundary = multipart.getBoundary(req.headers['content-type']);
    var parts = multipart.Parse(bodyBuffer, boundary);
    var filedata = parts[0].data; // Image buffer data
    var filename = parts[0].filename;

    var a = azureStorage.createBlobService('xxxx', 'xxxxxxxxxxxx');
    try {
        var b = a.createBlockBlobFromStream('container', filename, streamifier.createReadStream(new Buffer(filedata)), filedata.length, (err, result) => {
            if (err) {
                console.log("Image upload failed", err);
            }
        });
    } catch (error) {
        console.error(error);
    }
};
I sent the image using Postman. After uploading the image to the portal, the blob content type is application/octet-stream, and I can download and view it successfully.
For more details, you could refer to this article.
Update:
The azure-storage library and "new Buffer" are both out of date. Refer to the code below using @azure/storage-blob.
import { AzureFunction, Context, HttpRequest } from "@azure/functions";
import { config } from "../cosmos/config";
import { BlobServiceClient, BlockBlobUploadStreamOptions } from "@azure/storage-blob";
const streamifier = require("streamifier");
const multipart = require("parse-multipart");

export const httpTrigger: AzureFunction = async function (context: Context, req: HttpRequest): Promise<void> {
    const storageConnectionString = config.storageConnectionString;
    // Create the BlobServiceClient object which will be used to create a container client
    const blobServiceClient = BlobServiceClient.fromConnectionString(storageConnectionString);
    // Create a unique name for the container
    const containerName = config.storageContainerName;
    // Get a reference to a container
    const containerClient = blobServiceClient.getContainerClient(containerName);
    // Gets the file name from the url, could be a bug if you want the filename from the file contents
    const blobName = context.bindingData.imageName;
    // Get a block blob client
    const blockBlobClient = containerClient.getBlockBlobClient(blobName);

    if (req.method === "POST") {
        const bodyBuffer = Buffer.from(req.body);
        const boundary = multipart.getBoundary(req.headers['content-type']);
        const parts = multipart.Parse(bodyBuffer, boundary);
        const filedata = parts[0].data;
        const filename = parts[0].filename;
        const options: BlockBlobUploadStreamOptions = {};
        try {
            const result = await blockBlobClient.uploadStream(streamifier.createReadStream(Buffer.from(filedata)), filedata.length);
            context.res = { status: 200 };
            return;
        } catch (err) {
            console.log(err);
        }
    }
    context.res = { status: 302, headers: { "location": blockBlobClient.url }, body: null };
};
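Note that this version leaves the options object empty, so the blob's content type defaults to application/octet-stream rather than the image/jpeg header the question's first attempt set. If the content type matters, uploadStream takes the options as its fourth argument; a sketch, assuming parse-multipart exposes the part's content type as parts[0].type:

const options: BlockBlobUploadStreamOptions = {
    blobHTTPHeaders: { blobContentType: parts[0].type || "application/octet-stream" },
};
const result = await blockBlobClient.uploadStream(
    streamifier.createReadStream(Buffer.from(filedata)),
    filedata.length, // buffer size, as above
    undefined,       // max concurrency (default)
    options
);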
Thanks to @Joey Cai's and @evilSnobu's answers I was able to get it to work using parse-multipart and streamifier, with the same code as in the update above.

Node.js Firebase Function sending Base64 image to External API

I’m using Firebase Functions with a Storage trigger in Node.js to send uploaded image data to an external API endpoint where photos are uploaded.
I’m currently taking images uploaded to a bucket in my Firebase Storage, converting them to base64 strings, and plugging them into my dictionary for the request.
My current issue is that the dictionary seems to be cut short. I looked at the logs in the Firebase console, and it seems like they end after the base64 variable.
I’m not sure whether this is a bug with the syntax, with the way I’m using the base64 string, or with Firebase Functions. If anyone knows what might be going on, please let me know.
const request = require('request-promise');
const gcs = require('@google-cloud/storage')();
const path = require('path');
const os = require('os');
const fs = require('fs');
const firebase = require('firebase');

exports.identifyUpdate = functions.storage.object().onFinalize((object) => {
    const fileBucket = object.bucket;
    const filePath = object.name;
    const contentType = object.contentType;
    const fileName = path.basename(filePath);

    if (!filePath.substring(0, filePath.indexOf('/')) == 'updates') {
        console.log("Triggered by non-update photo")
        return null;
    }
    console.log("Update photo added")

    // Create Firebase app (for Realtime Database access)
    var config = {
        apiKey: "[apikey]",
        authDomain: "[PROJECT_ID].firebaseapp.com",
        databaseURL: "https://[PROJECT_ID].firebaseio.com",
        storageBucket: "[PROJECT_ID].appspot.com",
    };
    if (!firebase.apps.length) {
        firebase.initializeApp(config);
    }

    // Trace back to Update stored in Realtime Database
    const database = firebase.database().ref()
    const pendingRef = database.child('pendingUpdates')
    console.log(filePath)
    const splitPath = filePath.split(path.sep)
    const patientID = splitPath[1]
    console.log('Patient ID: ' + patientID)
    const updateID = splitPath[2]
    console.log('Update ID: ' + updateID)
    const updateRef = pendingRef.child(patientID).child(updateID)
    console.log('Found Update reference')
    const photoRef = updateRef.child('photoURLs').child(fileName)
    console.log('Photo Reference: ' + photoRef)

    // Download and convert image to base64
    const bucket = gcs.bucket(fileBucket)
    const tempFilePath = path.join(os.tmpdir(), fileName)
    const metadata = {
        contentType: contentType
    };
    var base64;
    return bucket.file(filePath).download({
        destination: tempFilePath
    }).then(() => {
        console.log('Image downloaded locally to', tempFilePath)
    }).then(() => {
        base64 = base64_encode(tempFilePath)
        console.log("Base 64: " + base64)
    }).then(() => {
        // Send image data to Kairos
        var options = {
            method: 'POST',
            uri: 'https://api.kairos.com/recognize',
            body: {
                'image': base64,
                'gallery_name': 'gallerytest1'
            },
            headers: {
                'app_id': '[id]',
                'app_key': '[key]'
            },
            json: true
        }
        return new Promise(() => {
            console.log(options)
            request(options)
                .then(function (repos) {
                    console.log('API call succeeded');
                    console.log('Kairos response: ' + repos);
                    const apiResult = repos['images']['transaction']['subject_id']
                    console.log("Transaction " + JSON.stringify(apiResult))
                })
                .catch(function (err) {
                    console.log('API call failed')
                })
        });
    })
    // Delete app instance (to prevent concurrency leaks)
    const deleteApp = () => app.delete().catch(() => null);
    deleteApp.call
})

function base64_encode(file) {
    // read binary data
    var bitmap = fs.readFileSync(file);
    // convert binary data to base64 encoded string
    return new Buffer(bitmap).toString('base64');
}
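Two things stand out in this code, and the first could explain logs that stop after the base64 value: the new Promise(() => { ... }) wrapper is constructed with an executor that never calls resolve or reject, so the promise returned from the function never settles and Cloud Functions will eventually terminate the invocation; and new Buffer() is deprecated. A sketch of both fixes (not confirmed as the poster's actual problem):

// Return the request's own promise instead of a never-resolving wrapper,
// so the function stays alive until the API call completes.
return request(options)
    .then((repos) => {
        console.log('Kairos response: ' + JSON.stringify(repos));
        return repos['images']['transaction']['subject_id'];
    })
    .catch((err) => {
        console.log('API call failed', err);
    });

// Buffer-free base64 helper, replacing the deprecated new Buffer().
function base64_encode(file) {
    return fs.readFileSync(file).toString('base64');
}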
Image output: (screenshot not included)