I'm trying to create an image on the Google Firebase server with node-canvas and store it in Firebase Storage.
const functions = require('firebase-functions');
const admin = require('firebase-admin');
const gcs = require('@google-cloud/storage')();
const path = require('path');
const Canvas = require('canvas-prebuilt');
const env = require('dotenv').config();
try {admin.initializeApp(functions.config().firebase);} catch(e) {}
//Trigger on creation of a new post
exports.default = functions.database.ref('/posts/{postId}').onCreate(event => {
    // Get the postId
    const postId = event.params.postId;
    console.log('We have a new post ' + postId);

    // Get data from the postId
    return admin.database().ref('/posts/' + postId).once('value').then(function(snapshot) {
        const text = snapshot.val().text;
        const canvas = new Canvas(1024, 512);
        const ctx = canvas.getContext('2d');

        // Draw background
        ctx.fillStyle = '#000';
        ctx.fillRect(0, 0, 1024, 512);

        // Draw text
        ctx.font = 'bold 66px Arial';
        ctx.textAlign = 'center';
        ctx.fillStyle = '#fff';
        ctx.fillText(text, 120, 256, 784);

        // Use the postId to name the file, e.g. -L1rVJUAtSbc_FampT0D.png
        var filename = postId + '.png';
        // Create the file metadata
        var metadata = {
            contentType: 'image/png'
        };

        const bucket = gcs.bucket('images');
        const filePath = 'images/' + filename;

        return canvas.toDataURL('image/png', function(err, png) {
            // Save on Firebase Storage
            return bucket.upload(png, {
                destination: filePath,
                metadata: metadata
            }).then(() => {
                console.log('Image uploaded to Storage at ', filePath);
            });
        });
    });
});
But when I try to save it with toDataURL, I get this error:
ENAMETOOLONG: name too long, stat 'data:image/png;base64,iVBORw0 ...'
And when I try with toBuffer, I get this one:
TypeError: Path must be a string. Received
at assertPath (path.js:7:11)
at Object.basename (path.js:1362:5)
at Bucket.upload (/user_code/node_modules/@google-cloud/storage/src/bucket.js:2259:43)
at /user_code/node_modules/@google-cloud/storage/node_modules/@google-cloud/common/src/util.js:777:22
at Bucket.wrapper [as upload] (/user_code/node_modules/@google-cloud/storage/node_modules/@google-cloud/common/src/util.js:761:12)
at /user_code/sendTweet.js:107:21
I also tried toBlob, but that function doesn't exist server-side with node-canvas.
Does anyone know how I should save the image server-side before transferring it to Firebase Storage?
Thanks!
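For reference, one pattern that sidesteps both errors (a rough, untested sketch; bucket, filename, filePath, and metadata are the variables from the code above) is to render the canvas to a Buffer with toBuffer(), write it to the function's /tmp directory, and upload that temporary file instead of passing a data URL to bucket.upload:
// Rough sketch: write the PNG buffer to /tmp (the only writable location in
// Cloud Functions), upload the temp file, then clean it up.
const fs = require('fs');
const os = require('os');
const tempFilePath = path.join(os.tmpdir(), filename);
fs.writeFileSync(tempFilePath, canvas.toBuffer());
return bucket.upload(tempFilePath, {
    destination: filePath,
    metadata: metadata
}).then(() => {
    console.log('Image uploaded to Storage at', filePath);
    fs.unlinkSync(tempFilePath); // Remove the temporary file.
});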
Related
I'm working on a small project to convert a large XML file into several formatted PDF documents. The large XML contains multiple similarly formatted XMLs, so I'm using a single HTML template to print all the documents. After producing all the PDFs, I also need to produce a metadata file with some basic info on each document that was printed.
I thought the fan-out/fan-in pattern of Durable Functions was a perfect fit for my use case. I'm working with Node.js. I set up all my code and it seems to work fine locally. The orchestrator function looks like this:
const df = require("durable-functions");
module.exports = df.orchestrator(function* (context) {
    var xmldata = yield context.df.callActivity("DurablegetblobJS1", "");
    var tasks = [];
    for (const file of xmldata) {
        tasks.push(context.df.callActivity("Durableactivityjs2", file));
    }

    const outputs = yield context.df.Task.all(tasks);
    var finalout = "";
    for (const out of outputs) {
        console.log('I am done1 :' + out);
        finalout = finalout + out;
    }
    return finalout;
});
DurablegetblobJS1: fetches the entire XML and splits it into multiple smaller XMLs (one per document); a sketch of what that might look like is below.
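For illustration, a rough sketch of what that splitting activity might look like with xml2js, assuming the HPDPSMsg/DocumentRequest structure used by the helpers further down (this is not the actual DurablegetblobJS1 code, and the input binding name is a placeholder):
// Hypothetical sketch of DurablegetblobJS1: parse the combined XML and return one
// XML string per DocumentRequest element.
var xml2js = require('xml2js');
module.exports = async function (context) {
    var combinedXml = context.bindings.inputBlob; // placeholder input binding for the large XML
    var parsed = await new xml2js.Parser().parseStringPromise(combinedXml);
    var builder = new xml2js.Builder();
    // Wrap each DocumentRequest back into its own HPDPSMsg envelope.
    return parsed['HPDPSMsg']['DocumentRequest'].map(function (request) {
        return builder.buildObject({ HPDPSMsg: { DocumentRequest: [request] } });
    });
};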
Durableactivityjs2: fetches the HTML template, extracts the different values from the individual XMLs, applies them to the HTML, and finally writes the PDF to Azure Storage. It returns the name of the PDF document that was printed, for creating the metadata file. The code for this is below.
var fs = require('fs');
var xml2js = require('xml2js');
var html_to_pdf = require('html-pdf-node');
var parser = new xml2js.Parser();

module.exports = async function (context) {
    //console.log("Hello from activity :")
    var xmldict = {};
    var xmltext = context.bindings.name;

    // Extract the nodes and attributes
    metadata(xmltext, xmldict);
    report(xmltext, xmldict);
    context.log(xmldict);

    const { BlobServiceClient } = require("@azure/storage-blob");
    // Load the .env file if it exists
    require("dotenv").config();
    const AZURE_STORAGE_CONNECTION_STRING = process.env.STORAGE_CONNECTION_STRING || "";
    const blobServiceClient = BlobServiceClient.fromConnectionString(
        AZURE_STORAGE_CONNECTION_STRING
    );

    var containerClient = blobServiceClient.getContainerClient('test');
    var blobname = 'comb_template.html';
    var blockBlobClient = containerClient.getBlockBlobClient(blobname);
    var downloadBlockBlobResponse = await blockBlobClient.download(0);
    var html_template = await streamToText(downloadBlockBlobResponse.readableStreamBody);

    let options = { format: 'A4' };
    let file = { content: html_template };
    const x = await writepdf1(file, options, blobServiceClient, xmldict);
    console.log("Written Blob PDF");
    return x;
};

async function writepdf1(file, options, blobServiceClient, xmldict) {
    const pdfBuffer = await html_to_pdf.generatePdf(file, options);
    const containerClient = blobServiceClient.getContainerClient('test2');
    const targetblob = xmldict['OU'] + '/' + xmldict['ReportName'] + '/' + xmldict['OU'] + '_' + xmldict['ReportName'] + '_' + xmldict['DocumentID'] + '_' + '.pdf';
    console.log('Blob name :' + targetblob);
    const blockBlobClient_t = containerClient.getBlockBlobClient(targetblob);
    const uploadBlobResponse = await blockBlobClient_t.upload(pdfBuffer, pdfBuffer.length);
    return targetblob;
}

async function streamToText(readable) {
    readable.setEncoding('utf8');
    let data = '';
    for await (const chunk of readable) {
        data += chunk;
    }
    return data;
}

function metadata(xmltext, xmldict) {
    parser.parseString(xmltext, function (err, result) {
        var test1 = result['HPDPSMsg']['DocumentRequest'][0]['MetaData'][0];
        Object.entries(test1).forEach(([key, value]) => {
            xmldict[key] = value[0];
        });
    });
}

function report(xmltext, xmldict) {
    parser.parseString(xmltext, function (err, result) {
        var test2 = result['HPDPSMsg']['DocumentRequest'][0]['Report'][0]['$'];
        Object.entries(test2).forEach(([key, value]) => {
            xmldict[key] = value;
        });
    });
}
However, when I deploy the entire project to an Azure premium function app (EP1 - Windows), I see errors in Application Insights when I execute my function, and the PDFs are never generated.
Activity function 'Durableactivityjs2' failed: Could not find browser
revision 818858. Run "PUPPETEER_PRODUCT=firefox npm install" or
"PUPPETEER_PRODUCT=firefox yarn install" to download a supported
Firefox browser binary
I'm a bit clueless about how to resolve this. Any help or suggestions would be appreciated.
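This error comes from Puppeteer, which html-pdf-node uses under the hood: the Chromium revision it expects was not found in the deployed environment (typically because node_modules were installed on a different platform, or the browser download was skipped during the build). One workaround sometimes used, shown here only as a sketch (it bypasses html-pdf-node, and CHROMIUM_PATH is a placeholder for wherever a Chromium/Chrome binary actually exists in the function app), is to drive Puppeteer directly with an explicit executablePath:
// Sketch only: render HTML to a PDF buffer with Puppeteer, using an explicit
// executablePath so the function does not depend on Puppeteer's downloaded revision.
const puppeteer = require('puppeteer');
async function htmlToPdfBuffer(html) {
    const browser = await puppeteer.launch({
        executablePath: process.env.CHROMIUM_PATH, // placeholder: must point at a real binary
        args: ['--no-sandbox', '--disable-setuid-sandbox']
    });
    try {
        const page = await browser.newPage();
        await page.setContent(html, { waitUntil: 'networkidle0' });
        return await page.pdf({ format: 'A4' });
    } finally {
        await browser.close();
    }
}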
I'm trying to upload an Excel file to an Azure Storage blob in chunks, using stageBlock and commitBlockList from the BlockBlobClient class. The upload seems to succeed, but when I try to download and open the file, it appears to be broken.
I'm using React and Node.js to do this. The code follows below.
In UI
const chunkSize = (1024 * 1024) * 25; // file chunk size
// here slicing the file and sending it to api method
const fileReader = new FileReader();
const from = currentChunkIndexRef.current * chunkSize;
const to = from + chunkSize;
const blob = file.slice(from, to);
fileReader.onload = ((e: any) => uploadChunksToBlob(e, file, obj));
fileReader.readAsDataURL(blob);
// api method
const uploadChunksToBlob = async (event: any, file: File, obj: any) => {
    try {
        const totalChunks = Math.ceil(file.size / chunkSize);
        const uploadChunkURL = `/upload?currentChunk=${currentChunkIndexRef.current}&totalChunks=${totalChunks}&file=${file.name}&type=${file.type}`;
        console.log(event.target.result);

        const fileUpload = await fetch(uploadChunkURL, {
            method: "POST",
            headers: { "Content-Type": "application/octet-stream" },
            body: JSON.stringify(event.target.result),
        });
        const fileUploadJson = await fileUpload.json();

        const isLastChunk = (totalChunks - 1) === currentChunkIndexRef.current;
        if (!isLastChunk) {
            console.log({ Chunk: currentChunkIndexRef.current });
            currentChunkIndexRef.current = currentChunkIndexRef.current + 1;
            // eslint-disable-next-line @typescript-eslint/no-use-before-define
            uploadFileToAzureBlob(file, obj);
        } else {
            console.log("File Uploaded");
        }
    } catch (error) {
        console.log("uploadFileToAzureBlob Catch Error" + error);
    }
}
// In Node
const sharedKeyCredential = new StorageSharedKeyCredential(
config.StorageAccountName,
config.StorageAccountAccessKey
);
const pipeline = newPipeline(sharedKeyCredential);
const blobServiceClient = new BlobServiceClient(
`https://${config.StorageAccountName}.blob.core.windows.net`,
pipeline
);
const containerName = getContainerName(req.headers.key, req.headers.clientcode);
const identifier = uuid.v4();
const blobName = getBlobName(identifier, file);
const containerClient = blobServiceClient.getContainerClient(containerName);
const blockBlobClient = containerClient.getBlockBlobClient(blobName);
try {
    let bufferObj = Buffer.from(`${file}_${Number(currentChunk)}`, "utf8"); // Create buffer object, specifying utf8 as encoding
    let base64String = bufferObj.toString("base64"); // Encode the Buffer as a base64 string
    blockIds = [...blockIds, base64String];

    const bufferedData = Buffer.from(req.body);
    let resultOfUnitArray = new Uint8Array(bufferedData.length);
    for (let j = 0; j < bufferedData.length; j++) {
        resultOfUnitArray[j] = bufferedData.toString().charCodeAt(j);
    } // Converting string to bytes

    const stageBlockResponse = await blockBlobClient.stageBlock(base64String, resultOfUnitArray, resultOfUnitArray.length, {
        onProgress: (e) => {
            console.log("bytes sent: " + e.loadedBytes);
        }
    });

    if ((Number(totalChunks) - 1) === (Number(currentChunk))) {
        const commitblockResponse = await blockBlobClient.commitBlockList(blockIds, { blobHTTPHeaders: req.headers });
        res.json({ uuid: identifier, message: 'File uploaded to Azure Blob storage.' });
    } else {
        res.json({ message: `Current Chunks ${currentChunk} is Successfully Uploaded` });
    }
} catch (err) {
    console.log({ err });
    res.json({ message: err.message });
}
I don't know what I'm doing wrong here.
Any help would be appreciated. Thank you!
The problem is that you convert the chunk into a data URL; that's where things break.
It appears you're under the impression that you need to encode a blob into a string before sending it. You don't: the browser's fetch API can handle a raw binary payload.
So on the client (browser) side, you don't need to go through FileReader. Just send the chunk blob directly.
const blob = file.slice(from, to);
// ...
fetch(uploadChunkURL, {
    method: "POST",
    headers: { "Content-Type": "application/octet-stream" },
    body: blob,
});
On the server (Node.js) side, you'll receive the chunk in raw binary form, so you can forward it untouched to Azure Storage. There's no need to decode from a string and copy bytes into resultOfUnitArray as you currently do.
const base64String = Buffer.from(`${file}_${Number(currentChunk)}`, "utf8").toString("base64");
const bufferedData = Buffer.from(req.body);
const stageBlockResponse = await blockBlobClient.stageBlock(
    base64String,
    bufferedData,
    bufferedData.length
);
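One caveat, assuming the Node side is an Express app (the question doesn't show the server setup): req.body only arrives as a raw Buffer if a raw body parser is registered for the octet-stream content type, for example:
// Assumption: the upload route is served by Express. express.raw() makes req.body
// a Buffer for application/octet-stream requests; the size limit is a placeholder.
const express = require('express');
const app = express();
app.use(express.raw({ type: 'application/octet-stream', limit: '50mb' }));
Also note that Azure requires every block ID within a blob to have the same length, so it's worth padding the chunk index (for example with String.prototype.padStart) before base64-encoding it.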
I want to upload large files to MongoDB with GridFS, but the bucket name is set to fs.files/fs.chunks. Is there any way I can change it to a custom name?
const uri = 'mongodb://localhost:27017';
const dbName = 'registration';
const client = new mongodb.MongoClient(uri);
client.connect(function(error) {
    assert.ifError(error);
    const db = client.db(dbName);
    var bucket = new mongodb.GridFSBucket(db);

    // UPLOAD FILE TO DB THROUGH STREAMING
    fs.createReadStream('./uploads/' + filename + '.html')
        .pipe(bucket.openUploadStream(filename + '.html'))
        .on('error', function(error) {
            assert.ifError(error);
        })
        .on('finish', function(res) {
            //process.exit(0);
        });
});
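For reference, the GridFSBucket constructor accepts an options object with a bucketName field (the default is 'fs'), which controls the collection prefix. A minimal sketch reusing the variables above ('myUploads' is just an example name):
// With bucketName set, the driver writes to myUploads.files / myUploads.chunks
// instead of fs.files / fs.chunks.
var bucket = new mongodb.GridFSBucket(db, { bucketName: 'myUploads' });
fs.createReadStream('./uploads/' + filename + '.html')
    .pipe(bucket.openUploadStream(filename + '.html'))
    .on('finish', function() {
        console.log('Stored in myUploads.files and myUploads.chunks');
    });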
I am trying to upload a file onto IPFS.
My code is as below:
var express = require('express')
var fs = require('fs')
const { create, globSource } = require('ipfs-http-client')
const ipfs = create()
....
const filesAdded = await ipfs.add(
    { path: fileName, content: file },
    {
        progress: (len) => console.log('Uploading file...' + len),
    },
)
console.log(filesAdded)
const fileHash = filesAdded.cid.toString()
This code returns a hash value, but I can't see my file (an image) at https://ipfs.io/ipfs/{hash}
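For reference, a public gateway can only fetch the content if the node that added it is reachable from the public IPFS network and still holds the data; pinning the CID locally (or on a pinning service) and retrying the gateway after some time is the usual first check. A minimal sketch with ipfs-http-client:
// Pin the newly added content on the local node so it stays available, then log
// the public gateway URL to test once the content has had time to propagate.
await ipfs.pin.add(filesAdded.cid)
console.log('Try: https://ipfs.io/ipfs/' + filesAdded.cid.toString())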
I am trying to implement a profile picture feature in my Android app, so I have used the Generate Thumbnail sample from Firebase. Whenever I upload a full-sized image, it generates a thumbnail for me. But I want to update the thumbnail's URL in my Realtime Database once the thumbnail is generated.
const functions = require('firebase-functions');
const mkdirp = require('mkdirp-promise');
const gcs = require('@google-cloud/storage')();
const spawn = require('child-process-promise').spawn;
const LOCAL_TMP_FOLDER = '/tmp/';
// Max height and width of the thumbnail in pixels.
const THUMB_MAX_HEIGHT = 100;
const THUMB_MAX_WIDTH = 100;
// Thumbnail prefix added to file names.
const THUMB_PREFIX = 'thumb_';
/**
 * When an image is uploaded to the Storage bucket, we generate a thumbnail automatically
 * using ImageMagick.
 */
exports.generateThumbnail = functions.storage.object().onChange(event => {
    const filePath = event.data.name;
    const filePathSplit = filePath.split('/');
    const fileName = filePathSplit.pop();
    const fileDir = filePathSplit.join('/') + (filePathSplit.length > 0 ? '/' : '');
    const thumbFilePath = `${fileDir}${THUMB_PREFIX}${fileName}`;
    const tempLocalDir = `${LOCAL_TMP_FOLDER}${fileDir}`;
    const tempLocalFile = `${tempLocalDir}${fileName}`;
    const tempLocalThumbFile = `${LOCAL_TMP_FOLDER}${thumbFilePath}`;

    // Exit if this is triggered on a file that is not an image.
    if (!event.data.contentType.startsWith('image/')) {
        console.log('This is not an image.');
        return;
    }
    // Exit if the image is already a thumbnail.
    if (fileName.startsWith(THUMB_PREFIX)) {
        console.log('Already a Thumbnail.');
        return;
    }
    // Exit if this is a move or deletion event.
    if (event.data.resourceState === 'not_exists') {
        console.log('This is a deletion event.');
        return;
    }

    // Create the temp directory where the storage file will be downloaded.
    return mkdirp(tempLocalDir).then(() => {
        // Download file from bucket.
        const bucket = gcs.bucket(event.data.bucket);
        return bucket.file(filePath).download({
            destination: tempLocalFile
        }).then(() => {
            console.log('The file has been downloaded to', tempLocalFile);
            // Generate a thumbnail using ImageMagick.
            return spawn('convert', [tempLocalFile, '-thumbnail', `${THUMB_MAX_WIDTH}x${THUMB_MAX_HEIGHT}>`, tempLocalThumbFile]).then(() => {
                console.log('Thumbnail created at', tempLocalThumbFile);
                // Uploading the Thumbnail.
                return bucket.upload(tempLocalThumbFile, {
                    destination: thumbFilePath
                }).then(() => {
                    console.log('Thumbnail uploaded to Storage at', thumbFilePath);
                    // Don't know what to write here.
                });
            });
        });
    });
});
Once the final promise of this code completes, I want to take the download URL of the generated profile picture and store it at /users/{userId}/profilePic.
You'll need to add a couple of things to achieve what you want here. First, you need to require firebase-admin and initialize the app:
const admin = require('firebase-admin');
admin.initializeApp(functions.config().firebase);
This should be done at the top of the file, after you require the other components.
Once you've got that, you can add some code to the final promise to update the database with the new reference:
.then(() => {
    console.log('Thumbnail uploaded to Storage at', thumbFilePath);
    // userID here would be the userID that you want to update. I couldn't see a reference to it in the code you provided.
    // Use getSignedUrl() on the Cloud Storage File to obtain a downloadable URL,
    // then write it to the database.
    return bucket.file(thumbFilePath).getSignedUrl({
        action: 'read',
        expires: '03-01-2500'
    }).then(function(results) {
        const url = results[0];
        var databaseRef = admin.database().ref('users').child(userID).child('profilePic');
        // The transaction update function must return a plain value, not a promise.
        return databaseRef.transaction(function(oldValue) {
            return url;
        });
    });
});
You can read about using a transaction to update the reference here: https://firebase.google.com/docs/reference/admin/node/admin.database.Reference#transaction
I've only included the mandatory transactionUpdate parameter and not the optional parameters that you may want.
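As a follow-up, and purely an assumption about the storage layout (the question doesn't show where the original image is uploaded): if full-sized images land under users/{userId}/..., the userID can be recovered from the file path at the top of the function, for example:
// Assumes uploads are stored as users/{userId}/<fileName>; adjust to your actual layout.
const userID = filePath.split('/')[1];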