How to display base64 string as image in email, using AWS SES - node.js

I am trying to show an image inside my email, but the image is not displayed. I am using a base64 string that I build from an object fetched from an S3 bucket.
The email arrives in my inbox; the only problem is that the image does not work when I pass the generated data URI. If I hard-code a base64 string directly in the HTML, it works.
I need to fetch the image from S3 and have it appear inline in the email.
"use strict";
const fs = require("fs");
const path = require("path")
const Handlebars = require('handlebars');
const {SESClient, sendEmailCommand} = require("#aws-sdk/client-ses");
const {S3Client, GetObjectCommand} = require("#aws-sdk/client-s3");
let S3=null, SES=null;
const streamToBuffer = async(stream) =>{
return new Promise((resolve, reject) =>{
const chunks = [];
stream.on("data", (chunk) =>{chunks.push(chunk)});
stream.on("error", reject);
stream.on("end", () =>{resolve(Buffer.conact(chunks))});
})
}
exports.handler = async (event) => {
  if (S3 === null) {
    S3 = new S3Client({ region: MY_REGION });
  }
  if (SES === null) {
    SES = new SESClient({ region: MY_REGION });
  }
  try {
    let details = event.detail.fullDocument;
    let imageKey = `${details.dir}/myimage.png`;
    let imageFileFromS3 = await S3.send(
      new GetObjectCommand({
        Bucket: MY_BUCKET_NAME, Key: imageKey
      })
    );
    let imageFileBuffer = await streamToBuffer(imageFileFromS3.Body);
    let bufferToBase64 = imageFileBuffer.toString("base64");
    const emailSubject = "Hey!! Test mail with image";
    const emailData = {
      Name: "Email Tester",
      ImageSrc: `data:image/png;base64,${bufferToBase64}`
    };
    let htmlTemplate = Handlebars.compile(fs.readFileSync(path.join(__dirname, "templateSrc", "email.html")).toString());
    let textTemplate = Handlebars.compile(fs.readFileSync(path.join(__dirname, "templateSrc", "email.txt")).toString());
    let emailResult = await SES.send(new SendEmailCommand({
      Source: "Source_test@email.com", // dummy email for now
      Destination: {
        ToAddresses: ["to_test@email.com"] // dummy address
      },
      Message: {
        Subject: {
          Charset: "UTF-8",
          Data: emailSubject
        },
        Body: {
          Text: {
            Charset: "UTF-8",
            Data: textTemplate(emailData)
          },
          Html: {
            Charset: "UTF-8",
            Data: htmlTemplate(emailData)
          }
        }
      }
    }));
    return emailResult;
  } catch (error) {
    console.log(error);
  }
};
email.txt
Dear {{Name}}
Thanks for asking images on email.
Please find your requested images below
Face image
Bus image
-----Thanks
Email.html
<h1>Dear {{Name}}</h1>
<p>Thanks for asking images on email.</p>
<p>Please find your requested image below</p>
<p>face Image</p>
<img src="{{ImageSrc}}" />
<p>Bus Image</p>
<img src="data:image/gif;base64,R0lGODlhPQBEAPeoAJosM//AwO/AwHVYZ/z595kzAP/s7P+goOXMv8+fhw/v739/f+8PD98fH/8mJl+fn/9ZWb8/PzWlwv///6wWGbImAPgTEMImIN9gUFCEm/gDALULDN8PAD6atYdCTX9gUNKlj8wZAKUsAOzZz+UMAOsJAP/Z2ccMDA8PD/95eX5NWvsJCOVNQPtfX/8zM8+QePLl38MGBr8JCP+zs9myn/8GBqwpAP/GxgwJCPny78lzYLgjAJ8vAP9fX/+MjMUcAN8zM/9wcM8ZGcATEL+QePdZWf/29uc/P9cmJu9MTDImIN+/r7+/vz8/P8VNQGNugV8AAF9fX8swMNgTAFlDOICAgPNSUnNWSMQ5MBAQEJE3QPIGAM9AQMqGcG9vb6MhJsEdGM8vLx8fH98AANIWAMuQeL8fABkTEPPQ0OM5OSYdGFl5jo+Pj/+pqcsTE78wMFNGQLYmID4dGPvd3UBAQJmTkP+8vH9QUK+vr8ZWSHpzcJMmILdwcLOGcHRQUHxwcK9PT9DQ0O/v70w5MLypoG8wKOuwsP/g4P/Q0IcwKEswKMl8aJ9fX2xjdOtGRs/Pz+Dg4GImIP8gIH0sKEAwKKmTiKZ8aB/f39Wsl+LFt8dgUE9PT5x5aHBwcP+AgP+WltdgYMyZfyywz78AAAAAAAD///8AAP9mZv///wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACH5BAEAAKgALAAAAAA9AEQAAAj/AFEJHEiwoMGDCBMqXMiwocAbBww4nEhxoYkUpzJGrMixogkfGUNqlNixJEIDB0SqHGmyJSojM1bKZOmyop0gM3Oe2liTISKMOoPy7GnwY9CjIYcSRYm0aVKSLmE6nfq05QycVLPuhDrxBlCtYJUqNAq2bNWEBj6ZXRuyxZyDRtqwnXvkhACDV+euTeJm1Ki7A73qNWtFiF+/gA95Gly2CJLDhwEHMOUAAuOpLYDEgBxZ4GRTlC1fDnpkM+fOqD6DDj1aZpITp0dtGCDhr+fVuCu3zlg49ijaokTZTo27uG7Gjn2P+hI8+PDPERoUB318bWbfAJ5sUNFcuGRTYUqV/3ogfXp1rWlMc6awJjiAAd2fm4ogXjz56aypOoIde4OE5u/F9x199dlXnnGiHZWEYbGpsAEA3QXYnHwEFliKAgswgJ8LPeiUXGwedCAKABACCN+EA1pYIIYaFlcDhytd51sGAJbo3onOpajiihlO92KHGaUXGwWjUBChjSPiWJuOO/LYIm4v1tXfE6J4gCSJEZ7YgRYUNrkji9P55sF/ogxw5ZkSqIDaZBV6aSGYq/lGZplndkckZ98xoICbTcIJGQAZcNmdmUc210hs35nCyJ58fgmIKX5RQGOZowxaZwYA+JaoKQwswGijBV4C6SiTUmpphMspJx9unX4KaimjDv9aaXOEBteBqmuuxgEHoLX6Kqx+yXqqBANsgCtit4FWQAEkrNbpq7HSOmtwag5w57GrmlJBASEU18ADjUYb3ADTinIttsgSB1oJFfA63bduimuqKB1keqwUhoCSK374wbujvOSu4QG6UvxBRydcpKsav++Ca6G8A6Pr1x2kVMyHwsVxUALDq/krnrhPSOzXG1lUTIoffqGR7Goi2MAxbv6O2kEG56I7CSlRsEFKFVyovDJoIRTg7sugNRDGqCJzJgcKE0ywc0ELm6KBCCJo8DIPFeCWNGcyqNFE06ToAfV0HBRgxsvLThHn1oddQMrXj5DyAQgjEHSAJMWZwS3HPxT/QMbabI/iBCliMLEJKX2EEkomBAUCxRi42VDADxyTYDVogV+wSChqmKxEKCDAYFDFj4OmwbY7bDGdBhtrnTQYOigeChUmc1K3QTnAUfEgGFgAWt88hKA6aCRIXhxnQ1yg3BCayK44EWdkUQcBByEQChFXfCB776aQsG0BIlQgQgE8qO26X1h8cEUep8ngRBnOy74E9QgRgEAC8SvOfQkh7FDBDmS43PmGoIiKUUEGkMEC/PJHgxw0xH74yx/3XnaYRJgMB8obxQW6kL9QYEJ0FIFgByfIL7/IQAlvQwEpnAC7DtLNJCKUoO/w45c44GwCXiAFB/OXAATQryUxdN4LfFiwgjCNYg+kYMIEFkCKDs6PKAIJouyGWMS1FSKJOMRB/BoIxYJIUXFUxNwoIkEKPAgCBZSQHQ1A2EWDfDEUVLyADj5AChSIQW6gu10bE/JG2VnCZGfo4R4d0sdQoBAHhPjhIB94v/wRoRKQWGRHgrhGSQJxCS+0pCZbEhAAOw==">
<!-- This image is working -->
<p>-------Thanks</p>

I have just resolved this issue, so I thought I would post the answer to help others.
The root cause was the large size of the buffer response from S3; the email only supports 128 MB of data, as I found in the CloudWatch logs (I can only speak for AWS SES, not for other email clients).
So the ultimate solution for my problem was simply to resize the buffer response we get from S3.
I used sharp (https://www.npmjs.com/package/sharp) and added these lines in index.js:
const sharp = require("sharp"); // add this at the top of index.js

// Here I will resize the image
const resizedImageFileBuffer = await sharp(imageFileBuffer)
  .resize({
    width: 200,
    height: 200,
    fit: "contain"
  })
  .toFormat("png")
  .png({
    quality: 100,
    compressionLevel: 6
  })
  .toBuffer();

// Now we will convert the resized buffer to base64
let bufferToBase64 = resizedImageFileBuffer.toString("base64");

Related

File chunk upload to azure storage blob, file seems broken

I'm trying to upload an Excel file to Azure Storage Blob in chunks, using stageBlock and commitBlockList from the BlockBlobClient class. The file upload seems to succeed, but when I try to download and open the file, it appears to be broken.
I'm using React and Node.js to do this. The code follows below.
In UI
const chunkSize = (1024 * 1024) * 25; // file chunk size
// here slicing the file and sending it to api method
const fileReader = new FileReader();
const from = currentChunkIndexRef.current * chunkSize;
const to = from + chunkSize;
const blob = file.slice(from, to);
fileReader.onload = ((e: any) => uploadChunksToBlob(e, file, obj));
fileReader.readAsDataURL(blob);
// api method
const uploadChunksToBlob = async (event: any, file: File, obj: any) => {
try {
const totalChunks = Math.ceil(file.size / chunkSize);
const uploadChunkURL = `/upload?currentChunk=${currentChunkIndexRef.current}&totalChunks=${totalChunks}&file=${file.name}&type=${file.type}`;
console.log(event.target.result)
const fileUpload = await fetch(uploadChunkURL, {
method: "POST",
headers: { "Content-Type": "application/octet-stream" },
body: JSON.stringify(event.target.result),
});
const fileUploadJson = await fileUpload.json();
const isLastChunk = (totalChunks - 1) === currentChunkIndexRef.current;
if(!isLastChunk) {
console.log({ Chunk: currentChunkIndexRef.current });
currentChunkIndexRef.current = currentChunkIndexRef.current + 1;
// eslint-disable-next-line @typescript-eslint/no-use-before-define
uploadFileToAzureBlob(file, obj);
} else {
console.log("File Uploaded")
}
//
} catch (error) {
console.log("uploadFileToAzureBlob Catch Error" + error);
}
}
// In Node
const sharedKeyCredential = new StorageSharedKeyCredential(
config.StorageAccountName,
config.StorageAccountAccessKey
);
const pipeline = newPipeline(sharedKeyCredential);
const blobServiceClient = new BlobServiceClient(
`https://${config.StorageAccountName}.blob.core.windows.net`,
pipeline
);
const containerName = getContainerName(req.headers.key, req.headers.clientcode);
const identifier = uuid.v4();
const blobName = getBlobName(identifier, file);
const containerClient = blobServiceClient.getContainerClient(containerName);
const blockBlobClient = containerClient.getBlockBlobClient(blobName);
try {
let bufferObj = Buffer.from(`${file}_${Number(currentChunk)}`, "utf8"); // Create buffer object, specifying utf8 as encoding
let base64String = bufferObj.toString("base64"); // Encode the Buffer as a base64 string
blockIds = [...blockIds, base64String];
const bufferedData = Buffer.from(req.body);
let resultOfUnitArray = new Uint8Array(bufferedData.length);
for (let j = 0; j < bufferedData.length; j++) {
resultOfUnitArray[j] = bufferedData.toString().charCodeAt(j);
} // Converting string to bytes
const stageBlockResponse = await blockBlobClient.stageBlock(base64String, resultOfUnitArray, resultOfUnitArray.length, {
onProgress: (e) => {
console.log("bytes sent: " + e.loadedBytes);
}
});
if ((Number(totalChunks) - 1) === (Number(currentChunk))) {
const commitblockResponse = await blockBlobClient.commitBlockList(blockIds, {blobHTTPHeaders: req.headers});
res.json({ uuid: identifier, message: 'File uploaded to Azure Blob storage.' });
} else {
res.json({ message: `Current Chunks ${currentChunk} is Successfully Uploaded` });
}
} catch (err) {
console.log({ err })
res.json({ message: err.message });
}
I don't know what I'm doing wrong here.
Any help would be appreciated.
Thank you.
The problem is that you convert the chunk into a data URL; that's where things break.
It appears you're under the impression that you need to encode a blob into a string in order to send it. You don't: the browser fetch API is perfectly capable of handling a raw binary payload.
So on the client (browser) side, you don't need to go through FileReader at all. Just send the chunk blob directly:
const blob = file.slice(from, to);
// ...
fetch(uploadChunkURL, {
method: "POST",
headers: { "Content-Type": "application/octet-stream" },
body: blob,
});
On the server (Node.js) side, you'll receive the chunk in raw binary form, so you can simply forward it untouched to Azure Storage. There's no need to decode it from a string and copy bytes into resultOfUnitArray as you currently do:
const base64String = Buffer.from(`${file}_${Number(currentChunk)}`, "utf8").toString("base64");
const bufferedData = Buffer.from(req.body);
const stageBlockResponse = await blockBlobClient.stageBlock(
base64String,
bufferedData,
bufferedData.length
);
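One detail not covered in the answer above: for req.body to arrive as a raw Buffer on the Node side, the route needs a binary body parser registered for the application/octet-stream content type. A minimal sketch, assuming the server is Express (the 100mb limit is only an illustrative value):
const express = require("express");
const app = express();

// Parse application/octet-stream request bodies into a Buffer on req.body
app.use(express.raw({ type: "application/octet-stream", limit: "100mb" }));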

Delivering image from S3 to React client via Context API and Express server

I'm trying to download a photo from an AWS S3 bucket via an Express server to serve to a React app, but I'm not having much luck. Here are my (unsuccessful) attempts so far.
The Workflow is as follows:
Client requests photo after retrieving key from database via Context API
Request sent to express server route (important so as to hide the true location from the client)
Express server route requests blob file from AWS S3 bucket
Express server parses image to base64 and serves to client
Client updates state with new image
React Client
const [profilePic, setProfilePic] = useState('');
useEffect(() => {
await actions.getMediaSource(tempPhoto.key)
.then(resp => {
console.log('server resp: ', resp.data.data.newTest) // returns ����\u0000�\u0000\b\u0006\
const url = window.URL || window.webkitURL;
const blobUrl = url.createObjectURL(resp.data.data.newTest);
console.log("blob ", blobUrl);
setProfilePic({ ...profilePic, image : resp.data.data.newTest });
})
.catch(err => errors.push(err));
}
Context API - just axios wrapped into its own library
getMediaContents = async ( key ) => {
return await this.API.call(`http://localhost:5000/${MEDIA}/mediaitem/${key}`, "GET", null, true, this.state.accessToken, null);
}
Express server route
router.get("/mediaitem/:key", async (req, res, next) => {
try{
const { key } = req.params;
// Attempt 1 was to try with s3.getObject(downloadParams).createReadStream();
const readStream = getFileStream(key);
readStream.pipe(res);
// Attempt 2 - attempt to convert response to base 64 encoding
var data = await getFileStream(key);
var test = data.Body.toString("utf-8");
var container = '';
if ( data.Body ) {
container = data.Body.toString("utf-8");
} else {
container = undefined;
}
var buffer = (new Buffer.from(container));
var test = buffer.toString("base64");
require('fs').writeFileSync('../uploads', test); // it never wrote to this directory
console.log('conversion: ', test); // prints: 77+977+977+977+9AO+/vQAIBgYH - this doesn't look like base64 to me.
delete buffer;
res.status(201).json({ newTest: test });
} catch (err){
next(ApiError.internal(`Unexpected error > mediaData/:id GET -> Error: ${err.message}`));
return;
}
});
AWS S3 Library - I made my own library for using the s3 bucket as I'll need to use more functionality later.
const getFileStream = async (fileKey) => {
const downloadParams = {
Key: fileKey,
Bucket: bucketName
}
// This was attempt 1's return without async in the parameter
return s3.getObject(downloadParams).createReadStream();
// Attempt 2's intention was just to wait for the promise to be fulfilled.
return await s3.getObject(downloadParams).promise();
}
exports.getFileStream = getFileStream;
If you've gotten this far you may have realised that I've tried a couple of things from different sources and documentation but I'm not getting any further. I would really appreciate some pointers and advice on what I'm doing wrong and what I could improve on.
If any further information is needed then just let me know.
Thanks in advance for your time!
Maybe this will be useful for you; this is how I get an image from S3 and process it on the server.
Create a temporary directory:
createTmpDir(): Promise<string> {
return mkdtemp(path.join(os.tmpdir(), 'tmp-'));
}
Get the file:
readStream(path: string) {
return this.s3
.getObject({
Bucket: this.awsConfig.bucketName,
Key: path,
})
.createReadStream();
}
How I process the file:
async MainMethod(fileName){
const dir = await this.createTmpDir();
const serverPath = path.join(
dir,
fileName
);
await pipeline(
this.readStream(fileName),
fs.createWriteStream(serverPath + '.jpg')
);
const createFile= await sharp(serverPath + '.jpg')
.jpeg()
.resize({
width: 640,
fit: sharp.fit.inside,
})
.toFile(serverPath + '.jpeg');
const imageBuffer = fs.readFileSync(serverPath + '.jpeg');
//my manipulations
fs.rmSync(dir, { recursive: true, force: true }); //delete temporary folder
}
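To match the last step of the question's workflow (the Express route returning base64 to the React client), here is a minimal sketch, assuming AWS SDK v2 as in the question and that s3 and bucketName are configured as in the question's S3 library:
router.get("/mediaitem/:key", async (req, res, next) => {
  try {
    const { key } = req.params;
    // getObject(...).promise() resolves with Body as a Buffer, so encode it directly
    const data = await s3.getObject({ Bucket: bucketName, Key: key }).promise();
    const base64Image = data.Body.toString("base64");
    // The client can use this string directly as an <img> src
    res.status(200).json({ newTest: `data:${data.ContentType};base64,${base64Image}` });
  } catch (err) {
    next(err);
  }
});
The React side can then put that data URI straight into state and render it with an <img> tag, with no createObjectURL step needed.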

Use original file name in AWS s3 uploader

I have implemented an S3 uploader per these instructions: https://aws.amazon.com/blogs/compute/uploading-to-amazon-s3-directly-from-a-web-or-mobile-application/
This is the Lambda function code
AWS.config.update({ region: process.env.AWS_REGION })
const s3 = new AWS.S3()
const URL_EXPIRATION_SECONDS = 300
// Main Lambda entry point
exports.handler = async (event) => {
return await getUploadURL(event)
}
const getUploadURL = async function(event) {
const randomID = parseInt(Math.random() * 10000000)
const Key = `${randomID}.jpg`
// Get signed URL from S3
const s3Params = {
Bucket: process.env.UploadBucket,
Key,
Expires: URL_EXPIRATION_SECONDS,
Currently the filename (key) is generated using a random ID.
I would like to change that to use the original filename of the uploaded file.
I tried a couple of approaches, such as using fs.readFile() to get the filename, but have not had any luck.
There is a webpage with a form that works in conjunction with the Lambda to upload the file to S3.
How do I get the filename?
If you want to save the file with the original filename, you have to pass that filename as part of the key you use to request the signed url. You don't show how you're getting the file to upload, but if it is part of a web site, you get this from the client.
On the client side you have the user identify the file to upload and pass that to your code that calls getUploadURL(). Maybe in your code it is part of event? Then you send the signed URL back to the client and then the client can send the file to the signed URL.
Therefore to upload a file, your client has to send two requests to your server -- one to get the URL and one to upload the file.
You do mention that you're using fs.readFile(). If you're able to get the file with this call, then you already have the file name. All you have to do is pass the same name to getUploadURL() as an additional parameter or as part of event. You may have to parse the filename first, or within getUploadURL(), if it includes a path to someplace other than your current working directory.
The code above looks like it may be a Lambda that's getting called with some event. If that event is a trigger of some sort in which you can include a file name, then you can pull it from that variable. For example:
const getUploadURL = async function(event) {
const randomID = parseInt(Math.random() * 10000000)
const Key = `${event.fileNameFromTrigger}`
// Get signed URL from S3
const s3Params = {
Bucket: process.env.UploadBucket,
Key,
Expires: URL_EXPIRATION_SECONDS,
...
}
If the file name includes the extension, then you don't need to append that as you were with the random name.
I modified the Lambda
changed this
const randomID = parseInt(Math.random() * 10000000)
const Key = `${randomID}.jpg`
to this
const Key = event.queryStringParameters.filename
And this is the frontend code with my endpoint redacted. Note the query ?filename= appended to the endpoint and how I used this.filename = file.name.
<script>
const MAX_IMAGE_SIZE = 1000000
/* ENTER YOUR ENDPOINT HERE */
const API_ENDPOINT = '{api-endpoint}/uploads?filename=' // e.g. https://ab1234ab123.execute-api.us-east-1.amazonaws.com/uploads
new Vue({
el: "#app",
data: {
image: '',
uploadURL: '',
filename: ''
},
methods: {
onFileChange (e) {
let files = e.target.files || e.dataTransfer.files
//let filename = files[0].name
if (!files.length) return
this.createImage(files[0])
},
createImage (file) {
// var image = new Image()
let reader = new FileReader()
reader.onload = (e) => {
//console.log(file.name)
console.log('length: ', e.target.result.includes('data:image/jpeg'))
if (!e.target.result.includes('data:image/jpeg')) {
return alert('Wrong file type - JPG only.')
}
if (e.target.result.length > MAX_IMAGE_SIZE) {
return alert('Image is too large.')
}
this.image = e.target.result
this.filename = file.name
}
reader.readAsDataURL(file)
},
removeImage: function (e) {
console.log('Remove clicked')
this.image = ''
this.filename = ''
},
uploadImage: async function (e) {
console.log('Upload clicked')
// Get the presigned URL
const response = await axios({
method: 'GET',
url: API_ENDPOINT + this.filename
})
console.log('Response: ', response)
console.log('Uploading: ', this.image)
let binary = atob(this.image.split(',')[1])
let array = []
for (var i = 0; i < binary.length; i++) {
array.push(binary.charCodeAt(i))
}
let blobData = new Blob([new Uint8Array(array)], {type: 'image/jpeg'})
console.log('Uploading to: ', response.uploadURL)
const result = await fetch(response.uploadURL, {
method: 'PUT',
body: blobData
})
console.log('Result: ', result)
// Final URL for the user doesn't need the query string params
this.uploadURL = response.uploadURL.split('?')[0]
}
}
})
</script>
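One caveat worth adding (not part of the original post): a filename containing spaces or special characters would break the query string, so it is safer to encode it when building the request URL:
url: API_ENDPOINT + encodeURIComponent(this.filename)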

Firebase Cloud Functions - create pdf, store to bucket and send via mail

I'm developing a Firebase Function which is triggered when a new order is added to the Realtime Database. The first thing it does is create a PDF and pipe it to a Google Cloud Storage bucket.
On the .on("finish") event of the bucket stream, the next function gets started, which should send the piped PDF via email to the customer.
Everything seems to work, at least a bit.
First I had the problem that the attached PDF was always empty (not just blank; I also opened it in Notepad++ and it really was completely empty). When I checked the doc and bucketFileStream vars inside the bucketFileStream.on("finish") callback, both had a length of 0. A check of the doc var directly after doc.end() showed a length of about 612.
I then changed the flow so that in the sendOrderEmail function I also open a new read stream from the newly created file in the bucket.
Now I get at least some of the PDF in the attachment, but never the whole content.
When I check the PDF uploaded to the bucket, it looks like it should.
I googled a lot and found some answers targeting this topic, but as also seen in the comments on those questions, they were not completely helpful.
PDF Attachment NodeMailer
Where to generate a PDF of Firebase Database data - mobile app, or Firebase Hosting web app
How to attach file to an email with nodemailer
I also checked the nodemailer documentation for how to pass the attachment correctly and implemented it as documented, but with no success.
I think the mail gets sent before the read stream has finished.
Here the Package Versions I use:
"#google-cloud/storage": "1.5.2"
"#types/pdfkit": "^0.7.35",
"firebase-admin": "5.8.0",
"firebase-functions": "^0.7.3"
"nodemailer": "4.4.1",
Can anyone tell me what I'm doing wrong, or provide a working example that uses current package versions for this use case?
Here is the code which drives me crazy...
const functions = require("firebase-functions");
const admin = require("firebase-admin");
const nodemailer = require("nodemailer");
const pdfkit = require("pdfkit");
const storage = require("#google-cloud/storage")({projectId: `${PROJECT_ID}`})
const mailTransport = nodemailer.createTransport({
host: "smtp.office365.com",
port: 587,
secureConnection: false,
auth: {
user: "userName",
pass: "userPassword"
},
tls: {
ciphers: "SSLv3",
}
});
exports.added = function(event) {
const order = event.data.val();
const userId = event.params.userId;
// Load User Data by userId
return admin
.database()
.ref("/users/" +userId)
.once("value")
.then(function (snapshot) {
return generateOrderPDF(snapshot.val(), userId);
});
};
function generateOrderPDF(user, userId) {
const doc = new pdfkit();
const bucket = storage.bucket(functions.config().bucket);
const filename = `/${userId}/test-` + Date.now() + ".pdf";
const file = bucket.file(filename);
const bucketFileStream = file.createWriteStream();
// Pipe its output to the bucket
doc.pipe(bucketFileStream);
// Do creation Stuff....
doc.end();
bucketFileStream.on("finish", function () {
return sendOrderEmail(user, filename);
});
bucketFileStream.on("error", function(err) {
console.error(err);
});
}
function sendOrderEmail(user, filename) {
const email = user.email;
const firstname = user.firstName;
const mailOptions = {
from: "test#test.test",
to: email,
subject: "Order"
};
const bucket = storage.bucket(functions.config().bucket);
const file = bucket.file(filename);
mailOptions.html = mailTemplate;
mailOptions.attachments = [{
filename: "test.pdf",
content: file.createReadStream()
}];
return mailTransport.sendMail(mailOptions).then(() => {
console.log("New order email sent to:", email);
}).catch(error => {
console.error(error);
});
}
The problem in my approach was inside the pdfkit library and not inside nodemailer or Firebase. The lines below seem to trigger the end event, so the PDF got sent after these lines. After commenting them out, everything worked as it should. It was not that finish was never reached, as Hari mentioned.
/* doc.lineCap("underline")
.moveTo(72, 321)
.lineTo(570, 321)
.stroke();*/
After finishing the MVP I will do a root cause analysis and post the final answer as a comment below this answer.
This is a working sample of source code for this use case. It also ensures that the Firebase function won't finish before all work is done. That is handled by wrapping the event-driven doc.on() callbacks in a promise that is resolved when doc.on("end") fires.
exports.added = function(event) {
const order = event.data.val();
const userId = event.params.userId;
// Load User Data by userId
return admin.database().ref("/users/" + userId).once("value").then(function (snapshot) {
return generatePDF(snapshot.val(), userId);
});
};
function generatePDF(user, userId) {
const doc = new pdfkit();
const bucket = admin.storage().bucket(functions.config().moost.orderbucket);
const filename = "/${userId}/attachement.pdf";
const file = bucket.file(filename);
const bucketFileStream = file.createWriteStream();
var buffers = [];
let p = new Promise((resolve, reject) => {
doc.on("end", function() {
resolve(buffers);
});
doc.on("error", function () {
reject();
});
});
doc.pipe(bucketFileStream);
doc.on('data', buffers.push.bind(buffers));
//Add Document Text and stuff
doc.end();
return p.then(function(buffers) {
return sendMail(buffers);
});
}
function sendMail(buffers) {
const pdfData = Buffer.concat(buffers);
const mailOptions = {
from: "FromName <from#example.com>",
to: "to#example.com",
subject: "Subject",
html: mailTemplate,
attachments: [{
filename: 'attachment.pdf',
content: pdfData
}]
};
return mailTransport.sendMail(mailOptions).then(() => {
console.log("New email sent to:", "to#example.com");
}).catch(error => {
console.error(error);
});
}
The main problem in your code is that the stream.on('finish') callback never completes. I've also encountered the same issue.
Instead of streaming, convert the PDF into a buffer and send that as the attachment.
The following works fine for me:
const doc = new pdfkit()
const filename = `/${userId}/test-` + Date.now() + ".pdf"
const file = bucket.file(filename);
const bucketFileStream = file.createWriteStream();
doc.pipe(bucketFileStream);
doc.end();
var buffers = []
doc.on('data', buffers.push.bind(buffers));
doc.on('end',function(){
let pdfData = Buffer.concat(buffers);
// <<nodemailer stuff goes here>>
// attach the doc as content
});

Posting An Image from Webcam to Azure Face Api

I am trying to upload an image that I get from my webcam to the Microsoft Azure Face API. I get the image from canvas.toDataURL('image/png'), which contains the data URI. I change the Content-Type to application/octet-stream, and when I attach the data URI to the post request, I get a Bad Request (400) "Invalid Face Image". If I change the attached data to a Blob, I stop receiving errors; however, I only get back an empty array instead of a JSON object. I would really appreciate any help pointing me in the right direction.
Thanks!
Oh, you're in luck; I just (successfully!) attempted this two days ago.
Sending base64-encoded JPEGs to the Face API is seriously inefficient: the ratio of encoded output bytes to input bytes is 4:3 (33% overhead). Just send a byte array; it works, and the docs mention it briefly.
And read the canvas as JPEG, not PNG; PNG just wastes bandwidth for webcam footage.
...
var dataUri = canvas.toDataURL('image/' + format);
var data = dataUri.split(',')[1];
var mimeType = dataUri.split(';')[0].slice(5)
var bytes = window.atob(data);
var buf = new ArrayBuffer(bytes.length);
var byteArr = new Uint8Array(buf);
for (var i = 0; i < bytes.length; i++) {
byteArr[i] = bytes.charCodeAt(i);
}
return byteArr;
Now use byteArr as your payload (data:) in $.ajax() for jQuery or iDontUnderStandHowWeGotHereAsAPeople() in any other hipster JS framework people use these days.
The reverse-hipster way of doing it is:
var payload = byteArr;
var xhr = new XMLHttpRequest();
xhr.open('POST', 'https://SERVICE_URL');
xhr.setRequestHeader('Content-Type', 'application/octet-stream');
xhr.send(payload);
To extend Dalvor's answer: this is the AJAX call that works for me:
fetch(data)
.then(res => res.blob())
.then(blobData => {
$.post({
url: "https://westus.api.cognitive.microsoft.com/face/v1.0/detect",
contentType: "application/octet-stream",
headers: {
'Ocp-Apim-Subscription-Key': '<YOUR-KEY-HERE>'
},
processData: false,
data: blobData
})
.done(function(data) {
$("#results").text(JSON.stringify(data));
})
.fail(function(err) {
$("#results").text(JSON.stringify(err));
})
});
Full demo code here: https://jsfiddle.net/miparnisari/b1zzpvye/
To save someone six hours, I've appended my working code.
I hope this code helps you.
Tools
React
Typescript
React-webcam
Mac OS
Axios
Code
index.tsx
Constants and ref
/**
* Constants
*/
const videoConstraints = {
width: 1280,
height: 720,
facingMode: 'user',
};
/**
* Refs
*/
const webcamRef = React.useRef<Webcam>(null);
Callback function
const capture = React.useCallback(() => {
const base64Str = webcamRef.current!.getScreenshot() || '';
const s = base64Str.split(',');
const blob = b64toBlob(s[1]);
callCognitiveApi(blob);
}, [webcamRef]);
In render
<Webcam audio={false} ref={webcamRef} screenshotFormat="image/jpeg" videoConstraints={videoConstraints} />
<button onClick={capture}>Capture photo</button>
base64toBlob
Thanks to creating-a-blob-from-a-base64-string-in-javascript
export const b64toBlob = (b64DataStr: string, contentType = '', sliceSize = 512) => {
const byteCharacters = atob(b64DataStr);
const byteArrays = [];
for (let offset = 0; offset < byteCharacters.length; offset += sliceSize) {
const slice = byteCharacters.slice(offset, offset + sliceSize);
const byteNumbers = new Array(slice.length);
for (let i = 0; i < slice.length; i++) {
byteNumbers[i] = slice.charCodeAt(i);
}
const byteArray = new Uint8Array(byteNumbers);
byteArrays.push(byteArray);
}
const blob = new Blob(byteArrays, { type: contentType });
return blob;
};
callCognitiveApi
import axios from 'axios';
const subscriptionKey: string = 'This_is_your_subscription_key';
const url: string = 'https://this-is-your-site.cognitiveservices.azure.com/face/v1.0/detect';
export const callCognitiveApi = (data: any) => {
const config = {
headers: { 'content-type': 'application/octet-stream', 'Ocp-Apim-Subscription-Key': subscriptionKey },
};
const response = axios
.post(url, data, config)
.then((res) => {
console.log(res);
})
.catch((error) => {
console.error(error);
});
};
Result
So I got the answer finally by sending the image as a blob object. You first grab the image from canvas with:
let data = canvas.toDataURL('image/jpeg');
Afterwards, you can reformat it to a blob data object by running:
fetch(data)
.then(res => res.blob())
.then(blobData => {
// attach blobData as the data for the post request
})
You will also need to switch the Content-Type of the post request to "application/octet-stream".
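Putting those two pieces together, here is a minimal sketch of the post request with fetch (the endpoint and subscription key are the same placeholders used in the earlier answers):
fetch(data)
  .then(res => res.blob())
  .then(blobData =>
    fetch("https://westus.api.cognitive.microsoft.com/face/v1.0/detect", {
      method: "POST",
      headers: {
        "Content-Type": "application/octet-stream",
        "Ocp-Apim-Subscription-Key": "<YOUR-KEY-HERE>"
      },
      body: blobData
    })
  )
  .then(res => res.json())
  .then(faces => console.log(faces)); // array of detected faces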
