I am writing an iOS app in Swift 2.2 using Xcode 7.3.1. For backend services I am using AWS Mobile Hub (S3 and Lambda).
What the app should do: take a screenshot, send it to an AWS S3 bucket, and then send the screenshot through SendGrid using an AWS Lambda function trigger.
My problem: I can't seem to attach the damn image in the S3 bucket to the email. Locally it works fine, but when uploaded to Lambda it throws the following error:
Error: ENOENT: no such file or directory, open '...'
Using the following code:
'use strict';
var fs = require('fs');
console.log('Loading function');
let aws = require('aws-sdk');
let s3 = new aws.S3({ apiVersion: '2006-03-01' });
exports.handler = (event, context, callback) => {
    // Get the object from the event and show its content type
    const key = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' '));
    var helper = require('sendgrid').mail;
    let from_email = new helper.Email("from@gmail.com");
    let to_email = new helper.Email("to@gmail.com");
    let subject = "Hello World from the SendGrid Node.js Library";
    let content = new helper.Content("text/plain", "Email content");
    let mail = new helper.Mail(from_email, subject, to_email, content);
    var bse64 = base64_encode(INeedThisFrigginPath);
    let attachment = new helper.Attachment();
    attachment.setContent(bse64);
    attachment.setType("image/png");
    attachment.setFilename(key);
    attachment.setDisposition("attachment");
    mail.addAttachment(attachment);
    var sg = require('sendgrid')('SG.sengridkey');
    var requestBody = mail.toJSON();
    var emptyRequest = require('sendgrid-rest').request;
    var requestPost = JSON.parse(JSON.stringify(emptyRequest));
    requestPost.method = 'POST';
    requestPost.path = '/v3/mail/send';
    requestPost.body = requestBody;
    sg.API(requestPost,
        function (error, response)
        {
            console.log(response.statusCode);
            console.log(response.body);
            console.log(response.headers);
        }
    );
};
// function to encode file data to base64 encoded string
function base64_encode(file)
{
    // read binary data
    var bitmap = fs.readFileSync(file);
    // convert binary data to base64 encoded string
    return new Buffer(bitmap).toString('base64');
}
Specifically this line:
var bse64 = base64_encode(INeedThisFrigginPath);
It's quite obvious what the problem is, so what I need to know is: what is the correct path to my image?
I have tried using the key value, and the image link:
https://s3.eu-central-1.amazonaws.com/bucketname/public/test0.png
No luck.
It would be great if anyone could help me by supplying code, a tutorial, or just general pointers to push me in the right direction. Maybe AWS S3, AWS Lambda, and SendGrid aren't the best technologies to use here?
Thanks a bunch!
You are trying to build a path to that file and then open it as a local file. The file isn't local, it's on S3, so that won't work. You have two options:
1. Download the file from S3 to Lambda first (into the /tmp directory) and then refer to the local path when you open the file (see the sketch below).
2. Open a ReadStream to the file using the AWS S3 SDK.
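A minimal sketch of the first option, assuming the bucket name is read from the same S3 event record as the key; the object is written under /tmp, and the resulting local path can then be passed to the question's base64_encode function:

const aws = require('aws-sdk');
const fs = require('fs');
const path = require('path');
const s3 = new aws.S3({ apiVersion: '2006-03-01' });

// Hypothetical helper: download the triggering object to /tmp and hand back its local path.
function downloadToTmp(bucket, key, callback) {
    const localPath = path.join('/tmp', path.basename(key));
    s3.getObject({ Bucket: bucket, Key: key }, (err, data) => {
        if (err) return callback(err);
        fs.writeFile(localPath, data.Body, (writeErr) => callback(writeErr, localPath));
    });
}

// Inside the handler, the bucket comes from event.Records[0].s3.bucket.name:
// downloadToTmp(bucket, key, (err, localPath) => {
//     if (err) return callback(err);
//     const bse64 = base64_encode(localPath);
//     // ...build and send the SendGrid mail here...
// });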
I'm following up on this article to download objects from a GCP Cloud Storage bucket: https://cloud.google.com/storage/docs/downloading-objects#storage-download-object-nodejs
const {Storage} = require('@google-cloud/storage');
// Creates a client
const storage = new Storage();
async function downloadIntoMemory() {
    // Downloads the file into a buffer in memory.
    const contents = await storage.bucket(bucketName).file(fileName).download();
    return contents;
}
downloadIntoMemory().catch(console.error);
I'm currently getting buffer data in contents. I have this code hooked up to an API on my Node.js backend. I'm using React with TypeScript on the frontend. Calling the API gives me the data buffer. How can I use it to download the file instead of just getting the data buffer?
I tried the above method while explicitly providing a file destination, but I'm still getting the following error: EISDIR: illegal operation on a directory, open '{file_path_which_i_was_set}'. Err: -21
As rightly pointed out by @John Hanley, you are referring to the documentation, where the code sample downloads an object into memory (a buffer in memory). If you want to download an object from a bucket to a file, refer to this code sample, where an 'options' parameter has to be passed to the download() method.
The code goes like this:
// The ID of your GCS bucket
const bucketName = 'your-unique-bucket-name';
// The ID of your GCS file
const fileName = 'your-file-name';
// The path to which the file should be downloaded
const destFileName = '/local/path/to/file.txt';
// Imports the Google Cloud client library
const {Storage} = require('@google-cloud/storage');
// Creates a client
const storage = new Storage();
async function downloadFile() {
    const options = {
        destination: destFileName,
    };
    // Downloads the file to the destination file path
    await storage.bucket(bucketName).file(fileName).download(options);
    console.log(
        `gs://${bucketName}/${fileName} downloaded to ${destFileName}.`
    );
}
downloadFile().catch(console.error);
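For the EISDIR error mentioned in the question: download() opens the destination path for writing, so destination must be a full file path, not a directory. The paths below are only illustrative:

// causes EISDIR: the destination points at a directory
// const options = { destination: '/local/path/to/' };

// works: the destination is a complete file path
const options = { destination: '/local/path/to/file.txt' };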
I have to make an API call passing a compressed file as input. I have a working example on premises, but I would like to move the solution to the cloud. I was thinking of using Azure Blob Storage and an Azure Function trigger. I have the code below that works for a local file, but I don't know how to do the same with Azure Blob Storage and an Azure Function in Node.js.
const zlib = require('zlib');
const fs = require('fs');
const def = zlib.createDeflate();
const input = fs.createReadStream('claudio.json');
const output = fs.createWriteStream('claudio-def.json');
input.pipe(def).pipe(output);
This code reads a file as a stream, compresses it, and writes another file as a stream.
What I would like to do is read the file whenever I upload it to a container in Azure Blob Storage, compress it, and save it in a different container with a different name, then make an API call passing the compressed file saved in the other container as input.
I tried this code for compressing the incoming file:
const fs = require("fs");
const zlib = require('zlib');
const {Readable, Writable} = require('stream');
module.exports = async function (context, myBlob) {
    context.log("JavaScript blob trigger function processed blob \n Blob:", context.bindingData.blobTrigger, "\n Blob Size:", myBlob.length, "Bytes");
    // const fin = fs.createReadStream(context.bindingData.blobTrigger);
    const def = zlib.createDeflate();
    const s = Readable.from(myBlob.toString());
    context.log(myBlob);
    context.bindings.outputBlob = s.pipe(def);
};
The problem with this approach is that in the last line of the code
context.bindings.outputBlob = s.pipe(def)
I don't get the compressed file, while if I use
s.pipe(def).pipe(process.stdout)
I can read the compressed output.
As you can see above, I also tried fs.createReadStream(context.bindingData.blobTrigger), which contains the name of the uploaded file together with the container name, but it doesn't work.
Any ideas?
Thank you
This is the solution:
var input = context.bindings.myBlob;
var inputBuffer = Buffer.from(input);
var deflatedOutput = zlib.deflateSync(inputBuffer);
context.bindings.myOutputBlob = deflatedOutput;
https://learn.microsoft.com/en-us/answers/questions/500368/compress-and-write-a-file-in-another-container-wit.html
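For context, a rough sketch of how that solution might sit inside the blob-triggered function from the question; the binding names myBlob and myOutputBlob are assumptions and have to match the bindings declared in the function's function.json:

const zlib = require('zlib');

module.exports = async function (context, myBlob) {
    // The blob trigger hands the uploaded blob to the function as a Buffer (assumed input binding: myBlob)
    const inputBuffer = Buffer.from(myBlob);
    // Compress the whole buffer in one synchronous call
    const deflatedOutput = zlib.deflateSync(inputBuffer);
    // Assigning to the output binding writes the compressed bytes to the other container (assumed output binding: myOutputBlob)
    context.bindings.myOutputBlob = deflatedOutput;
};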
I am trying to consume the Azure Forms Recognizer API, where I have to provide the body in the form of "[Binary PNG data]", as stated here.
The connection seems the be working fine, however I am getting this response:
{"error":{"code":"InvalidImage","innerError":{"requestId":"73c86dc3-51a3-48d8-853b-b6411f54c51e"},"message":"The input data is not a valid image or password protected."}}
I am using a PNG that is in my local directory, and I've tried converting it in many different ways, including:
fs.readFile('test.png', function(err, data){
    if (err) throw err;
    // Encode to base64
    let encodedImage = new Buffer(data, 'binary').toString('base64');
    // Decode from base64
    var decodedImage = new Buffer(encodedImage, 'base64').toString('binary');
});
or
let data_string = fs.createReadStream('test.png');
and many others. None of them seem to work, and I always get the same response from my POST request.
I would appreciate it if anyone could share how to convert this PNG into the correct format. Thank you in advance.
To base 64:
const file = fs.readFileSync('/some/place/image.png')
const base64String = Buffer.from(file).toString('base64')
Then pass the base64String to Azure
If you want just a blob, i.e. a binary file, you can do this:
const file = fs.readFileSync('/some/place/image.png')
const blob = Buffer.from(file)
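If you go the binary route, the key point is to send the raw buffer as the request body with a Content-Type that matches the image. A rough sketch using Node's built-in https module; the hostname, path and subscription key are placeholders to be replaced with the values from your Azure resource and the Form Recognizer documentation:

const fs = require('fs');
const https = require('https');

const blob = fs.readFileSync('/some/place/image.png');

// Placeholder endpoint, path and key; substitute your own values.
const options = {
    hostname: 'your-resource.cognitiveservices.azure.com',
    path: '/formrecognizer/v2.1/layout/analyze',
    method: 'POST',
    headers: {
        'Content-Type': 'image/png',
        'Ocp-Apim-Subscription-Key': 'your-subscription-key',
        'Content-Length': blob.length
    }
};

const req = https.request(options, (res) => {
    console.log('Status:', res.statusCode);
    res.on('data', (chunk) => console.log(chunk.toString()));
});
req.on('error', console.error);
req.write(blob); // send the raw binary body, not a base64 string
req.end();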
const processFile = (file: any) => {
    const reader = new FileReader();
    reader.readAsArrayBuffer(file);
    reader.onload = function() {
        const binaryData = Buffer.from(reader.result as string, 'binary');
        console.log(binaryData);
    };
}
I have a Node.js function for AWS Lambda. It reads a JSON file from an S3 bucket as a stream, parses it, and prints the parsed objects to the console. I am using the stream-json module for parsing.
It works in my local environment and prints the objects to the console. But it does not print the objects to the log streams (CloudWatch) on Lambda. It simply times out after the max duration. It prints the other log statements around them, but not the object values.
1. Using Node.js 6.10 in both environments.
2. The callback to the Lambda function is invoked only after the stream ends.
3. Lambda has full access to S3.
4. Also tried a Promise to wait until the streams complete, but no change.
What am I missing? Thank you in advance.
const AWS = require('aws-sdk');
const {parser} = require('stream-json');
const {streamArray} = require('stream-json/streamers/StreamArray');
const {chain} = require('stream-chain');
const S3 = new AWS.S3({ apiVersion: '2006-03-01' });
/** ******************** Lambda Handler *************************** */
exports.handler = (event, context, callback) => {
    // Get the object from the event and show its content type
    const bucket = event.Records[0].s3.bucket.name;
    const key = event.Records[0].s3.object.key;
    const params = {
        Bucket: bucket,
        Key: key
    };
    console.log("Source: " + bucket + "//" + key);
    let s3ReaderStream = S3.getObject(params).createReadStream();
    console.log("Setting up pipes");
    const pipeline = chain([
        s3ReaderStream,
        parser(),
        streamArray(),
        data => {
            console.log(data.value);
        }
    ]);
    pipeline.on('data', (data) => console.log(data));
    pipeline.on('end', () => callback(null, "Stream ended"));
};
I have figured out that it is because my Lambda function is running inside a private VPC.
(I have to run it inside a private VPC because it needs to access my ElastiCache instance. I removed the related code when I posted it, for simplicity.)
The code can access S3 from my local machine, but not from inside the private VPC.
There is a process to ensure that S3 is accessible from within your VPC. It is posted here https://aws.amazon.com/premiumsupport/knowledge-center/connect-s3-vpc-endpoint/
Here is another link that explains how to set up a VPC endpoint to be able to access AWS resources from within a VPC: https://aws.amazon.com/blogs/aws/new-vpc-endpoint-for-amazon-s3/
I have some certificate files on S3 (public), and I need to download and use these files in my code. If I write equivalent code in Node.js on my local machine, it runs fine, but in AWS Lambda it just crashes.
var apn = require('apn');
var https = require('https');
var fs = require('fs');
exports.handler = function(event, context) {
    console.log("Running aws apn push message function");
    console.log("==================================");
    console.log("event", event);
    var certPath = event.certPath;
    var keyPath = event.keyPath;
    var certFileName = event.certFileName;
    var keyFileName = event.keyFileName;
    var passphrase = event.passphrase;
    var apnId = event.apnId;
    var content = event.content;
    var certfile = fs.createWriteStream(certFileName);
    var certrequest = https.get(certPath, function(certresponse) {
        certresponse.pipe(certfile);
        console.log("downloaded the certificate");
        var keyfile = fs.createWriteStream(keyFileName);
        var keyrequest = https.get(keyPath, function(keyresponse) {
            keyresponse.pipe(keyfile);
            console.log("downloaded the key file");
            var options = {
                "cert": certFileName,
                "key": keyFileName,
                "passphrase": passphrase,
                "batchFeedback": true,
                "interval": 10
            };
            var apnConnection = new apn.Connection(options);
            var myDevice = new apn.Device(apnId);
            var note = new apn.Notification();
            note.expiry = Math.floor(Date.now() / 1000) + 3600; // Expires 1 hour from now.
            note.payload = {'COMMAND': content};
            apnConnection.pushNotification(note, myDevice);
            console.log('message sent to ' + apnId);
            context.done();
        });
    });
};
The error I get is related to accessing files, I suppose:
events.js:72
throw er; // Unhandled 'error' event
^
Error: EACCES, open 'PushChatCert.pem'
So, while on AWS Lambda, are there specific concerns when downloading and using a file, related to its path or something? Where do the files go when they get downloaded? In fact, I don't even see the log of the file getting downloaded.
The only local file system you can write to in Lambda is /tmp, so make sure the path of the local file you are trying to write to is in the /tmp directory and you should be all set.
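Applied to the question's code, that means writing the downloaded certificate and key under /tmp and pointing the APN options at the same paths. A sketch reusing the question's variable names:

var path = require('path');
// /tmp is the only writable location in the Lambda environment
var certfile = fs.createWriteStream(path.join('/tmp', certFileName));
var keyfile = fs.createWriteStream(path.join('/tmp', keyFileName));
// ...after both downloads finish, point the APN options at the /tmp copies
var options = {
    "cert": path.join('/tmp', certFileName),
    "key": path.join('/tmp', keyFileName),
    "passphrase": passphrase,
    "batchFeedback": true,
    "interval": 10
};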
Just note that as of last year (2020), Lambda supports EFS as a mount as well, so you can write to an EFS mount point. That is excessive for your case, but it might help someone doing large file processing: https://aws.amazon.com/blogs/compute/using-amazon-efs-for-aws-lambda-in-your-serverless-applications/