EROFS: read-only file system while creating a Node.js function on AWS Lambda

I have created a function on AWS Lambda using Node.js version 6.10.
I need to create a PDF file from an HTML string and send that file in an email.
Following is my code:
exports.handler = function index(event, context, callback) {
    var toAddress = event.to;
    var fromAddress = event.from;
    var subject = event.subject;
    var MailBody = event.mailBody;
    var PDFHTML = event.PDFHTML;
    var LabelHTML = event.LabelHtml;
    var options = {
        format: 'Legal',
        "header": {
            "height": "25mm"
        }
    };

    pdf.convertHTMLString(LabelHTML, '/tmp/LabelDetails.pdf', function(err, res1) {
        if (err) {
            console.log(err);
            callback(err, false);
        } else {
            pdf.convertHTMLString(PDFHTML, '/tmp/DiagramDetails.pdf', function(err, res1) {
                if (err) {
                    console.log(err);
                    callback(null, false);
                } else {
                    merge(['/tmp/LabelDetails.pdf', '/tmp/DiagramDetails.pdf'], '/tmp/Final.pdf', function(err) {
                        if (err) {
                            console.log(err);
                            callback(null, false);
                        } else {
                            /* Send mail code */
                            callback(null, true);
                        }
                    });
                }
            });
        }
    });
};

var fs = require("fs");
var pdf = require('html-to-pdf');
var merge = require('easy-pdf-merge');
var nodemailer = require('nodemailer');
var path = require("path");
When I try to convert the HTML string to a PDF file, it throws the error EROFS: read-only file system.
The same code works perfectly fine as a plain Node.js script.
After doing more research on this issue, I found that AWS Lambda gives write access only to the /tmp folder. So I used file paths like /tmp/FileName.pdf, but the issue is still there.
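For what it's worth, here is a minimal check (plain fs, with a hypothetical probe file name) that can confirm /tmp itself is writable from inside the handler; if this succeeds, the EROFS error likely comes from a path outside /tmp, for example an intermediate file the PDF library writes next to its own code:
// Sketch only: probe /tmp from inside the handler. If this write succeeds,
// the EROFS error is being raised for some other path (e.g. a temp file the
// PDF library creates outside /tmp), not for /tmp/LabelDetails.pdf itself.
var fs = require('fs');

exports.handler = function (event, context, callback) {
    var probePath = '/tmp/write-probe.txt'; // hypothetical file name
    try {
        fs.writeFileSync(probePath, 'ok');
        console.log('/tmp is writable');
        fs.unlinkSync(probePath);
    } catch (e) {
        console.log('cannot write to /tmp:', e);
    }
    callback(null, true);
};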

Related

How to upload file from S3 bucket to server (SFTP) in NodeJS?

I am trying to use ssh2-sftp-client in Node.js to send a file from an S3 bucket in AWS to my server.
I have seen many examples and I have tried them all with no success. I feel that this one is close, but it still is not working for me:
async function sendFileViaSftp(srcEvent) {
    let Client = require('ssh2-sftp-client');
    let Path = '/';
    let sftp = new Client();
    await sftp.connect({
        host: '73.XXX.XX.XXX',
        port: 22,
        username: 'username',
        password: 'mypassword'
    }).then(() => {
        console.log("Connected...");
        return sftp.list(Path);
    }).then((list) => {
        console.log("It worked");
        console.log("file to transfer: " + srcEvent.file);
        var fs = require('fs');
        var path = require('path');
        var params = {
            Bucket: srcEvent.bucket,
            Key: srcEvent.key
        };
        var tempFileName = path.join('/tmp/', srcEvent.file);
        var tempFile = fs.createWriteStream(tempFileName);
        s3.getObject(params).createReadStream().pipe(tempFile);
        console.log("file is in tmp");
        let data = fs.createReadStream(tempFileName);
        let remote = '/';
        sftp.put(data, remote);
        console.log("the code makes it to here and prints this");
        return list;
    }).catch((err) => {
        console.log('Catch Error: ', err);
        throw new Error(err);
    });
}
I am calling this function like this:
if (folder === 'something') {
    await sendFileViaSftp(srcEvent);
    return {
        statusCode: 200,
        body: srcEvent
    };
}
I do not get any error message; it looks like it just times out, which I don't understand because I am using async/await. In my example, I am attempting to pull the file from the S3 bucket, store it in /tmp/, and then send /tmp/test.xls. srcEvent.file is test.xls in this case.
Even if I could simply send a blank .txt file to my server from this function, that would be helpful.
Thanks!
I figured it out. I am not sure where exactly the problem was, but this will successfully pull a file from S3 and then upload it to the SFTP server:
async function sendFileViaSftp(srcEvent) {
    let Client = require('ssh2-sftp-client');
    var path = require('path');
    var localFolder = "/tmp";
    var remoteFolder = "/complete";
    var localfile = path.join(localFolder, srcEvent.file);
    var remotePath = path.join(remoteFolder, srcEvent.file);
    getFileFromS3(srcEvent, localFolder);
    let sftp = new Client();
    await sftp.connect(sftpCredentials).then(() => {
        try {
            return sftp.fastPut(localfile, remotePath);
        } catch (err) {
            console.log("Could not upload file: " + err);
        }
    })
    .then(() => {
        console.log("ending connections");
        sftp.end();
    })
    .catch(err => {
        console.error(err.message);
    });
}

async function getFileFromS3(srcEvent, localFolder) {
    var params = {
        Bucket: srcEvent.bucket,
        Key: srcEvent.key
    };
    var tempFileName = path.join(localFolder, srcEvent.file);
    var tempFile = fs.createWriteStream(tempFileName);
    s3.getObject(params).createReadStream().pipe(tempFile);
    console.log("Put file in temp");
}
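One thing worth noting about the snippet above: getFileFromS3 returns before the piped download has finished, so fastPut can race the write to /tmp. Here is a hedged sketch of how the download could be awaited, using the same fs, path and s3 objects and signalling completion from the write stream's finish event:
// Sketch only: wrap the streamed S3 download in a Promise so the caller can
// `await getFileFromS3(...)` before starting the SFTP transfer.
function getFileFromS3(srcEvent, localFolder) {
    return new Promise(function (resolve, reject) {
        var params = {
            Bucket: srcEvent.bucket,
            Key: srcEvent.key
        };
        var tempFileName = path.join(localFolder, srcEvent.file);
        var tempFile = fs.createWriteStream(tempFileName);
        s3.getObject(params).createReadStream()
            .on('error', reject)          // S3 read error
            .pipe(tempFile)
            .on('error', reject)          // local write error
            .on('finish', function () {   // file fully written to /tmp
                resolve(tempFileName);
            });
    });
}
With that in place, `await getFileFromS3(srcEvent, localFolder);` in sendFileViaSftp guarantees the local file is complete before the SFTP transfer starts.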

Unable to read content of .txt using fs

I am using the fs module to read the content of a .txt file, but the result is always empty. My .txt file does have content in it. Could anyone give me a hand please? This is my test code:
var fs = require("fs");
var content = "";
fs.readFile("2.txt", "utf8", function(err, data){
if(err) {
return console.log("fail", err);
}
content = data;
});
console.log(content);
The content is empty in the console.
You are logging the result too early. You should log the result in the readFile callback.
var fs = require("fs");
var content = "";
fs.readFile("2.txt", "utf8", function(err, data){
if(err) {
return console.log("fail", err);
}
content = data;
console.log(content);
});
// The console log below will be executed right after the readFile call.
// It won't wait the file to be actually read.
// console.log(content);
Or you can write the same logic like this:
const fs = require('fs');

async function main() {
    try {
        const content = await fs.promises.readFile('2.txt', 'utf8');
        console.log(content);
    } catch (ex) {
        console.trace(ex);
    }
}

main();

Recursively upload files to S3, how to detect if finished?

With a little help I've built an S3 uploader using Node.js.
It all works great and the files get there, they're set correctly and have the right permissions, but I'm stumped on how to detect whether the process has finished.
const async = require('async');
const AWS = require('aws-sdk');
const mime = require('mime');
const fs = require('fs');
const path = require("path");
require('dotenv').config();

const uploadDirToS3 = function(uploadPath) {
    // instantiate aws object for s3
    var s3 = new AWS.S3();
    // async version
    function walk(currentDirPath, callback) {
        fs.readdir(currentDirPath, function (err, files) {
            if (err) {
                throw new Error(err);
            }
            files.forEach(function (name) {
                var filePath = path.join(currentDirPath, name);
                var stat = fs.statSync(filePath);
                if (stat.isFile()) {
                    callback(filePath, stat);
                } else if (stat.isDirectory()) {
                    walk(filePath, callback);
                }
            });
        });
    }
    walk(uploadPath, function(filePath) {
        fs.readFile(filePath, function (err, data) {
            if (err) { throw err; }
            // get content-type (html, jpeg, gif, etc...)
            var metaData = mime.getType(filePath);
            // set bucket, key (filename), body (file),
            // public read-only and content-type
            var params = {
                Bucket: process.env.AWS_BUCKET,
                Key: filePath,
                Body: data,
                ACL: 'public-read',
                ContentType: metaData
            };
            // upload file to s3
            s3.putObject(params, function(err, data) {
                if (err) {
                    console.log(err);
                } else {
                    console.log("Successfully uploaded " + filePath);
                }
            });
        });
    });
};

uploadDirToS3("./media/media-1517245218111");
Could it literally be a case of checking whether a callback exists and break;-ing out of the loop?
Any ideas?
You need to use the iterate-over pattern.
When you find a file to copy, increment a counter, and when the S3 copy is done, track with another counter that it has been copied.
When totalFound == totalCopied, initiate the callback from the calling function (see the sketch after the example below).
function WaterfallOver(list, iterator, callback) {
    var nextItemIndex = 0;  // keep track of the index of the next item to be processed

    function report() {
        nextItemIndex++;
        // if nextItemIndex equals the number of items in list, then we're done
        if (nextItemIndex === list.length)
            callback();
        else
            // otherwise, call the iterator on the next item
            iterator(list[nextItemIndex], report);
    }

    // instead of starting all the iterations, we only start the 1st one
    iterator(list[0], report);
}
Hope it helps.
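For reference, here is a minimal sketch of that counting idea applied to the uploader from the question. It collects the file list up front with a synchronous walk so the total is known before the uploads start; onAllDone is a hypothetical completion callback, and AWS, fs, path and mime are the same requires as in the question:
// Sketch only: build the full file list first (synchronous walk), then
// upload each file and count completions; when every upload has called
// back, the whole directory is done.
function walkSync(dir, fileList) {
    fs.readdirSync(dir).forEach(function (name) {
        var filePath = path.join(dir, name);
        if (fs.statSync(filePath).isDirectory()) {
            walkSync(filePath, fileList);
        } else {
            fileList.push(filePath);
        }
    });
    return fileList;
}

function uploadDirToS3(uploadPath, onAllDone) {
    var s3 = new AWS.S3();
    var files = walkSync(uploadPath, []);
    var totalCopied = 0;
    if (files.length === 0) { return onAllDone(); }  // nothing to upload
    files.forEach(function (filePath) {
        s3.putObject({
            Bucket: process.env.AWS_BUCKET,
            Key: filePath,
            Body: fs.readFileSync(filePath),
            ACL: 'public-read',
            ContentType: mime.getType(filePath)
        }, function (err) {
            if (err) { console.log(err); }
            totalCopied++;                       // one more upload finished
            if (totalCopied === files.length) {  // totalFound === totalCopied
                onAllDone();                     // everything is on S3
            }
        });
    });
}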

Upload Images, Videos to Node.js WebService and store in Azure Blob Storage

I have created a Node.js web service which takes a JSON object in the POST body, and in the same object I need to pass the image/video media files (not sure whether that's possible); the same media file then needs to be uploaded to Azure Blob Storage.
Azure Storage provides a library for uploading a stream, but how do I upload the files from the apps to the Node.js server before uploading them to Azure Blob Storage?
The concept has to work on the Windows, Android and iOS platforms.
If your server is hosted on Web Apps and assuming it's built with Express, @Alex Lau provided a good point.
Also, here are another two libraries for handling file uploads in Express. I'd like to give you some code snippets that handle uploaded files and put them into Blob Storage in Express with these libraries:
1. connect-busboy
var busboy = require('connect-busboy');
var azure = require('azure-storage');
var fs = require('fs');
var path = require('path');

var blobsrv = azure.createBlobService(
    accountname,
    accountkey
);

router.post('/file', function (req, res, next) {
    var fstream;
    var uploadfolder = path.join(__dirname, '../files/');
    if (mkdirsSync(uploadfolder)) {
        req.pipe(req.busboy);
        req.busboy.on('file', function (fieldname, file, filename) {
            console.log("Uploading: " + filename);
            fstream = fs.createWriteStream(uploadfolder + filename);
            file.pipe(fstream);
            fstream.on('close', function () {
                //res.redirect('back');
                blobsrv.createBlockBlobFromLocalFile('mycontainer', filename, uploadfolder + filename, function (error, result, response) {
                    if (!error) {
                        res.send(200, 'upload succeeded');
                    } else {
                        res.send(500, 'error');
                    }
                });
            });
        });
    }
});
function mkdirsSync(dirpath, mode) {
    if (!fs.existsSync(dirpath)) {
        var pathtmp;
        dirpath.split("\\").forEach(function (dirname) {
            console.log(dirname);
            if (pathtmp) {
                pathtmp = path.join(pathtmp, dirname);
            } else {
                pathtmp = dirname;
            }
            if (!fs.existsSync(pathtmp)) {
                if (!fs.mkdirSync(pathtmp, mode)) {
                    return false;
                }
            }
        });
    }
    return true;
}
2. formidable
var formidable = require('formidable');
var stream = require('stream');  // needed for PassThrough below

router.post('/fileform', function (req, res, next) {
    var form = new formidable.IncomingForm();
    form.onPart = function (part) {
        part.on('data', function (data) {
            console.log(data);
            var bufferStream = new stream.PassThrough();
            bufferStream.end(data);
            blobsrv.createBlockBlobFromStream('mycontainer', part.filename, bufferStream, data.length, function (error, result, response) {
                if (!error) {
                    res.send(200, 'upload succeeded');
                } else {
                    res.send(500, JSON.stringify(error));
                }
            });
        });
    };
    form.parse(req);
    //res.send('OK');
});
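For comparison, here is a hedged sketch of the more common formidable usage, letting formidable buffer the upload to a temp file first and then pushing that file to Blob Storage with createBlockBlobFromLocalFile (blobsrv is the service created in the first snippet; the route path and the uploadFile field name are hypothetical):
// Sketch only: let formidable write the upload to its temp directory,
// then push the finished file to Blob Storage in one call.
var formidable = require('formidable');

router.post('/fileform2', function (req, res, next) {
    var form = new formidable.IncomingForm();
    form.parse(req, function (err, fields, files) {
        if (err) {
            return res.send(500, JSON.stringify(err));
        }
        var file = files.uploadFile;  // hypothetical field name
        // In formidable 1.x the temp location is file.path and the original
        // name is file.name; newer versions rename these properties.
        blobsrv.createBlockBlobFromLocalFile('mycontainer', file.name, file.path,
            function (error, result, response) {
                if (!error) {
                    res.send(200, 'upload succeeded');
                } else {
                    res.send(500, 'error');
                }
            });
    });
});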
If you are using Mobile Apps with Node.js as a backend to handle these workflows, we can create a custom API and transfer the media content as base64.
In the Mobile Apps backend (custom API):
var azure = require('azure');
var fs = require('fs');
var path = require('path');

exports.register = function (api) {
    api.post('upload', upload);
};

function upload(req, res) {
    var blobSvc = azure.createBlobService(
        req.service.config.appSettings.STORAGE_ACCOUNTNAME,
        req.service.config.appSettings.STORAGE_ACCOUNTKEY
    );
    var decodedImage = new Buffer(req.body.imgdata, 'base64');
    var tmpfilename = (new Date()).getTime() + '.jpg';
    var tmpupload = 'upload/';
    mkdirsSync(tmpupload);
    var filePath = tmpupload + tmpfilename;
    fs.writeFileSync(filePath, decodedImage);
    blobSvc.createBlockBlobFromFile(req.body.container, tmpfilename, filePath, req.body.option, function (error, result, response) {
        if (!error) {
            res.send(200, { result: true });
        } else {
            res.send(500, { result: error });
        }
    });
}
In the mobile application, I used the Ionic framework with the ngCordova plugin integrated to handle camera events.
Here are the controller and server script snippets, for your information:
Controller js:
$scope.getpic = function () {
    var options = {
        quality: 10,
        destinationType: Camera.DestinationType.DATA_URL,
        sourceType: Camera.PictureSourceType.CAMERA,
        allowEdit: false,
        encodingType: Camera.EncodingType.JPEG,
        targetWidth: 100,
        targetHeight: 100,
        popoverOptions: CameraPopoverOptions,
        saveToPhotoAlbum: false
    };
    $cordovaCamera.getPicture(options).then(function (imageData) {
        console.log(imageData);
        return blobService.uploadBlob(objectId, imageData);
    }, function (err) {
        // error
    }).then(function (res) {
        console.log(JSON.stringify(res));
    });
};
Server js (blobService):
factory('blobService', function ($q) {
    return {
        uploadBlob: function (container, imgdata, option) {
            var q = $q.defer();
            mobileServiceClient.invokeApi('blobstorage/upload', {
                method: "post",
                body: {
                    container: container,
                    imgdata: imgdata,
                    option: { contentType: 'image/jpeg' }
                }
            }).done(function (res) {
                console.log(JSON.stringify(res.result));
                if (res.result.blob !== undefined) {
                    q.resolve(res.result.blob);
                }
                if (res.result.url !== undefined) {
                    q.resolve(res.result.url);
                }
            });
            return q.promise;
        }
    };
})
Perhaps you may consider using multipart/form-data instead of JSON, as there is a good library (expressjs/multer, assuming you are using Express) to handle file uploads in Node.js.
As long as you get the file from multer, the rest can be very simple, as below:
// requires added for completeness: multer parses the multipart body,
// blobService is an azure-storage blob service as in the snippets above
var multer = require('multer');
var upload = multer({ dest: '/tmp/' });  // destination folder is just an example

app.post('/profile', upload.single('avatar'), function (req, res, next) {
    blobService.createBlockBlobFromLocalFile('avatars', req.file.originalname, req.file.path, function (error, result, response) {
    });
});
For iOS and Android, there are also plenty of libraries that support multipart/form-data requests, such as AFNetworking on iOS and OkHttp on Android.

File upload nodejs to server

I want to upload files from my form to my server.
I have already tested this but without success.
What is the best npm module for that?
Can I test it on localhost?
Thanks
For Express, use multer:
https://www.npmjs.com/package/multer
For Hapi.js:
https://gist.github.com/joyrexus/0c6bd5135d7edeba7b87
Hope this helps!
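A minimal multer sketch to go with the Express link above, assuming an Express app and an uploads/ destination folder (both hypothetical):
// Sketch only: accept a single file from a form field named "myfile"
// and let multer write it to ./uploads.
var express = require('express');
var multer = require('multer');

var app = express();
var upload = multer({ dest: 'uploads/' });  // files land here with generated names

app.post('/upload', upload.single('myfile'), function (req, res) {
    // req.file describes the stored file (path, originalname, size, ...)
    res.json({ stored: req.file.path, original: req.file.originalname });
});

app.listen(3000);  // works on localhost too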
Using Hapi.js
I have done image upload in one of my projects.
I used Nginx to define the root location for this file upload.
var mkdirp = require('mkdirp');
var path = require('path');
var mv = require('mv');

exports.imageUpload = function (req, reply) {
    var payload = req.payload;
    commonImageUpload(payload.uploadFile, urid, function (err, res) {
    });
};

var commonImageUpload = function (file, idUser, callback) {
    if (null != file) {
        var extention = path.extname(file.filename);
        var extentionsList = [];
        extentionsList.push('.jpg');
        extentionsList.push('.png');
        extentionsList.push('.jpeg');
        extentionsList.push('.gif');
        var index = extentionsList.indexOf(extention.toLowerCase());
        if (index < 0) {
            callback(true, "Invalid Media Type");
        } else {
            var filepath;
            filepath = '../cdn/idcard/';
            var fname = filepath + idUser + extention;
            console.log(fname);
            mkdirp(filepath, function (err) {
                if (err) {
                    console.log(err);
                    callback(true, "Internal Server Error");
                } else {
                    mv(file.path, fname, function (err) {
                    });
                }
            });
        }
    } else {
        callback(true);
    }
};
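For completeness, here is a hedged sketch of the route registration this handler assumes (hapi versions prior to 17, which match the (req, reply) handler signature above; with output: 'file', hapi parks each multipart part on disk so file.path and file.filename exist; the route path and module path are hypothetical):
// Sketch only: route registration for the imageUpload handler above.
server.route({
    method: 'POST',
    path: '/upload/image',              // hypothetical path
    config: {
        payload: {
            output: 'file',             // write each part to a temp file
            parse: true,
            allow: 'multipart/form-data',
            maxBytes: 10 * 1024 * 1024  // 10 MB cap, adjust as needed
        },
        handler: require('./imageUpload').imageUpload  // hypothetical module path
    }
});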
Let me know if this solves your problem.
