How to handle large file processing through AJAX and Express - Node.js

I'm uploading a PDF file using AJAX and Express, then sending the data to an external REST API for processing.
Whenever I upload a file larger than 3 MB, the AJAX call errors out before the REST API responds.
I get correct results from the external REST API, but the processing takes so long that AJAX never receives the success message.
How do I handle this?
I tried Axios and other promise handlers, but got the same result.
Server:
var formidable = require('formidable'),
    path = require('path'),
    fs = require('fs'),
    readChunk = require('read-chunk'),
    request = require('request'),
    AdmZip = require('adm-zip');

module.exports = (req, res) => {
    var form = new formidable.IncomingForm(),
        pdfFile,
        dirname = process.cwd();
    form.multiples = false;
    // Upload directory for the files
    form.uploadDir = path.join(dirname, 'tmp_uploads');
    form.on('file', function(name, file) {
        let token;
        console.log('FORM ON');
        if (req.cookies.userData.token) {
            token = req.cookies.userData.token;
            let buffer = null,
                filename = '',
                renameFile = '';
            // Read a chunk of the file (e.g. for file-type detection).
            buffer = readChunk.sync(file.path, 0, 262);
            // filename = Date.now() + '-' + file.name;
            filename = file.name;
            renameFile = path.join(dirname, 'uploads/' + filename);
            fs.rename(file.path, renameFile, function(err) {
                if (err) throw err;
                console.log('rename complete');
                pdfFile = path.join(dirname, 'uploads/' + filename);
                let readFileStream = fs.createReadStream(pdfFile),
                    formData = {
                        name: filename,
                        my_file: readFileStream,
                        Width: 1024,
                        Height: 768
                    };

                function postData() {
                    // Set URL and headers for the request.
                    var options = {
                        url: EXTERNALURL,
                        headers: {
                            "Authorization": 'bearer ' + token,
                            "content-type": "multipart/form-data"
                        },
                        formData: formData
                    };
                    // Return a new promise wrapping the async POST.
                    return new Promise(function(resolve, reject) {
                        request.post(options, function(err, resp, body) {
                            if (err) {
                                reject(err);
                            } else {
                                resolve(body);
                            }
                        });
                    });
                }

                function getData(url) {
                    // Return a new promise wrapping the async GET.
                    return new Promise(function(resolve, reject) {
                        request.get({ url: url, encoding: null }, function(err, resp, body) {
                            if (err) {
                                reject(err);
                            } else {
                                resolve(body);
                            }
                        });
                    });
                }

                var errHandler = function(err) {
                    console.log(err);
                };
                var filePath;

                function main() {
                    var dataPromise = postData();
                    // Parse the API response, then download the generated zip.
                    dataPromise.then(JSON.parse, errHandler)
                        .then(function(result) {
                            var fData = result,
                                fileName = fData.Id,
                                file_url = fData.PresentationZipUrl;
                            filePath = path.join(dirname, fData.HtmlPath);
                            // One more async operation: download the zip itself.
                            return getData(file_url);
                        }, errHandler)
                        .then(function(data) {
                            var zip = new AdmZip(data);
                            if (!fs.existsSync(filePath)) {
                                fs.mkdirSync(filePath);
                                zip.extractAllTo(filePath, true);
                            }
                        }, errHandler)
                        .then(function() {
                            console.log('Done');
                            res.status(200).json({ 'status': "success" });
                        }, errHandler);
                }
                main();
            }); // END RENAME FILE
        } else {
            console.log('ERROR');
            res.redirect('/');
        }
    });
    form.on('error', function(err) {
        console.log('Error occurred during processing - ' + err);
    });
    // Invoked when all the fields have been processed.
    form.on('end', function() {
        console.log('All the request fields have been processed.');
    });
    // Parse the incoming form fields.
    form.parse(req, function(err, fields, files) {});
};
AJAX:
$.ajax({
    url: '/presentation/uploadFiles',
    method: 'post',
    data: formData,
    processData: false,
    contentType: false,
    timeout: 0
}).done(function(d) {
    console.log(d);
    if (d.status == 'success') {
        location.reload();
    }
}).fail(function(e, t) {
    console.log(e);
    console.log(t);
}).always(function() {
});
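
Not from the original post, but a common way to avoid this kind of client-side failure on long-running processing is to stop holding the HTTP request open: accept the upload, respond 202 Accepted immediately, and let the browser poll a status route until the external API is done. A minimal sketch, assuming Express; the processPdf helper and the /presentation/status/:jobId route are hypothetical names:

// Sketch: acknowledge the upload at once, process in the background.
const jobs = {}; // jobId -> { state, result }; in-memory, for illustration only

module.exports = (req, res) => {
    const jobId = Date.now().toString();
    jobs[jobId] = { state: 'processing', result: null };
    // processPdf is a hypothetical helper wrapping the formidable/request logic above.
    processPdf(req)
        .then(result => { jobs[jobId] = { state: 'done', result: result }; })
        .catch(err => { jobs[jobId] = { state: 'error', result: String(err) }; });
    res.status(202).json({ jobId: jobId }); // responds before the slow work finishes
};

// Hypothetical status route the client polls every few seconds:
// router.get('/presentation/status/:jobId', (req, res) => {
//     res.json(jobs[req.params.jobId] || { state: 'unknown' });
// });

With this shape the $.ajax call returns in milliseconds, so its timeout (and any proxy timeout in between) stops mattering no matter how long the external API takes.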

Related

Upload a stream to S3

I read Pipe a stream to s3.upload(), but I'm having difficulty: I'm not sure it actually solves my problem, though I have tried it.
What I am doing is a GET call to www.example.com; this returns a stream, and I want to upload that stream to S3.
Here's my try:
fetch('https://www.example.com/' + fileName, {
    method: 'GET',
    headers: {
        'Authorization': "Bearer " + myAccessToken,
    },
})
.then(function(response) {
    return response.text();
})
.then(function(data) {
    uploadToS3(data);
});
const uploadToS3 = (data) => {
    // Setting up S3 upload parameters
    const params = {
        Bucket: myBucket,
        Key: "fileName",
        Body: data
    };
    // Uploading files to the bucket
    s3.upload(params, function(err, data) {
        if (err) {
            throw err;
        }
        console.log(`File uploaded successfully. ${data.Location}`);
    });
};
Output: File uploaded successfully. https://exampleBucket.s3.amazonaws.com/fileName.pdf
However, the uploaded file is blank (likely because response.text() decodes the binary body as text, which corrupts it).
I figured it out, but I did not keep using fetch. Instead I actually download the file, then upload it, then delete it:
function getNewFilesFromExampleDotCom(myAccessToken, fileName, fileKey) {
    let url2 = 'https://example.com' + fileKey;
    axios
        .get(url2, {
            headers: { 'Authorization': "Bearer " + myAccessToken },
            responseType: 'stream',
        })
        .then(response => {
            let file = fileName;
            response.data.pipe(fs.createWriteStream(file));
            let myFileInfo = [];
            if (myFileInfo.length > 0) {
                myFileInfo.splice(0, myFileInfo.length);
            }
            myFileInfo.push(file);
            processArray(myFileInfo);
            console.log(file + " saved");
        })
        .catch(error => console.log(error));
}

async function processArray(array) {
    for (const item of array) {
        await delayedLog(item);
    }
    console.log('Downloaded!');
    console.log('Uploading to s3!');
}

function delay() {
    return new Promise(resolve => setTimeout(resolve, 300));
}

async function delayedLog(item) {
    await delay();
    uploadFiles(item);
}

async function uploadFiles(file) {
    uploadToS3List(file);
    await new Promise((resolve, reject) => setTimeout(resolve, 1000));
    deleteMyFiles(file);
}

const uploadToS3List = (fileName) => {
    // Read content from the file
    const fileContent = fs.readFileSync(fileName);
    // Setting up S3 upload parameters
    const params = {
        Bucket: "myBucketName",
        Key: fileName,
        Body: fileContent
    };
    // Uploading files to the bucket
    s3.upload(params, function(err, data) {
        if (err) {
            throw err;
        }
        console.log(`File uploaded successfully. ${data.Location}`);
    });
};

function deleteMyFiles(path) {
    fs.unlink(path, (err) => {
        if (err) {
            console.error(err);
            return;
        }
        console.log(path + " has been deleted");
    });
}
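
The fixed delays (300 ms and 1000 ms) are a race: a slow download or upload will be read or deleted too early. A safer variant (a sketch, not from the original answer; same axios and aws-sdk v2 assumptions) waits for the write stream's finish event and for the upload promise before deleting:

const fs = require('fs');
const axios = require('axios');
const aws = require('aws-sdk');

const s3 = new aws.S3();

// Sketch: download to disk, upload only after the file is fully written.
async function downloadThenUpload(url, token, fileName) {
    const response = await axios.get(url, {
        headers: { 'Authorization': 'Bearer ' + token },
        responseType: 'stream',
    });
    // Resolve only once the file has been completely written to disk.
    await new Promise((resolve, reject) => {
        const out = fs.createWriteStream(fileName);
        response.data.pipe(out);
        out.on('finish', resolve);
        out.on('error', reject);
    });
    // aws-sdk v2: .promise() lets us await the upload.
    const data = await s3.upload({
        Bucket: 'myBucketName',
        Key: fileName,
        Body: fs.createReadStream(fileName),
    }).promise();
    console.log(`File uploaded successfully. ${data.Location}`);
    fs.unlinkSync(fileName); // safe to delete: the upload has completed
}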

busboy on field and on file not firing

It worked yesterday, and now it stopped without any changes made to the code. What is going on?
Client
async function uploadFile(file) {
    let formData = new FormData();
    formData.append("recordUid", recordUid);
    formData.append("fieldUid", fieldUid);
    formData.append("file", file);
    await fetchPostFormData("/api/files", formData);
}

async function fetchPostFormData(url, formData) {
    try {
        let result = await (
            await fetch(url, {
                method: "POST",
                withCredentials: true,
                credentials: "include",
                headers: {
                    Authorization: localStorage.getItem("token"),
                },
                body: formData,
            })
        ).json();
        return result;
    } catch (err) {
        return err;
    }
}
Server
router.post("/api/files", async (req, res, next) => {
try {
console.log("starting upload..."); // <------------------- THIS ONE IS LOGGED
let bb = busboy({
headers: req.headers,
limits: {
fileSize: 20 * 1024 * 1024, // 20 mb
},
});
let fields = {};
// Get any text values
bb.on("field", (fieldname, val, fieldnameTruncated, valTruncated) => {
console.log("on.field", fieldname, val); // <------------------ NOT FIRING
fields[fieldname] = val;
});
bb.on("file", (fieldname, file, filename, encoding, mimetype) => {
console.log("on.file"); // <----------------------------------- NOT FIRING
let parts = filename.filename.split(".");
let name = parts[0];
let extension = parts[parts.length - 1];
let finalName = `${+new Date()}-${name}.${extension}`;
let filePath = `${filesFolderPath}${finalName}`;
// Open writeable stream to path
let writeStream = fs.createWriteStream(filePath);
// Pipe the file to the opened stream
file.pipe(writeStream);
// Check for errors
writeStream.on("error", (err) => {
console.log(err);
});
writeStream.on("close", async (err) => {
let sizeBytes = fs.statSync(filePath).size;
});
});
bb.on("finish", () => {
res.status(200).send({ success: true });
});
} catch (err) {
next(err);
}
});
Managed to solve it.
The problem was the missing req.pipe(bb) at the very end: busboy is a writable stream, and its field/file events only fire once the incoming request is actually piped into it.
        // previous code... ^^^^^
        bb.on("finish", () => {
            res.status(200).send({ success: true });
        });
        req.pipe(bb); // <------------- THIS LINE RIGHT HERE
    } catch (err) {
        next(err);
    }
});
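
For reference, a minimal self-contained version of the same pattern (a sketch assuming busboy 1.x, whose file event passes an info object, consistent with the filename.filename access in the question):

const express = require("express");
const busboy = require("busboy");
const fs = require("fs");

const router = express.Router();

router.post("/api/files", (req, res, next) => {
    const bb = busboy({ headers: req.headers });
    bb.on("field", (name, val) => {
        console.log("field:", name, val);
    });
    bb.on("file", (name, file, info) => {
        // info = { filename, encoding, mimeType } in busboy 1.x
        file.pipe(fs.createWriteStream(`./uploads/${Date.now()}-${info.filename}`));
    });
    bb.on("close", () => {
        res.status(200).send({ success: true });
    });
    bb.on("error", next);
    req.pipe(bb); // without this line, no busboy events ever fire
});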

Direct Image to S3 download function - access to data.key

I've been trying to figure this out for two days now.
I have the function below working; it basically takes a URL and uploads the file to S3. The problem is that I'm trying to return the file's location on S3 (data.key) out of the main function, but I'm getting either undefined or Promise { <pending> }.
var express = require("express"),
    axios = require('axios').default,
    stream = require("stream"),
    aws = require('aws-sdk');

async function downloadImage(url) {
    let contentType = 'application/octet-stream';
    const s3 = new aws.S3();
    // This returns undefined
    var imageUrl;
    // This returns Promise { <pending> }
    // var imageUrl = await uploadS3()
    var imageRequest = axios({
        method: 'get',
        url: url,
        responseType: 'stream'
    }).then(function (response) {
        if (response.status === 200) {
            contentType = response.headers['content-type'];
            response.data.pipe(uploadS3());
        }
    }).catch(function (error) {
        console.log(error.message);
    });
    // This is where I can't get the return value
    return imageUrl;

    function uploadS3() {
        var pass = new stream.PassThrough();
        var params = {
            Bucket: "test_bucket",
            Key: "test/" + Date.now().toString(),
            Body: pass,
            ContentType: contentType,
        };
        s3.upload(params, function (err, data) {
            if (err) {
                console.log(err);
            } else {
                // I have access to data.key only here
                imageUrl = data.key;
                console.log(imageUrl);
            }
        });
        return pass;
    }
}
I was trying something like this. It gets the value up to the then of the first function, but still no luck getting it out of the main function:
async function uploadImage(url) {
    let contentType = 'application/octet-stream';
    const s3 = new aws.S3();
    // var imageUrl = await uploadStream()
    // var imageUrl = await Promise.all
    var imageUrl;
    var imageRequest = axios({
        method: 'get',
        url: url,
        responseType: 'stream'
    }).then(function (response) {
        if (response.status === 200) {
            const { writeStream, promise } = uploadStream();
            contentType = response.headers['content-type'];
            upload = response.data.pipe(writeStream);
            promise.then((data) => {
                // I have access to it here, but not outside
                console.log(data.key);
                imageUrl = data.key;
            }).catch((err) => {
                console.log('upload failed.', err.message);
            });
        }
    }).catch(function (error) {
        console.log(error.message);
    });
    return imageUrl;

    function uploadStream() {
        var pass = new stream.PassThrough();
        var params = {
            Bucket: "test_bucket",
            Key: "test/" + Date.now().toString(),
            Body: pass,
            ContentType: contentType,
        };
        return {
            writeStream: pass,
            promise: s3.upload(params).promise(),
        };
    }
}
I was able to get it to work the following way, if anybody is interested. I create a Promise that resolves with the result (the file's location on S3) once the upload is completed.
I then use it from my main functions to do the download and return the location:
var express = require("express"),
    axios = require('axios').default,
    stream = require("stream"),
    aws = require('aws-sdk');

// This can now be run from any other function while getting access to the file location (result)
uploadImageAsync("imageURLGoesHere").then(function (result) {
    // result contains the S3 location data.key (the code can be changed to return anything else from data)
    console.log(result);
}, function (err) {
    console.log(err);
});

async function uploadImageAsync(url) {
    let contentType = 'application/octet-stream';
    const s3 = new aws.S3();
    return new Promise(function (resolve, reject) {
        var imageRequest = axios({
            method: 'get',
            url: url,
            responseType: 'stream'
        }).then(function (response) {
            if (response.status === 200) {
                const { writeStream, promise } = uploadStream();
                contentType = response.headers['content-type'];
                response.data.pipe(writeStream);
                return new Promise(function (resolve, reject) {
                    promise.then((data) => {
                        resolve(data.key);
                    }).catch((err) => {
                        console.log('upload failed.', err.message);
                    });
                });
            }
        }).catch(function (error) {
            console.log(error.message);
        });
        imageRequest.then(function (result) {
            resolve(result);
        });

        function uploadStream() {
            var pass = new stream.PassThrough();
            var params = {
                Bucket: "test_bucket",
                Key: "test/" + Date.now().toString(),
                Body: pass,
                ContentType: contentType,
            };
            return {
                writeStream: pass,
                promise: s3.upload(params).promise(),
            };
        }
    });
}
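
The accepted version works, but it wraps a new Promise around a then chain that itself creates another Promise. A flatter equivalent (a sketch under the same assumptions: axios, aws-sdk v2, the same bucket and key scheme) simply awaits the upload promise and returns data.key:

const axios = require('axios').default,
    stream = require('stream'),
    aws = require('aws-sdk');

const s3 = new aws.S3();

// Sketch: stream a remote image straight into S3 and return its key.
async function uploadImageAsync(url) {
    const response = await axios({ method: 'get', url: url, responseType: 'stream' });
    const pass = new stream.PassThrough();
    const uploadPromise = s3.upload({
        Bucket: 'test_bucket',
        Key: 'test/' + Date.now().toString(),
        Body: pass,
        ContentType: response.headers['content-type'],
    }).promise();
    response.data.pipe(pass);         // feed the download into the upload
    const data = await uploadPromise; // resolves once S3 has the whole object
    return data.key;
}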

Unwanted characters automatically appended in Node.js PUT request

I have an API for file upload to Bluemix Object Storage using the request module. Everything works, but some unwanted characters are appended to the data automatically.
Example:
--38oi85df-b5d1-4d42-81ce-c547c860b512 // this is an unwanted line
Email
abc@gmail.com
hsl@gmsl.com
pjeyjle@cse.com
--38oi85df-b5d1-4d42-81ce-c547c860b512-- // this is an unwanted line
Here is my code:
import request from 'request';
import stream from 'stream';

exports.putObjectStorageFile = function(authToken, file, csv, cb) {
    var s = new stream.Readable();
    s._read = function noop() {};
    s.push(csv); // csv is a string
    s.push(null);
    var options = {
        url: 'https://xxxx.objectstorage.open.xxxx.com/v1/AUTH_' + config.objectStorage.projectId + '/xxxx/' + file,
        method: 'PUT',
        preambleCRLF: true,
        postambleCRLF: true,
        encoding: 'utf-8',
        headers: {
            'Content-Type': 'text/html; charset=UTF-8',
            'Content-Length': 1,
            'X-Auth-Token': authToken
        },
        multipart: {
            chunked: false,
            data: [
                { body: s }
            ]
        }
    };
    function callback(error, response) {
        if (error) cb(error);
        if (!error && response.statusCode == 201) {
            cb(null);
        }
    }
    request(options, callback);
};
You are sending a multipart message (with preambleCRLF and postambleCRLF) in your request, which is what produces those boundary lines.
You should use the pkgcloud library for uploading data to Object Storage:
https://github.com/pkgcloud/pkgcloud
Below is a sample of using pkgcloud with the Object Storage service on Bluemix (credentials from VCAP).
(function (module) {
    var pkgcloud = require('pkgcloud'),
        fs = require('fs');

    function ObjectStorage(container, credentials) {
        this.container = container;
        this.config = {
            provider: 'openstack',
            useServiceCatalog: true,
            useInternal: false,
            keystoneAuthVersion: 'v3',
            authUrl: credentials.auth_url,
            tenantId: credentials.projectId,
            domainId: credentials.domainId,
            username: credentials.username,
            password: credentials.password,
            region: credentials.region
        };
        this.client = pkgcloud.storage.createClient(this.config);
    }

    ObjectStorage.prototype.validate = function () {
        return new Promise(function (resolve, reject) {
            this.client.auth(function (error) {
                if (error) {
                    return reject(error);
                }
                resolve();
            });
        }.bind(this));
    };

    ObjectStorage.prototype.makeContainer = function () {
        return new Promise(function (resolve, reject) {
            this.client.createContainer({ name: this.container }, function (error) {
                if (error) {
                    return reject(error);
                }
                return resolve();
            });
        }.bind(this));
    };

    ObjectStorage.prototype.uploadFile = function (path, name) {
        return new Promise(function (resolve, reject) {
            var myPicture = fs.createReadStream(path);
            var upload = this.client.upload({
                container: this.container,
                remote: name
            });
            upload.on('error', function (error) {
                reject(error);
            });
            upload.on('success', function (file) {
                resolve(file);
            });
            myPicture.pipe(upload);
        }.bind(this));
    };

    module.exports = ObjectStorage;
})(module);
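
Usage could look like this (a sketch; the container name, file path, and the VCAP_SERVICES parsing are hypothetical placeholders):

// Hypothetical usage of the ObjectStorage wrapper above.
const ObjectStorage = require('./object-storage');
const credentials = JSON.parse(process.env.VCAP_SERVICES)['Object-Storage'][0].credentials;

const store = new ObjectStorage('my-container', credentials);
store.validate()
    .then(() => store.makeContainer())
    .then(() => store.uploadFile('./report.csv', 'report.csv'))
    .then(file => console.log('uploaded:', file.name))
    .catch(err => console.error(err));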
I got those lines because I was sending the data as multipart. I found a solution by setting just Content-Type and Content-Length and sending the data in the body, i.e.:
var options = {
    url: 'https://dal.objectstorage.open.softlayer.com/v1/AUTH_' + config.objectStorage.projectId + '/nrich-storage/' + file,
    method: 'PUT',
    headers: {
        'Content-Type': 'text/csv',
        'Content-Length': csv.length,
        'X-Auth-Token': authToken
    },
    body: csv
};
function callback(error, response) {
    if (error) cb(error);
    if (!error && response.statusCode == 201) {
        cb(null);
    }
}
request(options, callback);
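
One caveat worth adding (not from the original answer): csv.length counts UTF-16 code units, not bytes, so the Content-Length header will be wrong for any non-ASCII content. Buffer.byteLength gives the byte count actually sent on the wire:

const csv = 'naïve,café\n1,2\n';
console.log(csv.length);                     // 15 UTF-16 code units
console.log(Buffer.byteLength(csv, 'utf8')); // 17 bytes
// So prefer: 'Content-Length': Buffer.byteLength(csv)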

Upload file using request module in Node.js

I have a file upload function:
handleUpload: function (e) {
    e.preventDefault();
    var files = this.refs.fileupload.getDOMNode().files; // get all the files to be uploaded
    if (files.hasOwnProperty(0) // get the first file
        && files[0] instanceof File) {
        var formData = new FormData();
        formData.append('fileUpload', files[0]); // append file to be sent to the server
        formData['fileUpload'] = files[0];
        Service.uploadContact(null, null, formData)
            .then(function (data) {
                if (data.statusCode == 0) { // upload success
                    this.setState({
                        validationmessage: data.message,
                        success: true,
                        showSpinner: false
                    });
                    React.findDOMNode(this.refs.fileupload).value = ''; // clear upload file
                    this.filterContact_Paging(1); // refresh the contact list
                } else {
                    this.setState({
                        validationmessage: data.error, // upload fail error
                        success: false,
                        showSpinner: false
                    });
                    React.findDOMNode(this.refs.fileupload).value = ''; // clear upload file
                }
            }, function (_err) {
                console.log('err: ', _err);
            });
    }
},
And the code that processes the upload using the request module:
serviceInterface[item.name] = function (params, csrfToken, formData) {
    return new Promise(function (resolve, reject) {
        var token = storage.get('token') || '',
            request_params = {
                url: CONFIG.apiPath + item.path,
                method: item.method,
                crossOrigin: true,
                withCredentials: false,
                json: true,
                headers: { 'Authorization': 'Bearer ' + token },
            };
        // This is where you append form data, in most cases for file upload.
        if (formData) {
            request_params['formData'] = formData;
        }
        // This is a cross-site request forgery (CSRF) token.
        if (csrfToken) {
            request_params.headers['x-csrf-token'] = csrfToken.token;
            request_params.headers['x-csrf-sec'] = csrfToken.secret;
        }
        // This determines whether to send the data through the body or the query string.
        if (item.method == 'GET') {
            request_params['qs'] = params;
        } else {
            request_params['body'] = params;
        }
        request(request_params, function (error, response, body) {
            if (error) {
                return reject(error);
            }
            if (response.statusCode === 418) {
                return reject(body);
            }
            return resolve(body);
        });
    });
};
But when I run the code, it throws an error:
TypeError: self._form.on is not a function
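
No answer is recorded for this question, but a likely cause (an assumption, not confirmed by the source): that error typically appears when request's internal form handling ends up with the browser's native FormData, which has no .on method, e.g. when the code is bundled for the browser or when a browser FormData instance is passed as the formData option. request expects formData to be a plain object whose values are strings, Buffers, or streams, and the code above also sets body alongside formData for POSTs, which conflicts. A sketch of the shape request expects when run server-side:

const fs = require('fs');
const request = require('request');

const token = 'my-token'; // placeholder
// request's formData takes a plain object, not a browser FormData instance.
request.post({
    url: 'https://example.com/upload', // placeholder URL
    headers: { 'Authorization': 'Bearer ' + token },
    formData: {
        recordUid: 'abc123',                              // plain string field
        fileUpload: fs.createReadStream('./contact.csv'), // file as a stream
    },
}, function (error, response, body) {
    if (error) return console.error(error);
    console.log(response.statusCode, body);
});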
