Get buffer from Rackspace download using pkgcloud - node.js

This may not be possible, but I am trying to return a Buffer of an image stored on Rackspace using the pkgcloud module, without having to write to the filesystem. I've seen this done before; however, both examples I found show piping the download to the file system.
function get() {
  return new Promise(function (resolve, reject) {
    _this._RackClient.download(options, function (err, results) {
      if (err !== null) {
        console.log("Error downloading:", err);
        return reject(err);
      }
      resolve(buffer); // buffer is what I want to end up with
    });
  });
}
return get();
This is ideally how I would like it to work, but there is currently no body present in the request. Can I use a stream.PassThrough() and return that, similar to uploading from a buffer?

.download() returns a Readable stream, so it should just be a matter of buffering that output. For example:
// Inside the Promise executor, so resolve/reject are in scope:
var stream = _this._RackClient.download(options);
var buf = [];
var nb = 0;
var hadErr = false;

stream.on('data', function (chunk) {
  buf.push(chunk);
  nb += chunk.length;
}).on('end', function () {
  if (hadErr)
    return;
  switch (buf.length) {
    case 0:
      return resolve(new Buffer(0));
    case 1:
      return resolve(buf[0]);
    default:
      return resolve(Buffer.concat(buf, nb));
  }
}).on('error', function (err) {
  hadErr = true;
  reject(err);
});
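Dropped into the Promise wrapper from the question, that looks roughly like this (a minimal sketch; _RackClient and options are assumed to be set up as in the original code):

function get() {
  return new Promise(function (resolve, reject) {
    // Calling download() without a callback gives back the Readable stream directly.
    var stream = _this._RackClient.download(options);
    var buf = [];
    var nb = 0;
    stream.on('data', function (chunk) {
      buf.push(chunk);
      nb += chunk.length;
    }).on('end', function () {
      resolve(Buffer.concat(buf, nb)); // resolves with the full image as a Buffer
    }).on('error', function (err) {
      reject(err); // a later resolve from 'end' is then a no-op
    });
  });
}
return get();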

Related

Buffer/Base64 encoding issue in Node.js and Azure Web App

My code works perfectly locally; the downloaded file is never corrupt (I tried it dozens of times and the files always come back complete). However, when I run it on Azure (Web App), the returned file is often corrupt (and has the wrong file size). It's extremely random: sometimes it comes back fine and sometimes it doesn't.
My function that downloads the file from Azure Blob Storage is the following; the setTimeout calls are desperate attempts on my side to make it work without corruption:
storageUtils.downloadBlobFromCloud = (subDirectory, fileName) => {
  var dir = './temp';
  var tempFilePath = dir + '/' + Date.now().toString() + '.temp';
  console.log(tempFilePath);
  var absolutePath = pathLib.resolve(tempFilePath);
  if (!fs.existsSync(dir)) {
    fs.mkdirSync(dir);
  }
  if (!fileName) {
    return null;
  }
  return new Promise((resolve, reject) => {
    blobService.getBlobToLocalFile(
      container,
      `${subDirectory}/${fileName}`,
      tempFilePath,
      async function (error, serverBlob) {
        if (!error) {
          try {
            let base64 = await fileStream(tempFilePath);
            console.log("STREAMMMINNNGGGG");
            setTimeout(() => {
              resolve(base64);
            }, 2000);
            setTimeout(() => {
              console.log('deleting file');
              fs.unlinkSync(tempFilePath);
            }, 10000);
          } catch (e) {
            console.log('error1');
            console.log(e);
            reject(e);
          }
        } else {
          console.log("fi error2");
          console.log(error);
          reject(error);
        }
      }
    );
  });
};
The stream function is:
function fileStream(filePath) {
  return new Promise((resolve, reject) => {
    let buffers = [];
    let myStream = fs.createReadStream(filePath);
    myStream.on('data', (chunk) => { buffers.push(chunk); });
    myStream.once('end', () => {
      let buffer = Buffer.concat(buffers);
      console.log("BUFFER SIZE " + buffer.length);
      let base64 = buffer.toString('base64');
      /* fs.writeFile('./temp/buffer.txt', base64, function (err) {
        if (err) return console.log(err);
        console.log(err);
      }); */
      resolve(base64);
    });
    myStream.once('error', (err) => {
      reject(err);
    });
  });
}
The function that returns the response to the client:
let downloadFile = async (directory, fileName, attempts) => {
  attempts++;
  let file = await storageUtils.downloadBlobFromCloud(directory, fileName);
  if (attempts < 20 && !file) {
    file = await downloadFile(directory, fileName, attempts);
  }
  return file;
};

server.post('/api/download-file', async function (req, res) {
  try {
    console.log(`downloading file ${req.body.fileName} ${req.body.directory}`);
    let attempts = 0;
    let file = await downloadFile(req.body.directory, req.body.fileName, attempts);
    return res.json({ file });
  } catch (error) {
    console.log(error);
    return res.json(error);
  }
});
You may notice the commented-out "buffer.txt"; I've checked the base64 in that txt file and it is indeed incomplete (i.e. expecting a 1.5 MB file but getting 1 MB) whenever the file comes back corrupt to the client.
I am out of ideas. What am I doing wrong, and why does it work perfectly locally but not on the cloud hosting?
NOTE: I have even done a side-by-side comparison of the base64 of a file that downloaded fine versus a corrupt one using a text-diff tool. The two base64 blocks are identical for a large portion, then the corrupt one simply stops while the non-corrupt base64 continues. It is as if something is cutting the base64 encoding short.
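One thing worth ruling out is the shared temp file: its name is only Date.now(), so two overlapping requests can collide on the same path, and the 2s/10s timers widen that window. A minimal sketch that avoids the temp file entirely (an assumption on my part: createReadStream here is the streaming API of the same legacy azure-storage BlobService used above, with the same blobService and container objects):

storageUtils.downloadBlobToBase64 = (subDirectory, fileName) => {
  if (!fileName) {
    return Promise.resolve(null);
  }
  return new Promise((resolve, reject) => {
    const chunks = [];
    // Stream the blob straight into memory; no temp file, no setTimeout.
    blobService.createReadStream(container, `${subDirectory}/${fileName}`)
      .on('data', (chunk) => chunks.push(chunk))
      .on('end', () => resolve(Buffer.concat(chunks).toString('base64')))
      .on('error', reject);
  });
};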

How to use a nested loop in request.head - Node JS

I am trying to download images from Parse and save them locally. I have this piece of code that does the job for me. It works well when there is only one request, but when I put it in a loop, it doesn't hold good.
for (var i = 0; i < 5; i++) {
  console.log(i); // to debug
  var filename = results_jsonObj[i].imageFile.name;
  var uri = results_jsonObj[i].imageFile.url;
  request.head(uri, function (err, res, body) {
    if (err) {
      console.log(err);
      console.log(item);
      return;
    } else {
      console.log(i); // to debug
      var stream = request(uri);
      stream.pipe(
        fs.createWriteStream("images/" + filename)
          .on('error', function (err) {
            callback(error, filename);
            stream.read();
          })
      );
    }
  });
}
Irrespective of the loop condition, only one image is downloaded to the mentioned directory.
The input comes from a JSON file, and I have the request, fs, and parse modules included in the Node.js program.
Any help on how to go about this?
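For reference, a likely reason only one image is downloaded: var is function-scoped, so by the time the asynchronous request.head callbacks run, filename and uri hold the values from the last loop iteration and every pipe targets the same file. A minimal sketch of that fix, giving each iteration its own scope with forEach over the same results_jsonObj:

results_jsonObj.slice(0, 5).forEach(function (result) {
  var filename = result.imageFile.name;
  var uri = result.imageFile.url;
  request.head(uri, function (err, res, body) {
    if (err) {
      return console.log(err);
    }
    request(uri).pipe(
      fs.createWriteStream("images/" + filename)
        .on('error', function (writeErr) {
          console.log(writeErr, filename);
        })
    );
  });
});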
I have got this fixed now. As advised in the comments, the async module helped me do the trick.
for (var i = 0; i < 900; i++) {
  async.forEachOf(results_jsonObj[i], function (value, key, callback) {
    var image = {};
    image.key = key;
    image.value = value;
    if (image.key == 'imageFile') {
      var filename = image.value.name;
      var uri = image.value.url;
      // console.log(filename, uri);
    }
    request.head(uri, function (err, res, body) {
      if (err) {
        console.log(err);
        // console.log(item);
        return;
      } else {
        // console.log(i, res.headers['content-type']); // to debug
        var stream = request(uri);
        stream.pipe(
          fs.createWriteStream("images/" + filename)
            .on('error', function (err) {
              callback(error, filename);
              stream.read();
            })
        );
      }
    });
    callback();
  }, function (err) {
    if (err) {
      console.log('one of the api failed, the whole thing will fail now');
    }
  });
}
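A couple of caveats about the snippet above: callback() is invoked synchronously before request.head and the pipe have finished, and callback(error, filename) references an error variable that is never defined, so the completion callback cannot really report failures. A tidier sketch along the same lines (hypothetical, using async.eachLimit over the whole result array instead of forEachOf over each object's keys):

async.eachLimit(results_jsonObj, 5, function (result, callback) {
  var filename = result.imageFile.name;
  var uri = result.imageFile.url;
  request.head(uri, function (err) {
    if (err) {
      return callback(err);
    }
    request(uri)
      .pipe(fs.createWriteStream("images/" + filename))
      .on('finish', function () {
        callback(); // signal completion only after the file is fully written
      })
      .on('error', callback);
  });
}, function (err) {
  if (err) {
    console.log('one of the downloads failed:', err);
  }
});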

Converting files to binary taking hours - Node.js

I'm reading CSV data and then writing it out as binary. However, the process of reading in a CSV file, converting each data point to a float, and then writing that data to a file takes an insane amount of time. What am I doing wrong, and how can I speed this up?
Is this a limitation of Node? Should I be using a different language?
var csvStream = csv.createWriteStream({headers: true});
var writeStream = fs.createWriteStream(csvPath);
csvStream.pipe(writeStream);
var binaryWriteStream = fs.createWriteStream(binaryPath, {'flags': 'a'});

Object.keys(dataSources).map(function (dataSource) {
  firstPromise = firstPromise.then(function () {
    return new Promise(function (resolve, reject) {
      var allTheData = [];
      var readStream = fs.createReadStream(dataSource);
      // stream contents into csv reader
      csv.fromStream(readStream, {delimiter: delimiterCharacter}).on("data", function (data) {
        // get all data
        allTheData.push(data);
      }).on("end", function () {
        var promise = Promise.resolve();
        // for each row
        allTheData.map(function (someData) {
          promise = promise.then(function () {
            return new Promise(function (resolve, reject) {
              var someOtherPromise = Promise.resolve();
              // preallocate buffer
              var buf = Buffer.allocUnsafe(4 * someData.length);
              var counter = 0;
              // for each individual data sample
              someData.map(function (data) {
                someOtherPromise = someOtherPromise.then(function () {
                  return new Promise(function (resolve, reject) {
                    // convert to float
                    buf.writeFloatLE(data, counter * 4);
                    counter++;
                    resolve();
                  });
                });
              });
              someOtherPromise.then(function () {
                // write buffer to file
                binaryWriteStream.write(buf);
                resolve();
              });
              someOtherPromise.catch(function (err) {
                console.log(err);
                reject(err);
              });
            });
          });
        });
        promise.then(function () {
          resolve();
        });
        promise.catch(function (err) {
          console.log(err);
          reject(err);
        });
      });
    });
  });
});
TL;DR:
reading in CSV data from files in sequence
allocating and converting the CSV data into binary, line by line
writing it to a file
With this approach it takes me five hours to process 614 files: reading, allocating, converting, and writing 99 million samples into a single unified binary file.
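The per-sample Promise chain is almost certainly where the time goes: every single float gets its own Promise and its own microtask, which is tens of millions of allocations for 99 million samples, while the conversion itself is a cheap synchronous call. A minimal sketch of converting a whole row without any Promises (assuming each row arrives as an array of numeric strings from the csv parser, as in the code above):

// Convert one parsed CSV row to a little-endian float buffer, synchronously.
function rowToFloatBuffer(row) {
  var buf = Buffer.allocUnsafe(4 * row.length);
  for (var i = 0; i < row.length; i++) {
    buf.writeFloatLE(parseFloat(row[i]), i * 4);
  }
  return buf;
}

// Hypothetical usage inside the existing "data" handler:
// csv.fromStream(readStream, {delimiter: delimiterCharacter}).on("data", function (row) {
//   binaryWriteStream.write(rowToFloatBuffer(row));
// });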

Upload image from buffer in request

I am trying to handle an image upload request from an iOS/Android app. In the request I get a buffer of the image and want to upload it to Rackspace Files without having to download the image to re-upload it. I could write the file to the file system and read it back, but I want to know if it is possible to create a readable stream from the buffer in order to pipe it to the cloud.
var options = {
  container: _this3._container,
  remote: filename,
  contentType: contentType
};

var readStream = new Readable(data);
readStream._read();
readStream.on('data', function () {
  console.log("Has data!");
});

function upload() {
  return new Promise(function (resolve, reject) {
    var writeStream = _this3._RackClient.upload(options);
    writeStream.on('success', function () {
      resolve();
    });
    writeStream.on('error', function (err) {
      if (err !== null) {
        return reject(err);
      }
    });
    readStream.pipe(writeStream);
  });
}
return upload();
This is how I am currently trying to do it, but I keep getting "not implemented" errors.
I was actually able to achieve this using a PassThrough stream.
var options = {
  container: _this3._container,
  remote: filename,
  contentType: contentType
};

function upload() {
  return new Promise(function (resolve, reject) {
    var writeStream = _this3._RackClient.upload(options);
    var bufferStream = new stream.PassThrough();
    bufferStream.end(new Buffer(data));
    writeStream.on('success', function (file) {
      resolve(file);
    });
    writeStream.on('error', function (err) {
      if (err !== null) {
        console.log(err);
        return reject(err);
      }
    });
    bufferStream.pipe(writeStream);
  });
}
return upload();
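A PassThrough is just a Transform that forwards its input unchanged, so bufferStream.end(new Buffer(data)) queues the whole buffer and marks end-of-stream, which is exactly what the pipe into writeStream needs. The "not implemented" error in the first attempt comes from new Readable() expecting a _read() implementation; a roughly equivalent sketch with a plain Readable (assuming data is something Buffer can wrap, as above) would be:

var readStream = new Readable();
readStream._read = function () {}; // no-op: the data is pushed manually below
readStream.push(new Buffer(data)); // Buffer.from(data) on newer Node versions
readStream.push(null);             // signal end-of-stream
readStream.pipe(writeStream);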

Unable to send a response while multipart form is being streamed

I'm using Node.js combined with Sails.js and Skipper to upload files to a server.
I have a use case where I need to check the file length and send a 413 error if the upload exceeds a certain file size. However, when I try to send any kind of response, nothing happens. What exactly is happening here? Does the form upload need to be completed before a response can be sent?
var getReceiver = function () {
  var receiver = new Writable({objectMode: true});
  receiver._write = function (file, enc, done) {
    file.fd = directory + file.fd;
    var fileStream = fs.createWriteStream(file.fd);
    fileStream.on('error', function (err) {
      done(err);
    }).once('finish', function () {
      done();
    });
    var fileLength = 0;
    file.on('data', function (chunk) {
      fileLength = fileLength + chunk.length;
      if (fileLength > maxFileSize) {
        var err = new Error('upload exceeds maxFileSize.');
        file.unpipe();
        fs.unlink(file.fd, function (fsErr) {
          if (fsErr && (typeof fsErr !== 'object' || fsErr.code !== 'ENOENT')) {
            return done([err].concat([fsErr]));
          }
        });
        return done(err);
      }
    });
    file.pipe(fileStream);
  };
  return receiver;
};
req.file('file').upload(getReceiver(), function (err, uploadedFiles) {
  if (err) {
    return res.badRequest(err);
  }
  // Do stuff
});
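As for why nothing happens when sending the 413: many HTTP clients do not start reading the response until they have finished sending the request body, so an early response may sit unseen until the upload ends or the connection is torn down. One pragmatic complement to the in-stream check above is to reject obviously oversized uploads from the Content-Length header before handing the request to the receiver (a minimal sketch with a hypothetical route; chunked uploads without a Content-Length still rely on the receiver's check):

server.post('/api/upload-file', function (req, res) {
  var declaredSize = parseInt(req.headers['content-length'], 10);
  if (!isNaN(declaredSize) && declaredSize > maxFileSize) {
    // Refuse before Skipper starts consuming the multipart stream.
    return res.status(413).send('upload exceeds maxFileSize.');
  }
  req.file('file').upload(getReceiver(), function (err, uploadedFiles) {
    if (err) {
      return res.badRequest(err);
    }
    // Do stuff
  });
});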
