Buffer/Base64 encoding issue in Node.js and Azure Web App - node.js

My code works perfectly locally: the downloaded file is never corrupt (I have tried it dozens of times and the files always come back complete). However, when I run it on Azure (Web App), the returned file is very often corrupt (and has the wrong file size). It is extremely random; sometimes the file is returned fine and sometimes it is not.
My function that downloads the file from Azure Blob Storage is the following; the setTimeout calls are desperate attempts on my part to make it work without corruption:
storageUtils.downloadBlobFromCloud = (subDirectory, fileName) => {
    var dir = './temp';
    var tempFilePath = dir + '/' + Date.now().toString() + '.temp';
    console.log(tempFilePath);
    var absolutePath = pathLib.resolve(tempFilePath);
    if (!fs.existsSync(dir)) {
        fs.mkdirSync(dir);
    }
    if (!fileName) {
        return null;
    }
    return new Promise((resolve, reject) => {
        blobService.getBlobToLocalFile(
            container,
            `${subDirectory}/${fileName}`,
            tempFilePath,
            async function (error, serverBlob) {
                if (!error) {
                    try {
                        let base64 = await fileStream(tempFilePath);
                        console.log("STREAMMMINNNGGGG");
                        setTimeout(() => {
                            resolve(base64);
                        }, 2000);
                        setTimeout(() => {
                            console.log('deleting file');
                            fs.unlinkSync(tempFilePath);
                        }, 10000);
                    } catch (e) {
                        console.log('error1');
                        console.log(e);
                        reject(e);
                    }
                } else {
                    console.log("fi error2");
                    console.log(error);
                    reject(error);
                }
            }
        );
    });
};
The stream function is:
function fileStream(filePath) {
    return new Promise((resolve, reject) => {
        let buffers = [];
        let myStream = fs.createReadStream(filePath);
        myStream.on('data', (chunk) => { buffers.push(chunk); });
        myStream.once('end', () => {
            let buffer = Buffer.concat(buffers);
            console.log("BUFFER SIZE " + buffer.length);
            let base64 = buffer.toString('base64');
            /* fs.writeFile('./temp/buffer.txt', base64, function (err) {
                if (err) return console.log(err);
                console.log(err);
            }); */
            resolve(base64);
        });
        myStream.once('error', (err) => {
            reject(err);
        });
    });
}
The function that returns the response to the client:
let downloadFile = async (directory, fileName, attempts) => {
    attempts++;
    let file = await storageUtils.downloadBlobFromCloud(directory, fileName);
    if (attempts < 20 && !file) {
        file = await downloadFile(directory, fileName, attempts);
    }
    return file;
}

server.post('/api/download-file', async function (req, res) {
    try {
        console.log(`downloading file ${req.body.fileName} ${req.body.directory}`);
        let attempts = 0;
        let file = await downloadFile(req.body.directory, req.body.fileName, attempts);
        return res.json({ file });
    } catch (error) {
        console.log(error);
        return res.json(error);
    }
});
You may notice the commented-out "buffer.txt" write: I've checked the base64 in that txt file and it is indeed incomplete (e.g. expecting a 1.5 MB file but getting 1 MB) whenever the file comes back corrupt to the client.
I am out of ideas. What am I doing wrong? And why does everything work perfectly locally but not on the cloud hosting?
NOTE: I have even done a side-by-side comparison of the base64 of a file that downloaded fine vs. a corrupt one, using a text-diff tool. The two base64 blocks are identical for a large portion while scrolling down, then... the corrupt one simply stops, while the non-corrupt base64 continues. It is as if something is "cutting" the base64 encoding short.
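For what it's worth, here is a minimal sketch of an alternative (a hypothetical downloadBlobToBase64 helper, not the original code) that removes the temp file and the timers altogether. It assumes the same azure-storage blobService and container as above: getBlobToStream can write into any writable stream, so the blob can be collected straight into memory. It also sidesteps the fact that tempFilePath is derived from Date.now(), which two concurrent requests could end up sharing.

// Sketch only: stream the blob directly into memory so there is no temp file,
// no setTimeout, and no unlinkSync racing the response. Assumes the same
// azure-storage `blobService` and `container` used above.
const { Writable } = require('stream');

storageUtils.downloadBlobToBase64 = (subDirectory, fileName) => {
    if (!fileName) return Promise.resolve(null);
    return new Promise((resolve, reject) => {
        const chunks = [];
        const sink = new Writable({
            write(chunk, _encoding, done) {
                chunks.push(chunk);
                done();
            }
        });
        blobService.getBlobToStream(
            container,
            `${subDirectory}/${fileName}`,
            sink,
            (error) => {
                if (error) return reject(error);
                resolve(Buffer.concat(chunks).toString('base64'));
            }
        );
    });
};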

Related

Write excel file to file stream with nodejs and download it with api call in browser

I am stuck on the problem of how to write an Excel file to a file stream and download it in the browser. I can only create a new file on the server, but that is not what I want. I don't want to create it on the server (or, if it must be created, then I also want to delete it once the user downloads it in the browser).
But I can't get the download to work.
The general idea is that I read the CSV file, then parse the data.
I also read a template Excel file, which I overwrite and write to the file stream. When I call the GET API, the download should start (I will integrate it into an Angular app later).
I am using the Exceljs npm package.
I don't get any errors, but the code is not working the way I want.
I uploaded the whole code to GitHub so you can easily see it and reproduce my setup:
https://github.com/zigax1/mean-generate-download-excel/tree/master
My excel-builder script:
export const generateExcel = async (req: Request, res: Response) => {
  try {
    await csvParse();
    res.setHeader("Content-disposition", `attachment;`);
    res.contentType(
      "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
    );
    return res.status(200).json("Success");
  } catch (err) {
    return res.status(500).json("False");
  }
};

const csvParse = async () => {
  fs.createReadStream("./content/TestCsv.csv")
    .pipe(csv.parse())
    .on("error", (error: any) => console.log("Error"))
    .on("data", (row: any) => {
      let line: any = String(row);
      line = line.split(";");
      //let parsedData = line[0];
      let parsedData = line;
      allParsedData.push(parsedData);
    })
    .on("end", (rowCount: any) => {
      let test = allParsedData.toString();
      generateFile(test);
    });
};

const generateFile = (data: any) => {
  return new Promise<fs.ReadStream>((resolve, reject) => {
    const workbook = new Excel.Workbook();
    workbook.xlsx.readFile("./utilities/template.xlsx").then(() => {
      workbook.xlsx.writeFile("./content/Test.xlsx").then(
        () => {
          let stream = fs.createReadStream("./content/Test.xlsx");
          stream.on("close", () => {
            fs.unlink("./content/Test.xlsx", (error) => {
              if (error) {
                throw error;
              }
            });
          });
          resolve(stream);
        },
        (err) => {
          throw err;
        }
      );
    });
  });
};
Thanks to everyone!
const csv = require('fast-csv');
const fs = require('fs');

function exportCSVFile(res, path, data) {
    const ws = fs.createWriteStream(path + ".csv");
    ws.on("finish", function () {
        res.download(path + ".csv", () => {
            fs.unlinkSync(path + ".csv");
        });
    });
    csv.write(data, { headers: true }).pipe(ws);
}
You can use this CSV export function for your response.
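For illustration, here is a hypothetical Express route showing how this might be wired up; the route path, the server app instance, the loadParsedRows helper, and the ./tmp/report path are placeholders, not part of the original answer.

// Hypothetical usage sketch: exportCSVFile writes <path>.csv, streams it to the client
// via res.download (which sets the attachment headers), then deletes it.
server.get('/api/export-csv', async (req, res) => {
    const rows = await loadParsedRows(); // placeholder for however the data is parsed
    exportCSVFile(res, './tmp/report', rows);
});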

Graphql "Network Error : Failed to Fetch" when upload images by taking image from direct camera using Oppo

I have a problem when uploading images taken directly with the camera on an OPPO phone. I tried the same flow on other smartphones and it works as expected.
This is my code:
const form = new FormData();

function processImages() {
    return new Promise((resolve, reject) => {
        images.forEach(async (image, index, array) => {
            try {
                const data = await getBufferFromStreams(image);
                form.append('images', data.fileBuffer, data.filename);
                if (index === array.length - 1) {
                    resolve(true);
                }
            } catch (error) {
                reject(error);
            }
        });
    });
}

const success = await processImages();
const response = await this.post(`${url}`, form, {});

export const getBufferFromStreams = async (file) => {
    if (!file) {
        return new Error('file is undefined');
    }
    const { stream, filename } = await file;
    const chunks = [];
    return new Promise((resolve, reject) => {
        stream.on('data', (chunk) => {
            chunks.push(chunk);
        });
        stream.on('error', (error) => {
            reject(error);
        });
        stream.on('end', () => {
            resolve({ fileBuffer: Buffer.concat(chunks), filename });
        });
    });
};
I always get the error "Network Error: Failed to Fetch" when uploading images taken with the camera, and only on an OPPO smartphone.
Have you encountered the same issue, and how did you solve it?
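Not specific to the handset, but worth noting: Array.prototype.forEach does not await an async callback, so processImages above can resolve while some of the appends are still pending, which may matter with larger camera images. A minimal sketch of a sequential version, reusing the same getBufferFromStreams helper, with images and form passed in explicitly:

// Sketch: process the images one at a time so every buffer has been appended to the
// form before the upload starts. Same getBufferFromStreams helper as above.
async function processImages(images, form) {
    for (const image of images) {
        const data = await getBufferFromStreams(image);
        form.append('images', data.fileBuffer, data.filename);
    }
    return true;
}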

Read from Dropbox stream into Buffer

I need to download a file from Dropbox into a buffer on my server. For security reasons I can't download the file directly to the client, so I send a request to my server, fetch the file from Dropbox there, and then forward it to the client. I managed to implement this by writing the Dropbox stream to a file on my server and then sending that file to the client.
Now I need to implement the same mechanism without writing the Dropbox stream to a file on my server: I need to create a buffer, write into it, and then forward the buffer to the client.
export const downloadFileFromDropbox = async function downloadFileFromDropbox(fileName,
    folderNameOnDropbox) {
    let isSucceeded;
    const message = [];
    let downloadResult;
    let fileBuffered = "";
    // authentication
    const dropbox = dropboxV2Api.authenticate({
        token: process.env.DEV_DROPBOX_SECRET_KEY
    });
    // configuring parameters
    const params = Object.freeze({
        resource: "files/download",
        parameters: {
            path: `/${folderNameOnDropbox}/${fileName}`
        }
    });
    let dropboxPromise = new Promise(function (resolve, reject) {
        dropbox(params, function (err, result) {
            if (err) {
                reject(err);
            } else {
                resolve(result);
            }
        }).pipe(fs.createWriteStream(/* need to implement buffer here */));
    });
    await dropboxPromise.then(async function (resultObj) {
        isSucceeded = true;
        message.push("fileDownload_OK");
    }).catch(async function (err) {
        isSucceeded = false;
        message.push(err.message);
    });
    downloadResult = {
        isSucceeded,
        message,
        /* Buffer */
    };
    return downloadResult;
};
There is no way to convert a file into a buffer without writing it to some location.
What worked for me is:
1. Write the file to a temporary folder.
2. Read it and convert it into a buffer.
3. Send the buffer back to the client.
4. Delete the file from the temporary location.
Here's my code:
dropbox = dropboxV2Api.authenticate({ token: credentials.access_token });

dropbox(
    {
        resource: 'files/download',
        parameters: {
            path: `${req.query.folder}` + `/${req.query.filename}`,
        },
    },
    (err, result, response) => {
        // download completed
        if (err) {
            return res.status(500).send(err);
        } else {
            console.log('download completed');
        }
    }
).pipe(
    fs
        .createWriteStream(`./app/tmp/` + `${req.query.filename}`)
        .on('finish', () => {
            fs.readFile(
                `./app/tmp/` + `${req.query.filename}`,
                function (err, buffer) {
                    if (err) {
                        res.status(500).send(err);
                    } else {
                        fs.unlink(`./app/tmp/` + `${req.query.filename}`, (err) => {
                            if (err) {
                                console.error(err);
                                return;
                            }
                            // file removed
                            res.status(200).send(buffer);
                        });
                    }
                }
            );
        })
);
I hope this will help you, even though it's a little bit late.

Downloading large files and uploading them to MinIO with Node.js - my service gets killed?

I have an API built with Node.js. One endpoint downloads a large file using the request-promise module, builds a new Buffer from it, and uploads that buffer to MinIO. The problem is that my API always crashes when the file is above 80-100 MB and the Node.js process gets killed on the server. How can I handle this?
This function downloads the file and converts it into a buffer:
const convertLink = async link => {
    const options = {
        uri: link,
        encoding: null,
        headers: {
            'Content-type': 'application/octet-stream'
        }
    };
    const res = rp.get(options)
        .then((body) => {
            console.log(body);
            const a = Buffer.from(body);
            return a;
        })
        .catch(err => {
            console.log(err);
            return err;
        });
    return res;
};
This is the function that uploads files to MinIO using the MinIO SDK:
const streamUpload = async (bucketName, objectName, newBuffer) => {
    try {
        const isUploaded = await minioClient.putObject(bucketName, objectName, newBuffer);
        if (isUploaded) {
            return isUploaded;
        }
    } catch (err) {
        return err;
    }
};
I think the issue here is that you are downloading the file, keeping it in memory, and then uploading it to your minioClient, which is not recommended for large files: you should download the file as a stream and then upload it as a stream too. Keeping large files in memory can be exactly what gets your Node.js server killed.
You can try something like the following example, in which I use the request npm library to download the file and save it as a stream to a temporary location, and then read the file from that temporary location and upload it to another URL.
Downloading the file:
const downloadFile = async (url) => {
    try {
        let tempLocation = "./temp/";
        let fileName = "myfile";
        return new Promise((resolve, reject) => {
            request
                .get(url)
                .on('response', function (response) {
                    console.log(response.statusCode) // 200
                    console.log(response.headers['content-type'])
                })
                .on('error', function (error) {
                    console.log('downloading error', error)
                    reject()
                })
                .on('end', async function () {
                    console.log("download finished")
                    resolve();
                })
                .pipe(fs.createWriteStream(tempLocation + '/' + fileName))
        });
    } catch (error) {
        console.log("error in downloadFile", error)
        throw error;
    }
}
Now you can upload the file to your minioClient as a stream. You can use fs.createReadStream(file) to read the file as stream data from that temporary location.
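As a rough sketch of that upload half (the uploadFromTemp name is hypothetical, and it assumes the same minioClient as in the question; recent versions of the MinIO client return a promise from putObject when no callback is passed):

// Sketch: upload from the temporary location as a stream instead of a Buffer,
// so the whole file never has to sit in memory at once.
const fs = require('fs');

const uploadFromTemp = async (bucketName, objectName, filePath) => {
    const stat = fs.statSync(filePath);
    const fileStream = fs.createReadStream(filePath);
    return minioClient.putObject(bucketName, objectName, fileStream, stat.size);
};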

Creating a GIF from remote streams in GraphicsMagick

I am currently creating a GIF from remote files in Node by downloading each image to the file system into a tmp folder.
I want to bypass saving the images to a tmp folder and keep them in memory instead. Is this possible?
As you can see, I have a download function in my AWS class which saves to a tmp folder:
download(key) {
    return new Promise((resolve, reject) => {
        request.head(`${this.base_url}/${this.bucket}/${key}`, (err, res, body) => {
            request(`${this.base_url}/${this.bucket}/${key}`)
                .pipe(fs.createWriteStream(`tmp/${key}`)).on('close', resolve)
        })
    })
};
Once they have all downloaded, I have a createGif function in my GifService class which adds each file path as a custom argument to gm, adds a delay of 50ms, resizes, and then outputs a buffer which I upload to AWS S3.
import gm from 'gm';
...

constructor() {
    this.gm = gm()
}

generateGif(images, prefix) {
    return new Promise((resolve, reject) => {
        // for each image we want in the array, we pass it to gm
        images.forEach(image => {
            this.gm.in(`tmp/${image.Key}`)
        })
        // Now we create the gif with 50sec delay between images, sized to 600px x 2
        this.gm
            .delay(50)
            .resize(600, 600)
            .toBuffer('gif', async (err, buffer) => {
                if (err) reject(err)
                const params = {
                    ACL: 'public-read',
                    Bucket: config.aws_bucket,
                    ContentType: 'image/gif',
                    Key: `${prefix}/${uuid()}.gif`,
                    Body: buffer
                }
                try {
                    // upload to S3
                    const upload = await this.aws.upload(params)
                    // resolve s3 URL
                    resolve(upload)
                } catch (err) {
                    console.log('err', err)
                    reject(err)
                }
            });
    })
}
Ideally I would like to pass a remote file stream as a custom argument, or pass a buffer in as a custom argument, as opposed to how I currently pass in the tmp file path:
images.forEach(image => {
    this.gm.in(`tmp/${image.Key}`)
})
I managed to make it work using only streams, by first converting the images to MIFF and concatenating them into a single stream. Passing the resulting buffer or stream into gm again with a delay then does the trick.
You will need to install the concat-stream npm package for this to work.
Sorry for the mixed ES5 code.
import gm from 'gm';
var concat = require('concat-stream');
...

constructor() {
    this.gm = gm()
}

start() {
    return getYourReadAbleStreamsSomehow().then(streams => {
        return this.generateGif(streams, 50);
    }).then(gifBuffer => {
        return this.uploadToAWS(gifBuffer, prefix);
    }).catch(err => {
        console.log(err)
    })
}

async uploadToAWS(buffer, prefix) {
    const params = {
        ACL: 'public-read',
        Bucket: config.aws_bucket,
        ContentType: 'image/gif',
        Key: `${prefix}/${uuid()}.gif`,
        Body: buffer
    }
    try {
        // upload to S3 and return the s3 URL
        const upload = await this.aws.upload(params)
        return upload
    } catch (err) {
        console.log('err', err)
        throw err
    }
}

generateGif(imageStreams, delay) {
    return new Promise((resolve, reject) => {
        // collect the concatenated MIFF stream into a single buffer,
        // then build the gif from that buffer with the given delay
        var write = concat(function (buffer) {
            gm(buffer)
                .delay(delay)
                .toBuffer('gif', function (err, buffer) {
                    if (err)
                        return reject(err);
                    resolve(buffer);
                })
        })
        // Convert each image to MIFF and pipe the streams into `write` one after another
        var i = 0;
        var streamHandler = function () {
            gm(imageStreams[i])
                .resize('600', '600')
                .stream('miff', function (err, stdout, stderr) {
                    if (err)
                        return reject(err)
                    var lastOne = i === imageStreams.length - 1;
                    if (!lastOne)
                        stdout.once('end', streamHandler)
                    stdout.pipe(write, {
                        end: lastOne
                    });
                    i++;
                });
        }
        streamHandler();
    })
}
