I am using the request module in Node.js to read data from AWS S3. When I download a file (docx, image, or pdf) using the code below, I get an invalid/corrupted file. But when I download a .txt file it is not corrupted and I can open it in Notepad.
I did a bit of googling and, as suggested, also tried setting the encoding to binary, but it still doesn't give the required result.
File upload is working fine, and I can see the uploaded file in the AWS console.
File download code:
var s3 = new properties.AWS.S3();
var params = {Bucket: properties.AWS_BUCKET, Key: req.headers['x-org'] + "/" + "talk" + "/" + req.body.fileName};
s3.getSignedUrl('getObject', params, function (err, URL) {
    if (err) {
        console.log("Error inside the S3");
        console.log(err, err.stack); // an error occurred
        res.send(null);
    } else {
        console.log("After getObject:-" + URL);
        request({
            url: URL, // URL to hit
            method: 'GET',
            encoding: 'binary'
        }, function (error, response, body) {
            if (error) {
                console.log(error);
            } else {
                //console.log(response.statusCode, body);
                res.set('content-disposition', 'attachment; filename=' + req.body.fileName);
                res.send(body);
            }
        });
    }
});
Update:
I have narrowed down the error: even just sending a file read from the local file system gives corrupted files on the client.
Here's the code for that:
var filePath = path.join(__dirname, '..', '..', '..', 'downloads', req.body.fileURL);
var stat = fs.statSync(filePath);
var filename = path.basename(filePath);
var mimetype = mime.lookup(filePath);
console.log("mimetype=" + mimetype);
res.setHeader('Content-disposition', 'attachment; filename=' + filename);
res.setHeader('Content-type', mimetype + ";charset=UTF-8");
res.setHeader('Content-Length', stat.size);
var filestream = fs.createReadStream(filePath);
filestream.pipe(res);
Finally I was able to solve the problem.
I got the solution hint from this blog: https://templth.wordpress.com/2014/11/21/handle-downloads-with-angular/.
Per this blog:
When testing with binary content like zip files or images, we see that the downloaded content is corrupted. This is due to the fact that Angular automatically applies transformation on the received data. When handling binary contents, we want to get them as array buffer.
Final working code is:
var filePath = path.join(__dirname, '..', '..', '..', 'downloads', req.body.fileURL);
var file = fs.createWriteStream(filePath);
s3.getObject(params)
    .on('httpData', function (chunk) {
        //console.log("inside httpData");
        file.write(chunk);
    })
    .on('httpDone', function () {
        console.log("inside httpDone");
        file.end();
        //file.pipe(res);
    })
    .send(function () {
        console.log("inside send");
        // send only the file's base name, not the full server path
        res.setHeader('Content-disposition', 'attachment; filename=' + path.basename(filePath));
        res.setHeader('Content-type', mime.lookup(filePath));
        res.setHeader('Transfer-Encoding', 'chunked');
        var filestream = fs.createReadStream(filePath);
        filestream.pipe(res);
    });
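On the client side, the blog's hint translates to asking Angular for the raw bytes as an array buffer so no transformation is applied. A minimal AngularJS sketch (the endpoint URL, MIME type, and the save step are illustrative, not from my original code):

$http.get('/download', { responseType: 'arraybuffer' })
    .then(function (response) {
        // response.data is an ArrayBuffer; wrap it in a Blob so the
        // browser saves the bytes unmodified
        var blob = new Blob([response.data], { type: 'application/octet-stream' });
        var link = document.createElement('a');
        link.href = window.URL.createObjectURL(blob);
        link.download = 'myfile.docx';
        link.click();
    });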
Related
I am using NestJS. I have converted an HTML file and saved it as a PDF on my localhost; opening the PDF from the save location works fine, but when I download it I'm unable to open the file.
My API controller reports that the file downloaded successfully.
async exportPDF(@Res({ passthrough: true }) res: Response, @Body() dto: ExportReadingsPDFDto) {
    const stream = await this.metersService.exportPDF(dto);
    const filename = stream.replace(/^.*[\\\/]/, "");
    const fileReadStream = fs.createReadStream(stream, { encoding: "base64" });
    const stat = fs.statSync(stream);
    res.set({
        "Content-Type": "application/pdf",
        "Content-Length": stat.size,
        "Content-Disposition": 'attachment; filename="' + filename + '"'
    });
    fileReadStream.pipe(res);
}
Please help; I couldn't find any other example of creating PDF files and sending them to the user.
You can simply create the PDF file on the server, then use this piece of code to send it to the user as a download:
const filename = '123.pdf';
res.setHeader('Content-disposition', 'attachment; filename=' + filename);
const filestream = createReadStream('files/' + filename);
filestream.pipe(res);
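Note that in the code from the question, the { encoding: "base64" } option on createReadStream is what corrupts the download: it makes the stream emit base64 text instead of the raw PDF bytes. Omit the encoding and, ideally, set the Content-Type as well. A minimal sketch along those lines:

const filename = '123.pdf';
res.setHeader('Content-Type', 'application/pdf');
res.setHeader('Content-Disposition', 'attachment; filename="' + filename + '"');
// no encoding option: the stream emits raw Buffers, not base64 text
const filestream = createReadStream('files/' + filename);
filestream.pipe(res);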
I am trying to upload pdf/xls/xlsx files to the file server, but whenever I send a file to my back end using FormData, no folder is created and the file isn't uploaded either.
Here is my code on the front end (Vue):
let toSubmit = {
    document_title: this.document_title.toUpperCase(),
    added_by: this.userInfo.employeeCode,
    FileName: `${this.document_title.replace(/ +/g, "_").toUpperCase()}.${this.$refs.files.files[0].name.split(".").pop().toLowerCase()}`
}

const formData = new FormData()
formData.append("toSubmit", JSON.stringify(toSubmit))
_.forEach(this.uploadFiles, file => {
    formData.append("files", file)
})

const url = `${this.api}add/template/document`
axios.post(url, formData, {
    headers: {
        'Content-Type': 'multipart/form-data',
        dataType: 'json',
    }
}).then(res => {
    console.log('document template', res.data)
})
On my back end I used multer for uploading the file, but I have no idea whether the problem lies in multer itself or whether something is missing in my code. Anyway, this is my code on the back end (Node):
API
router.post("/add/template/document", uploadTemplate.array("files"), (req, res) => {
    let myData = JSON.parse(req.body.toSubmit)
    res.send(myData)
})
Uploading
const storageTemplate = multer.diskStorage({
    destination: (req, file, cb) => {
        var dir = `./uploads/DocumentTemplates/`;
        if (!fs.existsSync(dir)) {
            fs.mkdirSync(dir);
        }
        cb(null, dir);
    },
    filename: function (req, file, cb) {
        let myData = JSON.parse(req.body.toRequest);
        let fileName = myData.FileName;
        let new_filename = `${fileName}`;
        cb(null, new_filename);
    }
});

const uploadTemplate = multer({ storage: storageTemplate });
I still can't figure out why no folder is being created. Am I missing something?
You're creating a subfolder without the recursive flag; that's why the folder is not created.
Also, there is no body available in the multer middleware, only the file, so you cannot pass custom data to the filename callback like that; you need to change the upload middleware.
To create subfolders, add this flag:
fs.mkdirSync(dir, { recursive: true });
Since there is no body in multer, use the file object instead (you can add mimetype validation to check that only certain types are uploaded):
filename: function (req, file, cb) {
    console.log('file', file);
    // validate expected file against file.mimetype
    // if it fails, return an error: cb(yourErrorMessage, null);
    cb(null, file.originalname); // handle the name on the front end
}
And on the front end, pass the desired filename as the third argument to append:
formData.append("files", file, 'filename goes here');
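Putting both fixes together, the storage configuration might look like this (a sketch; the destination path and the allowed MIME types are illustrative):

const fs = require('fs');
const multer = require('multer');

const storageTemplate = multer.diskStorage({
    destination: (req, file, cb) => {
        const dir = './uploads/DocumentTemplates/';
        // recursive: true creates intermediate folders and is a no-op if they exist
        fs.mkdirSync(dir, { recursive: true });
        cb(null, dir);
    },
    filename: (req, file, cb) => {
        // only accept the expected types; file.mimetype is set by multer
        const allowed = [
            'application/pdf',
            'application/vnd.ms-excel',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ];
        if (!allowed.includes(file.mimetype)) {
            return cb(new Error('Unsupported file type'), null);
        }
        // the name sent from the front end via formData.append("files", file, name)
        cb(null, file.originalname);
    }
});

const uploadTemplate = multer({ storage: storageTemplate });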
I'm using the Google Drive REST API to upload a ZIP file, but all my ZIP files become corrupted after the upload. When I download a file and then try to unzip it on my computer, macOS says "Unable to expand 'FILE_NAME.zip' into FOLDER. Error 79 - Inappropriate file type or format." I made sure it wasn't just my computer by having another person on a different computer try to unzip it; they had the same problem. I also confirmed that the ZIP file wasn't corrupted before I uploaded it to Google Drive.
Below is a simplified version of my code.
const async = require('async');
const requestModule = require('request');
const fs = require('fs');

var api = {};
var tasks = {
    // first, get the zip file contents
    'getFile': function (cb) {
        fs.readFile('my_file.zip', { 'encoding': 'UTF-8' }, function (err, data) {
            if (err) {
                console.error(err);
                return cb();
            }
            api.file_data = data;
            cb();
        });
    },
    // second, upload the file contents to google drive via their API
    'uploadFile': function (cb) {
        var metadata = {
            'mimeType': 'application/zip',
            'name': 'my_file.zip'
        };
        var request = {
            'url': 'https://www.googleapis.com/upload/drive/v3/files?uploadType=multipart&supportsAllDrives=true',
            'method': 'POST',
            'headers': {
                'Authorization': 'Bearer ' + GOOGLE_ACCESS_TOKEN,
                'Content-Type': 'multipart/related; boundary="SECTION"'
            },
            'body': '--SECTION\r\n' +
                'Content-Type: application/json; charset=UTF-8\r\n' +
                '\r\n' +
                JSON.stringify(metadata) + '\r\n' +
                '\r\n' +
                '--SECTION\r\n' +
                'Content-Type: application/zip\r\n' +
                'Content-Transfer-Encoding: base64\r\n' +
                '\r\n' +
                Buffer.from(api.file_data).toString('base64') + '\r\n' +
                '\r\n' +
                '--SECTION--'
        };
        requestModule(request, function (err, res, body) {
            if (err) {
                console.error(err);
                return cb();
            }
            cb();
        });
    }
};
async.series(tasks, function () {
    console.log('Done');
});
Note: I'm doing a Q&A-style post and will be answering my own question.
After a lot of trial and error, it came down to how I was reading the file before it was uploaded. As an artifact of a copy/paste, the encoding option on the readFile call was kept. When I removed { 'encoding': 'UTF-8' } and then uploaded the file, the resulting zip file could be unzipped perfectly.
I simply removed the encoding on readFile, so with the change the code now looks like this:
fs.readFile('my_file.zip', function (err, data) {
    // ...
});
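To see why the encoding mattered: with 'UTF-8', fs.readFile decodes the ZIP's binary bytes as text, and any sequences that aren't valid UTF-8 get replaced, so re-encoding to base64 later cannot recover the original bytes. A quick check that demonstrates the loss:

const fs = require('fs');

const asBuffer = fs.readFileSync('my_file.zip');          // raw bytes
const asText = fs.readFileSync('my_file.zip', 'utf8');    // lossy decode for binary data

// round-tripping the decoded text back to bytes usually changes the length,
// which shows the data was altered by the decode step
console.log(asBuffer.length, Buffer.from(asText, 'utf8').length);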
I am able to successfully upload a file via an upload button to my vendor's API. The vendor's API also returns a .png file in blob format that I need to upload to Azure Blob Storage. I have tried a few approaches, but I am getting the following error in my Node console:
[Error] statusCode: 414
My front-end code is in an Angular controller which passes data back to my Node backend, which contains my Azure Blob Storage calls. I have the formidable and request modules installed and required, but I am not using them in my current backend code since the data I receive is already in blob format.
Here is my front-end upload code. The success "result" is the blob data returned to me:
$scope.sendToProduction = function () {
    var parts = document.getElementById("file").value.split("\\");
    var uploadedfilename = parts[parts.length - 1];
    var basefilename = uploadedfilename.split(".")[0];
    var fileextension = uploadedfilename.split(".")[1];
    var filename = basefilename + '.' + fileextension;
    var file = document.getElementById("file").files[0];
    var formdata = new FormData();
    formdata.append(filename, file);
    $.ajax({
        url: 'http://myvendorsapi/fileuploadendpoint',
        type: "POST",
        data: formdata,
        mimeType: "multipart/form-data",
        processData: false,
        contentType: false,
        crossDomain: true,
        success: function (result) {
            var filename = 'Test.png';
            var file = result;
            console.log(file);
            $http.post('/postAdvanced', { filename: filename, file: file }).success(function (data) {
                console.log(data);
            }).error(function (err) {
                console.log(err);
            });
        },
        error: function (error) {
            console.log("Something went wrong!");
        }
    });
};
Here is my Node backend for uploading to Azure Blob Storage:
app.post('/postAdvanced', function (req, res, next) {
    var filename = req.body.filename;
    var file = req.body.file;
    blobSvc.createBlockBlobFromText('blob5', file, filename, function (error, result, response) {
        if (!error) {
            console.log("Uploaded" + result);
        } else {
            console.log(error);
        }
    });
})
How do I upload an AJAX response into Azure Blob Storage?
The problem is that in this line of code:
blobSvc.createBlockBlobFromText('blob5', file, filename, function (error, result, response) {
you have the wrong parameter order. It should be:
blobSvc.createBlockBlobFromText('blob5', filename, file, function (error, result, response) {
HTTP status code 414 means "Request-URI Too Long": with the parameters swapped, the file contents were being passed as the blob name, which ends up in the request URL.
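For completeness, the corrected route might look like this (a sketch; the response handling is illustrative):

app.post('/postAdvanced', function (req, res) {
    var filename = req.body.filename; // e.g. 'Test.png'
    var file = req.body.file;         // the blob contents
    // container, then blob name, then content, in that order
    blobSvc.createBlockBlobFromText('blob5', filename, file, function (error, result, response) {
        if (!error) {
            res.send('Uploaded ' + filename);
        } else {
            res.status(500).send(error.message);
        }
    });
});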
gridfs-stream: uploading a file is not working, but downloading a file works. Please give a solution.
app.post('/file', function (req, res) {
    var busboy = new Busboy({
        headers: req.headers
    });
    var fileId = new mongo.ObjectID();
    var file = filename();
    busboy.on('file', function (fieldname, file, filename, encoding, mimetype) {
        if (!file) return res.send({
            result: 'NO_FILE_UPLOADED This Error by not upload any file'
        });
        console.log('File [' + fieldname + '] got ' + data.length + ' bytes');
        var writeStream = gfs.createWriteStream({
            _id: fileId,
            filename: my_file.txt,
            mode: 'w',
            chunkSize: 1024,
            root: fs,
            content_type: mimetype
        });
        // here how can i give a path to read stream ?
        // File id is a mongodb id, that's ok. but what about the file name?
        fs.createReadStream('/some/path').pipe(writestream);
    }).on('finish', function () {
        res.writeHead(200, {
            'content-type': 'text/html'
        });
        res.end('download file');
    });
    req.pipe(busboy); // it's working
});
The gridfs-stream file upload code is not working; the file download code works very well. How can I set the path for the read stream's files?
busboy never writes directly to disk. What you get is just a plain readable stream (file), so pipe that to your writeStream:
file.pipe(writeStream);
instead of:
fs.createReadStream('/some/path').pipe(writestream);
Also, your if (!file) check will never execute because file will always be defined, and console.log('File [' + fieldname + '] got ' + data.length + ' bytes'); is wrong because there is no data variable in that scope.
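Putting it together, the upload handler might look like this. A sketch, assuming gfs is an initialized gridfs-stream Grid and Busboy/mongo are already required:

app.post('/file', function (req, res) {
    var busboy = new Busboy({ headers: req.headers });
    busboy.on('file', function (fieldname, file, filename, encoding, mimetype) {
        var writeStream = gfs.createWriteStream({
            _id: new mongo.ObjectID(),
            filename: filename, // the name busboy parsed from the upload
            mode: 'w',
            content_type: mimetype
        });
        // busboy hands us a readable stream; pipe it straight into GridFS
        file.pipe(writeStream);
    });
    busboy.on('finish', function () {
        res.writeHead(200, { 'content-type': 'text/html' });
        res.end('upload complete');
    });
    req.pipe(busboy);
});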