I have the following code at server side to download a zip(destpath),zip file is getting downloaded at the client but when we try to open it shows invaild zip content.
this.route('download', {
where: 'server',
path: '/download/:_id',
action: function() {
destPath="/home/rootuser/botbuilder/botBuilderdevelo/Python.zip"
if (fs.existsSync(destPath)) {
filetext = fs.readFileSync(destPath, "utf-8");//tried encoding binary
}
var headers = {
'Content-Type': 'application/octet-stream',//tried application/zip
'Content-Disposition': "attachment; filename=" + "pyth" + '.zip'
};
this.response.writeHead(200, headers);
return this.response.end(filetext);
// i tried this.response.end(destpath);
}
})
specify the encoding when calling res.end() solved the problem:
this.response.end(filetext1,"binary");
Related
I am using nest js, I have converted html file and saved to pdf on my localhost when opening the pdf file from the save location it is fine,
but when I am downloading I'm unable to open the file.
My api controller answers that the file is download successfully.
async exportPDF(#Res({ passthrough: true }) res: Response, #Body() dto: ExportReadingsPDFDto) {
const stream = await this.metersService.exportPDF(dto);
const filename = stream.replace(/^.*[\\\/]/, "");
const fileReadStream = fs.createReadStream(stream, { encoding: "base64" });
const stat = fs.statSync(stream);
res.set({
"Content-Type": "application/pdf",
"Content-Length": stat.size,
"Content-Disposition": 'attachment; filename="' + filename + '"'
});
fileReadStream.pipe(res);
}
Please help, I couldn't find any other example for creating pdf files and sending them to the user
You can simply create the PDF file within server, then using this piece of code you can download it as a response for the user.
const filename = '123.pdf';
res.setHeader('Content-disposition', 'attachment; filename=' + filename);
const filestream = createReadStream('files/' + filename);
filestream.pipe(res);
I'm using the Google Drive REST API to upload a ZIP file but all my ZIP files become corrupted after the upload. When I download the file and then try to unzip it on my computer, on MacOS it says "Unable to expand 'FILE_NAME.zip' into FOLDER. Error 79 - Inappropriate file type or format.". I made sure it wasn't just my computer by having another person on a different computer try to unzip it and they had the same problem. I also confirmed that the ZIP file wasn't becoming corrupted before I uploaded it to Google Drive.
Below is a simplified version of my code.
const async = require('async');
const requestModule = require('request');
const fs = require('fs');
var api = {};
var tasks = {
// first, get the zip file contents
'getFile': function(cb) {
fs.readFile('my_file.zip', {'encoding':'UTF-8'}, function(err, data) {
if (err) {
console.error(err);
return cb();
}
api.file_data = data;
cb();
});
},
// second, upload the file contents to google drive via their API
'uploadFile': function(cb) {
var metadata = {
'mimeType': 'application/zip',
'name': 'my_file.zip'
};
var request = {
'url': 'https://www.googleapis.com/upload/drive/v3/files?uploadType=multipart&supportsAllDrives=true',
'method': 'POST',
'headers': {
'Authorization': 'Bearer ' + GOOGLE_ACCESS_TOKEN,
'Content-Type': 'multipart/related; boundary="SECTION"'
},
'body': '--SECTION\r\n' +
'Content-Type: application/json; charset=UTF-8\r\n' +
'\r\n' +
JSON.stringify(metadata) + '\r\n' +
'\r\n' +
'--SECTION\r\n' +
'Content-Type: application/zip\r\n' +
'Content-Transfer-Encoding: base64\r\n' +
'\r\n' +
new Buffer.from(api.file_data).toString('base64') + '\r\n' +
'\r\n' +
'--SECTION--'
};
requestModule(request, function(err, res, body) {
if (err) {
console.error(err);
return cb();
}
cb();
});
}
};
async.series(tasks, function() {
console.log('Done');
});
Note: I'm doing a Q&A-style post and will be answering my own question.
After a lot of trail and error, it came down to how I was reading the file before being uploaded. As an artifact from a copy/paste, the encoding on the readFile function was kept. When I removed {'encoding':'UTF-8'} then uploaded the file, the resulting zip file was able to be unzipped just perfectly.
I simply removed the encoding on readFile, so with the changes the code now looks like this:
fs.readFile('my_file.zip', function(err, data) {
// ...
});
I'm currently calling an API that returns a pdf
This is my code.
const options = {
method: 'GET',
url: 'myurl',
headers: {
accept: 'application/pdf',
encoding: null,
authorization: 'Bearer ' + token
},
}
Then i make the request
try{
var file = await request(options,(error,response,body) => {
if(error) throw new Error(error)
})
}catch(e){
console.log(e)
}
console.log(file)
The console.log prints what i assume is the pdf encoded
/Filter /FlateDecode
/Length 15130
>>
stream
-qO�▲$�֤���� K�;�Cn��V��!R�♂�PA�D
// And prints a lot more of these characters
However when i try to download it as pdf, it downloads an empty pdf. I was doing this:
fs.writeFileSync("./report/report.pdf",file);
res.download("./report/report.pdf",'report.pdf')
If someone could help me,
Thanks!
I build an admin panel for my firebase app with nodejs.
Now I want to upload a pdf file to firebase/google storage.
My Javascript-client, to upload those files from computer, looks like this:
$("#upload-pdf").on("change", function(e) {
file = e.target.files[0];
document.getElementById("pdfName").value = file.name;
if (file) {
var reader = new FileReader();
reader.readAsDataURL(e.target.files[0]);
reader.onload = function (event) {
var fileContent = reader.result;
$('#pdfPreview').attr('src', event.target.result);
document.getElementById("pdfContent").value = fileContent;
}
}
});
The imagecontent of the file (value of pdfContent) will now be sent to my nodejs server via ajax:
...
$.ajax({
url: '/editMenucard',
type: 'POST',
data: JSON.stringify(output),
contentType: 'application/json',
success: function(data) {
window.location.assign('/restaurant/overview');
}
// ...
}).catch(function(error) {
// Handle error
});
In my nodejs-server I have implemented the following:
var bucket = admin.storage().bucket('myBucketName');
const contents = data.PDF.PDFContent;
mimeType = contents.match(/data:([a-zA-Z0-9]+\/[a-zA-Z0-9-.+]+).*,.*/)[1];
fileName = data.PDF.PDFName + '-original.' + mimeTypes.detectExtension(mimeType);
base64EncodedPDFString = contents.replace("data:application\/pdf;base64,", '');
PDFBuffer = new Buffer.from(base64EncodedPDFString, 'base64');
const file = bucket.file('restaurant-menucards/' + fileName);
console.log(mimeType);
file.save(PDFBuffer, {
contentType: mimetype + ";charset=utf-8",
gzip : false,
metadata: { contentType: mimetype + ";charset=utf-8" },
public: true,
validation: 'md5'
}, function (error) {
if (!error) {
console.log("PDF successfully written")
}
});
The file is successfully uploaded to my firebase storage, but unforunately, when I want to open it (or embedd it in my html view via signed URL, that I created for) it doesn't work.
The error appears:
Fehler
Fehler beim Laden des PDF-Dokuments.
In english it means something like "Error; Failed to load pdf document".
So I think my PDF-file is corrupted. I also tested it in Safari, and the pdf didn't appear either.
Is there anything, I do wrong with my upload?
I'm using, as you can see, the NodeJS method "save" from the object "file".
Does anyone have suggestions for me?
I've tried on different browsers and it doesn't work.
The base64 string is not corrupted. I tried it in an online Base64-Converter and the pdf showed correctly.
I am using request module in NodeJS to read data from AWS S3. When I am downloading the file(docx or image or pdf) using below code,its giving me an invalid/corrupted file. But when I am downloading .txt file it's not getting corrupted and I am able to see file in notepad.
I did a bit of googling and as suggested also tried by setting encoding to binary, still its not giving required result.
File upload is working fine. And I am able to see the uploaded file in AWS console.
File download code
var s3 = new properties.AWS.S3();
var params = {Bucket: properties.AWS_BUCKET, Key: req.headers['x-org'] + "/" + "talk" + "/" + req.body.fileName};
s3.getSignedUrl('getObject', params, function (err, URL) {
if (err) {
console.log("Error inside the S3");
console.log(err, err.stack); // an error occurred
res.send(null);
} else {
console.log("After getObject:-" + URL);
request({
url: URL, //URL to hit
method: 'GET',
encoding: 'binary'
}, function (error, response, body) {
if (error) {
console.log(error);
} else {
//console.log(response.statusCode, body);
res.set('content-disposition', 'attachment; filename=' + req.body.fileName);
res.send(body);
}
});
}
});
Update:-
I have narrow down the error, and just trying to send the file by reading file from local file system. Even that also is giving the corrupted files on client.
Here's the code for same
var filePath = path.join(__dirname, '..', '..', '..', 'downloads', req.body.fileURL);
var stat = fs.statSync(filePath);
var filename = path.basename(filePath);
var mimetype = mime.lookup(filePath);
console.log("mimetype=" + mimetype);
res.setHeader('Content-disposition', 'attachment; filename=' + filename);
res.setHeader('Content-type', mimetype + ";charset=UTF-8");
res.setHeader('Content-Length', stat.size);
var filestream = fs.createReadStream(filePath);
filestream.pipe(res);
Finally able to solve the problem.
Got solution hint from this blog https://templth.wordpress.com/2014/11/21/handle-downloads-with-angular/.
Per this blog
When testing with binary content like zip files or images, we see
that the downloaded content is corrupted. This is due to the fact that
Angular automatically applies transformation on the received data.
When handling binary contents, we want to get them as array buffer.
Final working code is:-
var filePath = path.join(__dirname, '..', '..', '..', 'downloads', req.body.fileURL);
var file = fs.createWriteStream(filePath);
s3.getObject(params).
on('httpData', function(chunk) {
//console.log("inside httpData");
file.write(chunk);
}).
on('httpDone', function() {
console.log("inside httpDone");
file.end();
//file.pipe(res);
}).
send(function() {
console.log("inside send");
res.setHeader('Content-disposition', 'attachment; filename=' + filePath);
res.setHeader('Content-type', mimetype);
res.setHeader('Transfer-Encoding', 'chunked');
var filestream = fs.createReadStream(filePath);
filestream.pipe(res);
});