I am working on an API for storing a file uploaded by the user.
function uploadPhoto(req, res) {
  var imagedata = new Buffer('');
  req.body.on('data', function (chunk) {
    imagedata = Buffer.concat([imagedata, chunk]);
  });
  req.body.on('end', function (chunk) {
    fs.writeFile('success.jpeg', imagedata, 'binary', function (err) {
      if (err) throw err;
      console.log('File saved.');
    });
  });
}
I get an error when I call req.body.on('data').
The data is sent from Postman.
When I print the value of req.body with console.log("message: " + req.body), it comes out as a string.
I tried to write it to a file using a Buffer, like this:
var writeFile = function (type, cb) {
  var data = new Buffer(req.body, type);
  fs.writeFile(type + '.jpeg', data, type, function (err) {
    cb(null, data.length);
  });
};
async.parallel([
  writeFile.bind(null, 'binary'),
  writeFile.bind(null, 'utf8'),
  writeFile.bind(null, 'ascii'),
  writeFile.bind(null, 'ucs2'),
  writeFile.bind(null, 'base64')
], function (err, results) {
  res.status(200).send({});
});
This creates several .jpeg files of different sizes, but none of them can be opened as an image.
How can I store this image from the user?
Thank you very much.
This looks like a good case for streams.
function uploadPhoto(req, res) {
  var file = fs.createWriteStream(__dirname + '/success.jpeg');
  req.pipe(file).on('error', function (err) { console.log(err); });
}
The request headers could also help determine the file type and character encoding:
var file = fs.createWriteStream(__dirname + '/success.jpeg', {defaultEncoding: req.headers.encoding || 'utf8'})
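For example, here is a minimal sketch that picks the saved file's extension from the Content-Type header. The extension map and the response handling are my assumptions, not part of the original answer; note that req.pipe only works if no body-parsing middleware has already consumed the request stream.

var fs = require('fs');
var path = require('path');

function uploadPhoto(req, res) {
  // Hypothetical mapping from mime type to file extension.
  var extensions = { 'image/jpeg': '.jpeg', 'image/png': '.png', 'image/gif': '.gif' };
  var ext = extensions[req.headers['content-type']] || '.bin';
  var file = fs.createWriteStream(path.join(__dirname, 'success' + ext));

  req.pipe(file)
    .on('error', function (err) {
      res.status(500).send({ error: 'upload failed' });
    })
    .on('finish', function () {
      // The write stream has flushed everything to disk at this point.
      res.status(200).send({ saved: 'success' + ext });
    });
}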
I am currently using aws-sdk to upload PDF files to an S3 bucket, like this:
function uploadFile (filePath, remoteFilename, cb) {
  var fileBuffer = fs.createReadStream(filePath); // ex.: 'temp/longFileName.pdf'
  fileBuffer.on('error', function (err) {
    logger.warn('Failed reading local pdf file');
    cb(err);
  });
  s3.upload({
    Bucket: 'someBucketName',
    Key: remoteFilename,
    Body: fileBuffer
  }, function (error, response) {
    cb(error, { response, remoteFilename });
  });
}
The problem is that sometimes the file is uploaded with a size of 0 B; sometimes it uploads with the correct size but is corrupt when I download it; and sometimes it uploads correctly and opens properly.
The PDF file I read from the local file system is always correct.
Could somebody help me fix this issue?
Update
I am creating the PDF using pdfkit:
function createPdf (data, cb) {
  var fs = require('fs');
  var PDFDocument = require('pdfkit');
  var filePath = 'temp/longFileName.pdf';
  var pdf = new PDFDocument({
    size: 'LEGAL',
    info: {
      Title: 'Title of File Here',
      Author: 'Some Author',
    }
  });

  // Write stuff into PDF
  pdf.text('Hello World');

  // Stream contents to a file
  pdf.pipe(
    fs.createWriteStream(filePath)
  )
  .on('finish', function () {
    console.log('PDF closed');
  });

  // Close PDF and write file.
  pdf.end();
  cb(null, { filePath });
}
Once the callback in this function is called, I call the uploadFile function:
function doAll (someData, cb) {
  createPdf(someData, function (err, data) {
    if (err) console.log(err);
    uploadFile(data.filePath, function (err, data) {
      if (err) console.log(err);
      console.log('finished');
      cb(null, 'done');
      return;
    });
  });
}
The problem is that you're calling the callback immediately instead of waiting for the file to be fully written: your callback should be invoked inside the .on('finish') handler.
pdf.pipe(
  fs.createWriteStream('./path/to/file.pdf')
)
.on('finish', function () {
  console.log('PDF closed');
  cb(null, 'finished'); // The callback should be in here
});

// Close PDF and write file.
pdf.end();
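Applied to the createPdf function from the question, the fix looks like this (a sketch, trimmed for brevity; only the callback placement and error handling change):

function createPdf (data, cb) {
  var fs = require('fs');
  var PDFDocument = require('pdfkit');
  var filePath = 'temp/longFileName.pdf';
  var pdf = new PDFDocument({ size: 'LEGAL' });

  pdf.text('Hello World');

  pdf.pipe(fs.createWriteStream(filePath))
    .on('finish', function () {
      // The file is fully written only now, so this is the safe place
      // to hand the path to the caller (and from there to s3.upload).
      cb(null, { filePath });
    })
    .on('error', function (err) {
      cb(err);
    });

  pdf.end();
}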
I'm trying to save a Buffer (of a file uploaded from a form) to Google Cloud Storage, but it seems like the Google Node SDK only allows files at a given path to be uploaded (read/write streams).
This is what I have used for AWS (S3); is there anything similar in the Google Node SDK?
var fileContents = new Buffer('buffer');
var params = {
  Bucket: '', // bucket name
  Key: '', // file name
  ContentType: '', // set mimetype
  Body: fileContents
};

s3.putObject(params, function (err, data) {
  // Do something
});
The only way I have found to do it so far is to write the buffer to disk, upload the file with the SDK (specifying the path to the new file), and then delete the file once it has uploaded successfully. The downside is that the whole process is significantly slower, to the point where using Google Storage seems unfeasible. Is there any workaround or way to upload a buffer directly?
.save to save the day! Below is some code where I save the "pdf" I created.
https://googleapis.dev/nodejs/storage/latest/File.html#save
const path = require("path");
const { Storage } = require("@google-cloud/storage");

const gc = new Storage({
  keyFilename: path.join(__dirname, "./path to your service account .json"),
  projectId: "your project id",
});

const file = gc.bucket(bucketName).file("tester.pdf");

file.save(pdf, (err) => {
  if (!err) {
    console.log("cool");
  } else {
    console.log("error " + err);
  }
});
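As a usage note: per the linked docs, file.save returns a promise when no callback is passed, so the same upload can also be written like this (a sketch, assuming the same gc, bucketName, and pdf buffer as above):

async function savePdf(pdf) {
  try {
    // No callback passed, so save() returns a promise.
    await gc.bucket(bucketName).file("tester.pdf").save(pdf);
    console.log("cool");
  } catch (err) {
    console.log("error " + err);
  }
}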
This is actually easy:
const stream = require('stream'); // needed for PassThrough

let remotePath = 'some/key/to/store.json';
let localReadStream = new stream.PassThrough();
localReadStream.end(JSON.stringify(someObject, null, ' '));

let remoteWriteStream = bucket.file(remotePath).createWriteStream({
  metadata: {
    contentType: 'application/json'
  }
});

localReadStream.pipe(remoteWriteStream)
  .on('error', err => {
    return callback(err);
  })
  .on('finish', () => {
    return callback();
  });
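Wrapped up as a reusable helper, the same pattern might look like this (a sketch; the function name and parameters are mine, not from the original answer):

const stream = require('stream');

// Hypothetical helper: upload any JS object to GCS as pretty-printed JSON.
function saveObjectAsJson(bucket, remotePath, someObject, callback) {
  const localReadStream = new stream.PassThrough();
  localReadStream.end(JSON.stringify(someObject, null, ' '));

  localReadStream
    .pipe(bucket.file(remotePath).createWriteStream({
      metadata: { contentType: 'application/json' }
    }))
    .on('error', callback)
    .on('finish', () => callback(null, remotePath));
}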
We have an issue about supporting this more easily: https://github.com/GoogleCloudPlatform/gcloud-node/issues/1179
But for now, you can try:
file.createWriteStream()
  .on('error', function (err) {})
  .on('finish', function () {})
  .end(fileContents);
The following snippet is from a Google example. It assumes you have used multer, or something similar, and can access the uploaded file at req.file. You can stream the file to Cloud Storage using middleware that resembles the following:
function sendUploadToGCS (req, res, next) {
  if (!req.file) {
    return next();
  }

  const gcsname = Date.now() + req.file.originalname;
  const file = bucket.file(gcsname);

  const stream = file.createWriteStream({
    metadata: {
      contentType: req.file.mimetype
    },
    resumable: false
  });

  stream.on('error', (err) => {
    req.file.cloudStorageError = err;
    next(err);
  });

  stream.on('finish', () => {
    req.file.cloudStorageObject = gcsname;
    file.makePublic().then(() => {
      req.file.cloudStoragePublicUrl = getPublicUrl(gcsname);
      next();
    });
  });

  stream.end(req.file.buffer);
}
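For context, a sketch of how this middleware might be wired into an Express route. The route path and form field name here are hypothetical; multer's memory storage is what puts the upload on req.file.buffer, and bucket is assumed configured as in the snippet above:

const express = require('express');
const multer = require('multer');

const app = express();
// Memory storage keeps the whole upload as a Buffer on req.file.buffer.
const upload = multer({ storage: multer.memoryStorage() });

app.post('/upload', upload.single('image'), sendUploadToGCS, (req, res) => {
  res.json({ url: req.file.cloudStoragePublicUrl });
});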
I have this approach working for me:
const destFileName = `someFolder/${file.name}`;
const fileCloud = this.storage.bucket(bucketName).file(destFileName);

fileCloud.save(file.buffer, {
  contentType: file.mimetype
}, (err) => {
  if (err) {
    console.log("error");
  }
});
I am uploading an image from Node.js.
Control never reaches req.on('end'): nothing inside it is printed, and I cannot identify where the problem is. I am writing an API, called from JS, to upload an image to a specific location on the server.
app.post('/tde/api/photo/:widgetId/:choosenFileName', function (req, res) {
  console.log("In file Upload..");
  console.log(req.params.widgetId);
  console.log(req.params.choosenFileName);

  res.writeHead(200, { 'Content-Type': 'application/binary' });

  var filedata = '';
  var chunks = [];
  //req.setEncoding('binary');

  req.on('data', function (chunk) {
    //filedata += chunk;
    chunks.push(chunk);
  });

  req.on('end', function (chunk) {
    var dir = 'uploads/' + req.params.widgetId;
    if (!fs.existsSync(dir)) {
      fs.mkdirSync(dir);
      console.log("directory created..");
    }
    fs.readdir(dir, function (err, filenames) {
      if (err) {
        onError(err);
        return;
      }
      filenames.forEach(function (filename) {
        console.log(filename);
        fs.unlink(dir + '/' + filename, function (err) {
          if (err) {
            return console.error(err);
          }
          console.log("File deleted successfully!");
        });
      });

      //fs.writeFile('uploads/' + req.params.widgetId + '/sanmoy.jpg', chunk, function (err) {
      var fileName = req.params.choosenFileName;
      var widgetId = req.params.widgetId;

      //fs.writeFile('uploads/' + widgetId + '/' + fileName, filedata, 'binary', function (err) {
      var buffer = Buffer.concat(chunks);
      fs.writeFile('uploads/' + widgetId + '/' + fileName, buffer, function (err) {
        if (err) {
          return console.error(err);
        }
        console.log("writing file success!");
      });
    });
  });

  res.end("File is uploaded");
});
I'm trying to get a file from S3 and store it in NoSQL Couchbase.
I'm trying to store the output stream, with the following code:
var outputStream1 = fs.createWriteStream("./tmp/" + url);

// if statusCode == 200, we have the file; save it in our cache and then send it to the client
res.on('data', function (chunk) {
  outputStream1.write(chunk);
});

res.on('end', function () {
  var value = outputStream1;
  cb.set(key, value, function (err, result) {
    if (err) { console.log(err); }
    console.log("Set item for key with CAS: " + result.cas);
    cb.get(key, function (err, result) {
      if (err) { console.log(err); }
      console.log("Value for key is: " + result.value);
      var readStream = result.value;
      readStream.on('open', function () {
        console.log(response);
      });
      readStream.on('end', function () {
        readStream.close();
      });
    });
  });
});
The problem is that I am storing the outputStream object itself and then trying to read it back.
I am looking for a way to store the data object we get from S3 in Couchbase, and then be able to send it to the client. Is this possible?
I think you can just set a Buffer as the value and get it back later. First you need to collect the data in a buffer:
var buffers = [];

res.on('data', function (chunk) {
  buffers.push(chunk);
});

res.on('end', function () {
  var value = Buffer.concat(buffers);
  cb.set(key, value, function (err, result) {
    if (err) { console.log(err); }
    console.log("Set item for key with CAS: " + result.cas);
    cb.get(key, function (err, result) {
      if (err) { console.log(err); }
      console.log("Value for key is: " + result.value); // You should get your value in here
    });
  });
});
You can also use the concat-stream module to help you. I've answered a similar question on buffering streams.
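For reference, a minimal sketch of the same buffering done with concat-stream (the variable names mirror the snippet above):

var concat = require('concat-stream');

// concat-stream collects everything piped into it and hands over
// a single Buffer once the source stream ends.
res.pipe(concat(function (value) {
  cb.set(key, value, function (err, result) {
    if (err) { console.log(err); }
    console.log("Set item for key with CAS: " + result.cas);
  });
}));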
I'm trying to upload an image using Node.js, Express & the request module, but I keep getting a 415 from the Amazon S3 instance.
fs.createReadStream(req.files.image.path).pipe(request.post(defaults.url, {
  form: {
    param_1: '',
    param_2: ''
  }
}, function (error, response, body) {
  if (error) {
    callback(error, null);
  } else {
    if (response.statusCode === 200) {
      callback({}, body);
    } else {
      callback(body, response);
    }
  }
}));
I think the image is not getting appended to the request, but I'm not 100% sure. Any advice?
pipe expects a Writable stream as its parameter. You can use the res object of Express directly as the pipe destination. But if you'd like to upload to S3, you can read the file from the req stream and use putObject to write it to S3:
var fs = require('fs');

fs.readFile(req.files.image.path, function (err, data) {
  var AWS = require('./aws_config');
  var s3 = new AWS.S3();
  var bucket = '';

  s3.putObject({
    ACL: 'public-read', // private access by default
    Bucket: bucket,
    Key: file_name,
    Body: data
  }, function (err, data) {
    if (err) {
      console.log(err);
      res.send(500, { msg: 'image upload failed', error: err });
    } else {
      console.log('S3 upload Successful');
      res.send({});
    }
  });
});
If you'd like to download, you can use pipe to redirect the read object to the response directly:
app.get('/download/:file', function (req, res, next) {
  var AWS = require('./aws_config');
  var s3 = new AWS.S3();

  s3.getObject({
    Bucket: '',
    Key: req.params.file
  }, function (err, data) {
    if (err) console.log(err);

    var fs = require('fs');
    var filePath = __dirname + "/downloads/" + req.params.file;
    fs.writeFile(filePath, data.Body, function (err) {
      if (err) console.log(err);
      else {
        res.attachment(filePath);
        var filestream = fs.createReadStream(filePath);
        filestream.pipe(res);
        // TODO: delete file from server ?
      }
    });
  });
});
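Note that the snippet above still buffers the object to a local file first. To truly pipe the object straight to the response, the aws-sdk request object exposes createReadStream(), so a fully streaming variant could look like this (a sketch; the empty Bucket placeholder is kept from the original snippet):

app.get('/download/:file', function (req, res, next) {
  var AWS = require('./aws_config');
  var s3 = new AWS.S3();

  res.attachment(req.params.file);

  // getObject(...).createReadStream() streams the S3 object body
  // directly to the client, so nothing is written to local disk.
  s3.getObject({
    Bucket: '', // bucket name goes here
    Key: req.params.file
  }).createReadStream()
    .on('error', next)
    .pipe(res);
});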