I am trying to write an import script in Node.js that pulls data from the web, formats it, and then sends it to my API.
Part of that includes pulling artist data from LastFM, fetching the images for each artist and sending them off to my API to resize and save.
The import script is just run in the terminal.
The part of the import script that is responsible for pulling the images down and sending off to my API looks like:
_.forEach(artist.images, function(image){
  console.log('uploading image to server ' + image.url);
  request.get(image.url)
    .pipe(request.post('http://MyAPI/files/upload', function(err, files){
      if (err) {
        console.log(err);
      }
      console.log('back from upload');
      console.log(files);
    }));
});
And the files.upload action looks like:
upload: function(req, res){
  console.log('saving image upload');
  console.log(req.file('image'));
  res.setTimeout(0);

  var sizes = [
    ['avatar', '280x210'],
    ['medium', '640x640'],
    ['large', '1024x768'],
    ['square', '100x100'],
    ['smallsquare', '50x50'],
    ['avatarsquare', '32x32']
  ];

  //resize to the set dimensions
  //for each dimension - save the output to gridfs
  _.forEach(sizes, function(bucket){
    // parse the 'WxH' dimension string into width and height
    var dims = bucket[1].split('x'),
        width = dims[0],
        height = dims[1];

    // Let's create a custom receiver
    var receiver = new Writable({objectMode: true});
    receiver._write = function(file, enc, cb) {
      gm(file).resize(width, height).upload({
        adapter: require('skipper-gridfs'),
        uri: 'mongodb://localhost:27017/sonatribe.' + bucket[0]
      }, function (err, uploadedFiles) {
        if (err){
          return res.serverError(err);
        }
        else{
          return res.json({
            files: uploadedFiles,
            textParams: req.params.all()
          });
        }
      });
      cb();
    };

    /* req.file('image').upload(receiver, function(err, files){
      if(err) console.log(err);
      console.log('returning files');
      return files;
    });*/
  });
}
However, console.log(req.file('image')); is not what I'd hoped - probably because this code is expecting the image to be uploaded as part of a multipart form upload with a field named image - which it is not...
I'm trying to figure out how the file will end up inside my action, but my google-fu is completely out of action today and I'm fairly (very) new to Node.
Anyone able to offer some pointers?
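One way to make the Sails side see a field named image is to send the downloaded stream as an actual multipart form field. Here's a minimal, untested sketch using the request library's formData option (backed by the form-data module); the field name 'image' and the upload URL come from the code above, the rest is an assumption:
var request = require('request');
var _ = require('lodash');

_.forEach(artist.images, function (image) {
  request.post({
    url: 'http://MyAPI/files/upload',
    formData: {
      // the GET stream becomes a multipart part named 'image',
      // so req.file('image') should be populated on the Sails side
      image: request.get(image.url)
    }
  }, function (err, httpResponse, body) {
    if (err) return console.log(err);
    console.log('back from upload');
    console.log(body);
  });
});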
I have been trying to download an image file using JSforce for Node.js. I am able to create a file locally after retrieving the data and converting it to base64 format, but the image shows a "file not supported" message, whereas I am able to download JavaScript-type files with correct data.
I am querying the attachment field of knowledge article in salesforce.
Following is my query :
SELECT Body__c, Attachment__Name__s, Attachment__ContentType__s, Attachment__Length__s, Attachment__Body__s, Id, KnowledgeArticleId, Title, UrlName FROM Knowledge__kav
I am sending GET request to Attachment__Body__s field of article.
Following is my node js code:
function createFile(attachmentBody, attachmntContentType, attachmntName){
  var req = {
    url: attachmentBody,
    method: 'GET',
    headers: {
      "Content-Type": attachmntContentType
    }
  };
  var test = conn.request(req, function(err, resp) {
    if (err) {
      console.log(err);
    } else {
      var fileBuffer = Buffer.from(resp, 'binary').toString('base64');
      console.log('fileBuffer--- ' + fileBuffer);
      fs.writeFile('./downloadedAttachments/' + attachmntName, fileBuffer, 'base64', function(err){
        if (err) throw err;
        console.log('File saved.');
      });
    }
  });
}
Please help me with this.
I am now successfully able to download the file in the correct format. Following is my updated code:
function createFile(knbid, attachmntName, callback) {
  var file_here = conn.sobject('Knowledge__kav').record(knbid);
  file_here.retrieve(function (err, response) {
    if (err) {
      console.error(err);
      return callback(0); // the callback was unreachable after the return in the original
    } else {
      var obj = fs.createWriteStream('./downloadedAttachments/' + attachmntName, {defaultEncoding: 'binary'});
      //console.log('blob--' + JSON.stringify(file_here.blob('Attachment__Body__s')));
      var stream = file_here.blob('Attachment__Body__s').pipe(obj);
      // 'finish' receives no arguments; errors arrive on the 'error' event
      stream.on('error', function (err) {
        console.log('not downloaded-' + knbid);
      });
      stream.on('finish', function () {
        console.log('downloaded-' + knbid);
      });
    }
  });
}
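For reference, a hypothetical call site for this helper (the record id and file name are made up for illustration; note that, as written above, the callback only fires when the retrieve fails):
createFile('kA0D0000000CfXyKAK', 'attachment.png', function (status) {
  // the helper invokes callback(0) only on a retrieve error
  if (status === 0) console.log('could not retrieve the article record');
});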
I have an Express application that gets an image from a user via a form. There are several things I need to do with the image, and as it gets more complex, I'm not sure how to handle it. It is a message board post where there are some required text fields and an optional image upload. I need to:
Find the orientation of the image from EXIF data and reorient it if needed
Save a copy of the original image to the server (done)
Create a thumbnail of the image and save it to the server (done)
Save the record to the database, whether or not there's an uploaded image (done)
I'm concerned about the order in which I'm doing things, wondering if there's a more efficient way. I know I can call upload inside the route instead of passing it in, but I'd like to not repeat myself when I save the record to the database, since I need to save it whether there's an image or not.
I have code that's working for the final 3 steps, but am open to suggestions on how to improve it. For the first step, I'm stumped at how to go about getting the orientation of the original and rotating it if needed. Is this something I need to do client-side instead? And how do I work it into the existing code?
Here's the code:
Setup
var multer = require('multer');
var storage = multer.diskStorage({
  destination: function (req, file, cb) {
    cb(null, './public/uploads');
  },
  filename: function (req, file, cb) {
    var fileExt = file.mimetype.split('/')[1];
    if (fileExt == 'jpeg'){ fileExt = 'jpg'; }
    cb(null, req.user.username + '-' + Date.now() + '.' + fileExt);
  }
});
var restrictImgType = function(req, file, cb) {
  var allowedTypes = ['image/jpeg', 'image/gif', 'image/png'];
  // check the `file` argument - req.file isn't populated yet at filter time
  if (allowedTypes.indexOf(file.mimetype) !== -1){
    // To accept the file pass `true`
    cb(null, true);
  } else {
    // To reject this file pass `false`
    cb(null, false);
    //cb(new Error('File type not allowed')); // How to pass an error?
  }
};
// fileFilter is a top-level multer option, not part of limits
var upload = multer({ storage: storage, fileFilter: restrictImgType, limits: { fileSize: 3000000 } });
In Route
router.post('/new', upload.single('photo'), function(req, res){
  var photo = null;
  var allowedTypes = ['image/jpeg', 'image/gif', 'image/png'];
  if (req.file){
    photo = '/uploads/' + req.file.filename;
    // save thumbnail -- should this part go elsewhere?
    im.crop({
      srcPath: './public/uploads/' + req.file.filename,
      dstPath: './public/uploads/thumbs/100x100/' + req.file.filename,
      width: 100,
      height: 100
    }, function(err, stdout, stderr){
      if (err) throw err;
      console.log('100x100 thumbnail created');
    });
    // I can get orientation here,
    // but the image has already been saved
    im.readMetadata('./public/uploads/' + req.file.filename, function(err, metadata){
      if (err) throw err;
      console.log("exif orientation: " + metadata.exif.orientation);
    });
  }
  // Save it
  new Post({
    username: req.user.username,
    title: req.body.title,
    body: req.body.messagebody,
    photo: photo
  }).save(function(err){
    if (err){ console.log(err); }
    res.redirect('/messageboard');
  });
});
Thanks for your help
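One possible approach for the first step: ImageMagick can rotate an image according to its EXIF orientation tag in a single pass with the -auto-orient operator, so it can run server-side right after the upload and before the crop. A minimal, untested sketch using the same imagemagick module (im) as above; overwriting the file in place is an assumption:
// re-orient the saved upload from its EXIF orientation tag,
// overwriting the file in place before cropping/thumbnailing
im.convert([
  './public/uploads/' + req.file.filename,
  '-auto-orient',
  './public/uploads/' + req.file.filename
], function(err, stdout){
  if (err) throw err;
  console.log('image re-oriented from EXIF data');
  // ...create the 100x100 thumbnail from the corrected image here
});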
I'm currently building a web app using Sails.js and got stuck retrieving an image file from GridFS. I have successfully uploaded the file to my mongo gridfs using skipper-gridfs. I have no idea how to display the file in the correct way (I'm new to Sails.js and Node).
Here is what my code for retrieving an image file from gridfs looks like in FileController.js (I'm using gridfs-stream):
show: function (req, res, next) {
  var mongo = require('mongodb');
  var Grid = require('gridfs-stream');
  var buffer = "";

  // create or use an existing mongodb-native db instance
  var db = new mongo.Db('testDb', new mongo.Server("192.168.0.2", 27017), {safe: true});
  var gfs = Grid(db, mongo);

  // streaming from gridfs
  var readstream = gfs.createReadStream({
    filename: 'e1ecfb02-e095-4e2f.png'
  });

  //check if file exists
  gfs.exist({
    filename: 'e1ecfb02-e095-4e2f.png'
  }, function (err, found) {
    if (err) return handleError(err);
    found ? console.log('File exists') : console.log('File does not exist');
  });

  //buffer data
  readstream.on("data", function (chunk) {
    buffer += chunk;
    console.log("adsf", chunk);
  });

  // dump contents to console when complete
  readstream.on("end", function () {
    console.log("contents of file:\n\n", buffer);
  });
}
When I ran it, the console showed nothing.
There is no error either.
How should I fix this?
Additional Questions:
Is it better & easier to store/read files to/from local disk instead of using gridfs?
Am I correct in choosing gridfs-stream to retrieve the file from gridfs?
In the skipper-gridfs code there's a 'read' method that accepts an fd value and returns the file corresponding to that value. So you just have to pull the file from mongo with that method and send it as the response. It should work fine.
download: function (req, res) {
  var blobAdapter = require('skipper-gridfs')({
    uri: 'mongodb://localhost:27017/mydbname.images'
  });

  var fd = req.param('fd'); // value of fd comes here from the get request
  blobAdapter.read(fd, function(error, file) {
    if (error) {
      res.json(error);
    } else {
      res.contentType('image/png');
      res.send(new Buffer(file));
    }
  });
}
I hope it helps :)
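As an aside, if you'd rather keep the gridfs-stream approach from the question: the most likely reason nothing was logged is that the Db instance is never actually connected. A minimal sketch, assuming the same setup as above, opening the connection first and piping the file straight to the response:
var db = new mongo.Db('testDb', new mongo.Server("192.168.0.2", 27017), {safe: true});
db.open(function (err) {
  if (err) return handleError(err);
  var gfs = Grid(db, mongo);
  gfs.createReadStream({ filename: 'e1ecfb02-e095-4e2f.png' })
    .pipe(res); // stream the image directly to the HTTP response
});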
Additional Questions:
Yes, using gridfs is better in both performance and efficiency. MongoDB documents normally have a 16MB size limit, but with gridfs you can store a file of any size; it breaks the file into chunks and stores them.
Retrieving has been shown above.
You can now use skipper-gridfs in sails to manage uploads/downloads.
var blobAdapter = require('skipper-gridfs')({uri: 'mongodb://jimmy#j1mtr0n1xx#mongo.jimmy.com:27017/coolapp.avatar_uploads' });
Upload:
req.file('avatar')
  // blobAdapter was already instantiated above, so call .receive() on it directly
  .upload(blobAdapter.receive(), function whenDone(err, uploadedFiles) {
    if (err) return res.negotiate(err);
    else return res.ok({
      files: uploadedFiles,
      textParams: req.params.all()
    });
  });
Download:
blobAdapter.read(filename, callback);
Bear in mind that the file name will change once you upload it to mongo; you have to use the file name returned in the first response.
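To tie the two together, a hypothetical upload handler could pick the generated name off the returned file descriptors and use it for later reads (skipper reports the stored name in the fd property):
req.file('avatar').upload(blobAdapter.receive(), function (err, uploadedFiles) {
  if (err) return res.negotiate(err);
  // the name the file was stored under, for later blobAdapter.read() calls
  var fd = uploadedFiles[0].fd;
  blobAdapter.read(fd, function (error, file) {
    if (error) return res.json(error);
    res.contentType('image/png');
    res.send(new Buffer(file));
  });
});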
I use Formidable to upload a file to my Node-js webserver (local installation for testing). The file is in ansi (iso-8859-1) before I upload it, and it still is when it has been uploaded, I've checked the file using Notepad++.
var form = new formidable.IncomingForm();
form.parse(req, function(err, fields, files) {
  csv
    .fromPath(files.upfile.path, {headers: true})
    .on("record", function(data){
      console.log(data.adresse);
    })
    .on("end", function(){
      console.log("done");
    });
  console.log("form done");
  res.end();
});
As soon as I use a stream or a plugin (fast-csv in this case) that uses Node's fs to stream the content, the encoding is botched. See the data below; the ? should be the Danish character ø.
{adresse: 'Niver?d By, Karlebo'}
Can anyone tell me why?
Update:
It worked by streaming the uploaded file and using Iconv to convert it.
router.post('/upload', function(req, res) {
  var form = new formidable.IncomingForm();
  form.parse(req, function(err, fields, files) {
    console.log(files.upfile.path);
    var stream = fs.createReadStream(files.upfile.path);
    var csvStream = csv({headers: true})
      .on("record", function(data){
        var db = req.db;
        // Set our collection
        var collection = db.get('bbrcollection');
        // Submit to the DB
        collection.insert(data, function (err, doc) {
          //console.log(data);
          if (err) {
            // If it failed, return error
            console.log("There was a problem adding the information to the database.");
          }
        });
      })
      .on("end", function(){
        console.log("done");
      });
    stream
      .pipe(new Iconv('WINDOWS-1252', 'UTF-8'))
      .pipe(csvStream);
  });
  res.render('upload', { title: 'upload file' });
});
You have to convert the encoding to UTF-8 to use the contents in JavaScript. Both iconv and iconv-lite should be able to do that for you.
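For comparison, a minimal sketch of the same conversion using iconv-lite's streaming API (pure JavaScript, no compiled dependency); the path is illustrative and csvStream is the same fast-csv stream as above:
var fs = require('fs');
var iconv = require('iconv-lite');

// decode the WINDOWS-1252 file into UTF-8 strings on the fly
fs.createReadStream('./uploads/adresser.csv')
  .pipe(iconv.decodeStream('win1252'))
  .pipe(csvStream);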
I am working on an application that sends an audio file to a server powered by Sails.js.
I need to convert this audio file on-the-fly and send the converted data to amazon S3 using node streams.
I don't want to store data on the server but directly stream the upload file to S3 after it has been converted.
Do you know a way to do that?
I tried using formidable, but I couldn't get it working. Did anyone succeed implementing such a thing?
Thanks
EDIT
As jibsales noticed, it will probably be better if I show you a piece of what I've tried so far. So basically my strategy is to use formidable, fluent-ffmpeg and knox with streams.
I plan to receive the file stream with formidable and write chunks of received data in a first stream (stream 1) that will be the entry point for the conversion with fluent-ffmpeg. Then fluent-ffmpeg writes the output stream into stream2 which is the entry point of Knox.
The first problem I have to face is the fact that formidable doesn't seem to work. However, I'm not sure my strategy is good...
The code so far looks like this:
upload : function(req, res){
  // two streams to transfer file data
  var stream1 = new stream.Stream(); //stream for the incoming file data
  var stream2 = new stream.Stream(); //stream for the converted file data

  var client = knox.createClient({
      key: 'APIKEY'
    , secret: 'SECRET'
    , bucket: 'bucket'
  });

  //Using formidable to access data chunks
  var form = new formidable.IncomingForm();
  form.parse(req, function(err, fields, files){ //form.parse is not called
    if(err){
      return res.json(err);
    }else{
      return res.send('ok');
    }
  });

  //overriding form.onPart to get the file data chunks
  form.onPart = function(part) {
    sails.log('getting part...');
    if (!part.filename) {
      form.handlePart(part);
      return;
    }
    //we put the data chunk in stream1 to convert it
    part.on('data', function(chunk) {
      stream1.write(chunk);
    });
  };

  form.on('error', function(err){
    return sails.log(err);
  });

  form.on('progress', function(bytesReceived, bytesExpected) {
    sails.log(bytesReceived);
  });

  //conversion process
  var proc = new ffmpeg({ source: stream1 })
    .withAudioCodec('libfdk_aac')
    .toFormat('mp3')
    .writeToStream(stream2, {end: true}, function(retcode, error){
      console.log('file has been converted successfully');
    });

  client.putStream(stream2, '/file.mp3', headers, function(err, response){
    return res.send(response);
  });
},
The reason formidable isn't working is that the default Sails body parser is parsing the request before formidable can get to it. In order to get this to work, you'll have to bypass the Sails body parser for multi-part form uploads. So, in config/express.js:
var express = require('sails/node_modules/express');
module.exports.express = {
  bodyParser: function(options) {
    return function (req, res, next) {
      if (!req.headers['content-type'] || req.headers['content-type'].indexOf('multipart/form-data') === -1) {
        return express.bodyParser()(req, res, next);
      } else {
        return next();
      }
    };
  }
};
This just skips the body parser entirely if the content-type header includes multipart/form-data. Otherwise, it executes the default express body parser. Note that the default Sails body parser is slightly fancier than what comes with Express (if it can't parse the request, it fakes an application/json header and retries), so if you want the extra goodness you'll have to copy/paste the code from the core into the bodyParser function above. But in most cases you won't miss it ;-)
We're working on a better file parser for Sails that will hopefully take care of some of this for you, but in the mean time this is your best bet!
I've figured out a way to convert files on-the-fly with fluent-ffmpeg and formidable. However, it seems impossible for the moment to stream the converted chunks coming out of the ffmpeg conversion directly to Amazon, as you must specify the "Content-Length" header, which is unknown during the conversion...
For the first part (client upload) I first had to disable express bodyParser on the upload route in a config/express.js file :
var express = require('sails/node_modules/express');
module.exports.express = {
  bodyParser: function() {
    return function (req, res, next){
      console.log(req.path);
      if (!(req.path === '/upload' && req.method === 'POST')) {
        return express.bodyParser()(req, res, next);
      } else {
        return next();
      }
    };
  }
};
For the implementation I used a transform stream that does basically nothing. It just gets the right parts of the uploaded data (the ones containing file data) and links formidable's parser to fluent-ffmpeg. For now I can only save the converted file to disk before sending it to Amazon...
upload : function(req, res){
  var Transform = Stream.Transform; //stream for the incoming file data

  var client = knox.createClient({
      key: 'KEY'
    , secret: 'SECRET'
    , bucket: 'BUCKET'
    , region: 'eu-west-1' //don't forget the region (my bucket is in Europe)
  });

  function InputStream(options){
    if(!(this instanceof InputStream)){
      return new InputStream(options);
    }
    Transform.call(this, options);
  }

  util.inherits(InputStream, Transform);

  var inputDataStream = new InputStream;

  var form = new formidable.IncomingForm();
  form.parse(req, function(err, fields, files){
    if(err){
      return res.send(err);
    }
  });

  form.onPart = function(part){
    if (!part.filename){
      form.handlePart(part);
      return;
    }
    //we put the data chunks in inputDataStream to convert them,
    //pausing the form when the stream's buffer is full (backpressure)
    part.on('data', function (chunk){
      if(!inputDataStream.write(chunk)){
        form.pause();
        inputDataStream.once('drain', function(){ form.resume(); });
      }
    });
    part.on('end', function (chunk){
      inputDataStream.end(chunk);
    });
  };

  InputStream.prototype._transform = function (chunk, enc, cb){
    this.push(chunk);
    cb();
  };

  var proc = new ffmpeg({ source: inputDataStream })
    .withAudioBitrate('64k')
    .withAudioCodec('libmp3lame')
    .toFormat('mp3')
    .saveToFile('file.mp3', function (retcode, error){
      console.log('file has been converted successfully');
      res.send('ok');
      var upload = new MultiPartUpload({
        client: client,
        objectName: 'file.mp3',
        file: 'file.mp3'
      }, function(err, body){
        if(err) {
          console.log(err);
          return;
        }
        console.log(body);
      });
    });
},
EDIT
Using knox-mpu you can actually stream data to Amazon S3 directly! You just have to create another transform stream that will be the source of your upload, and knox-mpu does the magic. Thank you all!
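A minimal sketch of that final setup, assuming knox-mpu accepts a readable stream via its stream option (it buffers each part before sending it, which is how it gets around the unknown Content-Length); the pass-through stream stands in for the "transform stream that does basically nothing" mentioned above:
var uploadStream = new Stream.PassThrough(); //just links ffmpeg's output to knox-mpu

new ffmpeg({ source: inputDataStream })
  .withAudioBitrate('64k')
  .withAudioCodec('libmp3lame')
  .toFormat('mp3')
  .writeToStream(uploadStream, { end: true }, function (retcode, error){
    console.log('conversion finished');
  });

var upload = new MultiPartUpload({
  client: client,
  objectName: 'file.mp3',
  stream: uploadStream //no file on disk: parts are read straight from the stream
}, function (err, body){
  if (err) return console.log(err);
  console.log(body);
});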