So I have mongoose, multer and lwip (they are required at the top of the file):
var express = require('express');
var router = express.Router();
var mongoose = require('mongoose');
var jwt = require('jsonwebtoken');
var Users = require('../models/users.js');
var multer = require('multer');
var mime = require('mime');
var lwip = require('lwip');
If I comment out the Users.findOne part, the image is cropped exactly as I want. But if I uncomment it, the lwip part stops working, even though no errors are thrown; it simply never enters the lwip.open() callback.
router.post('/image', upload.single('file'), function (req, res) {
    // This part always works.
    Users.findOne({userid: req.body.userid}, function (err, user) {
        var imgpath = req.file.path.split("public\\")[1];
        user.user_photos.push(imgpath);
        user.save(function (err) {
            if (err)
                console.log('error');
            else
                console.log('success');
        });
    });
    // If I remove the part above, this part works too.
    lwip.open(req.file.path, function (err, image) {
        image.batch()
            .crop(200, 200)
            .writeFile(req.file.path, function (err) {
                if (err)
                    console.log('error');
                else
                    console.log('success');
            });
    });
});
You might need my multer config too, so here it is:
var storage = multer.diskStorage({
    destination: function (req, file, cb) {
        cb(null, './public/uploads/');
    },
    filename: function (req, file, cb) {
        var extension;
        if (mime.extension(file.mimetype) == "jpeg") {
            extension = "jpg";
        } else {
            extension = mime.extension(file.mimetype);
        }
        cb(null, file.fieldname + '-' + Date.now() + '.' + extension);
    }
});
// upload, used by the route above, is created from this storage config
var upload = multer({ storage: storage });
Can you help me to figure out what the problem is here?
They're both async functions. How can you guarantee both are done before the function exits? I recommend using a promise library like Bluebird. With it you can run multiple async functions at the same time and specify what happens when they both return.
Don't forget to 'promisify' any callback-based libraries that you want to treat as promises. Your code will look something like:
router.post('/blah', function (req, res) {
    return Promise.all([myFunc1(), myFunc2()])
        .then(results => ({ f1val: results[0], f2val: results[1] }));
});
I know some asshat is going to come along and take my answer and write out the code for you so that all you have to do is copy paste it, but I really do hope that you take the time to learn WHY and HOW it works if you do not already know.
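For what it's worth, here is a minimal sketch of that coordination applied to the question's route, using plain native Promises (no Bluebird needed); the response handling at the end is illustrative, not from the original code:
router.post('/image', upload.single('file'), function (req, res) {
    // wrap each callback-style operation in a Promise by hand
    var saveUser = new Promise(function (resolve, reject) {
        Users.findOne({userid: req.body.userid}, function (err, user) {
            if (err) return reject(err);
            user.user_photos.push(req.file.path.split("public\\")[1]);
            user.save(function (err) { return err ? reject(err) : resolve(); });
        });
    });
    var cropImage = new Promise(function (resolve, reject) {
        lwip.open(req.file.path, function (err, image) {
            if (err) return reject(err);
            image.batch().crop(200, 200).writeFile(req.file.path, function (err) {
                return err ? reject(err) : resolve();
            });
        });
    });
    // respond only once both async operations have finished
    Promise.all([saveUser, cropImage])
        .then(function () { res.sendStatus(200); })
        .catch(function (err) { res.status(500).send(err.message); });
});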
I am able to upload a file using openUploadStream of GridFSBucket and can see that the file is uploaded and visible in the songs.files and songs.chunks collections. But for some reason, I get the following error while trying to download it -
Caught exception: Error: FileNotFound: file def1.txt was not found
My code is -
var express = require('express');
var gridModule = express.Router();
var mongoose = require('mongoose');
var fs = require('fs');
var assert = require('assert'); // used in the stream error handlers below
gridModule.post('/', (req, res) => {
    console.log("::::grid");
    //const gridfs = new mongoose.mongo.GridFSBucket(mongoose.connection.db);
    //const writeStream = gridfs.openUploadStream('test.dat');
    var gridfs = new mongoose.mongo.GridFSBucket(mongoose.connection.db, {
        chunkSizeBytes: 1024,
        bucketName: 'songs'
    });
    fs.createReadStream('./def.txt').
        pipe(gridfs.openUploadStream('def1.txt')).
        on('error', function (error) {
            assert.ifError(error);
        }).
        on('finish', function () {
            console.log('done!');
            process.exit(0);
        });
});
gridModule.get('/', (req, res) => {
    var gridfs = new mongoose.mongo.GridFSBucket(mongoose.connection.db, {
        chunkSizeBytes: 1024,
        bucketName: 'songs'
    });
    /* var bucket = new mongodb.GridFSBucket(db, {
        chunkSizeBytes: 1024,
        bucketName: 'songs'
    }); */
    gridfs.openDownloadStream('def1.txt').
        pipe(fs.createWriteStream('./def1.txt')).
        on('error', function (error) {
            console.log(":::error");
            assert.ifError(error);
        }).
        on('finish', function () {
            console.log('done!');
            process.exit(0);
        });
});
module.exports = gridModule;
I tried using the ObjectId as well, but I get the same error. Does anyone have a guess as to what I may be doing wrong here?
Note - the code may not seem optimized here (e.g. the bucket is declared twice); kindly ignore that for now, as I will correct it once it works.
According to the API doc here, in order to look a file up by filename you should use
openDownloadStreamByName(filename, options)
not openDownloadStream. openDownloadStream takes the id of the file.
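For example, the download route in the question could open its stream like this (same bucket configuration, only the method changes):
gridfs.openDownloadStreamByName('def1.txt').
    pipe(fs.createWriteStream('./def1.txt'));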
Another possible explanation for this, if you are already calling openDownloadStream with an id and still get the FileNotFound error even though you are 100% sure the id is correct, is that you didn't pass an actual ObjectId type.
In my case, I was passing the id as a string instead of as an ObjectId:
// works: the string id is converted to an ObjectId first
bucket.openDownloadStream(mongoose.Types.ObjectId(id));
// fails with FileNotFound when id is a plain string
bucket.openDownloadStream(id);
I used promises, callbacks and an external API for the first time, but I'm not sure this is the best way to use them.
My program translates words from one language to another using a pivot language and the systran.io API.
The translate function translates a word and returns the response via a callback.
Then, in the POST handler, I used promises to chain the two translations.
var express = require('express');
var request = require('request');
var router = express.Router();
router.post("/", function(req, res) {
console.log
var resultat
var promise = new Promise((resolve, reject) => {
translate(req.query.source, "en", req.query.content, function(resa) {
resolve(resa);
})
}).then(function(resolve) {
console.log(resolve);
translate("en", req.query.target, resolve, function(resa2) {
console.log(resa2);
})
});
});
function translate(source, target, content, callback) {
    request("https://api-platform.systran.net/translation/text/translate?input=" + content + "&source=" + source + "&target=" + target + "&key=xxxxxxxx-783f-4f90-aea4-7fb357016647", function (err, data, body) {
        body = JSON.parse(body);
        console.log(body);
        callback(body.outputs[0].output);
    });
}
module.exports = router;
Is there a better way to write my program, which is already working?
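One tidier arrangement, just as a sketch: translateP below is a hypothetical promise-returning wrapper around the existing translate, so the two calls can be chained and the final result sent back to the client (the original code never responds):
// hypothetical wrapper: resolves with the translated text
function translateP(source, target, content) {
    return new Promise(function (resolve) {
        translate(source, target, content, resolve);
    });
}

router.post("/", function (req, res) {
    translateP(req.query.source, "en", req.query.content)
        .then(function (pivot) {
            // second hop: pivot language to the requested target
            return translateP("en", req.query.target, pivot);
        })
        .then(function (result) {
            res.send(result);
        });
});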
I have the following Express server set up (server is just express() from another file). I am sure there is a way to simplify this to only one server.get(), but I haven't been able to figure out how. Any help or pointers in the right direction would be appreciated.
module.exports.api = function (server, fs) {
    server.get('/api/getData/:uuid', function (req, res) {
        fs.readFile(__dirname + '/data.json', function (err, data) {
            if (err) throw err;
            data = JSON.parse(data);
            data.forEach(function (match) {
                match['uuid'] = match['x'] + '-' + match['y'];
            });
            var match = data.filter(function (e) {
                return e.uuid == req.params.uuid;
            })[0];
            res.send(200, match);
        });
    });
    server.get('/api/getData', function (req, res) {
        fs.readFile(__dirname + '/data.json', function (err, data) {
            if (err) throw err;
            data = JSON.parse(data);
            data.forEach(function (match) {
                match['uuid'] = match['x'] + '-' + match['y'];
            });
            res.send(200, data);
        });
    });
};
Here's a solution that just moves the common code into a shared function, yet still uses the two routes for routing clarity:
const fs = require('fs');
const path = require('path');

function getData(res, uuid) {
    fs.readFile(path.join(__dirname, 'data.json'), function (err, fileData) {
        if (err) {
            return res.send(500);
        }
        let data = JSON.parse(fileData);
        data.forEach(function (match) {
            match['uuid'] = match['x'] + '-' + match['y'];
        });
        if (uuid) {
            // narrow the full list down to the single matching record
            data = data.filter(function (e) {
                return e.uuid == uuid;
            })[0];
        }
        res.send(200, data);
    });
}
module.exports.api = function (server, fs) {
    server.get('/api/getData/:uuid', function (req, res) {
        getData(res, req.params.uuid);
    });
    server.get('/api/getData', function (req, res) {
        getData(res);
    });
};
This changes the following things:
- Puts the shared code into a getData() function that is called from both routes.
- Sends an error response if fs.readFile() fails.
- Creates a new local variable rather than assigning back to a function argument, which is now considered a less desirable practice because it prevents some interpreter optimizations.
- Uses path.join() to join parts of a path in a more cross-platform way.
FYI, unless the data in data.json actually changes from time to time, you could just read this data into a variable once and then cache it rather than rereading it on every one of these requests.
Note: You could use routing wildcards to reduce your code to a single route, but this is mostly considered an anti-pattern, because wildcards often match much more than you want, creating situations where you have to manually trigger 404 errors for URLs you never intended to match. So it is considered good practice to explicitly declare the routes you intend to match and share the implementation code, rather than collapsing things down to a single route that matches more than one form of URL.
There are, of course, always exceptions, but remember that the goal is clear, correct, maintainable, reliable code, not necessarily the fewest number of routes.
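For illustration, the single-route collapse this advises against would look something like the following, using Express's optional-parameter syntax with the same getData() from above:
// one route with an optional :uuid param handles both URL forms
server.get('/api/getData/:uuid?', function (req, res) {
    getData(res, req.params.uuid); // uuid is undefined for /api/getData
});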
If you just want to cache the data.json data at server start-up time, you can use require() to load and parse it for you, like this, and then there's really no reason for the shared function:
const cacheData = require('./data.json');
cacheData.forEach(function (match) {
    match['uuid'] = match['x'] + '-' + match['y'];
});

module.exports.api = function (server, fs) {
    server.get('/api/getData/:uuid', function (req, res) {
        let match = cacheData.filter(function (e) {
            return e.uuid == req.params.uuid;
        })[0];
        res.send(match);
    });
    server.get('/api/getData', function (req, res) {
        res.send(cacheData);
    });
};
I am working on a NodeJS project where I need to update a table and afterwards restart a service. Unfortunately the service restarts before the table has been updated, so I assume this is normal async behaviour.
How do I synchronize this?
var sqlite3 = require('sqlite3').verbose();
var express = require('express');
var exec = require('child_process').exec; // used below to restart the service
var app = express();
var router = express.Router();
var db = new sqlite3.Database('/home/test/testApp.db', 'OPEN_READWRITE');
router.route('/')
    .get(function (req, res) {
        res.render('index', { data: dbConfigRow });
    })
    .post(function (req, res) {
        // console.log(req.body);
        db.serialize(function () {
            for (var key in req.body) {
                db.run("UPDATE config SET " + key + "='" + req.body[key] + "'");
            }
            exec('systemctl restart demoApp');
        });
        res.json(200);
    });
You should check out Async or any one of the popular promise libraries (When.js, Q.js, Bluebird).
Any of these should solve your problem. In Async it might look something like this using series:
.post(function (req, res) {
    // assumes: var async = require('async');
    async.series([
        function (callback) {
            db.serialize(function () {
                for (var key in req.body) {
                    db.run("UPDATE config SET " + key + "='" + req.body[key] + "'");
                }
                callback();
            });
        },
        function (callback) {
            // child_process.exec is asynchronous, so signal completion
            // from its callback rather than right after calling it
            exec('systemctl restart demoApp', function (err) {
                callback(err);
            });
        }
    ],
    function (error, results) { // using the optional final callback
        res.send(200);
    });
});
Note that db.run() is actually asynchronous; db.serialize() only guarantees the statements are queued in order, so to be fully safe you would invoke callback() from the completion callback of the last db.run().
All this said, it looks like your current implementation was returning 200 before it had finished all the db/restarting tasks. You might also try simply moving the response until after the exec completes. Let me know if this solves your issue.
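A minimal sketch of that fully safe ordering, with no extra library: each db.run() gets a completion callback, and the restart and response only happen once the last UPDATE has finished (this assumes req.body is non-empty, and keeps the original string-built SQL as-is):
.post(function (req, res) {
    var keys = Object.keys(req.body);
    var remaining = keys.length;
    db.serialize(function () {
        keys.forEach(function (key) {
            db.run("UPDATE config SET " + key + "='" + req.body[key] + "'", function () {
                // fires when this UPDATE completes
                if (--remaining === 0) {
                    // all UPDATEs are done; now it is safe to restart
                    exec('systemctl restart demoApp', function (err) {
                        res.json(err ? 500 : 200);
                    });
                }
            });
        });
    });
});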
I've found a few articles explaining the process, but most of them are not up to date.
How do you handle image upload in node.js?
I'm using multer and it works perfectly. It stores your image locally, and you can also send it to MongoDB if you want. This is how I am doing it.
var multer = require('multer');
var done = false;

// define the model you are working with
var Slides = require('./models/work');

// note: this is the multer 0.x options API; multer 1.x and later replaced
// these options with storage engines (see the diskStorage example above)
app.use(multer({
    dest: './public/img',
    rename: function (fieldname, filename) {
        return filename + Date.now();
    },
    onFileUploadStart: function (file) {
        console.log(file.originalname + ' is starting ...');
    },
    onFileUploadComplete: function (file) {
        console.log(file.fieldname + ' uploaded to ' + file.path);
        done = true;
        var id = file.fieldname;
        var str = file.path;
        var image = str.replace('public', '');
        var slidegegevens = {
            "id": id,
            "img": image
        };
        var s = new Slides(slidegegevens);
        s.save(function (err, slidegegevens) {
            console.log(err);
            console.log('slidegegevens: ' + slidegegevens);
        });
    }
}));
I use busboy middleware in Express to parse out images in a multipart/form-data request, and it works pretty nicely.
My code looks something like:
const busboy = require('connect-busboy');
//...
app.use(busboy());
app.use(function parseUploadMW(req, res, next) {
    req.busboy.on('file', function onFile(fieldname, file, filename, encoding, mimetype) {
        file.fileRead = [];
        file.on('data', function onData(chunk) {
            this.fileRead.push(chunk);
        });
        file.on('error', function onError(err) {
            console.log('Error while buffering the stream: ', err);
            // handle error
        });
        file.on('end', function onEnd() {
            var finalBuffer = Buffer.concat(this.fileRead);
            req.files = req.files || {};
            req.files[fieldname] = {
                buffer: finalBuffer,
                size: finalBuffer.length,
                filename: filename,
                mimetype: mimetype.toLowerCase()
            };
        });
    });
    req.busboy.on('finish', function onFinish() {
        next();
    });
    req.pipe(req.busboy);
});
Then files will be in the req object for you at req.files in your express routes.
This technique works fine for small images. If you are doing some hardcore uploading, you may want to consider streaming the files to their destination (to save memory) - like S3 or similar - which can also be achieved with busboy.
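For instance, a streaming variant of the 'file' handler inside the middleware above would pipe each incoming file straight to its destination instead of buffering chunks in memory (the './uploads' directory here is illustrative):
const fs = require('fs');
const path = require('path');

req.busboy.on('file', function (fieldname, file, filename) {
    // pipe straight to disk (or to an S3 upload stream) so the whole
    // file is never held in memory
    file.pipe(fs.createWriteStream(path.join('./uploads', path.basename(filename))));
});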
Another package that is popular and also decent is: https://github.com/andrewrk/node-multiparty.
I think it is better to use formidable to handle incoming images.
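For completeness, a minimal formidable sketch (the '/upload' route is illustrative) looks something like this:
const formidable = require('formidable');

app.post('/upload', function (req, res) {
    const form = new formidable.IncomingForm();
    // parses the multipart request; files are written to a temp dir by default
    form.parse(req, function (err, fields, files) {
        if (err) return res.status(500).send(err.message);
        res.send(files);
    });
});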