I want to save an image in my database rather than on my server.
This is my model:
var mongoose = require('mongoose');
var Schema = mongoose.Schema;
var categorySchema = new Schema({
img: { data: Buffer, contentType: String },
});
module.exports = mongoose.model('Category', categorySchema);
Now in my router I'm receiving the image as a base64 data URL wrapped in an object:
{ '$ngfDataUrl': 'data:image/png;base64,long-String' }
I want to know how to save this information in my Mongoose database. So far I have this:
router.post('/add', function (req, res) {
var category = new Category();
category.img.data = req.body.category.img;
category.img.contentType = 'image/png';
category.save(function (err) {
if (err) throw new Error(err);
res.sendStatus(200);
});
});
But obviously that is not working; I get this error:
Error: ValidationError: CastError: Cast to Buffer failed for value "{
'$ngfDataUrl': 'data:image/png;base64, long-String'}
Thanks in advance; I'm new to saving files.
You'll want to actually pull out the base64 string from the request body, and save that.
Is req.body.category.img equal to { '$ngfDataUrl': 'data:image/png;base64,long-String' }, where long-String is the base64 representation of the image?
If so, do something like this:
const category = new Category();
const img = req.body.category.img;
const data = img['$ngfDataUrl'];
const split = data.split(','); // or whatever is appropriate here. this will work for the example given
const base64string = split[1];
const buffer = Buffer.from(base64string, 'base64');
category.img.data = buffer;
// carry on as you have it
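Putting that together with the route from the question, a minimal sketch might look like the following. It assumes the payload always arrives in the { '$ngfDataUrl': ... } shape shown above; pulling the MIME type out of the data URL is an extra touch, not something from the original post.
router.post('/add', function (req, res) {
    const category = new Category();

    // req.body.category.img is { '$ngfDataUrl': 'data:image/png;base64,<long-String>' }
    const data = req.body.category.img['$ngfDataUrl'];
    const [meta, base64string] = data.split(',');

    category.img.data = Buffer.from(base64string, 'base64');
    // e.g. 'data:image/png;base64' -> 'image/png'
    category.img.contentType = meta.substring(meta.indexOf(':') + 1, meta.indexOf(';'));

    category.save(function (err) {
        if (err) return res.status(500).send(err.message);
        res.sendStatus(200);
    });
});
Responding with a 500 instead of throwing keeps a malformed payload from crashing the process.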
I am creating a REST API in Node.js using the Express framework. I am trying to read data from a CSV file, store it in MongoDB, and later perform CRUD operations on that data.
My approach:
Call a method to read the CSV file.
Create an array and add the rows to that array.
Iterate over the list and convert it to JSON (this is where I am stuck).
Please help! I am new to Node.js.
const express = require('express')
const app= express();
const parse = require('csv-parse');
const fs = require('fs');
//require('arrayList');
// var list= new arr.ArrayList;
var list= new Array();
fs.createReadStream('datasample.csv')
.pipe(parse({delimiter: ':'}))
.on('data', (row) => {
list.push(row)
console.log(row);
})
.on('end', () => {
console.log('CSV file successfully processed');
console.log(list)
for (var i in list) {
var str= list[i].toString()
var res= str.split(",")
//const post= new Post();
console.log(res);
//console.log(list[i]);
}
});
The schema I am using:
const mongoose= require("mongoose");
const employeeSchema= mongoose.Schema(
{
employee_id:{
type: Number,
required: true
} ,
employee_name:String
}
)
module.exports= mongoose.model("Post",employeeSchema)
You can find an approach for importing CSV file data into MongoDB at the link below:
http://programmerblog.net/import-csv-file-using-nodejs/
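Beyond the link, here is a minimal sketch (not taken from that article) of one way to map parsed CSV rows onto the employee schema above and bulk-insert them. It assumes the CSV has a header row employee_id,employee_name and that the model above is exported from ./models/post; both of those are assumptions, not facts from the question.
const fs = require('fs');
const parse = require('csv-parse');
const mongoose = require('mongoose');
const Post = require('./models/post'); // hypothetical path to the model exported above

mongoose.connect('mongodb://localhost:27017/test', { useNewUrlParser: true });

const rows = [];

fs.createReadStream('datasample.csv')
  .pipe(parse({ delimiter: ',', columns: true, trim: true })) // columns: true yields objects keyed by the header row
  .on('data', (row) => {
    // cast each CSV field to the type the schema expects
    rows.push({
      employee_id: Number(row.employee_id),
      employee_name: row.employee_name
    });
  })
  .on('end', async () => {
    try {
      await Post.insertMany(rows); // one bulk write for all parsed rows
      console.log(`Inserted ${rows.length} documents`);
    } catch (err) {
      console.error(err);
    } finally {
      mongoose.connection.close();
    }
  });
insertMany sends the whole file as a single bulk write; if individual bad rows should be skipped rather than aborting the batch, passing { ordered: false } lets the remaining inserts continue.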
I have an image as a base64 data URL, e.g.
data:image/jpeg;base64,/9j/4AAQSkZJRgABAgAAAQABAAD/7QCcUGhvdG9zaG9w....
How do I save it in the database? What should the type of the field be in the schema? Buffer?
The short answer is to store it as "Binary", which in a mongoose schema you can do with Buffer.
The longer form is to demonstrate a round trip of conversion starting with the original binary and back again. Base64 encoding/decoding is not a necessary step in most real world cases, and is just there for demonstration:
Read an image (or any binary data) from file
Base64 encode that data (just to show it can be done) - optional
Turn back into Binary data from Base64 (just to show it can be done) - optional
Store Binary data in the database
Read Binary data from the database
Output binary data to a new file
So the Schema Part is simple, just use Buffer:
var albumSchema = new Schema({
name: String,
image: Buffer
})
Then all we are going to do is follow the process and put the binary data into the property and read it back out again.
Note though that if you are coming directly from a string with a MIME type on it like:
data:image/png;base64,long-String
Just use a JavaScript .split() and take the second array index for the base64 string itself:
var string = "data:image/png;base64,long-String"
var bindata = Buffer.from(string.split(",")[1], "base64");
Here's a listing with a complete demo in and out:
const async = require('async'),
mongoose = require('mongoose'),
Schema = mongoose.Schema,
fs = require('fs');
mongoose.Promise = global.Promise;
mongoose.set('debug',true);
mongoose.connect('mongodb://localhost/test');
var albumSchema = new Schema({
name: String,
image: Buffer
})
const Album = mongoose.model('Album', albumSchema);
async.series(
[
(callback) =>
async.each(mongoose.models,(model,callback) =>
model.remove({},callback),callback),
(callback) =>
async.waterfall(
[
(callback) => fs.readFile('./burger.png', callback),
(data,callback) => {
// Convert to Base64 and print out a bit to show it's a string
let base64 = data.toString('base64');
console.log(base64.substr(0,200));
// Feed out string to a buffer and then put it in the database
let burger = Buffer.from(base64, 'base64');
Album.create({
"title": "burger",
"image": burger
},callback)
},
// Get from the database
(album,callback) => Album.findOne().exec(callback),
// Show the data record and write out to a new file.
(album, callback) => {
console.log(album);
fs.writeFile('./output.png', album.image, callback)
}
],
callback
)
],
(err) => {
if (err) throw err;
mongoose.disconnect();
}
)
NOTE: The example above was originally written with asyncJS and an older mongoose API, which notably has different connection options than the more modern and current API shown in the examples below. Refer to those instead when testing on current NodeJS LTS releases.
Here is the same demo with a bit more modern syntax and API usage for comparison:
const fs = require('mz/fs');
const { Schema } = mongoose = require('mongoose');
const uri = 'mongodb://localhost:27017/test';
const opts = { useNewUrlParser: true };
mongoose.Promise = global.Promise;
mongoose.set('debug', true);
mongoose.set('useFindAndModify', false);
mongoose.set('useCreateIndex', true);
const albumSchema = new Schema({
name: String,
image: Buffer
});
const Album = mongoose.model('Album', albumSchema);
(async function() {
try {
const conn = await mongoose.connect(uri, opts);
await Promise.all(
Object.entries(conn.models).map(([k, m]) => m.deleteMany())
)
let data = await fs.readFile('./burger.png');
// Convert to Base64 and print out a bit to show it's a string
let base64 = data.toString('base64');
console.log(base64.substr(0,200));
// Feed out string to a buffer and then put it in the database
let burger = Buffer.from(base64, 'base64');
await Album.create({ "name": "burger", "image": burger });
// Get from the database
// - for demo, we could have just used the return from the create() instead
let album = await Album.findOne();
// Show the data record and write out to a new file.
console.log(album);
await fs.writeFile('./output.png', album.image)
} catch(e) {
console.error(e);
} finally {
mongoose.disconnect()
}
})()
And even with "plain promises", where that is either preferred or you are still using a NodeJS version without async/await support. But you really should not be, considering v6.x reaches end of life in April 2019:
// comments stripped - refer above
const fs = require('mz/fs');
const { Schema } = mongoose = require('mongoose');
const uri = 'mongodb://localhost:27017/test';
const opts = { useNewUrlParser: true };
mongoose.Promise = global.Promise;
mongoose.set('debug', true);
mongoose.set('useFindAndModify', false);
mongoose.set('useCreateIndex', true);
const albumSchema = new Schema({
name: String,
image: Buffer
});
const Album = mongoose.model('Album', albumSchema);
mongoose.connect(uri, opts)
.then(conn =>
Promise.all(
Object.entries(conn.models).map(([k, m]) => m.deleteMany())
)
)
.then(() => fs.readFile('./burger.png'))
.then(data => {
let base64 = data.toString('base64');
console.log(base64.substr(0,200));
let burger = Buffer.from(base64, 'base64');
return Album.create({ "name": "burger", "image": burger });
})
.then(() => Album.findOne() )
.then(album => {
console.log(album);
return fs.writeFile('./output.png', album.image)
})
.catch(console.error)
.then(() => mongoose.disconnect());
A sample burger.png to play with was attached to the original answer.
I did some research about storing images in a MongoDB database using Meteor, and I found this code, but I got stuck before getting as far as storing anything in the database:
var fs = Npm.require('fs');
// function to encode file data to base64 encoded string
function base64_encode(file) {
// read binary data
var bitmap = fs.readFileSync(file);
// convert binary data to base64 encoded string
return new Buffer(bitmap).toString('base64');
}
// convert image to base64 encoded string
var base64str = base64_encode('Chrysanthemum.jpg');
console.log(base64str);
The problem is that Npm.require('fs'); doesn't work on the client side.
If you have a solution for this, or another way to store images in MongoDB such as a Meteor plugin with a progress bar (for multiple images using BinData), please help me. Thank you in advance.
The most common approach would be to use CollectionFS to store data in Mongo using its built-in GridFS support. This also lets you work around the 16 MB document size limit, and it provides a set of useful helper functions on both the client and server side.
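For orientation, a minimal sketch of what that can look like, assuming the cfs:standard-packages and cfs:gridfs packages are installed; the collection name, store name, and the upload template are all illustrative:
// Shared code (client + server): declare a collection backed by a GridFS store
Images = new FS.Collection("images", {
  stores: [new FS.Store.GridFS("imageStore")]
});

// Client: insert the File object from an <input type="file"> directly;
// CollectionFS streams the upload to the server for you.
Template.upload.events({
  'change input[type=file]': function (event) {
    var file = event.target.files[0];
    Images.insert(file, function (err, fileObj) {
      if (err) console.error(err);
    });
  }
});
The inserted fileObj tracks upload progress reactively, which is what makes wiring up a progress bar for multiple images straightforward.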
const mongoose = require("mongoose");
const { Schema } = mongoose;
const fs = require('fs');
const path = require('path');
let uri = "mongodb://localhost:27017/testBin";
mongoose.connect(uri, {
useUnifiedTopology: true,
useCreateIndex: true,
useNewUrlParser: true
}).then(async(db) => {
console.log("connected success");
const blogSchema = new Schema({
file: { type: Buffer }
}, { strict: false });
const Blog = mongoose.model('mycollection', blogSchema, "mycollection");
console.log("path.resolve('./index.js') ", path.resolve(__dirname, 'index.js'));
const file = fs.readFileSync(path.resolve(__dirname, 'index.js'))
await new Blog({ file }).save()
mongoose.connection.close()
}).catch(err => {
console.log(err);
})
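To confirm the round trip, a short companion script under the same assumptions reads the stored document back and writes the buffer out to a new file:
const mongoose = require("mongoose");
const fs = require('fs');
const path = require('path');

const uri = "mongodb://localhost:27017/testBin";

mongoose.connect(uri, {
    useUnifiedTopology: true,
    useCreateIndex: true,
    useNewUrlParser: true
}).then(async () => {
    const blogSchema = new mongoose.Schema({ file: { type: Buffer } }, { strict: false });
    const Blog = mongoose.model('mycollection', blogSchema, "mycollection");

    // mongoose casts the stored BSON Binary back to a Buffer via the schema
    const doc = await Blog.findOne();
    fs.writeFileSync(path.resolve(__dirname, 'index.copy.js'), doc.file);

    mongoose.connection.close();
}).catch(err => {
    console.log(err);
});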
Hey, so I am trying to use GridFS to store profile pictures. I am not sure what I am doing wrong; I am kind of new to this as well.
This is what I have so far, but it throws this error:
TypeError: Cannot read property 'primary' of undefined
at Stream.GridStore.open (..\node_modules\mongodb\lib\mongodb\gridfs\gridstore.js:146:69)
This is my code:
var mongoose = require('mongoose'),
Schema = mongoose.Schema,
ObjectId = mongoose.Schema.Types.ObjectId;
var GridStore = require('mongodb').GridStore;
var Step = require('step');
exports.newProfilePicturePost = function(req, res) {
var db = mongoose.createConnection('mongodb://localhost/test5');
var fileId = new ObjectId();
var gridStore = new GridStore(db, fileId, "w", {
"metadata": {
category: 'image'
}, content_type: 'image'});
gridStore.chunkSize = 1024 * 256;
gridStore.open(function(err, gridStore) {
Step(
function writeData() {
var group = this.group();
for(var i = 0; i < 1000000; i += 5000) {
gridStore.write(new Buffer(5000), group());
}
},
function doneWithWrite() {
gridStore.close(function(err, result) {
console.log("File has been written to GridFS");
});
}
)
});
};
Any help or fixes I should make to my code are welcome. I am also not sure how to specify the collection I want to store the picture in, and I want to add the userId to the picture being saved in the collection so I can fetch it later.
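No answer is recorded here, but a likely cause of that error is that GridStore expects the native driver's Db object and an open connection, while mongoose.createConnection() returns a mongoose Connection wrapper that has not finished connecting yet. A hedged sketch of that fix follows; the 'profile_pictures' root and the source of the userId are illustrative assumptions, and the dummy Buffer write mirrors the question:
var mongoose = require('mongoose');
var GridStore = require('mongodb').GridStore;
var ObjectId = mongoose.Types.ObjectId; // mongoose.Types.ObjectId creates ids; Schema.Types.ObjectId is only a schema type

exports.newProfilePicturePost = function(req, res) {
    var conn = mongoose.createConnection('mongodb://localhost/test5');

    // wait for the connection to open before touching GridStore
    conn.once('open', function() {
        var fileId = new ObjectId();

        // conn.db is the underlying native Db instance that GridStore expects
        var gridStore = new GridStore(conn.db, fileId, "w", {
            root: 'profile_pictures',      // stores into profile_pictures.files / profile_pictures.chunks
            metadata: {
                category: 'image',
                userId: req.user._id       // hypothetical: however the logged-in user's id is exposed in your app
            },
            content_type: 'image/png'
        });

        gridStore.open(function(err, gridStore) {
            if (err) return console.error(err);
            gridStore.write(Buffer.alloc(5000), function(err) {  // dummy payload, as in the question
                gridStore.close(function(err, result) {
                    console.log("File has been written to GridFS");
                });
            });
        });
    });
};
The root option is what selects the GridFS collection pair, and putting the userId in metadata means the picture can later be looked up by querying profile_pictures.files on metadata.userId.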
I'm trying to use a new schema in my db, but I get errors when trying to instantiate it. I have two other schemas (in two different model files in the "models" folder) that work perfectly, and they are shaped the same way. What does the error message mean, and what can I do differently to prevent it from occurring?
I don't think it's a problem with the other code in the controller, because I've tried to instantiate another db model in the same place using the same syntax, and that works fine.
The error I get: 500 TypeError: object is not a function
at Schema.CALL_NON_FUNCTION_AS_CONSTRUCTOR (native)
Sorry for all the code below. I didn't know what I could exclude in this case.
Anyway, thanks in advance!
controller file:
module.exports = function(app, service) {
var imageModel = service.useModel('image');
app.post('/file-upload', function(req, res, next) {
// other code...
var imageAdd = new imageModel.ImgSchema();
});
}
mongodb model (models/image.js):
module.exports = function (mongoose) {
var modelObject = {};
var Schema = mongoose.Schema,
ObjectId = Schema.ObjectId;
var ImgSchema = new Schema({
name : String,
size : Number,
type : String
});
modelObject.ImgSchema = ImgSchema;
modelObject.Images = mongoose.model('Images', ImgSchema);
return modelObject;
};
For mongodb I'm using a service file (service.js):
var environment;
var mongoose = require('mongoose');
module.exports.init = function(env, mongoose) {
environment = env;
mongoose = mongoose;
};
module.exports.useModel = function (modelName) {
var checkConnectionExists = (mongoose.connection.readyState === 1 || mongoose.connection.readyState === 2);
if(!checkConnectionExists)
mongoose.connect(environment.db.URL);
return require("./models/" + modelName)(mongoose);
};
module.exports.useModule = function (moduleName) {
return require("./modules/" + moduleName);
};
modelObject.ImgSchema is not a constructor; however, modelObject.Images is.
var imageAdd = new imageModel.Images();
I'd probably rename Images to Image, too.
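Applied to the snippets above, that rename (purely a naming preference) would look like this:
// models/image.js
modelObject.Image = mongoose.model('Image', ImgSchema);

// controller
var imageAdd = new imageModel.Image({
    name: 'example.png',  // illustrative values matching the schema fields
    size: 1024,
    type: 'image/png'
});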