In my case I got the data from firestore now how can I save it to
serversettings.json:
var temp = {}
let query = db.collection('guilds')
let data = query.get().then(snapshot => {
snapshot.forEach(doc => {
console.log(doc.id, '=>',doc.data());
})
so I get the output as:
637301291068030997 => { welcomeChannelID: '<#648968505160630285>',
guildMemberCount: 4,
guildOwnerID: '348832732647784460',
guildOwner: 'Ethical Hacker',
prefix: '.',
guildID: '637301291068030997',
guildName: 'test server 3' }
and this:
GUqGqFanJuN7cRJx4S2w => {}
I need to save that data to serversettings.json
// BUG FIX: `await` on the callback-style fs.writeFile is a no-op — that function
// returns undefined, not a promise, so nothing was actually awaited. Use the
// promise API so the await really waits for the write to finish.
await fs.promises.writeFile("../serversettings.json", JSON.stringify(temp));
console.log('done');
here temp is variable where multiple data is stored like a:{},b:{}....
i tried var temp = {} temp.table = [] and then temp.table.push(doc.id, ':',doc.data())
but I get empty output, so what can I do to get the expected output?
Also, adding to that: how can I update a value if that object is already present in the JSON file? Will the function above work the same way — will it update just that value, or delete all the other values? For example, to update the prefix from '.' to ',' I would call await fs.writeFile("../serversettings.json", JSON.stringify(temp), ...) where the temp object contains only the guild id and the prefix field. Will it update only the prefix without deleting anything else in that array?
HERE is the code that added stuff to temp variable
// NOTE(review): this is the buggy version discussed below.
var temp = {}
temp.guilds = [] // after some lines
snapshot.forEach(doc => {
console.log(doc.id, '=>',doc.data()); // output is above this code
// BUG: `doc.id = doc.data()` is an ASSIGNMENT, not a key/value pair — it overwrites
// doc.id with the data object and pushes only doc.data(), which is why the id
// is missing from the output below.
temp.guilds.push(doc.id = doc.data()) // output is below this code
})
Above codes output
{ guilds:
[ { guildID: '637301291068030997', // here missing doc.id field
guildName: 'test server 3',
welcomeChannelID: '-',
guildMemberCount: 4,
guildOwnerID: '348832732647784460',
guildOwner: 'Ethical Hacker',
prefix: '.' },
{} // this missing thing before {} is (some number) also bracket is empty by the way so no worries
]
}
A fast solution for your issue would be to replace
let data = query.get().then(snapshot => {
with
await query.get().then(snapshot => {
so that your temp object can be filled before the program proceeds to save the file.
I haven't used writeFile yet, but here's what its documentation says:
When file is a filename, asynchronously writes data to the file, replacing the file if it already exists.
I don't think your object will be so large that a complete overwrite would be a problem, unless it's changing very often. In that case, I guess you'd have to use a different method that can support an offset, so that you can write only what has changed, but that seems like a real overkill.
Regarding the format of your JSON file, I think what you're trying to do is this:
// Collect every document under its id so the serialized JSON keeps the doc ids
// as object keys (instead of losing them in an array).
const temp = { guilds: {} };
snapshot.forEach((doc) => {
  console.log(doc.id, '=>', doc.data());
  temp.guilds[doc.id] = doc.data();
});
I'm not sure, but Firebase may have a method to convert the data from an object to JSON, so I think this solution should work:
let query = db.collection('guilds')
// BUG FIX: the original used `await` inside a NON-async arrow function, which is a
// SyntaxError — the callback must be declared `async`. Also, a Firestore
// QuerySnapshot has no documented toJSON() method; build a plain object from the
// docs so JSON.stringify produces the expected { id: data } map.
let data = query.get().then(async snapshot => {
  const temp = {};
  snapshot.forEach(doc => {
    temp[doc.id] = doc.data();
  });
  await fs.promises.writeFile("../serversettings.json", JSON.stringify(temp, null, 2));
  console.log('done');
})
Related
How do I update a value in a json file and save it through node.js?
I have the file content:
// Read the settings file synchronously and parse it into an object.
var file_content = fs.readFileSync(filename);
var content = JSON.parse(file_content);
// Copy of the current value; reassigning this local will NOT change `content` or the file.
var val1 = content.val1;
Now I want to change the value of val1 and save it to the file.
Doing this asynchronously is quite easy. It's particularly useful if you're concerned with blocking the thread (likely). Otherwise, I'd suggest Peter Lyon's answer
const fs = require('fs');
const fileName = './file.json';

// `require` on a .json path parses the file into an object
// (note: the result is cached per resolved path).
const file = require(fileName);

file.key = "new value";

// Persist the mutated object; the callback logs either the error or the written JSON.
fs.writeFile(fileName, JSON.stringify(file), (err) => {
  if (err) return console.log(err);
  console.log(JSON.stringify(file));
  console.log('writing to ' + fileName);
});
The caveat is that json is written to the file on one line and not prettified. ex:
{
"key": "value"
}
will be...
{"key": "value"}
To avoid this, simply add these two extra arguments to JSON.stringify
JSON.stringify(file, null, 2)
null - represents the replacer function. (in this case we don't want to alter the process)
2 - represents the spaces to indent.
// Change the value in the in-memory object.
content.val1 = 42;
// Serialize as JSON and write the whole object back, replacing the file's contents.
fs.writeFileSync(filename, JSON.stringify(content));
// Load file.json into an object, set/overwrite expiry_date, and persist it back.
const raw = fs.readFileSync('file.json', 'utf8');
const content = JSON.parse(raw);
content.expiry_date = 999999999999;
fs.writeFileSync('file.json', JSON.stringify(content));
addition to the previous answer add file path directory for the write operation
// Resolve the target relative to this module's directory. BUG FIX: the original
// empty callback silently swallowed write failures; surface them instead.
fs.writeFile(path.join(__dirname, jsonPath), JSON.stringify(newFileData), function (err) {
  if (err) throw err;
});
I would strongly recommend not to use synchronous (blocking) functions, as they hold other concurrent operations. Instead, use asynchronous fs.promises:
const fs = require('fs').promises

// Read `fn` as JSON, set its `value` property, and write it back.
// Any failure is logged with console.warn and the promise resolves undefined,
// exactly like the original promise chain.
const setValue = async (fn, value) => {
  try {
    const body = await fs.readFile(fn)
    const json = JSON.parse(body)
    // manipulate your data here
    json.value = value
    await fs.writeFile(fn, JSON.stringify(json))
  } catch (error) {
    console.warn(error)
  }
}
Remember that setValue returns a pending promise; you'll need to use the .then function or, within async functions, the await operator.
// await operator
await setValue('temp.json', 1) // save "value": 1
await setValue('temp.json', 2) // then "value": 2
await setValue('temp.json', 3) // then "value": 3
// then-sequence
setValue('temp.json', 1) // save "value": 1
.then(() => setValue('temp.json', 2)) // then save "value": 2
.then(() => setValue('temp.json', 3)) // then save "value": 3
For those looking to add an item to a json collection
/**
 * Append `item` to a JSON array stored at `path`, creating the file if needed.
 * @param {*} item - value to append (must be JSON-serializable)
 * @param {string} [path='./collection.json'] - target file path
 */
function save(item, path = './collection.json'){
    if (!fs.existsSync(path)) {
        // BUG FIX: the async fs.writeFile requires a callback (modern Node throws
        // TypeError without one). Use the sync variant for consistency with the
        // sync read/write in the other branch.
        fs.writeFileSync(path, JSON.stringify([item]));
    } else {
        const data = fs.readFileSync(path, 'utf8');
        // Treat an empty file as an empty collection.
        let list = (data.length) ? JSON.parse(data) : [];
        // If the file held something other than an array, start a fresh list.
        if (list instanceof Array) list.push(item)
        else list = [item]
        fs.writeFileSync(path, JSON.stringify(list));
    }
}
Save data after task completion
// Read sample.json, then write `result` back once the task has completed.
fs.readFile("./sample.json", 'utf8', function readFileCallback(err, data) {
if (err) {
console.log(err);
} else {
// NOTE(review): `result` is not defined in this snippet and `data` is unused —
// presumably `result` is produced from `data` by the omitted task; verify before use.
fs.writeFile("./sample.json", JSON.stringify(result), 'utf8', err => {
if (err) throw err;
console.log('File has been saved!');
});
}
});
Promise based solution [Javascript (ES6) + Node.js (V10 or above)]
const fsPromises = require('fs').promises;

// Read myFile.json, update it, and write it back.
// BUG FIX: the inner writeFile promise was not returned from the .then callback,
// leaving it floating — the outer chain settled before the write finished.
fsPromises.readFile('myFile.json', 'utf8')
    .then(data => {
        let json = JSON.parse(data);
        //// Here - update your json as per your requirement ////
        return fsPromises.writeFile('myFile.json', JSON.stringify(json))
            .then(() => { console.log('Update Success'); })
            .catch(err => { console.log("Update Failed: " + err); });
    })
    .catch(err => { console.log("Read Error: " + err); });
If your project supports Javascript ES8 then you could use asyn/await instead of native promise.
I'm trying to make a queue in node js and want to delete the first JSON object in my JSON file.
I create and store users in a JSON file and then add them in the queue JSON file by reading the users.json. The problem is that when i get the user from user.json it comes as an object within an array, and i cant filter to compare the id. So how can i do this?
// helper method to read JSON FILE.
// Helper: read `filePath` and hand its contents to `callback`,
// JSON-parsed when `returnJson` is true. Throws on read errors.
const readFile = (callback, returnJson = false, filePath = dataPath, encoding = 'utf8') => {
  fs.readFile(filePath, encoding, (err, data) => {
    if (err) throw err;
    const payload = returnJson ? JSON.parse(data) : data;
    callback(payload);
  });
};
// helper method to write on JSON FILE.
// Helper: write `fileData` to `filePath`, then invoke `callback` on success.
// Throws on write errors.
const writeFile = (fileData, callback, filePath = dataPath, encoding = 'utf8') => {
  fs.writeFile(filePath, fileData, encoding, (err) => {
    if (err) throw err;
    callback();
  });
};
// that's how I try to delete the first user from the queue, but it deletes the user with key 1 instead.
// DELETE /popLine — remove the first entry from the queue file.
app.delete('/popLine', (req, res) => {
  readFile(data => {
    // BUG FIX: the queue is an object keyed by position, so `delete data[1]`
    // removed the entry with key "1" (not the first user). Delete whichever
    // key comes first instead, and do nothing on an empty queue.
    const firstKey = Object.keys(data)[0];
    if (firstKey !== undefined) {
      delete data[firstKey];
    }
    writeFile(JSON.stringify(data, null, 2), () => {
      res.status(200).send(`queue id: removed`);
    });
  },
  true);
});
// thats how an user is stored in queue.json
"5": [
{
"name": "teresa may",
"email": "parliament",
"gender": "male",
"id": 3
}
],
I would prefer loading the JSON file as a JSON object (if its manageable) then delete the first entry by key and then persisting the file back on the disk (over-write) periodically or instantaneously which be necessary.
You can do a require to load a JSON file as a JSON object. But note, if you do a re-require of the changed file to reload it again during runtime, it will not get you the changes you made to the file in between but the older content from last require. In that case you need to clear the item from the require cache before getting the changes.
I am assuming the file is in order of KBs.. for larger files I wouldn’t prefer this approach, rather I would figure a way out to get that file data in a NoSQL document database.
Hope this helps :)
I'm making a discord bot with node.js and I have a .txt file with 3 lines:
User1|421
User2|5543
User3|12
And I want to parse only numbers after '|', I've used
// Read the balances file and echo its raw contents to the Discord channel.
fs.readFile('balance.txt', 'utf8', function(err, contents) {
// NOTE(review): `err` is ignored — on a read failure `contents` is undefined
// and that is what gets sent; consider handling the error first.
message.channel.sendMessage(contents);
})
to get the text from it, what I want to do is check if the user that sends the request to check balance exists in the list, and if it exists print out the balance.
I'd like for the input to be
!balance
and output to be
$43
while checking:
User exists?
Get line in which the user is in
Get number after '|' and store it in a temporary variable to print
Thanks!
I'd try this approach, read this file into memory and create an accessor function for it:
const fs = require("fs");
const os = require('os');

// Read balance.txt, parse the user|balance lines, and report User3's balance.
fs.readFile('balance.txt', 'utf8', (err, contents) => {
  if (err) {
    console.error("Error occurred: ", err);
    return;
  }
  const balancesDetails = getUserBalances(contents);
  const user3BalanceDetails = balancesDetails.find((entry) => entry.user === "User3");
  console.log("User 3 balance: ", user3BalanceDetails.balance);
  // Send the balance if required
  //message.channel.sendMessage(user3BalanceDetails.balance);
});
// Split the file into lines on the platform EOL and turn each "user|balance"
// pair into a { user, balance } record, with balance parsed as a number.
function getUserBalances(fileContents) {
  const lines = fileContents.split(os.EOL);
  return lines.map((line) => {
    const [user, balance] = line.split("|");
    return { user, balance: parseFloat(balance) };
  });
}
An example of this working in plain JavaScript would be like below (the only real difference is we can't use the OS object since this is Node.js only!):
// Sample input equivalent to the balance.txt contents.
let contents = `User1|421\nUser2|5543\nUser3|12`;

// Parse "user|balance" lines into an array of { user, balance } records.
function getUserBalanceArray(fileContents) {
  return fileContents
    .split('\n')
    .map((line) => line.split("|"))
    .map(([user, balance]) => ({ user, balance: parseFloat(balance) }));
}

// Parse "user|balance" lines into a { user: balance } lookup object.
function getUserBalanceMap(fileContents) {
  const map = {};
  for (const line of fileContents.split('\n')) {
    const [user, balance] = line.split("|");
    map[user] = parseFloat(balance);
  }
  return map;
}

console.log("File contents:\n" + contents);
console.log("Balance array: ", getUserBalanceArray(contents));
console.log("Balance map: ", getUserBalanceMap(contents));
So I've written a function that should query everything in the files inventory of my MongoDB using Mongoose, but it doesn't: while each element is in fact being read, files.push() doesn't seem to have any effect on the array, as the array is still empty.
while i know that console.log() is not an effective way to debug, considering express still does not render the contents of the array, it is in fact not being populated.
yes, it is being called as getAllFiles(Image).
code below:
// Mongoose model bound to the 'files' collection (also set as the module export).
const Image = module.exports = mongoose.model('files', imageSchema);
// BUG (discussed below): collection.find() is asynchronous, so the callback that
// populates `files` runs AFTER the console.log/return at the bottom of this function.
function getAllFiles(collection) {
let files = [];
collection.find({}, (err, buns) => {
// NOTE(review): `err` is ignored here — on a query failure `buns` would be
// undefined and the forEach below would throw.
buns.forEach((bun) => {
let fin = bun.path.replace("public/", "");
files.push(fin);
console.log(fin);
});
});
console.log(files); // executes before the query completes, so it prints []
return files; // always returns the still-empty array
}
terminal output (ignore extraneous outputs):
wildflower :: src/bunnydb » node app.js
(node:23296) DeprecationWarning: current URL string parser is deprecated, and will be removed in a future version. To use the new parser, pass option { useNewUrlParser: true } to MongoClient.connect.running on port 3000
[]
uploads/9160d961-3d9b-4dea-a39c-f79b86647408.jpg
was able to fix by adding a callback as it was running asynchronously:
// Query every document in `collection`, strip the "public/" prefix from each
// path, and deliver the resulting array to `cb` once the query completes.
// Note: the synchronous return value is the not-yet-populated array — use `cb`.
function getAllFiles(collection, cb) {
  let files = [];
  collection.find({}, (err, buns) => {
    console.log('err: ' + err);
    console.log('buns: ' + buns);
    for (const bun of buns) {
      const fin = bun.path.replace("public/", "");
      files.push(fin);
      console.log('data: ' + fin);
    }
    cb(files);
  });
  console.log('arr: ' + files);
  return files;
}
and on invocation the callback argument can be used to do stuff with the files
After retrieving data from database, I would like to compare the old against the new, and then store the new into the variable oldData. I have trouble storing it into the oldData. Am I writing it correctly regarding the scope? It does not seem to be able to store it to oldData. Could you help?
Thanks.
const myDB = require('./dbModel/model');

// Last data point broadcast to clients, used to suppress duplicate updates.
let oldData = {}

// Fetch the newest data point and emit it to all sockets when it differs
// from the previously sent one.
const getApiAndEmit = (socket) => {
  try {
    // Get the latest data point
    myDB.find()
      .limit(1)
      .sort({created_at: 'desc'})
      .then((response)=>{
        const newData = response[0]
        // BUG FIX: `oldData != newData` compared object REFERENCES, which is
        // always true for a freshly fetched document. Compare serialized
        // contents instead so unchanged data is not re-broadcast.
        if (JSON.stringify(oldData) !== JSON.stringify(newData)) {
          // Send update to clients
          io.sockets.emit('Update', response )
          // Store new data
          oldData = newData
        }
      })
      .catch((err)=>{
        console.log(err);
      })
  } catch (err) {
    // BUG FIX: the catch parameter is `err`; the original referenced `error`,
    // which threw a ReferenceError inside the handler.
    console.error(`Error: ${err.code}`);
  }
};
if (odlData != newData) {
This looks like a typo (odlData -> oldData).
You can't compare objects like this. Use lodash.isEqual() or something comparable.