How do I get the last element in a nested database in Firebase?
This is my DB:
There are several ids in it, and for each id, under the bot's msg, I want the last message, which in this case will be:
I am fine.Thank You....
I know the brute-force approach, but it will take a lot of time.
Here is my code:
return ref.child(user).once('value').then((snapshot) => {
    var i = 0;
    snapshot.forEach((snap) => {
        console.log(snap.child('bot').child('msg').msg);
        snap.child('bot').child('msg').forEach((openTicketSnapshot) => {
            var val = openTicketSnapshot.val();
            val.forEach((text) => {
                userMsg2.push({
                    'who': 'User',
                    'msg': text.msg,
                    'time': text.timestamp
                });
            });
        });
    });
    // Here I will access the last index of userMsg2
    var data1 = {
        "data": "found"
    };
    res.json(data1);
    // ...
    return data1;
})
How can I do this efficiently?
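One possible direction (just a sketch, assuming the structure is user → id → bot → msg as in the snippet above, and that the list of ids is available to you): let the database do the filtering with limitToLast(1), so only the last message under each id's bot/msg is fetched instead of the whole history.

// Hypothetical helper, not from the question: "ids" is assumed to be the list
// of child keys under ref.child(user).
function getLastBotMessages(ids) {
    const queries = ids.map((id) =>
        ref.child(user).child(id).child('bot').child('msg')
            .orderByKey().limitToLast(1).once('value')
    );
    return Promise.all(queries).then((snaps) =>
        snaps.map((snap) => {
            let last = null;
            snap.forEach((msgSnap) => { last = msgSnap.val(); });
            return last; // e.g. { msg: 'I am fine.Thank You....', timestamp: ... }
        })
    );
}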
I am getting an error from pg-promise. This is my code snippet:
// create sample payload
let samplePayload = [];
for (let index = 0; index < 10; index++) {
    samplePayload.push({
        id: uuidv1(),
    });
}

return db.task((t) => {
    samplePayload.map(sp => {
        // I want to check if the id exists in the DB
        let sql = `select * from sf_data where id = '${sp.id}'`;
        console.log(sql);
        const sampleBatch = t.any(sql).then(th => {
            // I want to insert it if the data does not exist
            console.log(th);
        });
    })
});
I want to check whether each item in the list already exists in the DB. If it does not exist, I need to insert the data.
I tried to fix my old code and changed it to this:
const sfdata = await t.any('SELECT * FROM sf_data');
const queries = sfdata.map((sp) => {
    return t.oneOrNone('select * from sf_data where id = ${id}', sp).then(result => {
        console.log(result);
        if (result) {
            t.tx(async t2 => {
                return t2.none("insert into test_sf values ($1, $2, $3)", [uuidv1(), result.id, result.sfid]);
            })
        }
    });
});
return t.batch(queries);
but it returns this error:
(node:6547) UnhandledPromiseRejectionWarning: Error: Client was closed and is not queryable
at /Users/dsa/Telkom/dtp-dsa-middleware-sf/node_modules/pg/lib/client.js:570:27
at processTicksAndRejections (internal/process/task_queues.js:75:11)
Any clue about this?
You are missing return in a few places, and your code creates a bunch of loose promises - queries that are trying to execute outside the connection, hence the error.
Here's correct code:
return db.task(t => {
    const queries = samplePayload.map(sp => {
        return t.any('select * from sf_data where id = ${id}', sp);
    });
    return t.batch(queries);
});
Better yet, this can be done with one query:
const ids = samplePayload.map(sp => sp.id);
return db.any('select * from sf_data where id in ($1:csv)', [ids]);
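As a follow-up (not part of the original answer, just a sketch of the insert-if-missing goal, with the target columns guessed from the question): you can run that single existence query inside a task and then insert only the rows that are missing, all on the same connection:

return db.task(async t => {
    const ids = samplePayload.map(sp => sp.id);
    const existing = await t.any('select id from sf_data where id in ($1:csv)', [ids]);
    const existingIds = new Set(existing.map(r => r.id));
    // insert only the payload entries whose id was not found
    const inserts = samplePayload
        .filter(sp => !existingIds.has(sp.id))
        .map(sp => t.none('insert into sf_data(id) values ($1)', [sp.id]));
    return t.batch(inserts);
});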
Using MySQL 8.0 on a node.js server (X DevAPI), I'm trying to get data with
var res = session.sql('SELECT * FROM users').execute(function (row) {
    console.log(row); // [1,'foo','bar']
});
but this returns only an array of values; the column names are missing. Is there a way I can get key-value pairs, as in classic SQL result sets?
i.e. id: 1, fName: 'foo', lName: 'bar'
Right now you would have to construct the key-value pair structure "by hand" using the column metadata available through a second callback parameter on execute().
Something like the following would do the trick:
var result = []

session.sql('SELECT 1 AS id, "foo" AS fName, "bar" AS lName')
    .execute(row => {
        // first callback: called per row with the raw column values
        row.forEach((value, i) => { result[i] = Object.assign({}, result[i], { value }) })
    }, columns => {
        // second callback: called with the column metadata, from which we take the names
        columns.forEach((key, i) => { result[i] = Object.assign({}, result[i], { key: key.getColumnName() }) })
    })
    .then(() => {
        // stitch the names and values back together into a single object
        console.log(result.reduce((res, pair) => Object.assign(res, { [pair.key]: pair.value }), {}))
    })
I have to admit this is convoluted and making it better is already on the radar.
Disclaimer: I'm the X DevAPI connector lead dev.
Use the JSON_OBJECT function in your query.
var res = session.sql("select JSON_OBJECT('id', id, 'name', name) from users").execute(function (row) {
    console.log(row); // {"id": 87, "name": "rui"}
});
This is a workaround that may help someone.
// This is declared outside of the class
const mapTest = (row) => {
    return {
        id: row[0],
        price: row[1]
    };
};

// This is where you get the result
.execute()
    .then(myResult => {
        let myRows = myResult.fetchAll();
        resolve(myRows.map(row => mapTest(row))); // returns {"id":1,"price":200000},{"id":2,"price":300000}
    });
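Not from either answer above, but another direction that may be worth a look: recent versions of the @mysql/xdevapi connector expose the column metadata on the result itself via getColumns(), so the mapping can be built generically instead of hard-coding column positions (the table and column names here are only placeholders):

// inside an async function
const result = await session.sql('SELECT id, price FROM products').execute();
// take the column labels once, then zip each row with them
const names = result.getColumns().map(col => col.getColumnLabel());
const rows = result.fetchAll().map(row =>
    row.reduce((obj, value, i) => Object.assign(obj, { [names[i]]: value }), {}));
console.log(rows); // e.g. [ { id: 1, price: 200000 } ]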
I have a nested JSON where each key's value is another JSON object with its own keys and values. Below is the JSON:
{
    "Testicular Torsion": {
        "What is testicular torsion?": "ABC",
        "Symptoms": "AB",
        "Risks": "AL",
        "Diagnosis": "LK",
        "Treatment": "UY"
    },
    "XYZ": {
        "X": "ABC",
        "Symptoms": "AB",
        "Risks": "AL",
        "Diagnosis": "LK",
        "Treatment": "UY"
    }
}
What I am trying to do is insert this data into Cloud Firestore. Following is the code for that. But the issue is that only the first key-value pair (in this case the "Testicular Torsion" key and its value, which is another JSON object) gets inserted, and not the other key-value pairs. Why is that, and what needs to be changed in the code?
var string_medical_data = JSON.stringify(medical_json);
var json_medical = JSON.parse(string_medical_data);

function abc(poi) {
    firestore.collection('medical').doc(poi).set(json_medical[poi])
        .then(() => {
            return console.log("Added");
        })
        .catch((e => {
            console.log('error: ', e);
            return console.log(e);
        }))
}

exports.medical = functions.https.onRequest((request, response) => {
    var problems = [];
    for (var myKey in json_medical) {
        problems.push(myKey);
        break;
    }
    for (var i = 0; i < problems.length; i++) {
        // firestore.collection('medical').doc(problems[i]).set(json_medical[problems[i]])
        abc(problems[i]);
    }
    response.send({
        'fulfillmentText': `Success!!!`
    });
});
You'd probably be better off doing this as a batched write. You can commit multiple writes in a single request. If your data has more than 500 entries, you'll have to break it up and do 500 at a time.
exports.medical = functions.https.onRequest((request, response) => {
    var batch = firestore.batch();
    for (var myKey in json_medical) {
        var myKeyRef = firestore.collection('medical').doc(myKey);
        batch.set(myKeyRef, json_medical[myKey]);
    }
    batch.commit().then(function () {
        response.send({
            'fulfillmentText': `Success!!!`
        });
    });
});
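If the 500-write limit ever becomes a concern, a rough sketch of the chunking mentioned above (not part of the original answer) could look like this:

exports.medical = functions.https.onRequest(async (request, response) => {
    const keys = Object.keys(json_medical);
    const commits = [];
    // Firestore allows at most 500 writes per batch, so commit in chunks
    for (let i = 0; i < keys.length; i += 500) {
        const batch = firestore.batch();
        keys.slice(i, i + 500).forEach((key) => {
            batch.set(firestore.collection('medical').doc(key), json_medical[key]);
        });
        commits.push(batch.commit());
    }
    await Promise.all(commits);
    response.send({
        'fulfillmentText': `Success!!!`
    });
});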
How can I save all of this JSON into my MongoDB?
Strangely, only the first value is stored every time.
It might be a blocking/non-blocking issue.
json = {
["name":"Karl","id":"azo0"],
["name":"Robert","id":"bdd10"],
["name":"Joan","id":"difj90"],
["name":"Hallyn","id":"fmak88"],
["name":"Michael","id":"vma91"]
};
for (var i = 0; i < json.length; i++) {
    id = json[i].id;
    name = json[i].name;
    var ctx = {"id": id, "name": name};
    db.json_db.count(ctx).exec(function(err, count) {
        if (count < 1) {
            var model = new User({
                "name": json[i].name,
                "id": json[i].id
            });
            model.save(function() {
                console.log("ok" + i);
            });
        }
    });
};
After inserting, all of the documents are filled with ["name":"Karl","id":"azo0"].
Checking console.log("ok"+i), it always prints "ok0", never "ok1", "ok2", "ok3", etc.
How can I prevent this issue?
In case you're using the Async package, this is one way to solve your problem:
async.eachSeries(json, (item, done) => {
    let user = new User({
        "name": item.name,
        "id": item.id
    });
    user.save((err, saved) => {
        if (err) {
            // handle err
        }
        return done();
    });
});
.exec() tells me you're using Mongoose, so your loop can be rewritten as:
const json = [
    {name: "Karl", id: "azo0"},
    {name: "Robert", id: "bdd10"},
    {name: "Joan", id: "difj90"},
    {name: "Hallyn", id: "fmak88"},
    {name: "Michael", id: "vma91"}
];

// inside an async function
for (const item of json) {
    const count = await db.json_db.count(item).exec();
    if (!count) {
        await new User(item).save();
    }
}
Error handling omitted.
See http://exploringjs.com/es2016-es2017/ch_async-functions.html
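As an aside (not part of the answer above, just a sketch): in recent Mongoose versions the count-then-insert pair can be collapsed into a single upsert per item, which also avoids the race between the check and the save:

// inside the same async function
for (const item of json) {
    // creates the document only if no document with this id exists yet
    await User.updateOne(
        { id: item.id },
        { $setOnInsert: { name: item.name } },
        { upsert: true }
    ).exec();
}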
I noticed this strange behaviour where, for a few users in production only, every item gets inserted multiple times into a collection via an asynchronous Meteor call. I tried multiple things, but nothing worked. I can't test this on localhost, because it never happens to me, either on localhost or in production.
I spent the whole night trying to solve this but didn't find a solution. I suspect it's caused by new Date(), but I have to call it somewhere. The production server is in Amsterdam, and it seems to happen only for users located further outside of Europe.
I found this to be a similar issue, but can't really wrap my head around how to implement it - https://github.com/meteor/meteor/issues/4263
The slug is what identical songs are supposed to share.
This is the workflow: the user clicks on a song, which triggers the addNewSong function:
addNewSong = function (track) {
    Globals.current_track = track;
    checkIfSongAlreadySaved();
}
I need to check if the song is already in the collection; if it is, route to it, otherwise create the new song and route to it.
checkIfSongAlreadySaved = function() {
    loadPrimaryGlobalItems();
    Meteor.call('checkIfSongAlreadySaved', Globals.current_song_item_slug, function(error, result) {
        if (result.item) {
            Globals.current_song_item_id = result.item._id;
            Globals.current_song_item_slug = result.item.slug;
            routeToSongPage();
            if (!(result.item.download && result.item.mp3)) {
                downloadSong();
            }
        }
        else {
            loadSecondaryGlobalItems();
            var item = {
                slug: Globals.current_song_item_slug,
                duration: Globals.current_duration,
                thumbnail: Globals.current_song_thumbnail,
                title: Globals.current_cleaned_song,
                album: Globals.current_track.album,
                artist: Globals.current_track.artists[0],
                track: Globals.current_track.name,
                date: result.date,
            }
            Globals.current_song_item_id = Songs.insert(item);
            routeToSongPage();
            downloadSong();
            recentSongItem(result.date);
        }
    });
}
Adding a recent song:
recentSongItem = function (date) {
    Recentsongs.insert({
        slug: Globals.current_song_item_slug,
        songId: Globals.current_song_item_id,
        title: Globals.current_cleaned_song,
        duration: Globals.current_duration,
        date: date,
    });
}
If creating a new song:
downloadSong = function() {
    Meteor.call('findSong', Globals.current_song, function(error, result) {
        console.log(result);
        if (result) {
            Globals.current_song_mp3 = true;
            updateSongItemDownload(result.itemDetails);
        }
        else {
            alert('not found');
        }
    });
}
And updating the song to add the download and mp3 values:
updateSongItemDownload = function(link) {
    Songs.update({
        _id: Globals.current_song_item_id
    }, {
        $set: {
            download: link,
            mp3: Globals.current_song_mp3,
        }
    });
}
Server methods:
Meteor.methods({
    checkIfSongAlreadySaved: function(slug) {
        return {item: Songs.findOne({slug: slug}), date: new Date()};
    },
    findSong: function(song) {
        ServerGlobals.current_song = song;
        var result = searchSite();
        return result;
    },
});
EDIT:
This is my subscription, just in case it might be causing the problem:
Template.songPage.onCreated(function() {
    Session.set('processing', true);
    var self = this;
    self.autorun(function() {
        var id = Router.current().params.id;
        self.subscribe('singleSong', id);
        var item = Songs.findOne({_id: id});
        if (item) {
            if (item.download) {
                createSong(item.download);
            }
            else if (item.download === false) {
                console.log('item not found');
            }
            Session.set('loader', false);
            Session.set('processing', false);
        }
    });
});

Meteor.publish('singleSong', function(id) {
    check(id, String);
    return Songs.find({_id: id});
});
You can apply a unique index to the slug field to ensure the same slug can only exist once. The second insert will then fail and show up as an error in your callback, which you can discard or surface to the user as you see fit.
db.collection.createIndex( { slug: 1 }, { unique: true } )
You will need to clear or modify the slug on the duplicates in the DB before applying the index, though.
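For completeness (a sketch adapting the question's insert, not part of the original answer): once the unique index is in place, the rejected duplicate surfaces as an error in the insert callback, which you can catch and handle:

Globals.current_song_item_id = Songs.insert(item, function (error) {
    if (error) {
        // most likely the E11000 duplicate key error from the unique slug index:
        // the song already exists, so just route to the existing document instead
        console.log('Insert rejected, song already saved:', error);
    }
});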